[chore]: Bump github.com/gin-contrib/cors from 1.4.0 to 1.5.0 (#2388)

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

commit 66b77acb1c, parent e4e0a5e3f6
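This is an automated dependency update. Besides go.mod and go.sum, the commit refreshes the vendored copy of github.com/bytedance/sonic, whose selected version moves from v1.9.1 to v1.10.1 as part of the same module-graph update. A bump like this can usually be reproduced locally with the standard Go toolchain — roughly `go get github.com/gin-contrib/cors@v1.5.0` followed by `go mod tidy` and `go mod vendor` — though the exact commands are not recorded in the commit and are only a sketch.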
go.mod (13 lines changed)

@@ -24,7 +24,7 @@ require (
 github.com/buckket/go-blurhash v1.1.0
 github.com/coreos/go-oidc/v3 v3.7.0
 github.com/disintegration/imaging v1.6.2
-github.com/gin-contrib/cors v1.4.0
+github.com/gin-contrib/cors v1.5.0
 github.com/gin-contrib/gzip v0.0.6
 github.com/gin-contrib/sessions v0.0.5
 github.com/gin-gonic/gin v1.9.1
@@ -86,10 +86,11 @@ require (
 codeberg.org/gruf/go-maps v1.0.3 // indirect
 github.com/aymerick/douceur v0.2.0 // indirect
 github.com/beorn7/perks v1.0.1 // indirect
-github.com/bytedance/sonic v1.9.1 // indirect
+github.com/bytedance/sonic v1.10.1 // indirect
 github.com/cenkalti/backoff/v4 v4.2.1 // indirect
 github.com/cespare/xxhash/v2 v2.2.0 // indirect
-github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
+github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
+github.com/chenzhuoyu/iasm v0.9.0 // indirect
 github.com/cilium/ebpf v0.9.1 // indirect
 github.com/containerd/cgroups/v3 v3.0.1 // indirect
 github.com/coreos/go-systemd/v22 v22.3.2 // indirect
@@ -112,7 +113,7 @@ require (
 github.com/go-logr/stdr v1.2.2 // indirect
 github.com/go-playground/locales v0.14.1 // indirect
 github.com/go-playground/universal-translator v0.18.1 // indirect
-github.com/go-playground/validator/v10 v10.14.1 // indirect
+github.com/go-playground/validator/v10 v10.15.5 // indirect
 github.com/go-xmlfmt/xmlfmt v0.0.0-20211206191508-7fd73a941850 // indirect
 github.com/goccy/go-json v0.10.2 // indirect
 github.com/godbus/dbus/v5 v5.0.4 // indirect
@@ -143,7 +144,7 @@ require (
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 github.com/modern-go/reflect2 v1.0.2 // indirect
 github.com/opencontainers/runtime-spec v1.0.2 // indirect
-github.com/pelletier/go-toml/v2 v2.0.8 // indirect
+github.com/pelletier/go-toml/v2 v2.1.0 // indirect
 github.com/pkg/errors v0.9.1 // indirect
 github.com/pmezard/go-difflib v1.0.0 // indirect
 github.com/prometheus/client_model v0.5.0 // indirect
@@ -169,7 +170,7 @@ require (
 go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 // indirect
 go.opentelemetry.io/otel/metric v1.21.0 // indirect
 go.opentelemetry.io/proto/otlp v1.0.0 // indirect
-golang.org/x/arch v0.3.0 // indirect
+golang.org/x/arch v0.5.0 // indirect
 golang.org/x/mod v0.12.0 // indirect
 golang.org/x/sync v0.3.0 // indirect
 golang.org/x/sys v0.14.0 // indirect
go.sum (29 lines changed)

@@ -96,16 +96,20 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r
github.com/buckket/go-blurhash v1.1.0 h1:X5M6r0LIvwdvKiUtiNcRL2YlmOfMzYobI3VCKCZc9Do=
github.com/buckket/go-blurhash v1.1.0/go.mod h1:aT2iqo5W9vu9GpyoLErKfTHwgODsZp3bQfXjXJUxNb8=
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
github.com/bytedance/sonic v1.10.1 h1:7a1wuFXL1cMy7a3f7/VFcEtriuXQnUBhtoVfOZiaysc=
github.com/bytedance/sonic v1.10.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
@@ -176,8 +180,8 @@ github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
github.com/gavv/httpexpect v2.0.0+incompatible h1:1X9kcRshkSKEjNJJxX9Y9mQ5BRfbxU5kORdjhlA1yX8=
github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc=
github.com/gin-contrib/cors v1.4.0 h1:oJ6gwtUl3lqV0WEIwM/LxPF1QZ5qe2lGWdY2+bz7y0g=
github.com/gin-contrib/cors v1.4.0/go.mod h1:bs9pNM0x/UsmHPBWT2xZz9ROh8xYjYkiURUfmBoMlcs=
github.com/gin-contrib/cors v1.5.0 h1:DgGKV7DDoOn36DFkNtbHrjoRiT5ExCe+PC9/xp7aKvk=
github.com/gin-contrib/cors v1.5.0/go.mod h1:TvU7MAZ3EwrPLI2ztzTt3tqgvBCq+wn8WpZmfADjupI=
github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4=
github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk=
github.com/gin-contrib/sessions v0.0.5 h1:CATtfHmLMQrMNpJRgzjWXD7worTh7g7ritsQfmF+0jE=
@@ -216,8 +220,8 @@ github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
github.com/go-playground/validator/v10 v10.14.1 h1:9c50NUPC30zyuKprjL3vNZ0m5oG+jU0zvx4AqHGnv4k=
github.com/go-playground/validator/v10 v10.14.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24=
github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-session/session v3.1.2+incompatible/go.mod h1:8B3iivBQjrz/JtC68Np2T1yBBLxTan3mn/3OM0CyRt0=
github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM=
github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
@@ -369,6 +373,7 @@ github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa02
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
@@ -428,8 +433,8 @@ github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/
github.com/orcaman/writerseeker v0.0.0-20200621085525-1d3f536ff85e h1:s2RNOM/IGdY0Y6qfTeUKhDawdHDpK9RGBdx80qN4Ttw=
github.com/orcaman/writerseeker v0.0.0-20200621085525-1d3f536ff85e/go.mod h1:nBdnFKj15wFbf94Rwfq4m30eAcyY9V/IyKAGQFtqkW0=
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -493,7 +498,6 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8=
@@ -620,8 +624,8 @@ go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnw
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y=
golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -1027,6 +1031,7 @@ modernc.org/z v1.7.3 h1:zDJf6iHjrnB+WRD88stbXokugjyc0/pB91ri1gO6LZY=
modernc.org/z v1.7.3/go.mod h1:Ipv4tsdxZRbQyLq9Q1M6gdbkxYzdlrciF2Hi/lS7nWE=
mvdan.cc/xurls/v2 v2.5.0 h1:lyBNOm8Wo71UknhUs4QTFUNNMyxy2JEIaKKo0RWOh+8=
mvdan.cc/xurls/v2 v2.5.0/go.mod h1:yQgaGQ1rFtJUzkmKiHYSSfuQxqfYmd//X6PxvholpeE=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
vendor/github.com/bytedance/sonic/Makefile (15 lines changed, generated, vendored)

@@ -23,12 +23,12 @@ CPU_avx := amd64
CPU_avx2 := amd64
CPU_sse := amd64

TMPL_avx := fastint_amd64_test fastfloat_amd64_test native_amd64_test native_export_amd64
TMPL_avx2 := fastint_amd64_test fastfloat_amd64_test native_amd64_test native_export_amd64
TMPL_sse := fastint_amd64_test fastfloat_amd64_test native_amd64_test native_export_amd64
TMPL_avx := fastint_amd64_test fastfloat_amd64_test native_amd64_test recover_amd64_test
TMPL_avx2 := fastint_amd64_test fastfloat_amd64_test native_amd64_test recover_amd64_test
TMPL_sse := fastint_amd64_test fastfloat_amd64_test native_amd64_test recover_amd64_test

CFLAGS_avx := -msse -mno-sse4 -mavx -mpclmul -mno-avx2 -DUSE_AVX=1 -DUSE_AVX2=0
CFLAGS_avx2 := -msse -mno-sse4 -mavx -mpclmul -mavx2 -DUSE_AVX=1 -DUSE_AVX2=1
CFLAGS_avx := -msse -mno-sse4 -mavx -mpclmul -mno-avx2 -mstack-alignment=0 -DUSE_AVX=1 -DUSE_AVX2=0
CFLAGS_avx2 := -msse -mno-sse4 -mavx -mpclmul -mavx2 -mstack-alignment=0 -DUSE_AVX=1 -DUSE_AVX2=1
CFLAGS_sse := -msse -mno-sse4 -mno-avx -mno-avx2 -mpclmul

CC_amd64 := clang
@@ -66,7 +66,7 @@ define build_arch
$(eval @cpu := $(value CPU_$(1)))
$(eval @deps := $(foreach tmpl,$(value TMPL_$(1)),${OUT_DIR}/$(1)/${tmpl}.go))
$(eval @asmin := ${TMP_DIR}/$(1)/native.s)
$(eval @asmout := ${OUT_DIR}/$(1)/native_${@cpu}.s)
$(eval @asmout := ${OUT_DIR}/$(1)/native_text_${@cpu}.go)
$(eval @stubin := ${OUT_DIR}/native_${@cpu}.tmpl)
$(eval @stubout := ${OUT_DIR}/$(1)/native_${@cpu}.go)

@@ -75,8 +75,7 @@ $(1): ${@asmout} ${@deps}
${@asmout}: ${@stubout} ${NATIVE_SRC}
mkdir -p ${TMP_DIR}/$(1)
$${CC_${@cpu}} $${CFLAGS} $${CFLAGS_$(1)} -S -o ${TMP_DIR}/$(1)/native.s ${SRC_FILE}
python3 $${ASM2ASM_${@cpu}} ${@asmout} ${TMP_DIR}/$(1)/native.s
asmfmt -w ${@asmout}
python3 $${ASM2ASM_${@cpu}} -r ${@stubout} ${TMP_DIR}/$(1)/native.s

$(eval $(call \
build_tmpl, \
vendor/github.com/bytedance/sonic/README.md (55 lines changed, generated, vendored)

@@ -5,8 +5,8 @@ English | [中文](README_ZH_CN.md)
A blazingly fast JSON serializing & deserializing library, accelerated by JIT (just-in-time compiling) and SIMD (single-instruction-multiple-data).

## Requirement
- Go 1.15~1.20
- Linux/MacOS/Windows
- Go 1.16~1.21
- Linux / MacOS / Windows(need go1.17 above)
- Amd64 ARCH

## Features
@@ -76,13 +76,17 @@ BenchmarkSetOne_Jsoniter-16 79475 ns/op 163.8
BenchmarkSetOne_Parallel_Sonic-16 850.9 ns/op 15305.31 MB/s 1584 B/op 17 allocs/op
BenchmarkSetOne_Parallel_Sjson-16 18194 ns/op 715.77 MB/s 52247 B/op 9 allocs/op
BenchmarkSetOne_Parallel_Jsoniter-16 33560 ns/op 388.05 MB/s 45892 B/op 964 allocs/op
BenchmarkLoadNode/LoadAll()-16 11384 ns/op 1143.93 MB/s 6307 B/op 25 allocs/op
BenchmarkLoadNode_Parallel/LoadAll()-16 5493 ns/op 2370.68 MB/s 7145 B/op 25 allocs/op
BenchmarkLoadNode/Interface()-16 17722 ns/op 734.85 MB/s 13323 B/op 88 allocs/op
BenchmarkLoadNode_Parallel/Interface()-16 10330 ns/op 1260.70 MB/s 15178 B/op 88 allocs/op
```
- [Small](https://github.com/bytedance/sonic/blob/main/testdata/small.go) (400B, 11 keys, 3 layers)
![small benchmarks](./docs/imgs/bench-small.png)
- [Large](https://github.com/bytedance/sonic/blob/main/testdata/twitter.json) (635KB, 10000+ key, 6 layers)
![large benchmarks](./docs/imgs/bench-large.png)

See [bench.sh](https://github.com/bytedance/sonic/blob/main/bench.sh) for benchmark codes.
See [bench.sh](https://github.com/bytedance/sonic/blob/main/scripts/bench.sh) for benchmark codes.

## How it works
See [INTRODUCTION.md](./docs/INTRODUCTION.md).
@@ -282,6 +286,42 @@ println(string(buf) == string(exp)) // true
- iteration: `Values()`, `Properties()`, `ForEach()`, `SortKeys()`
- modification: `Set()`, `SetByIndex()`, `Add()`

### Ast.Visitor
Sonic provides an advanced API for fully parsing JSON into non-standard types (neither `struct` not `map[string]interface{}`) without using any intermediate representation (`ast.Node` or `interface{}`). For example, you might have the following types which are like `interface{}` but actually not `interface{}`:
```go
type UserNode interface {}

// the following types implement the UserNode interface.
type (
UserNull struct{}
UserBool struct{ Value bool }
UserInt64 struct{ Value int64 }
UserFloat64 struct{ Value float64 }
UserString struct{ Value string }
UserObject struct{ Value map[string]UserNode }
UserArray struct{ Value []UserNode }
)
```
Sonic provides the following API to return **the preorder traversal of a JSON AST**. The `ast.Visitor` is a SAX style interface which is used in some C++ JSON library. You should implement `ast.Visitor` by yourself and pass it to `ast.Preorder()` method. In your visitor you can make your custom types to represent JSON values. There may be an O(n) space container (such as stack) in your visitor to record the object / array hierarchy.
```go
func Preorder(str string, visitor Visitor, opts *VisitorOptions) error

type Visitor interface {
OnNull() error
OnBool(v bool) error
OnString(v string) error
OnInt64(v int64, n json.Number) error
OnFloat64(v float64, n json.Number) error
OnObjectBegin(capacity int) error
OnObjectKey(key string) error
OnObjectEnd() error
OnArrayBegin(capacity int) error
OnArrayEnd() error
}
```

See [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) for detailed usage. We also implement a demo visitor for `UserNode` in [ast/visitor_test.go](https://github.com/bytedance/sonic/blob/main/ast/visitor_test.go).

## Compatibility
Sonic **DOES NOT** ensure to support all environments, due to the difficulty of developing high-performance codes. For developers who use sonic to build their applications in different environments, we have the following suggestions:

@@ -358,5 +398,14 @@ Why? Because `ast.Node` stores its children using `array`:

**CAUTION:** `ast.Node` **DOESN'T** ensure concurrent security directly, due to its **lazy-load** design. However, you can call `Node.Load()`/`Node.LoadAll()` to achieve that, which may bring performance reduction while it still works faster than converting to `map` or `interface{}`

### Ast.Node or Ast.Visitor?
For generic data, `ast.Node` should be enough for your needs in most cases.

However, `ast.Node` is designed for partially processing JSON string. It has some special designs such as lazy-load which might not be suitable for directly parsing the whole JSON string like `Unmarshal()`. Although `ast.Node` is better then `map` or `interface{}`, it's also a kind of intermediate representation after all if your final types are customized and you have to convert the above types to your custom types after parsing.

For better performance, in previous case the `ast.Visitor` will be the better choice. It performs JSON decoding like `Unmarshal()` and you can directly use your final types to represents a JSON AST without any intermediate representations.

But `ast.Visitor` is not a very handy API. You might need to write a lot of code to implement your visitor and carefully maintain the tree hierarchy during decoding. Please read the comments in [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) carefully if you decide to use this API.

## Community
Sonic is a subproject of [CloudWeGo](https://www.cloudwego.io/). We are committed to building a cloud native ecosystem.
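Editor's note: the `Ast.Visitor` section added to the vendored README above documents a SAX-style decoding interface. As a minimal sketch of how such a visitor might be implemented — written against the `Visitor` interface shown in the hunk above; the counter type, its field names, and the sample JSON are illustrative, and passing `nil` for `*VisitorOptions` is assumed to be acceptable — a visitor that simply counts scalar values could look like this:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/bytedance/sonic/ast"
)

// valueCounter is a hypothetical ast.Visitor that counts scalar JSON values.
type valueCounter struct {
	scalars int
}

func (c *valueCounter) OnNull() error                            { c.scalars++; return nil }
func (c *valueCounter) OnBool(v bool) error                      { c.scalars++; return nil }
func (c *valueCounter) OnString(v string) error                  { c.scalars++; return nil }
func (c *valueCounter) OnInt64(v int64, n json.Number) error     { c.scalars++; return nil }
func (c *valueCounter) OnFloat64(v float64, n json.Number) error { c.scalars++; return nil }
func (c *valueCounter) OnObjectBegin(capacity int) error         { return nil }
func (c *valueCounter) OnObjectKey(key string) error             { return nil }
func (c *valueCounter) OnObjectEnd() error                       { return nil }
func (c *valueCounter) OnArrayBegin(capacity int) error          { return nil }
func (c *valueCounter) OnArrayEnd() error                        { return nil }

func main() {
	var c valueCounter
	// Preorder walks the JSON text and calls the visitor for each node.
	if err := ast.Preorder(`{"a": 1, "b": [true, "x"]}`, &c, nil); err != nil {
		panic(err)
	}
	fmt.Println(c.scalars) // 3 scalar values: 1, true, "x"
}
```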
vendor/github.com/bytedance/sonic/README_ZH_CN.md (54 lines changed, generated, vendored)
|
@ -6,8 +6,8 @@
|
|||
|
||||
## 依赖
|
||||
|
||||
- Go 1.15~1.20
|
||||
- Linux/MacOS/Windows
|
||||
- Go 1.16~1.21
|
||||
- Linux / MacOS / Windows(需要 Go1.17 以上)
|
||||
- Amd64 架构
|
||||
|
||||
## 特色
|
||||
|
@ -79,13 +79,17 @@ BenchmarkSetOne_Jsoniter-16 79475 ns/op 163.8
|
|||
BenchmarkSetOne_Parallel_Sonic-16 850.9 ns/op 15305.31 MB/s 1584 B/op 17 allocs/op
|
||||
BenchmarkSetOne_Parallel_Sjson-16 18194 ns/op 715.77 MB/s 52247 B/op 9 allocs/op
|
||||
BenchmarkSetOne_Parallel_Jsoniter-16 33560 ns/op 388.05 MB/s 45892 B/op 964 allocs/op
|
||||
BenchmarkLoadNode/LoadAll()-16 11384 ns/op 1143.93 MB/s 6307 B/op 25 allocs/op
|
||||
BenchmarkLoadNode_Parallel/LoadAll()-16 5493 ns/op 2370.68 MB/s 7145 B/op 25 allocs/op
|
||||
BenchmarkLoadNode/Interface()-16 17722 ns/op 734.85 MB/s 13323 B/op 88 allocs/op
|
||||
BenchmarkLoadNode_Parallel/Interface()-16 10330 ns/op 1260.70 MB/s 15178 B/op 88 allocs/op
|
||||
```
|
||||
- [小型](https://github.com/bytedance/sonic/blob/main/testdata/small.go) (400B, 11 个键, 3 层)
|
||||
![small benchmarks](./docs/imgs/bench-small.png)
|
||||
- [大型](https://github.com/bytedance/sonic/blob/main/testdata/twitter.json) (635kB, 10000+ 个键, 6 层)
|
||||
![large benchmarks](./docs/imgs/bench-large.png)
|
||||
|
||||
要查看基准测试代码,请参阅 [bench.sh](https://github.com/bytedance/sonic/blob/main/bench.sh) 。
|
||||
要查看基准测试代码,请参阅 [bench.sh](https://github.com/bytedance/sonic/blob/main/scripts/bench.sh) 。
|
||||
|
||||
## 工作原理
|
||||
|
||||
|
@ -298,6 +302,41 @@ println(string(buf) == string(exp)) // true
|
|||
- 迭代: `Values()`, `Properties()`, `ForEach()`, `SortKeys()`
|
||||
- 修改: `Set()`, `SetByIndex()`, `Add()`
|
||||
|
||||
### `Ast.Visitor`
|
||||
Sonic 提供了一个高级的 API 用于直接全量解析 JSON 到非标准容器里 (既不是 `struct` 也不是 `map[string]interface{}`) 且不需要借助任何中间表示 (`ast.Node` 或 `interface{}`)。举个例子,你可能定义了下述的类型,它们看起来像 `interface{}`,但实际上并不是:
|
||||
```go
|
||||
type UserNode interface {}
|
||||
|
||||
// the following types implement the UserNode interface.
|
||||
type (
|
||||
UserNull struct{}
|
||||
UserBool struct{ Value bool }
|
||||
UserInt64 struct{ Value int64 }
|
||||
UserFloat64 struct{ Value float64 }
|
||||
UserString struct{ Value string }
|
||||
UserObject struct{ Value map[string]UserNode }
|
||||
UserArray struct{ Value []UserNode }
|
||||
)
|
||||
```
|
||||
Sonic 提供了下述的 API 来返回 **“对 JSON AST 的前序遍历”**。`ast.Visitor` 是一个 SAX 风格的接口,这在某些 C++ 的 JSON 解析库中被使用到。你需要自己实现一个 `ast.Visitor`,将它传递给 `ast.Preorder()` 方法。在你的实现中你可以使用自定义的类型来表示 JSON 的值。在你的 `ast.Visitor` 中,可能需要有一个 O(n) 空间复杂度的容器(比如说栈)来记录 object / array 的层级。
|
||||
```go
|
||||
func Preorder(str string, visitor Visitor, opts *VisitorOptions) error
|
||||
|
||||
type Visitor interface {
|
||||
OnNull() error
|
||||
OnBool(v bool) error
|
||||
OnString(v string) error
|
||||
OnInt64(v int64, n json.Number) error
|
||||
OnFloat64(v float64, n json.Number) error
|
||||
OnObjectBegin(capacity int) error
|
||||
OnObjectKey(key string) error
|
||||
OnObjectEnd() error
|
||||
OnArrayBegin(capacity int) error
|
||||
OnArrayEnd() error
|
||||
}
|
||||
```
|
||||
详细用法参看 [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go),我们还为 `UserNode` 实现了一个示例 `ast.Visitor`,你可以在 [ast/visitor_test.go](https://github.com/bytedance/sonic/blob/main/ast/visitor_test.go) 中找到它。
|
||||
|
||||
## 兼容性
|
||||
由于开发高性能代码的困难性, Sonic **不**保证对所有环境的支持。对于在不同环境中使用 Sonic 构建应用程序的开发者,我们有以下建议:
|
||||
|
||||
|
@ -377,6 +416,15 @@ go someFunc(user)
|
|||
|
||||
**注意**:由于 `ast.Node` 的惰性加载设计,其**不能**直接保证并发安全性,但你可以调用 `Node.Load()` / `Node.LoadAll()` 来实现并发安全。尽管可能会带来性能损失,但仍比转换成 `map` 或 `interface{}` 更为高效。
|
||||
|
||||
### 使用 `ast.Node` 还是 `ast.Visitor`?
|
||||
对于泛型数据的解析,`ast.Node` 在大多数场景上应该能够满足你的需求。
|
||||
|
||||
然而,`ast.Node` 是一种针对部分解析 JSON 而设计的泛型容器,它包含一些特殊设计,比如惰性加载,如果你希望像 `Unmarshal()` 那样直接解析整个 JSON,这些设计可能并不合适。尽管 `ast.Node` 相较于 `map` 或 `interface{}` 来说是更好的一种泛型容器,但它毕竟也是一种中间表示,如果你的最终类型是自定义的,你还得在解析完成后将上述类型转化成你自定义的类型。
|
||||
|
||||
在上述场景中,如果想要有更极致的性能,`ast.Visitor` 会是更好的选择。它采用和 `Unmarshal()` 类似的形式解析 JSON,并且你可以直接使用你的最终类型去表示 JSON AST,而不需要经过额外的任何中间表示。
|
||||
|
||||
但是,`ast.Visitor` 并不是一个很易用的 API。你可能需要写大量的代码去实现自己的 `ast.Visitor`,并且需要在解析过程中仔细维护树的层级。如果你决定要使用这个 API,请先仔细阅读 [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) 中的注释。
|
||||
|
||||
## 社区
|
||||
|
||||
Sonic 是 [CloudWeGo](https://www.cloudwego.io/) 下的一个子项目。我们致力于构建云原生生态系统。
|
||||
|
|
vendor/github.com/bytedance/sonic/api.go (10 lines changed, generated, vendored)

@@ -69,6 +69,10 @@ type Config struct {
// ValidateString indicates decoder and encoder to valid string values: decoder will return errors
// when unescaped control chars(\u0000-\u001f) in the string value of JSON.
ValidateString bool

// NoValidateJSONMarshaler indicates that the encoder should not validate the output string
// after encoding the JSONMarshaler to JSON.
NoValidateJSONMarshaler bool
}

var (
@@ -87,6 +91,7 @@ type Config struct {
// ConfigFastest is the fastest config of APIs, aiming at speed.
ConfigFastest = Config{
NoQuoteTextMarshaler: true,
NoValidateJSONMarshaler: true,
}.Froze()
)

@@ -184,3 +189,8 @@ func Get(src []byte, path ...interface{}) (ast.Node, error) {
func GetFromString(src string, path ...interface{}) (ast.Node, error) {
return ast.NewSearcher(src).GetByPath(path...)
}

// Valid reports whether data is a valid JSON encoding.
func Valid(data []byte) bool {
return ConfigDefault.Valid(data)
}
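Editor's note: for context on the `NoValidateJSONMarshaler` option and the new package-level `Valid` helper added above, here is a hedged usage sketch (the option values and sample data are chosen for illustration only, not taken from the commit):

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

func main() {
	// Build a frozen API with the new option; skipping validation of
	// json.Marshaler output trades a safety check for encoding speed.
	api := sonic.Config{
		NoQuoteTextMarshaler:    true,
		NoValidateJSONMarshaler: true,
	}.Froze()

	buf, err := api.Marshal(map[string]int{"answer": 42})
	if err != nil {
		panic(err)
	}
	// Valid is the helper introduced in this hunk.
	fmt.Println(string(buf), sonic.Valid(buf)) // {"answer":42} true
}
```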
vendor/github.com/bytedance/sonic/ast/api_amd64.go (10 lines changed, generated, vendored)

@@ -1,4 +1,4 @@
// +build amd64,go1.15,!go1.21
// +build amd64,go1.16,!go1.22

/*
* Copyright 2022 ByteDance Inc.
@@ -87,7 +87,13 @@ func encodeBase64(src []byte) string {

func (self *Parser) decodeValue() (val types.JsonState) {
sv := (*rt.GoString)(unsafe.Pointer(&self.s))
self.p = native.Value(sv.Ptr, sv.Len, self.p, &val, 0)
flag := types.F_USE_NUMBER
if self.dbuf != nil {
flag = 0
val.Dbuf = self.dbuf
val.Dcap = types.MaxDigitNums
}
self.p = native.Value(sv.Ptr, sv.Len, self.p, &val, uint64(flag))
return
}

vendor/github.com/bytedance/sonic/ast/api_compat.go (8 lines changed, generated, vendored)

@@ -1,4 +1,4 @@
// +build !amd64 go1.21
// +build !amd64 !go1.16 go1.22

/*
* Copyright 2022 ByteDance Inc.
@@ -27,6 +27,10 @@
`github.com/bytedance/sonic/internal/rt`
)

func init() {
println("WARNING: sonic only supports Go1.16~1.20 && CPU amd64, but your environment is not suitable")
}

func quote(buf *[]byte, val string) {
quoteString(buf, val)
}
@@ -49,7 +53,7 @@ func encodeBase64(src []byte) string {
}

func (self *Parser) decodeValue() (val types.JsonState) {
e, v := decodeValue(self.s, self.p)
e, v := decodeValue(self.s, self.p, self.dbuf == nil)
if e < 0 {
return v
}
vendor/github.com/bytedance/sonic/ast/buffer.go (new file, 329 lines, generated, vendored)
|
@ -0,0 +1,329 @@
|
|||
/**
|
||||
* Copyright 2023 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
`sort`
|
||||
`unsafe`
|
||||
)
|
||||
|
||||
type nodeChunk [_DEFAULT_NODE_CAP]Node
|
||||
|
||||
type linkedNodes struct {
|
||||
head nodeChunk
|
||||
tail []*nodeChunk
|
||||
size int
|
||||
}
|
||||
|
||||
func (self *linkedNodes) Cap() int {
|
||||
if self == nil {
|
||||
return 0
|
||||
}
|
||||
return (len(self.tail)+1)*_DEFAULT_NODE_CAP
|
||||
}
|
||||
|
||||
func (self *linkedNodes) Len() int {
|
||||
if self == nil {
|
||||
return 0
|
||||
}
|
||||
return self.size
|
||||
}
|
||||
|
||||
func (self *linkedNodes) At(i int) (*Node) {
|
||||
if self == nil {
|
||||
return nil
|
||||
}
|
||||
if i >= 0 && i<self.size && i < _DEFAULT_NODE_CAP {
|
||||
return &self.head[i]
|
||||
} else if i >= _DEFAULT_NODE_CAP && i<self.size {
|
||||
a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
|
||||
if a < len(self.tail) {
|
||||
return &self.tail[a][b]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self *linkedNodes) Add(v Node) {
|
||||
if self.size < _DEFAULT_NODE_CAP {
|
||||
self.head[self.size] = v
|
||||
self.size++
|
||||
return
|
||||
}
|
||||
|
||||
a, b, c := self.size/_DEFAULT_NODE_CAP-1 , self.size%_DEFAULT_NODE_CAP, cap(self.tail)
|
||||
if a - c >= 0 {
|
||||
c += 1 + c>>_APPEND_GROW_SHIFT
|
||||
tmp := make([]*nodeChunk, a + 1, c)
|
||||
copy(tmp, self.tail)
|
||||
self.tail = tmp
|
||||
} else if a >= len(self.tail) {
|
||||
self.tail = self.tail[:a+1]
|
||||
}
|
||||
|
||||
var n = &self.tail[a]
|
||||
if *n == nil {
|
||||
*n = new(nodeChunk)
|
||||
}
|
||||
(*n)[b] = v
|
||||
self.size++
|
||||
}
|
||||
|
||||
func (self *linkedNodes) ToSlice(con []Node) {
|
||||
if len(con) < self.size {
|
||||
return
|
||||
}
|
||||
i := (self.size-1)
|
||||
a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
|
||||
if a < 0 {
|
||||
copy(con, self.head[:b+1])
|
||||
return
|
||||
} else {
|
||||
copy(con, self.head[:])
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
|
||||
for i:=0; i<a; i++ {
|
||||
copy(con, self.tail[i][:])
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
copy(con, self.tail[a][:b+1])
|
||||
}
|
||||
|
||||
func (self *linkedNodes) FromSlice(con []Node) {
|
||||
self.size = len(con)
|
||||
i := self.size-1
|
||||
a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
|
||||
if a < 0 {
|
||||
copy(self.head[:b+1], con)
|
||||
return
|
||||
} else {
|
||||
copy(self.head[:], con)
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
|
||||
if cap(self.tail) <= a {
|
||||
c := (a+1) + (a+1)>>_APPEND_GROW_SHIFT
|
||||
self.tail = make([]*nodeChunk, a+1, c)
|
||||
}
|
||||
self.tail = self.tail[:a+1]
|
||||
|
||||
for i:=0; i<a; i++ {
|
||||
self.tail[i] = new(nodeChunk)
|
||||
copy(self.tail[i][:], con)
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
|
||||
self.tail[a] = new(nodeChunk)
|
||||
copy(self.tail[a][:b+1], con)
|
||||
}
|
||||
|
||||
type pairChunk [_DEFAULT_NODE_CAP]Pair
|
||||
|
||||
type linkedPairs struct {
|
||||
head pairChunk
|
||||
tail []*pairChunk
|
||||
size int
|
||||
}
|
||||
|
||||
func (self *linkedPairs) Cap() int {
|
||||
if self == nil {
|
||||
return 0
|
||||
}
|
||||
return (len(self.tail)+1)*_DEFAULT_NODE_CAP
|
||||
}
|
||||
|
||||
func (self *linkedPairs) Len() int {
|
||||
if self == nil {
|
||||
return 0
|
||||
}
|
||||
return self.size
|
||||
}
|
||||
|
||||
func (self *linkedPairs) At(i int) *Pair {
|
||||
if self == nil {
|
||||
return nil
|
||||
}
|
||||
if i >= 0 && i < _DEFAULT_NODE_CAP && i<self.size {
|
||||
return &self.head[i]
|
||||
} else if i >= _DEFAULT_NODE_CAP && i<self.size {
|
||||
a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
|
||||
if a < len(self.tail) {
|
||||
return &self.tail[a][b]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self *linkedPairs) Add(v Pair) {
|
||||
if self.size < _DEFAULT_NODE_CAP {
|
||||
self.head[self.size] = v
|
||||
self.size++
|
||||
return
|
||||
}
|
||||
|
||||
a, b, c := self.size/_DEFAULT_NODE_CAP-1 , self.size%_DEFAULT_NODE_CAP, cap(self.tail)
|
||||
if a - c >= 0 {
|
||||
c += 1 + c>>_APPEND_GROW_SHIFT
|
||||
tmp := make([]*pairChunk, a + 1, c)
|
||||
copy(tmp, self.tail)
|
||||
self.tail = tmp
|
||||
} else if a >= len(self.tail) {
|
||||
self.tail = self.tail[:a+1]
|
||||
}
|
||||
|
||||
var n = &self.tail[a]
|
||||
if *n == nil {
|
||||
*n = new(pairChunk)
|
||||
}
|
||||
(*n)[b] = v
|
||||
self.size++
|
||||
}
|
||||
|
||||
// linear search
|
||||
func (self *linkedPairs) Get(key string) (*Pair, int) {
|
||||
for i:=0; i<self.size; i++ {
|
||||
if n := self.At(i); n.Key == key {
|
||||
return n, i
|
||||
}
|
||||
}
|
||||
return nil, -1
|
||||
}
|
||||
|
||||
func (self *linkedPairs) ToSlice(con []Pair) {
|
||||
if len(con) < self.size {
|
||||
return
|
||||
}
|
||||
i := self.size-1
|
||||
a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
|
||||
|
||||
if a < 0 {
|
||||
copy(con, self.head[:b+1])
|
||||
return
|
||||
} else {
|
||||
copy(con, self.head[:])
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
|
||||
for i:=0; i<a; i++ {
|
||||
copy(con, self.tail[i][:])
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
copy(con, self.tail[a][:b+1])
|
||||
}
|
||||
|
||||
func (self *linkedPairs) ToMap(con map[string]Node) {
|
||||
for i:=0; i<self.size; i++ {
|
||||
n := self.At(i)
|
||||
con[n.Key] = n.Value
|
||||
}
|
||||
}
|
||||
|
||||
func (self *linkedPairs) FromSlice(con []Pair) {
|
||||
self.size = len(con)
|
||||
i := self.size-1
|
||||
a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
|
||||
if a < 0 {
|
||||
copy(self.head[:b+1], con)
|
||||
return
|
||||
} else {
|
||||
copy(self.head[:], con)
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
|
||||
if cap(self.tail) <= a {
|
||||
c := (a+1) + (a+1)>>_APPEND_GROW_SHIFT
|
||||
self.tail = make([]*pairChunk, a+1, c)
|
||||
}
|
||||
self.tail = self.tail[:a+1]
|
||||
|
||||
for i:=0; i<a; i++ {
|
||||
self.tail[i] = new(pairChunk)
|
||||
copy(self.tail[i][:], con)
|
||||
con = con[_DEFAULT_NODE_CAP:]
|
||||
}
|
||||
|
||||
self.tail[a] = new(pairChunk)
|
||||
copy(self.tail[a][:b+1], con)
|
||||
}
|
||||
|
||||
func (self *linkedPairs) Less(i, j int) bool {
|
||||
return lessFrom(self.At(i).Key, self.At(j).Key, 0)
|
||||
}
|
||||
|
||||
func (self *linkedPairs) Swap(i, j int) {
|
||||
a, b := self.At(i), self.At(j)
|
||||
*a, *b = *b, *a
|
||||
}
|
||||
|
||||
func (self *linkedPairs) Sort() {
|
||||
sort.Sort(self)
|
||||
}
|
||||
|
||||
// Compare two strings from the pos d.
|
||||
func lessFrom(a, b string, d int) bool {
|
||||
l := len(a)
|
||||
if l > len(b) {
|
||||
l = len(b)
|
||||
}
|
||||
for i := d; i < l; i++ {
|
||||
if a[i] == b[i] {
|
||||
continue
|
||||
}
|
||||
return a[i] < b[i]
|
||||
}
|
||||
return len(a) < len(b)
|
||||
}
|
||||
|
||||
type parseObjectStack struct {
|
||||
parser Parser
|
||||
v linkedPairs
|
||||
}
|
||||
|
||||
type parseArrayStack struct {
|
||||
parser Parser
|
||||
v linkedNodes
|
||||
}
|
||||
|
||||
func newLazyArray(p *Parser) Node {
|
||||
s := new(parseArrayStack)
|
||||
s.parser = *p
|
||||
return Node{
|
||||
t: _V_ARRAY_LAZY,
|
||||
p: unsafe.Pointer(s),
|
||||
}
|
||||
}
|
||||
|
||||
func newLazyObject(p *Parser) Node {
|
||||
s := new(parseObjectStack)
|
||||
s.parser = *p
|
||||
return Node{
|
||||
t: _V_OBJECT_LAZY,
|
||||
p: unsafe.Pointer(s),
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Node) getParserAndArrayStack() (*Parser, *parseArrayStack) {
|
||||
stack := (*parseArrayStack)(self.p)
|
||||
return &stack.parser, stack
|
||||
}
|
||||
|
||||
func (self *Node) getParserAndObjectStack() (*Parser, *parseObjectStack) {
|
||||
stack := (*parseObjectStack)(self.p)
|
||||
return &stack.parser, stack
|
||||
}
|
||||
|
vendor/github.com/bytedance/sonic/ast/decode.go (12 lines changed, generated, vendored)

@@ -220,7 +220,7 @@ func decodeFloat64(src string, pos int) (ret int, v float64, err error) {
return ret, v, nil
}

func decodeValue(src string, pos int) (ret int, v types.JsonState) {
func decodeValue(src string, pos int, skipnum bool) (ret int, v types.JsonState) {
pos = skipBlank(src, pos)
if pos < 0 {
return pos, types.JsonState{Vt: types.ValueType(pos)}
@@ -256,6 +256,14 @@ func decodeValue(src string, pos int) (ret int, v types.JsonState) {
}
return ret, types.JsonState{Vt: types.V_FALSE}
case '-', '+', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
if skipnum {
ret = skipNumber(src, pos)
if ret >= 0 {
return ret, types.JsonState{Vt: types.V_DOUBLE, Iv: 0, Ep: pos}
} else {
return ret, types.JsonState{Vt: types.ValueType(ret)}
}
} else {
var iv int64
ret, iv, _ = decodeInt64(src, pos)
if ret >= 0 {
@@ -270,6 +278,8 @@ func decodeValue(src string, pos int) (ret int, v types.JsonState) {
} else {
return ret, types.JsonState{Vt: types.ValueType(ret)}
}
}

default:
return -int(types.ERR_INVALID_CHAR), types.JsonState{Vt:-types.ValueType(types.ERR_INVALID_CHAR)}
}
vendor/github.com/bytedance/sonic/ast/encode.go (56 lines changed, generated, vendored)

@@ -19,8 +19,6 @@
import (
`sync`
`unicode/utf8`

`github.com/bytedance/sonic/internal/rt`
)

const (
@@ -165,18 +163,18 @@ func (self *Node) encodeFalse(buf *[]byte) error {
}

func (self *Node) encodeNumber(buf *[]byte) error {
str := rt.StrFrom(self.p, self.v)
str := self.toString()
*buf = append(*buf, str...)
return nil
}

func (self *Node) encodeString(buf *[]byte) error {
if self.v == 0 {
if self.l == 0 {
*buf = append(*buf, '"', '"')
return nil
}

quote(buf, rt.StrFrom(self.p, self.v))
quote(buf, self.toString())
return nil
}

@@ -195,16 +193,28 @@ func (self *Node) encodeArray(buf *[]byte) error {

*buf = append(*buf, '[')

var p = (*Node)(self.p)
err := p.encode(buf)
if err != nil {
var s = (*linkedNodes)(self.p)
var started bool
if nb > 0 {
n := s.At(0)
if n.Exists() {
if err := n.encode(buf); err != nil {
return err
}
started = true
}
}

for i := 1; i < nb; i++ {
n := s.At(i)
if !n.Exists() {
continue
}
if started {
*buf = append(*buf, ',')
p = p.unsafe_next()
err := p.encode(buf)
if err != nil {
}
started = true
if err := n.encode(buf); err != nil {
return err
}
}

@@ -240,16 +250,28 @@ func (self *Node) encodeObject(buf *[]byte) error {

*buf = append(*buf, '{')

var p = (*Pair)(self.p)
err := p.encode(buf)
if err != nil {
var s = (*linkedPairs)(self.p)
var started bool
if nb > 0 {
n := s.At(0)
if n.Value.Exists() {
if err := n.encode(buf); err != nil {
return err
}
started = true
}
}

for i := 1; i < nb; i++ {
n := s.At(i)
if !n.Value.Exists() {
continue
}
if started {
*buf = append(*buf, ',')
p = p.unsafe_next()
err := p.encode(buf)
if err != nil {
}
started = true
if err := n.encode(buf); err != nil {
return err
}
}
vendor/github.com/bytedance/sonic/ast/error.go (38 lines changed, generated, vendored)

@@ -8,6 +8,33 @@
`github.com/bytedance/sonic/internal/native/types`
)


func newError(err types.ParsingError, msg string) *Node {
return &Node{
t: V_ERROR,
l: uint(err),
p: unsafe.Pointer(&msg),
}
}

// Error returns error message if the node is invalid
func (self Node) Error() string {
if self.t != V_ERROR {
return ""
} else {
return *(*string)(self.p)
}
}

func newSyntaxError(err SyntaxError) *Node {
msg := err.Description()
return &Node{
t: V_ERROR,
l: uint(err.Code),
p: unsafe.Pointer(&msg),
}
}

func (self *Parser) syntaxError(err types.ParsingError) SyntaxError {
return SyntaxError{
Pos : self.p,
@@ -16,13 +43,18 @@ func (self *Parser) syntaxError(err types.ParsingError) SyntaxError {
}
}

func newSyntaxError(err SyntaxError) *Node {
msg := err.Description()
func unwrapError(err error) *Node {
if se, ok := err.(*Node); ok {
return se
}else if sse, ok := err.(Node); ok {
return &sse
} else {
msg := err.Error()
return &Node{
t: V_ERROR,
v: int64(err.Code),
p: unsafe.Pointer(&msg),
}
}
}

type SyntaxError struct {
vendor/github.com/bytedance/sonic/ast/iterator.go (75 lines changed, generated, vendored)

@@ -82,26 +82,54 @@ type ObjectIterator struct {
Iterator
}

func (self *ListIterator) next() *Node {
next_start:
if !self.HasNext() {
return nil
} else {
n := self.p.nodeAt(self.i)
self.i++
if !n.Exists() {
goto next_start
}
return n
}
}

// Next scans through children of underlying V_ARRAY,
// copies each child to v, and returns .HasNext().
func (self *ListIterator) Next(v *Node) bool {
if !self.HasNext() {
n := self.next()
if n == nil {
return false
} else {
*v, self.i = *self.p.nodeAt(self.i), self.i + 1
}
*v = *n
return true
}

func (self *ObjectIterator) next() *Pair {
next_start:
if !self.HasNext() {
return nil
} else {
n := self.p.pairAt(self.i)
self.i++
if !n.Value.Exists() {
goto next_start
}
return n
}
}

// Next scans through children of underlying V_OBJECT,
// copies each child to v, and returns .HasNext().
func (self *ObjectIterator) Next(p *Pair) bool {
if !self.HasNext() {
n := self.next()
if n == nil {
return false
} else {
*p, self.i = *self.p.pairAt(self.i), self.i + 1
return true
}
*p = *n
return true
}

// Sequence represents scanning path of single-layer nodes.
@@ -129,36 +157,39 @@ func (s Sequence) String() string {
//
// Especailly, if the node is not V_ARRAY or V_OBJECT,
// the node itself will be returned and Sequence.Index == -1.
//
// NOTICE: A unsetted node WON'T trigger sc, but its index still counts into Path.Index
func (self *Node) ForEach(sc Scanner) error {
switch self.itype() {
case types.V_ARRAY:
ns, err := self.UnsafeArray()
iter, err := self.Values()
if err != nil {
return err
}
for i := range ns {
if !sc(Sequence{i, nil}, &ns[i]) {
return err
v := iter.next()
for v != nil {
if !sc(Sequence{iter.i-1, nil}, v) {
return nil
}
v = iter.next()
}
case types.V_OBJECT:
ns, err := self.UnsafeMap()
iter, err := self.Properties()
if err != nil {
return err
}
for i := range ns {
if !sc(Sequence{i, &ns[i].Key}, &ns[i].Value) {
return err
v := iter.next()
for v != nil {
if !sc(Sequence{iter.i-1, &v.Key}, &v.Value) {
return nil
}
v = iter.next()
}
default:
if self.Check() != nil {
return self
}
sc(Sequence{-1, nil}, self)
}
return self.Check()
}

type PairSlice []Pair

func (self PairSlice) Sort() {
radixQsort(self, 0, maxDepth(len(self)))
return nil
}
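Editor's note: the rewritten iterators above now skip children that no longer exist, so `Next` only yields valid nodes. A rough consumption sketch follows (the input JSON and the `"tags"` path are made up for illustration, and the usual signatures of `Get`, `Values`, and `Interface` in this vendored version are assumed):

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic"
	"github.com/bytedance/sonic/ast"
)

func main() {
	// Get returns an ast.Node addressed by path; "tags" is a hypothetical key.
	root, err := sonic.Get([]byte(`{"tags": ["a", "b", "c"]}`), "tags")
	if err != nil {
		panic(err)
	}

	it, err := root.Values() // ListIterator over the array's children
	if err != nil {
		panic(err)
	}

	var n ast.Node
	for it.Next(&n) { // copies the next existing child into n
		v, _ := n.Interface() // materialize the child as a plain Go value
		fmt.Println(v)
	}
}
```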
vendor/github.com/bytedance/sonic/ast/node.go (657 lines changed, generated, vendored)

File diff suppressed because it is too large.
vendor/github.com/bytedance/sonic/ast/parser.go (99 lines changed, generated, vendored)
|
@ -18,11 +18,15 @@
|
|||
|
||||
import (
|
||||
`fmt`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
const _DEFAULT_NODE_CAP int = 16
|
||||
const (
|
||||
_DEFAULT_NODE_CAP int = 8
|
||||
_APPEND_GROW_SHIFT = 1
|
||||
)
|
||||
|
||||
const (
|
||||
_ERR_NOT_FOUND types.ParsingError = 33
|
||||
|
@ -30,7 +34,10 @@
|
|||
)
|
||||
|
||||
var (
|
||||
// ErrNotExist means both key and value doesn't exist
|
||||
ErrNotExist error = newError(_ERR_NOT_FOUND, "value not exists")
|
||||
|
||||
// ErrUnsupportType means API on the node is unsupported
|
||||
ErrUnsupportType error = newError(_ERR_UNSUPPORT_TYPE, "unsupported type")
|
||||
)
|
||||
|
||||
|
@ -39,6 +46,7 @@ type Parser struct {
|
|||
s string
|
||||
noLazy bool
|
||||
skipValue bool
|
||||
dbuf *byte
|
||||
}
|
||||
|
||||
/** Parser Private Methods **/
|
||||
|
@ -107,7 +115,7 @@ func (self *Parser) lspace(sp int) int {
|
|||
return sp
|
||||
}
|
||||
|
||||
func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
|
||||
func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) {
|
||||
sp := self.p
|
||||
ns := len(self.s)
|
||||
|
||||
|
@ -119,7 +127,7 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
|
|||
/* check for empty array */
|
||||
if self.s[self.p] == ']' {
|
||||
self.p++
|
||||
return emptyArrayNode, 0
|
||||
return Node{t: types.V_ARRAY}, 0
|
||||
}
|
||||
|
||||
/* allocate array space and parse every element */
|
||||
|
@ -149,7 +157,7 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
|
|||
}
|
||||
|
||||
/* add the value to result */
|
||||
ret = append(ret, val)
|
||||
ret.Add(val)
|
||||
self.p = self.lspace(self.p)
|
||||
|
||||
/* check for EOF */
|
||||
|
@ -160,17 +168,17 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
|
|||
/* check for the next character */
|
||||
switch self.s[self.p] {
|
||||
case ',' : self.p++
|
||||
case ']' : self.p++; return NewArray(ret), 0
|
||||
case ']' : self.p++; return newArray(ret), 0
|
||||
default:
|
||||
if val.isLazy() {
|
||||
return newLazyArray(self, ret), 0
|
||||
}
|
||||
// if val.isLazy() {
|
||||
// return newLazyArray(self, ret), 0
|
||||
// }
|
||||
return Node{}, types.ERR_INVALID_CHAR
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
|
||||
func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) {
|
||||
sp := self.p
|
||||
ns := len(self.s)
|
||||
|
||||
|
@ -182,7 +190,7 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
|
|||
/* check for empty object */
|
||||
if self.s[self.p] == '}' {
|
||||
self.p++
|
||||
return emptyObjectNode, 0
|
||||
return Node{t: types.V_OBJECT}, 0
|
||||
}
|
||||
|
||||
/* decode each pair */
|
||||
|
@ -235,7 +243,8 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
|
|||
}
|
||||
|
||||
/* add the value to result */
|
||||
ret = append(ret, Pair{Key: key, Value: val})
|
||||
// FIXME: ret's address may change here, thus previous referred node in ret may be invalid !!
|
||||
ret.Add(Pair{Key: key, Value: val})
|
||||
self.p = self.lspace(self.p)
|
||||
|
||||
/* check for EOF */
|
||||
|
@ -246,11 +255,11 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
|
|||
/* check for the next character */
|
||||
switch self.s[self.p] {
|
||||
case ',' : self.p++
|
||||
case '}' : self.p++; return NewObject(ret), 0
|
||||
case '}' : self.p++; return newObject(ret), 0
|
||||
default:
|
||||
if val.isLazy() {
|
||||
return newLazyObject(self, ret), 0
|
||||
}
|
||||
// if val.isLazy() {
|
||||
// return newLazyObject(self, ret), 0
|
||||
// }
|
||||
return Node{}, types.ERR_INVALID_CHAR
|
||||
}
|
||||
}
|
||||
|
@ -290,15 +299,23 @@ func (self *Parser) Parse() (Node, types.ParsingError) {
|
|||
case types.V_FALSE : return falseNode, 0
|
||||
case types.V_STRING : return self.decodeString(val.Iv, val.Ep)
|
||||
case types.V_ARRAY:
|
||||
if self.noLazy {
|
||||
return self.decodeArray(make([]Node, 0, _DEFAULT_NODE_CAP))
|
||||
if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == ']' {
|
||||
self.p = p + 1
|
||||
return Node{t: types.V_ARRAY}, 0
|
||||
}
|
||||
return newLazyArray(self, make([]Node, 0, _DEFAULT_NODE_CAP)), 0
|
||||
if self.noLazy {
|
||||
return self.decodeArray(new(linkedNodes))
|
||||
}
|
||||
return newLazyArray(self), 0
|
||||
case types.V_OBJECT:
|
||||
if self.noLazy {
|
||||
return self.decodeObject(make([]Pair, 0, _DEFAULT_NODE_CAP))
|
||||
if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == '}' {
|
||||
self.p = p + 1
|
||||
return Node{t: types.V_OBJECT}, 0
|
||||
}
|
||||
return newLazyObject(self, make([]Pair, 0, _DEFAULT_NODE_CAP)), 0
|
||||
if self.noLazy {
|
||||
return self.decodeObject(new(linkedPairs))
|
||||
}
|
||||
return newLazyObject(self), 0
|
||||
case types.V_DOUBLE : return NewNumber(self.s[val.Ep:self.p]), 0
|
||||
case types.V_INTEGER : return NewNumber(self.s[val.Ep:self.p]), 0
|
||||
default : return Node{}, types.ParsingError(-val.Vt)
|
||||
|
@ -429,7 +446,7 @@ func (self *Node) skipNextNode() *Node {
|
|||
}
|
||||
|
||||
parser, stack := self.getParserAndArrayStack()
|
||||
ret := stack.v
|
||||
ret := &stack.v
|
||||
sp := parser.p
|
||||
ns := len(parser.s)
|
||||
|
||||
|
@ -458,7 +475,8 @@ func (self *Node) skipNextNode() *Node {
|
|||
}
|
||||
|
||||
/* add the value to result */
|
||||
ret = append(ret, val)
|
||||
ret.Add(val)
|
||||
self.l++
|
||||
parser.p = parser.lspace(parser.p)
|
||||
|
||||
/* check for EOF */
|
||||
|
@ -470,12 +488,11 @@ func (self *Node) skipNextNode() *Node {
|
|||
switch parser.s[parser.p] {
|
||||
case ',':
|
||||
parser.p++
|
||||
self.setLazyArray(parser, ret)
|
||||
return &ret[len(ret)-1]
|
||||
return ret.At(ret.Len()-1)
|
||||
case ']':
|
||||
parser.p++
|
||||
self.setArray(ret)
|
||||
return &ret[len(ret)-1]
|
||||
return ret.At(ret.Len()-1)
|
||||
default:
|
||||
return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))
|
||||
}
|
||||
|
@ -487,7 +504,7 @@ func (self *Node) skipNextPair() (*Pair) {
|
|||
}
|
||||
|
||||
parser, stack := self.getParserAndObjectStack()
|
||||
ret := stack.v
|
||||
ret := &stack.v
|
||||
sp := parser.p
|
||||
ns := len(parser.s)
|
||||
|
||||
|
@ -541,7 +558,8 @@ func (self *Node) skipNextPair() (*Pair) {
|
|||
}
|
||||
|
||||
/* add the value to result */
|
||||
ret = append(ret, Pair{Key: key, Value: val})
|
||||
ret.Add(Pair{Key: key, Value: val})
|
||||
self.l++
|
||||
parser.p = parser.lspace(parser.p)
|
||||
|
||||
/* check for EOF */
|
||||
|
@ -553,12 +571,11 @@ func (self *Node) skipNextPair() (*Pair) {
|
|||
switch parser.s[parser.p] {
|
||||
case ',':
|
||||
parser.p++
|
||||
self.setLazyObject(parser, ret)
|
||||
return &ret[len(ret)-1]
|
||||
return ret.At(ret.Len()-1)
|
||||
case '}':
|
||||
parser.p++
|
||||
self.setObject(ret)
|
||||
return &ret[len(ret)-1]
|
||||
return ret.At(ret.Len()-1)
|
||||
default:
|
||||
return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
|
||||
}
|
||||
|
@ -601,10 +618,30 @@ func LoadsUseNumber(src string) (int, interface{}, error) {
|
|||
}
|
||||
}
|
||||
|
||||
// NewParser returns pointer of new allocated parser
|
||||
func NewParser(src string) *Parser {
|
||||
return &Parser{s: src}
|
||||
}
|
||||
|
||||
// NewParser returns new allocated parser
|
||||
func NewParserObj(src string) Parser {
|
||||
return Parser{s: src}
|
||||
}
|
||||
|
||||
// decodeNumber controls if parser decodes the number values instead of skip them
|
||||
// WARN: once you set decodeNumber(true), please set decodeNumber(false) before you drop the parser
|
||||
// otherwise the memory CANNOT be reused
|
||||
func (self *Parser) decodeNumber(decode bool) {
|
||||
if !decode && self.dbuf != nil {
|
||||
types.FreeDbuf(self.dbuf)
|
||||
self.dbuf = nil
|
||||
return
|
||||
}
|
||||
if decode && self.dbuf == nil {
|
||||
self.dbuf = types.NewDbuf()
|
||||
}
|
||||
}
|
||||
|
||||
// ExportError converts types.ParsingError to std Error
|
||||
func (self *Parser) ExportError(err types.ParsingError) error {
|
||||
if err == _ERR_NOT_FOUND {
|
||||
|
|
vendor/github.com/bytedance/sonic/ast/sort.go (deleted, 206 lines, generated, vendored)
|
@ -1,206 +0,0 @@
|
|||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package ast
|
||||
|
||||
// Algorithm 3-way Radix Quicksort, d means the radix.
|
||||
// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html
|
||||
func radixQsort(kvs PairSlice, d, maxDepth int) {
|
||||
for len(kvs) > 11 {
|
||||
// To avoid the worst case of quickSort (time: O(n^2)), use introsort here.
|
||||
// Reference: https://en.wikipedia.org/wiki/Introsort and
|
||||
// https://github.com/golang/go/issues/467
|
||||
if maxDepth == 0 {
|
||||
heapSort(kvs, 0, len(kvs))
|
||||
return
|
||||
}
|
||||
maxDepth--
|
||||
|
||||
p := pivot(kvs, d)
|
||||
lt, i, gt := 0, 0, len(kvs)
|
||||
for i < gt {
|
||||
c := byteAt(kvs[i].Key, d)
|
||||
if c < p {
|
||||
swap(kvs, lt, i)
|
||||
i++
|
||||
lt++
|
||||
} else if c > p {
|
||||
gt--
|
||||
swap(kvs, i, gt)
|
||||
} else {
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
// kvs[0:lt] < v = kvs[lt:gt] < kvs[gt:len(kvs)]
|
||||
// Native implemention:
|
||||
// radixQsort(kvs[:lt], d, maxDepth)
|
||||
// if p > -1 {
|
||||
// radixQsort(kvs[lt:gt], d+1, maxDepth)
|
||||
// }
|
||||
// radixQsort(kvs[gt:], d, maxDepth)
|
||||
// Optimize as follows: make recursive calls only for the smaller parts.
|
||||
// Reference: https://www.geeksforgeeks.org/quicksort-tail-call-optimization-reducing-worst-case-space-log-n/
|
||||
if p == -1 {
|
||||
if lt > len(kvs) - gt {
|
||||
radixQsort(kvs[gt:], d, maxDepth)
|
||||
kvs = kvs[:lt]
|
||||
} else {
|
||||
radixQsort(kvs[:lt], d, maxDepth)
|
||||
kvs = kvs[gt:]
|
||||
}
|
||||
} else {
|
||||
ml := maxThree(lt, gt-lt, len(kvs)-gt)
|
||||
if ml == lt {
|
||||
radixQsort(kvs[lt:gt], d+1, maxDepth)
|
||||
radixQsort(kvs[gt:], d, maxDepth)
|
||||
kvs = kvs[:lt]
|
||||
} else if ml == gt-lt {
|
||||
radixQsort(kvs[:lt], d, maxDepth)
|
||||
radixQsort(kvs[gt:], d, maxDepth)
|
||||
kvs = kvs[lt:gt]
|
||||
d += 1
|
||||
} else {
|
||||
radixQsort(kvs[:lt], d, maxDepth)
|
||||
radixQsort(kvs[lt:gt], d+1, maxDepth)
|
||||
kvs = kvs[gt:]
|
||||
}
|
||||
}
|
||||
}
|
||||
insertRadixSort(kvs, d)
|
||||
}
|
||||
|
||||
func insertRadixSort(kvs PairSlice, d int) {
|
||||
for i := 1; i < len(kvs); i++ {
|
||||
for j := i; j > 0 && lessFrom(kvs[j].Key, kvs[j-1].Key, d); j-- {
|
||||
swap(kvs, j, j-1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func pivot(kvs PairSlice, d int) int {
|
||||
m := len(kvs) >> 1
|
||||
if len(kvs) > 40 {
|
||||
// Tukey's ``Ninther,'' median of three medians of three.
|
||||
t := len(kvs) / 8
|
||||
return medianThree(
|
||||
medianThree(byteAt(kvs[0].Key, d), byteAt(kvs[t].Key, d), byteAt(kvs[2*t].Key, d)),
|
||||
medianThree(byteAt(kvs[m].Key, d), byteAt(kvs[m-t].Key, d), byteAt(kvs[m+t].Key, d)),
|
||||
medianThree(byteAt(kvs[len(kvs)-1].Key, d),
|
||||
byteAt(kvs[len(kvs)-1-t].Key, d),
|
||||
byteAt(kvs[len(kvs)-1-2*t].Key, d)))
|
||||
}
|
||||
return medianThree(byteAt(kvs[0].Key, d), byteAt(kvs[m].Key, d), byteAt(kvs[len(kvs)-1].Key, d))
|
||||
}
|
||||
|
||||
func medianThree(i, j, k int) int {
|
||||
if i > j {
|
||||
i, j = j, i
|
||||
} // i < j
|
||||
if k < i {
|
||||
return i
|
||||
}
|
||||
if k > j {
|
||||
return j
|
||||
}
|
||||
return k
|
||||
}
|
||||
|
||||
func maxThree(i, j, k int) int {
|
||||
max := i
|
||||
if max < j {
|
||||
max = j
|
||||
}
|
||||
if max < k {
|
||||
max = k
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
// maxDepth returns a threshold at which quicksort should switch
|
||||
// to heapsort. It returns 2*ceil(lg(n+1)).
|
||||
func maxDepth(n int) int {
|
||||
var depth int
|
||||
for i := n; i > 0; i >>= 1 {
|
||||
depth++
|
||||
}
|
||||
return depth * 2
|
||||
}
|
||||
|
||||
// siftDown implements the heap property on kvs[lo:hi].
|
||||
// first is an offset into the array where the root of the heap lies.
|
||||
func siftDown(kvs PairSlice, lo, hi, first int) {
|
||||
root := lo
|
||||
for {
|
||||
child := 2*root + 1
|
||||
if child >= hi {
|
||||
break
|
||||
}
|
||||
if child+1 < hi && kvs[first+child].Key < kvs[first+child+1].Key {
|
||||
child++
|
||||
}
|
||||
if kvs[first+root].Key >= kvs[first+child].Key {
|
||||
return
|
||||
}
|
||||
swap(kvs, first+root, first+child)
|
||||
root = child
|
||||
}
|
||||
}
|
||||
|
||||
func heapSort(kvs PairSlice, a, b int) {
|
||||
first := a
|
||||
lo := 0
|
||||
hi := b - a
|
||||
|
||||
// Build heap with the greatest element at top.
|
||||
for i := (hi - 1) / 2; i >= 0; i-- {
|
||||
siftDown(kvs, i, hi, first)
|
||||
}
|
||||
|
||||
// Pop elements, the largest first, into end of kvs.
|
||||
for i := hi - 1; i >= 0; i-- {
|
||||
swap(kvs, first, first+i)
|
||||
siftDown(kvs, lo, i, first)
|
||||
}
|
||||
}
|
||||
|
||||
// Note that Pair.Key no longer points into Pair.m after swap when the map key is an integer
|
||||
func swap(kvs PairSlice, a, b int) {
|
||||
kvs[a].Key, kvs[b].Key = kvs[b].Key, kvs[a].Key
|
||||
kvs[a].Value, kvs[b].Value = kvs[b].Value, kvs[a].Value
|
||||
}
|
||||
|
||||
// Compare two strings starting from position d.
|
||||
func lessFrom(a, b string, d int) bool {
|
||||
l := len(a)
|
||||
if l > len(b) {
|
||||
l = len(b)
|
||||
}
|
||||
for i := d; i < l; i++ {
|
||||
if a[i] == b[i] {
|
||||
continue
|
||||
}
|
||||
return a[i] < b[i]
|
||||
}
|
||||
return len(a) < len(b)
|
||||
}
|
||||
|
||||
func byteAt(b string, p int) int {
|
||||
if p < len(b) {
|
||||
return int(b[p])
|
||||
}
|
||||
return -1
|
||||
}
|
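As a quick sanity check of the introsort depth bound in the deleted file above: maxDepth(100) counts seven halvings of i (100, 50, 25, 12, 6, 3, 1), so depth = 7 and the function returns 14, matching 2*ceil(lg(101)) = 2*7 = 14; radixQsort therefore only falls back to heapSort after 14 partitioning levels.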
315 vendor/github.com/bytedance/sonic/ast/visitor.go generated vendored Normal file

@@ -0,0 +1,315 @@
|
|||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
)
|
||||
|
||||
// Visitor handles the callbacks during preorder traversal of a JSON AST.
|
||||
//
|
||||
// According to the JSON RFC8259, a JSON AST can be defined by
|
||||
// the following rules without separator / whitespace tokens.
|
||||
//
|
||||
// JSON-AST = value
|
||||
// value = false / null / true / object / array / number / string
|
||||
// object = begin-object [ member *( member ) ] end-object
|
||||
// member = string value
|
||||
// array = begin-array [ value *( value ) ] end-array
|
||||
//
|
||||
type Visitor interface {
|
||||
|
||||
// OnNull handles a JSON null value.
|
||||
OnNull() error
|
||||
|
||||
// OnBool handles a JSON true / false value.
|
||||
OnBool(v bool) error
|
||||
|
||||
// OnString handles a JSON string value.
|
||||
OnString(v string) error
|
||||
|
||||
// OnInt64 handles a JSON number value with int64 type.
|
||||
OnInt64(v int64, n json.Number) error
|
||||
|
||||
// OnFloat64 handles a JSON number value with float64 type.
|
||||
OnFloat64(v float64, n json.Number) error
|
||||
|
||||
// OnObjectBegin handles the beginning of a JSON object value with a
|
||||
// suggested capacity that can be used to make your custom object container.
|
||||
//
|
||||
// After this point the visitor will receive a sequence of callbacks like
|
||||
// [string, value, string, value, ......, ObjectEnd].
|
||||
//
|
||||
// Note:
|
||||
// 1. This is a recursive definition which means the value can
|
||||
// also be a JSON object / array described by a sequence of callbacks.
|
||||
// 2. The suggested capacity will be 0 if current object is empty.
|
||||
// 3. Currently sonic uses a fixed capacity for non-empty objects (keep in
|
||||
// sync with ast.Node), which might not be very suitable. This may be
|
||||
// improved in a future version.
|
||||
OnObjectBegin(capacity int) error
|
||||
|
||||
// OnObjectKey handles a JSON object key string in member.
|
||||
OnObjectKey(key string) error
|
||||
|
||||
// OnObjectEnd handles the ending of a JSON object value.
|
||||
OnObjectEnd() error
|
||||
|
||||
// OnArrayBegin handles the beginning of a JSON array value with a
|
||||
// suggested capacity that can be used to make your custom array container.
|
||||
//
|
||||
// After this point the visitor will receive a sequence of callbacks like
|
||||
// [value, value, value, ......, ArrayEnd].
|
||||
//
|
||||
// Note:
|
||||
// 1. This is a recursive definition which means the value can
|
||||
// also be a JSON object / array described by a sequence of callbacks.
|
||||
// 2. The suggested capacity will be 0 if current array is empty.
|
||||
// 3. Currently sonic uses a fixed capacity for non-empty arrays (keep in
|
||||
// sync with ast.Node), which might not be very suitable. This may be
|
||||
// improved in a future version.
|
||||
OnArrayBegin(capacity int) error
|
||||
|
||||
// OnArrayEnd handles the ending of a JSON array value.
|
||||
OnArrayEnd() error
|
||||
}
|
||||
|
||||
// VisitorOptions contains all Visitor's options. The default value is an
|
||||
// empty VisitorOptions{}.
|
||||
type VisitorOptions struct {
|
||||
// OnlyNumber tells the parser to return the raw number value without
|
||||
// conversion, in which case the first argument of OnInt64 / OnFloat64 will always
|
||||
// be zero.
|
||||
OnlyNumber bool
|
||||
}
|
||||
|
||||
var defaultVisitorOptions = &VisitorOptions{}
|
||||
|
||||
// Preorder decodes the whole JSON string and callbacks each AST node to visitor
|
||||
// during preorder traversal. Any visitor method with an error returned will
|
||||
// break the traversal and the given error will be directly returned. The opts
|
||||
// argument can be reused after every call.
|
||||
func Preorder(str string, visitor Visitor, opts *VisitorOptions) error {
|
||||
if opts == nil {
|
||||
opts = defaultVisitorOptions
|
||||
}
|
||||
// process VisitorOptions first to guarantee that all options will be
|
||||
// constant during decoding and make options more readable.
|
||||
var (
|
||||
optDecodeNumber = !opts.OnlyNumber
|
||||
)
|
||||
|
||||
tv := &traverser{
|
||||
parser: Parser{
|
||||
s: str,
|
||||
noLazy: true,
|
||||
skipValue: false,
|
||||
},
|
||||
visitor: visitor,
|
||||
}
|
||||
|
||||
if optDecodeNumber {
|
||||
tv.parser.decodeNumber(true)
|
||||
}
|
||||
|
||||
err := tv.decodeValue()
|
||||
|
||||
if optDecodeNumber {
|
||||
tv.parser.decodeNumber(false)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
type traverser struct {
|
||||
parser Parser
|
||||
visitor Visitor
|
||||
}
|
||||
|
||||
// NOTE: keep in sync with (*Parser).Parse method.
|
||||
func (self *traverser) decodeValue() error {
|
||||
switch val := self.parser.decodeValue(); val.Vt {
|
||||
case types.V_EOF:
|
||||
return types.ERR_EOF
|
||||
case types.V_NULL:
|
||||
return self.visitor.OnNull()
|
||||
case types.V_TRUE:
|
||||
return self.visitor.OnBool(true)
|
||||
case types.V_FALSE:
|
||||
return self.visitor.OnBool(false)
|
||||
case types.V_STRING:
|
||||
return self.decodeString(val.Iv, val.Ep)
|
||||
case types.V_DOUBLE:
|
||||
return self.visitor.OnFloat64(val.Dv,
|
||||
json.Number(self.parser.s[val.Ep:self.parser.p]))
|
||||
case types.V_INTEGER:
|
||||
return self.visitor.OnInt64(val.Iv,
|
||||
json.Number(self.parser.s[val.Ep:self.parser.p]))
|
||||
case types.V_ARRAY:
|
||||
return self.decodeArray()
|
||||
case types.V_OBJECT:
|
||||
return self.decodeObject()
|
||||
default:
|
||||
return types.ParsingError(-val.Vt)
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: keep in sync with (*Parser).decodeArray method.
|
||||
func (self *traverser) decodeArray() error {
|
||||
sp := self.parser.p
|
||||
ns := len(self.parser.s)
|
||||
|
||||
/* check for EOF */
|
||||
self.parser.p = self.parser.lspace(sp)
|
||||
if self.parser.p >= ns {
|
||||
return types.ERR_EOF
|
||||
}
|
||||
|
||||
/* check for empty array */
|
||||
if self.parser.s[self.parser.p] == ']' {
|
||||
self.parser.p++
|
||||
if err := self.visitor.OnArrayBegin(0); err != nil {
|
||||
return err
|
||||
}
|
||||
return self.visitor.OnArrayEnd()
|
||||
}
|
||||
|
||||
/* allocate array space and parse every element */
|
||||
if err := self.visitor.OnArrayBegin(_DEFAULT_NODE_CAP); err != nil {
|
||||
return err
|
||||
}
|
||||
for {
|
||||
/* decode the value */
|
||||
if err := self.decodeValue(); err != nil {
|
||||
return err
|
||||
}
|
||||
self.parser.p = self.parser.lspace(self.parser.p)
|
||||
|
||||
/* check for EOF */
|
||||
if self.parser.p >= ns {
|
||||
return types.ERR_EOF
|
||||
}
|
||||
|
||||
/* check for the next character */
|
||||
switch self.parser.s[self.parser.p] {
|
||||
case ',':
|
||||
self.parser.p++
|
||||
case ']':
|
||||
self.parser.p++
|
||||
return self.visitor.OnArrayEnd()
|
||||
default:
|
||||
return types.ERR_INVALID_CHAR
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: keep in sync with (*Parser).decodeObject method.
|
||||
func (self *traverser) decodeObject() error {
|
||||
sp := self.parser.p
|
||||
ns := len(self.parser.s)
|
||||
|
||||
/* check for EOF */
|
||||
self.parser.p = self.parser.lspace(sp)
|
||||
if self.parser.p >= ns {
|
||||
return types.ERR_EOF
|
||||
}
|
||||
|
||||
/* check for empty object */
|
||||
if self.parser.s[self.parser.p] == '}' {
|
||||
self.parser.p++
|
||||
if err := self.visitor.OnObjectBegin(0); err != nil {
|
||||
return err
|
||||
}
|
||||
return self.visitor.OnObjectEnd()
|
||||
}
|
||||
|
||||
/* allocate object space and decode each pair */
|
||||
if err := self.visitor.OnObjectBegin(_DEFAULT_NODE_CAP); err != nil {
|
||||
return err
|
||||
}
|
||||
for {
|
||||
var njs types.JsonState
|
||||
var err types.ParsingError
|
||||
|
||||
/* decode the key */
|
||||
if njs = self.parser.decodeValue(); njs.Vt != types.V_STRING {
|
||||
return types.ERR_INVALID_CHAR
|
||||
}
|
||||
|
||||
/* extract the key */
|
||||
idx := self.parser.p - 1
|
||||
key := self.parser.s[njs.Iv:idx]
|
||||
|
||||
/* check for escape sequence */
|
||||
if njs.Ep != -1 {
|
||||
if key, err = unquote(key); err != 0 {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := self.visitor.OnObjectKey(key); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
/* expect a ':' delimiter */
|
||||
if err = self.parser.delim(); err != 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
/* decode the value */
|
||||
if err := self.decodeValue(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
self.parser.p = self.parser.lspace(self.parser.p)
|
||||
|
||||
/* check for EOF */
|
||||
if self.parser.p >= ns {
|
||||
return types.ERR_EOF
|
||||
}
|
||||
|
||||
/* check for the next character */
|
||||
switch self.parser.s[self.parser.p] {
|
||||
case ',':
|
||||
self.parser.p++
|
||||
case '}':
|
||||
self.parser.p++
|
||||
return self.visitor.OnObjectEnd()
|
||||
default:
|
||||
return types.ERR_INVALID_CHAR
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: keep in sync with (*Parser).decodeString method.
|
||||
func (self *traverser) decodeString(iv int64, ep int) error {
|
||||
p := self.parser.p - 1
|
||||
s := self.parser.s[iv:p]
|
||||
|
||||
/* fast path: no escape sequence */
|
||||
if ep == -1 {
|
||||
return self.visitor.OnString(s)
|
||||
}
|
||||
|
||||
/* unquote the string */
|
||||
out, err := unquote(s)
|
||||
if err != 0 {
|
||||
return err
|
||||
}
|
||||
return self.visitor.OnString(out)
|
||||
}
|
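The new visitor.go file above defines a small callback API; the following is a minimal usage sketch (added for illustration only: the printVisitor type and the sample JSON are invented, while ast.Visitor, ast.Preorder and the callback signatures come from the file above):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/bytedance/sonic/ast"
)

// printVisitor prints every AST node it is called back with.
type printVisitor struct{}

func (printVisitor) OnNull() error                            { fmt.Println("null"); return nil }
func (printVisitor) OnBool(v bool) error                      { fmt.Println("bool:", v); return nil }
func (printVisitor) OnString(v string) error                  { fmt.Println("string:", v); return nil }
func (printVisitor) OnInt64(v int64, n json.Number) error     { fmt.Println("int64:", v); return nil }
func (printVisitor) OnFloat64(v float64, n json.Number) error { fmt.Println("float64:", v); return nil }
func (printVisitor) OnObjectBegin(capacity int) error         { fmt.Println("{"); return nil }
func (printVisitor) OnObjectKey(key string) error             { fmt.Println("key:", key); return nil }
func (printVisitor) OnObjectEnd() error                       { fmt.Println("}"); return nil }
func (printVisitor) OnArrayBegin(capacity int) error          { fmt.Println("["); return nil }
func (printVisitor) OnArrayEnd() error                        { fmt.Println("]"); return nil }

func main() {
	// Preorder walks the JSON string and fires the callbacks above in document order.
	if err := ast.Preorder(`{"a":[1,2.5,"x",null,true]}`, printVisitor{}, nil); err != nil {
		fmt.Println("parse error:", err)
	}
}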
14 vendor/github.com/bytedance/sonic/bench-arm.sh generated vendored

@@ -1,14 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
pwd=$(pwd)
|
||||
export SONIC_NO_ASYNC_GC=1
|
||||
|
||||
cd $pwd/ast
|
||||
go test -benchmem -run=^$ -benchtime=1000000x -bench "^(BenchmarkGet.*|BenchmarkSet.*)$"
|
||||
|
||||
go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*|BenchmarkEncode.*)$"
|
||||
|
||||
go test -benchmem -run=^$ -benchtime=10000000x -bench "^(BenchmarkNodeGetByPath|BenchmarkStructGetByPath|BenchmarkNodeIndex|BenchmarkStructIndex|BenchmarkSliceIndex|BenchmarkMapIndex|BenchmarkNodeGet|BenchmarkSliceGet|BenchmarkMapGet|BenchmarkNodeSet|BenchmarkMapSet|BenchmarkNodeSetByIndex|BenchmarkSliceSetByIndex|BenchmarkStructSetByIndex|BenchmarkNodeUnset|BenchmarkMapUnset|BenchmarkNodUnsetByIndex|BenchmarkSliceUnsetByIndex|BenchmarkNodeAdd|BenchmarkSliceAdd|BenchmarkMapAdd)$"
|
||||
|
||||
unset SONIC_NO_ASYNC_GC
|
||||
cd $pwd
|
134 vendor/github.com/bytedance/sonic/bench.py generated vendored

@@ -1,134 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright 2022 ByteDance Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import tempfile
|
||||
import os
|
||||
import subprocess
|
||||
import argparse
|
||||
|
||||
gbench_prefix = "SONIC_NO_ASYNC_GC=1 go test -benchmem -run=none "
|
||||
|
||||
def run(cmd):
|
||||
print(cmd)
|
||||
if os.system(cmd):
|
||||
print ("Failed to run cmd: %s"%(cmd))
|
||||
exit(1)
|
||||
|
||||
def run_s(cmd):
|
||||
print (cmd)
|
||||
try:
|
||||
res = os.popen(cmd)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.returncode:
|
||||
print (e.output)
|
||||
exit(1)
|
||||
return res.read()
|
||||
|
||||
def run_r(cmd):
|
||||
print (cmd)
|
||||
try:
|
||||
cmds = cmd.split(' ')
|
||||
data = subprocess.check_output(cmds, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.returncode:
|
||||
print (e.output)
|
||||
exit(1)
|
||||
return data.decode("utf-8")
|
||||
|
||||
def compare(args):
|
||||
# detect the current branch.
|
||||
# result = run_r("git branch")
|
||||
current_branch = run_s("git status | head -n1 | sed 's/On branch //'")
|
||||
# for br in result.split('\n'):
|
||||
# if br.startswith("* "):
|
||||
# current_branch = br.lstrip('* ')
|
||||
# break
|
||||
|
||||
if not current_branch:
|
||||
print ("Failed to detech current branch")
|
||||
return None
|
||||
|
||||
# get the current diff
|
||||
(fd, diff) = tempfile.mkstemp()
|
||||
run("git diff > %s"%diff)
|
||||
|
||||
# early return if the current branch is main.
|
||||
print ("Current branch: %s"%(current_branch))
|
||||
if current_branch == "main":
|
||||
print ("Cannot compare at the main branch.Please build a new branch")
|
||||
return None
|
||||
|
||||
# benchmark current branch
|
||||
(fd, target) = tempfile.mkstemp(".target.txt")
|
||||
run("%s %s ./... 2>&1 | tee %s" %(gbench_prefix, args, target))
|
||||
|
||||
# trying to switch to the latest main branch
|
||||
run("git checkout -- .")
|
||||
if current_branch != "main":
|
||||
run("git checkout main")
|
||||
run("git pull --allow-unrelated-histories origin main")
|
||||
|
||||
# benchmark main branch
|
||||
(fd, main) = tempfile.mkstemp(".main.txt")
|
||||
run("%s %s ./... 2>&1 | tee %s" %(gbench_prefix, args, main))
|
||||
|
||||
# diff the result
|
||||
# benchstat = "go get golang.org/x/perf/cmd/benchstat && go install golang.org/x/perf/cmd/benchstat"
|
||||
run( "benchstat -sort=delta %s %s"%(main, target))
|
||||
run("git checkout -- .")
|
||||
|
||||
# restore branch
|
||||
if current_branch != "main":
|
||||
run("git checkout %s"%(current_branch))
|
||||
run("patch -p1 < %s" % (diff))
|
||||
return target
|
||||
|
||||
def main():
|
||||
argparser = argparse.ArgumentParser(description='Tools to test the performance. Example: ./bench.py -b Decoder_Generic_Sonic -c')
|
||||
argparser.add_argument('-b', '--bench', dest='filter', required=False,
|
||||
help='Specify the filter for golang benchmark')
|
||||
argparser.add_argument('-c', '--compare', dest='compare', action='store_true', required=False,
|
||||
help='Compare with the main benchmarking')
|
||||
argparser.add_argument('-t', '--times', dest='times', required=False,
|
||||
help='benchmark the times')
|
||||
argparser.add_argument('-r', '--repeat_times', dest='count', required=False,
|
||||
help='benchmark the count')
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.filter:
|
||||
gbench_args = "-bench=%s"%(args.filter)
|
||||
else:
|
||||
gbench_args = "-bench=."
|
||||
|
||||
if args.times:
|
||||
gbench_args += " -benchtime=%s"%(args.times)
|
||||
|
||||
if args.count:
|
||||
gbench_args += " -count=%s"%(args.count)
|
||||
else:
|
||||
gbench_args += " -count=10"
|
||||
|
||||
if args.compare:
|
||||
target = compare(gbench_args)
|
||||
else:
|
||||
target = None
|
||||
|
||||
if not target:
|
||||
(fd, target) = tempfile.mkstemp(".target.txt")
|
||||
run("%s %s ./... 2>&1 | tee %s" %(gbench_prefix, gbench_args, target))
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
27 vendor/github.com/bytedance/sonic/bench.sh generated vendored

@@ -1,27 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
pwd=$(pwd)
|
||||
export SONIC_NO_ASYNC_GC=1
|
||||
|
||||
cd $pwd/encoder
|
||||
go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkEncoder_.*)$"
|
||||
|
||||
cd $pwd/decoder
|
||||
go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkDecoder_.*)$"
|
||||
|
||||
cd $pwd/ast
|
||||
go test -benchmem -run=^$ -benchtime=1000000x -bench "^(BenchmarkGet.*|BenchmarkSet.*)$"
|
||||
|
||||
go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*|BenchmarkEncode.*)$"
|
||||
|
||||
go test -benchmem -run=^$ -benchtime=10000000x -bench "^(BenchmarkNodeGetByPath|BenchmarkStructGetByPath|BenchmarkNodeIndex|BenchmarkStructIndex|BenchmarkSliceIndex|BenchmarkMapIndex|BenchmarkNodeGet|BenchmarkSliceGet|BenchmarkMapGet|BenchmarkNodeSet|BenchmarkMapSet|BenchmarkNodeSetByIndex|BenchmarkSliceSetByIndex|BenchmarkStructSetByIndex|BenchmarkNodeUnset|BenchmarkMapUnset|BenchmarkNodUnsetByIndex|BenchmarkSliceUnsetByIndex|BenchmarkNodeAdd|BenchmarkSliceAdd|BenchmarkMapAdd)$"
|
||||
|
||||
cd $pwd/external_jsonlib_test/benchmark_test
|
||||
go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkEncoder_.*|BenchmarkDecoder_.*)$"
|
||||
|
||||
go test -benchmem -run=^$ -benchtime=1000000x -bench "^(BenchmarkGet.*|BenchmarkSet.*)$"
|
||||
|
||||
go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*)$"
|
||||
|
||||
unset SONIC_NO_ASYNC_GC
|
||||
cd $pwd
|
10 vendor/github.com/bytedance/sonic/check_branch_name.sh generated vendored

@@ -1,10 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
current=$(git status | head -n1 | sed 's/On branch //')
|
||||
name=${1:-$current}
|
||||
if [[ ! $name =~ ^(((opt(imize)?|feat(ure)?|doc|(bug|hot)?fix|test|refact(or)?|ci)/.+)|(main|develop)|(release/.+)|(release-v[0-9]+\.[0-9]+)|(release/v[0-9]+\.[0-9]+\.[0-9]+(-[a-z0-9.]+(\+[a-z0-9.]+)?)?)|revert-[a-z0-9]+)$ ]]; then
|
||||
echo "branch name '$name' is invalid"
|
||||
exit 1
|
||||
else
|
||||
echo "branch name '$name' is valid"
|
||||
fi
|
2 vendor/github.com/bytedance/sonic/compat.go generated vendored

@@ -1,4 +1,4 @@
|
|||
// +build !amd64 go1.21
|
||||
// +build !amd64 !go1.16 go1.22
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
|
|
8 vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go generated vendored

@@ -1,4 +1,4 @@
|
|||
// +build amd64,go1.15,!go1.21
|
||||
// +build amd64,go1.16,!go1.22
|
||||
|
||||
/*
|
||||
* Copyright 2023 ByteDance Inc.
|
||||
|
@@ -25,6 +25,10 @@
|
|||
// Decoder is the decoder context object
|
||||
type Decoder = decoder.Decoder
|
||||
|
||||
// SyntaxError represents json syntax error
|
||||
type SyntaxError = decoder.SyntaxError
|
||||
|
||||
// MismatchTypeError represents a mismatch between json and object
|
||||
type MismatchTypeError = decoder.MismatchTypeError
|
||||
|
||||
// Options for decode.
|
||||
|
@@ -42,8 +46,6 @@
|
|||
// StreamDecoder is the decoder context object for streaming input.
|
||||
type StreamDecoder = decoder.StreamDecoder
|
||||
|
||||
type SyntaxError = decoder.SyntaxError
|
||||
|
||||
var (
|
||||
// NewDecoder creates a new decoder instance.
|
||||
NewDecoder = decoder.NewDecoder
|
||||
|
|
70 vendor/github.com/bytedance/sonic/decoder/decoder_compat.go generated vendored

@@ -1,4 +1,4 @@
|
|||
// +build !amd64 go1.21
|
||||
// +build !amd64 !go1.16 go1.22
|
||||
|
||||
/*
|
||||
* Copyright 2023 ByteDance Inc.
|
||||
|
@@ -14,28 +14,34 @@
|
|||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
`bytes`
|
||||
`encoding/json`
|
||||
`io`
|
||||
`reflect`
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/option`
|
||||
`io`
|
||||
)
|
||||
|
||||
const (
|
||||
_F_use_int64 = iota
|
||||
_F_use_number
|
||||
_F_disable_urc
|
||||
_F_disable_unknown
|
||||
_F_copy_string
|
||||
_F_validate_string
|
||||
func init() {
|
||||
println("WARNING: sonic only supports Go1.16~1.20 && CPU amd64, but your environment is not suitable")
|
||||
}
|
||||
|
||||
_F_allow_control = 31
|
||||
const (
|
||||
_F_use_int64 = 0
|
||||
_F_disable_urc = 2
|
||||
_F_disable_unknown = 3
|
||||
_F_copy_string = 4
|
||||
|
||||
_F_use_number = types.B_USE_NUMBER
|
||||
_F_validate_string = types.B_VALIDATE_STRING
|
||||
_F_allow_control = types.B_ALLOW_CONTROL
|
||||
)
|
||||
|
||||
type Options uint64
|
||||
|
@@ -106,10 +112,10 @@ func (self *Decoder) CheckTrailings() error {
|
|||
func (self *Decoder) Decode(val interface{}) error {
|
||||
r := bytes.NewBufferString(self.s)
|
||||
dec := json.NewDecoder(r)
|
||||
if (self.f | uint64(OptionUseNumber)) != 0 {
|
||||
if (self.f & uint64(OptionUseNumber)) != 0 {
|
||||
dec.UseNumber()
|
||||
}
|
||||
if (self.f | uint64(OptionDisableUnknown)) != 0 {
|
||||
if (self.f & uint64(OptionDisableUnknown)) != 0 {
|
||||
dec.DisallowUnknownFields()
|
||||
}
|
||||
return dec.Decode(val)
|
||||
|
@@ -163,34 +169,26 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
type StreamDecoder struct {
|
||||
r io.Reader
|
||||
buf []byte
|
||||
scanp int
|
||||
scanned int64
|
||||
err error
|
||||
Decoder
|
||||
}
|
||||
type StreamDecoder = json.Decoder
|
||||
|
||||
// NewStreamDecoder adapts to encoding/json.NewDecoder API.
|
||||
//
|
||||
// NewStreamDecoder returns a new decoder that reads from r.
|
||||
func NewStreamDecoder(r io.Reader) *StreamDecoder {
|
||||
return &StreamDecoder{r : r}
|
||||
return json.NewDecoder(r)
|
||||
}
|
||||
|
||||
// Decode decodes input stream into val with corresponding data.
|
||||
// Redundant bytes may be read and left in its buffer, and can be used at the next call.
|
||||
// Either io error from underlying io.Reader (except io.EOF)
|
||||
// or syntax error from data will be recorded and stop subsequently decoding.
|
||||
func (self *StreamDecoder) Decode(val interface{}) (err error) {
|
||||
dec := json.NewDecoder(self.r)
|
||||
if (self.f | uint64(OptionUseNumber)) != 0 {
|
||||
dec.UseNumber()
|
||||
}
|
||||
if (self.f | uint64(OptionDisableUnknown)) != 0 {
|
||||
dec.DisallowUnknownFields()
|
||||
}
|
||||
return dec.Decode(val)
|
||||
// SyntaxError represents json syntax error
|
||||
type SyntaxError json.SyntaxError
|
||||
|
||||
// Description
|
||||
func (s SyntaxError) Description() string {
|
||||
return (*json.SyntaxError)(unsafe.Pointer(&s)).Error()
|
||||
}
|
||||
// Error
|
||||
func (s SyntaxError) Error() string {
|
||||
return (*json.SyntaxError)(unsafe.Pointer(&s)).Error()
|
||||
}
|
||||
|
||||
// MismatchTypeError represents a mismatch between json and object
|
||||
type MismatchTypeError json.UnmarshalTypeError
|
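Since the compat build above now aliases StreamDecoder to encoding/json.Decoder, streaming callers look the same on every platform. A minimal sketch (the sample input and variable names are invented; decoder.NewStreamDecoder and Decode are taken from the file above):

package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/bytedance/sonic/decoder"
)

func main() {
	// Two concatenated JSON documents on one stream.
	r := strings.NewReader(`{"n":1} {"n":2}`)
	dec := decoder.NewStreamDecoder(r)

	for {
		var v map[string]interface{}
		if err := dec.Decode(&v); err == io.EOF {
			break
		} else if err != nil {
			fmt.Println("decode error:", err)
			return
		}
		fmt.Println(v)
	}
}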
6 vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go generated vendored

@@ -1,4 +1,4 @@
|
|||
// +build amd64,go1.15,!go1.21
|
||||
// +build amd64,go1.16,!go1.22
|
||||
|
||||
/*
|
||||
* Copyright 2023 ByteDance Inc.
|
||||
|
@@ -59,6 +59,10 @@
|
|||
// before encoding it into JSON.
|
||||
ValidateString Options = encoder.ValidateString
|
||||
|
||||
// NoValidateJSONMarshaler indicates that the encoder should not validate the output string
|
||||
// after encoding the JSONMarshaler to JSON.
|
||||
NoValidateJSONMarshaler Options = encoder.NoValidateJSONMarshaler
|
||||
|
||||
// CompatibleWithStd is used to be compatible with std encoder.
|
||||
CompatibleWithStd Options = encoder.CompatibleWithStd
|
||||
)
|
||||
|
|
35 vendor/github.com/bytedance/sonic/encoder/encoder_compat.go generated vendored

@@ -1,4 +1,4 @@
|
|||
// +build !amd64 go1.21
|
||||
// +build !amd64 !go1.16 go1.22
|
||||
|
||||
/*
|
||||
* Copyright 2023 ByteDance Inc.
|
||||
|
@@ -27,6 +27,10 @@
|
|||
`github.com/bytedance/sonic/option`
|
||||
)
|
||||
|
||||
func init() {
|
||||
println("WARNING: sonic only supports Go1.16~1.20 && CPU amd64, but your environment is not suitable")
|
||||
}
|
||||
|
||||
// Options is a set of encoding options.
|
||||
type Options uint64
|
||||
|
||||
|
@@ -37,6 +41,7 @@
|
|||
bitNoQuoteTextMarshaler
|
||||
bitNoNullSliceOrMap
|
||||
bitValidateString
|
||||
bitNoValidateJSONMarshaler
|
||||
|
||||
// used for recursive compile
|
||||
bitPointerValue = 63
|
||||
|
@@ -69,6 +74,10 @@
|
|||
// before encoding it into JSON.
|
||||
ValidateString Options = 1 << bitValidateString
|
||||
|
||||
// NoValidateJSONMarshaler indicates that the encoder should not validate the output string
|
||||
// after encoding the JSONMarshaler to JSON.
|
||||
NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler
|
||||
|
||||
// CompatibleWithStd is used to be compatible with std encoder.
|
||||
CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
|
||||
)
|
||||
|
@@ -112,6 +121,15 @@ func (self *Encoder) SetValidateString(f bool) {
|
|||
}
|
||||
}
|
||||
|
||||
// SetNoValidateJSONMarshaler specifies if option NoValidateJSONMarshaler opens
|
||||
func (self *Encoder) SetNoValidateJSONMarshaler(f bool) {
|
||||
if f {
|
||||
self.Opts |= NoValidateJSONMarshaler
|
||||
} else {
|
||||
self.Opts &= ^NoValidateJSONMarshaler
|
||||
}
|
||||
}
|
||||
|
||||
// SetCompactMarshaler specifies if option CompactMarshaler opens
|
||||
func (self *Encoder) SetCompactMarshaler(f bool) {
|
||||
if f {
|
||||
|
@@ -212,23 +230,12 @@ func Valid(data []byte) (ok bool, start int) {
|
|||
}
|
||||
|
||||
// StreamEncoder uses io.Writer as
|
||||
type StreamEncoder struct {
|
||||
w io.Writer
|
||||
Encoder
|
||||
}
|
||||
type StreamEncoder = json.Encoder
|
||||
|
||||
// NewStreamEncoder adapts to encoding/json.NewDecoder API.
|
||||
//
|
||||
// NewStreamEncoder returns a new encoder that writes to w.
|
||||
func NewStreamEncoder(w io.Writer) *StreamEncoder {
|
||||
return &StreamEncoder{w: w}
|
||||
return json.NewEncoder(w)
|
||||
}
|
||||
|
||||
// Encode encodes interface{} as JSON to io.Writer
|
||||
func (enc *StreamEncoder) Encode(val interface{}) (err error) {
|
||||
jenc := json.NewEncoder(enc.w)
|
||||
jenc.SetEscapeHTML((enc.Opts & EscapeHTML) != 0)
|
||||
jenc.SetIndent(enc.prefix, enc.indent)
|
||||
err = jenc.Encode(val)
|
||||
return err
|
||||
}
|
||||
|
|
197 vendor/github.com/bytedance/sonic/internal/abi/abi.go generated vendored Normal file

@@ -0,0 +1,197 @@
|
|||
/*
|
||||
* Copyright 2022 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package abi
|
||||
|
||||
import (
|
||||
`fmt`
|
||||
`reflect`
|
||||
`sort`
|
||||
`strings`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
type FunctionLayout struct {
|
||||
FP uint32
|
||||
Args []Parameter
|
||||
Rets []Parameter
|
||||
}
|
||||
|
||||
func (self FunctionLayout) String() string {
|
||||
return self.formatFn()
|
||||
}
|
||||
|
||||
func (self FunctionLayout) ArgSize() uint32 {
|
||||
size := uintptr(0)
|
||||
for _, arg := range self.Args {
|
||||
size += arg.Type.Size()
|
||||
}
|
||||
return uint32(size)
|
||||
}
|
||||
|
||||
type slot struct {
|
||||
p bool
|
||||
m uint32
|
||||
}
|
||||
|
||||
func (self FunctionLayout) StackMap() *rt.StackMap {
|
||||
var st []slot
|
||||
var mb rt.StackMapBuilder
|
||||
|
||||
/* add arguments */
|
||||
for _, v := range self.Args {
|
||||
st = append(st, slot {
|
||||
m: v.Mem,
|
||||
p: v.IsPointer,
|
||||
})
|
||||
}
|
||||
|
||||
/* add stack-passed return values */
|
||||
for _, v := range self.Rets {
|
||||
if !v.InRegister {
|
||||
st = append(st, slot {
|
||||
m: v.Mem,
|
||||
p: v.IsPointer,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/* sort by memory offset */
|
||||
sort.Slice(st, func(i int, j int) bool {
|
||||
return st[i].m < st[j].m
|
||||
})
|
||||
|
||||
/* add the bits */
|
||||
for _, v := range st {
|
||||
mb.AddField(v.p)
|
||||
}
|
||||
|
||||
/* build the stack map */
|
||||
return mb.Build()
|
||||
}
|
||||
|
||||
func (self FunctionLayout) formatFn() string {
|
||||
fp := self.FP
|
||||
return fmt.Sprintf("\n%#04x\nRets:\n%s\nArgs:\n%s", fp, self.formatSeq(self.Rets, &fp), self.formatSeq(self.Args, &fp))
|
||||
}
|
||||
|
||||
func (self FunctionLayout) formatSeq(v []Parameter, fp *uint32) string {
|
||||
nb := len(v)
|
||||
mm := make([]string, 0, len(v))
|
||||
|
||||
/* convert each part */
|
||||
for i := nb-1; i >=0; i-- {
|
||||
*fp -= PtrSize
|
||||
mm = append(mm, fmt.Sprintf("%#04x %s", *fp, v[i].String()))
|
||||
}
|
||||
|
||||
/* join them together */
|
||||
return strings.Join(mm, "\n")
|
||||
}
|
||||
|
||||
type Frame struct {
|
||||
desc *FunctionLayout
|
||||
locals []bool
|
||||
ccall bool
|
||||
}
|
||||
|
||||
func NewFrame(desc *FunctionLayout, locals []bool, ccall bool) Frame {
|
||||
fr := Frame{}
|
||||
fr.desc = desc
|
||||
fr.locals = locals
|
||||
fr.ccall = ccall
|
||||
return fr
|
||||
}
|
||||
|
||||
func (self *Frame) String() string {
|
||||
out := self.desc.String()
|
||||
|
||||
off := -8
|
||||
out += fmt.Sprintf("\n%#4x [Return PC]", off)
|
||||
off -= 8
|
||||
out += fmt.Sprintf("\n%#4x [RBP]", off)
|
||||
off -= 8
|
||||
|
||||
for _, v := range ReservedRegs(self.ccall) {
|
||||
out += fmt.Sprintf("\n%#4x [%v]", off, v)
|
||||
off -= PtrSize
|
||||
}
|
||||
|
||||
for _, b := range self.locals {
|
||||
out += fmt.Sprintf("\n%#4x [%v]", off, b)
|
||||
off -= PtrSize
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func (self *Frame) Prev() uint32 {
|
||||
return self.Size() + PtrSize
|
||||
}
|
||||
|
||||
func (self *Frame) Size() uint32 {
|
||||
return uint32(self.Offs() + PtrSize)
|
||||
}
|
||||
|
||||
func (self *Frame) Offs() uint32 {
|
||||
return uint32(len(ReservedRegs(self.ccall)) * PtrSize + len(self.locals)*PtrSize)
|
||||
}
|
||||
|
||||
func (self *Frame) ArgPtrs() *rt.StackMap {
|
||||
return self.desc.StackMap()
|
||||
}
|
||||
|
||||
func (self *Frame) LocalPtrs() *rt.StackMap {
|
||||
var m rt.StackMapBuilder
|
||||
for _, b := range self.locals {
|
||||
m.AddFields(len(ReservedRegs(self.ccall)), b)
|
||||
}
|
||||
return m.Build()
|
||||
}
|
||||
|
||||
func alignUp(n uint32, a int) uint32 {
|
||||
return (uint32(n) + uint32(a) - 1) &^ (uint32(a) - 1)
|
||||
}
|
||||
|
||||
func isPointer(vt reflect.Type) bool {
|
||||
switch vt.Kind() {
|
||||
case reflect.Bool : fallthrough
|
||||
case reflect.Int : fallthrough
|
||||
case reflect.Int8 : fallthrough
|
||||
case reflect.Int16 : fallthrough
|
||||
case reflect.Int32 : fallthrough
|
||||
case reflect.Int64 : fallthrough
|
||||
case reflect.Uint : fallthrough
|
||||
case reflect.Uint8 : fallthrough
|
||||
case reflect.Uint16 : fallthrough
|
||||
case reflect.Uint32 : fallthrough
|
||||
case reflect.Uint64 : fallthrough
|
||||
case reflect.Float32 : fallthrough
|
||||
case reflect.Float64 : fallthrough
|
||||
case reflect.Uintptr : return false
|
||||
case reflect.Chan : fallthrough
|
||||
case reflect.Func : fallthrough
|
||||
case reflect.Map : fallthrough
|
||||
case reflect.Ptr : fallthrough
|
||||
case reflect.UnsafePointer : return true
|
||||
case reflect.Complex64 : fallthrough
|
||||
case reflect.Complex128 : fallthrough
|
||||
case reflect.Array : fallthrough
|
||||
case reflect.Struct : panic("abi: unsupported types")
|
||||
default : panic("abi: invalid value type")
|
||||
}
|
||||
}
|
282 vendor/github.com/bytedance/sonic/internal/abi/abi_amd64.go generated vendored Normal file

@@ -0,0 +1,282 @@
|
|||
/*
|
||||
* Copyright 2022 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package abi
|
||||
|
||||
import (
|
||||
`fmt`
|
||||
`reflect`
|
||||
`unsafe`
|
||||
|
||||
. `github.com/chenzhuoyu/iasm/x86_64`
|
||||
)
|
||||
|
||||
const (
|
||||
PtrSize = 8 // pointer size
|
||||
PtrAlign = 8 // pointer alignment
|
||||
)
|
||||
|
||||
var iregOrderC = []Register{
|
||||
RDI,
|
||||
RSI,
|
||||
RDX,
|
||||
RCX,
|
||||
R8,
|
||||
R9,
|
||||
}
|
||||
|
||||
var xregOrderC = []Register{
|
||||
XMM0,
|
||||
XMM1,
|
||||
XMM2,
|
||||
XMM3,
|
||||
XMM4,
|
||||
XMM5,
|
||||
XMM6,
|
||||
XMM7,
|
||||
}
|
||||
|
||||
var (
|
||||
intType = reflect.TypeOf(0)
|
||||
ptrType = reflect.TypeOf(unsafe.Pointer(nil))
|
||||
)
|
||||
|
||||
func (self *Frame) argv(i int) *MemoryOperand {
|
||||
return Ptr(RSP, int32(self.Prev() + self.desc.Args[i].Mem))
|
||||
}
|
||||
|
||||
// spillv is used for growstack spill registers
|
||||
func (self *Frame) spillv(i int) *MemoryOperand {
|
||||
// remain one slot for caller return pc
|
||||
return Ptr(RSP, PtrSize + int32(self.desc.Args[i].Mem))
|
||||
}
|
||||
|
||||
func (self *Frame) retv(i int) *MemoryOperand {
|
||||
return Ptr(RSP, int32(self.Prev() + self.desc.Rets[i].Mem))
|
||||
}
|
||||
|
||||
func (self *Frame) resv(i int) *MemoryOperand {
|
||||
return Ptr(RSP, int32(self.Offs() - uint32((i+1) * PtrSize)))
|
||||
}
|
||||
|
||||
func (self *Frame) emitGrowStack(p *Program, entry *Label) {
|
||||
// spill all register arguments
|
||||
for i, v := range self.desc.Args {
|
||||
if v.InRegister {
|
||||
if v.IsFloat == floatKind64 {
|
||||
p.MOVSD(v.Reg, self.spillv(i))
|
||||
} else if v.IsFloat == floatKind32 {
|
||||
p.MOVSS(v.Reg, self.spillv(i))
|
||||
}else {
|
||||
p.MOVQ(v.Reg, self.spillv(i))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// call runtime.morestack_noctxt
|
||||
p.MOVQ(F_morestack_noctxt, R12)
|
||||
p.CALLQ(R12)
|
||||
// load all register arguments
|
||||
for i, v := range self.desc.Args {
|
||||
if v.InRegister {
|
||||
if v.IsFloat == floatKind64 {
|
||||
p.MOVSD(self.spillv(i), v.Reg)
|
||||
} else if v.IsFloat == floatKind32 {
|
||||
p.MOVSS(self.spillv(i), v.Reg)
|
||||
}else {
|
||||
p.MOVQ(self.spillv(i), v.Reg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// jump back to the function entry
|
||||
p.JMP(entry)
|
||||
}
|
||||
|
||||
func (self *Frame) GrowStackTextSize() uint32 {
|
||||
p := DefaultArch.CreateProgram()
|
||||
// spill all register arguments
|
||||
for i, v := range self.desc.Args {
|
||||
if v.InRegister {
|
||||
if v.IsFloat == floatKind64 {
|
||||
p.MOVSD(v.Reg, self.spillv(i))
|
||||
} else if v.IsFloat == floatKind32 {
|
||||
p.MOVSS(v.Reg, self.spillv(i))
|
||||
}else {
|
||||
p.MOVQ(v.Reg, self.spillv(i))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// call runtime.morestack_noctxt
|
||||
p.MOVQ(F_morestack_noctxt, R12)
|
||||
p.CALLQ(R12)
|
||||
// load all register arguments
|
||||
for i, v := range self.desc.Args {
|
||||
if v.InRegister {
|
||||
if v.IsFloat == floatKind64 {
|
||||
p.MOVSD(self.spillv(i), v.Reg)
|
||||
} else if v.IsFloat == floatKind32 {
|
||||
p.MOVSS(self.spillv(i), v.Reg)
|
||||
} else {
|
||||
p.MOVQ(self.spillv(i), v.Reg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// jump back to the function entry
|
||||
l := CreateLabel("")
|
||||
p.Link(l)
|
||||
p.JMP(l)
|
||||
|
||||
return uint32(len(p.Assemble(0)))
|
||||
}
|
||||
|
||||
func (self *Frame) emitPrologue(p *Program) {
|
||||
p.SUBQ(self.Size(), RSP)
|
||||
p.MOVQ(RBP, Ptr(RSP, int32(self.Offs())))
|
||||
p.LEAQ(Ptr(RSP, int32(self.Offs())), RBP)
|
||||
}
|
||||
|
||||
func (self *Frame) emitEpilogue(p *Program) {
|
||||
p.MOVQ(Ptr(RSP, int32(self.Offs())), RBP)
|
||||
p.ADDQ(self.Size(), RSP)
|
||||
p.RET()
|
||||
}
|
||||
|
||||
func (self *Frame) emitReserveRegs(p *Program) {
|
||||
// spill reserved registers
|
||||
for i, r := range ReservedRegs(self.ccall) {
|
||||
switch r.(type) {
|
||||
case Register64:
|
||||
p.MOVQ(r, self.resv(i))
|
||||
case XMMRegister:
|
||||
p.MOVSD(r, self.resv(i))
|
||||
default:
|
||||
panic(fmt.Sprintf("unsupported register type %t to reserve", r))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Frame) emitSpillPtrs(p *Program) {
|
||||
// spill pointer argument registers
|
||||
for i, r := range self.desc.Args {
|
||||
if r.InRegister && r.IsPointer {
|
||||
p.MOVQ(r.Reg, self.argv(i))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Frame) emitClearPtrs(p *Program) {
|
||||
// spill pointer argument registers
|
||||
for i, r := range self.desc.Args {
|
||||
if r.InRegister && r.IsPointer {
|
||||
p.MOVQ(int64(0), self.argv(i))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Frame) emitCallC(p *Program, addr uintptr) {
|
||||
p.MOVQ(addr, RAX)
|
||||
p.CALLQ(RAX)
|
||||
}
|
||||
|
||||
type floatKind uint8
|
||||
|
||||
const (
|
||||
notFloatKind floatKind = iota
|
||||
floatKind32
|
||||
floatKind64
|
||||
)
|
||||
|
||||
type Parameter struct {
|
||||
InRegister bool
|
||||
IsPointer bool
|
||||
IsFloat floatKind
|
||||
Reg Register
|
||||
Mem uint32
|
||||
Type reflect.Type
|
||||
}
|
||||
|
||||
func mkIReg(vt reflect.Type, reg Register64) (p Parameter) {
|
||||
p.Reg = reg
|
||||
p.Type = vt
|
||||
p.InRegister = true
|
||||
p.IsPointer = isPointer(vt)
|
||||
return
|
||||
}
|
||||
|
||||
func isFloat(vt reflect.Type) floatKind {
|
||||
switch vt.Kind() {
|
||||
case reflect.Float32:
|
||||
return floatKind32
|
||||
case reflect.Float64:
|
||||
return floatKind64
|
||||
default:
|
||||
return notFloatKind
|
||||
}
|
||||
}
|
||||
|
||||
func mkXReg(vt reflect.Type, reg XMMRegister) (p Parameter) {
|
||||
p.Reg = reg
|
||||
p.Type = vt
|
||||
p.InRegister = true
|
||||
p.IsFloat = isFloat(vt)
|
||||
return
|
||||
}
|
||||
|
||||
func mkStack(vt reflect.Type, mem uint32) (p Parameter) {
|
||||
p.Mem = mem
|
||||
p.Type = vt
|
||||
p.InRegister = false
|
||||
p.IsPointer = isPointer(vt)
|
||||
p.IsFloat = isFloat(vt)
|
||||
return
|
||||
}
|
||||
|
||||
func (self Parameter) String() string {
|
||||
if self.InRegister {
|
||||
return fmt.Sprintf("[%%%s, Pointer(%v), Float(%v)]", self.Reg, self.IsPointer, self.IsFloat)
|
||||
} else {
|
||||
return fmt.Sprintf("[%d(FP), Pointer(%v), Float(%v)]", self.Mem, self.IsPointer, self.IsFloat)
|
||||
}
|
||||
}
|
||||
|
||||
func CallC(addr uintptr, fr Frame, maxStack uintptr) []byte {
|
||||
p := DefaultArch.CreateProgram()
|
||||
|
||||
stack := CreateLabel("_stack_grow")
|
||||
entry := CreateLabel("_entry")
|
||||
p.Link(entry)
|
||||
fr.emitStackCheck(p, stack, maxStack)
|
||||
fr.emitPrologue(p)
|
||||
fr.emitReserveRegs(p)
|
||||
fr.emitSpillPtrs(p)
|
||||
fr.emitExchangeArgs(p)
|
||||
fr.emitCallC(p, addr)
|
||||
fr.emitExchangeRets(p)
|
||||
fr.emitRestoreRegs(p)
|
||||
fr.emitEpilogue(p)
|
||||
p.Link(stack)
|
||||
fr.emitGrowStack(p, entry)
|
||||
|
||||
return p.Assemble(0)
|
||||
}
|
||||
|
||||
|
||||
func (self *Frame) emitDebug(p *Program) {
|
||||
p.INT(3)
|
||||
}
|
182 vendor/github.com/bytedance/sonic/internal/abi/abi_legacy_amd64.go generated vendored Normal file

@@ -0,0 +1,182 @@
|
|||
//go:build !go1.17
|
||||
// +build !go1.17
|
||||
|
||||
/*
|
||||
* Copyright 2022 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package abi
|
||||
|
||||
import (
|
||||
`fmt`
|
||||
`reflect`
|
||||
`runtime`
|
||||
|
||||
. `github.com/chenzhuoyu/iasm/x86_64`
|
||||
)
|
||||
|
||||
func ReservedRegs(callc bool) []Register {
|
||||
return nil
|
||||
}
|
||||
|
||||
func salloc(p []Parameter, sp uint32, vt reflect.Type) (uint32, []Parameter) {
|
||||
switch vt.Kind() {
|
||||
case reflect.Bool : return sp + 8, append(p, mkStack(reflect.TypeOf(false), sp))
|
||||
case reflect.Int : return sp + 8, append(p, mkStack(intType, sp))
|
||||
case reflect.Int8 : return sp + 8, append(p, mkStack(reflect.TypeOf(int8(0)), sp))
|
||||
case reflect.Int16 : return sp + 8, append(p, mkStack(reflect.TypeOf(int16(0)), sp))
|
||||
case reflect.Int32 : return sp + 8, append(p, mkStack(reflect.TypeOf(int32(0)), sp))
|
||||
case reflect.Int64 : return sp + 8, append(p, mkStack(reflect.TypeOf(int64(0)), sp))
|
||||
case reflect.Uint : return sp + 8, append(p, mkStack(reflect.TypeOf(uint(0)), sp))
|
||||
case reflect.Uint8 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint8(0)), sp))
|
||||
case reflect.Uint16 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint16(0)), sp))
|
||||
case reflect.Uint32 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint32(0)), sp))
|
||||
case reflect.Uint64 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint64(0)), sp))
|
||||
case reflect.Uintptr : return sp + 8, append(p, mkStack(reflect.TypeOf(uintptr(0)), sp))
|
||||
case reflect.Float32 : return sp + 8, append(p, mkStack(reflect.TypeOf(float32(0)), sp))
|
||||
case reflect.Float64 : return sp + 8, append(p, mkStack(reflect.TypeOf(float64(0)), sp))
|
||||
case reflect.Complex64 : panic("abi: go116: not implemented: complex64")
|
||||
case reflect.Complex128 : panic("abi: go116: not implemented: complex128")
|
||||
case reflect.Array : panic("abi: go116: not implemented: arrays")
|
||||
case reflect.Chan : return sp + 8, append(p, mkStack(reflect.TypeOf((chan int)(nil)), sp))
|
||||
case reflect.Func : return sp + 8, append(p, mkStack(reflect.TypeOf((func())(nil)), sp))
|
||||
case reflect.Map : return sp + 8, append(p, mkStack(reflect.TypeOf((map[int]int)(nil)), sp))
|
||||
case reflect.Ptr : return sp + 8, append(p, mkStack(reflect.TypeOf((*int)(nil)), sp))
|
||||
case reflect.UnsafePointer : return sp + 8, append(p, mkStack(ptrType, sp))
|
||||
case reflect.Interface : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(ptrType, sp + 8))
|
||||
case reflect.Slice : return sp + 24, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8), mkStack(intType, sp + 16))
|
||||
case reflect.String : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8))
|
||||
case reflect.Struct : panic("abi: go116: not implemented: structs")
|
||||
default : panic("abi: invalid value type")
|
||||
}
|
||||
}
|
||||
|
||||
func NewFunctionLayout(ft reflect.Type) FunctionLayout {
|
||||
var sp uint32
|
||||
var fn FunctionLayout
|
||||
|
||||
/* assign every arguments */
|
||||
for i := 0; i < ft.NumIn(); i++ {
|
||||
sp, fn.Args = salloc(fn.Args, sp, ft.In(i))
|
||||
}
|
||||
|
||||
/* assign every return value */
|
||||
for i := 0; i < ft.NumOut(); i++ {
|
||||
sp, fn.Rets = salloc(fn.Rets, sp, ft.Out(i))
|
||||
}
|
||||
|
||||
/* update function ID and stack pointer */
|
||||
fn.FP = sp
|
||||
return fn
|
||||
}
|
||||
|
||||
func (self *Frame) emitExchangeArgs(p *Program) {
|
||||
iregArgs, xregArgs := 0, 0
|
||||
for _, v := range self.desc.Args {
|
||||
if v.IsFloat != notFloatKind {
|
||||
xregArgs += 1
|
||||
} else {
|
||||
iregArgs += 1
|
||||
}
|
||||
}
|
||||
|
||||
if iregArgs > len(iregOrderC) {
|
||||
panic("too many arguments, only support at most 6 integer arguments now")
|
||||
}
|
||||
if xregArgs > len(xregOrderC) {
|
||||
panic("too many arguments, only support at most 8 float arguments now")
|
||||
}
|
||||
|
||||
ic, xc := iregArgs, xregArgs
|
||||
for i := 0; i < len(self.desc.Args); i++ {
|
||||
arg := self.desc.Args[i]
|
||||
if arg.IsFloat == floatKind64 {
|
||||
p.MOVSD(self.argv(i), xregOrderC[xregArgs - xc])
|
||||
xc -= 1
|
||||
} else if arg.IsFloat == floatKind32 {
|
||||
p.MOVSS(self.argv(i), xregOrderC[xregArgs - xc])
|
||||
xc -= 1
|
||||
} else {
|
||||
p.MOVQ(self.argv(i), iregOrderC[iregArgs - ic])
|
||||
ic -= 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
|
||||
// get the current goroutine
|
||||
switch runtime.GOOS {
|
||||
case "linux" : p.MOVQ(Abs(-8), R14).FS()
|
||||
case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
|
||||
case "windows": break // windows always stores G pointer at R14
|
||||
default : panic("unsupported operating system")
|
||||
}
|
||||
|
||||
// check the stack guard
|
||||
p.LEAQ(Ptr(RSP, -int32(self.Size() + uint32(maxStack))), RAX)
|
||||
p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
|
||||
p.JBE(to)
|
||||
}
|
||||
|
||||
func (self *Frame) StackCheckTextSize() uint32 {
|
||||
p := DefaultArch.CreateProgram()
|
||||
|
||||
// get the current goroutine
|
||||
switch runtime.GOOS {
|
||||
case "linux" : p.MOVQ(Abs(-8), R14).FS()
|
||||
case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
|
||||
case "windows": break // windows always stores G pointer at R14
|
||||
default : panic("unsupported operating system")
|
||||
}
|
||||
|
||||
// check the stack guard
|
||||
p.LEAQ(Ptr(RSP, -int32(self.Size())), RAX)
|
||||
p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
|
||||
l := CreateLabel("")
|
||||
p.Link(l)
|
||||
p.JBE(l)
|
||||
|
||||
return uint32(len(p.Assemble(0)))
|
||||
}
|
||||
|
||||
func (self *Frame) emitExchangeRets(p *Program) {
|
||||
if len(self.desc.Rets) > 1 {
|
||||
panic("too many results, only support one result now")
|
||||
}
|
||||
// store result
|
||||
if len(self.desc.Rets) ==1 {
|
||||
if self.desc.Rets[0].IsFloat == floatKind64 {
|
||||
p.MOVSD(xregOrderC[0], self.retv(0))
|
||||
} else if self.desc.Rets[0].IsFloat == floatKind32 {
|
||||
p.MOVSS(xregOrderC[0], self.retv(0))
|
||||
} else {
|
||||
p.MOVQ(RAX, self.retv(0))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Frame) emitRestoreRegs(p *Program) {
|
||||
// load reserved registers
|
||||
for i, r := range ReservedRegs(self.ccall) {
|
||||
switch r.(type) {
|
||||
case Register64:
|
||||
p.MOVQ(self.resv(i), r)
|
||||
case XMMRegister:
|
||||
p.MOVSD(self.resv(i), r)
|
||||
default:
|
||||
panic(fmt.Sprintf("unsupported register type %t to reserve", r))
|
||||
}
|
||||
}
|
||||
}
|
316 vendor/github.com/bytedance/sonic/internal/abi/abi_regabi_amd64.go generated vendored Normal file

@@ -0,0 +1,316 @@
|
|||
//go:build go1.17
|
||||
// +build go1.17
|
||||
|
||||
/*
|
||||
* Copyright 2022 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/** Go Internal ABI implementation
|
||||
*
|
||||
* This module implements the function layout algorithm described by the Go internal ABI.
|
||||
* See https://github.com/golang/go/blob/master/src/cmd/compile/abi-internal.md for more info.
|
||||
*/
|
||||
|
||||
package abi
|
||||
|
||||
import (
|
||||
`fmt`
|
||||
`reflect`
|
||||
|
||||
. `github.com/chenzhuoyu/iasm/x86_64`
|
||||
)
|
||||
|
||||
/** Frame Structure of the Generated Function
|
||||
FP +------------------------------+
|
||||
| . . . |
|
||||
| 2nd reg argument spill space |
|
||||
+ 1st reg argument spill space |
|
||||
| <pointer-sized alignment> |
|
||||
| . . . |
|
||||
| 2nd stack-assigned result |
|
||||
+ 1st stack-assigned result |
|
||||
| <pointer-sized alignment> |
|
||||
| . . . |
|
||||
| 2nd stack-assigned argument |
|
||||
| 1st stack-assigned argument |
|
||||
| stack-assigned receiver |
|
||||
prev() +------------------------------+ (Previous Frame)
|
||||
Return PC |
|
||||
size() -------------------------------|
|
||||
Saved RBP |
|
||||
offs() -------------------------------|
|
||||
1th Reserved Registers |
|
||||
-------------------------------|
|
||||
2th Reserved Registers |
|
||||
-------------------------------|
|
||||
Local Variables |
|
||||
RSP -------------------------------|↓ lower addresses
|
||||
*/
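// Worked example of the offsets above (an added illustration, not upstream
// code), assuming two reserved registers (R14, R15), one local slot and
// PtrSize == 8, using Offs/Size/Prev from abi.go:
//
//   Offs() = (2 + 1) * 8 = 24   // saved RBP sits at RSP+24
//   Size() = Offs() + 8  = 32   // return PC sits at RSP+32
//   Prev() = Size() + 8  = 40   // caller's stack-assigned arguments start at RSP+40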
|
||||
|
||||
const zeroRegGo = XMM15
|
||||
|
||||
var iregOrderGo = [...]Register64 {
|
||||
RAX,// RDI
|
||||
RBX,// RSI
|
||||
RCX,// RDX
|
||||
RDI,// RCX
|
||||
RSI,// R8
|
||||
R8, // R9
|
||||
R9,
|
||||
R10,
|
||||
R11,
|
||||
}
|
||||
|
||||
var xregOrderGo = [...]XMMRegister {
|
||||
XMM0,
|
||||
XMM1,
|
||||
XMM2,
|
||||
XMM3,
|
||||
XMM4,
|
||||
XMM5,
|
||||
XMM6,
|
||||
XMM7,
|
||||
XMM8,
|
||||
XMM9,
|
||||
XMM10,
|
||||
XMM11,
|
||||
XMM12,
|
||||
XMM13,
|
||||
XMM14,
|
||||
}
|
||||
|
||||
func ReservedRegs(callc bool) []Register {
|
||||
if callc {
|
||||
return nil
|
||||
}
|
||||
return []Register {
|
||||
R14, // current goroutine
|
||||
R15, // GOT reference
|
||||
}
|
||||
}
|
||||
|
||||
type stackAlloc struct {
|
||||
s uint32
|
||||
i int
|
||||
x int
|
||||
}
|
||||
|
||||
func (self *stackAlloc) reset() {
|
||||
self.i, self.x = 0, 0
|
||||
}
|
||||
|
||||
func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
|
||||
p = mkIReg(vt, iregOrderGo[self.i])
|
||||
self.i++
|
||||
return
|
||||
}
|
||||
|
||||
func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
|
||||
p = mkXReg(vt, xregOrderGo[self.x])
|
||||
self.x++
|
||||
return
|
||||
}
|
||||
|
||||
func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
|
||||
p = mkStack(vt, self.s)
|
||||
self.s += uint32(vt.Size())
|
||||
return
|
||||
}
|
||||
|
||||
func (self *stackAlloc) spill(n uint32, a int) uint32 {
|
||||
self.s = alignUp(self.s, a) + n
|
||||
return self.s
|
||||
}
|
||||
|
||||
func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
|
||||
nb := vt.Size()
|
||||
vk := vt.Kind()
|
||||
|
||||
/* zero-sized objects are allocated on stack */
|
||||
if nb == 0 {
|
||||
return append(p, mkStack(intType, self.s))
|
||||
}
|
||||
|
||||
/* check for value type */
|
||||
switch vk {
|
||||
case reflect.Bool : return self.valloc(p, reflect.TypeOf(false))
|
||||
case reflect.Int : return self.valloc(p, intType)
|
||||
case reflect.Int8 : return self.valloc(p, reflect.TypeOf(int8(0)))
|
||||
case reflect.Int16 : return self.valloc(p, reflect.TypeOf(int16(0)))
|
||||
case reflect.Int32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
|
||||
case reflect.Int64 : return self.valloc(p, reflect.TypeOf(int64(0)))
|
||||
case reflect.Uint : return self.valloc(p, reflect.TypeOf(uint(0)))
|
||||
case reflect.Uint8 : return self.valloc(p, reflect.TypeOf(uint8(0)))
|
||||
case reflect.Uint16 : return self.valloc(p, reflect.TypeOf(uint16(0)))
|
||||
case reflect.Uint32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
|
||||
case reflect.Uint64 : return self.valloc(p, reflect.TypeOf(uint64(0)))
|
||||
case reflect.Uintptr : return self.valloc(p, reflect.TypeOf(uintptr(0)))
|
||||
case reflect.Float32 : return self.valloc(p, reflect.TypeOf(float32(0)))
|
||||
case reflect.Float64 : return self.valloc(p, reflect.TypeOf(float64(0)))
|
||||
case reflect.Complex64 : panic("abi: go117: not implemented: complex64")
|
||||
case reflect.Complex128 : panic("abi: go117: not implemented: complex128")
|
||||
case reflect.Array : panic("abi: go117: not implemented: arrays")
|
||||
case reflect.Chan : return self.valloc(p, reflect.TypeOf((chan int)(nil)))
|
||||
case reflect.Func : return self.valloc(p, reflect.TypeOf((func())(nil)))
|
||||
case reflect.Map : return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
|
||||
case reflect.Ptr : return self.valloc(p, reflect.TypeOf((*int)(nil)))
|
||||
case reflect.UnsafePointer : return self.valloc(p, ptrType)
|
||||
case reflect.Interface : return self.valloc(p, ptrType, ptrType)
|
||||
case reflect.Slice : return self.valloc(p, ptrType, intType, intType)
|
||||
case reflect.String : return self.valloc(p, ptrType, intType)
|
||||
case reflect.Struct : panic("abi: go117: not implemented: structs")
|
||||
default : panic("abi: invalid value type")
|
||||
}
|
||||
}
|
||||
|
||||
func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
|
||||
for _, vt := range vts {
|
||||
enum := isFloat(vt)
|
||||
if enum != notFloatKind && self.x < len(xregOrderGo) {
|
||||
p = append(p, self.xreg(vt))
|
||||
} else if enum == notFloatKind && self.i < len(iregOrderGo) {
|
||||
p = append(p, self.ireg(vt))
|
||||
} else {
|
||||
p = append(p, self.stack(vt))
|
||||
}
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func NewFunctionLayout(ft reflect.Type) FunctionLayout {
|
||||
var sa stackAlloc
|
||||
var fn FunctionLayout
|
||||
|
||||
/* assign every arguments */
|
||||
for i := 0; i < ft.NumIn(); i++ {
|
||||
fn.Args = sa.alloc(fn.Args, ft.In(i))
|
||||
}
|
||||
|
||||
/* reset the register counter, and add a pointer alignment field */
|
||||
sa.reset()
|
||||
|
||||
/* assign every return value */
|
||||
for i := 0; i < ft.NumOut(); i++ {
|
||||
fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
|
||||
}
|
||||
|
||||
sa.spill(0, PtrAlign)
|
||||
|
||||
/* assign spill slots */
|
||||
for i := 0; i < len(fn.Args); i++ {
|
||||
if fn.Args[i].InRegister {
|
||||
fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
|
||||
}
|
||||
}
|
||||
|
||||
/* add the final pointer alignment field */
|
||||
fn.FP = sa.spill(0, PtrAlign)
|
||||
return fn
|
||||
}
|
||||
|
||||
func (self *Frame) emitExchangeArgs(p *Program) {
|
||||
iregArgs := make([]Parameter, 0, len(self.desc.Args))
|
||||
xregArgs := 0
|
||||
for _, v := range self.desc.Args {
|
||||
if v.InRegister {
|
||||
if v.IsFloat != notFloatKind {
|
||||
xregArgs += 1
|
||||
} else {
|
||||
iregArgs = append(iregArgs, v)
|
||||
}
|
||||
} else {
|
||||
panic("not support stack-assgined arguments now")
|
||||
}
|
||||
}
|
||||
if xregArgs > len(xregOrderC) {
|
||||
panic("too many arguments, only support at most 8 integer register arguments now")
|
||||
}
|
||||
|
||||
switch len(iregArgs) {
|
||||
case 0, 1, 2, 3: {
|
||||
//Fast-Path: when arguments count are less than four, just exchange the registers
|
||||
for i := 0; i < len(iregArgs); i++ {
|
||||
p.MOVQ(iregOrderGo[i], iregOrderC[i])
|
||||
}
|
||||
}
|
||||
case 4, 5, 6: {
|
||||
// need to spill 3th ~ regArgs registers before exchange
|
||||
for i := 3; i < len(iregArgs); i++ {
|
||||
arg := iregArgs[i]
|
||||
// pointer args have already been spilled
|
||||
if !arg.IsPointer {
|
||||
p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev() + arg.Mem)))
|
||||
}
|
||||
}
|
||||
p.MOVQ(iregOrderGo[0], iregOrderC[0])
|
||||
p.MOVQ(iregOrderGo[1], iregOrderC[1])
|
||||
p.MOVQ(iregOrderGo[2], iregOrderC[2])
|
||||
for i := 3; i < len(iregArgs); i++ {
|
||||
arg := iregArgs[i]
|
||||
p.MOVQ(Ptr(RSP, int32(self.Prev() + arg.Mem)), iregOrderC[i])
|
||||
}
|
||||
}
|
||||
default:
|
||||
panic("too many arguments, only support at most 6 integer register arguments now")
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
|
||||
p.LEAQ(Ptr(RSP, int32(-(self.Size() + uint32(maxStack)))), R12)
|
||||
p.CMPQ(Ptr(R14, _G_stackguard0), R12)
|
||||
p.JBE(to)
|
||||
}
|
||||
|
||||
func (self *Frame) StackCheckTextSize() uint32 {
|
||||
p := DefaultArch.CreateProgram()
|
||||
p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
|
||||
p.CMPQ(Ptr(R14, _G_stackguard0), R12)
|
||||
to := CreateLabel("")
|
||||
p.Link(to)
|
||||
p.JBE(to)
|
||||
return uint32(len(p.Assemble(0)))
|
||||
}
|
||||
|
||||
func (self *Frame) emitExchangeRets(p *Program) {
|
||||
if len(self.desc.Rets) > 1 {
|
||||
panic("too many results, only support one result now")
|
||||
}
|
||||
// store result
|
||||
if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
|
||||
if self.desc.Rets[0].IsFloat == floatKind64 {
|
||||
p.MOVSD(xregOrderC[0], self.retv(0))
|
||||
} else if self.desc.Rets[0].IsFloat == floatKind32 {
|
||||
p.MOVSS(xregOrderC[0], self.retv(0))
|
||||
} else {
|
||||
p.MOVQ(RAX, self.retv(0))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Frame) emitRestoreRegs(p *Program) {
|
||||
// load reserved registers
|
||||
for i, r := range ReservedRegs(self.ccall) {
|
||||
switch r.(type) {
|
||||
case Register64:
|
||||
p.MOVQ(self.resv(i), r)
|
||||
case XMMRegister:
|
||||
p.MOVSD(self.resv(i), r)
|
||||
default:
|
||||
panic(fmt.Sprintf("unsupported register type %t to reserve", r))
|
||||
}
|
||||
}
|
||||
// zero xmm15 for go abi
|
||||
p.XORPS(zeroRegGo, zeroRegGo)
|
||||
}
|
|
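As a rough standalone illustration of the register assignment that NewFunctionLayout above performs (not part of the vendored diff; all names here are illustrative and composite types are ignored), the classification step can be sketched like this:

package main

import (
    "fmt"
    "reflect"
)

// Register orders mirroring iregOrderGo / xregOrderGo in the vendored code above.
var intRegs = []string{"RAX", "RBX", "RCX", "RDI", "RSI", "R8", "R9", "R10", "R11"}
var xmmRegs = []string{"XMM0", "XMM1", "XMM2", "XMM3", "XMM4", "XMM5", "XMM6", "XMM7",
    "XMM8", "XMM9", "XMM10", "XMM11", "XMM12", "XMM13", "XMM14"}

// classify assigns each parameter of fn to an integer register, an XMM register,
// or the stack, in declaration order (a simplified sketch of the real logic).
func classify(fn interface{}) []string {
    ft := reflect.TypeOf(fn)
    out := make([]string, 0, ft.NumIn())
    i, x := 0, 0
    for n := 0; n < ft.NumIn(); n++ {
        k := ft.In(n).Kind()
        isFloat := k == reflect.Float32 || k == reflect.Float64
        switch {
        case isFloat && x < len(xmmRegs):
            out, x = append(out, xmmRegs[x]), x+1
        case !isFloat && i < len(intRegs):
            out, i = append(out, intRegs[i]), i+1
        default:
            out = append(out, "stack")
        }
    }
    return out
}

func main() {
    fmt.Println(classify(func(a int, b float64, c *int) {})) // [RAX XMM0 RBX]
}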
@ -1,8 +1,5 @@
//go:build go1.15 && !go1.16
// +build go1.15,!go1.16

/*
 * Copyright 2021 ByteDance Inc.
/**
 * Copyright 2023 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@ -17,12 +14,22 @@
 * limitations under the License.
 */

package loader
package abi

import (
    `github.com/bytedance/sonic/internal/loader`
    _ `unsafe`

    `github.com/bytedance/sonic/internal/rt`
)

func (self Loader) LoadOne(text []byte, funcName string, frameSize int, argSize int, argStackmap []bool, localStackmap []bool) Function {
    return Function(loader.Loader(text).Load(funcName, frameSize, argSize, argStackmap, localStackmap))
}
const (
    _G_stackguard0 = 0x10
)

var (
    F_morestack_noctxt = uintptr(rt.FuncAddr(morestack_noctxt))
)

//go:linkname morestack_noctxt runtime.morestack_noctxt
func morestack_noctxt()
130 vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go generated vendored Normal file

@ -0,0 +1,130 @@
// +build go1.16,!go1.17

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package decoder

import (
    `strconv`
    _ `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

var _runtime_writeBarrier uintptr = rt.GcwbAddr()

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
126 vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go generated vendored Normal file

@ -0,0 +1,126 @@
// +build go1.17,!go1.21

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package decoder

import (
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _AX)
    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
132 vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go generated vendored Normal file

@ -0,0 +1,132 @@
// +build go1.21,!go1.22

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package decoder

import (
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr

//go:nosplit
//go:linkname gcWriteBarrier2 runtime.gcWriteBarrier2
func gcWriteBarrier2()

// Notice: gcWriteBarrier must use R11 register!!
var _R11 = _IC

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrier2 = jit.Func(gcWriteBarrier2)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI, _R11)
    } else {
        self.save(_R11)
    }
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
    if saveDI {
        self.load(_DI, _R11)
    } else {
        self.load(_R11)
    }
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.save(_AX, _R11)
    } else {
        self.save(_R11)
    }
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
    if saveAX {
        self.load(_AX, _R11)
    } else {
        self.load(_R11)
    }
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI, _R11)
    } else {
        self.save(_R11)
    }
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
    if saveDI {
        self.load(_DI, _R11)
    } else {
        self.load(_R11)
    }
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _AX)
    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.save(_R11)
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
    self.load(_R11)
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
}
@ -1,5 +1,5 @@
//go:build go1.17 && !go1.21
// +build go1.17,!go1.21
//go:build go1.17 && !go1.22
// +build go1.17,!go1.22

/*
 * Copyright 2021 ByteDance Inc.

@ -24,7 +24,6 @@
    `fmt`
    `math`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`

@ -33,7 +32,6 @@
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Register Allocations

@ -825,8 +823,8 @@ func (self *_Assembler) escape_string_twice() {
)

var (
    _Vp_max_f32 = new(float64)
    _Vp_min_f32 = new(float64)
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

func init() {

@ -835,17 +833,15 @@ func init() {
}

func (self *_Assembler) range_single_X0() {
    self.Emit("MOVSD"   , _VAR_st_Dv, _X0)             // MOVSD st.Dv, X0
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)              // CVTSD2SS _VAR_st_Dv, X0
    self.Emit("MOVQ"    , _V_max_f32, _CX)              // MOVQ _max_f32, CX
    self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET)   // MOVQ ${itab(float32)}, ET
    self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP)   // MOVQ ${type(float32)}, EP
    self.Emit("UCOMISD" , jit.Ptr(_CX, 0), _X0)         // UCOMISD (CX), X0
    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)         // UCOMISS (CX), X0
    self.Sjmp("JA"      , _LB_range_error)              // JA _range_error
    self.Emit("MOVQ"    , _V_min_f32, _CX)              // MOVQ _min_f32, CX
    self.Emit("MOVSD"   , jit.Ptr(_CX, 0), _X1)         // MOVSD (CX), X1
    self.Emit("UCOMISD" , _X0, _X1)                     // UCOMISD X0, X1
    self.Sjmp("JA"      , _LB_range_error)              // JA _range_error
    self.Emit("CVTSD2SS", _X0, _X0)                     // CVTSD2SS X0, X0
    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)         // UCOMISS (CX), X0
    self.Sjmp("JB"      , _LB_range_error)              // JB _range_error
}

func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {

@ -1931,62 +1927,3 @@ func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)           // MOVQ $(i), (SP)
    self.call_go(_F_println)
}

//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
@ -1,4 +1,4 @@
// +build go1.15,!go1.17
// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.

@ -23,7 +23,6 @@
    `fmt`
    `math`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`

@ -32,7 +31,6 @@
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Register Allocations

@ -818,8 +816,8 @@ func (self *_Assembler) escape_string_twice() {
)

var (
    _Vp_max_f32 = new(float64)
    _Vp_min_f32 = new(float64)
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

func init() {

@ -828,17 +826,15 @@ func init() {
}

func (self *_Assembler) range_single() {
    self.Emit("MOVSD"   , _VAR_st_Dv, _X0)              // MOVSD st.Dv, X0
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)              // CVTSD2SS st.Dv, X0
    self.Emit("MOVQ"    , _V_max_f32, _AX)              // MOVQ _max_f32, AX
    self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET)   // MOVQ ${itab(float32)}, ET
    self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP)   // MOVQ ${type(float32)}, EP
    self.Emit("UCOMISD" , jit.Ptr(_AX, 0), _X0)         // UCOMISD (AX), X0
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)         // UCOMISS (AX), X0
    self.Sjmp("JA"      , _LB_range_error)              // JA _range_error
    self.Emit("MOVQ"    , _V_min_f32, _AX)              // MOVQ _min_f32, AX
    self.Emit("MOVSD"   , jit.Ptr(_AX, 0), _X1)         // MOVSD (AX), X1
    self.Emit("UCOMISD" , _X0, _X1)                     // UCOMISD X0, X1
    self.Sjmp("JA"      , _LB_range_error)              // JA _range_error
    self.Emit("CVTSD2SS", _X0, _X0)                     // CVTSD2SS X0, X0
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)         // UCOMISS (AX), X0
    self.Sjmp("JB"      , _LB_range_error)              // JB _range_error
}

func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {

@ -1951,63 +1947,3 @@ func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))   // MOVQ $(i), (SP)
    self.call_go(_F_println)
}

var _runtime_writeBarrier uintptr = rt.GcwbAddr()

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
14 vendor/github.com/bytedance/sonic/internal/decoder/decoder.go generated vendored

@ -30,14 +30,14 @@
)

const (
    _F_use_int64 = iota
    _F_use_number
    _F_disable_urc
    _F_disable_unknown
    _F_copy_string
    _F_validate_string
    _F_use_int64 = 0
    _F_disable_urc = 2
    _F_disable_unknown = 3
    _F_copy_string = 4

    _F_allow_control = 31
    _F_use_number = types.B_USE_NUMBER
    _F_validate_string = types.B_VALIDATE_STRING
    _F_allow_control = types.B_ALLOW_CONTROL
)

type Options uint64
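The decoder flags above are now pinned to explicit bit positions (some shared with the types.B_* constants) instead of an iota sequence, so the bit layout stays stable across packages. A minimal standalone sketch of setting and testing option bits of this kind, with illustrative values only and no dependence on the vendored API:

package main

import "fmt"

// Illustrative bit positions matching the constants shown in the hunk above.
const (
    flagUseInt64       = 0
    flagDisableURC     = 2
    flagDisableUnknown = 3
    flagCopyString     = 4
)

type Options uint64

func (o *Options) set(bit int)     { *o |= 1 << bit }        // turn a flag on
func (o Options) has(bit int) bool { return o&(1<<bit) != 0 } // test a flag

func main() {
    var opts Options
    opts.set(flagCopyString)
    fmt.Println(opts.has(flagCopyString), opts.has(flagUseInt64)) // true false
}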
70 vendor/github.com/bytedance/sonic/internal/decoder/errors.go generated vendored

@ -44,35 +44,12 @@ func (self SyntaxError) Description() string {
}

func (self SyntaxError) description() string {
    i := 16
    p := self.Pos - i
    q := self.Pos + i

    /* check for empty source */
    if self.Src == "" {
        return fmt.Sprintf("no sources available: %#v", self)
    }

    /* prevent slicing before the beginning */
    if p < 0 {
        p, q, i = 0, q - p, i + p
    }

    /* prevent slicing beyond the end */
    if n := len(self.Src); q > n {
        n = q - n
        q = len(self.Src)

        /* move the left bound if possible */
        if p > n {
            i += n
            p -= n
        }
    }

    /* left and right length */
    x := clamp_zero(i)
    y := clamp_zero(q - p - i - 1)
    p, x, q, y := calcBounds(len(self.Src), self.Pos)

    /* compose the error description */
    return fmt.Sprintf(

@ -85,6 +62,39 @@ func (self SyntaxError) description() string {
    )
}

func calcBounds(size int, pos int) (lbound int, lwidth int, rbound int, rwidth int) {
    if pos >= size || pos < 0 {
        return 0, 0, size, 0
    }

    i := 16
    lbound = pos - i
    rbound = pos + i

    /* prevent slicing before the beginning */
    if lbound < 0 {
        lbound, rbound, i = 0, rbound - lbound, i + lbound
    }

    /* prevent slicing beyond the end */
    if n := size; rbound > n {
        n = rbound - n
        rbound = size

        /* move the left bound if possible */
        if lbound > n {
            i += n
            lbound -= n
        }
    }

    /* left and right length */
    lwidth = clamp_zero(i)
    rwidth = clamp_zero(rbound - lbound - i - 1)

    return
}

func (self SyntaxError) Message() string {
    if self.Msg == "" {
        return self.Code.Message()

@ -107,16 +117,19 @@ func clamp_zero(v int) int {
    Value : reflect.ValueOf("..."),
}

//go:nosplit
func error_wrap(src string, pos int, code types.ParsingError) error {
    return SyntaxError {
    return *error_wrap_heap(src, pos, code)
}

//go:noinline
func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError {
    return &SyntaxError {
        Pos : pos,
        Src : src,
        Code : code,
    }
}

//go:nosplit
func error_type(vt *rt.GoType) error {
    return &json.UnmarshalTypeError{Type: vt.Pack()}
}

@ -158,7 +171,6 @@ func (self MismatchTypeError) Description() string {
    return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}

//go:nosplit
func error_mismatch(src string, pos int, vt *rt.GoType) error {
    return &MismatchTypeError {
        Pos : pos,

@ -167,12 +179,10 @@ func error_mismatch(src string, pos int, vt *rt.GoType) error {
    }
}

//go:nosplit
func error_field(name string) error {
    return errors.New("json: unknown field " + strconv.Quote(name))
}

//go:nosplit
func error_value(value string, vtype reflect.Type) error {
    return &json.UnmarshalTypeError {
        Type : vtype,
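The calcBounds helper introduced in the errors.go hunk above clamps a 16-byte window around the error position so the quoted snippet never slices outside the source. A simplified standalone sketch of the same clamping (function and variable names here are illustrative, not the vendored API):

package main

import "fmt"

// window returns slice bounds for a ±radius context window around pos,
// clamped to [0, size), roughly mirroring calcBounds above.
func window(size, pos, radius int) (lo, hi int) {
    if pos < 0 || pos >= size {
        return 0, size
    }
    lo, hi = pos-radius, pos+radius
    if lo < 0 {
        hi -= lo // extend right by the amount the left bound under-ran
        lo = 0
    }
    if hi > size {
        over := hi - size
        hi = size
        if lo >= over {
            lo -= over // shift the window left when it over-runs the end
        }
    }
    return lo, hi
}

func main() {
    src := `{"name": oops}`
    lo, hi := window(len(src), 9, 16)
    fmt.Printf("context: %q\n", src[lo:hi]) // the whole (short) source is quoted
}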
@ -1,5 +1,4 @@
//go:build go1.17 && !go1.21
// +build go1.17,!go1.21
// +build go1.17,!go1.22

/*
 * Copyright 2021 ByteDance Inc.

@ -23,13 +22,11 @@
    `encoding/json`
    `fmt`
    `reflect`
    `strconv`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Crucial Registers:

@ -720,46 +717,6 @@ func (self *_ValueDecoder) compile() {
        }
    }

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _AX)
    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

/** Generic Decoder **/

var (

@ -1,4 +1,4 @@
// +build go1.17,!go1.21
// +build go1.17,!go1.22

//
// Copyright 2021 ByteDance Inc.
@ -1,4 +1,4 @@
|
|||
// +build go1.15,!go1.17
|
||||
// +build go1.16,!go1.17
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
|
@ -22,13 +22,11 @@
|
|||
`encoding/json`
|
||||
`fmt`
|
||||
`reflect`
|
||||
`strconv`
|
||||
|
||||
`github.com/bytedance/sonic/internal/jit`
|
||||
`github.com/bytedance/sonic/internal/native`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/twitchyliquid64/golang-asm/obj`
|
||||
`github.com/twitchyliquid64/golang-asm/obj/x86`
|
||||
)
|
||||
|
||||
/** Crucial Registers:
|
||||
|
@ -645,7 +643,8 @@ func (self *_ValueDecoder) compile() {
|
|||
self.Emit("MOVQ", _R8, _VAR_cs_p)
|
||||
self.Emit("MOVQ", _AX, _VAR_cs_n)
|
||||
self.Emit("MOVQ", _DI, _VAR_cs_LR)
|
||||
self.Emit("MOVQ", _T_byte, jit.Ptr(_SP, 0))
|
||||
self.Emit("MOVQ", _T_byte, _R8)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
|
||||
self.call_go(_F_makeslice)
|
||||
|
@ -722,48 +721,6 @@ func (self *_ValueDecoder) compile() {
|
|||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", _AX, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
|
||||
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
|
||||
panic("rec contains AX!")
|
||||
}
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, _AX)
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
/** Generic Decoder **/
|
||||
|
||||
var (
|
|
@ -1,4 +1,4 @@
|
|||
// +build go1.15,!go1.17
|
||||
// +build go1.16,!go1.17
|
||||
|
||||
//
|
||||
// Copyright 2021 ByteDance Inc.
|
2
vendor/github.com/bytedance/sonic/internal/decoder/pools.go
generated
vendored
2
vendor/github.com/bytedance/sonic/internal/decoder/pools.go
generated
vendored
|
@ -29,7 +29,7 @@
|
|||
_MinSlice = 2
|
||||
_MaxStack = 4096 // 4k slots
|
||||
_MaxStackBytes = _MaxStack * _PtrBytes
|
||||
_MaxDigitNums = 800 // used in atof fallback algorithm
|
||||
_MaxDigitNums = types.MaxDigitNums // used in atof fallback algorithm
|
||||
)
|
||||
|
||||
const (
|
||||
|
|
19 vendor/github.com/bytedance/sonic/internal/decoder/stream.go generated vendored

@ -21,8 +21,9 @@
    `io`
    `sync`

    `github.com/bytedance/sonic/option`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/option`
)

var (

@ -71,6 +72,7 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {

    var first = true
    var repeat = true

read_more:
    for {
        l := len(buf)

@ -97,11 +99,20 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
    l := len(buf)
    if l > 0 {
        self.Decoder.Reset(string(buf))

        var x int
        if ret := native.SkipOneFast(&self.s, &x); ret < 0 {
            if repeat {
                goto read_more
            } else {
                err = SyntaxError{x, self.s, types.ParsingError(-ret), ""}
                self.err = err
                return
            }
        }

        err = self.Decoder.Decode(val)
        if err != nil {
            if repeat && self.repeatable(err) {
                goto read_more
            }
            self.err = err
        }

@ -1,4 +1,4 @@
// +build go1.15,!go1.20
// +build go1.16,!go1.20

/*
 * Copyright 2021 ByteDance Inc.
10 vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go generated vendored

@ -82,23 +82,23 @@ func makemap_small() unsafe.Pointer

//go:linkname mapassign runtime.mapassign
//goland:noinspection GoUnusedParameter
func mapassign(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
func mapassign(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer

//go:linkname mapassign_fast32 runtime.mapassign_fast32
//goland:noinspection GoUnusedParameter
func mapassign_fast32(t *rt.GoType, h unsafe.Pointer, k uint32) unsafe.Pointer
func mapassign_fast32(t *rt.GoMapType, h unsafe.Pointer, k uint32) unsafe.Pointer

//go:linkname mapassign_fast64 runtime.mapassign_fast64
//goland:noinspection GoUnusedParameter
func mapassign_fast64(t *rt.GoType, h unsafe.Pointer, k uint64) unsafe.Pointer
func mapassign_fast64(t *rt.GoMapType, h unsafe.Pointer, k uint64) unsafe.Pointer

//go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
//goland:noinspection GoUnusedParameter
func mapassign_fast64ptr(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
func mapassign_fast64ptr(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer

//go:linkname mapassign_faststr runtime.mapassign_faststr
//goland:noinspection GoUnusedParameter
func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer
func mapassign_faststr(t *rt.GoMapType, h unsafe.Pointer, s string) unsafe.Pointer

//go:nosplit
//go:linkname memclrHasPointers runtime.memclrHasPointers
51 vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go generated vendored Normal file

@ -0,0 +1,51 @@
// +build go1.16,!go1.17

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package encoder

import (
    `strconv`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

var (
    _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtr(i int, ptr obj.Addr, rec obj.Addr) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    self.xsave(_DI)
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    self.xload(_DI)
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

51 vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go117.go generated vendored Normal file

@ -0,0 +1,51 @@
// +build go1.17,!go1.21

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package encoder

import (
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtr(i int, ptr obj.Addr, rec obj.Addr) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _BX)
    self.Emit("CMPL", jit.Ptr(_BX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.xsave(_DI)
    self.Emit("MOVQ", ptr, _AX)
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _BX) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _BX)
    self.xload(_DI)
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

50 vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go121.go generated vendored Normal file

@ -0,0 +1,50 @@
// +build go1.21,!go1.22

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package encoder

import (
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrier2 = jit.Func(gcWriteBarrier2)
)

func (self *_Assembler) WritePtr(i int, ptr obj.Addr, old obj.Addr) {
    if old.Reg == x86.REG_AX || old.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _BX)
    self.Emit("CMPL", jit.Ptr(_BX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.xsave(_SP_q)
    self.Emit("MOVQ", _F_gcWriteBarrier2, _BX) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _BX)
    self.Emit("MOVQ", ptr, jit.Ptr(_SP_q, 0))
    self.Emit("MOVQ", old, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP_q, 8))
    self.xload(_SP_q)
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, old)
}
@ -1,5 +1,5 @@
|
|||
//go:build go1.17 && !go1.21
|
||||
// +build go1.17,!go1.21
|
||||
//go:build go1.17 && !go1.22
|
||||
// +build go1.17,!go1.22
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
|
@ -435,8 +435,8 @@ func (self *_Assembler) save_state() {
|
|||
self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep
|
||||
self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX)
|
||||
self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
|
||||
self.WriteRecNotAX(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX)
|
||||
self.WriteRecNotAX(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX)
|
||||
self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX)
|
||||
self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX)
|
||||
self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0)) // MOVQ R9, (ST)
|
||||
}
|
||||
|
||||
|
@ -1175,28 +1175,3 @@ func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
|
|||
self.Emit("MOVQ", jit.Imm(int64(i)), _AX) // MOVQ $(i), CX
|
||||
self.call_go(_F_println)
|
||||
}
|
||||
|
||||
var (
|
||||
_V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
|
||||
|
||||
_F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
|
||||
)
|
||||
|
||||
func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr) {
|
||||
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
|
||||
panic("rec contains AX!")
|
||||
}
|
||||
self.Emit("MOVQ", _V_writeBarrier, _BX)
|
||||
self.Emit("CMPL", jit.Ptr(_BX, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.xsave(_DI)
|
||||
self.Emit("MOVQ", ptr, _AX)
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _BX) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _BX)
|
||||
self.xload(_DI)
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
// +build go1.15,!go1.17
|
||||
// +build go1.16,!go1.17
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
|
@ -421,8 +421,8 @@ func (self *_Assembler) save_state() {
|
|||
self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep
|
||||
self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX)
|
||||
self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
|
||||
self.WriteRecNotAX(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX)
|
||||
self.WriteRecNotAX(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX)
|
||||
self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX)
|
||||
self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0)) // MOVQ R8, (ST)
|
||||
}
|
||||
|
||||
|
@ -579,7 +579,8 @@ func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.
|
|||
|
||||
func (self *_Assembler) more_space() {
|
||||
self.Link(_LB_more_space)
|
||||
self.Emit("MOVQ", _T_byte, jit.Ptr(_SP, 0)) // MOVQ $_T_byte, (SP)
|
||||
self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, _AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ _AX, (SP)
|
||||
self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8)) // MOVQ RP, 8(SP)
|
||||
self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16)) // MOVQ RL, 16(SP)
|
||||
self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24)) // MOVQ RC, 24(SP)
|
||||
|
@ -1172,28 +1173,3 @@ func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
|
|||
self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP)
|
||||
self.call_go(_F_println)
|
||||
}
|
||||
|
||||
var (
|
||||
_V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))
|
||||
|
||||
_F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
|
||||
)
|
||||
|
||||
func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr) {
|
||||
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
|
||||
panic("rec contains AX!")
|
||||
}
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, _AX)
|
||||
self.xsave(_DI)
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
self.xload(_DI)
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
2
vendor/github.com/bytedance/sonic/internal/encoder/debug_go116.go
generated
vendored
2
vendor/github.com/bytedance/sonic/internal/encoder/debug_go116.go
generated
vendored
|
@ -1,4 +1,4 @@
|
|||
// +build go1.15,!go1.17
|
||||
// +build go1.16,!go1.17
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
|
|
2
vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
generated
vendored
2
vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
generated
vendored
|
@ -1,4 +1,4 @@
|
|||
// +build go1.17,!go1.21
|
||||
// +build go1.17,!go1.22
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
|
|
15
vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
generated
vendored
15
vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
generated
vendored
|
@ -40,6 +40,7 @@
|
|||
bitNoQuoteTextMarshaler
|
||||
bitNoNullSliceOrMap
|
||||
bitValidateString
|
||||
bitNoValidateJSONMarshaler
|
||||
|
||||
// used for recursive compile
|
||||
bitPointerValue = 63
|
||||
|
@ -72,6 +73,10 @@
|
|||
// before encoding it into JSON.
|
||||
ValidateString Options = 1 << bitValidateString
|
||||
|
||||
// NoValidateJSONMarshaler indicates that the encoder should not validate the output string
|
||||
// after encoding the JSONMarshaler to JSON.
|
||||
NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler
|
||||
|
||||
// CompatibleWithStd is used to be compatible with std encoder.
|
||||
CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
|
||||
)
|
||||
|
@ -115,6 +120,15 @@ func (self *Encoder) SetValidateString(f bool) {
|
|||
}
|
||||
}
|
||||
|
||||
// SetNoValidateJSONMarshaler specifies if option NoValidateJSONMarshaler opens
|
||||
func (self *Encoder) SetNoValidateJSONMarshaler(f bool) {
|
||||
if f {
|
||||
self.Opts |= NoValidateJSONMarshaler
|
||||
} else {
|
||||
self.Opts &= ^NoValidateJSONMarshaler
|
||||
}
|
||||
}
|
||||
|
||||
// SetCompactMarshaler specifies if option CompactMarshaler opens
|
||||
func (self *Encoder) SetCompactMarshaler(f bool) {
|
||||
if f {
|
||||
|
@ -292,7 +306,6 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
|
|||
cfg := option.DefaultCompileOptions()
|
||||
for _, opt := range opts {
|
||||
opt(&cfg)
|
||||
break
|
||||
}
|
||||
return pretouchRec(map[reflect.Type]uint8{vt: 0}, cfg)
|
||||
}
|
||||
|
|
2
vendor/github.com/bytedance/sonic/internal/encoder/primitives.go
generated
vendored
2
vendor/github.com/bytedance/sonic/internal/encoder/primitives.go
generated
vendored
|
@ -93,9 +93,11 @@ func encodeJsonMarshaler(buf *[]byte, val json.Marshaler, opt Options) error {
|
|||
if opt & CompactMarshaler != 0 {
|
||||
return compact(buf, ret)
|
||||
}
|
||||
if opt & NoValidateJSONMarshaler == 0 {
|
||||
if ok, s := Valid(ret); !ok {
|
||||
return error_marshaler(ret, s)
|
||||
}
|
||||
}
|
||||
*buf = append(*buf, ret...)
|
||||
return nil
|
||||
}
|
||||
|
|
2  vendor/github.com/bytedance/sonic/internal/encoder/stubs_go116.go  generated vendored
@@ -1,4 +1,4 @@
// +build go1.15,!go1.17
// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.

2  vendor/github.com/bytedance/sonic/internal/encoder/stubs_go120.go  generated vendored
@@ -1,4 +1,4 @@
// +build go1.20
// +build go1.20,!go1.21

/*
 * Copyright 2021 ByteDance Inc.

66  vendor/github.com/bytedance/sonic/internal/encoder/stubs_go121.go  generated vendored  Normal file
@@ -0,0 +1,66 @@
// +build go1.21

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package encoder

import (
    `unsafe`

    _ `github.com/chenzhuoyu/base64x`

    `github.com/bytedance/sonic/internal/rt`
)

//go:linkname _subr__b64encode github.com/chenzhuoyu/base64x._subr__b64encode
var _subr__b64encode uintptr

//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)

//go:linkname growslice reflect.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice

//go:linkname assertI2I runtime.assertI2I2
//goland:noinspection GoUnusedParameter
func assertI2I(inter *rt.GoType, i rt.GoIface) rt.GoIface

//go:linkname mapiternext runtime.mapiternext
//goland:noinspection GoUnusedParameter
func mapiternext(it *rt.GoMapIterator)

//go:linkname mapiterinit runtime.mapiterinit
//goland:noinspection GoUnusedParameter
func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)

//go:linkname isValidNumber encoding/json.isValidNumber
//goland:noinspection GoUnusedParameter
func isValidNumber(s string) bool

//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)

//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr

//go:linkname gcWriteBarrier2 runtime.gcWriteBarrier2
func gcWriteBarrier2()

124  vendor/github.com/bytedance/sonic/internal/loader/funcdata.go  generated vendored
@@ -1,124 +0,0 @@
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package loader
|
||||
|
||||
import (
|
||||
`reflect`
|
||||
`sync`
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
//go:linkname lastmoduledatap runtime.lastmoduledatap
|
||||
//goland:noinspection GoUnusedGlobalVariable
|
||||
var lastmoduledatap *_ModuleData
|
||||
|
||||
//go:linkname moduledataverify1 runtime.moduledataverify1
|
||||
func moduledataverify1(_ *_ModuleData)
|
||||
|
||||
// PCDATA and FUNCDATA table indexes.
|
||||
//
|
||||
// See funcdata.h and $GROOT/src/cmd/internal/objabi/funcdata.go.
|
||||
const (
|
||||
_FUNCDATA_ArgsPointerMaps = 0
|
||||
_FUNCDATA_LocalsPointerMaps = 1
|
||||
)
|
||||
|
||||
type funcInfo struct {
|
||||
*_Func
|
||||
datap *_ModuleData
|
||||
}
|
||||
|
||||
//go:linkname findfunc runtime.findfunc
|
||||
func findfunc(pc uintptr) funcInfo
|
||||
|
||||
//go:linkname funcdata runtime.funcdata
|
||||
func funcdata(f funcInfo, i uint8) unsafe.Pointer
|
||||
|
||||
var (
|
||||
modLock sync.Mutex
|
||||
modList []*_ModuleData
|
||||
)
|
||||
|
||||
var emptyByte byte
|
||||
|
||||
func encodeVariant(v int) []byte {
|
||||
var u int
|
||||
var r []byte
|
||||
|
||||
/* split every 7 bits */
|
||||
for v > 127 {
|
||||
u = v & 0x7f
|
||||
v = v >> 7
|
||||
r = append(r, byte(u) | 0x80)
|
||||
}
|
||||
|
||||
/* check for last one */
|
||||
if v == 0 {
|
||||
return r
|
||||
}
|
||||
|
||||
/* add the last one */
|
||||
r = append(r, byte(v))
|
||||
return r
|
||||
}
|
||||
|
||||
func registerModule(mod *_ModuleData) {
|
||||
modLock.Lock()
|
||||
modList = append(modList, mod)
|
||||
lastmoduledatap.next = mod
|
||||
lastmoduledatap = mod
|
||||
modLock.Unlock()
|
||||
}
|
||||
|
||||
func stackMap(f interface{}) (args uintptr, locals uintptr) {
|
||||
fv := reflect.ValueOf(f)
|
||||
if fv.Kind() != reflect.Func {
|
||||
panic("f must be reflect.Func kind!")
|
||||
}
|
||||
fi := findfunc(fv.Pointer())
|
||||
return uintptr(funcdata(fi, uint8(_FUNCDATA_ArgsPointerMaps))), uintptr(funcdata(fi, uint8(_FUNCDATA_LocalsPointerMaps)))
|
||||
}
|
||||
|
||||
var moduleCache = struct{
|
||||
m map[*_ModuleData][]byte
|
||||
l sync.Mutex
|
||||
}{
|
||||
m : make(map[*_ModuleData][]byte),
|
||||
}
|
||||
|
||||
func cacheStackmap(argPtrs []bool, localPtrs []bool, mod *_ModuleData) (argptrs uintptr, localptrs uintptr) {
|
||||
as := rt.StackMapBuilder{}
|
||||
for _, b := range argPtrs {
|
||||
as.AddField(b)
|
||||
}
|
||||
ab, _ := as.Build().MarshalBinary()
|
||||
ls := rt.StackMapBuilder{}
|
||||
for _, b := range localPtrs {
|
||||
ls.AddField(b)
|
||||
}
|
||||
lb, _ := ls.Build().MarshalBinary()
|
||||
cache := make([]byte, len(ab) + len(lb))
|
||||
copy(cache, ab)
|
||||
copy(cache[len(ab):], lb)
|
||||
moduleCache.l.Lock()
|
||||
moduleCache.m[mod] = cache
|
||||
moduleCache.l.Unlock()
|
||||
return uintptr(rt.IndexByte(cache, 0)), uintptr(rt.IndexByte(cache, len(ab)))
|
||||
|
||||
}
|
169  vendor/github.com/bytedance/sonic/internal/loader/funcdata_go115.go  generated vendored
@@ -1,169 +0,0 @@
// +build go1.15,!go1.16
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package loader
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
type _Func struct {
|
||||
entry uintptr // start pc
|
||||
nameoff int32 // function name
|
||||
args int32 // in/out args size
|
||||
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
|
||||
pcsp int32
|
||||
pcfile int32
|
||||
pcln int32
|
||||
npcdata int32
|
||||
funcID uint8 // set for certain special runtime functions
|
||||
_ [2]int8 // unused
|
||||
nfuncdata uint8 // must be last
|
||||
argptrs uintptr
|
||||
localptrs uintptr
|
||||
}
|
||||
|
||||
type _FuncTab struct {
|
||||
entry uintptr
|
||||
funcoff uintptr
|
||||
}
|
||||
|
||||
type _BitVector struct {
|
||||
n int32 // # of bits
|
||||
bytedata *uint8
|
||||
}
|
||||
|
||||
type _PtabEntry struct {
|
||||
name int32
|
||||
typ int32
|
||||
}
|
||||
|
||||
type _TextSection struct {
|
||||
vaddr uintptr // prelinked section vaddr
|
||||
length uintptr // section length
|
||||
baseaddr uintptr // relocated section address
|
||||
}
|
||||
|
||||
type _ModuleData struct {
|
||||
pclntable []byte
|
||||
ftab []_FuncTab
|
||||
filetab []uint32
|
||||
findfunctab *_FindFuncBucket
|
||||
minpc, maxpc uintptr
|
||||
text, etext uintptr
|
||||
noptrdata, enoptrdata uintptr
|
||||
data, edata uintptr
|
||||
bss, ebss uintptr
|
||||
noptrbss, enoptrbss uintptr
|
||||
end, gcdata, gcbss uintptr
|
||||
types, etypes uintptr
|
||||
textsectmap []_TextSection
|
||||
typelinks []int32 // offsets from types
|
||||
itablinks []*rt.GoItab
|
||||
ptab []_PtabEntry
|
||||
pluginpath string
|
||||
pkghashes []byte
|
||||
modulename string
|
||||
modulehashes []byte
|
||||
hasmain uint8 // 1 if module contains the main function, 0 otherwise
|
||||
gcdatamask, gcbssmask _BitVector
|
||||
typemap map[int32]*rt.GoType // offset to *_rtype in previous module
|
||||
bad bool // module failed to load and should be ignored
|
||||
next *_ModuleData
|
||||
}
|
||||
|
||||
type _FindFuncBucket struct {
|
||||
idx uint32
|
||||
subbuckets [16]byte
|
||||
}
|
||||
|
||||
var findFuncTab = &_FindFuncBucket {
|
||||
idx: 1,
|
||||
}
|
||||
|
||||
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
|
||||
mod := new(_ModuleData)
|
||||
minpc := pc
|
||||
maxpc := pc + size
|
||||
|
||||
/* build the PC & line table */
|
||||
pclnt := []byte {
|
||||
0xfb, 0xff, 0xff, 0xff, // magic : 0xfffffffb
|
||||
0, // pad1 : 0
|
||||
0, // pad2 : 0
|
||||
1, // minLC : 1
|
||||
4 << (^uintptr(0) >> 63), // ptrSize : 4 << (^uintptr(0) >> 63)
|
||||
}
|
||||
|
||||
// cache arg and local stackmap
|
||||
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
|
||||
|
||||
/* add the function name */
|
||||
noff := len(pclnt)
|
||||
pclnt = append(append(pclnt, name...), 0)
|
||||
|
||||
/* add PCDATA */
|
||||
pcsp := len(pclnt)
|
||||
pclnt = append(pclnt, encodeVariant((fp + 1) << 1)...)
|
||||
pclnt = append(pclnt, encodeVariant(int(size))...)
|
||||
|
||||
/* function entry */
|
||||
fnv := _Func {
|
||||
entry : pc,
|
||||
nameoff : int32(noff),
|
||||
args : int32(args),
|
||||
pcsp : int32(pcsp),
|
||||
nfuncdata : 2,
|
||||
argptrs : uintptr(argptrs),
|
||||
localptrs : uintptr(localptrs),
|
||||
}
|
||||
|
||||
/* align the func to 8 bytes */
|
||||
if p := len(pclnt) % 8; p != 0 {
|
||||
pclnt = append(pclnt, make([]byte, 8 - p)...)
|
||||
}
|
||||
|
||||
/* add the function descriptor */
|
||||
foff := len(pclnt)
|
||||
pclnt = append(pclnt, (*(*[unsafe.Sizeof(_Func{})]byte)(unsafe.Pointer(&fnv)))[:]...)
|
||||
|
||||
/* function table */
|
||||
tab := []_FuncTab {
|
||||
{entry: pc, funcoff: uintptr(foff)},
|
||||
{entry: pc, funcoff: uintptr(foff)},
|
||||
{entry: maxpc},
|
||||
}
|
||||
|
||||
/* module data */
|
||||
*mod = _ModuleData {
|
||||
pclntable : pclnt,
|
||||
ftab : tab,
|
||||
findfunctab : findFuncTab,
|
||||
minpc : minpc,
|
||||
maxpc : maxpc,
|
||||
modulename : name,
|
||||
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
}
|
||||
|
||||
/* verify and register the new module */
|
||||
moduledataverify1(mod)
|
||||
registerModule(mod)
|
||||
}
|
175  vendor/github.com/bytedance/sonic/internal/loader/funcdata_go116.go  generated vendored
@@ -1,175 +0,0 @@
//go:build go1.16 && !go1.18
|
||||
// +build go1.16,!go1.18
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package loader
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
)
|
||||
|
||||
type _Func struct {
|
||||
entry uintptr // start pc
|
||||
nameoff int32 // function name
|
||||
args int32 // in/out args size
|
||||
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
|
||||
pcsp uint32
|
||||
pcfile uint32
|
||||
pcln uint32
|
||||
npcdata uint32
|
||||
cuOffset uint32 // runtime.cutab offset of this function's CU
|
||||
funcID uint8 // set for certain special runtime functions
|
||||
_ [2]byte // pad
|
||||
nfuncdata uint8 // must be last
|
||||
argptrs uintptr
|
||||
localptrs uintptr
|
||||
}
|
||||
|
||||
type _FuncTab struct {
|
||||
entry uintptr
|
||||
funcoff uintptr
|
||||
}
|
||||
|
||||
type _PCHeader struct {
|
||||
magic uint32 // 0xFFFFFFFA
|
||||
pad1, pad2 uint8 // 0,0
|
||||
minLC uint8 // min instruction size
|
||||
ptrSize uint8 // size of a ptr in bytes
|
||||
nfunc int // number of functions in the module
|
||||
nfiles uint // number of entries in the file tab.
|
||||
funcnameOffset uintptr // offset to the funcnametab variable from _PCHeader
|
||||
cuOffset uintptr // offset to the cutab variable from _PCHeader
|
||||
filetabOffset uintptr // offset to the filetab variable from _PCHeader
|
||||
pctabOffset uintptr // offset to the pctab varible from _PCHeader
|
||||
pclnOffset uintptr // offset to the pclntab variable from _PCHeader
|
||||
}
|
||||
|
||||
type _BitVector struct {
|
||||
n int32 // # of bits
|
||||
bytedata *uint8
|
||||
}
|
||||
|
||||
type _PtabEntry struct {
|
||||
name int32
|
||||
typ int32
|
||||
}
|
||||
|
||||
type _TextSection struct {
|
||||
vaddr uintptr // prelinked section vaddr
|
||||
length uintptr // section length
|
||||
baseaddr uintptr // relocated section address
|
||||
}
|
||||
|
||||
type _ModuleData struct {
|
||||
pcHeader *_PCHeader
|
||||
funcnametab []byte
|
||||
cutab []uint32
|
||||
filetab []byte
|
||||
pctab []byte
|
||||
pclntable []_Func
|
||||
ftab []_FuncTab
|
||||
findfunctab *_FindFuncBucket
|
||||
minpc, maxpc uintptr
|
||||
text, etext uintptr
|
||||
noptrdata, enoptrdata uintptr
|
||||
data, edata uintptr
|
||||
bss, ebss uintptr
|
||||
noptrbss, enoptrbss uintptr
|
||||
end, gcdata, gcbss uintptr
|
||||
types, etypes uintptr
|
||||
textsectmap []_TextSection
|
||||
typelinks []int32
|
||||
itablinks []unsafe.Pointer
|
||||
ptab []_PtabEntry
|
||||
pluginpath string
|
||||
pkghashes []struct{}
|
||||
modulename string
|
||||
modulehashes []struct{}
|
||||
hasmain uint8
|
||||
gcdatamask, gcbssmask _BitVector
|
||||
typemap map[int32]unsafe.Pointer
|
||||
bad bool
|
||||
next *_ModuleData
|
||||
}
|
||||
|
||||
type _FindFuncBucket struct {
|
||||
idx uint32
|
||||
subbuckets [16]byte
|
||||
}
|
||||
|
||||
var modHeader = &_PCHeader {
|
||||
magic : 0xfffffffa,
|
||||
minLC : 1,
|
||||
nfunc : 1,
|
||||
ptrSize : 4 << (^uintptr(0) >> 63),
|
||||
}
|
||||
|
||||
var findFuncTab = &_FindFuncBucket {
|
||||
idx: 1,
|
||||
}
|
||||
|
||||
func makePCtab(fp int) []byte {
|
||||
return append([]byte{0}, encodeVariant((fp + 1) << 1)...)
|
||||
}
|
||||
|
||||
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
|
||||
mod := new(_ModuleData)
|
||||
|
||||
minpc := pc
|
||||
maxpc := pc + size
|
||||
|
||||
// cache arg and local stackmap
|
||||
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
|
||||
|
||||
/* function entry */
|
||||
lnt := []_Func {{
|
||||
entry : pc,
|
||||
nameoff : 1,
|
||||
args : int32(args),
|
||||
pcsp : 1,
|
||||
nfuncdata : 2,
|
||||
argptrs : uintptr(argptrs),
|
||||
localptrs : uintptr(localptrs),
|
||||
}}
|
||||
|
||||
/* function table */
|
||||
tab := []_FuncTab {
|
||||
{entry: pc},
|
||||
{entry: pc},
|
||||
{entry: maxpc},
|
||||
}
|
||||
|
||||
/* module data */
|
||||
*mod = _ModuleData {
|
||||
pcHeader : modHeader,
|
||||
funcnametab : append(append([]byte{0}, name...), 0),
|
||||
pctab : append(makePCtab(fp), encodeVariant(int(size))...),
|
||||
pclntable : lnt,
|
||||
ftab : tab,
|
||||
findfunctab : findFuncTab,
|
||||
minpc : minpc,
|
||||
maxpc : maxpc,
|
||||
modulename : name,
|
||||
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
}
|
||||
|
||||
/* verify and register the new module */
|
||||
moduledataverify1(mod)
|
||||
registerModule(mod)
|
||||
}
|
201  vendor/github.com/bytedance/sonic/internal/loader/funcdata_go118.go  generated vendored
@@ -1,201 +0,0 @@
// +build go1.18,!go1.20
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package loader
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
// A FuncFlag holds bits about a function.
|
||||
// This list must match the list in cmd/internal/objabi/funcid.go.
|
||||
type funcFlag uint8
|
||||
|
||||
type _Func struct {
|
||||
entryOff uint32 // start pc
|
||||
nameoff int32 // function name
|
||||
args int32 // in/out args size
|
||||
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
|
||||
pcsp uint32
|
||||
pcfile uint32
|
||||
pcln uint32
|
||||
npcdata uint32
|
||||
cuOffset uint32 // runtime.cutab offset of this function's CU
|
||||
funcID uint8 // set for certain special runtime functions
|
||||
flag funcFlag
|
||||
_ [1]byte // pad
|
||||
nfuncdata uint8 // must be last
|
||||
argptrs uint32
|
||||
localptrs uint32
|
||||
}
|
||||
|
||||
type _FuncTab struct {
|
||||
entry uint32
|
||||
funcoff uint32
|
||||
}
|
||||
|
||||
type _PCHeader struct {
|
||||
magic uint32 // 0xFFFFFFF0
|
||||
pad1, pad2 uint8 // 0,0
|
||||
minLC uint8 // min instruction size
|
||||
ptrSize uint8 // size of a ptr in bytes
|
||||
nfunc int // number of functions in the module
|
||||
nfiles uint // number of entries in the file tab
|
||||
textStart uintptr // base for function entry PC offsets in this module, equal to moduledata.text
|
||||
funcnameOffset uintptr // offset to the funcnametab variable from pcHeader
|
||||
cuOffset uintptr // offset to the cutab variable from pcHeader
|
||||
filetabOffset uintptr // offset to the filetab variable from pcHeader
|
||||
pctabOffset uintptr // offset to the pctab variable from pcHeader
|
||||
pclnOffset uintptr // offset to the pclntab variable from pcHeader
|
||||
}
|
||||
|
||||
type _BitVector struct {
|
||||
n int32 // # of bits
|
||||
bytedata *uint8
|
||||
}
|
||||
|
||||
type _PtabEntry struct {
|
||||
name int32
|
||||
typ int32
|
||||
}
|
||||
|
||||
type _TextSection struct {
|
||||
vaddr uintptr // prelinked section vaddr
|
||||
length uintptr // section length
|
||||
baseaddr uintptr // relocated section address
|
||||
}
|
||||
|
||||
type _ModuleData struct {
|
||||
pcHeader *_PCHeader
|
||||
funcnametab []byte
|
||||
cutab []uint32
|
||||
filetab []byte
|
||||
pctab []byte
|
||||
pclntable []byte
|
||||
ftab []_FuncTab
|
||||
findfunctab *_FindFuncBucket
|
||||
minpc, maxpc uintptr
|
||||
text, etext uintptr
|
||||
noptrdata, enoptrdata uintptr
|
||||
data, edata uintptr
|
||||
bss, ebss uintptr
|
||||
noptrbss, enoptrbss uintptr
|
||||
end, gcdata, gcbss uintptr
|
||||
types, etypes uintptr
|
||||
rodata uintptr
|
||||
gofunc uintptr
|
||||
textsectmap []_TextSection
|
||||
typelinks []int32
|
||||
itablinks []unsafe.Pointer
|
||||
ptab []_PtabEntry
|
||||
pluginpath string
|
||||
pkghashes []struct{}
|
||||
modulename string
|
||||
modulehashes []struct{}
|
||||
hasmain uint8
|
||||
gcdatamask, gcbssmask _BitVector
|
||||
typemap map[int32]unsafe.Pointer
|
||||
bad bool
|
||||
next *_ModuleData
|
||||
}
|
||||
|
||||
|
||||
type _FindFuncBucket struct {
|
||||
idx uint32
|
||||
subbuckets [16]byte
|
||||
}
|
||||
|
||||
|
||||
|
||||
func makePCtab(fp int) []byte {
|
||||
return append([]byte{0}, encodeVariant((fp + 1) << 1)...)
|
||||
}
|
||||
|
||||
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
|
||||
mod := new(_ModuleData)
|
||||
|
||||
minpc := pc
|
||||
maxpc := pc + size
|
||||
|
||||
findFuncTab := make([]_FindFuncBucket, textSize/4096 + 1)
|
||||
|
||||
modHeader := &_PCHeader {
|
||||
magic : 0xfffffff0,
|
||||
minLC : 1,
|
||||
nfunc : 1,
|
||||
ptrSize : 4 << (^uintptr(0) >> 63),
|
||||
textStart: minpc,
|
||||
}
|
||||
|
||||
// cache arg and local stackmap
|
||||
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
|
||||
|
||||
base := argptrs
|
||||
if argptrs > localptrs {
|
||||
base = localptrs
|
||||
}
|
||||
|
||||
/* function entry */
|
||||
lnt := []_Func {{
|
||||
entryOff : 0,
|
||||
nameoff : 1,
|
||||
args : int32(args),
|
||||
pcsp : 1,
|
||||
nfuncdata : 2,
|
||||
argptrs: uint32(argptrs - base),
|
||||
localptrs: uint32(localptrs - base),
|
||||
}}
|
||||
nlnt := len(lnt)*int(unsafe.Sizeof(_Func{}))
|
||||
plnt := unsafe.Pointer(&lnt[0])
|
||||
|
||||
/* function table */
|
||||
ftab := []_FuncTab {
|
||||
{entry : 0, funcoff : 16},
|
||||
{entry : uint32(size)},
|
||||
}
|
||||
nftab := len(ftab)*int(unsafe.Sizeof(_FuncTab{}))
|
||||
pftab := unsafe.Pointer(&ftab[0])
|
||||
|
||||
pclntab := make([]byte, 0, nftab + nlnt)
|
||||
pclntab = append(pclntab, rt.BytesFrom(pftab, nftab, nftab)...)
|
||||
pclntab = append(pclntab, rt.BytesFrom(plnt, nlnt, nlnt)...)
|
||||
|
||||
/* module data */
|
||||
*mod = _ModuleData {
|
||||
pcHeader : modHeader,
|
||||
funcnametab : append(append([]byte{0}, name...), 0),
|
||||
pctab : append(makePCtab(fp), encodeVariant(int(size))...),
|
||||
pclntable : pclntab,
|
||||
ftab : ftab,
|
||||
text : minpc,
|
||||
etext : pc + textSize,
|
||||
findfunctab : &findFuncTab[0],
|
||||
minpc : minpc,
|
||||
maxpc : maxpc,
|
||||
modulename : name,
|
||||
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
gofunc: base,
|
||||
}
|
||||
|
||||
/* verify and register the new module */
|
||||
moduledataverify1(mod)
|
||||
registerModule(mod)
|
||||
}
|
201  vendor/github.com/bytedance/sonic/internal/loader/funcdata_go120.go  generated vendored
@@ -1,201 +0,0 @@
// +build go1.20
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package loader
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
// A FuncFlag holds bits about a function.
|
||||
// This list must match the list in cmd/internal/objabi/funcid.go.
|
||||
type funcFlag uint8
|
||||
|
||||
type _Func struct {
|
||||
entryOff uint32 // start pc
|
||||
nameoff int32 // function name
|
||||
args int32 // in/out args size
|
||||
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
|
||||
pcsp uint32
|
||||
pcfile uint32
|
||||
pcln uint32
|
||||
npcdata uint32
|
||||
cuOffset uint32 // runtime.cutab offset of this function's CU
|
||||
funcID uint8 // set for certain special runtime functions
|
||||
flag funcFlag
|
||||
_ [1]byte // pad
|
||||
nfuncdata uint8 // must be last
|
||||
argptrs uint32
|
||||
localptrs uint32
|
||||
}
|
||||
|
||||
type _FuncTab struct {
|
||||
entry uint32
|
||||
funcoff uint32
|
||||
}
|
||||
|
||||
type _PCHeader struct {
|
||||
magic uint32 // 0xFFFFFFF0
|
||||
pad1, pad2 uint8 // 0,0
|
||||
minLC uint8 // min instruction size
|
||||
ptrSize uint8 // size of a ptr in bytes
|
||||
nfunc int // number of functions in the module
|
||||
nfiles uint // number of entries in the file tab
|
||||
textStart uintptr // base for function entry PC offsets in this module, equal to moduledata.text
|
||||
funcnameOffset uintptr // offset to the funcnametab variable from pcHeader
|
||||
cuOffset uintptr // offset to the cutab variable from pcHeader
|
||||
filetabOffset uintptr // offset to the filetab variable from pcHeader
|
||||
pctabOffset uintptr // offset to the pctab variable from pcHeader
|
||||
pclnOffset uintptr // offset to the pclntab variable from pcHeader
|
||||
}
|
||||
|
||||
type _BitVector struct {
|
||||
n int32 // # of bits
|
||||
bytedata *uint8
|
||||
}
|
||||
|
||||
type _PtabEntry struct {
|
||||
name int32
|
||||
typ int32
|
||||
}
|
||||
|
||||
type _TextSection struct {
|
||||
vaddr uintptr // prelinked section vaddr
|
||||
length uintptr // section length
|
||||
baseaddr uintptr // relocated section address
|
||||
}
|
||||
|
||||
type _ModuleData struct {
|
||||
pcHeader *_PCHeader
|
||||
funcnametab []byte
|
||||
cutab []uint32
|
||||
filetab []byte
|
||||
pctab []byte
|
||||
pclntable []byte
|
||||
ftab []_FuncTab
|
||||
findfunctab *_FindFuncBucket
|
||||
minpc, maxpc uintptr
|
||||
text, etext uintptr
|
||||
noptrdata, enoptrdata uintptr
|
||||
data, edata uintptr
|
||||
bss, ebss uintptr
|
||||
noptrbss, enoptrbss uintptr
|
||||
end, gcdata, gcbss uintptr
|
||||
types, etypes uintptr
|
||||
rodata uintptr
|
||||
gofunc uintptr
|
||||
textsectmap []_TextSection
|
||||
typelinks []int32
|
||||
itablinks []unsafe.Pointer
|
||||
ptab []_PtabEntry
|
||||
pluginpath string
|
||||
pkghashes []struct{}
|
||||
modulename string
|
||||
modulehashes []struct{}
|
||||
hasmain uint8
|
||||
gcdatamask, gcbssmask _BitVector
|
||||
typemap map[int32]unsafe.Pointer
|
||||
bad bool
|
||||
next *_ModuleData
|
||||
}
|
||||
|
||||
|
||||
type _FindFuncBucket struct {
|
||||
idx uint32
|
||||
subbuckets [16]byte
|
||||
}
|
||||
|
||||
|
||||
|
||||
func makePCtab(fp int) []byte {
|
||||
return append([]byte{0}, encodeVariant((fp + 1) << 1)...)
|
||||
}
|
||||
|
||||
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
|
||||
mod := new(_ModuleData)
|
||||
|
||||
minpc := pc
|
||||
maxpc := pc + size
|
||||
|
||||
findFuncTab := make([]_FindFuncBucket, textSize/4096 + 1)
|
||||
|
||||
modHeader := &_PCHeader {
|
||||
magic : 0xfffffff0,
|
||||
minLC : 1,
|
||||
nfunc : 1,
|
||||
ptrSize : 4 << (^uintptr(0) >> 63),
|
||||
textStart: minpc,
|
||||
}
|
||||
|
||||
// cache arg and local stackmap
|
||||
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
|
||||
|
||||
base := argptrs
|
||||
if argptrs > localptrs {
|
||||
base = localptrs
|
||||
}
|
||||
|
||||
/* function entry */
|
||||
lnt := []_Func {{
|
||||
entryOff : 0,
|
||||
nameoff : 1,
|
||||
args : int32(args),
|
||||
pcsp : 1,
|
||||
nfuncdata : 2,
|
||||
argptrs: uint32(argptrs - base),
|
||||
localptrs: uint32(localptrs - base),
|
||||
}}
|
||||
nlnt := len(lnt)*int(unsafe.Sizeof(_Func{}))
|
||||
plnt := unsafe.Pointer(&lnt[0])
|
||||
|
||||
/* function table */
|
||||
ftab := []_FuncTab {
|
||||
{entry : 0, funcoff : 16},
|
||||
{entry : uint32(size)},
|
||||
}
|
||||
nftab := len(ftab)*int(unsafe.Sizeof(_FuncTab{}))
|
||||
pftab := unsafe.Pointer(&ftab[0])
|
||||
|
||||
pclntab := make([]byte, 0, nftab + nlnt)
|
||||
pclntab = append(pclntab, rt.BytesFrom(pftab, nftab, nftab)...)
|
||||
pclntab = append(pclntab, rt.BytesFrom(plnt, nlnt, nlnt)...)
|
||||
|
||||
/* module data */
|
||||
*mod = _ModuleData {
|
||||
pcHeader : modHeader,
|
||||
funcnametab : append(append([]byte{0}, name...), 0),
|
||||
pctab : append(makePCtab(fp), encodeVariant(int(size))...),
|
||||
pclntable : pclntab,
|
||||
ftab : ftab,
|
||||
text : minpc,
|
||||
etext : pc + textSize,
|
||||
findfunctab : &findFuncTab[0],
|
||||
minpc : minpc,
|
||||
maxpc : maxpc,
|
||||
modulename : name,
|
||||
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
|
||||
gofunc: base,
|
||||
}
|
||||
|
||||
/* verify and register the new module */
|
||||
moduledataverify1(mod)
|
||||
registerModule(mod)
|
||||
}
|
74  vendor/github.com/bytedance/sonic/internal/loader/loader.go  generated vendored
@@ -1,74 +0,0 @@
//go:build darwin || linux
|
||||
// +build darwin linux
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package loader
|
||||
|
||||
import (
|
||||
`fmt`
|
||||
`os`
|
||||
`reflect`
|
||||
`syscall`
|
||||
`unsafe`
|
||||
)
|
||||
|
||||
const (
|
||||
_AP = syscall.MAP_ANON | syscall.MAP_PRIVATE
|
||||
_RX = syscall.PROT_READ | syscall.PROT_EXEC
|
||||
_RW = syscall.PROT_READ | syscall.PROT_WRITE
|
||||
)
|
||||
|
||||
type Loader []byte
|
||||
type Function unsafe.Pointer
|
||||
|
||||
func (self Loader) Load(fn string, fp int, args int, argPtrs []bool, localPtrs []bool) (f Function) {
|
||||
p := os.Getpagesize()
|
||||
n := (((len(self) - 1) / p) + 1) * p
|
||||
|
||||
/* register the function */
|
||||
m := mmap(n)
|
||||
v := fmt.Sprintf("runtime.__%s_%x", fn, m)
|
||||
|
||||
registerFunction(v, m, uintptr(n), fp, args, uintptr(len(self)), argPtrs, localPtrs)
|
||||
|
||||
/* reference as a slice */
|
||||
s := *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader {
|
||||
Data : m,
|
||||
Cap : n,
|
||||
Len : len(self),
|
||||
}))
|
||||
|
||||
/* copy the machine code, and make it executable */
|
||||
copy(s, self)
|
||||
mprotect(m, n)
|
||||
return Function(&m)
|
||||
}
|
||||
|
||||
func mmap(nb int) uintptr {
|
||||
if m, _, e := syscall.RawSyscall6(syscall.SYS_MMAP, 0, uintptr(nb), _RW, _AP, 0, 0); e != 0 {
|
||||
panic(e)
|
||||
} else {
|
||||
return m
|
||||
}
|
||||
}
|
||||
|
||||
func mprotect(p uintptr, nb int) {
|
||||
if _, _, err := syscall.RawSyscall(syscall.SYS_MPROTECT, p, uintptr(nb), _RX); err != 0 {
|
||||
panic(err)
|
||||
}
|
||||
}
|
111  vendor/github.com/bytedance/sonic/internal/loader/loader_windows.go  generated vendored
@@ -1,111 +0,0 @@
//go:build windows
|
||||
// +build windows
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package loader
|
||||
|
||||
import (
|
||||
`fmt`
|
||||
`os`
|
||||
`reflect`
|
||||
`syscall`
|
||||
`unsafe`
|
||||
)
|
||||
|
||||
const (
|
||||
MEM_COMMIT = 0x00001000
|
||||
MEM_RESERVE = 0x00002000
|
||||
)
|
||||
|
||||
var (
|
||||
libKernel32 = syscall.NewLazyDLL("KERNEL32.DLL")
|
||||
libKernel32_VirtualAlloc = libKernel32.NewProc("VirtualAlloc")
|
||||
libKernel32_VirtualProtect = libKernel32.NewProc("VirtualProtect")
|
||||
)
|
||||
|
||||
type Loader []byte
|
||||
type Function unsafe.Pointer
|
||||
|
||||
func (self Loader) Load(fn string, fp int, args int, argPtrs []bool, localPtrs []bool) (f Function) {
|
||||
p := os.Getpagesize()
|
||||
n := (((len(self) - 1) / p) + 1) * p
|
||||
|
||||
/* register the function */
|
||||
m := mmap(n)
|
||||
v := fmt.Sprintf("runtime.__%s_%x", fn, m)
|
||||
|
||||
registerFunction(v, m, uintptr(n), fp, args, uintptr(len(self)), argPtrs, localPtrs)
|
||||
|
||||
/* reference as a slice */
|
||||
s := *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader {
|
||||
Data : m,
|
||||
Cap : n,
|
||||
Len : len(self),
|
||||
}))
|
||||
|
||||
/* copy the machine code, and make it executable */
|
||||
copy(s, self)
|
||||
mprotect(m, n)
|
||||
return Function(&m)
|
||||
}
|
||||
|
||||
func mmap(nb int) uintptr {
|
||||
addr, err := winapi_VirtualAlloc(0, nb, MEM_COMMIT|MEM_RESERVE, syscall.PAGE_READWRITE)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return addr
|
||||
}
|
||||
|
||||
func mprotect(p uintptr, nb int) (oldProtect int) {
|
||||
err := winapi_VirtualProtect(p, nb, syscall.PAGE_EXECUTE_READ, &oldProtect)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// winapi_VirtualAlloc allocate memory
|
||||
// Doc: https://docs.microsoft.com/en-us/windows/win32/api/memoryapi/nf-memoryapi-virtualalloc
|
||||
func winapi_VirtualAlloc(lpAddr uintptr, dwSize int, flAllocationType int, flProtect int) (uintptr, error) {
|
||||
r1, _, err := libKernel32_VirtualAlloc.Call(
|
||||
lpAddr,
|
||||
uintptr(dwSize),
|
||||
uintptr(flAllocationType),
|
||||
uintptr(flProtect),
|
||||
)
|
||||
if r1 == 0 {
|
||||
return 0, err
|
||||
}
|
||||
return r1, nil
|
||||
}
|
||||
|
||||
// winapi_VirtualProtect change memory protection
|
||||
// Doc: https://docs.microsoft.com/en-us/windows/win32/api/memoryapi/nf-memoryapi-virtualprotect
|
||||
func winapi_VirtualProtect(lpAddr uintptr, dwSize int, flNewProtect int, lpflOldProtect *int) error {
|
||||
r1, _, err := libKernel32_VirtualProtect.Call(
|
||||
lpAddr,
|
||||
uintptr(dwSize),
|
||||
uintptr(flNewProtect),
|
||||
uintptr(unsafe.Pointer(lpflOldProtect)),
|
||||
)
|
||||
if r1 == 0 {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
188  vendor/github.com/bytedance/sonic/internal/native/avx/native_amd64.go  generated vendored
@@ -1,5 +1,7 @@
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
|
@@ -22,114 +24,168 @@
|
|||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
var (
|
||||
__i64toa func(out unsafe.Pointer, val int64) (ret int)
|
||||
|
||||
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
|
||||
|
||||
__f64toa func(out unsafe.Pointer, val float64) (ret int)
|
||||
|
||||
__f32toa func(out unsafe.Pointer, val float32) (ret int)
|
||||
|
||||
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
|
||||
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
|
||||
|
||||
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
|
||||
|
||||
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
|
||||
|
||||
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __i64toa(out *byte, val int64) (ret int)
|
||||
func i64toa(out *byte, val int64) (ret int) {
|
||||
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __u64toa(out *byte, val uint64) (ret int)
|
||||
func u64toa(out *byte, val uint64) (ret int) {
|
||||
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f64toa(out *byte, val float64) (ret int)
|
||||
func f64toa(out *byte, val float64) (ret int) {
|
||||
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f32toa(out *byte, val float32) (ret int)
|
||||
func f32toa(out *byte, val float32) (ret int) {
|
||||
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
|
||||
return __lspace(rt.NoEscape(sp), nb, off)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
|
||||
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
|
||||
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
|
||||
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
|
||||
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
|
||||
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
|
||||
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
|
||||
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
|
||||
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
|
||||
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
|
||||
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vnumber(s *string, p *int, v *types.JsonState)
|
||||
func vnumber(s *string, p *int, v *types.JsonState) {
|
||||
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vsigned(s *string, p *int, v *types.JsonState)
|
||||
func vsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vunsigned(s *string, p *int, v *types.JsonState)
|
||||
func vunsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one_fast(s *string, p *int) (ret int)
|
||||
func skip_one_fast(s *string, p *int) (ret int) {
|
||||
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_number(s *string, p *int) (ret int)
|
||||
func skip_number(s *string, p *int) (ret int) {
|
||||
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
|
||||
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
|
||||
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8_fast(s *string) (ret int)
|
||||
func validate_utf8_fast(s *string) (ret int) {
|
||||
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
|
||||
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
|
||||
}
|
||||
|
|
15342  vendor/github.com/bytedance/sonic/internal/native/avx/native_amd64.s  generated vendored
File diff suppressed because it is too large
49  vendor/github.com/bytedance/sonic/internal/native/avx/native_export_amd64.go  generated vendored
@@ -1,49 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package avx
|
||||
|
||||
var (
|
||||
S_f64toa = _subr__f64toa
|
||||
S_f32toa = _subr__f32toa
|
||||
S_i64toa = _subr__i64toa
|
||||
S_u64toa = _subr__u64toa
|
||||
S_lspace = _subr__lspace
|
||||
)
|
||||
|
||||
var (
|
||||
S_quote = _subr__quote
|
||||
S_unquote = _subr__unquote
|
||||
)
|
||||
|
||||
var (
|
||||
S_value = _subr__value
|
||||
S_vstring = _subr__vstring
|
||||
S_vnumber = _subr__vnumber
|
||||
S_vsigned = _subr__vsigned
|
||||
S_vunsigned = _subr__vunsigned
|
||||
)
|
||||
|
||||
var (
|
||||
S_skip_one = _subr__skip_one
|
||||
S_skip_one_fast = _subr__skip_one_fast
|
||||
S_skip_array = _subr__skip_array
|
||||
S_skip_object = _subr__skip_object
|
||||
S_skip_number = _subr__skip_number
|
||||
S_get_by_path = _subr__get_by_path
|
||||
)
|
739  vendor/github.com/bytedance/sonic/internal/native/avx/native_subr_amd64.go  generated vendored
@@ -3,107 +3,666 @@
|
||||
package avx
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection ALL
|
||||
func __native_entry__() uintptr
|
||||
|
||||
var (
|
||||
_subr__f32toa = __native_entry__() + 31264
|
||||
_subr__f64toa = __native_entry__() + 192
|
||||
_subr__get_by_path = __native_entry__() + 25856
|
||||
_subr__html_escape = __native_entry__() + 9040
|
||||
_subr__i64toa = __native_entry__() + 3488
|
||||
_subr__lspace = __native_entry__() + 16
|
||||
_subr__quote = __native_entry__() + 4880
|
||||
_subr__skip_array = __native_entry__() + 17952
|
||||
_subr__skip_number = __native_entry__() + 21952
|
||||
_subr__skip_object = __native_entry__() + 20368
|
||||
_subr__skip_one = __native_entry__() + 22112
|
||||
_subr__skip_one_fast = __native_entry__() + 22352
|
||||
_subr__u64toa = __native_entry__() + 3600
|
||||
_subr__unquote = __native_entry__() + 6672
|
||||
_subr__validate_one = __native_entry__() + 22176
|
||||
_subr__validate_utf8 = __native_entry__() + 30000
|
||||
_subr__validate_utf8_fast = __native_entry__() + 30672
|
||||
_subr__value = __native_entry__() + 12224
|
||||
_subr__vnumber = __native_entry__() + 15616
|
||||
_subr__vsigned = __native_entry__() + 17232
|
||||
_subr__vstring = __native_entry__() + 14064
|
||||
_subr__vunsigned = __native_entry__() + 17600
|
||||
import (
|
||||
`github.com/bytedance/sonic/loader`
|
||||
)
|
||||
|
||||
const (
|
||||
_stack__f32toa = 48
|
||||
_entry__f32toa = 31024
|
||||
_entry__f64toa = 176
|
||||
_entry__format_significand = 35808
|
||||
_entry__format_integer = 3424
|
||||
_entry__fsm_exec = 18816
|
||||
_entry__advance_string = 15056
|
||||
_entry__advance_string_default = 37344
|
||||
_entry__do_skip_number = 21376
|
||||
_entry__get_by_path = 26416
|
||||
_entry__skip_one_fast = 22880
|
||||
_entry__unescape = 38256
|
||||
_entry__unhex16_is = 9632
|
||||
_entry__html_escape = 9824
|
||||
_entry__i64toa = 3856
|
||||
_entry__u64toa = 4128
|
||||
_entry__lspace = 16
|
||||
_entry__quote = 5552
|
||||
_entry__skip_array = 18768
|
||||
_entry__skip_number = 22464
|
||||
_entry__skip_object = 21008
|
||||
_entry__skip_one = 22640
|
||||
_entry__unquote = 7296
|
||||
_entry__validate_one = 22704
|
||||
_entry__validate_utf8 = 29728
|
||||
_entry__validate_utf8_fast = 30416
|
||||
_entry__value = 13104
|
||||
_entry__vnumber = 16368
|
||||
_entry__atof_eisel_lemire64 = 11104
|
||||
_entry__atof_native = 12496
|
||||
_entry__decimal_to_f64 = 11504
|
||||
_entry__left_shift = 36288
|
||||
_entry__right_shift = 36832
|
||||
_entry__vsigned = 18016
|
||||
_entry__vstring = 14880
|
||||
_entry__vunsigned = 18384
|
||||
)
|
||||
|
||||
const (
|
||||
_stack__f32toa = 64
|
||||
_stack__f64toa = 80
|
||||
_stack__get_by_path = 304
|
||||
_stack__format_significand = 24
|
||||
_stack__format_integer = 16
|
||||
_stack__fsm_exec = 160
|
||||
_stack__advance_string = 72
|
||||
_stack__advance_string_default = 56
|
||||
_stack__do_skip_number = 32
|
||||
_stack__get_by_path = 280
|
||||
_stack__skip_one_fast = 176
|
||||
_stack__unescape = 64
|
||||
_stack__unhex16_is = 8
|
||||
_stack__html_escape = 64
|
||||
_stack__i64toa = 16
|
||||
_stack__lspace = 8
|
||||
_stack__quote = 56
|
||||
_stack__skip_array = 128
|
||||
_stack__skip_number = 72
|
||||
_stack__skip_object = 128
|
||||
_stack__skip_one = 128
|
||||
_stack__skip_one_fast = 200
|
||||
_stack__u64toa = 8
|
||||
_stack__unquote = 88
|
||||
_stack__validate_one = 128
|
||||
_stack__lspace = 8
|
||||
_stack__quote = 80
|
||||
_stack__skip_array = 168
|
||||
_stack__skip_number = 88
|
||||
_stack__skip_object = 168
|
||||
_stack__skip_one = 168
|
||||
_stack__unquote = 112
|
||||
_stack__validate_one = 168
|
||||
_stack__validate_utf8 = 48
|
||||
_stack__validate_utf8_fast = 24
|
||||
_stack__value = 328
|
||||
_stack__vnumber = 240
|
||||
_stack__value = 352
|
||||
_stack__vnumber = 264
|
||||
_stack__atof_eisel_lemire64 = 40
|
||||
_stack__atof_native = 144
|
||||
_stack__decimal_to_f64 = 88
|
||||
_stack__left_shift = 32
|
||||
_stack__right_shift = 16
|
||||
_stack__vsigned = 16
|
||||
_stack__vstring = 136
|
||||
_stack__vunsigned = 16
|
||||
)
|
||||
|
||||
var (
|
||||
_ = _subr__f32toa
|
||||
_ = _subr__f64toa
|
||||
_ = _subr__get_by_path
|
||||
_ = _subr__html_escape
|
||||
_ = _subr__i64toa
|
||||
_ = _subr__lspace
|
||||
_ = _subr__quote
|
||||
_ = _subr__skip_array
|
||||
_ = _subr__skip_number
|
||||
_ = _subr__skip_object
|
||||
_ = _subr__skip_one
|
||||
_ = _subr__skip_one_fast
|
||||
_ = _subr__u64toa
|
||||
_ = _subr__unquote
|
||||
_ = _subr__validate_one
|
||||
_ = _subr__validate_utf8
|
||||
_ = _subr__validate_utf8_fast
|
||||
_ = _subr__value
|
||||
_ = _subr__vnumber
|
||||
_ = _subr__vsigned
|
||||
_ = _subr__vstring
|
||||
_ = _subr__vunsigned
|
||||
_stack__vstring = 128
|
||||
_stack__vunsigned = 24
|
||||
)
|
||||
|
||||
const (
|
||||
_ = _stack__f32toa
|
||||
_ = _stack__f64toa
|
||||
_ = _stack__get_by_path
|
||||
_ = _stack__html_escape
|
||||
_ = _stack__i64toa
|
||||
_ = _stack__lspace
|
||||
_ = _stack__quote
|
||||
_ = _stack__skip_array
|
||||
_ = _stack__skip_number
|
||||
_ = _stack__skip_object
|
||||
_ = _stack__skip_one
|
||||
_ = _stack__skip_one_fast
|
||||
_ = _stack__u64toa
|
||||
_ = _stack__unquote
|
||||
_ = _stack__validate_one
|
||||
_ = _stack__validate_utf8
|
||||
_ = _stack__validate_utf8_fast
|
||||
_ = _stack__value
|
||||
_ = _stack__vnumber
|
||||
_ = _stack__vsigned
|
||||
_ = _stack__vstring
|
||||
_ = _stack__vunsigned
|
||||
_size__f32toa = 3792
|
||||
_size__f64toa = 3248
|
||||
_size__format_significand = 480
|
||||
_size__format_integer = 432
|
||||
_size__fsm_exec = 1656
|
||||
_size__advance_string = 1264
|
||||
_size__advance_string_default = 912
|
||||
_size__do_skip_number = 876
|
||||
_size__get_by_path = 3312
|
||||
_size__skip_one_fast = 3016
|
||||
_size__unescape = 704
|
||||
_size__unhex16_is = 128
|
||||
_size__html_escape = 1280
|
||||
_size__i64toa = 272
|
||||
_size__u64toa = 1376
|
||||
_size__lspace = 112
|
||||
_size__quote = 1728
|
||||
_size__skip_array = 48
|
||||
_size__skip_number = 160
|
||||
_size__skip_object = 48
|
||||
_size__skip_one = 48
|
||||
_size__unquote = 2336
|
||||
_size__validate_one = 48
|
||||
_size__validate_utf8 = 688
|
||||
_size__validate_utf8_fast = 560
|
||||
_size__value = 1268
|
||||
_size__vnumber = 1648
|
||||
_size__atof_eisel_lemire64 = 400
|
||||
_size__atof_native = 608
|
||||
_size__decimal_to_f64 = 992
|
||||
_size__left_shift = 544
|
||||
_size__right_shift = 480
|
||||
_size__vsigned = 368
|
||||
_size__vstring = 128
|
||||
_size__vunsigned = 368
|
||||
)
|
||||
|
||||
var (
|
||||
_pcsp__f32toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3734, 64},
|
||||
{3738, 48},
|
||||
{3739, 40},
|
||||
{3741, 32},
|
||||
{3743, 24},
|
||||
{3745, 16},
|
||||
{3747, 8},
|
||||
{3751, 0},
|
||||
{3781, 64},
|
||||
}
|
||||
_pcsp__f64toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3124, 56},
|
||||
{3128, 48},
|
||||
{3129, 40},
|
||||
{3131, 32},
|
||||
{3133, 24},
|
||||
{3135, 16},
|
||||
{3137, 8},
|
||||
{3141, 0},
|
||||
{3234, 56},
|
||||
}
|
||||
_pcsp__format_significand = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{468, 24},
|
||||
{469, 16},
|
||||
{471, 8},
|
||||
{473, 0},
|
||||
}
|
||||
_pcsp__format_integer = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{412, 16},
|
||||
{413, 8},
|
||||
{414, 0},
|
||||
{423, 16},
|
||||
{424, 8},
|
||||
{426, 0},
|
||||
}
|
||||
_pcsp__fsm_exec = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1317, 88},
|
||||
{1321, 48},
|
||||
{1322, 40},
|
||||
{1324, 32},
|
||||
{1326, 24},
|
||||
{1328, 16},
|
||||
{1330, 8},
|
||||
{1331, 0},
|
||||
{1656, 88},
|
||||
}
|
||||
_pcsp__advance_string = [][2]uint32{
|
||||
{14, 0},
|
||||
{18, 8},
|
||||
{20, 16},
|
||||
{22, 24},
|
||||
{24, 32},
|
||||
{26, 40},
|
||||
{27, 48},
|
||||
{529, 72},
|
||||
{533, 48},
|
||||
{534, 40},
|
||||
{536, 32},
|
||||
{538, 24},
|
||||
{540, 16},
|
||||
{542, 8},
|
||||
{543, 0},
|
||||
{1253, 72},
|
||||
}
|
||||
_pcsp__advance_string_default = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{381, 56},
|
||||
{385, 48},
|
||||
{386, 40},
|
||||
{388, 32},
|
||||
{390, 24},
|
||||
{392, 16},
|
||||
{394, 8},
|
||||
{395, 0},
|
||||
{911, 56},
|
||||
}
|
||||
_pcsp__do_skip_number = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{739, 32},
|
||||
{740, 24},
|
||||
{742, 16},
|
||||
{744, 8},
|
||||
{745, 0},
|
||||
{876, 32},
|
||||
}
|
||||
_pcsp__get_by_path = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3262, 104},
|
||||
{3266, 48},
|
||||
{3267, 40},
|
||||
{3269, 32},
|
||||
{3271, 24},
|
||||
{3273, 16},
|
||||
{3275, 8},
|
||||
{3276, 0},
|
||||
{3301, 104},
|
||||
}
|
||||
_pcsp__skip_one_fast = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{613, 176},
|
||||
{614, 168},
|
||||
{616, 160},
|
||||
{618, 152},
|
||||
{620, 144},
|
||||
{622, 136},
|
||||
{626, 128},
|
||||
{3016, 176},
|
||||
}
|
||||
_pcsp__unescape = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{246, 56},
|
||||
{250, 48},
|
||||
{251, 40},
|
||||
{253, 32},
|
||||
{255, 24},
|
||||
{257, 16},
|
||||
{259, 8},
|
||||
{260, 0},
|
||||
{695, 56},
|
||||
}
|
||||
_pcsp__unhex16_is = [][2]uint32{
|
||||
{1, 0},
|
||||
{35, 8},
|
||||
{36, 0},
|
||||
{62, 8},
|
||||
{63, 0},
|
||||
{97, 8},
|
||||
{98, 0},
|
||||
{121, 8},
|
||||
{123, 0},
|
||||
}
|
||||
_pcsp__html_escape = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1265, 64},
|
||||
{1269, 48},
|
||||
{1270, 40},
|
||||
{1272, 32},
|
||||
{1274, 24},
|
||||
{1276, 16},
|
||||
{1278, 8},
|
||||
{1280, 0},
|
||||
}
|
||||
_pcsp__i64toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{171, 8},
|
||||
{172, 0},
|
||||
{207, 8},
|
||||
{208, 0},
|
||||
{222, 8},
|
||||
{223, 0},
|
||||
{247, 8},
|
||||
{248, 0},
|
||||
{253, 8},
|
||||
{259, 0},
|
||||
}
|
||||
_pcsp__u64toa = [][2]uint32{
|
||||
{13, 0},
|
||||
{162, 8},
|
||||
{163, 0},
|
||||
{175, 8},
|
||||
{240, 0},
|
||||
{498, 8},
|
||||
{499, 0},
|
||||
{519, 8},
|
||||
{592, 0},
|
||||
{850, 8},
|
||||
{928, 0},
|
||||
{1374, 8},
|
||||
{1376, 0},
|
||||
}
|
||||
_pcsp__lspace = [][2]uint32{
|
||||
{1, 0},
|
||||
{85, 8},
|
||||
{87, 0},
|
||||
}
|
||||
_pcsp__quote = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1669, 80},
|
||||
{1673, 48},
|
||||
{1674, 40},
|
||||
{1676, 32},
|
||||
{1678, 24},
|
||||
{1680, 16},
|
||||
{1682, 8},
|
||||
{1683, 0},
|
||||
{1718, 80},
|
||||
}
|
||||
_pcsp__skip_array = [][2]uint32{
|
||||
{1, 0},
|
||||
{28, 8},
|
||||
{34, 0},
|
||||
}
|
||||
_pcsp__skip_number = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{107, 56},
|
||||
{111, 48},
|
||||
{112, 40},
|
||||
{114, 32},
|
||||
{116, 24},
|
||||
{118, 16},
|
||||
{120, 8},
|
||||
{121, 0},
|
||||
{145, 56},
|
||||
}
|
||||
_pcsp__skip_object = [][2]uint32{
|
||||
{1, 0},
|
||||
{28, 8},
|
||||
{34, 0},
|
||||
}
|
||||
_pcsp__skip_one = [][2]uint32{
|
||||
{1, 0},
|
||||
{28, 8},
|
||||
{34, 0},
|
||||
}
|
||||
_pcsp__unquote = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1614, 104},
|
||||
{1618, 48},
|
||||
{1619, 40},
|
||||
{1621, 32},
|
||||
{1623, 24},
|
||||
{1625, 16},
|
||||
{1627, 8},
|
||||
{1628, 0},
|
||||
{2329, 104},
|
||||
}
|
||||
_pcsp__validate_one = [][2]uint32{
|
||||
{1, 0},
|
||||
{33, 8},
|
||||
{39, 0},
|
||||
}
|
||||
_pcsp__validate_utf8 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{11, 40},
|
||||
{639, 48},
|
||||
{643, 40},
|
||||
{644, 32},
|
||||
{646, 24},
|
||||
{648, 16},
|
||||
{650, 8},
|
||||
{651, 0},
|
||||
{682, 48},
|
||||
}
|
||||
_pcsp__validate_utf8_fast = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{5, 16},
|
||||
{247, 24},
|
||||
{251, 16},
|
||||
{252, 8},
|
||||
{253, 0},
|
||||
{527, 24},
|
||||
{531, 16},
|
||||
{532, 8},
|
||||
{534, 0},
|
||||
}
|
||||
_pcsp__value = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{439, 88},
|
||||
{443, 48},
|
||||
{444, 40},
|
||||
{446, 32},
|
||||
{448, 24},
|
||||
{450, 16},
|
||||
{452, 8},
|
||||
{453, 0},
|
||||
{1268, 88},
|
||||
}
|
||||
_pcsp__vnumber = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{150, 120},
|
||||
{154, 48},
|
||||
{155, 40},
|
||||
{157, 32},
|
||||
{159, 24},
|
||||
{161, 16},
|
||||
{163, 8},
|
||||
{164, 0},
|
||||
{1638, 120},
|
||||
}
|
||||
_pcsp__atof_eisel_lemire64 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{315, 40},
|
||||
{316, 32},
|
||||
{318, 24},
|
||||
{320, 16},
|
||||
{322, 8},
|
||||
{323, 0},
|
||||
{387, 40},
|
||||
}
|
||||
_pcsp__atof_native = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{596, 56},
|
||||
{600, 8},
|
||||
{602, 0},
|
||||
}
|
||||
_pcsp__decimal_to_f64 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{951, 56},
|
||||
{955, 48},
|
||||
{956, 40},
|
||||
{958, 32},
|
||||
{960, 24},
|
||||
{962, 16},
|
||||
{964, 8},
|
||||
{965, 0},
|
||||
{977, 56},
|
||||
}
|
||||
_pcsp__left_shift = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{418, 32},
|
||||
{419, 24},
|
||||
{421, 16},
|
||||
{423, 8},
|
||||
{424, 0},
|
||||
{539, 32},
|
||||
}
|
||||
_pcsp__right_shift = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{452, 16},
|
||||
{453, 8},
|
||||
{454, 0},
|
||||
{462, 16},
|
||||
{463, 8},
|
||||
{464, 0},
|
||||
{472, 16},
|
||||
{473, 8},
|
||||
{475, 0},
|
||||
}
|
||||
_pcsp__vsigned = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{113, 16},
|
||||
{114, 8},
|
||||
{115, 0},
|
||||
{126, 16},
|
||||
{127, 8},
|
||||
{128, 0},
|
||||
{278, 16},
|
||||
{279, 8},
|
||||
{280, 0},
|
||||
{284, 16},
|
||||
{285, 8},
|
||||
{286, 0},
|
||||
{340, 16},
|
||||
{341, 8},
|
||||
{342, 0},
|
||||
{353, 16},
|
||||
{354, 8},
|
||||
{356, 0},
|
||||
}
|
||||
_pcsp__vstring = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{11, 40},
|
||||
{105, 56},
|
||||
{109, 40},
|
||||
{110, 32},
|
||||
{112, 24},
|
||||
{114, 16},
|
||||
{116, 8},
|
||||
{118, 0},
|
||||
}
|
||||
_pcsp__vunsigned = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{74, 24},
|
||||
{75, 16},
|
||||
{77, 8},
|
||||
{78, 0},
|
||||
{89, 24},
|
||||
{90, 16},
|
||||
{92, 8},
|
||||
{93, 0},
|
||||
{116, 24},
|
||||
{117, 16},
|
||||
{119, 8},
|
||||
{120, 0},
|
||||
{281, 24},
|
||||
{282, 16},
|
||||
{284, 8},
|
||||
{285, 0},
|
||||
{336, 24},
|
||||
{337, 16},
|
||||
{339, 8},
|
||||
{340, 0},
|
||||
{348, 24},
|
||||
{349, 16},
|
||||
{351, 8},
|
||||
{353, 0},
|
||||
}
|
||||
)
|
||||
|
||||
var Funcs = []loader.CFunc{
|
||||
{"__native_entry__", 0, 67, 0, nil},
|
||||
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
|
||||
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
|
||||
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
|
||||
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
|
||||
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
|
||||
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
|
||||
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
|
||||
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
|
||||
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
|
||||
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
|
||||
{"_unescape", _entry__unescape, _size__unescape, _stack__unescape, _pcsp__unescape},
|
||||
{"_unhex16_is", _entry__unhex16_is, _size__unhex16_is, _stack__unhex16_is, _pcsp__unhex16_is},
|
||||
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
|
||||
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
|
||||
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
|
||||
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
|
||||
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
|
||||
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
|
||||
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
|
||||
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
|
||||
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
|
||||
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
|
||||
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
|
||||
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
|
||||
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
|
||||
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
|
||||
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
|
||||
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
|
||||
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
|
||||
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
|
||||
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
|
||||
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
|
||||
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
|
||||
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
|
||||
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
|
||||
}
|
||||
|
|
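The {pc, sp} pairs above (and in the tables that follow) map program-counter offsets inside each native routine to the stack-pointer delta at that point; together with the _entry__, _size__ and _stack__ constants they let the loader register the assembled routines with the Go runtime. A minimal sketch of reading such a table, assuming each entry {pc, sp} gives the stack delta in effect up to that pc offset (the helper below is illustrative only, not part of sonic or its loader):

package main

import "fmt"

// lookupSP returns the stack-pointer delta assumed to be in effect at the
// given pc offset, under the reading that an entry {pc, sp} covers offsets
// up to and including pc. Purely illustrative.
func lookupSP(pcsp [][2]uint32, pc uint32) uint32 {
	last := uint32(0)
	for _, e := range pcsp {
		if pc <= e[0] {
			return e[1]
		}
		last = e[1]
	}
	return last
}

func main() {
	// _pcsp__lspace from the avx table above: {1, 0}, {85, 8}, {87, 0}.
	lspace := [][2]uint32{{1, 0}, {85, 8}, {87, 0}}
	fmt.Println(lookupSP(lspace, 40)) // prints 8: mid-function, 8 bytes of stack in use
}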
14488
vendor/github.com/bytedance/sonic/internal/native/avx/native_text_amd64.go
generated
vendored
Normal file
File diff suppressed because it is too large
188
vendor/github.com/bytedance/sonic/internal/native/avx2/native_amd64.go
generated
vendored
|
@ -1,5 +1,7 @@
|
|||
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
|
@ -22,114 +24,168 @@
|
|||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
var (
|
||||
__i64toa func(out unsafe.Pointer, val int64) (ret int)
|
||||
|
||||
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
|
||||
|
||||
__f64toa func(out unsafe.Pointer, val float64) (ret int)
|
||||
|
||||
__f32toa func(out unsafe.Pointer, val float32) (ret int)
|
||||
|
||||
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
|
||||
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
|
||||
|
||||
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
|
||||
|
||||
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
|
||||
|
||||
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __i64toa(out *byte, val int64) (ret int)
|
||||
func i64toa(out *byte, val int64) (ret int) {
|
||||
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __u64toa(out *byte, val uint64) (ret int)
|
||||
func u64toa(out *byte, val uint64) (ret int) {
|
||||
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f64toa(out *byte, val float64) (ret int)
|
||||
func f64toa(out *byte, val float64) (ret int) {
|
||||
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f32toa(out *byte, val float32) (ret int)
|
||||
func f32toa(out *byte, val float32) (ret int) {
|
||||
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
|
||||
return __lspace(rt.NoEscape(sp), nb, off)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
|
||||
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
|
||||
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
|
||||
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
|
||||
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
|
||||
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
|
||||
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
|
||||
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
|
||||
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
|
||||
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
|
||||
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vnumber(s *string, p *int, v *types.JsonState)
|
||||
func vnumber(s *string, p *int, v *types.JsonState) {
|
||||
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vsigned(s *string, p *int, v *types.JsonState)
|
||||
func vsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vunsigned(s *string, p *int, v *types.JsonState)
|
||||
func vunsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one_fast(s *string, p *int) (ret int)
|
||||
func skip_one_fast(s *string, p *int) (ret int) {
|
||||
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_number(s *string, p *int) (ret int)
|
||||
func skip_number(s *string, p *int) (ret int) {
|
||||
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
|
||||
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
|
||||
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8_fast(s *string) (ret int)
|
||||
func validate_utf8_fast(s *string) (ret int) {
|
||||
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
|
||||
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
|
||||
}
|
||||
|
|
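In the rewritten file above, the former //go:noescape assembly declarations become function variables (__i64toa, __quote, ...) that the loader fills in at init time, and each exported helper is a thin //go:nosplit wrapper that passes its pointers through rt.NoEscape before calling the native code. A minimal sketch of that wrapper idiom, assuming rt.NoEscape behaves like the classic identity trick below (the noEscape helper and the stand-in callee are illustrative, not sonic's code):

package main

import (
	"fmt"
	"unsafe"
)

// noEscape hides a pointer from escape analysis, mirroring the runtime's
// well-known noescape trick, so that passing it to an opaque (assembly or
// JIT-loaded) callee does not force a heap allocation. Sketch only.
//
//go:nosplit
func noEscape(p unsafe.Pointer) unsafe.Pointer {
	x := uintptr(p)
	return unsafe.Pointer(x ^ 0) // identity, but breaks the escape-analysis edge
}

// nativeI64toa stands in for a loader-provided routine such as __i64toa.
var nativeI64toa func(out unsafe.Pointer, val int64) int

func i64toa(out *byte, val int64) int {
	return nativeI64toa(noEscape(unsafe.Pointer(out)), val)
}

func main() {
	// Stand-in implementation; the real one is installed by the loader.
	nativeI64toa = func(out unsafe.Pointer, val int64) int {
		s := fmt.Sprintf("%d", val)
		copy(unsafe.Slice((*byte)(out), len(s)), s)
		return len(s)
	}
	buf := make([]byte, 32)
	n := i64toa(&buf[0], -42)
	fmt.Println(string(buf[:n])) // -42
}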
15972
vendor/github.com/bytedance/sonic/internal/native/avx2/native_amd64.s
generated
vendored
File diff suppressed because it is too large
49
vendor/github.com/bytedance/sonic/internal/native/avx2/native_export_amd64.go
generated
vendored
|
@ -1,49 +0,0 @@
|
|||
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package avx2
|
||||
|
||||
var (
|
||||
S_f64toa = _subr__f64toa
|
||||
S_f32toa = _subr__f32toa
|
||||
S_i64toa = _subr__i64toa
|
||||
S_u64toa = _subr__u64toa
|
||||
S_lspace = _subr__lspace
|
||||
)
|
||||
|
||||
var (
|
||||
S_quote = _subr__quote
|
||||
S_unquote = _subr__unquote
|
||||
)
|
||||
|
||||
var (
|
||||
S_value = _subr__value
|
||||
S_vstring = _subr__vstring
|
||||
S_vnumber = _subr__vnumber
|
||||
S_vsigned = _subr__vsigned
|
||||
S_vunsigned = _subr__vunsigned
|
||||
)
|
||||
|
||||
var (
|
||||
S_skip_one = _subr__skip_one
|
||||
S_skip_one_fast = _subr__skip_one_fast
|
||||
S_skip_array = _subr__skip_array
|
||||
S_skip_object = _subr__skip_object
|
||||
S_skip_number = _subr__skip_number
|
||||
S_get_by_path = _subr__get_by_path
|
||||
)
|
752
vendor/github.com/bytedance/sonic/internal/native/avx2/native_subr_amd64.go
generated
vendored
|
@ -3,107 +3,679 @@
|
|||
|
||||
package avx2
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection ALL
|
||||
func __native_entry__() uintptr
|
||||
|
||||
var (
|
||||
_subr__f32toa = __native_entry__() + 33888
|
||||
_subr__f64toa = __native_entry__() + 288
|
||||
_subr__get_by_path = __native_entry__() + 28336
|
||||
_subr__html_escape = __native_entry__() + 10496
|
||||
_subr__i64toa = __native_entry__() + 3584
|
||||
_subr__lspace = __native_entry__() + 64
|
||||
_subr__quote = __native_entry__() + 5072
|
||||
_subr__skip_array = __native_entry__() + 20688
|
||||
_subr__skip_number = __native_entry__() + 24912
|
||||
_subr__skip_object = __native_entry__() + 22736
|
||||
_subr__skip_one = __native_entry__() + 25072
|
||||
_subr__skip_one_fast = __native_entry__() + 25488
|
||||
_subr__u64toa = __native_entry__() + 3696
|
||||
_subr__unquote = __native_entry__() + 7888
|
||||
_subr__validate_one = __native_entry__() + 25136
|
||||
_subr__validate_utf8 = __native_entry__() + 30320
|
||||
_subr__validate_utf8_fast = __native_entry__() + 31280
|
||||
_subr__value = __native_entry__() + 15024
|
||||
_subr__vnumber = __native_entry__() + 18352
|
||||
_subr__vsigned = __native_entry__() + 19968
|
||||
_subr__vstring = __native_entry__() + 17024
|
||||
_subr__vunsigned = __native_entry__() + 20336
|
||||
import (
|
||||
`github.com/bytedance/sonic/loader`
|
||||
)
|
||||
|
||||
const (
|
||||
_stack__f32toa = 48
|
||||
_entry__f32toa = 34640
|
||||
_entry__f64toa = 368
|
||||
_entry__format_significand = 39440
|
||||
_entry__format_integer = 3616
|
||||
_entry__fsm_exec = 22128
|
||||
_entry__advance_ns = 17856
|
||||
_entry__advance_string = 18592
|
||||
_entry__advance_string_default = 41024
|
||||
_entry__do_skip_number = 24624
|
||||
_entry__get_by_path = 29616
|
||||
_entry__skip_one_fast = 26736
|
||||
_entry__unescape = 41824
|
||||
_entry__unhex16_is = 11376
|
||||
_entry__html_escape = 11712
|
||||
_entry__i64toa = 4048
|
||||
_entry__u64toa = 4320
|
||||
_entry__lspace = 64
|
||||
_entry__quote = 5872
|
||||
_entry__skip_array = 22080
|
||||
_entry__skip_number = 26128
|
||||
_entry__skip_object = 24048
|
||||
_entry__skip_one = 26304
|
||||
_entry__unquote = 8816
|
||||
_entry__validate_one = 26368
|
||||
_entry__validate_utf8 = 30960
|
||||
_entry__validate_utf8_fast = 31920
|
||||
_entry__value = 16320
|
||||
_entry__vnumber = 19680
|
||||
_entry__atof_eisel_lemire64 = 13760
|
||||
_entry__atof_native = 15712
|
||||
_entry__decimal_to_f64 = 14240
|
||||
_entry__left_shift = 39920
|
||||
_entry__right_shift = 40464
|
||||
_entry__vsigned = 21328
|
||||
_entry__vstring = 18352
|
||||
_entry__vunsigned = 21696
|
||||
)
|
||||
|
||||
const (
|
||||
_stack__f32toa = 64
|
||||
_stack__f64toa = 80
|
||||
_stack__get_by_path = 296
|
||||
_stack__format_significand = 24
|
||||
_stack__format_integer = 16
|
||||
_stack__fsm_exec = 136
|
||||
_stack__advance_ns = 8
|
||||
_stack__advance_string = 48
|
||||
_stack__advance_string_default = 48
|
||||
_stack__do_skip_number = 40
|
||||
_stack__get_by_path = 304
|
||||
_stack__skip_one_fast = 184
|
||||
_stack__unescape = 64
|
||||
_stack__unhex16_is = 8
|
||||
_stack__html_escape = 72
|
||||
_stack__i64toa = 16
|
||||
_stack__lspace = 8
|
||||
_stack__quote = 56
|
||||
_stack__skip_array = 128
|
||||
_stack__skip_number = 72
|
||||
_stack__skip_object = 128
|
||||
_stack__skip_one = 128
|
||||
_stack__skip_one_fast = 208
|
||||
_stack__u64toa = 8
|
||||
_stack__unquote = 72
|
||||
_stack__validate_one = 128
|
||||
_stack__lspace = 8
|
||||
_stack__quote = 72
|
||||
_stack__skip_array = 144
|
||||
_stack__skip_number = 96
|
||||
_stack__skip_object = 144
|
||||
_stack__skip_one = 144
|
||||
_stack__unquote = 112
|
||||
_stack__validate_one = 144
|
||||
_stack__validate_utf8 = 48
|
||||
_stack__validate_utf8_fast = 176
|
||||
_stack__value = 328
|
||||
_stack__vnumber = 240
|
||||
_stack__value = 352
|
||||
_stack__vnumber = 264
|
||||
_stack__atof_eisel_lemire64 = 40
|
||||
_stack__atof_native = 144
|
||||
_stack__decimal_to_f64 = 88
|
||||
_stack__left_shift = 32
|
||||
_stack__right_shift = 16
|
||||
_stack__vsigned = 16
|
||||
_stack__vstring = 112
|
||||
_stack__vunsigned = 16
|
||||
)
|
||||
|
||||
var (
|
||||
_ = _subr__f32toa
|
||||
_ = _subr__f64toa
|
||||
_ = _subr__get_by_path
|
||||
_ = _subr__html_escape
|
||||
_ = _subr__i64toa
|
||||
_ = _subr__lspace
|
||||
_ = _subr__quote
|
||||
_ = _subr__skip_array
|
||||
_ = _subr__skip_number
|
||||
_ = _subr__skip_object
|
||||
_ = _subr__skip_one
|
||||
_ = _subr__skip_one_fast
|
||||
_ = _subr__u64toa
|
||||
_ = _subr__unquote
|
||||
_ = _subr__validate_one
|
||||
_ = _subr__validate_utf8
|
||||
_ = _subr__validate_utf8_fast
|
||||
_ = _subr__value
|
||||
_ = _subr__vnumber
|
||||
_ = _subr__vsigned
|
||||
_ = _subr__vstring
|
||||
_ = _subr__vunsigned
|
||||
_stack__vstring = 104
|
||||
_stack__vunsigned = 24
|
||||
)
|
||||
|
||||
const (
|
||||
_ = _stack__f32toa
|
||||
_ = _stack__f64toa
|
||||
_ = _stack__get_by_path
|
||||
_ = _stack__html_escape
|
||||
_ = _stack__i64toa
|
||||
_ = _stack__lspace
|
||||
_ = _stack__quote
|
||||
_ = _stack__skip_array
|
||||
_ = _stack__skip_number
|
||||
_ = _stack__skip_object
|
||||
_ = _stack__skip_one
|
||||
_ = _stack__skip_one_fast
|
||||
_ = _stack__u64toa
|
||||
_ = _stack__unquote
|
||||
_ = _stack__validate_one
|
||||
_ = _stack__validate_utf8
|
||||
_ = _stack__validate_utf8_fast
|
||||
_ = _stack__value
|
||||
_ = _stack__vnumber
|
||||
_ = _stack__vsigned
|
||||
_ = _stack__vstring
|
||||
_ = _stack__vunsigned
|
||||
_size__f32toa = 3792
|
||||
_size__f64toa = 3248
|
||||
_size__format_significand = 480
|
||||
_size__format_integer = 432
|
||||
_size__fsm_exec = 1380
|
||||
_size__advance_ns = 496
|
||||
_size__advance_string = 1040
|
||||
_size__advance_string_default = 800
|
||||
_size__do_skip_number = 1300
|
||||
_size__get_by_path = 1344
|
||||
_size__skip_one_fast = 2360
|
||||
_size__unescape = 704
|
||||
_size__unhex16_is = 144
|
||||
_size__html_escape = 2048
|
||||
_size__i64toa = 272
|
||||
_size__u64toa = 1408
|
||||
_size__lspace = 256
|
||||
_size__quote = 2896
|
||||
_size__skip_array = 48
|
||||
_size__skip_number = 160
|
||||
_size__skip_object = 48
|
||||
_size__skip_one = 48
|
||||
_size__unquote = 2560
|
||||
_size__validate_one = 64
|
||||
_size__validate_utf8 = 688
|
||||
_size__validate_utf8_fast = 2672
|
||||
_size__value = 992
|
||||
_size__vnumber = 1648
|
||||
_size__atof_eisel_lemire64 = 416
|
||||
_size__atof_native = 608
|
||||
_size__decimal_to_f64 = 1472
|
||||
_size__left_shift = 544
|
||||
_size__right_shift = 496
|
||||
_size__vsigned = 368
|
||||
_size__vstring = 144
|
||||
_size__vunsigned = 368
|
||||
)
|
||||
|
||||
var (
|
||||
_pcsp__f32toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3734, 64},
|
||||
{3738, 48},
|
||||
{3739, 40},
|
||||
{3741, 32},
|
||||
{3743, 24},
|
||||
{3745, 16},
|
||||
{3747, 8},
|
||||
{3751, 0},
|
||||
{3781, 64},
|
||||
}
|
||||
_pcsp__f64toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3124, 56},
|
||||
{3128, 48},
|
||||
{3129, 40},
|
||||
{3131, 32},
|
||||
{3133, 24},
|
||||
{3135, 16},
|
||||
{3137, 8},
|
||||
{3141, 0},
|
||||
{3234, 56},
|
||||
}
|
||||
_pcsp__format_significand = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{468, 24},
|
||||
{469, 16},
|
||||
{471, 8},
|
||||
{473, 0},
|
||||
}
|
||||
_pcsp__format_integer = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{412, 16},
|
||||
{413, 8},
|
||||
{414, 0},
|
||||
{423, 16},
|
||||
{424, 8},
|
||||
{426, 0},
|
||||
}
|
||||
_pcsp__fsm_exec = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1017, 88},
|
||||
{1021, 48},
|
||||
{1022, 40},
|
||||
{1024, 32},
|
||||
{1026, 24},
|
||||
{1028, 16},
|
||||
{1030, 8},
|
||||
{1031, 0},
|
||||
{1380, 88},
|
||||
}
|
||||
_pcsp__advance_ns = [][2]uint32{
|
||||
{1, 0},
|
||||
{442, 8},
|
||||
{446, 0},
|
||||
{467, 8},
|
||||
{471, 0},
|
||||
{489, 8},
|
||||
}
|
||||
_pcsp__advance_string = [][2]uint32{
|
||||
{14, 0},
|
||||
{18, 8},
|
||||
{20, 16},
|
||||
{22, 24},
|
||||
{24, 32},
|
||||
{26, 40},
|
||||
{396, 48},
|
||||
{397, 40},
|
||||
{399, 32},
|
||||
{401, 24},
|
||||
{403, 16},
|
||||
{405, 8},
|
||||
{409, 0},
|
||||
{1031, 48},
|
||||
}
|
||||
_pcsp__advance_string_default = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{314, 48},
|
||||
{315, 40},
|
||||
{317, 32},
|
||||
{319, 24},
|
||||
{321, 16},
|
||||
{323, 8},
|
||||
{327, 0},
|
||||
{786, 48},
|
||||
}
|
||||
_pcsp__do_skip_number = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{1253, 40},
|
||||
{1254, 32},
|
||||
{1256, 24},
|
||||
{1258, 16},
|
||||
{1260, 8},
|
||||
{1264, 0},
|
||||
{1300, 40},
|
||||
}
|
||||
_pcsp__get_by_path = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1293, 120},
|
||||
{1297, 48},
|
||||
{1298, 40},
|
||||
{1300, 32},
|
||||
{1302, 24},
|
||||
{1304, 16},
|
||||
{1306, 8},
|
||||
{1307, 0},
|
||||
{1344, 120},
|
||||
}
|
||||
_pcsp__skip_one_fast = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{396, 176},
|
||||
{397, 168},
|
||||
{399, 160},
|
||||
{401, 152},
|
||||
{403, 144},
|
||||
{405, 136},
|
||||
{409, 128},
|
||||
{2360, 176},
|
||||
}
|
||||
_pcsp__unescape = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{246, 56},
|
||||
{250, 48},
|
||||
{251, 40},
|
||||
{253, 32},
|
||||
{255, 24},
|
||||
{257, 16},
|
||||
{259, 8},
|
||||
{260, 0},
|
||||
{695, 56},
|
||||
}
|
||||
_pcsp__unhex16_is = [][2]uint32{
|
||||
{1, 0},
|
||||
{35, 8},
|
||||
{36, 0},
|
||||
{62, 8},
|
||||
{63, 0},
|
||||
{97, 8},
|
||||
{98, 0},
|
||||
{121, 8},
|
||||
{123, 0},
|
||||
}
|
||||
_pcsp__html_escape = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{2017, 72},
|
||||
{2021, 48},
|
||||
{2022, 40},
|
||||
{2024, 32},
|
||||
{2026, 24},
|
||||
{2028, 16},
|
||||
{2030, 8},
|
||||
{2035, 0},
|
||||
}
|
||||
_pcsp__i64toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{171, 8},
|
||||
{172, 0},
|
||||
{207, 8},
|
||||
{208, 0},
|
||||
{222, 8},
|
||||
{223, 0},
|
||||
{247, 8},
|
||||
{248, 0},
|
||||
{253, 8},
|
||||
{259, 0},
|
||||
}
|
||||
_pcsp__u64toa = [][2]uint32{
|
||||
{13, 0},
|
||||
{162, 8},
|
||||
{163, 0},
|
||||
{175, 8},
|
||||
{240, 0},
|
||||
{498, 8},
|
||||
{499, 0},
|
||||
{519, 8},
|
||||
{592, 0},
|
||||
{852, 8},
|
||||
{928, 0},
|
||||
{1376, 8},
|
||||
{1378, 0},
|
||||
}
|
||||
_pcsp__lspace = [][2]uint32{
|
||||
{1, 0},
|
||||
{186, 8},
|
||||
{190, 0},
|
||||
{199, 8},
|
||||
{203, 0},
|
||||
{210, 8},
|
||||
{214, 0},
|
||||
{232, 8},
|
||||
}
|
||||
_pcsp__quote = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{2828, 72},
|
||||
{2832, 48},
|
||||
{2833, 40},
|
||||
{2835, 32},
|
||||
{2837, 24},
|
||||
{2839, 16},
|
||||
{2841, 8},
|
||||
{2845, 0},
|
||||
{2876, 72},
|
||||
}
|
||||
_pcsp__skip_array = [][2]uint32{
|
||||
{1, 0},
|
||||
{28, 8},
|
||||
{34, 0},
|
||||
}
|
||||
_pcsp__skip_number = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{107, 56},
|
||||
{111, 48},
|
||||
{112, 40},
|
||||
{114, 32},
|
||||
{116, 24},
|
||||
{118, 16},
|
||||
{120, 8},
|
||||
{121, 0},
|
||||
{145, 56},
|
||||
}
|
||||
_pcsp__skip_object = [][2]uint32{
|
||||
{1, 0},
|
||||
{28, 8},
|
||||
{34, 0},
|
||||
}
|
||||
_pcsp__skip_one = [][2]uint32{
|
||||
{1, 0},
|
||||
{28, 8},
|
||||
{34, 0},
|
||||
}
|
||||
_pcsp__unquote = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1836, 104},
|
||||
{1840, 48},
|
||||
{1841, 40},
|
||||
{1843, 32},
|
||||
{1845, 24},
|
||||
{1847, 16},
|
||||
{1849, 8},
|
||||
{1853, 0},
|
||||
{2554, 104},
|
||||
}
|
||||
_pcsp__validate_one = [][2]uint32{
|
||||
{1, 0},
|
||||
{33, 8},
|
||||
{39, 0},
|
||||
}
|
||||
_pcsp__validate_utf8 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{11, 40},
|
||||
{639, 48},
|
||||
{643, 40},
|
||||
{644, 32},
|
||||
{646, 24},
|
||||
{648, 16},
|
||||
{650, 8},
|
||||
{651, 0},
|
||||
{682, 48},
|
||||
}
|
||||
_pcsp__validate_utf8_fast = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{5, 16},
|
||||
{1706, 176},
|
||||
{1707, 168},
|
||||
{1711, 160},
|
||||
{2003, 176},
|
||||
{2004, 168},
|
||||
{2008, 160},
|
||||
{2656, 176},
|
||||
}
|
||||
_pcsp__value = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{974, 88},
|
||||
{978, 48},
|
||||
{979, 40},
|
||||
{981, 32},
|
||||
{983, 24},
|
||||
{985, 16},
|
||||
{987, 8},
|
||||
{992, 0},
|
||||
}
|
||||
_pcsp__vnumber = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{150, 120},
|
||||
{154, 48},
|
||||
{155, 40},
|
||||
{157, 32},
|
||||
{159, 24},
|
||||
{161, 16},
|
||||
{163, 8},
|
||||
{164, 0},
|
||||
{1638, 120},
|
||||
}
|
||||
_pcsp__atof_eisel_lemire64 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{315, 40},
|
||||
{316, 32},
|
||||
{318, 24},
|
||||
{320, 16},
|
||||
{322, 8},
|
||||
{323, 0},
|
||||
{387, 40},
|
||||
}
|
||||
_pcsp__atof_native = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{596, 56},
|
||||
{600, 8},
|
||||
{602, 0},
|
||||
}
|
||||
_pcsp__decimal_to_f64 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1431, 56},
|
||||
{1435, 48},
|
||||
{1436, 40},
|
||||
{1438, 32},
|
||||
{1440, 24},
|
||||
{1442, 16},
|
||||
{1444, 8},
|
||||
{1448, 0},
|
||||
{1460, 56},
|
||||
}
|
||||
_pcsp__left_shift = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{418, 32},
|
||||
{419, 24},
|
||||
{421, 16},
|
||||
{423, 8},
|
||||
{424, 0},
|
||||
{539, 32},
|
||||
}
|
||||
_pcsp__right_shift = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{452, 16},
|
||||
{453, 8},
|
||||
{454, 0},
|
||||
{462, 16},
|
||||
{463, 8},
|
||||
{464, 0},
|
||||
{472, 16},
|
||||
{473, 8},
|
||||
{475, 0},
|
||||
}
|
||||
_pcsp__vsigned = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{113, 16},
|
||||
{114, 8},
|
||||
{115, 0},
|
||||
{126, 16},
|
||||
{127, 8},
|
||||
{128, 0},
|
||||
{278, 16},
|
||||
{279, 8},
|
||||
{280, 0},
|
||||
{284, 16},
|
||||
{285, 8},
|
||||
{286, 0},
|
||||
{340, 16},
|
||||
{341, 8},
|
||||
{342, 0},
|
||||
{353, 16},
|
||||
{354, 8},
|
||||
{356, 0},
|
||||
}
|
||||
_pcsp__vstring = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{11, 40},
|
||||
{105, 56},
|
||||
{109, 40},
|
||||
{110, 32},
|
||||
{112, 24},
|
||||
{114, 16},
|
||||
{116, 8},
|
||||
{118, 0},
|
||||
}
|
||||
_pcsp__vunsigned = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{74, 24},
|
||||
{75, 16},
|
||||
{77, 8},
|
||||
{78, 0},
|
||||
{89, 24},
|
||||
{90, 16},
|
||||
{92, 8},
|
||||
{93, 0},
|
||||
{116, 24},
|
||||
{117, 16},
|
||||
{119, 8},
|
||||
{120, 0},
|
||||
{281, 24},
|
||||
{282, 16},
|
||||
{284, 8},
|
||||
{285, 0},
|
||||
{336, 24},
|
||||
{337, 16},
|
||||
{339, 8},
|
||||
{340, 0},
|
||||
{348, 24},
|
||||
{349, 16},
|
||||
{351, 8},
|
||||
{353, 0},
|
||||
}
|
||||
)
|
||||
|
||||
var Funcs = []loader.CFunc{
|
||||
{"__native_entry__", 0, 67, 0, nil},
|
||||
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
|
||||
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
|
||||
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
|
||||
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
|
||||
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
|
||||
{"_advance_ns", _entry__advance_ns, _size__advance_ns, _stack__advance_ns, _pcsp__advance_ns},
|
||||
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
|
||||
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
|
||||
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
|
||||
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
|
||||
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
|
||||
{"_unescape", _entry__unescape, _size__unescape, _stack__unescape, _pcsp__unescape},
|
||||
{"_unhex16_is", _entry__unhex16_is, _size__unhex16_is, _stack__unhex16_is, _pcsp__unhex16_is},
|
||||
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
|
||||
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
|
||||
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
|
||||
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
|
||||
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
|
||||
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
|
||||
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
|
||||
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
|
||||
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
|
||||
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
|
||||
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
|
||||
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
|
||||
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
|
||||
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
|
||||
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
|
||||
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
|
||||
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
|
||||
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
|
||||
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
|
||||
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
|
||||
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
|
||||
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
|
||||
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
|
||||
}
|
||||
|
|
15187
vendor/github.com/bytedance/sonic/internal/native/avx2/native_text_amd64.go
generated
vendored
Normal file
File diff suppressed because it is too large
199
vendor/github.com/bytedance/sonic/internal/native/dispatch_amd64.go
generated
vendored
|
@ -24,6 +24,8 @@
|
|||
`github.com/bytedance/sonic/internal/native/avx2`
|
||||
`github.com/bytedance/sonic/internal/native/sse`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
`github.com/bytedance/sonic/loader`
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -42,6 +44,7 @@
|
|||
var (
|
||||
S_quote uintptr
|
||||
S_unquote uintptr
|
||||
S_html_escape uintptr
|
||||
)
|
||||
|
||||
var (
|
||||
|
@ -61,132 +64,140 @@
|
|||
S_skip_number uintptr
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func Quote(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) int
|
||||
var (
|
||||
S_validate_one uintptr
|
||||
S_validate_utf8 uintptr
|
||||
S_validate_utf8_fast uintptr
|
||||
)
|
||||
|
||||
var (
|
||||
__Quote func(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) int
|
||||
|
||||
__Unquote func(s unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) int
|
||||
|
||||
__HTMLEscape func(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) int
|
||||
|
||||
__Value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) int
|
||||
|
||||
__SkipOne func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) int
|
||||
|
||||
__SkipOneFast func(s unsafe.Pointer, p unsafe.Pointer) int
|
||||
|
||||
__GetByPath func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) int
|
||||
|
||||
__ValidateOne func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) int
|
||||
|
||||
__I64toa func(out unsafe.Pointer, val int64) (ret int)
|
||||
|
||||
__U64toa func(out unsafe.Pointer, val uint64) (ret int)
|
||||
|
||||
__F64toa func(out unsafe.Pointer, val float64) (ret int)
|
||||
|
||||
__ValidateUTF8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__ValidateUTF8Fast func(s unsafe.Pointer) (ret int)
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func Unquote(s unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) int
|
||||
func Quote(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) int {
|
||||
return __Quote(rt.NoEscape(unsafe.Pointer(s)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func HTMLEscape(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) int
|
||||
func Unquote(s unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) int {
|
||||
return __Unquote(rt.NoEscape(unsafe.Pointer(s)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func Value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) int
|
||||
func HTMLEscape(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) int {
|
||||
return __HTMLEscape(rt.NoEscape(unsafe.Pointer(s)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func SkipOne(s *string, p *int, m *types.StateMachine, flags uint64) int
|
||||
func Value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) int {
|
||||
return __Value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func SkipOneFast(s *string, p *int) int
|
||||
func SkipOne(s *string, p *int, m *types.StateMachine, flags uint64) int {
|
||||
return __SkipOne(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func GetByPath(s *string, p *int, path *[]interface{}, m *types.StateMachine) int
|
||||
func SkipOneFast(s *string, p *int) int {
|
||||
return __SkipOneFast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func ValidateOne(s *string, p *int, m *types.StateMachine) int
|
||||
func GetByPath(s *string, p *int, path *[]interface{}, m *types.StateMachine) int {
|
||||
return __GetByPath(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func I64toa(out *byte, val int64) (ret int)
|
||||
func ValidateOne(s *string, p *int, m *types.StateMachine) int {
|
||||
return __ValidateOne(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func U64toa(out *byte, val uint64) (ret int)
|
||||
func I64toa(out *byte, val int64) (ret int) {
|
||||
return __I64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func F64toa(out *byte, val float64) (ret int)
|
||||
func U64toa(out *byte, val uint64) (ret int) {
|
||||
return __U64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func ValidateUTF8(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func F64toa(out *byte, val float64) (ret int) {
|
||||
return __F64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func ValidateUTF8Fast(s *string) (ret int)
|
||||
func ValidateUTF8(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __ValidateUTF8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
func ValidateUTF8Fast(s *string) (ret int) {
|
||||
return __ValidateUTF8Fast(rt.NoEscape(unsafe.Pointer(s)))
|
||||
}
|
||||
|
||||
var stubs = []loader.GoC{
|
||||
{"_f64toa", &S_f64toa, &__F64toa},
|
||||
{"_f32toa", &S_f32toa, nil},
|
||||
{"_i64toa", &S_i64toa, &__I64toa},
|
||||
{"_u64toa", &S_u64toa, &__U64toa},
|
||||
{"_lspace", &S_lspace, nil},
|
||||
{"_quote", &S_quote, &__Quote},
|
||||
{"_unquote", &S_unquote, &__Unquote},
|
||||
{"_html_escape", &S_html_escape, &__HTMLEscape},
|
||||
{"_value", &S_value, &__Value},
|
||||
{"_vstring", &S_vstring, nil},
|
||||
{"_vnumber", &S_vnumber, nil},
|
||||
{"_vsigned", &S_vsigned, nil},
|
||||
{"_vunsigned", &S_vunsigned, nil},
|
||||
{"_skip_one", &S_skip_one, &__SkipOne},
|
||||
{"_skip_one_fast", &S_skip_one_fast, &__SkipOneFast},
|
||||
{"_get_by_path", &S_get_by_path, &__GetByPath},
|
||||
{"_skip_array", &S_skip_array, nil},
|
||||
{"_skip_object", &S_skip_object, nil},
|
||||
{"_skip_number", &S_skip_number, nil},
|
||||
{"_validate_one", &S_validate_one, &__ValidateOne},
|
||||
{"_validate_utf8", &S_validate_utf8, &__ValidateUTF8},
|
||||
{"_validate_utf8_fast", &S_validate_utf8_fast, &__ValidateUTF8Fast},
|
||||
}
|
||||
|
||||
func useAVX() {
|
||||
S_f64toa = avx.S_f64toa
|
||||
S_f32toa = avx.S_f32toa
|
||||
S_i64toa = avx.S_i64toa
|
||||
S_u64toa = avx.S_u64toa
|
||||
S_lspace = avx.S_lspace
|
||||
S_quote = avx.S_quote
|
||||
S_unquote = avx.S_unquote
|
||||
S_value = avx.S_value
|
||||
S_vstring = avx.S_vstring
|
||||
S_vnumber = avx.S_vnumber
|
||||
S_vsigned = avx.S_vsigned
|
||||
S_vunsigned = avx.S_vunsigned
|
||||
S_skip_one = avx.S_skip_one
|
||||
S_skip_one_fast = avx.S_skip_one_fast
|
||||
S_skip_array = avx.S_skip_array
|
||||
S_skip_object = avx.S_skip_object
|
||||
S_skip_number = avx.S_skip_number
|
||||
S_get_by_path = avx.S_get_by_path
|
||||
loader.WrapGoC(avx.Text__native_entry__, avx.Funcs, stubs, "avx", "avx/native.c")
|
||||
}
|
||||
|
||||
func useAVX2() {
|
||||
S_f64toa = avx2.S_f64toa
|
||||
S_f32toa = avx2.S_f32toa
|
||||
S_i64toa = avx2.S_i64toa
|
||||
S_u64toa = avx2.S_u64toa
|
||||
S_lspace = avx2.S_lspace
|
||||
S_quote = avx2.S_quote
|
||||
S_unquote = avx2.S_unquote
|
||||
S_value = avx2.S_value
|
||||
S_vstring = avx2.S_vstring
|
||||
S_vnumber = avx2.S_vnumber
|
||||
S_vsigned = avx2.S_vsigned
|
||||
S_vunsigned = avx2.S_vunsigned
|
||||
S_skip_one = avx2.S_skip_one
|
||||
S_skip_one_fast = avx2.S_skip_one_fast
|
||||
S_skip_array = avx2.S_skip_array
|
||||
S_skip_object = avx2.S_skip_object
|
||||
S_skip_number = avx2.S_skip_number
|
||||
S_get_by_path = avx2.S_get_by_path
|
||||
loader.WrapGoC(avx2.Text__native_entry__, avx2.Funcs, stubs, "avx2", "avx2/native.c")
|
||||
}
|
||||
|
||||
func useSSE() {
|
||||
S_f64toa = sse.S_f64toa
|
||||
S_f32toa = sse.S_f32toa
|
||||
S_i64toa = sse.S_i64toa
|
||||
S_u64toa = sse.S_u64toa
|
||||
S_lspace = sse.S_lspace
|
||||
S_quote = sse.S_quote
|
||||
S_unquote = sse.S_unquote
|
||||
S_value = sse.S_value
|
||||
S_vstring = sse.S_vstring
|
||||
S_vnumber = sse.S_vnumber
|
||||
S_vsigned = sse.S_vsigned
|
||||
S_vunsigned = sse.S_vunsigned
|
||||
S_skip_one = sse.S_skip_one
|
||||
S_skip_one_fast = sse.S_skip_one_fast
|
||||
S_skip_array = sse.S_skip_array
|
||||
S_skip_object = sse.S_skip_object
|
||||
S_skip_number = sse.S_skip_number
|
||||
S_get_by_path = sse.S_get_by_path
|
||||
loader.WrapGoC(sse.Text__native_entry__, sse.Funcs, stubs, "sse", "sse/native.c")
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
|
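The new dispatch_amd64.go picks an implementation set once at package init: useAVX2, useAVX and useSSE copy the chosen package's S_* entry points and bind the __Quote/__Unquote/... function variables via loader.WrapGoC, which replaces the per-call assembly trampolines in dispatch_amd64.s (deleted below). A minimal sketch of the same init-time, CPU-feature-based dispatch, using golang.org/x/sys/cpu and placeholder implementations rather than sonic's:

package main

import (
	"fmt"

	"golang.org/x/sys/cpu"
)

// quote is bound once at init to the fastest supported variant,
// so later calls pay no per-call feature check.
var quote func(s string) string

func quoteAVX2(s string) string { return "avx2:" + s }
func quoteAVX(s string) string  { return "avx:" + s }
func quoteSSE(s string) string  { return "sse:" + s }

func init() {
	switch {
	case cpu.X86.HasAVX2:
		quote = quoteAVX2
	case cpu.X86.HasAVX:
		quote = quoteAVX
	default:
		quote = quoteSSE
	}
}

func main() {
	fmt.Println(quote(`he said "hi"`))
}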
137
vendor/github.com/bytedance/sonic/internal/native/dispatch_amd64.s
generated
vendored
|
@ -1,137 +0,0 @@
|
|||
//
// Copyright 2021 ByteDance Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#include "go_asm.h"
#include "funcdata.h"
#include "textflag.h"

TEXT ·Quote(SB), NOSPLIT, $0 - 48
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__quote(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__quote(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__quote(SB)

TEXT ·Unquote(SB), NOSPLIT, $0 - 48
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__unquote(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__unquote(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__unquote(SB)

TEXT ·HTMLEscape(SB), NOSPLIT, $0 - 40
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__html_escape(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__html_escape(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__html_escape(SB)

TEXT ·Value(SB), NOSPLIT, $0 - 48
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__value(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__value(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__value(SB)

TEXT ·SkipOne(SB), NOSPLIT, $0 - 40
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__skip_one(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__skip_one(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__skip_one(SB)

TEXT ·SkipOneFast(SB), NOSPLIT, $0 - 24
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__skip_one_fast(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__skip_one_fast(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__skip_one_fast(SB)

TEXT ·GetByPath(SB), NOSPLIT, $0 - 40
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__get_by_path(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__get_by_path(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__get_by_path(SB)

TEXT ·ValidateOne(SB), NOSPLIT, $0 - 32
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__validate_one(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__validate_one(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__validate_one(SB)

TEXT ·ValidateUTF8(SB), NOSPLIT, $0 - 40
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__validate_utf8(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__validate_utf8(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__validate_utf8(SB)

TEXT ·ValidateUTF8Fast(SB), NOSPLIT, $0 - 16
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__validate_utf8_fast(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__validate_utf8_fast(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__validate_utf8_fast(SB)

TEXT ·I64toa(SB), NOSPLIT, $0 - 32
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__i64toa(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__i64toa(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__i64toa(SB)

TEXT ·U64toa(SB), NOSPLIT, $0 - 32
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__u64toa(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__u64toa(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__u64toa(SB)

TEXT ·F64toa(SB), NOSPLIT, $0 - 32
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__f64toa(SB)
    CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
    JE 2(PC)
    JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__f64toa(SB)
    JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__f64toa(SB)
50 vendor/github.com/bytedance/sonic/internal/native/fastfloat_amd64_test.tmpl generated vendored
@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
|
@ -17,35 +19,35 @@
|
|||
package {{PACKAGE}}
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
`math`
|
||||
`math/rand`
|
||||
`strconv`
|
||||
`testing`
|
||||
`math/rand`
|
||||
`encoding/json`
|
||||
|
||||
`github.com/stretchr/testify/assert`
|
||||
)
|
||||
|
||||
func TestFastFloat_Encode(t *testing.T) {
|
||||
var buf [64]byte
|
||||
assert.Equal(t, "0" , string(buf[:__f64toa(&buf[0], 0)]))
|
||||
assert.Equal(t, "-0" , string(buf[:__f64toa(&buf[0], math.Float64frombits(0x8000000000000000))]))
|
||||
assert.Equal(t, "12340000000" , string(buf[:__f64toa(&buf[0], 1234e7)]))
|
||||
assert.Equal(t, "12.34" , string(buf[:__f64toa(&buf[0], 1234e-2)]))
|
||||
assert.Equal(t, "0.001234" , string(buf[:__f64toa(&buf[0], 1234e-6)]))
|
||||
assert.Equal(t, "1e+30" , string(buf[:__f64toa(&buf[0], 1e30)]))
|
||||
assert.Equal(t, "1.234e+33" , string(buf[:__f64toa(&buf[0], 1234e30)]))
|
||||
assert.Equal(t, "1.234e+308" , string(buf[:__f64toa(&buf[0], 1234e305)]))
|
||||
assert.Equal(t, "1.234e-317" , string(buf[:__f64toa(&buf[0], 1234e-320)]))
|
||||
assert.Equal(t, "1.7976931348623157e+308" , string(buf[:__f64toa(&buf[0], 1.7976931348623157e308)]))
|
||||
assert.Equal(t, "-12340000000" , string(buf[:__f64toa(&buf[0], -1234e7)]))
|
||||
assert.Equal(t, "-12.34" , string(buf[:__f64toa(&buf[0], -1234e-2)]))
|
||||
assert.Equal(t, "-0.001234" , string(buf[:__f64toa(&buf[0], -1234e-6)]))
|
||||
assert.Equal(t, "-1e+30" , string(buf[:__f64toa(&buf[0], -1e30)]))
|
||||
assert.Equal(t, "-1.234e+33" , string(buf[:__f64toa(&buf[0], -1234e30)]))
|
||||
assert.Equal(t, "-1.234e+308" , string(buf[:__f64toa(&buf[0], -1234e305)]))
|
||||
assert.Equal(t, "-1.234e-317" , string(buf[:__f64toa(&buf[0], -1234e-320)]))
|
||||
assert.Equal(t, "-2.2250738585072014e-308" , string(buf[:__f64toa(&buf[0], -2.2250738585072014e-308)]))
|
||||
assert.Equal(t, "0" , string(buf[:f64toa(&buf[0], 0)]))
|
||||
assert.Equal(t, "-0" , string(buf[:f64toa(&buf[0], math.Float64frombits(0x8000000000000000))]))
|
||||
assert.Equal(t, "12340000000" , string(buf[:f64toa(&buf[0], 1234e7)]))
|
||||
assert.Equal(t, "12.34" , string(buf[:f64toa(&buf[0], 1234e-2)]))
|
||||
assert.Equal(t, "0.001234" , string(buf[:f64toa(&buf[0], 1234e-6)]))
|
||||
assert.Equal(t, "1e+30" , string(buf[:f64toa(&buf[0], 1e30)]))
|
||||
assert.Equal(t, "1.234e+33" , string(buf[:f64toa(&buf[0], 1234e30)]))
|
||||
assert.Equal(t, "1.234e+308" , string(buf[:f64toa(&buf[0], 1234e305)]))
|
||||
assert.Equal(t, "1.234e-317" , string(buf[:f64toa(&buf[0], 1234e-320)]))
|
||||
assert.Equal(t, "1.7976931348623157e+308" , string(buf[:f64toa(&buf[0], 1.7976931348623157e308)]))
|
||||
assert.Equal(t, "-12340000000" , string(buf[:f64toa(&buf[0], -1234e7)]))
|
||||
assert.Equal(t, "-12.34" , string(buf[:f64toa(&buf[0], -1234e-2)]))
|
||||
assert.Equal(t, "-0.001234" , string(buf[:f64toa(&buf[0], -1234e-6)]))
|
||||
assert.Equal(t, "-1e+30" , string(buf[:f64toa(&buf[0], -1e30)]))
|
||||
assert.Equal(t, "-1.234e+33" , string(buf[:f64toa(&buf[0], -1234e30)]))
|
||||
assert.Equal(t, "-1.234e+308" , string(buf[:f64toa(&buf[0], -1234e305)]))
|
||||
assert.Equal(t, "-1.234e-317" , string(buf[:f64toa(&buf[0], -1234e-320)]))
|
||||
assert.Equal(t, "-2.2250738585072014e-308" , string(buf[:f64toa(&buf[0], -2.2250738585072014e-308)]))
|
||||
}
|
||||
|
||||
func TestFastFloat_Random(t *testing.T) {
|
||||
|
@ -56,7 +58,7 @@ func TestFastFloat_Random(t *testing.T) {
|
|||
f64 := math.Float64frombits(b64)
|
||||
|
||||
jout, jerr := json.Marshal(f64)
|
||||
n := __f64toa(&buf[0], f64)
|
||||
n := f64toa(&buf[0], f64)
|
||||
if jerr == nil {
|
||||
assert.Equal(t, jout, buf[:n])
|
||||
} else {
|
||||
|
@ -65,7 +67,7 @@ func TestFastFloat_Random(t *testing.T) {
|
|||
|
||||
f32 := math.Float32frombits(rand.Uint32())
|
||||
jout, jerr = json.Marshal(f32)
|
||||
n = __f32toa(&buf[0], f32)
|
||||
n = f32toa(&buf[0], f32)
|
||||
if jerr == nil {
|
||||
assert.Equal(t, jout, buf[:n])
|
||||
} else {
|
||||
|
@ -97,7 +99,7 @@ func BenchmarkParseFloat64(b *testing.B) {
|
|||
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { strconv.AppendFloat(buf[:0], c.float, 'g', -1, 64) }},
|
||||
}, {
|
||||
name: "FastFloat",
|
||||
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { __f64toa(&buf[0], c.float) }},
|
||||
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { f64toa(&buf[0], c.float) }},
|
||||
}}
|
||||
for _, bm := range f64bench {
|
||||
name := bm.name + "_" + c.name
|
||||
|
@ -128,7 +130,7 @@ func BenchmarkParseFloat32(b *testing.B) {
|
|||
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { strconv.AppendFloat(buf[:0], float64(c.float), 'g', -1, 32) }},
|
||||
}, {
|
||||
name: "FastFloat32",
|
||||
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { __f32toa(&buf[0], c.float) }},
|
||||
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { f32toa(&buf[0], c.float) }},
|
||||
}}
|
||||
for _, bm := range bench {
|
||||
name := bm.name + "_" + c.name
|
||||
146 vendor/github.com/bytedance/sonic/internal/native/fastint_amd64_test.tmpl generated vendored
@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
|
@ -26,76 +28,76 @@ import (
|
|||
|
||||
func TestFastInt_IntToString(t *testing.T) {
|
||||
var buf [32]byte
|
||||
assert.Equal(t, "0" , string(buf[:__i64toa(&buf[0], 0)]))
|
||||
assert.Equal(t, "1" , string(buf[:__i64toa(&buf[0], 1)]))
|
||||
assert.Equal(t, "12" , string(buf[:__i64toa(&buf[0], 12)]))
|
||||
assert.Equal(t, "123" , string(buf[:__i64toa(&buf[0], 123)]))
|
||||
assert.Equal(t, "1234" , string(buf[:__i64toa(&buf[0], 1234)]))
|
||||
assert.Equal(t, "12345" , string(buf[:__i64toa(&buf[0], 12345)]))
|
||||
assert.Equal(t, "123456" , string(buf[:__i64toa(&buf[0], 123456)]))
|
||||
assert.Equal(t, "1234567" , string(buf[:__i64toa(&buf[0], 1234567)]))
|
||||
assert.Equal(t, "12345678" , string(buf[:__i64toa(&buf[0], 12345678)]))
|
||||
assert.Equal(t, "123456789" , string(buf[:__i64toa(&buf[0], 123456789)]))
|
||||
assert.Equal(t, "1234567890" , string(buf[:__i64toa(&buf[0], 1234567890)]))
|
||||
assert.Equal(t, "12345678901" , string(buf[:__i64toa(&buf[0], 12345678901)]))
|
||||
assert.Equal(t, "123456789012" , string(buf[:__i64toa(&buf[0], 123456789012)]))
|
||||
assert.Equal(t, "1234567890123" , string(buf[:__i64toa(&buf[0], 1234567890123)]))
|
||||
assert.Equal(t, "12345678901234" , string(buf[:__i64toa(&buf[0], 12345678901234)]))
|
||||
assert.Equal(t, "123456789012345" , string(buf[:__i64toa(&buf[0], 123456789012345)]))
|
||||
assert.Equal(t, "1234567890123456" , string(buf[:__i64toa(&buf[0], 1234567890123456)]))
|
||||
assert.Equal(t, "12345678901234567" , string(buf[:__i64toa(&buf[0], 12345678901234567)]))
|
||||
assert.Equal(t, "123456789012345678" , string(buf[:__i64toa(&buf[0], 123456789012345678)]))
|
||||
assert.Equal(t, "1234567890123456789" , string(buf[:__i64toa(&buf[0], 1234567890123456789)]))
|
||||
assert.Equal(t, "9223372036854775807" , string(buf[:__i64toa(&buf[0], 9223372036854775807)]))
|
||||
assert.Equal(t, "-1" , string(buf[:__i64toa(&buf[0], -1)]))
|
||||
assert.Equal(t, "-12" , string(buf[:__i64toa(&buf[0], -12)]))
|
||||
assert.Equal(t, "-123" , string(buf[:__i64toa(&buf[0], -123)]))
|
||||
assert.Equal(t, "-1234" , string(buf[:__i64toa(&buf[0], -1234)]))
|
||||
assert.Equal(t, "-12345" , string(buf[:__i64toa(&buf[0], -12345)]))
|
||||
assert.Equal(t, "-123456" , string(buf[:__i64toa(&buf[0], -123456)]))
|
||||
assert.Equal(t, "-1234567" , string(buf[:__i64toa(&buf[0], -1234567)]))
|
||||
assert.Equal(t, "-12345678" , string(buf[:__i64toa(&buf[0], -12345678)]))
|
||||
assert.Equal(t, "-123456789" , string(buf[:__i64toa(&buf[0], -123456789)]))
|
||||
assert.Equal(t, "-1234567890" , string(buf[:__i64toa(&buf[0], -1234567890)]))
|
||||
assert.Equal(t, "-12345678901" , string(buf[:__i64toa(&buf[0], -12345678901)]))
|
||||
assert.Equal(t, "-123456789012" , string(buf[:__i64toa(&buf[0], -123456789012)]))
|
||||
assert.Equal(t, "-1234567890123" , string(buf[:__i64toa(&buf[0], -1234567890123)]))
|
||||
assert.Equal(t, "-12345678901234" , string(buf[:__i64toa(&buf[0], -12345678901234)]))
|
||||
assert.Equal(t, "-123456789012345" , string(buf[:__i64toa(&buf[0], -123456789012345)]))
|
||||
assert.Equal(t, "-1234567890123456" , string(buf[:__i64toa(&buf[0], -1234567890123456)]))
|
||||
assert.Equal(t, "-12345678901234567" , string(buf[:__i64toa(&buf[0], -12345678901234567)]))
|
||||
assert.Equal(t, "-123456789012345678" , string(buf[:__i64toa(&buf[0], -123456789012345678)]))
|
||||
assert.Equal(t, "-1234567890123456789" , string(buf[:__i64toa(&buf[0], -1234567890123456789)]))
|
||||
assert.Equal(t, "-9223372036854775808" , string(buf[:__i64toa(&buf[0], -9223372036854775808)]))
|
||||
assert.Equal(t, "0" , string(buf[:i64toa(&buf[0], 0)]))
|
||||
assert.Equal(t, "1" , string(buf[:i64toa(&buf[0], 1)]))
|
||||
assert.Equal(t, "12" , string(buf[:i64toa(&buf[0], 12)]))
|
||||
assert.Equal(t, "123" , string(buf[:i64toa(&buf[0], 123)]))
|
||||
assert.Equal(t, "1234" , string(buf[:i64toa(&buf[0], 1234)]))
|
||||
assert.Equal(t, "12345" , string(buf[:i64toa(&buf[0], 12345)]))
|
||||
assert.Equal(t, "123456" , string(buf[:i64toa(&buf[0], 123456)]))
|
||||
assert.Equal(t, "1234567" , string(buf[:i64toa(&buf[0], 1234567)]))
|
||||
assert.Equal(t, "12345678" , string(buf[:i64toa(&buf[0], 12345678)]))
|
||||
assert.Equal(t, "123456789" , string(buf[:i64toa(&buf[0], 123456789)]))
|
||||
assert.Equal(t, "1234567890" , string(buf[:i64toa(&buf[0], 1234567890)]))
|
||||
assert.Equal(t, "12345678901" , string(buf[:i64toa(&buf[0], 12345678901)]))
|
||||
assert.Equal(t, "123456789012" , string(buf[:i64toa(&buf[0], 123456789012)]))
|
||||
assert.Equal(t, "1234567890123" , string(buf[:i64toa(&buf[0], 1234567890123)]))
|
||||
assert.Equal(t, "12345678901234" , string(buf[:i64toa(&buf[0], 12345678901234)]))
|
||||
assert.Equal(t, "123456789012345" , string(buf[:i64toa(&buf[0], 123456789012345)]))
|
||||
assert.Equal(t, "1234567890123456" , string(buf[:i64toa(&buf[0], 1234567890123456)]))
|
||||
assert.Equal(t, "12345678901234567" , string(buf[:i64toa(&buf[0], 12345678901234567)]))
|
||||
assert.Equal(t, "123456789012345678" , string(buf[:i64toa(&buf[0], 123456789012345678)]))
|
||||
assert.Equal(t, "1234567890123456789" , string(buf[:i64toa(&buf[0], 1234567890123456789)]))
|
||||
assert.Equal(t, "9223372036854775807" , string(buf[:i64toa(&buf[0], 9223372036854775807)]))
|
||||
assert.Equal(t, "-1" , string(buf[:i64toa(&buf[0], -1)]))
|
||||
assert.Equal(t, "-12" , string(buf[:i64toa(&buf[0], -12)]))
|
||||
assert.Equal(t, "-123" , string(buf[:i64toa(&buf[0], -123)]))
|
||||
assert.Equal(t, "-1234" , string(buf[:i64toa(&buf[0], -1234)]))
|
||||
assert.Equal(t, "-12345" , string(buf[:i64toa(&buf[0], -12345)]))
|
||||
assert.Equal(t, "-123456" , string(buf[:i64toa(&buf[0], -123456)]))
|
||||
assert.Equal(t, "-1234567" , string(buf[:i64toa(&buf[0], -1234567)]))
|
||||
assert.Equal(t, "-12345678" , string(buf[:i64toa(&buf[0], -12345678)]))
|
||||
assert.Equal(t, "-123456789" , string(buf[:i64toa(&buf[0], -123456789)]))
|
||||
assert.Equal(t, "-1234567890" , string(buf[:i64toa(&buf[0], -1234567890)]))
|
||||
assert.Equal(t, "-12345678901" , string(buf[:i64toa(&buf[0], -12345678901)]))
|
||||
assert.Equal(t, "-123456789012" , string(buf[:i64toa(&buf[0], -123456789012)]))
|
||||
assert.Equal(t, "-1234567890123" , string(buf[:i64toa(&buf[0], -1234567890123)]))
|
||||
assert.Equal(t, "-12345678901234" , string(buf[:i64toa(&buf[0], -12345678901234)]))
|
||||
assert.Equal(t, "-123456789012345" , string(buf[:i64toa(&buf[0], -123456789012345)]))
|
||||
assert.Equal(t, "-1234567890123456" , string(buf[:i64toa(&buf[0], -1234567890123456)]))
|
||||
assert.Equal(t, "-12345678901234567" , string(buf[:i64toa(&buf[0], -12345678901234567)]))
|
||||
assert.Equal(t, "-123456789012345678" , string(buf[:i64toa(&buf[0], -123456789012345678)]))
|
||||
assert.Equal(t, "-1234567890123456789" , string(buf[:i64toa(&buf[0], -1234567890123456789)]))
|
||||
assert.Equal(t, "-9223372036854775808" , string(buf[:i64toa(&buf[0], -9223372036854775808)]))
|
||||
}
|
||||
|
||||
func TestFastInt_UintToString(t *testing.T) {
|
||||
var buf [32]byte
|
||||
assert.Equal(t, "0" , string(buf[:__u64toa(&buf[0], 0)]))
|
||||
assert.Equal(t, "1" , string(buf[:__u64toa(&buf[0], 1)]))
|
||||
assert.Equal(t, "12" , string(buf[:__u64toa(&buf[0], 12)]))
|
||||
assert.Equal(t, "123" , string(buf[:__u64toa(&buf[0], 123)]))
|
||||
assert.Equal(t, "1234" , string(buf[:__u64toa(&buf[0], 1234)]))
|
||||
assert.Equal(t, "12345" , string(buf[:__u64toa(&buf[0], 12345)]))
|
||||
assert.Equal(t, "123456" , string(buf[:__u64toa(&buf[0], 123456)]))
|
||||
assert.Equal(t, "1234567" , string(buf[:__u64toa(&buf[0], 1234567)]))
|
||||
assert.Equal(t, "12345678" , string(buf[:__u64toa(&buf[0], 12345678)]))
|
||||
assert.Equal(t, "123456789" , string(buf[:__u64toa(&buf[0], 123456789)]))
|
||||
assert.Equal(t, "1234567890" , string(buf[:__u64toa(&buf[0], 1234567890)]))
|
||||
assert.Equal(t, "12345678901" , string(buf[:__u64toa(&buf[0], 12345678901)]))
|
||||
assert.Equal(t, "123456789012" , string(buf[:__u64toa(&buf[0], 123456789012)]))
|
||||
assert.Equal(t, "1234567890123" , string(buf[:__u64toa(&buf[0], 1234567890123)]))
|
||||
assert.Equal(t, "12345678901234" , string(buf[:__u64toa(&buf[0], 12345678901234)]))
|
||||
assert.Equal(t, "123456789012345" , string(buf[:__u64toa(&buf[0], 123456789012345)]))
|
||||
assert.Equal(t, "1234567890123456" , string(buf[:__u64toa(&buf[0], 1234567890123456)]))
|
||||
assert.Equal(t, "12345678901234567" , string(buf[:__u64toa(&buf[0], 12345678901234567)]))
|
||||
assert.Equal(t, "123456789012345678" , string(buf[:__u64toa(&buf[0], 123456789012345678)]))
|
||||
assert.Equal(t, "1234567890123456789" , string(buf[:__u64toa(&buf[0], 1234567890123456789)]))
|
||||
assert.Equal(t, "12345678901234567890" , string(buf[:__u64toa(&buf[0], 12345678901234567890)]))
|
||||
assert.Equal(t, "18446744073709551615" , string(buf[:__u64toa(&buf[0], 18446744073709551615)]))
|
||||
assert.Equal(t, "0" , string(buf[:u64toa(&buf[0], 0)]))
|
||||
assert.Equal(t, "1" , string(buf[:u64toa(&buf[0], 1)]))
|
||||
assert.Equal(t, "12" , string(buf[:u64toa(&buf[0], 12)]))
|
||||
assert.Equal(t, "123" , string(buf[:u64toa(&buf[0], 123)]))
|
||||
assert.Equal(t, "1234" , string(buf[:u64toa(&buf[0], 1234)]))
|
||||
assert.Equal(t, "12345" , string(buf[:u64toa(&buf[0], 12345)]))
|
||||
assert.Equal(t, "123456" , string(buf[:u64toa(&buf[0], 123456)]))
|
||||
assert.Equal(t, "1234567" , string(buf[:u64toa(&buf[0], 1234567)]))
|
||||
assert.Equal(t, "12345678" , string(buf[:u64toa(&buf[0], 12345678)]))
|
||||
assert.Equal(t, "123456789" , string(buf[:u64toa(&buf[0], 123456789)]))
|
||||
assert.Equal(t, "1234567890" , string(buf[:u64toa(&buf[0], 1234567890)]))
|
||||
assert.Equal(t, "12345678901" , string(buf[:u64toa(&buf[0], 12345678901)]))
|
||||
assert.Equal(t, "123456789012" , string(buf[:u64toa(&buf[0], 123456789012)]))
|
||||
assert.Equal(t, "1234567890123" , string(buf[:u64toa(&buf[0], 1234567890123)]))
|
||||
assert.Equal(t, "12345678901234" , string(buf[:u64toa(&buf[0], 12345678901234)]))
|
||||
assert.Equal(t, "123456789012345" , string(buf[:u64toa(&buf[0], 123456789012345)]))
|
||||
assert.Equal(t, "1234567890123456" , string(buf[:u64toa(&buf[0], 1234567890123456)]))
|
||||
assert.Equal(t, "12345678901234567" , string(buf[:u64toa(&buf[0], 12345678901234567)]))
|
||||
assert.Equal(t, "123456789012345678" , string(buf[:u64toa(&buf[0], 123456789012345678)]))
|
||||
assert.Equal(t, "1234567890123456789" , string(buf[:u64toa(&buf[0], 1234567890123456789)]))
|
||||
assert.Equal(t, "12345678901234567890" , string(buf[:u64toa(&buf[0], 12345678901234567890)]))
|
||||
assert.Equal(t, "18446744073709551615" , string(buf[:u64toa(&buf[0], 18446744073709551615)]))
|
||||
}
|
||||
|
||||
func BenchmarkFastInt_IntToString(b *testing.B) {
|
||||
func BenchmarkFastInt_IntToString(b *testing.B) {
|
||||
benchmarks := []struct {
|
||||
name string
|
||||
test func(*testing.B)
|
||||
|
@ -107,22 +109,22 @@ func TestFastInt_UintToString(t *testing.T) {
|
|||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendInt(buf[:0], -int64(i), 10) }},
|
||||
}, {
|
||||
name: "FastInt-Positive",
|
||||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { __i64toa(&buf[0], int64(i)) }},
|
||||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { i64toa(&buf[0], int64(i)) }},
|
||||
}, {
|
||||
name: "FastInt-Negative",
|
||||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { __i64toa(&buf[0], -int64(i)) }},
|
||||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { i64toa(&buf[0], -int64(i)) }},
|
||||
}}
|
||||
for _, bm := range benchmarks {
|
||||
b.Run(bm.name, bm.test)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type utoaBench struct {
|
||||
type utoaBench struct {
|
||||
name string
|
||||
num uint64
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkFastInt_UintToString(b *testing.B) {
|
||||
func BenchmarkFastInt_UintToString(b *testing.B) {
|
||||
maxUint := "18446744073709551615"
|
||||
benchs := make([]utoaBench, len(maxUint) + 1)
|
||||
benchs[0].name = "Zero"
|
||||
|
@ -141,11 +143,11 @@ func TestFastInt_UintToString(t *testing.T) {
|
|||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendUint(buf[:0], t.num, 10) }},
|
||||
}, {
|
||||
name: "FastInt",
|
||||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { __u64toa(&buf[0], t.num) }},
|
||||
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { u64toa(&buf[0], t.num) }},
|
||||
}}
|
||||
for _, bm := range benchmarks {
|
||||
name := fmt.Sprintf("%s_%s", bm.name, t.name)
|
||||
b.Run(name, bm.test)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
188 vendor/github.com/bytedance/sonic/internal/native/native_amd64.tmpl generated vendored
@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
|
@ -20,114 +22,168 @@ import (
|
|||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
var (
|
||||
__i64toa func(out unsafe.Pointer, val int64) (ret int)
|
||||
|
||||
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
|
||||
|
||||
__f64toa func(out unsafe.Pointer, val float64) (ret int)
|
||||
|
||||
__f32toa func(out unsafe.Pointer, val float32) (ret int)
|
||||
|
||||
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
|
||||
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
|
||||
|
||||
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
|
||||
|
||||
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
|
||||
|
||||
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __i64toa(out *byte, val int64) (ret int)
|
||||
func i64toa(out *byte, val int64) (ret int) {
|
||||
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __u64toa(out *byte, val uint64) (ret int)
|
||||
func u64toa(out *byte, val uint64) (ret int) {
|
||||
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f64toa(out *byte, val float64) (ret int)
|
||||
func f64toa(out *byte, val float64) (ret int) {
|
||||
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f32toa(out *byte, val float32) (ret int)
|
||||
func f32toa(out *byte, val float32) (ret int) {
|
||||
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
|
||||
return __lspace(rt.NoEscape(sp), nb, off)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
|
||||
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
|
||||
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
|
||||
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
|
||||
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
|
||||
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
|
||||
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
|
||||
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
|
||||
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
|
||||
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
|
||||
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vnumber(s *string, p *int, v *types.JsonState)
|
||||
func vnumber(s *string, p *int, v *types.JsonState) {
|
||||
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vsigned(s *string, p *int, v *types.JsonState)
|
||||
func vsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vunsigned(s *string, p *int, v *types.JsonState)
|
||||
func vunsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one_fast(s *string, p *int) (ret int)
|
||||
func skip_one_fast(s *string, p *int) (ret int) {
|
||||
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_number(s *string, p *int) (ret int)
|
||||
func skip_number(s *string, p *int) (ret int) {
|
||||
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
|
||||
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
|
||||
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8_fast(s *string) (ret int)
|
||||
func validate_utf8_fast(s *string) (ret int) {
|
||||
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
|
||||
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
|
||||
}
|
||||
|
|
190 vendor/github.com/bytedance/sonic/internal/native/native_amd64_test.tmpl generated vendored
@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
|
@ -20,6 +22,7 @@ import (
|
|||
`encoding/hex`
|
||||
`fmt`
|
||||
`math`
|
||||
`strings`
|
||||
`testing`
|
||||
`unsafe`
|
||||
|
||||
|
@ -34,7 +37,7 @@ func TestNative_Value(t *testing.T) {
|
|||
var v types.JsonState
|
||||
s := ` -12345`
|
||||
p := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
x := __value(p.Ptr, p.Len, 0, &v, 0)
|
||||
x := value(p.Ptr, p.Len, 0, &v, 0)
|
||||
assert.Equal(t, 9, x)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
assert.Equal(t, int64(-12345), v.Iv)
|
||||
|
@ -46,7 +49,7 @@ func TestNative_Value_OutOfBound(t *testing.T) {
|
|||
mem := []byte{'"', '"'}
|
||||
s := rt.Mem2Str(mem[:1])
|
||||
p := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
x := __value(p.Ptr, p.Len, 0, &v, 0)
|
||||
x := value(p.Ptr, p.Len, 0, &v, 0)
|
||||
assert.Equal(t, 1, x)
|
||||
assert.Equal(t, -int(types.ERR_EOF), int(v.Vt))
|
||||
}
|
||||
|
@ -56,7 +59,7 @@ func TestNative_Quote(t *testing.T) {
|
|||
d := make([]byte, 256)
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
|
||||
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
|
||||
if rv < 0 {
|
||||
require.NoError(t, types.ParsingError(-rv))
|
||||
}
|
||||
|
@ -70,7 +73,7 @@ func TestNative_QuoteNoMem(t *testing.T) {
|
|||
d := make([]byte, 10)
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
|
||||
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
|
||||
assert.Equal(t, -6, rv)
|
||||
assert.Equal(t, 5, len(d))
|
||||
assert.Equal(t, `hello`, string(d))
|
||||
|
@ -81,7 +84,7 @@ func TestNative_DoubleQuote(t *testing.T) {
|
|||
d := make([]byte, 256)
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, types.F_DOUBLE_UNQUOTE)
|
||||
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, types.F_DOUBLE_UNQUOTE)
|
||||
if rv < 0 {
|
||||
require.NoError(t, types.ParsingError(-rv))
|
||||
}
|
||||
|
@ -96,7 +99,7 @@ func TestNative_Unquote(t *testing.T) {
|
|||
ep := -1
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
if rv < 0 {
|
||||
require.NoError(t, types.ParsingError(-rv))
|
||||
}
|
||||
|
@ -111,7 +114,7 @@ func TestNative_UnquoteError(t *testing.T) {
|
|||
ep := -1
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
assert.Equal(t, -int(types.ERR_EOF), rv)
|
||||
assert.Equal(t, 5, ep)
|
||||
s = `asdf\gqwer`
|
||||
|
@ -119,7 +122,7 @@ func TestNative_UnquoteError(t *testing.T) {
|
|||
ep = -1
|
||||
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp = (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
assert.Equal(t, -int(types.ERR_INVALID_ESCAPE), rv)
|
||||
assert.Equal(t, 5, ep)
|
||||
s = `asdf\u1gggqwer`
|
||||
|
@ -127,7 +130,7 @@ func TestNative_UnquoteError(t *testing.T) {
|
|||
ep = -1
|
||||
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp = (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
assert.Equal(t, -int(types.ERR_INVALID_CHAR), rv)
|
||||
assert.Equal(t, 7, ep)
|
||||
s = `asdf\ud800qwer`
|
||||
|
@ -135,7 +138,7 @@ func TestNative_UnquoteError(t *testing.T) {
|
|||
ep = -1
|
||||
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp = (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
|
||||
assert.Equal(t, 6, ep)
|
||||
s = `asdf\\ud800qwer`
|
||||
|
@ -143,7 +146,7 @@ func TestNative_UnquoteError(t *testing.T) {
|
|||
ep = -1
|
||||
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp = (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
|
||||
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
|
||||
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
|
||||
assert.Equal(t, 7, ep)
|
||||
s = `asdf\ud800\ud800qwer`
|
||||
|
@ -151,7 +154,7 @@ func TestNative_UnquoteError(t *testing.T) {
|
|||
ep = -1
|
||||
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp = (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
|
||||
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
|
||||
assert.Equal(t, 12, ep)
|
||||
s = `asdf\\ud800\\ud800qwer`
|
||||
|
@ -159,7 +162,7 @@ func TestNative_UnquoteError(t *testing.T) {
|
|||
ep = -1
|
||||
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp = (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
|
||||
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
|
||||
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
|
||||
assert.Equal(t, 14, ep)
|
||||
}
|
||||
|
@ -170,7 +173,7 @@ func TestNative_DoubleUnquote(t *testing.T) {
|
|||
ep := -1
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
|
||||
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
|
||||
if rv < 0 {
|
||||
require.NoError(t, types.ParsingError(-rv))
|
||||
}
|
||||
|
@ -185,7 +188,7 @@ func TestNative_UnquoteUnicodeReplacement(t *testing.T) {
|
|||
ep := -1
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
|
||||
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
|
||||
if rv < 0 {
|
||||
require.NoError(t, types.ParsingError(-rv))
|
||||
}
|
||||
|
@ -197,7 +200,7 @@ func TestNative_UnquoteUnicodeReplacement(t *testing.T) {
|
|||
ep = -1
|
||||
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp = (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
|
||||
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
|
||||
if rv < 0 {
|
||||
require.NoError(t, types.ParsingError(-rv))
|
||||
}
|
||||
|
@ -211,7 +214,7 @@ func TestNative_HTMLEscape(t *testing.T) {
|
|||
d := make([]byte, 256)
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
|
||||
rv := html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
|
||||
if rv < 0 {
|
||||
require.NoError(t, types.ParsingError(-rv))
|
||||
}
|
||||
|
@ -225,7 +228,7 @@ func TestNative_HTMLEscapeNoMem(t *testing.T) {
|
|||
d := make([]byte, 10)
|
||||
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
|
||||
sp := (*rt.GoString)(unsafe.Pointer(&s))
|
||||
rv := __html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
|
||||
rv := html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
|
||||
assert.Equal(t, -6, rv)
|
||||
assert.Equal(t, 5, len(d))
|
||||
assert.Equal(t, `hello`, string(d))
|
||||
|
@ -235,11 +238,11 @@ func TestNative_Vstring(t *testing.T) {
|
|||
var v types.JsonState
|
||||
i := 0
|
||||
s := `test"test\n2"`
|
||||
__vstring(&s, &i, &v, 0)
|
||||
vstring(&s, &i, &v, 0)
|
||||
assert.Equal(t, 5, i)
|
||||
assert.Equal(t, -1, v.Ep)
|
||||
assert.Equal(t, int64(0), v.Iv)
|
||||
__vstring(&s, &i, &v, 0)
|
||||
vstring(&s, &i, &v, 0)
|
||||
assert.Equal(t, 13, i)
|
||||
assert.Equal(t, 9, v.Ep)
|
||||
assert.Equal(t, int64(5), v.Iv)
|
||||
|
@ -250,7 +253,7 @@ func TestNative_Vstring_ValidUnescapedChars(t *testing.T) {
|
|||
valid := uint64(types.F_VALIDATE_STRING)
|
||||
i := 0
|
||||
s := "test\x1f\""
|
||||
__vstring(&s, &i, &v, valid)
|
||||
vstring(&s, &i, &v, valid)
|
||||
assert.Equal(t, -int(types.ERR_INVALID_CHAR), int(v.Vt))
|
||||
}
|
||||
|
||||
|
@ -258,7 +261,7 @@ func TestNative_VstringEscapeEOF(t *testing.T) {
|
|||
var v types.JsonState
|
||||
i := 0
|
||||
s := `xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"x`
|
||||
__vstring(&s, &i, &v, 0)
|
||||
vstring(&s, &i, &v, 0)
|
||||
assert.Equal(t, 95, i)
|
||||
assert.Equal(t, 63, v.Ep)
|
||||
assert.Equal(t, int64(0), v.Iv)
|
||||
|
@ -274,7 +277,7 @@ func TestNative_VstringHangUpOnRandomData(t *testing.T) {
|
|||
p := 1
|
||||
s := rt.Mem2Str(v)
|
||||
var js types.JsonState
|
||||
__vstring(&s, &p, &js, 0)
|
||||
vstring(&s, &p, &js, 0)
|
||||
fmt.Printf("js: %s\n", spew.Sdump(js))
|
||||
}
|
||||
|
||||
|
@ -282,49 +285,49 @@ func TestNative_Vnumber(t *testing.T) {
|
|||
var v types.JsonState
|
||||
i := 0
|
||||
s := "1234"
|
||||
__vnumber(&s, &i, &v)
|
||||
vnumber(&s, &i, &v)
|
||||
assert.Equal(t, 4, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, int64(1234), v.Iv)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
i = 0
|
||||
s = "1.234"
|
||||
__vnumber(&s, &i, &v)
|
||||
vnumber(&s, &i, &v)
|
||||
assert.Equal(t, 5, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, 1.234, v.Dv)
|
||||
assert.Equal(t, types.V_DOUBLE, v.Vt)
|
||||
i = 0
|
||||
s = "1.234e5"
|
||||
__vnumber(&s, &i, &v)
|
||||
vnumber(&s, &i, &v)
|
||||
assert.Equal(t, 7, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, 1.234e5, v.Dv)
|
||||
assert.Equal(t, types.V_DOUBLE, v.Vt)
|
||||
i = 0
|
||||
s = "0.0125"
|
||||
__vnumber(&s, &i, &v)
|
||||
vnumber(&s, &i, &v)
|
||||
assert.Equal(t, 6, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, 0.0125, v.Dv)
|
||||
assert.Equal(t, types.V_DOUBLE, v.Vt)
|
||||
i = 0
|
||||
s = "100000000000000000000"
|
||||
__vnumber(&s, &i, &v)
|
||||
vnumber(&s, &i, &v)
|
||||
assert.Equal(t, 21, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, 100000000000000000000.0, v.Dv)
|
||||
assert.Equal(t, types.V_DOUBLE, v.Vt)
|
||||
i = 0
|
||||
s = "999999999999999900000"
|
||||
__vnumber(&s, &i, &v)
|
||||
vnumber(&s, &i, &v)
|
||||
assert.Equal(t, 21, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, 999999999999999900000.0, v.Dv)
|
||||
assert.Equal(t, types.V_DOUBLE, v.Vt)
|
||||
i = 0
|
||||
s = "-1.234"
|
||||
__vnumber(&s, &i, &v)
|
||||
vnumber(&s, &i, &v)
|
||||
assert.Equal(t, 6, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, -1.234, v.Dv)
|
||||
|
@ -335,65 +338,65 @@ func TestNative_Vsigned(t *testing.T) {
|
|||
var v types.JsonState
|
||||
i := 0
|
||||
s := "1234"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 4, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, int64(1234), v.Iv)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
i = 0
|
||||
s = "-1234"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 5, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, int64(-1234), v.Iv)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
i = 0
|
||||
s = "9223372036854775807"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 19, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, int64(math.MaxInt64), v.Iv)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
i = 0
|
||||
s = "-9223372036854775808"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 20, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, int64(math.MinInt64), v.Iv)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
i = 0
|
||||
s = "9223372036854775808"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 18, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
|
||||
i = 0
|
||||
s = "-9223372036854775809"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 19, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
|
||||
i = 0
|
||||
s = "1.234"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 1, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "0.0125"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 1, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "-1234e5"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 5, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "-1234e-5"
|
||||
__vsigned(&s, &i, &v)
|
||||
vsigned(&s, &i, &v)
|
||||
assert.Equal(t, 5, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
|
@ -403,63 +406,63 @@ func TestNative_Vunsigned(t *testing.T) {
|
|||
var v types.JsonState
|
||||
i := 0
|
||||
s := "1234"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 4, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, int64(1234), v.Iv)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
i = 0
|
||||
s = "18446744073709551615"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 20, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, ^int64(0), v.Iv)
|
||||
assert.Equal(t, types.V_INTEGER, v.Vt)
|
||||
i = 0
|
||||
s = "18446744073709551616"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 19, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
|
||||
i = 0
|
||||
s = "-1234"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 0, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "1.234"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 1, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "0.0125"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 1, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "1234e5"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 4, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "-1234e5"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 0, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "-1.234e5"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 0, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
i = 0
|
||||
s = "-1.234e-5"
|
||||
__vunsigned(&s, &i, &v)
|
||||
vunsigned(&s, &i, &v)
|
||||
assert.Equal(t, 0, i)
|
||||
assert.Equal(t, 0, v.Ep)
|
||||
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
|
||||
|
@ -468,36 +471,36 @@ func TestNative_Vunsigned(t *testing.T) {
|
|||
func TestNative_SkipOne(t *testing.T) {
|
||||
p := 0
|
||||
s := ` {"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
|
||||
q := __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q := skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 42, p)
|
||||
assert.Equal(t, 1, q)
|
||||
p = 0
|
||||
s = `1 2.5 -3 "asdf\nqwer" true false null {} []`
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 1, p)
|
||||
assert.Equal(t, 0, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 5, p)
|
||||
assert.Equal(t, 2, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 8, p)
|
||||
assert.Equal(t, 6, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 21, p)
|
||||
assert.Equal(t, 9, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 26, p)
|
||||
assert.Equal(t, 22, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 32, p)
|
||||
assert.Equal(t, 27, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 37, p)
|
||||
assert.Equal(t, 33, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 40, p)
|
||||
assert.Equal(t, 38, q)
|
||||
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, 43, p)
|
||||
assert.Equal(t, 41, q)
|
||||
}
|
||||
|
@ -509,7 +512,7 @@ func TestNative_SkipOne_Error(t *testing.T) {
|
|||
`"asdf`, `"\\\"`,
|
||||
}) {
|
||||
p := 0
|
||||
q := __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
q := skip_one(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.True(t, q < 0)
|
||||
}
|
||||
}
|
||||
|
@ -517,66 +520,74 @@ func TestNative_SkipOne_Error(t *testing.T) {
|
|||
func TestNative_SkipArray(t *testing.T) {
|
||||
p := 0
|
||||
s := `null, true, false, 1, 2.0, -3, {"asdf": "wqer"}],`
|
||||
__skip_array(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
skip_array(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, p, 48)
|
||||
}
|
||||
|
||||
func TestNative_SkipObject(t *testing.T) {
|
||||
p := 0
|
||||
s := `"asdf": "wqer"},`
|
||||
__skip_object(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
skip_object(&s, &p, &types.StateMachine{}, uint64(0))
|
||||
assert.Equal(t, p, 15)
|
||||
}
|
||||
|
||||
func TestNative_SkipNumber(t *testing.T) {
|
||||
p := 0
|
||||
s := `-1.23e+12`
|
||||
q := __skip_number(&s, &p)
|
||||
q := skip_number(&s, &p)
|
||||
assert.Equal(t, 9, p)
|
||||
assert.Equal(t, 0, q)
|
||||
}
|
||||
|
||||
func TestNative_SkipNumberInJson(t *testing.T) {
|
||||
p := 0x13
|
||||
s := "{\"h\":\"1.00000\",\"i\":true,\"pass3\":1}"
|
||||
q := skip_number(&s, &p)
|
||||
assert.Equal(t, 0x13, p)
|
||||
assert.Equal(t, -2, q)
|
||||
}
|
||||
|
||||
func TestNative_SkipOneFast(t *testing.T) {
|
||||
p := 0
|
||||
s := ` {"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
|
||||
q := __skip_one_fast(&s, &p)
|
||||
q := skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 42, p)
|
||||
assert.Equal(t, 1, q)
|
||||
p = 0
|
||||
s = `1, 2.5, -3, "asdf\nqwer", true, false, null, {}, [],`
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 1, p)
|
||||
assert.Equal(t, 0, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 6, p)
|
||||
assert.Equal(t, 3, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 10, p)
|
||||
assert.Equal(t, 8, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 24, p)
|
||||
assert.Equal(t, 12, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 30, p)
|
||||
assert.Equal(t, 26, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 37, p)
|
||||
assert.Equal(t, 32, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 43, p)
|
||||
assert.Equal(t, 39, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 47, p)
|
||||
assert.Equal(t, 45, q)
|
||||
p += 1
|
||||
q = __skip_one_fast(&s, &p)
|
||||
q = skip_one_fast(&s, &p)
|
||||
assert.Equal(t, 51, p)
|
||||
assert.Equal(t, 49, q)
|
||||
}
|
||||
|
@ -587,7 +598,36 @@ func TestNative_SkipOneFast_Error(t *testing.T) {
|
|||
`"asdf`, `"\\\"`,
|
||||
}) {
|
||||
p := 0
|
||||
q := __skip_one_fast(&s, &p)
|
||||
q := skip_one_fast(&s, &p)
|
||||
assert.True(t, q < 0)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNative_GetByPath(t *testing.T) {
|
||||
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
|
||||
p := 0
|
||||
path := []interface{}{"asdf", 4}
|
||||
ret := get_by_path(&s, &p, &path, types.NewStateMachine())
|
||||
assert.Equal(t, strings.Index(s, "2.0"), ret)
|
||||
}
|
||||
|
||||
func BenchmarkNative_SkipOneFast(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
for i:=0; i<b.N; i++ {
|
||||
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
|
||||
p := 0
|
||||
_ = skip_one_fast(&s, &p)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNative_GetByPath(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
for i:=0; i<b.N; i++ {
|
||||
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
|
||||
p := 0
|
||||
path := []interface{}{"asdf", 3}
|
||||
sm := types.NewStateMachine()
|
||||
_ = get_by_path(&s, &p, &path, sm)
|
||||
types.FreeStateMachine(sm)
|
||||
}
|
||||
}
|
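The tests above rely on a shared calling convention for the skip_* helpers: p is a byte cursor that is advanced just past the skipped JSON value, and the return value is the offset where that value started (a negative value reports an error). That is why the assertions come in pairs, e.g. after skipping "2.5" the cursor p lands at 6 and the return value q is 3, its start offset. A minimal standalone sketch of the same cursor-and-start-offset convention, written against a toy comma-separated input rather than sonic's native code (skipToken is an illustrative name, not a sonic API):

    package main

    import "fmt"

    // skipToken mimics the skip_one_fast contract on a toy input: it advances
    // *p just past the next comma-separated token and returns the offset at
    // which that token started, or a negative value when nothing is left.
    func skipToken(s string, p *int) int {
        for *p < len(s) && (s[*p] == ',' || s[*p] == ' ') {
            *p++ // step over separators in front of the token
        }
        if *p >= len(s) {
            return -1
        }
        start := *p
        for *p < len(s) && s[*p] != ',' {
            *p++ // consume the token itself
        }
        return start
    }

    func main() {
        s := "1, 2.5, -3"
        p := 0
        for q := skipToken(s, &p); q >= 0; q = skipToken(s, &p) {
            fmt.Printf("token %q started at %d, cursor now at %d\n", s[q:p], q, p)
        }
    }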
47
vendor/github.com/bytedance/sonic/internal/native/native_export_amd64.tmpl
generated
vendored
|
@ -1,47 +0,0 @@
|
|||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package {{PACKAGE}}
|
||||
|
||||
var (
|
||||
S_f64toa = _subr__f64toa
|
||||
S_f32toa = _subr__f32toa
|
||||
S_i64toa = _subr__i64toa
|
||||
S_u64toa = _subr__u64toa
|
||||
S_lspace = _subr__lspace
|
||||
)
|
||||
|
||||
var (
|
||||
S_quote = _subr__quote
|
||||
S_unquote = _subr__unquote
|
||||
)
|
||||
|
||||
var (
|
||||
S_value = _subr__value
|
||||
S_vstring = _subr__vstring
|
||||
S_vnumber = _subr__vnumber
|
||||
S_vsigned = _subr__vsigned
|
||||
S_vunsigned = _subr__vunsigned
|
||||
)
|
||||
|
||||
var (
|
||||
S_skip_one = _subr__skip_one
|
||||
S_skip_one_fast = _subr__skip_one_fast
|
||||
S_skip_array = _subr__skip_array
|
||||
S_skip_object = _subr__skip_object
|
||||
S_skip_number = _subr__skip_number
|
||||
S_get_by_path = _subr__get_by_path
|
||||
)
|
697
vendor/github.com/bytedance/sonic/internal/native/recover_amd64_test.tmpl
generated
vendored
Normal file
|
@ -0,0 +1,697 @@
|
|||
/**
|
||||
* Copyright 2023 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package {{PACKAGE}}
|
||||
|
||||
import (
|
||||
`os`
|
||||
`runtime`
|
||||
`runtime/debug`
|
||||
`testing`
|
||||
`time`
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/loader`
|
||||
)
|
||||
|
||||
var (
|
||||
debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
|
||||
)
|
||||
|
||||
var stubs = []loader.GoC{
|
||||
{"_f32toa", nil, &__f32toa},
|
||||
{"_f64toa", nil, &__f64toa},
|
||||
{"_fsm_exec", nil, &__fsm_exec},
|
||||
{"_get_by_path", nil, &__get_by_path},
|
||||
{"_html_escape", nil, &__html_escape},
|
||||
{"_i64toa", nil, &__i64toa},
|
||||
{"_lspace", nil, &__lspace},
|
||||
{"_quote", nil, &__quote},
|
||||
{"_skip_array", nil, &__skip_array},
|
||||
{"_skip_number", nil, &__skip_number},
|
||||
{"_skip_object", nil, &__skip_object},
|
||||
{"_skip_one", nil, &__skip_one},
|
||||
{"_skip_one_fast", nil, &__skip_one_fast},
|
||||
{"_u64toa", nil, &__u64toa},
|
||||
{"_unquote", nil, &__unquote},
|
||||
{"_validate_one", nil, &__validate_one},
|
||||
{"_validate_utf8", nil, &__validate_utf8},
|
||||
{"_validate_utf8_fast", nil, &__validate_utf8_fast},
|
||||
{"_value", nil, &__value},
|
||||
{"_vnumber", nil, &__vnumber},
|
||||
{"_vsigned", nil, &__vsigned},
|
||||
{"_vstring", nil, &__vstring},
|
||||
{"_vunsigned", nil, &__vunsigned},
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
loader.WrapGoC(Text__native_entry__, Funcs, stubs, "{{PACKAGE}}", "{{PACKAGE}}/native.c")
|
||||
|
||||
go func () {
|
||||
if !debugAsyncGC {
|
||||
return
|
||||
}
|
||||
println("Begin GC looping...")
|
||||
for {
|
||||
runtime.GC()
|
||||
debug.FreeOSMemory()
|
||||
}
|
||||
println("stop GC looping!")
|
||||
}()
|
||||
time.Sleep(time.Millisecond*100)
|
||||
m.Run()
|
||||
}
|
||||
|
||||
func TestRecover_f64toa(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = f64toa(nil, 123)
|
||||
}
|
||||
|
||||
func TestRecover_f32toa(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = f32toa(nil, 123)
|
||||
}
|
||||
|
||||
func TestRecover_i64toa(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = i64toa(nil, 123)
|
||||
}
|
||||
|
||||
func TestRecover_u64toa(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = u64toa(nil, 123)
|
||||
}
|
||||
|
||||
func TestRecover_lspace(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = lspace(nil, 2, 0)
|
||||
}
|
||||
|
||||
func TestRecover_quote(t *testing.T) {
|
||||
var dn = 10
|
||||
var dp = make([]byte, dn)
|
||||
var sp = []byte("123")
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = quote(nil, 3, unsafe.Pointer(&dp[0]), &dn, 0)
|
||||
})
|
||||
t.Run("dp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = quote(unsafe.Pointer(&sp[0]), 3, nil, &dn, 0)
|
||||
})
|
||||
t.Run("dn", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = quote(unsafe.Pointer(&sp[0]), 3, unsafe.Pointer(&dp[0]), nil, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_html_escape(t *testing.T) {
|
||||
var dn = 10
|
||||
var dp = make([]byte, dn)
|
||||
var sp = []byte("123")
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = html_escape(nil, 3, unsafe.Pointer(&dp[0]), &dn)
|
||||
})
|
||||
t.Run("dp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = html_escape(unsafe.Pointer(&sp[0]), 3, nil, &dn)
|
||||
})
|
||||
t.Run("dn", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = html_escape(unsafe.Pointer(&sp[0]), 3, unsafe.Pointer(&dp[0]), nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_unquote(t *testing.T) {
|
||||
var ep = 0
|
||||
var dp = make([]byte, 10)
|
||||
var sp = []byte("12\\x\"3\"4")
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = unquote(nil, len(sp), unsafe.Pointer(&dp[0]), &ep, 0)
|
||||
})
|
||||
t.Run("dp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = unquote(unsafe.Pointer(&sp[0]), len(sp), nil, &ep, 0)
|
||||
})
|
||||
t.Run("ep", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = unquote(unsafe.Pointer(&sp[0]), len(sp), unsafe.Pointer(&dp[0]), nil, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_value(t *testing.T) {
|
||||
var v = new(types.JsonState)
|
||||
var sp = []byte("123")
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = value(nil, 3, 0, v, 0)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = value(unsafe.Pointer(&sp[0]), 3, 0, nil, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_vstring(t *testing.T) {
|
||||
var v = new(types.JsonState)
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vstring(nil, &p, v, 0)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vstring(&sp, nil, v, 0)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vstring(&sp, &p, nil, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_vnumber(t *testing.T) {
|
||||
var v = new(types.JsonState)
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vnumber(nil, &p, v)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vnumber(&sp, nil, v)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vnumber(&sp, &p, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_vsigned(t *testing.T) {
|
||||
var v = new(types.JsonState)
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vsigned(nil, &p, v)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vsigned(&sp, nil, v)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vsigned(&sp, &p, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_vunsigned(t *testing.T) {
|
||||
var v = new(types.JsonState)
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vunsigned(nil, &p, v)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vunsigned(&sp, nil, v)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
vunsigned(&sp, &p, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_skip_one(t *testing.T) {
|
||||
var v = types.NewStateMachine()
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_one(nil, &p, v, 0)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_one(&sp, nil, v, 0)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_one(&sp, &p, nil, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_skip_one_fast(t *testing.T) {
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_one_fast(nil, &p)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_one_fast(&sp, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_skip_array(t *testing.T) {
|
||||
var v = types.NewStateMachine()
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_array(nil, &p, v, 0)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_array(&sp, nil, v, 0)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_array(&sp, &p, nil, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_skip_object(t *testing.T) {
|
||||
var v = types.NewStateMachine()
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_object(nil, &p, v, 0)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_object(&sp, nil, v, 0)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_object(&sp, &p, nil, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_skip_number(t *testing.T) {
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_number(nil, &p)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = skip_number(&sp, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_get_by_path(t *testing.T) {
|
||||
var v = []interface{}{}
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
var m = types.NewStateMachine()
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = get_by_path(nil, &p, &v, m)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = get_by_path(&sp, nil, &v, m)
|
||||
})
|
||||
t.Run("path", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = get_by_path(&sp, &p, nil, m)
|
||||
})
|
||||
t.Run("m", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = get_by_path(&sp, &p, &v, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_validate_one(t *testing.T) {
|
||||
var v = types.NewStateMachine()
|
||||
var sp = "123"
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = validate_one(nil, &p, v)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = validate_one(&sp, nil, v)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = validate_one(&sp, &p, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_validate_utf8(t *testing.T) {
|
||||
var v = types.NewStateMachine()
|
||||
var sp = string([]byte{0xff, 0xff, 0xff})
|
||||
var p = 0
|
||||
t.Run("sp", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = validate_utf8(nil, &p, v)
|
||||
})
|
||||
t.Run("p", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = validate_utf8(&sp, nil, v)
|
||||
})
|
||||
t.Run("v", func(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = validate_utf8(&sp, &p, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRecover_validate_utf8_fast(t *testing.T) {
|
||||
defer func() {
|
||||
if r := recover(); r!= nil {
|
||||
t.Log("recover: ", r)
|
||||
} else {
|
||||
t.Fatal("no panic")
|
||||
}
|
||||
}()
|
||||
_ = validate_utf8_fast(nil)
|
||||
}
|
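Every test in the template above asserts the same thing: a nil argument makes the wrapper panic, and the panic is observable via recover. The repeated deferred blocks can be read as instances of one small helper; this is only an illustrative refactoring sketch using the standard testing package (mustPanic is not part of sonic):

    package example

    import "testing"

    // mustPanic fails the test unless fn panics; it mirrors the deferred
    // recover/else-Fatal blocks repeated throughout the recover tests above.
    func mustPanic(t *testing.T, fn func()) {
        t.Helper()
        defer func() {
            if r := recover(); r != nil {
                t.Log("recover: ", r)
            } else {
                t.Fatal("no panic")
            }
        }()
        fn()
    }

    // Example usage: nil-pointer arguments are expected to trip the panic path.
    func TestNilArgPanics(t *testing.T) {
        mustPanic(t, func() {
            var p *int
            _ = *p // dereferencing nil panics, standing in for f64toa(nil, 123)
        })
    }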
188
vendor/github.com/bytedance/sonic/internal/native/sse/native_amd64.go
generated
vendored
|
@ -1,5 +1,7 @@
|
|||
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
|
@ -22,114 +24,168 @@
|
|||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
var (
|
||||
__i64toa func(out unsafe.Pointer, val int64) (ret int)
|
||||
|
||||
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
|
||||
|
||||
__f64toa func(out unsafe.Pointer, val float64) (ret int)
|
||||
|
||||
__f32toa func(out unsafe.Pointer, val float32) (ret int)
|
||||
|
||||
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
|
||||
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
|
||||
|
||||
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
|
||||
|
||||
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
|
||||
|
||||
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
|
||||
|
||||
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
|
||||
|
||||
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
|
||||
|
||||
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __i64toa(out *byte, val int64) (ret int)
|
||||
func i64toa(out *byte, val int64) (ret int) {
|
||||
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __u64toa(out *byte, val uint64) (ret int)
|
||||
func u64toa(out *byte, val uint64) (ret int) {
|
||||
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f64toa(out *byte, val float64) (ret int)
|
||||
func f64toa(out *byte, val float64) (ret int) {
|
||||
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __f32toa(out *byte, val float32) (ret int)
|
||||
func f32toa(out *byte, val float32) (ret int) {
|
||||
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
|
||||
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
|
||||
return __lspace(rt.NoEscape(sp), nb, off)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
|
||||
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
|
||||
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
|
||||
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
|
||||
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
|
||||
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
|
||||
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
|
||||
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
|
||||
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
|
||||
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
|
||||
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vnumber(s *string, p *int, v *types.JsonState)
|
||||
func vnumber(s *string, p *int, v *types.JsonState) {
|
||||
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vsigned(s *string, p *int, v *types.JsonState)
|
||||
func vsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __vunsigned(s *string, p *int, v *types.JsonState)
|
||||
func vunsigned(s *string, p *int, v *types.JsonState) {
|
||||
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_one_fast(s *string, p *int) (ret int)
|
||||
func skip_one_fast(s *string, p *int) (ret int) {
|
||||
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
|
||||
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
|
||||
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __skip_number(s *string, p *int) (ret int)
|
||||
func skip_number(s *string, p *int) (ret int) {
|
||||
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
|
||||
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
|
||||
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
|
||||
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
|
||||
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func __validate_utf8_fast(s *string) (ret int)
|
||||
func validate_utf8_fast(s *string) (ret int) {
|
||||
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
|
||||
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
|
||||
}
|
||||
|
|
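The rewritten native_amd64.go replaces direct //go:noescape assembly declarations with package-level function variables (__skip_one, __quote, ...) that the loader fills in at start-up, plus thin Go wrappers that launder their pointer arguments through rt.NoEscape before the call. A stripped-down sketch of that late-binding indirection in plain Go (bind and the pure-Go stand-in below are illustrative, not sonic's loader API):

    package example

    // __skip is a late-bound implementation, analogous to __skip_one above:
    // it is nil until the loader wires it up to the compiled native code.
    var __skip func(s *string, p *int) int

    // skip is the stable wrapper the rest of the package calls, analogous to
    // skip_one; the real code additionally wraps the pointers in rt.NoEscape.
    func skip(s *string, p *int) int {
        return __skip(s, p)
    }

    // bind plays the role of loader.WrapGoC: it installs an implementation
    // into the function variable before anything calls the wrapper.
    func bind(impl func(*string, *int) int) {
        __skip = impl
    }

    func init() {
        // A pure-Go stand-in for the assembly routine.
        bind(func(s *string, p *int) int {
            start := *p
            *p = len(*s)
            return start
        })
    }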
15479
vendor/github.com/bytedance/sonic/internal/native/sse/native_amd64.s
generated
vendored
File diff suppressed because it is too large
49
vendor/github.com/bytedance/sonic/internal/native/sse/native_export_amd64.go
generated
vendored
|
@ -1,49 +0,0 @@
|
|||
// Code generated by Makefile, DO NOT EDIT.
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package sse
|
||||
|
||||
var (
|
||||
S_f64toa = _subr__f64toa
|
||||
S_f32toa = _subr__f32toa
|
||||
S_i64toa = _subr__i64toa
|
||||
S_u64toa = _subr__u64toa
|
||||
S_lspace = _subr__lspace
|
||||
)
|
||||
|
||||
var (
|
||||
S_quote = _subr__quote
|
||||
S_unquote = _subr__unquote
|
||||
)
|
||||
|
||||
var (
|
||||
S_value = _subr__value
|
||||
S_vstring = _subr__vstring
|
||||
S_vnumber = _subr__vnumber
|
||||
S_vsigned = _subr__vsigned
|
||||
S_vunsigned = _subr__vunsigned
|
||||
)
|
||||
|
||||
var (
|
||||
S_skip_one = _subr__skip_one
|
||||
S_skip_one_fast = _subr__skip_one_fast
|
||||
S_skip_array = _subr__skip_array
|
||||
S_skip_object = _subr__skip_object
|
||||
S_skip_number = _subr__skip_number
|
||||
S_get_by_path = _subr__get_by_path
|
||||
)
|
740
vendor/github.com/bytedance/sonic/internal/native/sse/native_subr_amd64.go
generated
vendored
|
@ -3,107 +3,667 @@
|
|||
|
||||
package sse
|
||||
|
||||
//go:nosplit
|
||||
//go:noescape
|
||||
//goland:noinspection ALL
|
||||
func __native_entry__() uintptr
|
||||
|
||||
var (
|
||||
_subr__f32toa = __native_entry__() + 31760
|
||||
_subr__f64toa = __native_entry__() + 160
|
||||
_subr__get_by_path = __native_entry__() + 26384
|
||||
_subr__html_escape = __native_entry__() + 9072
|
||||
_subr__i64toa = __native_entry__() + 3424
|
||||
_subr__lspace = __native_entry__() + 16
|
||||
_subr__quote = __native_entry__() + 4864
|
||||
_subr__skip_array = __native_entry__() + 18112
|
||||
_subr__skip_number = __native_entry__() + 22128
|
||||
_subr__skip_object = __native_entry__() + 20512
|
||||
_subr__skip_one = __native_entry__() + 22288
|
||||
_subr__skip_one_fast = __native_entry__() + 22512
|
||||
_subr__u64toa = __native_entry__() + 3552
|
||||
_subr__unquote = __native_entry__() + 6704
|
||||
_subr__validate_one = __native_entry__() + 22336
|
||||
_subr__validate_utf8 = __native_entry__() + 30528
|
||||
_subr__validate_utf8_fast = __native_entry__() + 31200
|
||||
_subr__value = __native_entry__() + 12272
|
||||
_subr__vnumber = __native_entry__() + 15728
|
||||
_subr__vsigned = __native_entry__() + 17376
|
||||
_subr__vstring = __native_entry__() + 14112
|
||||
_subr__vunsigned = __native_entry__() + 17760
|
||||
import (
|
||||
`github.com/bytedance/sonic/loader`
|
||||
)
|
||||
|
||||
const (
|
||||
_stack__f32toa = 48
|
||||
_entry__f32toa = 31344
|
||||
_entry__f64toa = 128
|
||||
_entry__format_significand = 36272
|
||||
_entry__format_integer = 3280
|
||||
_entry__fsm_exec = 18832
|
||||
_entry__advance_string = 15024
|
||||
_entry__advance_string_default = 37808
|
||||
_entry__do_skip_number = 21376
|
||||
_entry__get_by_path = 26768
|
||||
_entry__skip_one_fast = 22896
|
||||
_entry__unescape = 38752
|
||||
_entry__unhex16_is = 9584
|
||||
_entry__html_escape = 9776
|
||||
_entry__i64toa = 3712
|
||||
_entry__u64toa = 3984
|
||||
_entry__lspace = 16
|
||||
_entry__quote = 5472
|
||||
_entry__skip_array = 18800
|
||||
_entry__skip_number = 22496
|
||||
_entry__skip_object = 21024
|
||||
_entry__skip_one = 22672
|
||||
_entry__unquote = 7248
|
||||
_entry__validate_one = 22720
|
||||
_entry__validate_utf8 = 30096
|
||||
_entry__validate_utf8_fast = 30784
|
||||
_entry__value = 13072
|
||||
_entry__vnumber = 16400
|
||||
_entry__atof_eisel_lemire64 = 11072
|
||||
_entry__atof_native = 12464
|
||||
_entry__decimal_to_f64 = 11472
|
||||
_entry__left_shift = 36752
|
||||
_entry__right_shift = 37296
|
||||
_entry__vsigned = 18048
|
||||
_entry__vstring = 14848
|
||||
_entry__vunsigned = 18416
|
||||
)
|
||||
|
||||
const (
|
||||
_stack__f32toa = 64
|
||||
_stack__f64toa = 80
|
||||
_stack__get_by_path = 240
|
||||
_stack__format_significand = 24
|
||||
_stack__format_integer = 16
|
||||
_stack__fsm_exec = 160
|
||||
_stack__advance_string = 72
|
||||
_stack__advance_string_default = 56
|
||||
_stack__do_skip_number = 32
|
||||
_stack__get_by_path = 264
|
||||
_stack__skip_one_fast = 136
|
||||
_stack__unescape = 64
|
||||
_stack__unhex16_is = 8
|
||||
_stack__html_escape = 64
|
||||
_stack__i64toa = 16
|
||||
_stack__lspace = 8
|
||||
_stack__quote = 64
|
||||
_stack__skip_array = 128
|
||||
_stack__skip_number = 72
|
||||
_stack__skip_object = 128
|
||||
_stack__skip_one = 128
|
||||
_stack__skip_one_fast = 136
|
||||
_stack__u64toa = 8
|
||||
_stack__unquote = 88
|
||||
_stack__validate_one = 128
|
||||
_stack__lspace = 8
|
||||
_stack__quote = 80
|
||||
_stack__skip_array = 168
|
||||
_stack__skip_number = 88
|
||||
_stack__skip_object = 168
|
||||
_stack__skip_one = 168
|
||||
_stack__unquote = 112
|
||||
_stack__validate_one = 168
|
||||
_stack__validate_utf8 = 48
|
||||
_stack__validate_utf8_fast = 24
|
||||
_stack__value = 328
|
||||
_stack__vnumber = 240
|
||||
_stack__value = 352
|
||||
_stack__vnumber = 264
|
||||
_stack__atof_eisel_lemire64 = 40
|
||||
_stack__atof_native = 144
|
||||
_stack__decimal_to_f64 = 88
|
||||
_stack__left_shift = 32
|
||||
_stack__right_shift = 16
|
||||
_stack__vsigned = 16
|
||||
_stack__vstring = 136
|
||||
_stack__vunsigned = 16
|
||||
)
|
||||
|
||||
var (
|
||||
_ = _subr__f32toa
|
||||
_ = _subr__f64toa
|
||||
_ = _subr__get_by_path
|
||||
_ = _subr__html_escape
|
||||
_ = _subr__i64toa
|
||||
_ = _subr__lspace
|
||||
_ = _subr__quote
|
||||
_ = _subr__skip_array
|
||||
_ = _subr__skip_number
|
||||
_ = _subr__skip_object
|
||||
_ = _subr__skip_one
|
||||
_ = _subr__skip_one_fast
|
||||
_ = _subr__u64toa
|
||||
_ = _subr__unquote
|
||||
_ = _subr__validate_one
|
||||
_ = _subr__validate_utf8
|
||||
_ = _subr__validate_utf8_fast
|
||||
_ = _subr__value
|
||||
_ = _subr__vnumber
|
||||
_ = _subr__vsigned
|
||||
_ = _subr__vstring
|
||||
_ = _subr__vunsigned
|
||||
_stack__vstring = 128
|
||||
_stack__vunsigned = 24
|
||||
)
|
||||
|
||||
const (
|
||||
_ = _stack__f32toa
|
||||
_ = _stack__f64toa
|
||||
_ = _stack__get_by_path
|
||||
_ = _stack__html_escape
|
||||
_ = _stack__i64toa
|
||||
_ = _stack__lspace
|
||||
_ = _stack__quote
|
||||
_ = _stack__skip_array
|
||||
_ = _stack__skip_number
|
||||
_ = _stack__skip_object
|
||||
_ = _stack__skip_one
|
||||
_ = _stack__skip_one_fast
|
||||
_ = _stack__u64toa
|
||||
_ = _stack__unquote
|
||||
_ = _stack__validate_one
|
||||
_ = _stack__validate_utf8
|
||||
_ = _stack__validate_utf8_fast
|
||||
_ = _stack__value
|
||||
_ = _stack__vnumber
|
||||
_ = _stack__vsigned
|
||||
_ = _stack__vstring
|
||||
_ = _stack__vunsigned
|
||||
_size__f32toa = 3696
|
||||
_size__f64toa = 3152
|
||||
_size__format_significand = 480
|
||||
_size__format_integer = 432
|
||||
_size__fsm_exec = 1656
|
||||
_size__advance_string = 1328
|
||||
_size__advance_string_default = 944
|
||||
_size__do_skip_number = 908
|
||||
_size__get_by_path = 3328
|
||||
_size__skip_one_fast = 3348
|
||||
_size__unescape = 704
|
||||
_size__unhex16_is = 128
|
||||
_size__html_escape = 1296
|
||||
_size__i64toa = 272
|
||||
_size__u64toa = 1440
|
||||
_size__lspace = 96
|
||||
_size__quote = 1760
|
||||
_size__skip_array = 32
|
||||
_size__skip_number = 160
|
||||
_size__skip_object = 32
|
||||
_size__skip_one = 32
|
||||
_size__unquote = 2336
|
||||
_size__validate_one = 48
|
||||
_size__validate_utf8 = 688
|
||||
_size__validate_utf8_fast = 544
|
||||
_size__value = 1268
|
||||
_size__vnumber = 1648
|
||||
_size__atof_eisel_lemire64 = 400
|
||||
_size__atof_native = 608
|
||||
_size__decimal_to_f64 = 992
|
||||
_size__left_shift = 544
|
||||
_size__right_shift = 480
|
||||
_size__vsigned = 368
|
||||
_size__vstring = 128
|
||||
_size__vunsigned = 368
|
||||
)
|
||||
|
||||
var (
|
||||
_pcsp__f32toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3638, 64},
|
||||
{3642, 48},
|
||||
{3643, 40},
|
||||
{3645, 32},
|
||||
{3647, 24},
|
||||
{3649, 16},
|
||||
{3651, 8},
|
||||
{3652, 0},
|
||||
{3682, 64},
|
||||
}
|
||||
_pcsp__f64toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3033, 56},
|
||||
{3037, 48},
|
||||
{3038, 40},
|
||||
{3040, 32},
|
||||
{3042, 24},
|
||||
{3044, 16},
|
||||
{3046, 8},
|
||||
{3047, 0},
|
||||
{3138, 56},
|
||||
}
|
||||
_pcsp__format_significand = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{468, 24},
|
||||
{469, 16},
|
||||
{471, 8},
|
||||
{473, 0},
|
||||
}
|
||||
_pcsp__format_integer = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{412, 16},
|
||||
{413, 8},
|
||||
{414, 0},
|
||||
{423, 16},
|
||||
{424, 8},
|
||||
{426, 0},
|
||||
}
|
||||
_pcsp__fsm_exec = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1317, 88},
|
||||
{1321, 48},
|
||||
{1322, 40},
|
||||
{1324, 32},
|
||||
{1326, 24},
|
||||
{1328, 16},
|
||||
{1330, 8},
|
||||
{1331, 0},
|
||||
{1656, 88},
|
||||
}
|
||||
_pcsp__advance_string = [][2]uint32{
|
||||
{14, 0},
|
||||
{18, 8},
|
||||
{20, 16},
|
||||
{22, 24},
|
||||
{24, 32},
|
||||
{26, 40},
|
||||
{27, 48},
|
||||
{587, 72},
|
||||
{591, 48},
|
||||
{592, 40},
|
||||
{594, 32},
|
||||
{596, 24},
|
||||
{598, 16},
|
||||
{600, 8},
|
||||
{601, 0},
|
||||
{1325, 72},
|
||||
}
|
||||
_pcsp__advance_string_default = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{402, 56},
|
||||
{406, 48},
|
||||
{407, 40},
|
||||
{409, 32},
|
||||
{411, 24},
|
||||
{413, 16},
|
||||
{415, 8},
|
||||
{416, 0},
|
||||
{936, 56},
|
||||
}
|
||||
_pcsp__do_skip_number = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{771, 32},
|
||||
{772, 24},
|
||||
{774, 16},
|
||||
{776, 8},
|
||||
{777, 0},
|
||||
{908, 32},
|
||||
}
|
||||
_pcsp__get_by_path = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{3278, 104},
|
||||
{3282, 48},
|
||||
{3283, 40},
|
||||
{3285, 32},
|
||||
{3287, 24},
|
||||
{3289, 16},
|
||||
{3291, 8},
|
||||
{3292, 0},
|
||||
{3317, 104},
|
||||
}
|
||||
_pcsp__skip_one_fast = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{600, 136},
|
||||
{604, 48},
|
||||
{605, 40},
|
||||
{607, 32},
|
||||
{609, 24},
|
||||
{611, 16},
|
||||
{613, 8},
|
||||
{614, 0},
|
||||
{3348, 136},
|
||||
}
|
||||
_pcsp__unescape = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{246, 56},
|
||||
{250, 48},
|
||||
{251, 40},
|
||||
{253, 32},
|
||||
{255, 24},
|
||||
{257, 16},
|
||||
{259, 8},
|
||||
{260, 0},
|
||||
{695, 56},
|
||||
}
|
||||
_pcsp__unhex16_is = [][2]uint32{
|
||||
{1, 0},
|
||||
{35, 8},
|
||||
{36, 0},
|
||||
{62, 8},
|
||||
{63, 0},
|
||||
{97, 8},
|
||||
{98, 0},
|
||||
{121, 8},
|
||||
{123, 0},
|
||||
}
|
||||
_pcsp__html_escape = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1281, 64},
|
||||
{1285, 48},
|
||||
{1286, 40},
|
||||
{1288, 32},
|
||||
{1290, 24},
|
||||
{1292, 16},
|
||||
{1294, 8},
|
||||
{1296, 0},
|
||||
}
|
||||
_pcsp__i64toa = [][2]uint32{
|
||||
{1, 0},
|
||||
{171, 8},
|
||||
{172, 0},
|
||||
{207, 8},
|
||||
{208, 0},
|
||||
{222, 8},
|
||||
{223, 0},
|
||||
{247, 8},
|
||||
{248, 0},
|
||||
{253, 8},
|
||||
{259, 0},
|
||||
}
|
||||
_pcsp__u64toa = [][2]uint32{
|
||||
{13, 0},
|
||||
{162, 8},
|
||||
{163, 0},
|
||||
{175, 8},
|
||||
{240, 0},
|
||||
{498, 8},
|
||||
{499, 0},
|
||||
{519, 8},
|
||||
{608, 0},
|
||||
{882, 8},
|
||||
{976, 0},
|
||||
{1434, 8},
|
||||
{1436, 0},
|
||||
}
|
||||
_pcsp__lspace = [][2]uint32{
|
||||
{1, 0},
|
||||
{85, 8},
|
||||
{87, 0},
|
||||
}
|
||||
_pcsp__quote = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1701, 80},
|
||||
{1705, 48},
|
||||
{1706, 40},
|
||||
{1708, 32},
|
||||
{1710, 24},
|
||||
{1712, 16},
|
||||
{1714, 8},
|
||||
{1715, 0},
|
||||
{1750, 80},
|
||||
}
|
||||
_pcsp__skip_array = [][2]uint32{
|
||||
{1, 0},
|
||||
{26, 8},
|
||||
{32, 0},
|
||||
}
|
||||
_pcsp__skip_number = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{107, 56},
|
||||
{111, 48},
|
||||
{112, 40},
|
||||
{114, 32},
|
||||
{116, 24},
|
||||
{118, 16},
|
||||
{120, 8},
|
||||
{121, 0},
|
||||
{145, 56},
|
||||
}
|
||||
_pcsp__skip_object = [][2]uint32{
|
||||
{1, 0},
|
||||
{26, 8},
|
||||
{32, 0},
|
||||
}
|
||||
_pcsp__skip_one = [][2]uint32{
|
||||
{1, 0},
|
||||
{26, 8},
|
||||
{32, 0},
|
||||
}
|
||||
_pcsp__unquote = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{1614, 104},
|
||||
{1618, 48},
|
||||
{1619, 40},
|
||||
{1621, 32},
|
||||
{1623, 24},
|
||||
{1625, 16},
|
||||
{1627, 8},
|
||||
{1628, 0},
|
||||
{2329, 104},
|
||||
}
|
||||
_pcsp__validate_one = [][2]uint32{
|
||||
{1, 0},
|
||||
{31, 8},
|
||||
{37, 0},
|
||||
}
|
||||
_pcsp__validate_utf8 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{11, 40},
|
||||
{639, 48},
|
||||
{643, 40},
|
||||
{644, 32},
|
||||
{646, 24},
|
||||
{648, 16},
|
||||
{650, 8},
|
||||
{651, 0},
|
||||
{682, 48},
|
||||
}
|
||||
_pcsp__validate_utf8_fast = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{5, 16},
|
||||
{247, 24},
|
||||
{251, 16},
|
||||
{252, 8},
|
||||
{253, 0},
|
||||
{527, 24},
|
||||
{531, 16},
|
||||
{532, 8},
|
||||
{534, 0},
|
||||
}
|
||||
_pcsp__value = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{439, 88},
|
||||
{443, 48},
|
||||
{444, 40},
|
||||
{446, 32},
|
||||
{448, 24},
|
||||
{450, 16},
|
||||
{452, 8},
|
||||
{453, 0},
|
||||
{1268, 88},
|
||||
}
|
||||
_pcsp__vnumber = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{150, 120},
|
||||
{154, 48},
|
||||
{155, 40},
|
||||
{157, 32},
|
||||
{159, 24},
|
||||
{161, 16},
|
||||
{163, 8},
|
||||
{164, 0},
|
||||
{1642, 120},
|
||||
}
|
||||
_pcsp__atof_eisel_lemire64 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{315, 40},
|
||||
{316, 32},
|
||||
{318, 24},
|
||||
{320, 16},
|
||||
{322, 8},
|
||||
{323, 0},
|
||||
{387, 40},
|
||||
}
|
||||
_pcsp__atof_native = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{596, 56},
|
||||
{600, 8},
|
||||
{602, 0},
|
||||
}
|
||||
_pcsp__decimal_to_f64 = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{12, 40},
|
||||
{13, 48},
|
||||
{951, 56},
|
||||
{955, 48},
|
||||
{956, 40},
|
||||
{958, 32},
|
||||
{960, 24},
|
||||
{962, 16},
|
||||
{964, 8},
|
||||
{965, 0},
|
||||
{977, 56},
|
||||
}
|
||||
_pcsp__left_shift = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{418, 32},
|
||||
{419, 24},
|
||||
{421, 16},
|
||||
{423, 8},
|
||||
{424, 0},
|
||||
{539, 32},
|
||||
}
|
||||
_pcsp__right_shift = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{452, 16},
|
||||
{453, 8},
|
||||
{454, 0},
|
||||
{462, 16},
|
||||
{463, 8},
|
||||
{464, 0},
|
||||
{472, 16},
|
||||
{473, 8},
|
||||
{475, 0},
|
||||
}
|
||||
_pcsp__vsigned = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{111, 16},
|
||||
{112, 8},
|
||||
{113, 0},
|
||||
{124, 16},
|
||||
{125, 8},
|
||||
{126, 0},
|
||||
{278, 16},
|
||||
{279, 8},
|
||||
{280, 0},
|
||||
{284, 16},
|
||||
{285, 8},
|
||||
{286, 0},
|
||||
{340, 16},
|
||||
{341, 8},
|
||||
{342, 0},
|
||||
{353, 16},
|
||||
{354, 8},
|
||||
{356, 0},
|
||||
}
|
||||
_pcsp__vstring = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{8, 24},
|
||||
{10, 32},
|
||||
{11, 40},
|
||||
{105, 56},
|
||||
{109, 40},
|
||||
{110, 32},
|
||||
{112, 24},
|
||||
{114, 16},
|
||||
{116, 8},
|
||||
{118, 0},
|
||||
}
|
||||
_pcsp__vunsigned = [][2]uint32{
|
||||
{1, 0},
|
||||
{4, 8},
|
||||
{6, 16},
|
||||
{72, 24},
|
||||
{73, 16},
|
||||
{75, 8},
|
||||
{76, 0},
|
||||
{87, 24},
|
||||
{88, 16},
|
||||
{90, 8},
|
||||
{91, 0},
|
||||
{114, 24},
|
||||
{115, 16},
|
||||
{117, 8},
|
||||
{118, 0},
|
||||
{281, 24},
|
||||
{282, 16},
|
||||
{284, 8},
|
||||
{285, 0},
|
||||
{336, 24},
|
||||
{337, 16},
|
||||
{339, 8},
|
||||
{340, 0},
|
||||
{348, 24},
|
||||
{349, 16},
|
||||
{351, 8},
|
||||
{353, 0},
|
||||
}
|
||||
)
|
||||
|
||||
var Funcs = []loader.CFunc{
|
||||
{"__native_entry__", 0, 67, 0, nil},
|
||||
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
|
||||
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
|
||||
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
|
||||
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
|
||||
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
|
||||
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
|
||||
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
|
||||
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
|
||||
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
|
||||
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
|
||||
{"_unescape", _entry__unescape, _size__unescape, _stack__unescape, _pcsp__unescape},
|
||||
{"_unhex16_is", _entry__unhex16_is, _size__unhex16_is, _stack__unhex16_is, _pcsp__unhex16_is},
|
||||
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
|
||||
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
|
||||
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
|
||||
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
|
||||
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
|
||||
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
|
||||
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
|
||||
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
|
||||
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
|
||||
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
|
||||
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
|
||||
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
|
||||
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
|
||||
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
|
||||
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
|
||||
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
|
||||
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
|
||||
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
|
||||
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
|
||||
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
|
||||
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
|
||||
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
|
||||
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
|
||||
}
|
||||
|
|
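Each _pcsp__* table above is a list of {pc offset, stack delta} pairs describing how much stack the routine has consumed by a given program counter, and loader.CFunc bundles it with the entry offset, code size and maximum stack so the runtime can unwind through the hand-written code. A small sketch of the shape of such a lookup, assuming each pair means "this delta applies up to this pc"; the real loader's boundary convention may differ (spAt is an illustrative helper, not part of the loader package):

    package example

    // spAt reads a {pc, sp} table under the assumption that each pair means
    // "the stack delta is sp for program counters up to pc"; the prologue
    // entries above ({1, 0}, {4, 8}, {6, 16}, ...) read naturally this way.
    func spAt(pcsp [][2]uint32, pc uint32) uint32 {
        for _, e := range pcsp {
            if pc <= e[0] {
                return e[1]
            }
        }
        return 0 // past the last recorded pc
    }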
14681
vendor/github.com/bytedance/sonic/internal/native/sse/native_text_amd64.go
generated
vendored
Normal file
File diff suppressed because it is too large
24
vendor/github.com/bytedance/sonic/internal/native/types/types.go
generated
vendored
|
@ -19,6 +19,7 @@
|
|||
import (
|
||||
`fmt`
|
||||
`sync`
|
||||
`unsafe`
|
||||
)
|
||||
|
||||
type ValueType int
|
||||
|
@ -46,15 +47,23 @@
|
|||
)
|
||||
|
||||
const (
|
||||
// for native.Unquote() flags
|
||||
B_DOUBLE_UNQUOTE = 0
|
||||
B_UNICODE_REPLACE = 1
|
||||
|
||||
// for native.Value() flags
|
||||
B_USE_NUMBER = 1
|
||||
B_VALIDATE_STRING = 5
|
||||
B_ALLOW_CONTROL = 31
|
||||
)
|
||||
|
||||
const (
|
||||
F_DOUBLE_UNQUOTE = 1 << B_DOUBLE_UNQUOTE
|
||||
F_UNICODE_REPLACE = 1 << B_UNICODE_REPLACE
|
||||
|
||||
F_USE_NUMBER = 1 << B_USE_NUMBER
|
||||
F_VALIDATE_STRING = 1 << B_VALIDATE_STRING
|
||||
F_ALLOW_CONTROL = 1 << B_ALLOW_CONTROL
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -136,3 +145,18 @@ func FreeStateMachine(fsm *StateMachine) {
|
|||
stackPool.Put(fsm)
|
||||
}
|
||||
|
||||
const MaxDigitNums = 800
|
||||
|
||||
var digitPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return (*byte)(unsafe.Pointer(&[MaxDigitNums]byte{}))
|
||||
},
|
||||
}
|
||||
|
||||
func NewDbuf() *byte {
|
||||
return digitPool.Get().(*byte)
|
||||
}
|
||||
|
||||
func FreeDbuf(p *byte) {
|
||||
digitPool.Put(p)
|
||||
}
|
|
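types.go keeps the bit positions (B_*) and the derived masks (F_* = 1 << B_*) separate so callers can OR the masks together into a flags word, and it recycles the fixed 800-byte digit buffers through a sync.Pool in the same way it already recycles state machines. A self-contained sketch of both idioms (the names mirror the diff, but this is not the sonic package itself):

    package example

    import "sync"

    const (
        bDoubleUnquote  = 0 // bit positions, cf. B_DOUBLE_UNQUOTE / B_UNICODE_REPLACE
        bUnicodeReplace = 1
    )

    const (
        fDoubleUnquote  = 1 << bDoubleUnquote // masks, cf. F_DOUBLE_UNQUOTE
        fUnicodeReplace = 1 << bUnicodeReplace
    )

    const maxDigitNums = 800 // cf. MaxDigitNums

    var digitPool = sync.Pool{
        New: func() interface{} { return new([maxDigitNums]byte) },
    }

    // newDbuf / freeDbuf mirror NewDbuf / FreeDbuf: borrow and return a
    // scratch buffer instead of allocating one per call.
    func newDbuf() *[maxDigitNums]byte { return digitPool.Get().(*[maxDigitNums]byte) }

    func freeDbuf(b *[maxDigitNums]byte) { digitPool.Put(b) }

    // decodeFlags shows how the masks are meant to be tested on a flags word.
    func decodeFlags(flags uint64) (double, unicode bool) {
        return flags&fDoubleUnquote != 0, flags&fUnicodeReplace != 0
    }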
@ -1,3 +1,5 @@
|
|||
// +build !go1.21
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
48
vendor/github.com/bytedance/sonic/internal/resolver/stubs_latest.go
generated
vendored
Normal file
|
@ -0,0 +1,48 @@
|
|||
// +build go1.21
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package resolver
|
||||
|
||||
import (
|
||||
_ `encoding/json`
|
||||
`reflect`
|
||||
_ `unsafe`
|
||||
)
|
||||
|
||||
type StdField struct {
|
||||
name string
|
||||
nameBytes []byte
|
||||
nameNonEsc string
|
||||
nameEscHTML string
|
||||
tag bool
|
||||
index []int
|
||||
typ reflect.Type
|
||||
omitEmpty bool
|
||||
quoted bool
|
||||
encoder func()
|
||||
}
|
||||
|
||||
type StdStructFields struct {
|
||||
list []StdField
|
||||
nameIndex map[string]*StdField
|
||||
byFoldedName map[string]*StdField
|
||||
}
|
||||
|
||||
//go:noescape
|
||||
//go:linkname typeFields encoding/json.typeFields
|
||||
func typeFields(_ reflect.Type) StdStructFields
|
12
vendor/github.com/bytedance/sonic/internal/rt/fastmem.go
generated
vendored
|
@ -110,3 +110,15 @@ func StrFrom(p unsafe.Pointer, n int64) (s string) {
|
|||
(*GoString)(unsafe.Pointer(&s)).Len = int(n)
|
||||
return
|
||||
}
|
||||
|
||||
// NoEscape hides a pointer from escape analysis. NoEscape is
|
||||
// the identity function but escape analysis doesn't think the
|
||||
// output depends on the input. NoEscape is inlined and currently
|
||||
// compiles down to zero instructions.
|
||||
// USE CAREFULLY!
|
||||
//go:nosplit
|
||||
//goland:noinspection GoVetUnsafePointer
|
||||
func NoEscape(p unsafe.Pointer) unsafe.Pointer {
|
||||
x := uintptr(p)
|
||||
return unsafe.Pointer(x ^ 0)
|
||||
}
|
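NoEscape above is the classic escape-analysis laundering trick: the xor with zero is free at run time, but it breaks the data-flow edge the compiler would otherwise use to conclude that the argument escapes, so a stack buffer can be handed to native code without being moved to the heap. A standalone sketch of the idiom and the kind of call site it is meant for (noEscape and sum are illustrative copies, not imports of sonic's rt package); the usual caveat applies: the callee must not retain the pointer.

    package example

    import "unsafe"

    // noEscape is the identity function on a pointer, written so that escape
    // analysis cannot see that the result aliases the argument.
    //go:nosplit
    func noEscape(p unsafe.Pointer) unsafe.Pointer {
        x := uintptr(p)
        return unsafe.Pointer(x ^ 0)
    }

    // sum stands in for an opaque routine that only reads the buffer.
    func sum(p unsafe.Pointer, n int) (s int) {
        for i := 0; i < n; i++ {
            s += int(*(*byte)(unsafe.Pointer(uintptr(p) + uintptr(i))))
        }
        return
    }

    func sumOnStack() int {
        var buf [4]byte // the laundered pointer hides the alias from escape analysis
        buf[0], buf[1], buf[2], buf[3] = 1, 2, 3, 4
        return sum(noEscape(unsafe.Pointer(&buf[0])), len(buf))
    }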
101
vendor/github.com/bytedance/sonic/loader/funcdata.go
generated
vendored
101
vendor/github.com/bytedance/sonic/loader/funcdata.go
generated
vendored
|
@ -42,6 +42,13 @@
|
|||
_SUB_BUCKETSIZE = _BUCKETSIZE / _SUBBUCKETS
|
||||
)
|
||||
|
||||
// Note: This list must match the list in runtime/symtab.go.
|
||||
const (
|
||||
FuncFlag_TOPFRAME = 1 << iota
|
||||
FuncFlag_SPWRITE
|
||||
FuncFlag_ASM
|
||||
)
|
||||
|
||||
// PCDATA and FUNCDATA table indexes.
|
||||
//
|
||||
// See funcdata.h and $GROOT/src/cmd/internal/objabi/funcdata.go.
|
||||
|
@ -142,3 +149,97 @@ func funcNameParts(name string) (string, string, string) {
|
|||
}
|
||||
return name[:i], "[...]", name[j+1:]
|
||||
}
|
||||
|
||||
|
||||
// func name table format:
|
||||
// nameOff[0] -> namePartA namePartB namePartC \x00
|
||||
// nameOff[1] -> namePartA namePartB namePartC \x00
|
||||
// ...
|
||||
func makeFuncnameTab(funcs []Func) (tab []byte, offs []int32) {
|
||||
offs = make([]int32, len(funcs))
|
||||
offset := 1
|
||||
tab = []byte{0}
|
||||
|
||||
for i, f := range funcs {
|
||||
offs[i] = int32(offset)
|
||||
|
||||
a, b, c := funcNameParts(f.Name)
|
||||
tab = append(tab, a...)
|
||||
tab = append(tab, b...)
|
||||
tab = append(tab, c...)
|
||||
tab = append(tab, 0)
|
||||
offset += len(a) + len(b) + len(c) + 1
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// CU table format:
|
||||
// cuOffsets[0] -> filetabOffset[0] filetabOffset[1] ... filetabOffset[len(CUs[0].fileNames)-1]
|
||||
// cuOffsets[1] -> filetabOffset[len(CUs[0].fileNames)] ... filetabOffset[len(CUs[0].fileNames) + len(CUs[1].fileNames)-1]
|
||||
// ...
|
||||
//
|
||||
// file name table format:
|
||||
// filetabOffset[0] -> CUs[0].fileNames[0] \x00
|
||||
// ...
|
||||
// filetabOffset[len(CUs[0]-1)] -> CUs[0].fileNames[len(CUs[0].fileNames)-1] \x00
|
||||
// ...
|
||||
// filetabOffset[SUM(CUs,fileNames)-1] -> CUs[len(CU)-1].fileNames[len(CUs[len(CU)-1].fileNames)-1] \x00
|
||||
func makeFilenametab(cus []compilationUnit) (cutab []uint32, filetab []byte, cuOffsets []uint32) {
|
||||
cuOffsets = make([]uint32, len(cus))
|
||||
cuOffset := 0
|
||||
fileOffset := 0
|
||||
|
||||
for i, cu := range cus {
|
||||
cuOffsets[i] = uint32(cuOffset)
|
||||
|
||||
for _, name := range cu.fileNames {
|
||||
cutab = append(cutab, uint32(fileOffset))
|
||||
|
||||
fileOffset += len(name) + 1
|
||||
filetab = append(filetab, name...)
|
||||
filetab = append(filetab, 0)
|
||||
}
|
||||
|
||||
cuOffset += len(cu.fileNames)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func writeFuncdata(out *[]byte, funcs []Func) (fstart int, funcdataOffs [][]uint32) {
|
||||
fstart = len(*out)
|
||||
*out = append(*out, byte(0))
|
||||
offs := uint32(1)
|
||||
|
||||
funcdataOffs = make([][]uint32, len(funcs))
|
||||
for i, f := range funcs {
|
||||
|
||||
var writer = func(fd encoding.BinaryMarshaler) {
|
||||
var ab []byte
|
||||
var err error
|
||||
if fd != nil {
|
||||
ab, err = fd.MarshalBinary()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], offs)
|
||||
} else {
|
||||
ab = []byte{0}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], _INVALID_FUNCDATA_OFFSET)
|
||||
}
|
||||
*out = append(*out, ab...)
|
||||
offs += uint32(len(ab))
|
||||
}
|
||||
|
||||
writer(f.ArgsPointerMaps)
|
||||
writer(f.LocalsPointerMaps)
|
||||
writer(f.StackObjects)
|
||||
writer(f.InlTree)
|
||||
writer(f.OpenCodedDeferInfo)
|
||||
writer(f.ArgInfo)
|
||||
writer(f.ArgLiveInfo)
|
||||
writer(f.WrapInfo)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
//go:build go1.15 && !go1.16
|
||||
// +build go1.15,!go1.16
|
||||
//go:build !go1.16
|
||||
// +build !go1.16
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
|
@ -20,9 +20,9 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
`encoding`
|
||||
`os`
|
||||
`unsafe`
|
||||
`sort`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
@ -171,99 +171,7 @@ type compilationUnit struct {
|
|||
fileNames []string
|
||||
}
|
||||
|
||||
// func name table format:
|
||||
// nameOff[0] -> namePartA namePartB namePartC \x00
|
||||
// nameOff[1] -> namePartA namePartB namePartC \x00
|
||||
// ...
|
||||
func makeFuncnameTab(funcs []Func) (tab []byte, offs []int32) {
|
||||
offs = make([]int32, len(funcs))
|
||||
offset := 0
|
||||
|
||||
for i, f := range funcs {
|
||||
offs[i] = int32(offset)
|
||||
|
||||
a, b, c := funcNameParts(f.Name)
|
||||
tab = append(tab, a...)
|
||||
tab = append(tab, b...)
|
||||
tab = append(tab, c...)
|
||||
tab = append(tab, 0)
|
||||
offset += len(a) + len(b) + len(c) + 1
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// CU table format:
|
||||
// cuOffsets[0] -> filetabOffset[0] filetabOffset[1] ... filetabOffset[len(CUs[0].fileNames)-1]
|
||||
// cuOffsets[1] -> filetabOffset[len(CUs[0].fileNames)] ... filetabOffset[len(CUs[0].fileNames) + len(CUs[1].fileNames)-1]
|
||||
// ...
|
||||
//
|
||||
// file name table format:
|
||||
// filetabOffset[0] -> CUs[0].fileNames[0] \x00
|
||||
// ...
|
||||
// filetabOffset[len(CUs[0]-1)] -> CUs[0].fileNames[len(CUs[0].fileNames)-1] \x00
|
||||
// ...
|
||||
// filetabOffset[SUM(CUs,fileNames)-1] -> CUs[len(CU)-1].fileNames[len(CUs[len(CU)-1].fileNames)-1] \x00
|
||||
func makeFilenametab(cus []compilationUnit) (cutab []uint32, filetab []byte, cuOffsets []uint32) {
|
||||
cuOffsets = make([]uint32, len(cus))
|
||||
cuOffset := 0
|
||||
fileOffset := 0
|
||||
|
||||
for i, cu := range cus {
|
||||
cuOffsets[i] = uint32(cuOffset)
|
||||
|
||||
for _, name := range cu.fileNames {
|
||||
cutab = append(cutab, uint32(fileOffset))
|
||||
|
||||
fileOffset += len(name) + 1
|
||||
filetab = append(filetab, name...)
|
||||
filetab = append(filetab, 0)
|
||||
}
|
||||
|
||||
cuOffset += len(cu.fileNames)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func writeFuncdata(out *[]byte, funcs []Func) (fstart int, funcdataOffs [][]uint32) {
|
||||
fstart = len(*out)
|
||||
*out = append(*out, byte(0))
|
||||
offs := uint32(1)
|
||||
|
||||
funcdataOffs = make([][]uint32, len(funcs))
|
||||
for i, f := range funcs {
|
||||
|
||||
var writer = func(fd encoding.BinaryMarshaler) {
|
||||
var ab []byte
|
||||
var err error
|
||||
if fd != nil {
|
||||
ab, err = fd.MarshalBinary()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], offs)
|
||||
} else {
|
||||
ab = []byte{0}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], _INVALID_FUNCDATA_OFFSET)
|
||||
}
|
||||
*out = append(*out, ab...)
|
||||
offs += uint32(len(ab))
|
||||
}
|
||||
|
||||
writer(f.ArgsPointerMaps)
|
||||
writer(f.LocalsPointerMaps)
|
||||
writer(f.StackObjects)
|
||||
writer(f.InlTree)
|
||||
writer(f.OpenCodedDeferInfo)
|
||||
writer(f.ArgInfo)
|
||||
writer(f.ArgLiveInfo)
|
||||
writer(f.WrapInfo)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func makeFtab(funcs []_func, lastFuncSize uint32) (ftab []funcTab, pclntabSize int64, startLocations []uint32) {
|
||||
func makeFtab(funcs []_func, maxpc uintptr) (ftab []funcTab, pclntabSize int64, startLocations []uint32) {
|
||||
// Allocate space for the pc->func table. This structure consists of a pc offset
|
||||
// and an offset to the func structure. After that, we have a single pc
|
||||
// value that marks the end of the last function in the binary.
|
||||
|
@ -283,14 +191,12 @@ func makeFtab(funcs []_func, lastFuncSize uint32) (ftab []funcTab, pclntabSize i
|
|||
}
|
||||
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
ftab = append(ftab, funcTab{lastFunc.entry + uintptr(lastFuncSize), 0})
|
||||
|
||||
ftab = append(ftab, funcTab{maxpc, 0})
|
||||
return
|
||||
}
|
||||
|
||||
// Pcln table format: [...]funcTab + [...]_Func
|
||||
func makePclntable(size int64, startLocations []uint32, funcs []_func, lastFuncSize uint32, pcdataOffs [][]uint32, funcdataAddr uintptr, funcdataOffs [][]uint32) (pclntab []byte) {
|
||||
func makePclntable(size int64, startLocations []uint32, funcs []_func, maxpc uintptr, pcdataOffs [][]uint32, funcdataAddr uintptr, funcdataOffs [][]uint32) (pclntab []byte) {
|
||||
pclntab = make([]byte, size, size)
|
||||
|
||||
// write a map of pc->func info offsets
|
||||
|
@ -301,8 +207,7 @@ func makePclntable(size int64, startLocations []uint32, funcs []_func, lastFuncS
|
|||
offs += 16
|
||||
}
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
byteOrder.PutUint64(pclntab[offs:offs+8], uint64(lastFunc.entry)+uint64(lastFuncSize))
|
||||
byteOrder.PutUint64(pclntab[offs:offs+8], uint64(maxpc))
|
||||
offs += 8
|
||||
|
||||
// write func info table
|
||||
|
@ -374,21 +279,22 @@ func writeFindfunctab(out *[]byte, ftab []funcTab) (start int) {
|
|||
tab := make([]findfuncbucket, 0, nbuckets)
|
||||
var s, e = 0, 0
|
||||
for i := 0; i<int(nbuckets); i++ {
|
||||
var pc = min + uintptr((i+1)*_BUCKETSIZE)
|
||||
// find the end func of the bucket
|
||||
for ; e < len(ftab)-1 && ftab[e+1].entry <= pc; e++ {}
|
||||
// store the start func of the bucket
|
||||
var fb = findfuncbucket{idx: uint32(s)}
|
||||
|
||||
// find the last e-th func of the bucket
|
||||
var pc = min + uintptr((i+1)*_BUCKETSIZE)
|
||||
for ; e < len(ftab)-1 && ftab[e+1].entry <= pc; e++ {}
|
||||
|
||||
for j := 0; j<_SUBBUCKETS && (i*_SUBBUCKETS+j)<int(n); j++ {
|
||||
pc = min + uintptr(i*_BUCKETSIZE) + uintptr((j+1)*_SUB_BUCKETSIZE)
|
||||
var ss = s
|
||||
// find the end func of the subbucket
|
||||
for ; ss < len(ftab)-1 && ftab[ss+1].entry <= pc; ss++ {}
|
||||
// store the start func of the subbucket
|
||||
fb._SUBBUCKETS[j] = byte(uint32(s) - fb.idx)
|
||||
s = ss
|
||||
|
||||
// find the s-th end func of the subbucket
|
||||
pc = min + uintptr(i*_BUCKETSIZE) + uintptr((j+1)*_SUB_BUCKETSIZE)
|
||||
for ; s < len(ftab)-1 && ftab[s+1].entry <= pc; s++ {}
|
||||
}
|
||||
|
||||
s = e
|
||||
tab = append(tab, fb)
|
||||
}
|
||||
|
@ -401,15 +307,20 @@ func writeFindfunctab(out *[]byte, ftab []funcTab) (start int) {
|
|||
return
|
||||
}
|
||||
|
||||
func makeModuledata(name string, filenames []string, funcs []Func, text []byte) (mod *moduledata) {
|
||||
func makeModuledata(name string, filenames []string, funcsp *[]Func, text []byte) (mod *moduledata) {
|
||||
mod = new(moduledata)
|
||||
mod.modulename = name
|
||||
|
||||
// sort funcs by entry
|
||||
funcs := *funcsp
|
||||
sort.Slice(funcs, func(i, j int) bool {
|
||||
return funcs[i].EntryOff < funcs[j].EntryOff
|
||||
})
|
||||
*funcsp = funcs
|
||||
|
||||
// make filename table
|
||||
cu := make([]string, 0, len(filenames))
|
||||
for _, f := range filenames {
|
||||
cu = append(cu, f)
|
||||
}
|
||||
cu = append(cu, filenames...)
|
||||
cutab, filetab, cuOffs := makeFilenametab([]compilationUnit{{cu}})
|
||||
mod.cutab = cutab
|
||||
mod.filetab = filetab
|
||||
|
@ -428,9 +339,16 @@ funcnametab, nameOffs := makeFuncnameTab(funcs)
|
|||
// make it executable
|
||||
mprotect(addr, size)
|
||||
|
||||
// assign addresses
|
||||
mod.text = addr
|
||||
mod.etext = addr + uintptr(size)
|
||||
mod.minpc = addr
|
||||
mod.maxpc = addr + uintptr(len(text))
|
||||
|
||||
// make pcdata table
|
||||
// NOTICE: _func only use offset to index pcdata, thus no need mmap() pcdata
|
||||
pctab, pcdataOffs, _funcs := makePctab(funcs, addr, cuOffs, nameOffs)
|
||||
cuOff := cuOffs[0]
|
||||
pctab, pcdataOffs, _funcs := makePctab(funcs, addr, cuOff, nameOffs)
|
||||
mod.pctab = pctab
|
||||
|
||||
// write func data
|
||||
|
@ -440,8 +358,7 @@ funcnametab, nameOffs := makeFuncnameTab(funcs)
|
|||
fstart, funcdataOffs := writeFuncdata(&cache, funcs)
|
||||
|
||||
// make pc->func (binary search) func table
|
||||
lastFuncsize := funcs[len(funcs)-1].TextSize
|
||||
ftab, pclntSize, startLocations := makeFtab(_funcs, lastFuncsize)
|
||||
ftab, pclntSize, startLocations := makeFtab(_funcs, mod.maxpc)
|
||||
mod.ftab = ftab
|
||||
|
||||
// write pc->func (modmap) findfunc table
|
||||
|
@ -455,15 +372,9 @@ funcnametab, nameOffs := makeFuncnameTab(funcs)
|
|||
funcdataAddr := uintptr(rt.IndexByte(cache, fstart))
|
||||
|
||||
// make pclnt table
|
||||
pclntab := makePclntable(pclntSize, startLocations, _funcs, lastFuncsize, pcdataOffs, funcdataAddr, funcdataOffs)
|
||||
pclntab := makePclntable(pclntSize, startLocations, _funcs, mod.maxpc, pcdataOffs, funcdataAddr, funcdataOffs)
|
||||
mod.pclntable = pclntab
|
||||
|
||||
// assign addresses
|
||||
mod.text = addr
|
||||
mod.etext = addr + uintptr(size)
|
||||
mod.minpc = addr
|
||||
mod.maxpc = addr + uintptr(len(text))
|
||||
|
||||
// make pc header
|
||||
mod.pcHeader = &pcHeader {
|
||||
magic : _Magic,
|
||||
|
@ -487,7 +398,7 @@ funcnameOffset: getOffsetOf(moduledata{}, "funcnametab"),
|
|||
|
||||
// makePctab generates pcdelta->valuedelta tables for functions,
|
||||
// and returns the table and the entry offset of every kind pcdata in the table.
|
||||
func makePctab(funcs []Func, addr uintptr, cuOffset []uint32, nameOffset []int32) (pctab []byte, pcdataOffs [][]uint32, _funcs []_func) {
|
||||
func makePctab(funcs []Func, addr uintptr, cuOffset uint32, nameOffset []int32) (pctab []byte, pcdataOffs [][]uint32, _funcs []_func) {
|
||||
_funcs = make([]_func, len(funcs))
|
||||
|
||||
// Pctab offsets of 0 are considered invalid in the runtime. We respect
|
||||
|
@ -538,7 +449,7 @@ func makePctab(funcs []Func, addr uintptr, cuOffset []uint32, nameOffset []int32
|
|||
_f.deferreturn = f.DeferReturn
|
||||
// NOTICE: _func.pcdata is always as [PCDATA_UnsafePoint(0) : PCDATA_ArgLiveIndex(3)]
|
||||
_f.npcdata = uint32(_N_PCDATA)
|
||||
_f.cuOffset = cuOffset[i]
|
||||
_f.cuOffset = cuOffset
|
||||
_f.funcID = f.ID
|
||||
_f.nfuncdata = uint8(_N_FUNCDATA)
|
||||
}
|
158 vendor/github.com/bytedance/sonic/loader/funcdata_go116.go generated vendored
@ -20,9 +20,9 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
`encoding`
|
||||
`os`
|
||||
`unsafe`
|
||||
`sort`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
@ -171,99 +171,7 @@ type compilationUnit struct {
|
|||
fileNames []string
|
||||
}
|
||||
|
||||
// func name table format:
|
||||
// nameOff[0] -> namePartA namePartB namePartC \x00
|
||||
// nameOff[1] -> namePartA namePartB namePartC \x00
|
||||
// ...
|
||||
func makeFuncnameTab(funcs []Func) (tab []byte, offs []int32) {
|
||||
offs = make([]int32, len(funcs))
|
||||
offset := 0
|
||||
|
||||
for i, f := range funcs {
|
||||
offs[i] = int32(offset)
|
||||
|
||||
a, b, c := funcNameParts(f.Name)
|
||||
tab = append(tab, a...)
|
||||
tab = append(tab, b...)
|
||||
tab = append(tab, c...)
|
||||
tab = append(tab, 0)
|
||||
offset += len(a) + len(b) + len(c) + 1
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// CU table format:
|
||||
// cuOffsets[0] -> filetabOffset[0] filetabOffset[1] ... filetabOffset[len(CUs[0].fileNames)-1]
|
||||
// cuOffsets[1] -> filetabOffset[len(CUs[0].fileNames)] ... filetabOffset[len(CUs[0].fileNames) + len(CUs[1].fileNames)-1]
|
||||
// ...
|
||||
//
|
||||
// file name table format:
|
||||
// filetabOffset[0] -> CUs[0].fileNames[0] \x00
|
||||
// ...
|
||||
// filetabOffset[len(CUs[0]-1)] -> CUs[0].fileNames[len(CUs[0].fileNames)-1] \x00
|
||||
// ...
|
||||
// filetabOffset[SUM(CUs,fileNames)-1] -> CUs[len(CU)-1].fileNames[len(CUs[len(CU)-1].fileNames)-1] \x00
|
||||
func makeFilenametab(cus []compilationUnit) (cutab []uint32, filetab []byte, cuOffsets []uint32) {
|
||||
cuOffsets = make([]uint32, len(cus))
|
||||
cuOffset := 0
|
||||
fileOffset := 0
|
||||
|
||||
for i, cu := range cus {
|
||||
cuOffsets[i] = uint32(cuOffset)
|
||||
|
||||
for _, name := range cu.fileNames {
|
||||
cutab = append(cutab, uint32(fileOffset))
|
||||
|
||||
fileOffset += len(name) + 1
|
||||
filetab = append(filetab, name...)
|
||||
filetab = append(filetab, 0)
|
||||
}
|
||||
|
||||
cuOffset += len(cu.fileNames)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func writeFuncdata(out *[]byte, funcs []Func) (fstart int, funcdataOffs [][]uint32) {
|
||||
fstart = len(*out)
|
||||
*out = append(*out, byte(0))
|
||||
offs := uint32(1)
|
||||
|
||||
funcdataOffs = make([][]uint32, len(funcs))
|
||||
for i, f := range funcs {
|
||||
|
||||
var writer = func(fd encoding.BinaryMarshaler) {
|
||||
var ab []byte
|
||||
var err error
|
||||
if fd != nil {
|
||||
ab, err = fd.MarshalBinary()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], offs)
|
||||
} else {
|
||||
ab = []byte{0}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], _INVALID_FUNCDATA_OFFSET)
|
||||
}
|
||||
*out = append(*out, ab...)
|
||||
offs += uint32(len(ab))
|
||||
}
|
||||
|
||||
writer(f.ArgsPointerMaps)
|
||||
writer(f.LocalsPointerMaps)
|
||||
writer(f.StackObjects)
|
||||
writer(f.InlTree)
|
||||
writer(f.OpenCodedDeferInfo)
|
||||
writer(f.ArgInfo)
|
||||
writer(f.ArgLiveInfo)
|
||||
writer(f.WrapInfo)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func makeFtab(funcs []_func, lastFuncSize uint32) (ftab []funcTab, pclntabSize int64, startLocations []uint32) {
|
||||
func makeFtab(funcs []_func, maxpc uintptr) (ftab []funcTab, pclntabSize int64, startLocations []uint32) {
|
||||
// Allocate space for the pc->func table. This structure consists of a pc offset
|
||||
// and an offset to the func structure. After that, we have a single pc
|
||||
// value that marks the end of the last function in the binary.
|
||||
|
@ -283,14 +191,13 @@ func makeFtab(funcs []_func, lastFuncSize uint32) (ftab []funcTab, pclntabSize i
|
|||
}
|
||||
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
ftab = append(ftab, funcTab{lastFunc.entry + uintptr(lastFuncSize), 0})
|
||||
ftab = append(ftab, funcTab{maxpc, 0})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Pcln table format: [...]funcTab + [...]_Func
|
||||
func makePclntable(size int64, startLocations []uint32, funcs []_func, lastFuncSize uint32, pcdataOffs [][]uint32, funcdataAddr uintptr, funcdataOffs [][]uint32) (pclntab []byte) {
|
||||
func makePclntable(size int64, startLocations []uint32, funcs []_func, maxpc uintptr, pcdataOffs [][]uint32, funcdataAddr uintptr, funcdataOffs [][]uint32) (pclntab []byte) {
|
||||
pclntab = make([]byte, size, size)
|
||||
|
||||
// write a map of pc->func info offsets
|
||||
|
@ -301,8 +208,7 @@ func makePclntable(size int64, startLocations []uint32, funcs []_func, lastFuncS
|
|||
offs += 16
|
||||
}
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
byteOrder.PutUint64(pclntab[offs:offs+8], uint64(lastFunc.entry)+uint64(lastFuncSize))
|
||||
byteOrder.PutUint64(pclntab[offs:offs+8], uint64(maxpc))
|
||||
offs += 8
|
||||
|
||||
// write func info table
|
||||
|
@ -374,21 +280,22 @@ func writeFindfunctab(out *[]byte, ftab []funcTab) (start int) {
|
|||
tab := make([]findfuncbucket, 0, nbuckets)
|
||||
var s, e = 0, 0
|
||||
for i := 0; i<int(nbuckets); i++ {
|
||||
var pc = min + uintptr((i+1)*_BUCKETSIZE)
|
||||
// find the end func of the bucket
|
||||
for ; e < len(ftab)-1 && ftab[e+1].entry <= pc; e++ {}
|
||||
// store the start func of the bucket
|
||||
var fb = findfuncbucket{idx: uint32(s)}
|
||||
|
||||
// find the last e-th func of the bucket
|
||||
var pc = min + uintptr((i+1)*_BUCKETSIZE)
|
||||
for ; e < len(ftab)-1 && ftab[e+1].entry <= pc; e++ {}
|
||||
|
||||
for j := 0; j<_SUBBUCKETS && (i*_SUBBUCKETS+j)<int(n); j++ {
|
||||
pc = min + uintptr(i*_BUCKETSIZE) + uintptr((j+1)*_SUB_BUCKETSIZE)
|
||||
var ss = s
|
||||
// find the end func of the subbucket
|
||||
for ; ss < len(ftab)-1 && ftab[ss+1].entry <= pc; ss++ {}
|
||||
// store the start func of the subbucket
|
||||
fb._SUBBUCKETS[j] = byte(uint32(s) - fb.idx)
|
||||
s = ss
|
||||
|
||||
// find the s-th end func of the subbucket
|
||||
pc = min + uintptr(i*_BUCKETSIZE) + uintptr((j+1)*_SUB_BUCKETSIZE)
|
||||
for ; s < len(ftab)-1 && ftab[s+1].entry <= pc; s++ {}
|
||||
}
|
||||
|
||||
s = e
|
||||
tab = append(tab, fb)
|
||||
}
|
||||
|
@ -401,15 +308,20 @@ func writeFindfunctab(out *[]byte, ftab []funcTab) (start int) {
|
|||
return
|
||||
}
|
||||
|
||||
func makeModuledata(name string, filenames []string, funcs []Func, text []byte) (mod *moduledata) {
|
||||
func makeModuledata(name string, filenames []string, funcsp *[]Func, text []byte) (mod *moduledata) {
|
||||
mod = new(moduledata)
|
||||
mod.modulename = name
|
||||
|
||||
// sort funcs by entry
|
||||
funcs := *funcsp
|
||||
sort.Slice(funcs, func(i, j int) bool {
|
||||
return funcs[i].EntryOff < funcs[j].EntryOff
|
||||
})
|
||||
*funcsp = funcs
|
||||
|
||||
// make filename table
|
||||
cu := make([]string, 0, len(filenames))
|
||||
for _, f := range filenames {
|
||||
cu = append(cu, f)
|
||||
}
|
||||
cu = append(cu, filenames...)
|
||||
cutab, filetab, cuOffs := makeFilenametab([]compilationUnit{{cu}})
|
||||
mod.cutab = cutab
|
||||
mod.filetab = filetab
|
||||
|
@ -428,9 +340,16 @@ funcnametab, nameOffs := makeFuncnameTab(funcs)
|
|||
// make it executable
|
||||
mprotect(addr, size)
|
||||
|
||||
// assign addresses
|
||||
mod.text = addr
|
||||
mod.etext = addr + uintptr(size)
|
||||
mod.minpc = addr
|
||||
mod.maxpc = addr + uintptr(len(text))
|
||||
|
||||
// make pcdata table
|
||||
// NOTICE: _func only use offset to index pcdata, thus no need mmap() pcdata
|
||||
pctab, pcdataOffs, _funcs := makePctab(funcs, addr, cuOffs, nameOffs)
|
||||
cuOff := cuOffs[0]
|
||||
pctab, pcdataOffs, _funcs := makePctab(funcs, addr, cuOff, nameOffs)
|
||||
mod.pctab = pctab
|
||||
|
||||
// write func data
|
||||
|
@ -440,8 +359,7 @@ funcnametab, nameOffs := makeFuncnameTab(funcs)
|
|||
fstart, funcdataOffs := writeFuncdata(&cache, funcs)
|
||||
|
||||
// make pc->func (binary search) func table
|
||||
lastFuncsize := funcs[len(funcs)-1].TextSize
|
||||
ftab, pclntSize, startLocations := makeFtab(_funcs, lastFuncsize)
|
||||
ftab, pclntSize, startLocations := makeFtab(_funcs, mod.maxpc)
|
||||
mod.ftab = ftab
|
||||
|
||||
// write pc->func (modmap) findfunc table
|
||||
|
@ -455,15 +373,9 @@ funcnametab, nameOffs := makeFuncnameTab(funcs)
|
|||
funcdataAddr := uintptr(rt.IndexByte(cache, fstart))
|
||||
|
||||
// make pclnt table
|
||||
pclntab := makePclntable(pclntSize, startLocations, _funcs, lastFuncsize, pcdataOffs, funcdataAddr, funcdataOffs)
|
||||
pclntab := makePclntable(pclntSize, startLocations, _funcs, mod.maxpc, pcdataOffs, funcdataAddr, funcdataOffs)
|
||||
mod.pclntable = pclntab
|
||||
|
||||
// assign addresses
|
||||
mod.text = addr
|
||||
mod.etext = addr + uintptr(size)
|
||||
mod.minpc = addr
|
||||
mod.maxpc = addr + uintptr(len(text))
|
||||
|
||||
// make pc header
|
||||
mod.pcHeader = &pcHeader {
|
||||
magic : _Magic,
|
||||
|
@ -487,7 +399,7 @@ funcnameOffset: getOffsetOf(moduledata{}, "funcnametab"),
|
|||
|
||||
// makePctab generates pcdelta->valuedelta tables for functions,
|
||||
// and returns the table and the entry offset of every kind pcdata in the table.
|
||||
func makePctab(funcs []Func, addr uintptr, cuOffset []uint32, nameOffset []int32) (pctab []byte, pcdataOffs [][]uint32, _funcs []_func) {
|
||||
func makePctab(funcs []Func, addr uintptr, cuOffset uint32, nameOffset []int32) (pctab []byte, pcdataOffs [][]uint32, _funcs []_func) {
|
||||
_funcs = make([]_func, len(funcs))
|
||||
|
||||
// Pctab offsets of 0 are considered invalid in the runtime. We respect
|
||||
|
@ -538,7 +450,7 @@ func makePctab(funcs []Func, addr uintptr, cuOffset []uint32, nameOffset []int32
|
|||
_f.deferreturn = f.DeferReturn
|
||||
// NOTICE: _func.pcdata is always as [PCDATA_UnsafePoint(0) : PCDATA_ArgLiveIndex(3)]
|
||||
_f.npcdata = uint32(_N_PCDATA)
|
||||
_f.cuOffset = cuOffset[i]
|
||||
_f.cuOffset = cuOffset
|
||||
_f.funcID = f.ID
|
||||
_f.nfuncdata = uint8(_N_FUNCDATA)
|
||||
}
|
||||
|
|
430 vendor/github.com/bytedance/sonic/loader/funcdata_go118.go generated vendored
@ -1,4 +1,5 @@
|
|||
// go:build go1.18 && !go1.20
|
||||
//go:build go1.18 && !go1.20
|
||||
// +build go1.18,!go1.20
|
||||
|
||||
/*
|
||||
|
@ -20,10 +21,6 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
`encoding`
|
||||
`os`
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
|
@ -31,21 +28,6 @@
|
|||
_Magic uint32 = 0xfffffff0
|
||||
)
|
||||
|
||||
type pcHeader struct {
|
||||
magic uint32 // 0xFFFFFFF0
|
||||
pad1, pad2 uint8 // 0,0
|
||||
minLC uint8 // min instruction size
|
||||
ptrSize uint8 // size of a ptr in bytes
|
||||
nfunc int // number of functions in the module
|
||||
nfiles uint // number of entries in the file tab
|
||||
textStart uintptr // base for function entry PC offsets in this module, equal to moduledata.text
|
||||
funcnameOffset uintptr // offset to the funcnametab variable from pcHeader
|
||||
cuOffset uintptr // offset to the cutab variable from pcHeader
|
||||
filetabOffset uintptr // offset to the filetab variable from pcHeader
|
||||
pctabOffset uintptr // offset to the pctab variable from pcHeader
|
||||
pclnOffset uintptr // offset to the pclntab variable from pcHeader
|
||||
}
|
||||
|
||||
type moduledata struct {
|
||||
pcHeader *pcHeader
|
||||
funcnametab []byte
|
||||
|
@ -129,413 +111,3 @@ funcID uint8 // set for certain special runtime functions
|
|||
//
|
||||
// funcdata [nfuncdata]uint32
|
||||
}
|
||||
|
||||
type funcTab struct {
|
||||
entry uint32
|
||||
funcoff uint32
|
||||
}
|
||||
|
||||
type bitVector struct {
|
||||
n int32 // # of bits
|
||||
bytedata *uint8
|
||||
}
|
||||
|
||||
type ptabEntry struct {
|
||||
name int32
|
||||
typ int32
|
||||
}
|
||||
|
||||
type textSection struct {
|
||||
vaddr uintptr // prelinked section vaddr
|
||||
end uintptr // vaddr + section length
|
||||
baseaddr uintptr // relocated section address
|
||||
}
|
||||
|
||||
type modulehash struct {
|
||||
modulename string
|
||||
linktimehash string
|
||||
runtimehash *string
|
||||
}
|
||||
|
||||
// findfuncbucket is an array of these structures.
|
||||
// Each bucket represents 4096 bytes of the text segment.
|
||||
// Each subbucket represents 256 bytes of the text segment.
|
||||
// To find a function given a pc, locate the bucket and subbucket for
|
||||
// that pc. Add together the idx and subbucket value to obtain a
|
||||
// function index. Then scan the functab array starting at that
|
||||
// index to find the target function.
|
||||
// This table uses 20 bytes for every 4096 bytes of code, or ~0.5% overhead.
|
||||
type findfuncbucket struct {
|
||||
idx uint32
|
||||
_SUBBUCKETS [16]byte
|
||||
}
|
||||
|
||||
// func name table format:
|
||||
// nameOff[0] -> namePartA namePartB namePartC \x00
|
||||
// nameOff[1] -> namePartA namePartB namePartC \x00
|
||||
// ...
|
||||
func makeFuncnameTab(funcs []Func) (tab []byte, offs []int32) {
|
||||
offs = make([]int32, len(funcs))
|
||||
offset := 0
|
||||
|
||||
for i, f := range funcs {
|
||||
offs[i] = int32(offset)
|
||||
|
||||
a, b, c := funcNameParts(f.Name)
|
||||
tab = append(tab, a...)
|
||||
tab = append(tab, b...)
|
||||
tab = append(tab, c...)
|
||||
tab = append(tab, 0)
|
||||
offset += len(a) + len(b) + len(c) + 1
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
type compilationUnit struct {
|
||||
fileNames []string
|
||||
}
|
||||
|
||||
// CU table format:
|
||||
// cuOffsets[0] -> filetabOffset[0] filetabOffset[1] ... filetabOffset[len(CUs[0].fileNames)-1]
|
||||
// cuOffsets[1] -> filetabOffset[len(CUs[0].fileNames)] ... filetabOffset[len(CUs[0].fileNames) + len(CUs[1].fileNames)-1]
|
||||
// ...
|
||||
//
|
||||
// file name table format:
|
||||
// filetabOffset[0] -> CUs[0].fileNames[0] \x00
|
||||
// ...
|
||||
// filetabOffset[len(CUs[0]-1)] -> CUs[0].fileNames[len(CUs[0].fileNames)-1] \x00
|
||||
// ...
|
||||
// filetabOffset[SUM(CUs,fileNames)-1] -> CUs[len(CU)-1].fileNames[len(CUs[len(CU)-1].fileNames)-1] \x00
|
||||
func makeFilenametab(cus []compilationUnit) (cutab []uint32, filetab []byte, cuOffsets []uint32) {
|
||||
cuOffsets = make([]uint32, len(cus))
|
||||
cuOffset := 0
|
||||
fileOffset := 0
|
||||
|
||||
for i, cu := range cus {
|
||||
cuOffsets[i] = uint32(cuOffset)
|
||||
|
||||
for _, name := range cu.fileNames {
|
||||
cutab = append(cutab, uint32(fileOffset))
|
||||
|
||||
fileOffset += len(name) + 1
|
||||
filetab = append(filetab, name...)
|
||||
filetab = append(filetab, 0)
|
||||
}
|
||||
|
||||
cuOffset += len(cu.fileNames)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func writeFuncdata(out *[]byte, funcs []Func) (fstart int, funcdataOffs [][]uint32) {
|
||||
fstart = len(*out)
|
||||
*out = append(*out, byte(0))
|
||||
offs := uint32(1)
|
||||
|
||||
funcdataOffs = make([][]uint32, len(funcs))
|
||||
for i, f := range funcs {
|
||||
|
||||
var writer = func(fd encoding.BinaryMarshaler) {
|
||||
var ab []byte
|
||||
var err error
|
||||
if fd != nil {
|
||||
ab, err = fd.MarshalBinary()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], offs)
|
||||
} else {
|
||||
ab = []byte{0}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], _INVALID_FUNCDATA_OFFSET)
|
||||
}
|
||||
*out = append(*out, ab...)
|
||||
offs += uint32(len(ab))
|
||||
}
|
||||
|
||||
writer(f.ArgsPointerMaps)
|
||||
writer(f.LocalsPointerMaps)
|
||||
writer(f.StackObjects)
|
||||
writer(f.InlTree)
|
||||
writer(f.OpenCodedDeferInfo)
|
||||
writer(f.ArgInfo)
|
||||
writer(f.ArgLiveInfo)
|
||||
writer(f.WrapInfo)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func makeFtab(funcs []_func, lastFuncSize uint32) (ftab []funcTab) {
|
||||
// Allocate space for the pc->func table. This structure consists of a pc offset
|
||||
// and an offset to the func structure. After that, we have a single pc
|
||||
// value that marks the end of the last function in the binary.
|
||||
var size int64 = int64(len(funcs)*2*4 + 4)
|
||||
var startLocations = make([]uint32, len(funcs))
|
||||
for i, f := range funcs {
|
||||
size = rnd(size, int64(_PtrSize))
|
||||
//writePCToFunc
|
||||
startLocations[i] = uint32(size)
|
||||
size += int64(uint8(_FUNC_SIZE)+f.nfuncdata*4+uint8(f.npcdata)*4)
|
||||
}
|
||||
|
||||
ftab = make([]funcTab, 0, len(funcs)+1)
|
||||
|
||||
// write a map of pc->func info offsets
|
||||
for i, f := range funcs {
|
||||
ftab = append(ftab, funcTab{uint32(f.entryOff), uint32(startLocations[i])})
|
||||
}
|
||||
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
ftab = append(ftab, funcTab{uint32(lastFunc.entryOff + lastFuncSize), 0})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Pcln table format: [...]funcTab + [...]_Func
|
||||
func makePclntable(funcs []_func, lastFuncSize uint32, pcdataOffs [][]uint32, funcdataOffs [][]uint32) (pclntab []byte) {
|
||||
// Allocate space for the pc->func table. This structure consists of a pc offset
|
||||
// and an offset to the func structure. After that, we have a single pc
|
||||
// value that marks the end of the last function in the binary.
|
||||
var size int64 = int64(len(funcs)*2*4 + 4)
|
||||
var startLocations = make([]uint32, len(funcs))
|
||||
for i := range funcs {
|
||||
size = rnd(size, int64(_PtrSize))
|
||||
//writePCToFunc
|
||||
startLocations[i] = uint32(size)
|
||||
size += int64(int(_FUNC_SIZE)+len(funcdataOffs[i])*4+len(pcdataOffs[i])*4)
|
||||
}
|
||||
|
||||
pclntab = make([]byte, size, size)
|
||||
|
||||
// write a map of pc->func info offsets
|
||||
offs := 0
|
||||
for i, f := range funcs {
|
||||
byteOrder.PutUint32(pclntab[offs:offs+4], uint32(f.entryOff))
|
||||
byteOrder.PutUint32(pclntab[offs+4:offs+8], uint32(startLocations[i]))
|
||||
offs += 8
|
||||
}
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
byteOrder.PutUint32(pclntab[offs:offs+4], uint32(lastFunc.entryOff+lastFuncSize))
|
||||
|
||||
// write func info table
|
||||
for i, f := range funcs {
|
||||
off := startLocations[i]
|
||||
|
||||
// write _func structure to pclntab
|
||||
fb := rt.BytesFrom(unsafe.Pointer(&f), int(_FUNC_SIZE), int(_FUNC_SIZE))
|
||||
copy(pclntab[off:off+uint32(_FUNC_SIZE)], fb)
|
||||
off += uint32(_FUNC_SIZE)
|
||||
|
||||
// NOTICE: _func.pcdata always starts from PcUnsafePoint, which is index 3
|
||||
for j := 3; j < len(pcdataOffs[i]); j++ {
|
||||
byteOrder.PutUint32(pclntab[off:off+4], uint32(pcdataOffs[i][j]))
|
||||
off += 4
|
||||
}
|
||||
|
||||
// funcdata refs as offsets from gofunc
|
||||
for _, funcdata := range funcdataOffs[i] {
|
||||
byteOrder.PutUint32(pclntab[off:off+4], uint32(funcdata))
|
||||
off += 4
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// findfunc table used to map pc to belonging func,
|
||||
// returns the index in the func table.
|
||||
//
|
||||
// All text section are divided into buckets sized _BUCKETSIZE(4K):
|
||||
// every bucket is divided into _SUBBUCKETS sized _SUB_BUCKETSIZE(64),
|
||||
// and it has a base idx to plus the offset stored in jth subbucket.
|
||||
// see findfunc() in runtime/symtab.go
|
||||
func writeFindfunctab(out *[]byte, ftab []funcTab) (start int) {
|
||||
start = len(*out)
|
||||
|
||||
max := ftab[len(ftab)-1].entry
|
||||
min := ftab[0].entry
|
||||
nbuckets := (max - min + _BUCKETSIZE - 1) / _BUCKETSIZE
|
||||
n := (max - min + _SUB_BUCKETSIZE - 1) / _SUB_BUCKETSIZE
|
||||
|
||||
tab := make([]findfuncbucket, 0, nbuckets)
|
||||
var s, e = 0, 0
|
||||
for i := 0; i<int(nbuckets); i++ {
|
||||
var pc = min + uint32((i+1)*_BUCKETSIZE)
|
||||
// find the end func of the bucket
|
||||
for ; e < len(ftab)-1 && ftab[e+1].entry <= pc; e++ {}
|
||||
// store the start func of the bucket
|
||||
var fb = findfuncbucket{idx: uint32(s)}
|
||||
|
||||
for j := 0; j<_SUBBUCKETS && (i*_SUBBUCKETS+j)<int(n); j++ {
|
||||
pc = min + uint32(i*_BUCKETSIZE) + uint32((j+1)*_SUB_BUCKETSIZE)
|
||||
var ss = s
|
||||
// find the end func of the subbucket
|
||||
for ; ss < len(ftab)-1 && ftab[ss+1].entry <= pc; ss++ {}
|
||||
// store the start func of the subbucket
|
||||
fb._SUBBUCKETS[j] = byte(uint32(s) - fb.idx)
|
||||
s = ss
|
||||
}
|
||||
s = e
|
||||
tab = append(tab, fb)
|
||||
}
|
||||
|
||||
// write findfuncbucket
|
||||
if len(tab) > 0 {
|
||||
size := int(unsafe.Sizeof(findfuncbucket{}))*len(tab)
|
||||
*out = append(*out, rt.BytesFrom(unsafe.Pointer(&tab[0]), size, size)...)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func makeModuledata(name string, filenames []string, funcs []Func, text []byte) (mod *moduledata) {
|
||||
mod = new(moduledata)
|
||||
mod.modulename = name
|
||||
|
||||
// make filename table
|
||||
cu := make([]string, 0, len(filenames))
|
||||
for _, f := range filenames {
|
||||
cu = append(cu, f)
|
||||
}
|
||||
cutab, filetab, cuOffs := makeFilenametab([]compilationUnit{{cu}})
|
||||
mod.cutab = cutab
|
||||
mod.filetab = filetab
|
||||
|
||||
// make funcname table
|
||||
funcnametab, nameOffs := makeFuncnameTab(funcs)
|
||||
mod.funcnametab = funcnametab
|
||||
|
||||
// make pcdata table
|
||||
// NOTICE: _func only use offset to index pcdata, thus no need mmap() pcdata
|
||||
pctab, pcdataOffs, _funcs := makePctab(funcs, cuOffs, nameOffs)
|
||||
mod.pctab = pctab
|
||||
|
||||
// write func data
|
||||
// NOTICE: _func use mod.gofunc+offset to directly point funcdata, thus need cache funcdata
|
||||
// TODO: estimate accurate capacity
|
||||
cache := make([]byte, 0, len(funcs)*int(_PtrSize))
|
||||
fstart, funcdataOffs := writeFuncdata(&cache, funcs)
|
||||
|
||||
// make pc->func (binary search) func table
|
||||
lastFuncsize := funcs[len(funcs)-1].TextSize
|
||||
ftab := makeFtab(_funcs, lastFuncsize)
|
||||
mod.ftab = ftab
|
||||
|
||||
// write pc->func (modmap) findfunc table
|
||||
ffstart := writeFindfunctab(&cache, ftab)
|
||||
|
||||
// make pclnt table
|
||||
pclntab := makePclntable(_funcs, lastFuncsize, pcdataOffs, funcdataOffs)
|
||||
mod.pclntable = pclntab
|
||||
|
||||
// mmap() text and funcdata segements
|
||||
p := os.Getpagesize()
|
||||
size := int(rnd(int64(len(text)), int64(p)))
|
||||
addr := mmap(size)
|
||||
// copy the machine code
|
||||
s := rt.BytesFrom(unsafe.Pointer(addr), len(text), size)
|
||||
copy(s, text)
|
||||
// make it executable
|
||||
mprotect(addr, size)
|
||||
|
||||
// assign addresses
|
||||
mod.text = addr
|
||||
mod.etext = addr + uintptr(size)
|
||||
mod.minpc = addr
|
||||
mod.maxpc = addr + uintptr(len(text))
|
||||
|
||||
// cache funcdata and findfuncbucket
|
||||
moduleCache.Lock()
|
||||
moduleCache.m[mod] = cache
|
||||
moduleCache.Unlock()
|
||||
mod.gofunc = uintptr(unsafe.Pointer(&cache[fstart]))
|
||||
mod.findfunctab = uintptr(unsafe.Pointer(&cache[ffstart]))
|
||||
|
||||
// make pc header
|
||||
mod.pcHeader = &pcHeader {
|
||||
magic : _Magic,
|
||||
minLC : _MinLC,
|
||||
ptrSize : _PtrSize,
|
||||
nfunc : len(funcs),
|
||||
nfiles: uint(len(cu)),
|
||||
textStart: mod.text,
|
||||
funcnameOffset: getOffsetOf(moduledata{}, "funcnametab"),
|
||||
cuOffset: getOffsetOf(moduledata{}, "cutab"),
|
||||
filetabOffset: getOffsetOf(moduledata{}, "filetab"),
|
||||
pctabOffset: getOffsetOf(moduledata{}, "pctab"),
|
||||
pclnOffset: getOffsetOf(moduledata{}, "pclntable"),
|
||||
}
|
||||
|
||||
// sepecial case: gcdata and gcbss must by non-empty
|
||||
mod.gcdata = uintptr(unsafe.Pointer(&emptyByte))
|
||||
mod.gcbss = uintptr(unsafe.Pointer(&emptyByte))
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// makePctab generates pcdelta->valuedelta tables for functions,
|
||||
// and returns the table and the entry offset of every kind pcdata in the table.
|
||||
func makePctab(funcs []Func, cuOffset []uint32, nameOffset []int32) (pctab []byte, pcdataOffs [][]uint32, _funcs []_func) {
|
||||
_funcs = make([]_func, len(funcs))
|
||||
|
||||
// Pctab offsets of 0 are considered invalid in the runtime. We respect
|
||||
// that by just padding a single byte at the beginning of runtime.pctab,
|
||||
// that way no real offsets can be zero.
|
||||
pctab = make([]byte, 1, 12*len(funcs)+1)
|
||||
pcdataOffs = make([][]uint32, len(funcs))
|
||||
|
||||
for i, f := range funcs {
|
||||
_f := &_funcs[i]
|
||||
|
||||
var writer = func(pc *Pcdata) {
|
||||
var ab []byte
|
||||
var err error
|
||||
if pc != nil {
|
||||
ab, err = pc.MarshalBinary()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
pcdataOffs[i] = append(pcdataOffs[i], uint32(len(pctab)))
|
||||
} else {
|
||||
ab = []byte{0}
|
||||
pcdataOffs[i] = append(pcdataOffs[i], _PCDATA_INVALID_OFFSET)
|
||||
}
|
||||
pctab = append(pctab, ab...)
|
||||
}
|
||||
|
||||
if f.Pcsp != nil {
|
||||
_f.pcsp = uint32(len(pctab))
|
||||
}
|
||||
writer(f.Pcsp)
|
||||
if f.Pcfile != nil {
|
||||
_f.pcfile = uint32(len(pctab))
|
||||
}
|
||||
writer(f.Pcfile)
|
||||
if f.Pcline != nil {
|
||||
_f.pcln = uint32(len(pctab))
|
||||
}
|
||||
writer(f.Pcline)
|
||||
writer(f.PcUnsafePoint)
|
||||
writer(f.PcStackMapIndex)
|
||||
writer(f.PcInlTreeIndex)
|
||||
writer(f.PcArgLiveIndex)
|
||||
|
||||
_f.entryOff = f.EntryOff
|
||||
_f.nameOff = nameOffset[i]
|
||||
_f.args = f.ArgsSize
|
||||
_f.deferreturn = f.DeferReturn
|
||||
// NOTICE: _func.pcdata is always as [PCDATA_UnsafePoint(0) : PCDATA_ArgLiveIndex(3)]
|
||||
_f.npcdata = uint32(_N_PCDATA)
|
||||
_f.cuOffset = cuOffset[i]
|
||||
_f.funcID = f.ID
|
||||
_f.flag = f.Flag
|
||||
_f.nfuncdata = uint8(_N_FUNCDATA)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argptrs uintptr, localptrs uintptr) {}
|
431 vendor/github.com/bytedance/sonic/loader/funcdata_go120.go generated vendored
@ -20,10 +20,6 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
`encoding`
|
||||
`os`
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
|
@ -51,8 +47,6 @@ funcnametab []byte
|
|||
end, gcdata, gcbss uintptr
|
||||
types, etypes uintptr
|
||||
rodata uintptr
|
||||
|
||||
// TODO: generate funcinfo object to memory
|
||||
gofunc uintptr // go.func.* is actual funcinfo object in image
|
||||
|
||||
textsectmap []textSection // see runtime/symtab.go: textAddr()
|
||||
|
@ -118,428 +112,3 @@ funcID uint8 // set for certain special runtime functions
|
|||
//
|
||||
// funcdata [nfuncdata]uint32
|
||||
}
|
||||
|
||||
type funcTab struct {
|
||||
entry uint32
|
||||
funcoff uint32
|
||||
}
|
||||
|
||||
type pcHeader struct {
|
||||
magic uint32 // 0xFFFFFFF0
|
||||
pad1, pad2 uint8 // 0,0
|
||||
minLC uint8 // min instruction size
|
||||
ptrSize uint8 // size of a ptr in bytes
|
||||
nfunc int // number of functions in the module
|
||||
nfiles uint // number of entries in the file tab
|
||||
textStart uintptr // base for function entry PC offsets in this module, equal to moduledata.text
|
||||
funcnameOffset uintptr // offset to the funcnametab variable from pcHeader
|
||||
cuOffset uintptr // offset to the cutab variable from pcHeader
|
||||
filetabOffset uintptr // offset to the filetab variable from pcHeader
|
||||
pctabOffset uintptr // offset to the pctab variable from pcHeader
|
||||
pclnOffset uintptr // offset to the pclntab variable from pcHeader
|
||||
}
|
||||
|
||||
type bitVector struct {
|
||||
n int32 // # of bits
|
||||
bytedata *uint8
|
||||
}
|
||||
|
||||
type ptabEntry struct {
|
||||
name int32
|
||||
typ int32
|
||||
}
|
||||
|
||||
type textSection struct {
|
||||
vaddr uintptr // prelinked section vaddr
|
||||
end uintptr // vaddr + section length
|
||||
baseaddr uintptr // relocated section address
|
||||
}
|
||||
|
||||
type modulehash struct {
|
||||
modulename string
|
||||
linktimehash string
|
||||
runtimehash *string
|
||||
}
|
||||
|
||||
// findfuncbucket is an array of these structures.
|
||||
// Each bucket represents 4096 bytes of the text segment.
|
||||
// Each subbucket represents 256 bytes of the text segment.
|
||||
// To find a function given a pc, locate the bucket and subbucket for
|
||||
// that pc. Add together the idx and subbucket value to obtain a
|
||||
// function index. Then scan the functab array starting at that
|
||||
// index to find the target function.
|
||||
// This table uses 20 bytes for every 4096 bytes of code, or ~0.5% overhead.
|
||||
type findfuncbucket struct {
|
||||
idx uint32
|
||||
_SUBBUCKETS [16]byte
|
||||
}
|
||||
|
||||
// func name table format:
|
||||
// nameOff[0] -> namePartA namePartB namePartC \x00
|
||||
// nameOff[1] -> namePartA namePartB namePartC \x00
|
||||
// ...
|
||||
func makeFuncnameTab(funcs []Func) (tab []byte, offs []int32) {
|
||||
offs = make([]int32, len(funcs))
|
||||
offset := 0
|
||||
|
||||
for i, f := range funcs {
|
||||
offs[i] = int32(offset)
|
||||
|
||||
a, b, c := funcNameParts(f.Name)
|
||||
tab = append(tab, a...)
|
||||
tab = append(tab, b...)
|
||||
tab = append(tab, c...)
|
||||
tab = append(tab, 0)
|
||||
offset += len(a) + len(b) + len(c) + 1
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
type compilationUnit struct {
|
||||
fileNames []string
|
||||
}
|
||||
|
||||
// CU table format:
|
||||
// cuOffsets[0] -> filetabOffset[0] filetabOffset[1] ... filetabOffset[len(CUs[0].fileNames)-1]
|
||||
// cuOffsets[1] -> filetabOffset[len(CUs[0].fileNames)] ... filetabOffset[len(CUs[0].fileNames) + len(CUs[1].fileNames)-1]
|
||||
// ...
|
||||
//
|
||||
// file name table format:
|
||||
// filetabOffset[0] -> CUs[0].fileNames[0] \x00
|
||||
// ...
|
||||
// filetabOffset[len(CUs[0]-1)] -> CUs[0].fileNames[len(CUs[0].fileNames)-1] \x00
|
||||
// ...
|
||||
// filetabOffset[SUM(CUs,fileNames)-1] -> CUs[len(CU)-1].fileNames[len(CUs[len(CU)-1].fileNames)-1] \x00
|
||||
func makeFilenametab(cus []compilationUnit) (cutab []uint32, filetab []byte, cuOffsets []uint32) {
|
||||
cuOffsets = make([]uint32, len(cus))
|
||||
cuOffset := 0
|
||||
fileOffset := 0
|
||||
|
||||
for i, cu := range cus {
|
||||
cuOffsets[i] = uint32(cuOffset)
|
||||
|
||||
for _, name := range cu.fileNames {
|
||||
cutab = append(cutab, uint32(fileOffset))
|
||||
|
||||
fileOffset += len(name) + 1
|
||||
filetab = append(filetab, name...)
|
||||
filetab = append(filetab, 0)
|
||||
}
|
||||
|
||||
cuOffset += len(cu.fileNames)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func writeFuncdata(out *[]byte, funcs []Func) (fstart int, funcdataOffs [][]uint32) {
|
||||
fstart = len(*out)
|
||||
*out = append(*out, byte(0))
|
||||
offs := uint32(1)
|
||||
|
||||
funcdataOffs = make([][]uint32, len(funcs))
|
||||
for i, f := range funcs {
|
||||
|
||||
var writer = func(fd encoding.BinaryMarshaler) {
|
||||
var ab []byte
|
||||
var err error
|
||||
if fd != nil {
|
||||
ab, err = fd.MarshalBinary()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], offs)
|
||||
} else {
|
||||
ab = []byte{0}
|
||||
funcdataOffs[i] = append(funcdataOffs[i], _INVALID_FUNCDATA_OFFSET)
|
||||
}
|
||||
*out = append(*out, ab...)
|
||||
offs += uint32(len(ab))
|
||||
}
|
||||
|
||||
writer(f.ArgsPointerMaps)
|
||||
writer(f.LocalsPointerMaps)
|
||||
writer(f.StackObjects)
|
||||
writer(f.InlTree)
|
||||
writer(f.OpenCodedDeferInfo)
|
||||
writer(f.ArgInfo)
|
||||
writer(f.ArgLiveInfo)
|
||||
writer(f.WrapInfo)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func makeFtab(funcs []_func, lastFuncSize uint32) (ftab []funcTab) {
|
||||
// Allocate space for the pc->func table. This structure consists of a pc offset
|
||||
// and an offset to the func structure. After that, we have a single pc
|
||||
// value that marks the end of the last function in the binary.
|
||||
var size int64 = int64(len(funcs)*2*4 + 4)
|
||||
var startLocations = make([]uint32, len(funcs))
|
||||
for i, f := range funcs {
|
||||
size = rnd(size, int64(_PtrSize))
|
||||
//writePCToFunc
|
||||
startLocations[i] = uint32(size)
|
||||
size += int64(uint8(_FUNC_SIZE)+f.nfuncdata*4+uint8(f.npcdata)*4)
|
||||
}
|
||||
|
||||
ftab = make([]funcTab, 0, len(funcs)+1)
|
||||
|
||||
// write a map of pc->func info offsets
|
||||
for i, f := range funcs {
|
||||
ftab = append(ftab, funcTab{uint32(f.entryOff), uint32(startLocations[i])})
|
||||
}
|
||||
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
ftab = append(ftab, funcTab{uint32(lastFunc.entryOff + lastFuncSize), 0})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Pcln table format: [...]funcTab + [...]_Func
|
||||
func makePclntable(funcs []_func, lastFuncSize uint32, pcdataOffs [][]uint32, funcdataOffs [][]uint32) (pclntab []byte) {
|
||||
// Allocate space for the pc->func table. This structure consists of a pc offset
|
||||
// and an offset to the func structure. After that, we have a single pc
|
||||
// value that marks the end of the last function in the binary.
|
||||
var size int64 = int64(len(funcs)*2*4 + 4)
|
||||
var startLocations = make([]uint32, len(funcs))
|
||||
for i := range funcs {
|
||||
size = rnd(size, int64(_PtrSize))
|
||||
//writePCToFunc
|
||||
startLocations[i] = uint32(size)
|
||||
size += int64(int(_FUNC_SIZE)+len(funcdataOffs[i])*4+len(pcdataOffs[i])*4)
|
||||
}
|
||||
|
||||
pclntab = make([]byte, size, size)
|
||||
|
||||
// write a map of pc->func info offsets
|
||||
offs := 0
|
||||
for i, f := range funcs {
|
||||
byteOrder.PutUint32(pclntab[offs:offs+4], uint32(f.entryOff))
|
||||
byteOrder.PutUint32(pclntab[offs+4:offs+8], uint32(startLocations[i]))
|
||||
offs += 8
|
||||
}
|
||||
// Final entry of table is just end pc offset.
|
||||
lastFunc := funcs[len(funcs)-1]
|
||||
byteOrder.PutUint32(pclntab[offs:offs+4], uint32(lastFunc.entryOff+lastFuncSize))
|
||||
|
||||
// write func info table
|
||||
for i, f := range funcs {
|
||||
off := startLocations[i]
|
||||
|
||||
// write _func structure to pclntab
|
||||
fb := rt.BytesFrom(unsafe.Pointer(&f), int(_FUNC_SIZE), int(_FUNC_SIZE))
|
||||
copy(pclntab[off:off+uint32(_FUNC_SIZE)], fb)
|
||||
off += uint32(_FUNC_SIZE)
|
||||
|
||||
// NOTICE: _func.pcdata always starts from PcUnsafePoint, which is index 3
|
||||
for j := 3; j < len(pcdataOffs[i]); j++ {
|
||||
byteOrder.PutUint32(pclntab[off:off+4], uint32(pcdataOffs[i][j]))
|
||||
off += 4
|
||||
}
|
||||
|
||||
// funcdata refs as offsets from gofunc
|
||||
for _, funcdata := range funcdataOffs[i] {
|
||||
byteOrder.PutUint32(pclntab[off:off+4], uint32(funcdata))
|
||||
off += 4
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// findfunc table used to map pc to belonging func,
|
||||
// returns the index in the func table.
|
||||
//
|
||||
// All text section are divided into buckets sized _BUCKETSIZE(4K):
|
||||
// every bucket is divided into _SUBBUCKETS sized _SUB_BUCKETSIZE(64),
|
||||
// and it has a base idx to plus the offset stored in jth subbucket.
|
||||
// see findfunc() in runtime/symtab.go
|
||||
func writeFindfunctab(out *[]byte, ftab []funcTab) (start int) {
|
||||
start = len(*out)
|
||||
|
||||
max := ftab[len(ftab)-1].entry
|
||||
min := ftab[0].entry
|
||||
nbuckets := (max - min + _BUCKETSIZE - 1) / _BUCKETSIZE
|
||||
n := (max - min + _SUB_BUCKETSIZE - 1) / _SUB_BUCKETSIZE
|
||||
|
||||
tab := make([]findfuncbucket, 0, nbuckets)
|
||||
var s, e = 0, 0
|
||||
for i := 0; i<int(nbuckets); i++ {
|
||||
var pc = min + uint32((i+1)*_BUCKETSIZE)
|
||||
// find the end func of the bucket
|
||||
for ; e < len(ftab)-1 && ftab[e+1].entry <= pc; e++ {}
|
||||
// store the start func of the bucket
|
||||
var fb = findfuncbucket{idx: uint32(s)}
|
||||
|
||||
for j := 0; j<_SUBBUCKETS && (i*_SUBBUCKETS+j)<int(n); j++ {
|
||||
pc = min + uint32(i*_BUCKETSIZE) + uint32((j+1)*_SUB_BUCKETSIZE)
|
||||
var ss = s
|
||||
// find the end func of the subbucket
|
||||
for ; ss < len(ftab)-1 && ftab[ss+1].entry <= pc; ss++ {}
|
||||
// store the start func of the subbucket
|
||||
fb._SUBBUCKETS[j] = byte(uint32(s) - fb.idx)
|
||||
s = ss
|
||||
}
|
||||
s = e
|
||||
tab = append(tab, fb)
|
||||
}
|
||||
|
||||
// write findfuncbucket
|
||||
if len(tab) > 0 {
|
||||
size := int(unsafe.Sizeof(findfuncbucket{}))*len(tab)
|
||||
*out = append(*out, rt.BytesFrom(unsafe.Pointer(&tab[0]), size, size)...)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func makeModuledata(name string, filenames []string, funcs []Func, text []byte) (mod *moduledata) {
|
||||
mod = new(moduledata)
|
||||
mod.modulename = name
|
||||
|
||||
// make filename table
|
||||
cu := make([]string, 0, len(filenames))
|
||||
for _, f := range filenames {
|
||||
cu = append(cu, f)
|
||||
}
|
||||
cutab, filetab, cuOffs := makeFilenametab([]compilationUnit{{cu}})
|
||||
mod.cutab = cutab
|
||||
mod.filetab = filetab
|
||||
|
||||
// make funcname table
|
||||
funcnametab, nameOffs := makeFuncnameTab(funcs)
|
||||
mod.funcnametab = funcnametab
|
||||
|
||||
// make pcdata table
|
||||
// NOTICE: _func only use offset to index pcdata, thus no need mmap() pcdata
|
||||
pctab, pcdataOffs, _funcs := makePctab(funcs, cuOffs, nameOffs)
|
||||
mod.pctab = pctab
|
||||
|
||||
// write func data
|
||||
// NOTICE: _func use mod.gofunc+offset to directly point funcdata, thus need cache funcdata
|
||||
// TODO: estimate accurate capacity
|
||||
cache := make([]byte, 0, len(funcs)*int(_PtrSize))
|
||||
fstart, funcdataOffs := writeFuncdata(&cache, funcs)
|
||||
|
||||
// make pc->func (binary search) func table
|
||||
lastFuncsize := funcs[len(funcs)-1].TextSize
|
||||
ftab := makeFtab(_funcs, lastFuncsize)
|
||||
mod.ftab = ftab
|
||||
|
||||
// write pc->func (modmap) findfunc table
|
||||
ffstart := writeFindfunctab(&cache, ftab)
|
||||
|
||||
// make pclnt table
|
||||
pclntab := makePclntable(_funcs, lastFuncsize, pcdataOffs, funcdataOffs)
|
||||
mod.pclntable = pclntab
|
||||
|
||||
// mmap() text and funcdata segements
|
||||
p := os.Getpagesize()
|
||||
size := int(rnd(int64(len(text)), int64(p)))
|
||||
addr := mmap(size)
|
||||
// copy the machine code
|
||||
s := rt.BytesFrom(unsafe.Pointer(addr), len(text), size)
|
||||
copy(s, text)
|
||||
// make it executable
|
||||
mprotect(addr, size)
|
||||
|
||||
// assign addresses
|
||||
mod.text = addr
|
||||
mod.etext = addr + uintptr(size)
|
||||
mod.minpc = addr
|
||||
mod.maxpc = addr + uintptr(len(text))
|
||||
|
||||
// cache funcdata and findfuncbucket
|
||||
moduleCache.Lock()
|
||||
moduleCache.m[mod] = cache
|
||||
moduleCache.Unlock()
|
||||
mod.gofunc = uintptr(unsafe.Pointer(&cache[fstart]))
|
||||
mod.findfunctab = uintptr(unsafe.Pointer(&cache[ffstart]))
|
||||
|
||||
// make pc header
|
||||
mod.pcHeader = &pcHeader {
|
||||
magic : _Magic,
|
||||
minLC : _MinLC,
|
||||
ptrSize : _PtrSize,
|
||||
nfunc : len(funcs),
|
||||
nfiles: uint(len(cu)),
|
||||
textStart: mod.text,
|
||||
funcnameOffset: getOffsetOf(moduledata{}, "funcnametab"),
|
||||
cuOffset: getOffsetOf(moduledata{}, "cutab"),
|
||||
filetabOffset: getOffsetOf(moduledata{}, "filetab"),
|
||||
pctabOffset: getOffsetOf(moduledata{}, "pctab"),
|
||||
pclnOffset: getOffsetOf(moduledata{}, "pclntable"),
|
||||
}
|
||||
|
||||
// sepecial case: gcdata and gcbss must by non-empty
|
||||
mod.gcdata = uintptr(unsafe.Pointer(&emptyByte))
|
||||
mod.gcbss = uintptr(unsafe.Pointer(&emptyByte))
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// makePctab generates pcdelta->valuedelta tables for functions,
|
||||
// and returns the table and the entry offset of every kind pcdata in the table.
|
||||
func makePctab(funcs []Func, cuOffset []uint32, nameOffset []int32) (pctab []byte, pcdataOffs [][]uint32, _funcs []_func) {
|
||||
_funcs = make([]_func, len(funcs))
|
||||
|
||||
// Pctab offsets of 0 are considered invalid in the runtime. We respect
|
||||
// that by just padding a single byte at the beginning of runtime.pctab,
|
||||
// that way no real offsets can be zero.
|
||||
pctab = make([]byte, 1, 12*len(funcs)+1)
|
||||
pcdataOffs = make([][]uint32, len(funcs))
|
||||
|
||||
for i, f := range funcs {
|
||||
_f := &_funcs[i]
|
||||
|
||||
var writer = func(pc *Pcdata) {
|
||||
var ab []byte
|
||||
var err error
|
||||
if pc != nil {
|
||||
ab, err = pc.MarshalBinary()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
pcdataOffs[i] = append(pcdataOffs[i], uint32(len(pctab)))
|
||||
} else {
|
||||
ab = []byte{0}
|
||||
pcdataOffs[i] = append(pcdataOffs[i], _PCDATA_INVALID_OFFSET)
|
||||
}
|
||||
pctab = append(pctab, ab...)
|
||||
}
|
||||
|
||||
if f.Pcsp != nil {
|
||||
_f.pcsp = uint32(len(pctab))
|
||||
}
|
||||
writer(f.Pcsp)
|
||||
if f.Pcfile != nil {
|
||||
_f.pcfile = uint32(len(pctab))
|
||||
}
|
||||
writer(f.Pcfile)
|
||||
if f.Pcline != nil {
|
||||
_f.pcln = uint32(len(pctab))
|
||||
}
|
||||
writer(f.Pcline)
|
||||
writer(f.PcUnsafePoint)
|
||||
writer(f.PcStackMapIndex)
|
||||
writer(f.PcInlTreeIndex)
|
||||
writer(f.PcArgLiveIndex)
|
||||
|
||||
_f.entryOff = f.EntryOff
|
||||
_f.nameOff = nameOffset[i]
|
||||
_f.args = f.ArgsSize
|
||||
_f.deferreturn = f.DeferReturn
|
||||
// NOTICE: _func.pcdata is always as [PCDATA_UnsafePoint(0) : PCDATA_ArgLiveIndex(3)]
|
||||
_f.npcdata = uint32(_N_PCDATA)
|
||||
_f.cuOffset = cuOffset[i]
|
||||
_f.funcID = f.ID
|
||||
_f.flag = f.Flag
|
||||
_f.nfuncdata = uint8(_N_FUNCDATA)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argptrs uintptr, localptrs uintptr) {}
|
119 vendor/github.com/bytedance/sonic/loader/funcdata_go121.go generated vendored Normal file
@ -0,0 +1,119 @@
//go:build go1.21 && !go1.22
// +build go1.21,!go1.22

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package loader

import (
    `unsafe`
    `github.com/bytedance/sonic/internal/rt`
)

const (
    _Magic uint32 = 0xFFFFFFF1
)

type moduledata struct {
    pcHeader     *pcHeader
    funcnametab  []byte
    cutab        []uint32
    filetab      []byte
    pctab        []byte
    pclntable    []byte
    ftab         []funcTab
    findfunctab  uintptr
    minpc, maxpc uintptr // first func address, last func address + last func size

    text, etext           uintptr // start/end of text, (etext-text) must be greater than MIN_FUNC
    noptrdata, enoptrdata uintptr
    data, edata           uintptr
    bss, ebss             uintptr
    noptrbss, enoptrbss   uintptr
    covctrs, ecovctrs     uintptr
    end, gcdata, gcbss    uintptr
    types, etypes         uintptr
    rodata                uintptr
    gofunc                uintptr // go.func.* is actual funcinfo object in image

    textsectmap []textSection // see runtime/symtab.go: textAddr()
    typelinks   []int32       // offsets from types
    itablinks   []*rt.GoItab

    ptab []ptabEntry

    pluginpath string
    pkghashes  []modulehash

    // This slice records the initializing tasks that need to be
    // done to start up the program. It is built by the linker.
    inittasks []unsafe.Pointer

    modulename   string
    modulehashes []modulehash

    hasmain uint8 // 1 if module contains the main function, 0 otherwise

    gcdatamask, gcbssmask bitVector

    typemap map[int32]*rt.GoType // offset to *_rtype in previous module

    bad bool // module failed to load and should be ignored

    next *moduledata
}

type _func struct {
    entryOff uint32 // start pc, as offset from moduledata.text/pcHeader.textStart
    nameOff  int32  // function name, as index into moduledata.funcnametab.

    args        int32  // in/out args size
    deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.

    pcsp      uint32
    pcfile    uint32
    pcln      uint32
    npcdata   uint32
    cuOffset  uint32 // runtime.cutab offset of this function's CU
    startLine int32  // line number of start of function (func keyword/TEXT directive)
    funcID    uint8  // set for certain special runtime functions
    flag      uint8
    _         [1]byte // pad
    nfuncdata uint8

    // The end of the struct is followed immediately by two variable-length
    // arrays that reference the pcdata and funcdata locations for this
    // function.

    // pcdata contains the offset into moduledata.pctab for the start of
    // that index's table. e.g.,
    // &moduledata.pctab[_func.pcdata[_PCDATA_UnsafePoint]] is the start of
    // the unsafe point table.
    //
    // An offset of 0 indicates that there is no table.
    //
    // pcdata [npcdata]uint32

    // funcdata contains the offset past moduledata.gofunc which contains a
    // pointer to that index's funcdata. e.g.,
    // *(moduledata.gofunc + _func.funcdata[_FUNCDATA_ArgsPointerMaps]) is
    // the argument pointer map.
    //
    // An offset of ^uint32(0) indicates that there is no entry.
    //
    // funcdata [nfuncdata]uint32
}
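
Because each _func record in pclntable is immediately followed by its variable-length pcdata and funcdata offset arrays, the sizing loop in makeFtab (below, in funcdata_latest.go) reserves _FUNC_SIZE plus 4 bytes per pcdata and funcdata entry, pointer-aligned. A rough sketch of that sizing, with placeholder constants standing in for the package's real _FUNC_SIZE and _PtrSize values:

// Sketch: how one pclntable record is sized. The constants are stand-ins for
// the package's _FUNC_SIZE/_PtrSize values, not authoritative.
package main

import "fmt"

const (
    funcSize = 44 // assumed byte size of the fixed _func header
    ptrSize  = 8  // assumed pointer size used for alignment
)

// rnd rounds n up to a multiple of align (align must be a power of two).
func rnd(n, align int64) int64 { return (n + align - 1) &^ (align - 1) }

// nextRecord mirrors the sizing loop in makeFtab: align the record start to
// pointer size, then reserve the header plus the two trailing offset arrays.
func nextRecord(cur, npcdata, nfuncdata int64) (start, next int64) {
    start = rnd(cur, ptrSize)
    next = start + funcSize + 4*npcdata + 4*nfuncdata
    return
}

func main() {
    start, next := nextRecord(100, 7, 2)
    fmt.Println(start, next) // 104 184
}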

355 vendor/github.com/bytedance/sonic/loader/funcdata_latest.go generated vendored Normal file
@@ -0,0 +1,355 @@
//go:build go1.18 && !go1.22
// +build go1.18,!go1.22

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package loader

import (
    `os`
    `sort`
    `unsafe`

    `github.com/bytedance/sonic/internal/rt`
)

type funcTab struct {
    entry   uint32
    funcoff uint32
}

type pcHeader struct {
    magic          uint32  // 0xFFFFFFF0
    pad1, pad2     uint8   // 0,0
    minLC          uint8   // min instruction size
    ptrSize        uint8   // size of a ptr in bytes
    nfunc          int     // number of functions in the module
    nfiles         uint    // number of entries in the file tab
    textStart      uintptr // base for function entry PC offsets in this module, equal to moduledata.text
    funcnameOffset uintptr // offset to the funcnametab variable from pcHeader
    cuOffset       uintptr // offset to the cutab variable from pcHeader
    filetabOffset  uintptr // offset to the filetab variable from pcHeader
    pctabOffset    uintptr // offset to the pctab variable from pcHeader
    pclnOffset     uintptr // offset to the pclntab variable from pcHeader
}
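
The *Offset fields above hold the byte offsets of the corresponding moduledata slices and are filled in later by makeModuledata via getOffsetOf(moduledata{}, "..."). A plausible reading of such a helper is a reflection-based field-offset lookup; the sketch below shows that idea against a toy struct, and is an assumption, not sonic's actual getOffsetOf.

// Sketch: field-offset lookup by name via reflection, assumed to be roughly
// what a helper like getOffsetOf does. The struct here is a toy type.
package main

import (
    "fmt"
    "reflect"
)

type mod struct {
    header  *byte
    nametab []byte
    pctab   []byte
}

func offsetOf(v interface{}, field string) uintptr {
    f, ok := reflect.TypeOf(v).FieldByName(field)
    if !ok {
        panic("no such field: " + field)
    }
    return f.Offset
}

func main() {
    fmt.Println(offsetOf(mod{}, "pctab")) // byte offset of pctab within mod
}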

type bitVector struct {
    n        int32 // # of bits
    bytedata *uint8
}

type ptabEntry struct {
    name int32
    typ  int32
}

type textSection struct {
    vaddr    uintptr // prelinked section vaddr
    end      uintptr // vaddr + section length
    baseaddr uintptr // relocated section address
}

type modulehash struct {
    modulename   string
    linktimehash string
    runtimehash  *string
}

// findfuncbucket is an array of these structures.
// Each bucket represents 4096 bytes of the text segment.
// Each subbucket represents 256 bytes of the text segment.
// To find a function given a pc, locate the bucket and subbucket for
// that pc. Add together the idx and subbucket value to obtain a
// function index. Then scan the functab array starting at that
// index to find the target function.
// This table uses 20 bytes for every 4096 bytes of code, or ~0.5% overhead.
type findfuncbucket struct {
    idx         uint32
    _SUBBUCKETS [16]byte
}
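
The comment above is the runtime's findfunc recipe: bucket hint, subbucket delta, then a short forward scan. A simplified, hedged sketch of that lookup against this bucket layout is shown below; the constants stand in for the package's _BUCKETSIZE, _SUB_BUCKETSIZE and _SUBBUCKETS, and the scan details are deliberately simplified.

// Sketch of the pc -> func-index lookup the comment describes. Constants and
// scan details are simplified assumptions, not the runtime's exact findfunc.
package main

import "fmt"

const (
    bucketSize    = 4096 // bytes of text per bucket
    subBucketSize = 256  // bytes of text per subbucket
    subBuckets    = 16   // subbuckets per bucket
)

type bucket struct {
    idx  uint32           // func index of the first function in this bucket
    subs [subBuckets]byte // per-subbucket delta added to idx
}

type fn struct{ entry, end uint32 }

// findFunc returns the index of the function covering pcOff, scanning
// forward from the bucket/subbucket hint.
func findFunc(pcOff uint32, tab []bucket, funcs []fn) int {
    b := pcOff / bucketSize
    sb := (pcOff % bucketSize) / subBucketSize
    i := int(tab[b].idx) + int(tab[b].subs[sb])
    for i+1 < len(funcs) && funcs[i+1].entry <= pcOff {
        i++
    }
    return i
}

func main() {
    funcs := []fn{{0, 100}, {100, 5000}, {5000, 9000}}
    tab := []bucket{{idx: 0}, {idx: 1}, {idx: 2}}
    fmt.Println(findFunc(4500, tab, funcs)) // 1
}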

type compilationUnit struct {
    fileNames []string
}

func makeFtab(funcs []_func, maxpc uint32) (ftab []funcTab, pclntabSize int64, startLocations []uint32) {
    // Allocate space for the pc->func table. This structure consists of a pc offset
    // and an offset to the func structure. After that, we have a single pc
    // value that marks the end of the last function in the binary.
    pclntabSize = int64(len(funcs)*2*int(_PtrSize) + int(_PtrSize))
    startLocations = make([]uint32, len(funcs))
    for i, f := range funcs {
        pclntabSize = rnd(pclntabSize, int64(_PtrSize))
        // writePCToFunc
        startLocations[i] = uint32(pclntabSize)
        pclntabSize += int64(uint8(_FUNC_SIZE) + f.nfuncdata*4 + uint8(f.npcdata)*4)
    }

    ftab = make([]funcTab, 0, len(funcs)+1)

    // write a map of pc->func info offsets
    for i, f := range funcs {
        ftab = append(ftab, funcTab{uint32(f.entryOff), uint32(startLocations[i])})
    }

    // Final entry of table is just end pc offset.
    ftab = append(ftab, funcTab{maxpc, 0})
    return
}
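
Because ftab is sorted by entry offset and closed with a {maxpc, 0} sentinel, "which function contains this pc" becomes a well-defined binary search. A hedged, standalone sketch of such a lookup (not the runtime's actual search) is shown here; the funcTab type is redeclared locally so the example is self-contained.

// Sketch: binary search over a funcTab-style table closed by an end sentinel.
package main

import (
    "fmt"
    "sort"
)

type funcTab struct {
    entry   uint32 // pc offset of the function's entry
    funcoff uint32 // offset of its _func record in pclntable
}

// lookup returns the index of the function whose [entry, nextEntry) range
// contains pcOff, or -1 if pcOff falls outside the module's text.
func lookup(ftab []funcTab, pcOff uint32) int {
    n := len(ftab) - 1 // last element is the end-pc sentinel
    i := sort.Search(n, func(i int) bool { return ftab[i+1].entry > pcOff })
    if i == n || pcOff < ftab[i].entry || pcOff >= ftab[len(ftab)-1].entry {
        return -1
    }
    return i
}

func main() {
    ftab := []funcTab{{0, 64}, {128, 96}, {512, 160}, {1024, 0}} // last = sentinel
    fmt.Println(lookup(ftab, 600))  // 2
    fmt.Println(lookup(ftab, 2048)) // -1
}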

// Pcln table format: [...]funcTab + [...]_Func
func makePclntable(size int64, startLocations []uint32, funcs []_func, maxpc uint32, pcdataOffs [][]uint32, funcdataOffs [][]uint32) (pclntab []byte) {
    // Allocate space for the pc->func table. This structure consists of a pc offset
    // and an offset to the func structure. After that, we have a single pc
    // value that marks the end of the last function in the binary.
    pclntab = make([]byte, size, size)

    // write a map of pc->func info offsets
    offs := 0
    for i, f := range funcs {
        byteOrder.PutUint32(pclntab[offs:offs+4], uint32(f.entryOff))
        byteOrder.PutUint32(pclntab[offs+4:offs+8], uint32(startLocations[i]))
        offs += 8
    }
    // Final entry of table is just end pc offset.
    byteOrder.PutUint32(pclntab[offs:offs+4], maxpc)

    // write func info table
    for i := range funcs {
        off := startLocations[i]

        // write _func structure to pclntab
        fb := rt.BytesFrom(unsafe.Pointer(&funcs[i]), int(_FUNC_SIZE), int(_FUNC_SIZE))
        copy(pclntab[off:off+uint32(_FUNC_SIZE)], fb)
        off += uint32(_FUNC_SIZE)

        // NOTICE: _func.pcdata always starts from PcUnsafePoint, which is index 3
        for j := 3; j < len(pcdataOffs[i]); j++ {
            byteOrder.PutUint32(pclntab[off:off+4], uint32(pcdataOffs[i][j]))
            off += 4
        }

        // funcdata refs as offsets from gofunc
        for _, funcdata := range funcdataOffs[i] {
            byteOrder.PutUint32(pclntab[off:off+4], uint32(funcdata))
            off += 4
        }
    }

    return
}
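
The pc->funcoff map written above is just a run of uint32 pairs followed by a single end pc, so it can be decoded the same way it was encoded. The sketch below assumes little-endian byte order, which is an assumption about what byteOrder resolves to on the targets involved, not something this diff states.

// Sketch: decoding the header section of a pclntab-style byte slice back into
// (entry, funcoff) pairs. Little-endian byte order is assumed here.
package main

import (
    "encoding/binary"
    "fmt"
)

func main() {
    // Build a tiny table: two functions plus the trailing end-pc entry.
    buf := make([]byte, 2*8+4)
    binary.LittleEndian.PutUint32(buf[0:4], 0)     // func 0 entry offset
    binary.LittleEndian.PutUint32(buf[4:8], 100)   // func 0 _func record offset
    binary.LittleEndian.PutUint32(buf[8:12], 64)   // func 1 entry offset
    binary.LittleEndian.PutUint32(buf[12:16], 160) // func 1 _func record offset
    binary.LittleEndian.PutUint32(buf[16:20], 128) // end pc

    for off := 0; off+8 <= len(buf)-4; off += 8 {
        entry := binary.LittleEndian.Uint32(buf[off : off+4])
        funcoff := binary.LittleEndian.Uint32(buf[off+4 : off+8])
        fmt.Printf("entry=%d funcoff=%d\n", entry, funcoff)
    }
    fmt.Println("maxpc =", binary.LittleEndian.Uint32(buf[len(buf)-4:]))
}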

// findfunc table used to map a pc to the func it belongs to,
// returns the index in the func table.
//
// All text sections are divided into buckets sized _BUCKETSIZE(4K):
// every bucket is divided into _SUBBUCKETS sized _SUB_BUCKETSIZE(64),
// and each bucket has a base idx to which the offset stored in the jth subbucket is added.
// see findfunc() in runtime/symtab.go
func writeFindfunctab(out *[]byte, ftab []funcTab) (start int) {
    start = len(*out)

    max := ftab[len(ftab)-1].entry
    min := ftab[0].entry
    nbuckets := (max - min + _BUCKETSIZE - 1) / _BUCKETSIZE
    n := (max - min + _SUB_BUCKETSIZE - 1) / _SUB_BUCKETSIZE

    tab := make([]findfuncbucket, 0, nbuckets)
    var s, e = 0, 0
    for i := 0; i < int(nbuckets); i++ {
        // store the start s-th func of the bucket
        var fb = findfuncbucket{idx: uint32(s)}

        // find the last e-th func of the bucket
        var pc = min + uint32((i+1)*_BUCKETSIZE)
        for ; e < len(ftab)-1 && ftab[e+1].entry <= pc; e++ {
        }

        for j := 0; j < _SUBBUCKETS && (i*_SUBBUCKETS+j) < int(n); j++ {
            // store the start func of the subbucket
            fb._SUBBUCKETS[j] = byte(uint32(s) - fb.idx)

            // find the s-th end func of the subbucket
            pc = min + uint32(i*_BUCKETSIZE) + uint32((j+1)*_SUB_BUCKETSIZE)
            for ; s < len(ftab)-1 && ftab[s+1].entry <= pc; s++ {
            }
        }

        s = e
        tab = append(tab, fb)
    }

    // write findfuncbucket
    if len(tab) > 0 {
        size := int(unsafe.Sizeof(findfuncbucket{})) * len(tab)
        *out = append(*out, rt.BytesFrom(unsafe.Pointer(&tab[0]), size, size)...)
    }
    return
}

func makeModuledata(name string, filenames []string, funcsp *[]Func, text []byte) (mod *moduledata) {
    mod = new(moduledata)
    mod.modulename = name

    // sort funcs by entry
    funcs := *funcsp
    sort.Slice(funcs, func(i, j int) bool {
        return funcs[i].EntryOff < funcs[j].EntryOff
    })
    *funcsp = funcs

    // make filename table
    cu := make([]string, 0, len(filenames))
    cu = append(cu, filenames...)
    cutab, filetab, cuOffs := makeFilenametab([]compilationUnit{{cu}})
    mod.cutab = cutab
    mod.filetab = filetab

    // make funcname table
    funcnametab, nameOffs := makeFuncnameTab(funcs)
    mod.funcnametab = funcnametab

    // mmap() text and funcdata segments
    p := os.Getpagesize()
    size := int(rnd(int64(len(text)), int64(p)))
    addr := mmap(size)
    // copy the machine code
    s := rt.BytesFrom(unsafe.Pointer(addr), len(text), size)
    copy(s, text)
    // make it executable
    mprotect(addr, size)

    // assign addresses
    mod.text = addr
    mod.etext = addr + uintptr(size)
    mod.minpc = addr
    mod.maxpc = addr + uintptr(len(text))

    // make pcdata table
    // NOTICE: _func only uses offsets to index pcdata, so there is no need to mmap() pcdata
    cuOff := cuOffs[0]
    pctab, pcdataOffs, _funcs := makePctab(funcs, cuOff, nameOffs)
    mod.pctab = pctab

    // write func data
    // NOTICE: _func uses mod.gofunc+offset to point directly at funcdata, so funcdata must be kept cached
    // TODO: estimate accurate capacity
    cache := make([]byte, 0, len(funcs)*int(_PtrSize))
    fstart, funcdataOffs := writeFuncdata(&cache, funcs)

    // make pc->func (binary search) func table
    ftab, pclntSize, startLocations := makeFtab(_funcs, uint32(len(text)))
    mod.ftab = ftab

    // write pc->func (modmap) findfunc table
    ffstart := writeFindfunctab(&cache, ftab)

    // cache funcdata and findfuncbucket
    moduleCache.Lock()
    moduleCache.m[mod] = cache
    moduleCache.Unlock()
    mod.gofunc = uintptr(unsafe.Pointer(&cache[fstart]))
    mod.findfunctab = uintptr(unsafe.Pointer(&cache[ffstart]))

    // make pclnt table
    pclntab := makePclntable(pclntSize, startLocations, _funcs, uint32(len(text)), pcdataOffs, funcdataOffs)
    mod.pclntable = pclntab

    // make pc header
    mod.pcHeader = &pcHeader{
        magic:          _Magic,
        minLC:          _MinLC,
        ptrSize:        _PtrSize,
        nfunc:          len(funcs),
        nfiles:         uint(len(cu)),
        textStart:      mod.text,
        funcnameOffset: getOffsetOf(moduledata{}, "funcnametab"),
        cuOffset:       getOffsetOf(moduledata{}, "cutab"),
        filetabOffset:  getOffsetOf(moduledata{}, "filetab"),
        pctabOffset:    getOffsetOf(moduledata{}, "pctab"),
        pclnOffset:     getOffsetOf(moduledata{}, "pclntable"),
    }

    // special case: gcdata and gcbss must be non-empty
    mod.gcdata = uintptr(unsafe.Pointer(&emptyByte))
    mod.gcbss = uintptr(unsafe.Pointer(&emptyByte))

    return
}
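
makeModuledata stages the generated machine code through the package's own mmap/mprotect helpers: reserve page-aligned writable memory, copy the text in, then flip the pages to executable. A hedged, Unix-only sketch of that map/write/protect pattern using the standard syscall package follows; the vendored loader's mmap and mprotect wrappers have different signatures, so this is an illustration of the pattern, not the actual helpers.

//go:build linux || darwin

// Sketch of the map-write-protect (W^X) pattern used when placing generated
// code. Uses the standard syscall package directly.
package main

import (
    "fmt"
    "os"
    "syscall"
)

func placeText(text []byte) ([]byte, error) {
    page := os.Getpagesize()
    size := (len(text) + page - 1) &^ (page - 1) // round up to page size

    // 1) anonymous, writable mapping
    mem, err := syscall.Mmap(-1, 0, size,
        syscall.PROT_READ|syscall.PROT_WRITE,
        syscall.MAP_ANON|syscall.MAP_PRIVATE)
    if err != nil {
        return nil, err
    }

    // 2) copy the machine code in while the pages are writable
    copy(mem, text)

    // 3) drop write, add execute
    if err := syscall.Mprotect(mem, syscall.PROT_READ|syscall.PROT_EXEC); err != nil {
        syscall.Munmap(mem)
        return nil, err
    }
    return mem, nil
}

func main() {
    mem, err := placeText([]byte{0xC3}) // e.g. a lone RET on amd64
    if err != nil {
        panic(err)
    }
    fmt.Printf("text mapped at %p\n", &mem[0])
}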

// makePctab generates pcdelta->valuedelta tables for functions,
// and returns the table and the entry offset of every kind of pcdata in the table.
func makePctab(funcs []Func, cuOffset uint32, nameOffset []int32) (pctab []byte, pcdataOffs [][]uint32, _funcs []_func) {
    _funcs = make([]_func, len(funcs))

    // Pctab offsets of 0 are considered invalid in the runtime. We respect
    // that by just padding a single byte at the beginning of runtime.pctab,
    // that way no real offsets can be zero.
    pctab = make([]byte, 1, 12*len(funcs)+1)
    pcdataOffs = make([][]uint32, len(funcs))

    for i, f := range funcs {
        _f := &_funcs[i]

        var writer = func(pc *Pcdata) {
            var ab []byte
            var err error
            if pc != nil {
                ab, err = pc.MarshalBinary()
                if err != nil {
                    panic(err)
                }
                pcdataOffs[i] = append(pcdataOffs[i], uint32(len(pctab)))
            } else {
                ab = []byte{0}
                pcdataOffs[i] = append(pcdataOffs[i], _PCDATA_INVALID_OFFSET)
            }
            pctab = append(pctab, ab...)
        }

        if f.Pcsp != nil {
            _f.pcsp = uint32(len(pctab))
        }
        writer(f.Pcsp)
        if f.Pcfile != nil {
            _f.pcfile = uint32(len(pctab))
        }
        writer(f.Pcfile)
        if f.Pcline != nil {
            _f.pcln = uint32(len(pctab))
        }
        writer(f.Pcline)
        writer(f.PcUnsafePoint)
        writer(f.PcStackMapIndex)
        writer(f.PcInlTreeIndex)
        writer(f.PcArgLiveIndex)

        _f.entryOff = f.EntryOff
        _f.nameOff = nameOffset[i]
        _f.args = f.ArgsSize
        _f.deferreturn = f.DeferReturn
        // NOTICE: _func.pcdata is always as [PCDATA_UnsafePoint(0) : PCDATA_ArgLiveIndex(3)]
        _f.npcdata = uint32(_N_PCDATA)
        _f.cuOffset = cuOffset
        _f.funcID = f.ID
        _f.flag = f.Flag
        _f.nfuncdata = uint8(_N_FUNCDATA)
    }

    return
}

func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argptrs uintptr, localptrs uintptr) {}

@@ -1,5 +1,5 @@
//go:build go1.16 && !go1.21
// +build go1.16,!go1.21
//go:build go1.16 && !go1.22
// +build go1.16,!go1.22

/*
 * Copyright 2021 ByteDance Inc.
@@ -87,18 +87,27 @@ func (self Loader) LoadOne(text []byte, funcName string, frameSize int, argSize
// and returns runnable function pointer
// WARN: this API is experimental, use it carefully
func Load(text []byte, funcs []Func, modulename string, filenames []string) (out []Function) {
    ids := make([]string, len(funcs))
    for i, f := range funcs {
        ids[i] = f.Name
    }
    // generate module data and allocate memory address
    mod := makeModuledata(modulename, filenames, funcs, text)
    mod := makeModuledata(modulename, filenames, &funcs, text)

    // verify and register the new module
    moduledataverify1(mod)
    registerModule(mod)

    //
    // encapsulate function address
    out = make([]Function, len(funcs))
    for i, f := range funcs {
    for i, s := range ids {
        for _, f := range funcs {
            if f.Name == s {
                m := uintptr(mod.text + uintptr(f.EntryOff))
                out[i] = Function(&m)
            }
        }
    }
    return
}
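
The new Load body captures the requested function names before makeModuledata (through the &funcs pointer) sorts funcs by entry offset in place, then resolves each captured name against the sorted slice so out preserves the caller's order. A standalone sketch of that resolve-by-name step is shown below, using local stand-in types rather than the loader's real Func and Function types.

// Sketch: keep the caller's order of requested names while the backing slice
// gets re-sorted, then resolve names back to entry addresses.
package main

import (
    "fmt"
    "sort"
)

type fnMeta struct {
    Name     string
    EntryOff uint32
}

func resolve(base uintptr, funcs []fnMeta) map[string]uintptr {
    // sorting by entry offset mirrors what module construction does in place
    sort.Slice(funcs, func(i, j int) bool { return funcs[i].EntryOff < funcs[j].EntryOff })

    addrs := make(map[string]uintptr, len(funcs))
    for _, f := range funcs {
        addrs[f.Name] = base + uintptr(f.EntryOff)
    }
    return addrs
}

func main() {
    funcs := []fnMeta{{"decode", 0x80}, {"encode", 0x00}}
    addrs := resolve(0x1000, funcs)
    fmt.Printf("encode@%#x decode@%#x\n", addrs["encode"], addrs["decode"])
}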