// Copyright 2024 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package vm

import (
    "encoding"
    "encoding/json"
    "fmt"
    "math"
    "reflect"
    "unsafe"

    "github.com/bytedance/sonic/internal/base64"
    "github.com/bytedance/sonic/internal/encoder/alg"
    "github.com/bytedance/sonic/internal/encoder/ir"
    "github.com/bytedance/sonic/internal/encoder/vars"
    "github.com/bytedance/sonic/internal/rt"
)

const (
    _S_cond = iota
    _S_init
)

var (
    _T_json_Marshaler         = rt.UnpackType(vars.JsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(vars.EncodingTextMarshalerType)
)

// print_instr is a debugging aid: it dumps the tail of the output buffer,
// then the current pc, opcode, disassembled instruction and operand pointer.
func print_instr(buf []byte, pc int, op ir.Op, ins *ir.Instr, p unsafe.Pointer) {
    if len(buf) > 20 {
        fmt.Println(string(buf[len(buf)-20:]))
    } else {
        fmt.Println(string(buf))
    }
    fmt.Printf("pc %04d, op %v, ins %#v, ptr: %x\n", pc, op, ins.Disassemble(), p)
}

// Execute interprets the compiled encoder program prog against the value at
// p, appending the encoded JSON to *b. s is the scratch stack used to
// save/restore state around nested values, and flags carries the encoder
// option bits.
func Execute(b *[]byte, p unsafe.Pointer, s *vars.Stack, flags uint64, prog *ir.Program) error {
    pl := len(*prog)
    if pl <= 0 {
        return nil
    }

    var buf = *b         // output buffer, synced back to *b around recursion
    var x int            // integer register, saved/restored with the stack
    var q unsafe.Pointer // secondary pointer register
    var f uint64         // saved flag register

    var pro = &(*prog)[0]
    for pc := 0; pc < pl; {
        ins := (*ir.Instr)(rt.Add(unsafe.Pointer(pro), ir.OpSize*uintptr(pc)))
        pc++
        op := ins.Op()

        switch op {
        case ir.OP_goto:
            pc = ins.Vi()
            continue
        case ir.OP_byte:
            v := ins.Byte()
            buf = append(buf, v)
        case ir.OP_text:
            v := ins.Vs()
            buf = append(buf, v...)
        case ir.OP_deref:
            p = *(*unsafe.Pointer)(p)
        case ir.OP_index:
            p = rt.Add(p, uintptr(ins.I64()))
        case ir.OP_load:
            // NOTICE: load CANNOT change f!
            x, _, p, q = s.Load()
        case ir.OP_save:
            if !s.Save(x, f, p, q) {
                return vars.ERR_too_deep
            }
        case ir.OP_drop:
            x, f, p, q = s.Drop()
        case ir.OP_drop_2:
            s.Drop()
            x, f, p, q = s.Drop()
        case ir.OP_recurse:
            // Encode a nested type through the generic entry point. The
            // local f shadows the register above on purpose: the callee
            // gets its own flag set.
            vt, pv := ins.Vp2()
            f := flags
            if pv {
                f |= (1 << alg.BitPointerValue)
            }
            *b = buf
            if vt.Indirect() {
                if err := EncodeTypedPointer(b, vt, (*unsafe.Pointer)(rt.NoEscape(unsafe.Pointer(&p))), s, f); err != nil {
                    return err
                }
            } else {
                vp := (*unsafe.Pointer)(p)
                if err := EncodeTypedPointer(b, vt, vp, s, f); err != nil {
                    return err
                }
            }
            buf = *b
        case ir.OP_is_nil:
            if is_nil(p) {
                pc = ins.Vi()
                continue
            }
        case ir.OP_is_nil_p1:
            if (*rt.GoEface)(p).Value == nil {
                pc = ins.Vi()
                continue
            }
        case ir.OP_null:
            buf = append(buf, 'n', 'u', 'l', 'l')
        case ir.OP_str:
            v := *(*string)(p)
            buf = alg.Quote(buf, v, false)
        case ir.OP_bool:
            if *(*bool)(p) {
                buf = append(buf, 't', 'r', 'u', 'e')
            } else {
                buf = append(buf, 'f', 'a', 'l', 's', 'e')
            }
        case ir.OP_i8:
            v := *(*int8)(p)
            buf = alg.I64toa(buf, int64(v))
        case ir.OP_i16:
            v := *(*int16)(p)
            buf = alg.I64toa(buf, int64(v))
        case ir.OP_i32:
            v := *(*int32)(p)
            buf = alg.I64toa(buf, int64(v))
        case ir.OP_i64:
            v := *(*int64)(p)
            buf = alg.I64toa(buf, int64(v))
        case ir.OP_u8:
            v := *(*uint8)(p)
            buf = alg.U64toa(buf, uint64(v))
        case ir.OP_u16:
            v := *(*uint16)(p)
            buf = alg.U64toa(buf, uint64(v))
        case ir.OP_u32:
            v := *(*uint32)(p)
            buf = alg.U64toa(buf, uint64(v))
        case ir.OP_u64:
            v := *(*uint64)(p)
            buf = alg.U64toa(buf, uint64(v))
        case ir.OP_f32:
            // NaN and ±Inf are not representable in JSON: either encode
            // null (when the corresponding flag bit is set) or fail.
            v := *(*float32)(p)
            if math.IsNaN(float64(v)) || math.IsInf(float64(v), 0) {
                if flags&(1<<alg.BitEncodeNullForInfOrNan) == 0 {
                    return vars.ERR_nan_or_infinite
                }
                buf = append(buf, 'n', 'u', 'l', 'l')
            } else {
                buf = alg.F32toa(buf, v)
            }
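        // Illustrative sketch (not a definitive usage, and not part of the
        // interpreter itself): how a caller might drive this loop directly.
        // EncodeTypedPointer, used by OP_recurse above, is the usual entry
        // point; the NewStack/FreeStack pair below is assumed to be the
        // vars package's stack pool, and value/prog are placeholders for a
        // Go value and the program compiled for its type.
        //
        //     var buf []byte
        //     s := vars.NewStack()
        //     defer vars.FreeStack(s)
        //     if err := Execute(&buf, unsafe.Pointer(&value), s, 0, prog); err != nil {
        //         // handle encoding error (e.g. vars.ERR_too_deep)
        //     }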