compiler.go 24 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885
  1. /*
  2. * Copyright 2021 ByteDance Inc.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. package encoder
  17. import (
  18. `fmt`
  19. `reflect`
  20. `strconv`
  21. `strings`
  22. `unsafe`
  23. `github.com/bytedance/sonic/internal/resolver`
  24. `github.com/bytedance/sonic/internal/rt`
  25. `github.com/bytedance/sonic/option`
  26. )
  27. type _Op uint8
  28. const (
  29. _OP_null _Op = iota + 1
  30. _OP_empty_arr
  31. _OP_empty_obj
  32. _OP_bool
  33. _OP_i8
  34. _OP_i16
  35. _OP_i32
  36. _OP_i64
  37. _OP_u8
  38. _OP_u16
  39. _OP_u32
  40. _OP_u64
  41. _OP_f32
  42. _OP_f64
  43. _OP_str
  44. _OP_bin
  45. _OP_quote
  46. _OP_number
  47. _OP_eface
  48. _OP_iface
  49. _OP_byte
  50. _OP_text
  51. _OP_deref
  52. _OP_index
  53. _OP_load
  54. _OP_save
  55. _OP_drop
  56. _OP_drop_2
  57. _OP_recurse
  58. _OP_is_nil
  59. _OP_is_nil_p1
  60. _OP_is_zero_1
  61. _OP_is_zero_2
  62. _OP_is_zero_4
  63. _OP_is_zero_8
  64. _OP_is_zero_map
  65. _OP_goto
  66. _OP_map_iter
  67. _OP_map_stop
  68. _OP_map_check_key
  69. _OP_map_write_key
  70. _OP_map_value_next
  71. _OP_slice_len
  72. _OP_slice_next
  73. _OP_marshal
  74. _OP_marshal_p
  75. _OP_marshal_text
  76. _OP_marshal_text_p
  77. _OP_cond_set
  78. _OP_cond_testc
  79. )
  80. const (
  81. _INT_SIZE = 32 << (^uint(0) >> 63)
  82. _PTR_SIZE = 32 << (^uintptr(0) >> 63)
  83. _PTR_BYTE = unsafe.Sizeof(uintptr(0))
  84. )
  85. const (
  86. _MAX_ILBUF = 100000 // cutoff at 100k of IL instructions
  87. _MAX_FIELDS = 50 // cutoff at 50 fields struct
  88. )
  89. var _OpNames = [256]string {
  90. _OP_null : "null",
  91. _OP_empty_arr : "empty_arr",
  92. _OP_empty_obj : "empty_obj",
  93. _OP_bool : "bool",
  94. _OP_i8 : "i8",
  95. _OP_i16 : "i16",
  96. _OP_i32 : "i32",
  97. _OP_i64 : "i64",
  98. _OP_u8 : "u8",
  99. _OP_u16 : "u16",
  100. _OP_u32 : "u32",
  101. _OP_u64 : "u64",
  102. _OP_f32 : "f32",
  103. _OP_f64 : "f64",
  104. _OP_str : "str",
  105. _OP_bin : "bin",
  106. _OP_quote : "quote",
  107. _OP_number : "number",
  108. _OP_eface : "eface",
  109. _OP_iface : "iface",
  110. _OP_byte : "byte",
  111. _OP_text : "text",
  112. _OP_deref : "deref",
  113. _OP_index : "index",
  114. _OP_load : "load",
  115. _OP_save : "save",
  116. _OP_drop : "drop",
  117. _OP_drop_2 : "drop_2",
  118. _OP_recurse : "recurse",
  119. _OP_is_nil : "is_nil",
  120. _OP_is_nil_p1 : "is_nil_p1",
  121. _OP_is_zero_1 : "is_zero_1",
  122. _OP_is_zero_2 : "is_zero_2",
  123. _OP_is_zero_4 : "is_zero_4",
  124. _OP_is_zero_8 : "is_zero_8",
  125. _OP_is_zero_map : "is_zero_map",
  126. _OP_goto : "goto",
  127. _OP_map_iter : "map_iter",
  128. _OP_map_stop : "map_stop",
  129. _OP_map_check_key : "map_check_key",
  130. _OP_map_write_key : "map_write_key",
  131. _OP_map_value_next : "map_value_next",
  132. _OP_slice_len : "slice_len",
  133. _OP_slice_next : "slice_next",
  134. _OP_marshal : "marshal",
  135. _OP_marshal_p : "marshal_p",
  136. _OP_marshal_text : "marshal_text",
  137. _OP_marshal_text_p : "marshal_text_p",
  138. _OP_cond_set : "cond_set",
  139. _OP_cond_testc : "cond_testc",
  140. }
  141. func (self _Op) String() string {
  142. if ret := _OpNames[self]; ret != "" {
  143. return ret
  144. } else {
  145. return "<invalid>"
  146. }
  147. }
  148. func _OP_int() _Op {
  149. switch _INT_SIZE {
  150. case 32: return _OP_i32
  151. case 64: return _OP_i64
  152. default: panic("unsupported int size")
  153. }
  154. }
  155. func _OP_uint() _Op {
  156. switch _INT_SIZE {
  157. case 32: return _OP_u32
  158. case 64: return _OP_u64
  159. default: panic("unsupported uint size")
  160. }
  161. }
  162. func _OP_uintptr() _Op {
  163. switch _PTR_SIZE {
  164. case 32: return _OP_u32
  165. case 64: return _OP_u64
  166. default: panic("unsupported pointer size")
  167. }
  168. }
  169. func _OP_is_zero_ints() _Op {
  170. switch _INT_SIZE {
  171. case 32: return _OP_is_zero_4
  172. case 64: return _OP_is_zero_8
  173. default: panic("unsupported integer size")
  174. }
  175. }
  176. type _Instr struct {
  177. u uint64 // union {op: 8, _: 8, vi: 48}, vi maybe int or len(str)
  178. p unsafe.Pointer // maybe GoString.Ptr, or *GoType
  179. }
  180. func packOp(op _Op) uint64 {
  181. return uint64(op) << 56
  182. }
  183. func newInsOp(op _Op) _Instr {
  184. return _Instr{u: packOp(op)}
  185. }
  186. func newInsVi(op _Op, vi int) _Instr {
  187. return _Instr{u: packOp(op) | rt.PackInt(vi)}
  188. }
  189. func newInsVs(op _Op, vs string) _Instr {
  190. return _Instr {
  191. u: packOp(op) | rt.PackInt(len(vs)),
  192. p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr,
  193. }
  194. }
  195. func newInsVt(op _Op, vt reflect.Type) _Instr {
  196. return _Instr {
  197. u: packOp(op),
  198. p: unsafe.Pointer(rt.UnpackType(vt)),
  199. }
  200. }
  201. func newInsVp(op _Op, vt reflect.Type, pv bool) _Instr {
  202. i := 0
  203. if pv {
  204. i = 1
  205. }
  206. return _Instr {
  207. u: packOp(op) | rt.PackInt(i),
  208. p: unsafe.Pointer(rt.UnpackType(vt)),
  209. }
  210. }
  211. func (self _Instr) op() _Op {
  212. return _Op(self.u >> 56)
  213. }
  214. func (self _Instr) vi() int {
  215. return rt.UnpackInt(self.u)
  216. }
  217. func (self _Instr) vf() uint8 {
  218. return (*rt.GoType)(self.p).KindFlags
  219. }
  220. func (self _Instr) vs() (v string) {
  221. (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p
  222. (*rt.GoString)(unsafe.Pointer(&v)).Len = self.vi()
  223. return
  224. }
  225. func (self _Instr) vk() reflect.Kind {
  226. return (*rt.GoType)(self.p).Kind()
  227. }
  228. func (self _Instr) vt() reflect.Type {
  229. return (*rt.GoType)(self.p).Pack()
  230. }
  231. func (self _Instr) vp() (vt reflect.Type, pv bool) {
  232. return (*rt.GoType)(self.p).Pack(), rt.UnpackInt(self.u) == 1
  233. }
  234. func (self _Instr) i64() int64 {
  235. return int64(self.vi())
  236. }
  237. func (self _Instr) vlen() int {
  238. return int((*rt.GoType)(self.p).Size)
  239. }
  240. func (self _Instr) isBranch() bool {
  241. switch self.op() {
  242. case _OP_goto : fallthrough
  243. case _OP_is_nil : fallthrough
  244. case _OP_is_nil_p1 : fallthrough
  245. case _OP_is_zero_1 : fallthrough
  246. case _OP_is_zero_2 : fallthrough
  247. case _OP_is_zero_4 : fallthrough
  248. case _OP_is_zero_8 : fallthrough
  249. case _OP_map_check_key : fallthrough
  250. case _OP_map_write_key : fallthrough
  251. case _OP_slice_next : fallthrough
  252. case _OP_cond_testc : return true
  253. default : return false
  254. }
  255. }
  256. func (self _Instr) disassemble() string {
  257. switch self.op() {
  258. case _OP_byte : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.QuoteRune(rune(self.vi())))
  259. case _OP_text : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.Quote(self.vs()))
  260. case _OP_index : return fmt.Sprintf("%-18s%d", self.op().String(), self.vi())
  261. case _OP_recurse : fallthrough
  262. case _OP_map_iter : fallthrough
  263. case _OP_marshal : fallthrough
  264. case _OP_marshal_p : fallthrough
  265. case _OP_marshal_text : fallthrough
  266. case _OP_marshal_text_p : return fmt.Sprintf("%-18s%s", self.op().String(), self.vt())
  267. case _OP_goto : fallthrough
  268. case _OP_is_nil : fallthrough
  269. case _OP_is_nil_p1 : fallthrough
  270. case _OP_is_zero_1 : fallthrough
  271. case _OP_is_zero_2 : fallthrough
  272. case _OP_is_zero_4 : fallthrough
  273. case _OP_is_zero_8 : fallthrough
  274. case _OP_is_zero_map : fallthrough
  275. case _OP_cond_testc : fallthrough
  276. case _OP_map_check_key : fallthrough
  277. case _OP_map_write_key : return fmt.Sprintf("%-18sL_%d", self.op().String(), self.vi())
  278. case _OP_slice_next : return fmt.Sprintf("%-18sL_%d, %s", self.op().String(), self.vi(), self.vt())
  279. default : return self.op().String()
  280. }
  281. }
  282. type (
  283. _Program []_Instr
  284. )
  285. func (self _Program) pc() int {
  286. return len(self)
  287. }
  288. func (self _Program) tag(n int) {
  289. if n >= _MaxStack {
  290. panic("type nesting too deep")
  291. }
  292. }
  293. func (self _Program) pin(i int) {
  294. v := &self[i]
  295. v.u &= 0xffff000000000000
  296. v.u |= rt.PackInt(self.pc())
  297. }
  298. func (self _Program) rel(v []int) {
  299. for _, i := range v {
  300. self.pin(i)
  301. }
  302. }
  303. func (self *_Program) add(op _Op) {
  304. *self = append(*self, newInsOp(op))
  305. }
  306. func (self *_Program) key(op _Op) {
  307. *self = append(*self,
  308. newInsVi(_OP_byte, '"'),
  309. newInsOp(op),
  310. newInsVi(_OP_byte, '"'),
  311. )
  312. }
  313. func (self *_Program) int(op _Op, vi int) {
  314. *self = append(*self, newInsVi(op, vi))
  315. }
  316. func (self *_Program) str(op _Op, vs string) {
  317. *self = append(*self, newInsVs(op, vs))
  318. }
  319. func (self *_Program) rtt(op _Op, vt reflect.Type) {
  320. *self = append(*self, newInsVt(op, vt))
  321. }
  322. func (self *_Program) vp(op _Op, vt reflect.Type, pv bool) {
  323. *self = append(*self, newInsVp(op, vt, pv))
  324. }
  325. func (self _Program) disassemble() string {
  326. nb := len(self)
  327. tab := make([]bool, nb + 1)
  328. ret := make([]string, 0, nb + 1)
  329. /* prescan to get all the labels */
  330. for _, ins := range self {
  331. if ins.isBranch() {
  332. tab[ins.vi()] = true
  333. }
  334. }
  335. /* disassemble each instruction */
  336. for i, ins := range self {
  337. if !tab[i] {
  338. ret = append(ret, "\t" + ins.disassemble())
  339. } else {
  340. ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
  341. }
  342. }
  343. /* add the last label, if needed */
  344. if tab[nb] {
  345. ret = append(ret, fmt.Sprintf("L_%d:", nb))
  346. }
  347. /* add an "end" indicator, and join all the strings */
  348. return strings.Join(append(ret, "\tend"), "\n")
  349. }
  350. type _Compiler struct {
  351. opts option.CompileOptions
  352. pv bool
  353. tab map[reflect.Type]bool
  354. rec map[reflect.Type]uint8
  355. }
  356. func newCompiler() *_Compiler {
  357. return &_Compiler {
  358. opts: option.DefaultCompileOptions(),
  359. tab: map[reflect.Type]bool{},
  360. rec: map[reflect.Type]uint8{},
  361. }
  362. }
  363. func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
  364. self.opts = opts
  365. if self.opts.RecursiveDepth > 0 {
  366. self.rec = map[reflect.Type]uint8{}
  367. }
  368. return self
  369. }
  370. func (self *_Compiler) rescue(ep *error) {
  371. if val := recover(); val != nil {
  372. if err, ok := val.(error); ok {
  373. *ep = err
  374. } else {
  375. panic(val)
  376. }
  377. }
  378. }
  379. func (self *_Compiler) compile(vt reflect.Type, pv bool) (ret _Program, err error) {
  380. defer self.rescue(&err)
  381. self.compileOne(&ret, 0, vt, pv)
  382. return
  383. }
  384. func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type, pv bool) {
  385. if self.tab[vt] {
  386. p.vp(_OP_recurse, vt, pv)
  387. } else {
  388. self.compileRec(p, sp, vt, pv)
  389. }
  390. }
  391. func (self *_Compiler) compileRec(p *_Program, sp int, vt reflect.Type, pv bool) {
  392. pr := self.pv
  393. pt := reflect.PtrTo(vt)
  394. /* check for addressable `json.Marshaler` with pointer receiver */
  395. if pv && pt.Implements(jsonMarshalerType) {
  396. p.rtt(_OP_marshal_p, pt)
  397. return
  398. }
  399. /* check for `json.Marshaler` */
  400. if vt.Implements(jsonMarshalerType) {
  401. self.compileMarshaler(p, _OP_marshal, vt, jsonMarshalerType)
  402. return
  403. }
  404. /* check for addressable `encoding.TextMarshaler` with pointer receiver */
  405. if pv && pt.Implements(encodingTextMarshalerType) {
  406. p.rtt(_OP_marshal_text_p, pt)
  407. return
  408. }
  409. /* check for `encoding.TextMarshaler` */
  410. if vt.Implements(encodingTextMarshalerType) {
  411. self.compileMarshaler(p, _OP_marshal_text, vt, encodingTextMarshalerType)
  412. return
  413. }
  414. /* enter the recursion, and compile the type */
  415. self.pv = pv
  416. self.tab[vt] = true
  417. self.compileOps(p, sp, vt)
  418. /* exit the recursion */
  419. self.pv = pr
  420. delete(self.tab, vt)
  421. }
  422. func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
  423. switch vt.Kind() {
  424. case reflect.Bool : p.add(_OP_bool)
  425. case reflect.Int : p.add(_OP_int())
  426. case reflect.Int8 : p.add(_OP_i8)
  427. case reflect.Int16 : p.add(_OP_i16)
  428. case reflect.Int32 : p.add(_OP_i32)
  429. case reflect.Int64 : p.add(_OP_i64)
  430. case reflect.Uint : p.add(_OP_uint())
  431. case reflect.Uint8 : p.add(_OP_u8)
  432. case reflect.Uint16 : p.add(_OP_u16)
  433. case reflect.Uint32 : p.add(_OP_u32)
  434. case reflect.Uint64 : p.add(_OP_u64)
  435. case reflect.Uintptr : p.add(_OP_uintptr())
  436. case reflect.Float32 : p.add(_OP_f32)
  437. case reflect.Float64 : p.add(_OP_f64)
  438. case reflect.String : self.compileString (p, vt)
  439. case reflect.Array : self.compileArray (p, sp, vt.Elem(), vt.Len())
  440. case reflect.Interface : self.compileInterface (p, vt)
  441. case reflect.Map : self.compileMap (p, sp, vt)
  442. case reflect.Ptr : self.compilePtr (p, sp, vt.Elem())
  443. case reflect.Slice : self.compileSlice (p, sp, vt.Elem())
  444. case reflect.Struct : self.compileStruct (p, sp, vt)
  445. default : panic (error_type(vt))
  446. }
  447. }
  448. func (self *_Compiler) compileNil(p *_Program, sp int, vt reflect.Type, nil_op _Op, fn func(*_Program, int, reflect.Type)) {
  449. x := p.pc()
  450. p.add(_OP_is_nil)
  451. fn(p, sp, vt)
  452. e := p.pc()
  453. p.add(_OP_goto)
  454. p.pin(x)
  455. p.add(nil_op)
  456. p.pin(e)
  457. }
  458. func (self *_Compiler) compilePtr(p *_Program, sp int, vt reflect.Type) {
  459. self.compileNil(p, sp, vt, _OP_null, self.compilePtrBody)
  460. }
  461. func (self *_Compiler) compilePtrBody(p *_Program, sp int, vt reflect.Type) {
  462. p.tag(sp)
  463. p.add(_OP_save)
  464. p.add(_OP_deref)
  465. self.compileOne(p, sp + 1, vt, true)
  466. p.add(_OP_drop)
  467. }
  468. func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
  469. self.compileNil(p, sp, vt, _OP_empty_obj, self.compileMapBody)
  470. }
  471. func (self *_Compiler) compileMapBody(p *_Program, sp int, vt reflect.Type) {
  472. p.tag(sp + 1)
  473. p.int(_OP_byte, '{')
  474. p.add(_OP_save)
  475. p.rtt(_OP_map_iter, vt)
  476. p.add(_OP_save)
  477. i := p.pc()
  478. p.add(_OP_map_check_key)
  479. u := p.pc()
  480. p.add(_OP_map_write_key)
  481. self.compileMapBodyKey(p, vt.Key())
  482. p.pin(u)
  483. p.int(_OP_byte, ':')
  484. p.add(_OP_map_value_next)
  485. self.compileOne(p, sp + 2, vt.Elem(), false)
  486. j := p.pc()
  487. p.add(_OP_map_check_key)
  488. p.int(_OP_byte, ',')
  489. v := p.pc()
  490. p.add(_OP_map_write_key)
  491. self.compileMapBodyKey(p, vt.Key())
  492. p.pin(v)
  493. p.int(_OP_byte, ':')
  494. p.add(_OP_map_value_next)
  495. self.compileOne(p, sp + 2, vt.Elem(), false)
  496. p.int(_OP_goto, j)
  497. p.pin(i)
  498. p.pin(j)
  499. p.add(_OP_map_stop)
  500. p.add(_OP_drop_2)
  501. p.int(_OP_byte, '}')
  502. }
  503. func (self *_Compiler) compileMapBodyKey(p *_Program, vk reflect.Type) {
  504. if !vk.Implements(encodingTextMarshalerType) {
  505. self.compileMapBodyTextKey(p, vk)
  506. } else {
  507. self.compileMapBodyUtextKey(p, vk)
  508. }
  509. }
  510. func (self *_Compiler) compileMapBodyTextKey(p *_Program, vk reflect.Type) {
  511. switch vk.Kind() {
  512. case reflect.Invalid : panic("map key is nil")
  513. case reflect.Bool : p.key(_OP_bool)
  514. case reflect.Int : p.key(_OP_int())
  515. case reflect.Int8 : p.key(_OP_i8)
  516. case reflect.Int16 : p.key(_OP_i16)
  517. case reflect.Int32 : p.key(_OP_i32)
  518. case reflect.Int64 : p.key(_OP_i64)
  519. case reflect.Uint : p.key(_OP_uint())
  520. case reflect.Uint8 : p.key(_OP_u8)
  521. case reflect.Uint16 : p.key(_OP_u16)
  522. case reflect.Uint32 : p.key(_OP_u32)
  523. case reflect.Uint64 : p.key(_OP_u64)
  524. case reflect.Uintptr : p.key(_OP_uintptr())
  525. case reflect.Float32 : p.key(_OP_f32)
  526. case reflect.Float64 : p.key(_OP_f64)
  527. case reflect.String : self.compileString(p, vk)
  528. default : panic(error_type(vk))
  529. }
  530. }
  531. func (self *_Compiler) compileMapBodyUtextKey(p *_Program, vk reflect.Type) {
  532. if vk.Kind() != reflect.Ptr {
  533. p.rtt(_OP_marshal_text, vk)
  534. } else {
  535. self.compileMapBodyUtextPtr(p, vk)
  536. }
  537. }
  538. func (self *_Compiler) compileMapBodyUtextPtr(p *_Program, vk reflect.Type) {
  539. i := p.pc()
  540. p.add(_OP_is_nil)
  541. p.rtt(_OP_marshal_text, vk)
  542. j := p.pc()
  543. p.add(_OP_goto)
  544. p.pin(i)
  545. p.str(_OP_text, "\"\"")
  546. p.pin(j)
  547. }
  548. func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
  549. self.compileNil(p, sp, vt, _OP_empty_arr, self.compileSliceBody)
  550. }
  551. func (self *_Compiler) compileSliceBody(p *_Program, sp int, vt reflect.Type) {
  552. if isSimpleByte(vt) {
  553. p.add(_OP_bin)
  554. } else {
  555. self.compileSliceArray(p, sp, vt)
  556. }
  557. }
  558. func (self *_Compiler) compileSliceArray(p *_Program, sp int, vt reflect.Type) {
  559. p.tag(sp)
  560. p.int(_OP_byte, '[')
  561. p.add(_OP_save)
  562. p.add(_OP_slice_len)
  563. i := p.pc()
  564. p.rtt(_OP_slice_next, vt)
  565. self.compileOne(p, sp + 1, vt, true)
  566. j := p.pc()
  567. p.rtt(_OP_slice_next, vt)
  568. p.int(_OP_byte, ',')
  569. self.compileOne(p, sp + 1, vt, true)
  570. p.int(_OP_goto, j)
  571. p.pin(i)
  572. p.pin(j)
  573. p.add(_OP_drop)
  574. p.int(_OP_byte, ']')
  575. }
  576. func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type, nb int) {
  577. p.tag(sp)
  578. p.int(_OP_byte, '[')
  579. p.add(_OP_save)
  580. /* first item */
  581. if nb != 0 {
  582. self.compileOne(p, sp + 1, vt, self.pv)
  583. p.add(_OP_load)
  584. }
  585. /* remaining items */
  586. for i := 1; i < nb; i++ {
  587. p.int(_OP_byte, ',')
  588. p.int(_OP_index, i * int(vt.Size()))
  589. self.compileOne(p, sp + 1, vt, self.pv)
  590. p.add(_OP_load)
  591. }
  592. /* end of array */
  593. p.add(_OP_drop)
  594. p.int(_OP_byte, ']')
  595. }
  596. func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
  597. if vt != jsonNumberType {
  598. p.add(_OP_str)
  599. } else {
  600. p.add(_OP_number)
  601. }
  602. }
  603. func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
  604. if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
  605. p.vp(_OP_recurse, vt, self.pv)
  606. if self.opts.RecursiveDepth > 0 {
  607. if self.pv {
  608. self.rec[vt] = 1
  609. } else {
  610. self.rec[vt] = 0
  611. }
  612. }
  613. } else {
  614. self.compileStructBody(p, sp, vt)
  615. }
  616. }
  617. func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
  618. p.tag(sp)
  619. p.int(_OP_byte, '{')
  620. p.add(_OP_save)
  621. p.add(_OP_cond_set)
  622. /* compile each field */
  623. for _, fv := range resolver.ResolveStruct(vt) {
  624. var s []int
  625. var o resolver.Offset
  626. /* "omitempty" for arrays */
  627. if fv.Type.Kind() == reflect.Array {
  628. if fv.Type.Len() == 0 && (fv.Opts & resolver.F_omitempty) != 0 {
  629. continue
  630. }
  631. }
  632. /* index to the field */
  633. for _, o = range fv.Path {
  634. if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
  635. s = append(s, p.pc())
  636. p.add(_OP_is_nil)
  637. p.add(_OP_deref)
  638. }
  639. }
  640. /* check for "omitempty" option */
  641. if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts & resolver.F_omitempty) != 0 {
  642. s = append(s, p.pc())
  643. self.compileStructFieldZero(p, fv.Type)
  644. }
  645. /* add the comma if not the first element */
  646. i := p.pc()
  647. p.add(_OP_cond_testc)
  648. p.int(_OP_byte, ',')
  649. p.pin(i)
  650. /* compile the key and value */
  651. ft := fv.Type
  652. p.str(_OP_text, Quote(fv.Name) + ":")
  653. /* check for "stringnize" option */
  654. if (fv.Opts & resolver.F_stringize) == 0 {
  655. self.compileOne(p, sp + 1, ft, self.pv)
  656. } else {
  657. self.compileStructFieldStr(p, sp + 1, ft)
  658. }
  659. /* patch the skipping jumps and reload the struct pointer */
  660. p.rel(s)
  661. p.add(_OP_load)
  662. }
  663. /* end of object */
  664. p.add(_OP_drop)
  665. p.int(_OP_byte, '}')
  666. }
  667. func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
  668. pc := -1
  669. ft := vt
  670. sv := false
  671. /* dereference the pointer if needed */
  672. if ft.Kind() == reflect.Ptr {
  673. ft = ft.Elem()
  674. }
  675. /* check if it can be stringized */
  676. switch ft.Kind() {
  677. case reflect.Bool : sv = true
  678. case reflect.Int : sv = true
  679. case reflect.Int8 : sv = true
  680. case reflect.Int16 : sv = true
  681. case reflect.Int32 : sv = true
  682. case reflect.Int64 : sv = true
  683. case reflect.Uint : sv = true
  684. case reflect.Uint8 : sv = true
  685. case reflect.Uint16 : sv = true
  686. case reflect.Uint32 : sv = true
  687. case reflect.Uint64 : sv = true
  688. case reflect.Uintptr : sv = true
  689. case reflect.Float32 : sv = true
  690. case reflect.Float64 : sv = true
  691. case reflect.String : sv = true
  692. }
  693. /* if it's not, ignore the "string" and follow the regular path */
  694. if !sv {
  695. self.compileOne(p, sp, vt, self.pv)
  696. return
  697. }
  698. /* dereference the pointer */
  699. if vt.Kind() == reflect.Ptr {
  700. pc = p.pc()
  701. vt = vt.Elem()
  702. p.add(_OP_is_nil)
  703. p.add(_OP_deref)
  704. }
  705. /* special case of a double-quoted string */
  706. if ft != jsonNumberType && ft.Kind() == reflect.String {
  707. p.add(_OP_quote)
  708. } else {
  709. self.compileStructFieldQuoted(p, sp, vt)
  710. }
  711. /* the "null" case of the pointer */
  712. if pc != -1 {
  713. e := p.pc()
  714. p.add(_OP_goto)
  715. p.pin(pc)
  716. p.add(_OP_null)
  717. p.pin(e)
  718. }
  719. }
  720. func (self *_Compiler) compileStructFieldZero(p *_Program, vt reflect.Type) {
  721. switch vt.Kind() {
  722. case reflect.Bool : p.add(_OP_is_zero_1)
  723. case reflect.Int : p.add(_OP_is_zero_ints())
  724. case reflect.Int8 : p.add(_OP_is_zero_1)
  725. case reflect.Int16 : p.add(_OP_is_zero_2)
  726. case reflect.Int32 : p.add(_OP_is_zero_4)
  727. case reflect.Int64 : p.add(_OP_is_zero_8)
  728. case reflect.Uint : p.add(_OP_is_zero_ints())
  729. case reflect.Uint8 : p.add(_OP_is_zero_1)
  730. case reflect.Uint16 : p.add(_OP_is_zero_2)
  731. case reflect.Uint32 : p.add(_OP_is_zero_4)
  732. case reflect.Uint64 : p.add(_OP_is_zero_8)
  733. case reflect.Uintptr : p.add(_OP_is_nil)
  734. case reflect.Float32 : p.add(_OP_is_zero_4)
  735. case reflect.Float64 : p.add(_OP_is_zero_8)
  736. case reflect.String : p.add(_OP_is_nil_p1)
  737. case reflect.Interface : p.add(_OP_is_nil_p1)
  738. case reflect.Map : p.add(_OP_is_zero_map)
  739. case reflect.Ptr : p.add(_OP_is_nil)
  740. case reflect.Slice : p.add(_OP_is_nil_p1)
  741. default : panic(error_type(vt))
  742. }
  743. }
  744. func (self *_Compiler) compileStructFieldQuoted(p *_Program, sp int, vt reflect.Type) {
  745. p.int(_OP_byte, '"')
  746. self.compileOne(p, sp, vt, self.pv)
  747. p.int(_OP_byte, '"')
  748. }
  749. func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
  750. x := p.pc()
  751. p.add(_OP_is_nil_p1)
  752. /* iface and efaces are different */
  753. if vt.NumMethod() == 0 {
  754. p.add(_OP_eface)
  755. } else {
  756. p.add(_OP_iface)
  757. }
  758. /* the "null" value */
  759. e := p.pc()
  760. p.add(_OP_goto)
  761. p.pin(x)
  762. p.add(_OP_null)
  763. p.pin(e)
  764. }
  765. func (self *_Compiler) compileMarshaler(p *_Program, op _Op, vt reflect.Type, mt reflect.Type) {
  766. pc := p.pc()
  767. vk := vt.Kind()
  768. /* direct receiver */
  769. if vk != reflect.Ptr {
  770. p.rtt(op, vt)
  771. return
  772. }
  773. /* value receiver with a pointer type, check for nil before calling the marshaler */
  774. p.add(_OP_is_nil)
  775. p.rtt(op, vt)
  776. i := p.pc()
  777. p.add(_OP_goto)
  778. p.pin(pc)
  779. p.add(_OP_null)
  780. p.pin(i)
  781. }