compiler.go 30 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155
  1. /*
  2. * Copyright 2021 ByteDance Inc.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. package decoder
  17. import (
  18. `encoding/json`
  19. `fmt`
  20. `reflect`
  21. `sort`
  22. `strconv`
  23. `strings`
  24. `unsafe`
  25. `github.com/bytedance/sonic/internal/caching`
  26. `github.com/bytedance/sonic/internal/resolver`
  27. `github.com/bytedance/sonic/internal/rt`
  28. `github.com/bytedance/sonic/option`
  29. )
// _Op is the opcode of a single instruction in the decoder's intermediate
// language (IL). Opcodes start at 1 so the zero value is invalid.
type _Op uint8

const (
    /* primitive value decoding */
    _OP_any _Op = iota + 1
    _OP_dyn
    _OP_str
    _OP_bin
    _OP_bool
    _OP_num
    _OP_i8
    _OP_i16
    _OP_i32
    _OP_i64
    _OP_u8
    _OP_u16
    _OP_u32
    _OP_u64
    _OP_f32
    _OP_f64
    _OP_unquote

    /* zeroing stores of different widths */
    _OP_nil_1
    _OP_nil_2
    _OP_nil_3

    /* pointer dereference and field indexing */
    _OP_deref
    _OP_index

    /* "null" checks (branch when null is seen) */
    _OP_is_null
    _OP_is_null_quote

    /* map decoding: init plus one key opcode per key type */
    _OP_map_init
    _OP_map_key_i8
    _OP_map_key_i16
    _OP_map_key_i32
    _OP_map_key_i64
    _OP_map_key_u8
    _OP_map_key_u16
    _OP_map_key_u32
    _OP_map_key_u64
    _OP_map_key_f32
    _OP_map_key_f64
    _OP_map_key_str
    _OP_map_key_utext
    _OP_map_key_utext_p

    /* array / slice decoding */
    _OP_array_skip
    _OP_array_clear
    _OP_array_clear_p
    _OP_slice_init
    _OP_slice_append

    /* object decoding */
    _OP_object_skip
    _OP_object_next
    _OP_struct_field

    /* user unmarshaler dispatch */
    _OP_unmarshal
    _OP_unmarshal_p
    _OP_unmarshal_text
    _OP_unmarshal_text_p

    /* lexing and control flow */
    _OP_lspace
    _OP_match_char
    _OP_check_char
    _OP_load
    _OP_save
    _OP_drop
    _OP_drop_2
    _OP_recurse
    _OP_goto
    _OP_switch
    _OP_check_char_0
    _OP_dismatch_err
    _OP_go_skip
    _OP_add
    _OP_check_empty
    _OP_debug
)
const (
    _INT_SIZE = 32 << (^uint(0) >> 63)    // width of int in bits (32 or 64)
    _PTR_SIZE = 32 << (^uintptr(0) >> 63) // width of uintptr in bits (32 or 64)
    _PTR_BYTE = unsafe.Sizeof(uintptr(0)) // size of a pointer in bytes
)

const (
    _MAX_ILBUF  = 100000 // cutoff at 100k of IL instructions
    _MAX_FIELDS = 50     // cutoff at 50 fields struct
)
  108. var _OpNames = [256]string {
  109. _OP_any : "any",
  110. _OP_dyn : "dyn",
  111. _OP_str : "str",
  112. _OP_bin : "bin",
  113. _OP_bool : "bool",
  114. _OP_num : "num",
  115. _OP_i8 : "i8",
  116. _OP_i16 : "i16",
  117. _OP_i32 : "i32",
  118. _OP_i64 : "i64",
  119. _OP_u8 : "u8",
  120. _OP_u16 : "u16",
  121. _OP_u32 : "u32",
  122. _OP_u64 : "u64",
  123. _OP_f32 : "f32",
  124. _OP_f64 : "f64",
  125. _OP_unquote : "unquote",
  126. _OP_nil_1 : "nil_1",
  127. _OP_nil_2 : "nil_2",
  128. _OP_nil_3 : "nil_3",
  129. _OP_deref : "deref",
  130. _OP_index : "index",
  131. _OP_is_null : "is_null",
  132. _OP_is_null_quote : "is_null_quote",
  133. _OP_map_init : "map_init",
  134. _OP_map_key_i8 : "map_key_i8",
  135. _OP_map_key_i16 : "map_key_i16",
  136. _OP_map_key_i32 : "map_key_i32",
  137. _OP_map_key_i64 : "map_key_i64",
  138. _OP_map_key_u8 : "map_key_u8",
  139. _OP_map_key_u16 : "map_key_u16",
  140. _OP_map_key_u32 : "map_key_u32",
  141. _OP_map_key_u64 : "map_key_u64",
  142. _OP_map_key_f32 : "map_key_f32",
  143. _OP_map_key_f64 : "map_key_f64",
  144. _OP_map_key_str : "map_key_str",
  145. _OP_map_key_utext : "map_key_utext",
  146. _OP_map_key_utext_p : "map_key_utext_p",
  147. _OP_array_skip : "array_skip",
  148. _OP_slice_init : "slice_init",
  149. _OP_slice_append : "slice_append",
  150. _OP_object_skip : "object_skip",
  151. _OP_object_next : "object_next",
  152. _OP_struct_field : "struct_field",
  153. _OP_unmarshal : "unmarshal",
  154. _OP_unmarshal_p : "unmarshal_p",
  155. _OP_unmarshal_text : "unmarshal_text",
  156. _OP_unmarshal_text_p : "unmarshal_text_p",
  157. _OP_lspace : "lspace",
  158. _OP_match_char : "match_char",
  159. _OP_check_char : "check_char",
  160. _OP_load : "load",
  161. _OP_save : "save",
  162. _OP_drop : "drop",
  163. _OP_drop_2 : "drop_2",
  164. _OP_recurse : "recurse",
  165. _OP_goto : "goto",
  166. _OP_switch : "switch",
  167. _OP_check_char_0 : "check_char_0",
  168. _OP_dismatch_err : "dismatch_err",
  169. _OP_add : "add",
  170. _OP_go_skip : "go_skip",
  171. _OP_check_empty : "check_empty",
  172. _OP_debug : "debug",
  173. }
  174. func (self _Op) String() string {
  175. if ret := _OpNames[self]; ret != "" {
  176. return ret
  177. } else {
  178. return "<invalid>"
  179. }
  180. }
  181. func _OP_int() _Op {
  182. switch _INT_SIZE {
  183. case 32: return _OP_i32
  184. case 64: return _OP_i64
  185. default: panic("unsupported int size")
  186. }
  187. }
  188. func _OP_uint() _Op {
  189. switch _INT_SIZE {
  190. case 32: return _OP_u32
  191. case 64: return _OP_u64
  192. default: panic("unsupported uint size")
  193. }
  194. }
  195. func _OP_uintptr() _Op {
  196. switch _PTR_SIZE {
  197. case 32: return _OP_u32
  198. case 64: return _OP_u64
  199. default: panic("unsupported pointer size")
  200. }
  201. }
  202. func _OP_map_key_int() _Op {
  203. switch _INT_SIZE {
  204. case 32: return _OP_map_key_i32
  205. case 64: return _OP_map_key_i64
  206. default: panic("unsupported int size")
  207. }
  208. }
  209. func _OP_map_key_uint() _Op {
  210. switch _INT_SIZE {
  211. case 32: return _OP_map_key_u32
  212. case 64: return _OP_map_key_u64
  213. default: panic("unsupported uint size")
  214. }
  215. }
  216. func _OP_map_key_uintptr() _Op {
  217. switch _PTR_SIZE {
  218. case 32: return _OP_map_key_u32
  219. case 64: return _OP_map_key_u64
  220. default: panic("unsupported pointer size")
  221. }
  222. }
// _Instr is a single IL instruction: a packed 64-bit word plus an optional
// pointer operand.
type _Instr struct {
    u uint64         // union {op: 8, vb: 8, vi: 48}, iv maybe int or len([]int)
    p unsafe.Pointer // maybe GoSlice.Data, *GoType or *caching.FieldMap
}
  227. func packOp(op _Op) uint64 {
  228. return uint64(op) << 56
  229. }
// newInsOp creates an instruction carrying only an opcode.
func newInsOp(op _Op) _Instr {
    return _Instr{u: packOp(op)}
}

// newInsVi creates an instruction with a 48-bit integer operand.
func newInsVi(op _Op, vi int) _Instr {
    return _Instr{u: packOp(op) | rt.PackInt(vi)}
}

// newInsVb creates an instruction with a single-byte operand (bits 48-55).
func newInsVb(op _Op, vb byte) _Instr {
    return _Instr{u: packOp(op) | (uint64(vb) << 48)}
}
  239. func newInsVs(op _Op, vs []int) _Instr {
  240. return _Instr {
  241. u: packOp(op) | rt.PackInt(len(vs)),
  242. p: (*rt.GoSlice)(unsafe.Pointer(&vs)).Ptr,
  243. }
  244. }
  245. func newInsVt(op _Op, vt reflect.Type) _Instr {
  246. return _Instr {
  247. u: packOp(op),
  248. p: unsafe.Pointer(rt.UnpackType(vt)),
  249. }
  250. }
  251. func newInsVf(op _Op, vf *caching.FieldMap) _Instr {
  252. return _Instr {
  253. u: packOp(op),
  254. p: unsafe.Pointer(vf),
  255. }
  256. }
// op extracts the opcode from the top 8 bits of the instruction word.
func (self _Instr) op() _Op {
    return _Op(self.u >> 56)
}

// vi extracts the 48-bit integer operand.
func (self _Instr) vi() int {
    return rt.UnpackInt(self.u)
}

// vb extracts the single-byte operand (bits 48-55).
func (self _Instr) vb() byte {
    return byte(self.u >> 48)
}
  266. func (self _Instr) vs() (v []int) {
  267. (*rt.GoSlice)(unsafe.Pointer(&v)).Ptr = self.p
  268. (*rt.GoSlice)(unsafe.Pointer(&v)).Cap = self.vi()
  269. (*rt.GoSlice)(unsafe.Pointer(&v)).Len = self.vi()
  270. return
  271. }
// vf interprets the pointer operand as a struct field map.
func (self _Instr) vf() *caching.FieldMap {
    return (*caching.FieldMap)(self.p)
}

// vk returns the reflect.Kind of the type operand.
func (self _Instr) vk() reflect.Kind {
    return (*rt.GoType)(self.p).Kind()
}

// vt returns the type operand as a reflect.Type.
func (self _Instr) vt() reflect.Type {
    return (*rt.GoType)(self.p).Pack()
}

// i64 returns the integer operand widened to int64.
func (self _Instr) i64() int64 {
    return int64(self.vi())
}

// vlen returns the size in bytes of the type operand.
func (self _Instr) vlen() int {
    return int((*rt.GoType)(self.p).Size)
}
  287. func (self _Instr) isBranch() bool {
  288. switch self.op() {
  289. case _OP_goto : fallthrough
  290. case _OP_switch : fallthrough
  291. case _OP_is_null : fallthrough
  292. case _OP_is_null_quote : fallthrough
  293. case _OP_check_char : return true
  294. default : return false
  295. }
  296. }
  297. func (self _Instr) disassemble() string {
  298. switch self.op() {
  299. case _OP_dyn : fallthrough
  300. case _OP_deref : fallthrough
  301. case _OP_map_key_i8 : fallthrough
  302. case _OP_map_key_i16 : fallthrough
  303. case _OP_map_key_i32 : fallthrough
  304. case _OP_map_key_i64 : fallthrough
  305. case _OP_map_key_u8 : fallthrough
  306. case _OP_map_key_u16 : fallthrough
  307. case _OP_map_key_u32 : fallthrough
  308. case _OP_map_key_u64 : fallthrough
  309. case _OP_map_key_f32 : fallthrough
  310. case _OP_map_key_f64 : fallthrough
  311. case _OP_map_key_str : fallthrough
  312. case _OP_map_key_utext : fallthrough
  313. case _OP_map_key_utext_p : fallthrough
  314. case _OP_slice_init : fallthrough
  315. case _OP_slice_append : fallthrough
  316. case _OP_unmarshal : fallthrough
  317. case _OP_unmarshal_p : fallthrough
  318. case _OP_unmarshal_text : fallthrough
  319. case _OP_unmarshal_text_p : fallthrough
  320. case _OP_recurse : return fmt.Sprintf("%-18s%s", self.op(), self.vt())
  321. case _OP_goto : fallthrough
  322. case _OP_is_null_quote : fallthrough
  323. case _OP_is_null : return fmt.Sprintf("%-18sL_%d", self.op(), self.vi())
  324. case _OP_index : fallthrough
  325. case _OP_array_clear : fallthrough
  326. case _OP_array_clear_p : return fmt.Sprintf("%-18s%d", self.op(), self.vi())
  327. case _OP_switch : return fmt.Sprintf("%-18s%s", self.op(), self.formatSwitchLabels())
  328. case _OP_struct_field : return fmt.Sprintf("%-18s%s", self.op(), self.formatStructFields())
  329. case _OP_match_char : return fmt.Sprintf("%-18s%s", self.op(), strconv.QuoteRune(rune(self.vb())))
  330. case _OP_check_char : return fmt.Sprintf("%-18sL_%d, %s", self.op(), self.vi(), strconv.QuoteRune(rune(self.vb())))
  331. default : return self.op().String()
  332. }
  333. }
  334. func (self _Instr) formatSwitchLabels() string {
  335. var i int
  336. var v int
  337. var m []string
  338. /* format each label */
  339. for i, v = range self.vs() {
  340. m = append(m, fmt.Sprintf("%d=L_%d", i, v))
  341. }
  342. /* join them with "," */
  343. return strings.Join(m, ", ")
  344. }
  345. func (self _Instr) formatStructFields() string {
  346. var i uint64
  347. var r []string
  348. var m []struct{i int; n string}
  349. /* extract all the fields */
  350. for i = 0; i < self.vf().N; i++ {
  351. if v := self.vf().At(i); v.Hash != 0 {
  352. m = append(m, struct{i int; n string}{i: v.ID, n: v.Name})
  353. }
  354. }
  355. /* sort by field name */
  356. sort.Slice(m, func(i, j int) bool {
  357. return m[i].n < m[j].n
  358. })
  359. /* format each field */
  360. for _, v := range m {
  361. r = append(r, fmt.Sprintf("%s=%d", v.n, v.i))
  362. }
  363. /* join them with "," */
  364. return strings.Join(r, ", ")
  365. }
type (
    // _Program is a sequence of IL instructions produced by the compiler.
    _Program []_Instr
)

// pc returns the current program counter, i.e. the index of the next
// instruction to be appended.
func (self _Program) pc() int {
    return len(self)
}
  372. func (self _Program) tag(n int) {
  373. if n >= _MaxStack {
  374. panic("type nesting too deep")
  375. }
  376. }
  377. func (self _Program) pin(i int) {
  378. v := &self[i]
  379. v.u &= 0xffff000000000000
  380. v.u |= rt.PackInt(self.pc())
  381. }
  382. func (self _Program) rel(v []int) {
  383. for _, i := range v {
  384. self.pin(i)
  385. }
  386. }
// add appends an instruction with no operand.
func (self *_Program) add(op _Op) {
    *self = append(*self, newInsOp(op))
}

// int appends an instruction with an integer operand (branch target, index, ...).
func (self *_Program) int(op _Op, vi int) {
    *self = append(*self, newInsVi(op, vi))
}

// chr appends an instruction with a character operand.
func (self *_Program) chr(op _Op, vb byte) {
    *self = append(*self, newInsVb(op, vb))
}

// tab appends an instruction with a jump-table operand.
func (self *_Program) tab(op _Op, vs []int) {
    *self = append(*self, newInsVs(op, vs))
}

// rtt appends an instruction with a type operand.
func (self *_Program) rtt(op _Op, vt reflect.Type) {
    *self = append(*self, newInsVt(op, vt))
}

// fmv appends an instruction with a field-map operand.
func (self *_Program) fmv(op _Op, vf *caching.FieldMap) {
    *self = append(*self, newInsVf(op, vf))
}
// disassemble renders the whole program as human-readable text: a prescan
// collects every branch target, then each instruction is printed with an
// "L_n:" label where a branch may land.
func (self _Program) disassemble() string {
    nb := len(self)
    tab := make([]bool, nb+1)
    ret := make([]string, 0, nb+1)

    /* prescan to get all the labels */
    for _, ins := range self {
        if ins.isBranch() {
            if ins.op() != _OP_switch {
                tab[ins.vi()] = true
            } else {
                // a switch carries one target per table slot
                for _, v := range ins.vs() {
                    tab[v] = true
                }
            }
        }
    }

    /* disassemble each instruction */
    for i, ins := range self {
        if !tab[i] {
            ret = append(ret, "\t"+ins.disassemble())
        } else {
            ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
        }
    }

    /* add the last label, if needed */
    if tab[nb] {
        ret = append(ret, fmt.Sprintf("L_%d:", nb))
    }

    /* add an "end" indicator, and join all the strings */
    return strings.Join(append(ret, "\tend"), "\n")
}
// _Compiler translates a reflect.Type into a decoder IL program.
type _Compiler struct {
    opts option.CompileOptions  // inlining / recursion limits
    tab  map[reflect.Type]bool  // types currently being compiled (cycle detection)
    rec  map[reflect.Type]bool  // types deferred to recursive compilation
}
  441. func newCompiler() *_Compiler {
  442. return &_Compiler {
  443. opts: option.DefaultCompileOptions(),
  444. tab: map[reflect.Type]bool{},
  445. rec: map[reflect.Type]bool{},
  446. }
  447. }
// apply overrides the compile options and returns the compiler for chaining.
func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
    self.opts = opts
    return self
}
  452. func (self *_Compiler) rescue(ep *error) {
  453. if val := recover(); val != nil {
  454. if err, ok := val.(error); ok {
  455. *ep = err
  456. } else {
  457. panic(val)
  458. }
  459. }
  460. }
// compile builds the IL program for vt, converting compile-time panics into
// the returned error via rescue.
func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) {
    defer self.rescue(&err)
    self.compileOne(&ret, 0, vt)
    return
}
// compileOne emits the decoding program for vt at nesting depth sp. Custom
// unmarshalers take priority over the kind-based dispatcher, and types that
// are already on the compilation stack (self.tab) are emitted as _OP_recurse
// to break cycles.
func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
    /* check for recursive nesting */
    ok := self.tab[vt]
    if ok {
        p.rtt(_OP_recurse, vt)
        return
    }

    pt := reflect.PtrTo(vt)

    /* check for `json.Unmarshaler` with pointer receiver */
    if pt.Implements(jsonUnmarshalerType) {
        p.rtt(_OP_unmarshal_p, pt)
        return
    }

    /* check for `json.Unmarshaler` */
    if vt.Implements(jsonUnmarshalerType) {
        p.add(_OP_lspace)
        self.compileUnmarshalJson(p, vt)
        return
    }

    /* check for `encoding.TextUnmarshaler` with pointer receiver */
    if pt.Implements(encodingTextUnmarshalerType) {
        p.add(_OP_lspace)
        self.compileUnmarshalTextPtr(p, pt)
        return
    }

    /* check for `encoding.TextUnmarshaler` */
    if vt.Implements(encodingTextUnmarshalerType) {
        p.add(_OP_lspace)
        self.compileUnmarshalText(p, vt)
        return
    }

    /* enter the recursion */
    p.add(_OP_lspace)
    self.tab[vt] = true
    self.compileOps(p, sp, vt)
    delete(self.tab, vt)
}
// compileOps dispatches compilation on the reflect.Kind of vt. Unsupported
// kinds (Chan, Func, Complex, ...) panic with a json.UnmarshalTypeError,
// which compile()'s rescue turns into an error.
func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
    switch vt.Kind() {
    case reflect.Bool:      self.compilePrimitive(vt, p, _OP_bool)
    case reflect.Int:       self.compilePrimitive(vt, p, _OP_int())
    case reflect.Int8:      self.compilePrimitive(vt, p, _OP_i8)
    case reflect.Int16:     self.compilePrimitive(vt, p, _OP_i16)
    case reflect.Int32:     self.compilePrimitive(vt, p, _OP_i32)
    case reflect.Int64:     self.compilePrimitive(vt, p, _OP_i64)
    case reflect.Uint:      self.compilePrimitive(vt, p, _OP_uint())
    case reflect.Uint8:     self.compilePrimitive(vt, p, _OP_u8)
    case reflect.Uint16:    self.compilePrimitive(vt, p, _OP_u16)
    case reflect.Uint32:    self.compilePrimitive(vt, p, _OP_u32)
    case reflect.Uint64:    self.compilePrimitive(vt, p, _OP_u64)
    case reflect.Uintptr:   self.compilePrimitive(vt, p, _OP_uintptr())
    case reflect.Float32:   self.compilePrimitive(vt, p, _OP_f32)
    case reflect.Float64:   self.compilePrimitive(vt, p, _OP_f64)
    case reflect.String:    self.compileString(p, vt)
    case reflect.Array:     self.compileArray(p, sp, vt)
    case reflect.Interface: self.compileInterface(p, vt)
    case reflect.Map:       self.compileMap(p, sp, vt)
    case reflect.Ptr:       self.compilePtr(p, sp, vt)
    case reflect.Slice:     self.compileSlice(p, sp, vt)
    case reflect.Struct:    self.compileStruct(p, sp, vt)
    default:                panic(&json.UnmarshalTypeError{Type: vt})
    }
}
  529. func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
  530. if reflect.PtrTo(vt.Key()).Implements(encodingTextUnmarshalerType) {
  531. self.compileMapOp(p, sp, vt, _OP_map_key_utext_p)
  532. } else if vt.Key().Implements(encodingTextUnmarshalerType) {
  533. self.compileMapOp(p, sp, vt, _OP_map_key_utext)
  534. } else {
  535. self.compileMapUt(p, sp, vt)
  536. }
  537. }
// compileMapUt selects the map-key opcode from the key's kind; keys of any
// other kind panic with a json.UnmarshalTypeError.
func (self *_Compiler) compileMapUt(p *_Program, sp int, vt reflect.Type) {
    switch vt.Key().Kind() {
    case reflect.Int:     self.compileMapOp(p, sp, vt, _OP_map_key_int())
    case reflect.Int8:    self.compileMapOp(p, sp, vt, _OP_map_key_i8)
    case reflect.Int16:   self.compileMapOp(p, sp, vt, _OP_map_key_i16)
    case reflect.Int32:   self.compileMapOp(p, sp, vt, _OP_map_key_i32)
    case reflect.Int64:   self.compileMapOp(p, sp, vt, _OP_map_key_i64)
    case reflect.Uint:    self.compileMapOp(p, sp, vt, _OP_map_key_uint())
    case reflect.Uint8:   self.compileMapOp(p, sp, vt, _OP_map_key_u8)
    case reflect.Uint16:  self.compileMapOp(p, sp, vt, _OP_map_key_u16)
    case reflect.Uint32:  self.compileMapOp(p, sp, vt, _OP_map_key_u32)
    case reflect.Uint64:  self.compileMapOp(p, sp, vt, _OP_map_key_u64)
    case reflect.Uintptr: self.compileMapOp(p, sp, vt, _OP_map_key_uintptr())
    case reflect.Float32: self.compileMapOp(p, sp, vt, _OP_map_key_f32)
    case reflect.Float64: self.compileMapOp(p, sp, vt, _OP_map_key_f64)
    case reflect.String:  self.compileMapOp(p, sp, vt, _OP_map_key_str)
    default:              panic(&json.UnmarshalTypeError{Type: vt})
    }
}
// compileMapOp emits the decoding loop for map type vt whose keys are decoded
// by opcode op: "null" stores a nil map, '{' starts the object, then a first
// key/value pair and a comma-separated loop run until '}' is seen.
func (self *_Compiler) compileMapOp(p *_Program, sp int, vt reflect.Type, op _Op) {
    i := p.pc()
    p.add(_OP_is_null)                    // "null" jumps to the nil_1 tail
    p.tag(sp + 1)
    skip := self.checkIfSkip(p, vt, '{')  // mismatching first char: record error, skip value
    p.add(_OP_save)
    p.add(_OP_map_init)
    p.add(_OP_save)
    p.add(_OP_lspace)
    j := p.pc()
    p.chr(_OP_check_char, '}')            // empty object
    p.chr(_OP_match_char, '"')
    skip2 := p.pc()
    p.rtt(op, vt)                         // decode the first key

    /* match the value separator */
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    self.compileOne(p, sp+2, vt.Elem())   // decode the first value
    p.pin(skip2)
    p.add(_OP_load)
    k0 := p.pc()
    p.add(_OP_lspace)
    k1 := p.pc()
    p.chr(_OP_check_char, '}')            // end of object
    p.chr(_OP_match_char, ',')
    p.add(_OP_lspace)
    p.chr(_OP_match_char, '"')
    skip3 := p.pc()
    p.rtt(op, vt)                         // decode the next key

    /* match the value separator */
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    self.compileOne(p, sp+2, vt.Elem())   // decode the next value
    p.pin(skip3)
    p.add(_OP_load)
    p.int(_OP_goto, k0)                   // loop back for more pairs
    p.pin(j)
    p.pin(k1)
    p.add(_OP_drop_2)                     // pop the two saved slots
    x := p.pc()
    p.add(_OP_goto)                       // jump over the nil_1 tail
    p.pin(i)
    p.add(_OP_nil_1)                      // "null": store a nil map
    p.pin(skip)
    p.pin(x)
}
// compilePtr emits the program for a pointer type: nested pointers are
// dereferenced (allocating as needed via _OP_deref) until a non-pointer
// element is reached, unmarshaler interfaces on intermediate pointer types
// short-circuit, and "null" produces a nil pointer.
func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)

    /* dereference all the way down */
    for et.Kind() == reflect.Ptr {
        if et.Implements(jsonUnmarshalerType) {
            p.rtt(_OP_unmarshal_p, et)
            return
        }

        if et.Implements(encodingTextUnmarshalerType) {
            p.add(_OP_lspace)
            self.compileUnmarshalTextPtr(p, et)
            return
        }

        et = et.Elem()
        p.rtt(_OP_deref, et)
    }

    /* check for recursive nesting */
    ok := self.tab[et]
    if ok {
        p.rtt(_OP_recurse, et)
    } else {
        /* enter the recursion */
        p.add(_OP_lspace)
        self.tab[et] = true

        /* not inline the pointer type
         * recursing the defined pointer type's elem will cause issue379.
         */
        self.compileOps(p, sp, et)
    }

    delete(self.tab, et)

    j := p.pc()
    p.add(_OP_goto)
    p.pin(i)          // "null" lands here
    p.add(_OP_nil_1)  // store a nil pointer
    p.pin(j)
}
// compileArray emits the program for a fixed-size array: each of the Len()
// elements is decoded in sequence; excess JSON items are skipped and, when
// the JSON array ends early, the remaining elements are zeroed with an
// array_clear opcode (the _p variant when the element type contains pointers).
func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type) {
    x := p.pc()
    p.add(_OP_is_null)
    p.tag(sp)
    skip := self.checkIfSkip(p, vt, '[')
    p.add(_OP_save)
    p.add(_OP_lspace)
    v := []int{p.pc()}
    p.chr(_OP_check_char, ']')

    /* decode every item */
    for i := 1; i <= vt.Len(); i++ {
        self.compileOne(p, sp+1, vt.Elem())
        p.add(_OP_load)
        p.int(_OP_index, i*int(vt.Elem().Size()))
        p.add(_OP_lspace)
        v = append(v, p.pc())
        p.chr(_OP_check_char, ']')
        p.chr(_OP_match_char, ',')
    }

    /* drop rest of the array */
    p.add(_OP_array_skip)
    w := p.pc()
    p.add(_OP_goto)
    p.rel(v)  // every early-']' check jumps to the clearing code

    /* check for pointer data */
    if rt.UnpackType(vt.Elem()).PtrData == 0 {
        p.int(_OP_array_clear, int(vt.Size()))
    } else {
        p.int(_OP_array_clear_p, int(vt.Size()))
    }

    /* restore the stack */
    p.pin(w)
    p.add(_OP_drop)
    p.pin(skip)
    p.pin(x)
}
  676. func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
  677. if vt.Elem().Kind() == byteType.Kind() {
  678. self.compileSliceBin(p, sp, vt)
  679. } else {
  680. self.compileSliceList(p, sp, vt)
  681. }
  682. }
// compileSliceBin emits the program for []byte-like slices: a '"' value is
// decoded as base64 (_OP_bin), a '[' value falls back to the element-wise
// list decoder, and "null" stores a nil slice.
func (self *_Compiler) compileSliceBin(p *_Program, sp int, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)
    j := p.pc()
    p.chr(_OP_check_char, '[')            // '[': decode as a JSON array
    skip := self.checkIfSkip(p, vt, '"')
    k := p.pc()
    p.chr(_OP_check_char, '"')
    p.add(_OP_bin)                        // base64-decode the string body
    x := p.pc()
    p.add(_OP_goto)
    p.pin(j)
    self.compileSliceBody(p, sp, vt.Elem())
    y := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.pin(k)
    p.add(_OP_nil_3)                      // "null" / empty: store a nil slice
    p.pin(x)
    p.pin(skip)
    p.pin(y)
}
// compileSliceList emits the program for a generic slice: '[' starts the
// element loop (compileSliceBody) and "null" stores a nil slice.
func (self *_Compiler) compileSliceList(p *_Program, sp int, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)
    p.tag(sp)
    skip := self.checkIfSkip(p, vt, '[')
    self.compileSliceBody(p, sp, vt.Elem())
    x := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.add(_OP_nil_3)  // "null": store a nil slice
    p.pin(x)
    p.pin(skip)
}
// compileSliceBody emits the element loop shared by both slice decoders:
// handle the empty array, initialize the slice, then append-and-decode
// elements separated by ',' until ']' is seen.
func (self *_Compiler) compileSliceBody(p *_Program, sp int, et reflect.Type) {
    p.add(_OP_lspace)
    j := p.pc()
    p.chr(_OP_check_empty, ']')           // "[]": nothing to do
    p.rtt(_OP_slice_init, et)
    p.add(_OP_save)
    p.rtt(_OP_slice_append, et)
    self.compileOne(p, sp+1, et)          // decode the first element
    p.add(_OP_load)
    k0 := p.pc()
    p.add(_OP_lspace)
    k1 := p.pc()
    p.chr(_OP_check_char, ']')            // end of array
    p.chr(_OP_match_char, ',')
    p.rtt(_OP_slice_append, et)
    self.compileOne(p, sp+1, et)          // decode the next element
    p.add(_OP_load)
    p.int(_OP_goto, k0)                   // loop back
    p.pin(k1)
    p.add(_OP_drop)
    p.pin(j)
}
  740. func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
  741. if vt == jsonNumberType {
  742. self.compilePrimitive(vt, p, _OP_num)
  743. } else {
  744. self.compileStringBody(vt, p)
  745. }
  746. }
// compileStringBody emits the quoted-string program: "null" leaves the value
// untouched, a leading '"' decodes the string, anything else records a
// mismatch and skips the value.
func (self *_Compiler) compileStringBody(vt reflect.Type, p *_Program) {
    i := p.pc()
    p.add(_OP_is_null)
    skip := self.checkIfSkip(p, vt, '"')
    p.add(_OP_str)
    p.pin(i)
    p.pin(skip)
}
  755. func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
  756. if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
  757. p.rtt(_OP_recurse, vt)
  758. if self.opts.RecursiveDepth > 0 {
  759. self.rec[vt] = true
  760. }
  761. } else {
  762. self.compileStructBody(p, sp, vt)
  763. }
  764. }
// compileStructBody emits the inline decoding program for struct vt: match
// '{', look each field name up in the field map (_OP_struct_field), then jump
// through the switch table (sw) to that field's decoder; names not in the map
// are skipped via _OP_object_next.
func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
    fv := resolver.ResolveStruct(vt)
    fm, sw := caching.CreateFieldMap(len(fv)), make([]int, len(fv))

    /* start of object */
    p.tag(sp)
    n := p.pc()
    p.add(_OP_is_null)
    skip := self.checkIfSkip(p, vt, '{')
    p.add(_OP_save)
    p.add(_OP_lspace)
    x := p.pc()
    p.chr(_OP_check_char, '}')            // empty object
    p.chr(_OP_match_char, '"')
    p.fmv(_OP_struct_field, fm)           // resolve the first field name
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    p.tab(_OP_switch, sw)                 // jump to the field's decoder
    p.add(_OP_object_next)                // unknown field: skip its value
    y0 := p.pc()
    p.add(_OP_lspace)
    y1 := p.pc()
    p.chr(_OP_check_char, '}')
    p.chr(_OP_match_char, ',')

    /* special case of an empty struct */
    if len(fv) == 0 {
        p.add(_OP_object_skip)
        goto end_of_object
    }

    /* match the remaining fields */
    p.add(_OP_lspace)
    p.chr(_OP_match_char, '"')
    p.fmv(_OP_struct_field, fm)
    p.add(_OP_lspace)
    p.chr(_OP_match_char, ':')
    p.tab(_OP_switch, sw)
    p.add(_OP_object_next)
    p.int(_OP_goto, y0)

    /* process each field */
    for i, f := range fv {
        sw[i] = p.pc()                    // switch table slot -> this field's code
        fm.Set(f.Name, i)

        /* index to the field */
        for _, o := range f.Path {
            if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
                p.rtt(_OP_deref, o.Type)
            }
        }

        /* check for "stringnize" option */
        if (f.Opts & resolver.F_stringize) == 0 {
            self.compileOne(p, sp+1, f.Type)
        } else {
            self.compileStructFieldStr(p, sp+1, f.Type)
        }

        /* load the state, and try next field */
        p.add(_OP_load)
        p.int(_OP_goto, y0)
    }

end_of_object:
    p.pin(x)
    p.pin(y1)
    p.add(_OP_drop)
    p.pin(n)
    p.pin(skip)
}
// compileStructFieldStr emits the decoder for a struct field carrying the
// `json:",string"` option: the value arrives wrapped in quotes, so a leading
// '"' is matched, the underlying primitive is decoded from the quoted body,
// and the closing quote is consumed. Types that cannot be stringized fall
// back to the regular path; pointers get an extra "null"-inside-quotes case
// that stores nil.
func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
    n1 := -1
    ft := vt
    sv := false

    /* dereference the pointer if needed */
    if ft.Kind() == reflect.Ptr {
        ft = ft.Elem()
    }

    /* check if it can be stringized */
    switch ft.Kind() {
    case reflect.Bool    : sv = true
    case reflect.Int     : sv = true
    case reflect.Int8    : sv = true
    case reflect.Int16   : sv = true
    case reflect.Int32   : sv = true
    case reflect.Int64   : sv = true
    case reflect.Uint    : sv = true
    case reflect.Uint8   : sv = true
    case reflect.Uint16  : sv = true
    case reflect.Uint32  : sv = true
    case reflect.Uint64  : sv = true
    case reflect.Uintptr : sv = true
    case reflect.Float32 : sv = true
    case reflect.Float64 : sv = true
    case reflect.String  : sv = true
    }

    /* if it's not, ignore the "string" and follow the regular path */
    if !sv {
        self.compileOne(p, sp, vt)
        return
    }

    /* remove the leading space, and match the leading quote */
    vk := vt.Kind()
    p.add(_OP_lspace)
    n0 := p.pc()
    p.add(_OP_is_null)
    skip := self.checkIfSkip(p, stringType, '"')

    /* also check for inner "null" */
    n1 = p.pc()
    p.add(_OP_is_null_quote)

    /* dereference the pointer only when it is not null */
    if vk == reflect.Ptr {
        vt = vt.Elem()
        p.rtt(_OP_deref, vt)
    }

    n2 := p.pc()
    p.chr(_OP_check_char_0, '"')

    /* string opcode selector */
    _OP_string := func() _Op {
        if ft == jsonNumberType {
            return _OP_num
        } else {
            return _OP_unquote
        }
    }

    /* compile for each type */
    switch vt.Kind() {
    case reflect.Bool    : p.add(_OP_bool)
    case reflect.Int     : p.add(_OP_int())
    case reflect.Int8    : p.add(_OP_i8)
    case reflect.Int16   : p.add(_OP_i16)
    case reflect.Int32   : p.add(_OP_i32)
    case reflect.Int64   : p.add(_OP_i64)
    case reflect.Uint    : p.add(_OP_uint())
    case reflect.Uint8   : p.add(_OP_u8)
    case reflect.Uint16  : p.add(_OP_u16)
    case reflect.Uint32  : p.add(_OP_u32)
    case reflect.Uint64  : p.add(_OP_u64)
    case reflect.Uintptr : p.add(_OP_uintptr())
    case reflect.Float32 : p.add(_OP_f32)
    case reflect.Float64 : p.add(_OP_f64)
    case reflect.String  : p.add(_OP_string())
    default              : panic("not reachable")
    }

    /* the closing quote is not needed when parsing a pure string */
    if vt == jsonNumberType || vt.Kind() != reflect.String {
        p.chr(_OP_match_char, '"')
    }

    /* pin the `is_null_quote` jump location */
    if n1 != -1 && vk != reflect.Ptr {
        p.pin(n1)
    }

    /* "null" but not a pointer, act as if the field is not present */
    if vk != reflect.Ptr {
        pc2 := p.pc()
        p.add(_OP_goto)
        p.pin(n2)
        p.rtt(_OP_dismatch_err, vt)
        p.int(_OP_add, 1)
        p.pin(pc2)
        p.pin(n0)
        return
    }

    /* the "null" case of the pointer */
    pc := p.pc()
    p.add(_OP_goto)
    p.pin(n0)        // `is_null` jump location
    p.pin(n1)        // `is_null_quote` jump location
    p.add(_OP_nil_1) // store a nil pointer
    pc2 := p.pc()
    p.add(_OP_goto)
    p.pin(n2)
    p.rtt(_OP_dismatch_err, vt)
    p.int(_OP_add, 1)
    p.pin(pc)
    p.pin(pc2)
    p.pin(skip)
}
// compileInterface emits the program for an interface value: empty
// interfaces take _OP_any (decode whatever the JSON holds), non-empty
// interfaces dispatch dynamically via _OP_dyn, and "null" zeroes the
// interface with _OP_nil_2.
func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)

    /* check for empty interface */
    if vt.NumMethod() == 0 {
        p.add(_OP_any)
    } else {
        p.rtt(_OP_dyn, vt)
    }

    /* finish the OpCode */
    j := p.pc()
    p.add(_OP_goto)
    p.pin(i)
    p.add(_OP_nil_2)
    p.pin(j)
}
// compilePrimitive emits the program for a scalar type: a "null" check
// followed by the single decoding opcode op.
func (self *_Compiler) compilePrimitive(vt reflect.Type, p *_Program, op _Op) {
    i := p.pc()
    p.add(_OP_is_null)
    // skip := self.checkPrimitive(p, vt)
    p.add(op)
    p.pin(i)
    // p.pin(skip)
}
// compileUnmarshalEnd finishes an unmarshaler call started at pc i: for
// non-pointer targets the "null" check simply falls through, while for
// pointers "null" stores nil instead of invoking the unmarshaler.
func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int) {
    j := p.pc()
    k := vt.Kind()

    /* not a pointer */
    if k != reflect.Ptr {
        p.pin(i)
        return
    }

    /* it seems that in Go JSON library, "null" takes priority over any kind of unmarshaler */
    p.add(_OP_goto)
    p.pin(i)
    p.add(_OP_nil_1)
    p.pin(j)
}
  975. func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
  976. i := p.pc()
  977. v := _OP_unmarshal
  978. p.add(_OP_is_null)
  979. /* check for dynamic interface */
  980. if vt.Kind() == reflect.Interface {
  981. v = _OP_dyn
  982. }
  983. /* call the unmarshaler */
  984. p.rtt(v, vt)
  985. self.compileUnmarshalEnd(p, vt, i)
  986. }
  987. func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
  988. i := p.pc()
  989. v := _OP_unmarshal_text
  990. p.add(_OP_is_null)
  991. /* check for dynamic interface */
  992. if vt.Kind() == reflect.Interface {
  993. v = _OP_dyn
  994. } else {
  995. p.chr(_OP_match_char, '"')
  996. }
  997. /* call the unmarshaler */
  998. p.rtt(v, vt)
  999. self.compileUnmarshalEnd(p, vt, i)
  1000. }
// compileUnmarshalTextPtr emits a pointer-receiver encoding.TextUnmarshaler
// call: "null" falls through, otherwise the opening quote is matched and the
// unmarshaler invoked.
func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
    i := p.pc()
    p.add(_OP_is_null)
    p.chr(_OP_match_char, '"')
    p.rtt(_OP_unmarshal_text_p, vt)
    p.pin(i)
}
// checkIfSkip emits a check for the expected leading character c: on
// mismatch it records a type-mismatch error (_OP_dismatch_err) and skips the
// whole JSON value via _OP_go_skip. The returned pc (the go_skip slot) must
// be pinned by the caller at the point where decoding should resume; on a
// match, _OP_add advances past the consumed character.
func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
    j := p.pc()
    p.chr(_OP_check_char_0, c)
    p.rtt(_OP_dismatch_err, vt)
    s := p.pc()
    p.add(_OP_go_skip)
    p.pin(j)
    p.int(_OP_add, 1)
    return s
}