pools.go 4.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193
  1. /*
  2. * Copyright 2021 ByteDance Inc.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. package encoder
  17. import (
  18. `bytes`
  19. `sync`
  20. `unsafe`
  21. `errors`
  22. `reflect`
  23. `github.com/bytedance/sonic/internal/caching`
  24. `github.com/bytedance/sonic/option`
  25. `github.com/bytedance/sonic/internal/rt`
  26. )
const (
	// _MaxStack is the maximum number of _State slots the encoder's
	// working stack can hold (4k states).
	_MaxStack = 4096 // 4k states

	// _StackSize is the byte size of an entire _Stack, used by
	// resetStack to wipe a pooled stack in one bulk clear.
	_StackSize = unsafe.Sizeof(_Stack{})
)
var (
	// bytesPool recycles output []byte buffers (see newBytes / freeBytes).
	bytesPool = sync.Pool{}

	// stackPool recycles *_Stack working stacks (see newStack / freeStack).
	stackPool = sync.Pool{}

	// bufferPool recycles *bytes.Buffer values (see newBuffer / freeBuffer).
	bufferPool = sync.Pool{}

	// programCache caches compiled encoder programs, keyed by Go type
	// (see findOrCompile / pretouchType).
	programCache = caching.CreateProgramCache()
)
// _State is a single slot of the encoder's working stack (see _Stack).
// The meaning of each field is assigned by the generated encoder code;
// this file only relies on the layout so the stack can be bulk-zeroed.
type _State struct {
	x int
	f uint64
	p unsafe.Pointer
	q unsafe.Pointer
}
// _Stack is the fixed-size working stack shared with the compiled
// encoder: sp is the current depth and sb holds up to _MaxStack saved
// states. Instances are pooled via stackPool.
type _Stack struct {
	sp uint64
	sb [_MaxStack]_State
}
// _Encoder is the calling signature of a compiled encoder function:
// rb is the output buffer to append to, vp points at the value being
// encoded, sb is the working stack, and fv carries flag bits whose
// semantics are defined by the generated code.
type _Encoder func(
	rb *[]byte,
	vp unsafe.Pointer,
	sb *_Stack,
	fv uint64,
) error
// _KeepAlive mirrors every argument and frame slot of _Encoder_Shadow;
// assigning into it keeps those values reachable by the GC (see the
// comments in _Encoder_Shadow).
var _KeepAlive struct {
	rb    *[]byte
	vp    unsafe.Pointer
	sb    *_Stack
	fv    uint64
	err   error
	frame [_FP_offs]byte
}

// errCallShadow is the sentinel returned by _Encoder_Shadow, which
// must never be called for real.
var errCallShadow = errors.New("DON'T CALL THIS!")
// _Encoder_Shadow is a faker func of _Encoder, used only to export its
// stackmap as _Encoder's for the JIT-assembled code. It is never meant
// to be invoked and always returns errCallShadow.
func _Encoder_Shadow(rb *[]byte, vp unsafe.Pointer, sb *_Stack, fv uint64) (err error) {
	// align to assembler_amd64.go: _FP_offs
	var frame [_FP_offs]byte

	// must keep all args and frames noticeable to GC
	_KeepAlive.rb = rb
	_KeepAlive.vp = vp
	_KeepAlive.sb = sb
	_KeepAlive.fv = fv
	_KeepAlive.err = err
	_KeepAlive.frame = frame

	return errCallShadow
}
  75. func newBytes() []byte {
  76. if ret := bytesPool.Get(); ret != nil {
  77. return ret.([]byte)
  78. } else {
  79. return make([]byte, 0, option.DefaultEncoderBufferSize)
  80. }
  81. }
  82. func newStack() *_Stack {
  83. if ret := stackPool.Get(); ret == nil {
  84. return new(_Stack)
  85. } else {
  86. return ret.(*_Stack)
  87. }
  88. }
// resetStack wipes the entire stack (header and all _State slots,
// including pointer fields) back to zero in one bulk clear, so a
// pooled stack does not retain references into previously encoded
// values. NOTE(review): memclrNoHeapPointers on pointer-holding
// memory presumably relies on the clear racing nothing — confirm
// against the runtime linkname declaration.
func resetStack(p *_Stack) {
	memclrNoHeapPointers(unsafe.Pointer(p), _StackSize)
}
  92. func newBuffer() *bytes.Buffer {
  93. if ret := bufferPool.Get(); ret != nil {
  94. return ret.(*bytes.Buffer)
  95. } else {
  96. return bytes.NewBuffer(make([]byte, 0, option.DefaultEncoderBufferSize))
  97. }
  98. }
  99. func freeBytes(p []byte) {
  100. p = p[:0]
  101. bytesPool.Put(p)
  102. }
  103. func freeStack(p *_Stack) {
  104. p.sp = 0
  105. stackPool.Put(p)
  106. }
  107. func freeBuffer(p *bytes.Buffer) {
  108. p.Reset()
  109. bufferPool.Put(p)
  110. }
  111. func makeEncoder(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
  112. if pp, err := newCompiler().compile(vt.Pack(), ex[0].(bool)); err != nil {
  113. return nil, err
  114. } else {
  115. as := newAssembler(pp)
  116. as.name = vt.String()
  117. return as.Load(), nil
  118. }
  119. }
  120. func findOrCompile(vt *rt.GoType, pv bool) (_Encoder, error) {
  121. if val := programCache.Get(vt); val != nil {
  122. return val.(_Encoder), nil
  123. } else if ret, err := programCache.Compute(vt, makeEncoder, pv); err == nil {
  124. return ret.(_Encoder), nil
  125. } else {
  126. return nil, err
  127. }
  128. }
  129. func pretouchType(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) {
  130. /* compile function */
  131. compiler := newCompiler().apply(opts)
  132. encoder := func(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
  133. if pp, err := compiler.compile(_vt, ex[0].(bool)); err != nil {
  134. return nil, err
  135. } else {
  136. as := newAssembler(pp)
  137. as.name = vt.String()
  138. return as.Load(), nil
  139. }
  140. }
  141. /* find or compile */
  142. vt := rt.UnpackType(_vt)
  143. if val := programCache.Get(vt); val != nil {
  144. return nil, nil
  145. } else if _, err := programCache.Compute(vt, encoder, v == 1); err == nil {
  146. return compiler.rec, nil
  147. } else {
  148. return nil, err
  149. }
  150. }
  151. func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
  152. if opts.RecursiveDepth < 0 || len(vtm) == 0 {
  153. return nil
  154. }
  155. next := make(map[reflect.Type]uint8)
  156. for vt, v := range vtm {
  157. sub, err := pretouchType(vt, opts, v)
  158. if err != nil {
  159. return err
  160. }
  161. for svt, v := range sub {
  162. next[svt] = v
  163. }
  164. }
  165. opts.RecursiveDepth -= 1
  166. return pretouchRec(next, opts)
  167. }