/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
  16. package caching
  17. import (
  18. `sync`
  19. `sync/atomic`
  20. `unsafe`
  21. `github.com/bytedance/sonic/internal/rt`
  22. )
/** Program Map **/

const (
    _LoadFactor   = 0.5  // rehash once occupied/capacity would exceed this ratio
    _InitCapacity = 4096 // must be a power of 2
)
// _ProgramMap is an open-addressing (linear-probing) hash table keyed by
// *rt.GoType. Published instances are treated as immutable snapshots (RCU):
// readers probe without locks, writers mutate a private copy and swap it in.
type _ProgramMap struct {
    n uint64          // number of occupied entries
    m uint32          // bucket mask, always len(b) - 1 (len(b) is a power of 2)
    b []_ProgramEntry // bucket array
}
// _ProgramEntry is a single hash bucket: the type key and its cached value.
// A nil vt marks an empty slot and terminates a probe chain.
type _ProgramEntry struct {
    vt *rt.GoType  // key
    fn interface{} // cached value associated with vt
}
  37. func newProgramMap() *_ProgramMap {
  38. return &_ProgramMap {
  39. n: 0,
  40. m: _InitCapacity - 1,
  41. b: make([]_ProgramEntry, _InitCapacity),
  42. }
  43. }
  44. func (self *_ProgramMap) copy() *_ProgramMap {
  45. fork := &_ProgramMap{
  46. n: self.n,
  47. m: self.m,
  48. b: make([]_ProgramEntry, len(self.b)),
  49. }
  50. for i, f := range self.b {
  51. fork.b[i] = f
  52. }
  53. return fork
  54. }
  55. func (self *_ProgramMap) get(vt *rt.GoType) interface{} {
  56. i := self.m + 1
  57. p := vt.Hash & self.m
  58. /* linear probing */
  59. for ; i > 0; i-- {
  60. if b := self.b[p]; b.vt == vt {
  61. return b.fn
  62. } else if b.vt == nil {
  63. break
  64. } else {
  65. p = (p + 1) & self.m
  66. }
  67. }
  68. /* not found */
  69. return nil
  70. }
  71. func (self *_ProgramMap) add(vt *rt.GoType, fn interface{}) *_ProgramMap {
  72. p := self.copy()
  73. f := float64(atomic.LoadUint64(&p.n) + 1) / float64(p.m + 1)
  74. /* check for load factor */
  75. if f > _LoadFactor {
  76. p = p.rehash()
  77. }
  78. /* insert the value */
  79. p.insert(vt, fn)
  80. return p
  81. }
  82. func (self *_ProgramMap) rehash() *_ProgramMap {
  83. c := (self.m + 1) << 1
  84. r := &_ProgramMap{m: c - 1, b: make([]_ProgramEntry, int(c))}
  85. /* rehash every entry */
  86. for i := uint32(0); i <= self.m; i++ {
  87. if b := self.b[i]; b.vt != nil {
  88. r.insert(b.vt, b.fn)
  89. }
  90. }
  91. /* rebuild successful */
  92. return r
  93. }
  94. func (self *_ProgramMap) insert(vt *rt.GoType, fn interface{}) {
  95. h := vt.Hash
  96. p := h & self.m
  97. /* linear probing */
  98. for i := uint32(0); i <= self.m; i++ {
  99. if b := &self.b[p]; b.vt != nil {
  100. p += 1
  101. p &= self.m
  102. } else {
  103. b.vt = vt
  104. b.fn = fn
  105. atomic.AddUint64(&self.n, 1)
  106. return
  107. }
  108. }
  109. /* should never happens */
  110. panic("no available slots")
  111. }
/** RCU Program Cache **/

// ProgramCache is an RCU-style cache: Get reads the current map snapshot
// with a single atomic pointer load (no locking), while Compute serializes
// writers with the mutex and publishes an updated copy atomically.
type ProgramCache struct {
    m sync.Mutex     // serializes writers (Compute)
    p unsafe.Pointer // *_ProgramMap snapshot; always accessed atomically
}
  117. func CreateProgramCache() *ProgramCache {
  118. return &ProgramCache {
  119. m: sync.Mutex{},
  120. p: unsafe.Pointer(newProgramMap()),
  121. }
  122. }
  123. func (self *ProgramCache) Get(vt *rt.GoType) interface{} {
  124. return (*_ProgramMap)(atomic.LoadPointer(&self.p)).get(vt)
  125. }
// Compute returns the value cached for vt, computing and caching it via
// compute(vt, ex...) on a miss. Writers are serialized by the mutex; the
// updated map is published with an atomic pointer store so that lock-free
// readers (Get) always observe a consistent snapshot. On compute failure
// the error is returned and nothing is cached.
func (self *ProgramCache) Compute(vt *rt.GoType, compute func(*rt.GoType, ...interface{}) (interface{}, error), ex ...interface{}) (interface{}, error) {
    var err error
    var val interface{}

    /* use defer to prevent inlining of this function */
    self.m.Lock()
    defer self.m.Unlock()

    /* double check with write lock held: another writer may have
     * cached the value while we were waiting for the lock */
    if val = self.Get(vt); val != nil {
        return val, nil
    }

    /* compute the value */
    if val, err = compute(vt, ex...); err != nil {
        return nil, err
    }

    /* update the RCU cache: copy-on-write via add, then publish the new
     * snapshot with a single atomic pointer store */
    atomic.StorePointer(&self.p, unsafe.Pointer((*_ProgramMap)(atomic.LoadPointer(&self.p)).add(vt, val)))
    return val, nil
}