compiler.go 30 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158
  1. /*
  2. * Copyright 2021 ByteDance Inc.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. package decoder
  17. import (
  18. `encoding/json`
  19. `fmt`
  20. `reflect`
  21. `sort`
  22. `strconv`
  23. `strings`
  24. `unsafe`
  25. `github.com/bytedance/sonic/internal/caching`
  26. `github.com/bytedance/sonic/internal/resolver`
  27. `github.com/bytedance/sonic/internal/rt`
  28. `github.com/bytedance/sonic/option`
  29. )
  30. type _Op uint8
  31. const (
  32. _OP_any _Op = iota + 1
  33. _OP_dyn
  34. _OP_str
  35. _OP_bin
  36. _OP_bool
  37. _OP_num
  38. _OP_i8
  39. _OP_i16
  40. _OP_i32
  41. _OP_i64
  42. _OP_u8
  43. _OP_u16
  44. _OP_u32
  45. _OP_u64
  46. _OP_f32
  47. _OP_f64
  48. _OP_unquote
  49. _OP_nil_1
  50. _OP_nil_2
  51. _OP_nil_3
  52. _OP_deref
  53. _OP_index
  54. _OP_is_null
  55. _OP_is_null_quote
  56. _OP_map_init
  57. _OP_map_key_i8
  58. _OP_map_key_i16
  59. _OP_map_key_i32
  60. _OP_map_key_i64
  61. _OP_map_key_u8
  62. _OP_map_key_u16
  63. _OP_map_key_u32
  64. _OP_map_key_u64
  65. _OP_map_key_f32
  66. _OP_map_key_f64
  67. _OP_map_key_str
  68. _OP_map_key_utext
  69. _OP_map_key_utext_p
  70. _OP_array_skip
  71. _OP_array_clear
  72. _OP_array_clear_p
  73. _OP_slice_init
  74. _OP_slice_append
  75. _OP_object_skip
  76. _OP_object_next
  77. _OP_struct_field
  78. _OP_unmarshal
  79. _OP_unmarshal_p
  80. _OP_unmarshal_text
  81. _OP_unmarshal_text_p
  82. _OP_lspace
  83. _OP_match_char
  84. _OP_check_char
  85. _OP_load
  86. _OP_save
  87. _OP_drop
  88. _OP_drop_2
  89. _OP_recurse
  90. _OP_goto
  91. _OP_switch
  92. _OP_check_char_0
  93. _OP_dismatch_err
  94. _OP_go_skip
  95. _OP_add
  96. _OP_check_empty
  97. _OP_debug
  98. )
  99. const (
  100. _INT_SIZE = 32 << (^uint(0) >> 63)
  101. _PTR_SIZE = 32 << (^uintptr(0) >> 63)
  102. _PTR_BYTE = unsafe.Sizeof(uintptr(0))
  103. )
  104. const (
  105. _MAX_ILBUF = 100000 // cutoff at 100k of IL instructions
  106. _MAX_FIELDS = 50 // cutoff at 50 fields struct
  107. )
  108. var _OpNames = [256]string {
  109. _OP_any : "any",
  110. _OP_dyn : "dyn",
  111. _OP_str : "str",
  112. _OP_bin : "bin",
  113. _OP_bool : "bool",
  114. _OP_num : "num",
  115. _OP_i8 : "i8",
  116. _OP_i16 : "i16",
  117. _OP_i32 : "i32",
  118. _OP_i64 : "i64",
  119. _OP_u8 : "u8",
  120. _OP_u16 : "u16",
  121. _OP_u32 : "u32",
  122. _OP_u64 : "u64",
  123. _OP_f32 : "f32",
  124. _OP_f64 : "f64",
  125. _OP_unquote : "unquote",
  126. _OP_nil_1 : "nil_1",
  127. _OP_nil_2 : "nil_2",
  128. _OP_nil_3 : "nil_3",
  129. _OP_deref : "deref",
  130. _OP_index : "index",
  131. _OP_is_null : "is_null",
  132. _OP_is_null_quote : "is_null_quote",
  133. _OP_map_init : "map_init",
  134. _OP_map_key_i8 : "map_key_i8",
  135. _OP_map_key_i16 : "map_key_i16",
  136. _OP_map_key_i32 : "map_key_i32",
  137. _OP_map_key_i64 : "map_key_i64",
  138. _OP_map_key_u8 : "map_key_u8",
  139. _OP_map_key_u16 : "map_key_u16",
  140. _OP_map_key_u32 : "map_key_u32",
  141. _OP_map_key_u64 : "map_key_u64",
  142. _OP_map_key_f32 : "map_key_f32",
  143. _OP_map_key_f64 : "map_key_f64",
  144. _OP_map_key_str : "map_key_str",
  145. _OP_map_key_utext : "map_key_utext",
  146. _OP_map_key_utext_p : "map_key_utext_p",
  147. _OP_array_skip : "array_skip",
  148. _OP_slice_init : "slice_init",
  149. _OP_slice_append : "slice_append",
  150. _OP_object_skip : "object_skip",
  151. _OP_object_next : "object_next",
  152. _OP_struct_field : "struct_field",
  153. _OP_unmarshal : "unmarshal",
  154. _OP_unmarshal_p : "unmarshal_p",
  155. _OP_unmarshal_text : "unmarshal_text",
  156. _OP_unmarshal_text_p : "unmarshal_text_p",
  157. _OP_lspace : "lspace",
  158. _OP_match_char : "match_char",
  159. _OP_check_char : "check_char",
  160. _OP_load : "load",
  161. _OP_save : "save",
  162. _OP_drop : "drop",
  163. _OP_drop_2 : "drop_2",
  164. _OP_recurse : "recurse",
  165. _OP_goto : "goto",
  166. _OP_switch : "switch",
  167. _OP_check_char_0 : "check_char_0",
  168. _OP_dismatch_err : "dismatch_err",
  169. _OP_add : "add",
  170. _OP_go_skip : "go_skip",
  171. _OP_check_empty : "check_empty",
  172. _OP_debug : "debug",
  173. }
  174. func (self _Op) String() string {
  175. if ret := _OpNames[self]; ret != "" {
  176. return ret
  177. } else {
  178. return "<invalid>"
  179. }
  180. }
  181. func _OP_int() _Op {
  182. switch _INT_SIZE {
  183. case 32: return _OP_i32
  184. case 64: return _OP_i64
  185. default: panic("unsupported int size")
  186. }
  187. }
  188. func _OP_uint() _Op {
  189. switch _INT_SIZE {
  190. case 32: return _OP_u32
  191. case 64: return _OP_u64
  192. default: panic("unsupported uint size")
  193. }
  194. }
  195. func _OP_uintptr() _Op {
  196. switch _PTR_SIZE {
  197. case 32: return _OP_u32
  198. case 64: return _OP_u64
  199. default: panic("unsupported pointer size")
  200. }
  201. }
  202. func _OP_map_key_int() _Op {
  203. switch _INT_SIZE {
  204. case 32: return _OP_map_key_i32
  205. case 64: return _OP_map_key_i64
  206. default: panic("unsupported int size")
  207. }
  208. }
  209. func _OP_map_key_uint() _Op {
  210. switch _INT_SIZE {
  211. case 32: return _OP_map_key_u32
  212. case 64: return _OP_map_key_u64
  213. default: panic("unsupported uint size")
  214. }
  215. }
  216. func _OP_map_key_uintptr() _Op {
  217. switch _PTR_SIZE {
  218. case 32: return _OP_map_key_u32
  219. case 64: return _OP_map_key_u64
  220. default: panic("unsupported pointer size")
  221. }
  222. }
// _Instr is one VM instruction: a packed 64-bit word plus an optional pointer
// operand. Layout of u: op in the top 8 bits, vb in the next 8, vi in the
// low 48 (vi may hold an immediate int or len of the []int pointed to by p).
type _Instr struct {
	u uint64         // union {op: 8, vb: 8, vi: 48}, iv maybe int or len([]int)
	p unsafe.Pointer // maybe GoSlice.Data, *GoType or *caching.FieldMap
}
  227. func packOp(op _Op) uint64 {
  228. return uint64(op) << 56
  229. }
  230. func newInsOp(op _Op) _Instr {
  231. return _Instr{u: packOp(op)}
  232. }
  233. func newInsVi(op _Op, vi int) _Instr {
  234. return _Instr{u: packOp(op) | rt.PackInt(vi)}
  235. }
  236. func newInsVb(op _Op, vb byte) _Instr {
  237. return _Instr{u: packOp(op) | (uint64(vb) << 48)}
  238. }
// newInsVs builds an instruction carrying an []int operand: the slice length
// goes into the vi field and the backing-array pointer into p. The slice
// header is reinterpreted via rt.GoSlice to extract the data pointer.
func newInsVs(op _Op, vs []int) _Instr {
	return _Instr{
		u: packOp(op) | rt.PackInt(len(vs)),
		p: (*rt.GoSlice)(unsafe.Pointer(&vs)).Ptr,
	}
}
  245. func newInsVt(op _Op, vt reflect.Type) _Instr {
  246. return _Instr {
  247. u: packOp(op),
  248. p: unsafe.Pointer(rt.UnpackType(vt)),
  249. }
  250. }
  251. func newInsVf(op _Op, vf *caching.FieldMap) _Instr {
  252. return _Instr {
  253. u: packOp(op),
  254. p: unsafe.Pointer(vf),
  255. }
  256. }
  257. func (self _Instr) op() _Op {
  258. return _Op(self.u >> 56)
  259. }
  260. func (self _Instr) vi() int {
  261. return rt.UnpackInt(self.u)
  262. }
  263. func (self _Instr) vb() byte {
  264. return byte(self.u >> 48)
  265. }
// vs reconstructs the []int operand stored by newInsVs: p is the data
// pointer, and vi holds both the length and capacity. The returned slice
// header is assembled in place through rt.GoSlice.
func (self _Instr) vs() (v []int) {
	(*rt.GoSlice)(unsafe.Pointer(&v)).Ptr = self.p
	(*rt.GoSlice)(unsafe.Pointer(&v)).Cap = self.vi()
	(*rt.GoSlice)(unsafe.Pointer(&v)).Len = self.vi()
	return
}
  272. func (self _Instr) vf() *caching.FieldMap {
  273. return (*caching.FieldMap)(self.p)
  274. }
  275. func (self _Instr) vk() reflect.Kind {
  276. return (*rt.GoType)(self.p).Kind()
  277. }
  278. func (self _Instr) vt() reflect.Type {
  279. return (*rt.GoType)(self.p).Pack()
  280. }
  281. func (self _Instr) i64() int64 {
  282. return int64(self.vi())
  283. }
  284. func (self _Instr) vlen() int {
  285. return int((*rt.GoType)(self.p).Size)
  286. }
  287. func (self _Instr) isBranch() bool {
  288. switch self.op() {
  289. case _OP_goto : fallthrough
  290. case _OP_switch : fallthrough
  291. case _OP_is_null : fallthrough
  292. case _OP_is_null_quote : fallthrough
  293. case _OP_check_char : return true
  294. default : return false
  295. }
  296. }
  297. func (self _Instr) disassemble() string {
  298. switch self.op() {
  299. case _OP_dyn : fallthrough
  300. case _OP_deref : fallthrough
  301. case _OP_map_key_i8 : fallthrough
  302. case _OP_map_key_i16 : fallthrough
  303. case _OP_map_key_i32 : fallthrough
  304. case _OP_map_key_i64 : fallthrough
  305. case _OP_map_key_u8 : fallthrough
  306. case _OP_map_key_u16 : fallthrough
  307. case _OP_map_key_u32 : fallthrough
  308. case _OP_map_key_u64 : fallthrough
  309. case _OP_map_key_f32 : fallthrough
  310. case _OP_map_key_f64 : fallthrough
  311. case _OP_map_key_str : fallthrough
  312. case _OP_map_key_utext : fallthrough
  313. case _OP_map_key_utext_p : fallthrough
  314. case _OP_slice_init : fallthrough
  315. case _OP_slice_append : fallthrough
  316. case _OP_unmarshal : fallthrough
  317. case _OP_unmarshal_p : fallthrough
  318. case _OP_unmarshal_text : fallthrough
  319. case _OP_unmarshal_text_p : fallthrough
  320. case _OP_recurse : return fmt.Sprintf("%-18s%s", self.op(), self.vt())
  321. case _OP_goto : fallthrough
  322. case _OP_is_null_quote : fallthrough
  323. case _OP_is_null : return fmt.Sprintf("%-18sL_%d", self.op(), self.vi())
  324. case _OP_index : fallthrough
  325. case _OP_array_clear : fallthrough
  326. case _OP_array_clear_p : return fmt.Sprintf("%-18s%d", self.op(), self.vi())
  327. case _OP_switch : return fmt.Sprintf("%-18s%s", self.op(), self.formatSwitchLabels())
  328. case _OP_struct_field : return fmt.Sprintf("%-18s%s", self.op(), self.formatStructFields())
  329. case _OP_match_char : return fmt.Sprintf("%-18s%s", self.op(), strconv.QuoteRune(rune(self.vb())))
  330. case _OP_check_char : return fmt.Sprintf("%-18sL_%d, %s", self.op(), self.vi(), strconv.QuoteRune(rune(self.vb())))
  331. default : return self.op().String()
  332. }
  333. }
  334. func (self _Instr) formatSwitchLabels() string {
  335. var i int
  336. var v int
  337. var m []string
  338. /* format each label */
  339. for i, v = range self.vs() {
  340. m = append(m, fmt.Sprintf("%d=L_%d", i, v))
  341. }
  342. /* join them with "," */
  343. return strings.Join(m, ", ")
  344. }
  345. func (self _Instr) formatStructFields() string {
  346. var i uint64
  347. var r []string
  348. var m []struct{i int; n string}
  349. /* extract all the fields */
  350. for i = 0; i < self.vf().N; i++ {
  351. if v := self.vf().At(i); v.Hash != 0 {
  352. m = append(m, struct{i int; n string}{i: v.ID, n: v.Name})
  353. }
  354. }
  355. /* sort by field name */
  356. sort.Slice(m, func(i, j int) bool {
  357. return m[i].n < m[j].n
  358. })
  359. /* format each field */
  360. for _, v := range m {
  361. r = append(r, fmt.Sprintf("%s=%d", v.n, v.i))
  362. }
  363. /* join them with "," */
  364. return strings.Join(r, ", ")
  365. }
type (
	// _Program is the compiled instruction stream for one reflect.Type.
	_Program []_Instr
)
  369. func (self _Program) pc() int {
  370. return len(self)
  371. }
  372. func (self _Program) tag(n int) {
  373. if n >= _MaxStack {
  374. panic("type nesting too deep")
  375. }
  376. }
// pin backpatches the branch at index i to jump to the current pc: the low
// 48 bits (the old vi operand) are masked off, then the current pc is packed
// in, leaving the op and vb fields untouched.
func (self _Program) pin(i int) {
	v := &self[i]
	v.u &= 0xffff000000000000
	v.u |= rt.PackInt(self.pc())
}
  382. func (self _Program) rel(v []int) {
  383. for _, i := range v {
  384. self.pin(i)
  385. }
  386. }
  387. func (self *_Program) add(op _Op) {
  388. *self = append(*self, newInsOp(op))
  389. }
  390. func (self *_Program) int(op _Op, vi int) {
  391. *self = append(*self, newInsVi(op, vi))
  392. }
  393. func (self *_Program) chr(op _Op, vb byte) {
  394. *self = append(*self, newInsVb(op, vb))
  395. }
  396. func (self *_Program) tab(op _Op, vs []int) {
  397. *self = append(*self, newInsVs(op, vs))
  398. }
  399. func (self *_Program) rtt(op _Op, vt reflect.Type) {
  400. *self = append(*self, newInsVt(op, vt))
  401. }
  402. func (self *_Program) fmv(op _Op, vf *caching.FieldMap) {
  403. *self = append(*self, newInsVf(op, vf))
  404. }
// disassemble renders the whole program as assembly-like text: jump targets
// get "L_n:" labels, every instruction is printed on its own indented line,
// and the listing ends with an "end" marker.
func (self _Program) disassemble() string {
	nb := len(self)
	tab := make([]bool, nb+1)      // tab[pc] marks pc as a branch target
	ret := make([]string, 0, nb+1)
	/* prescan to get all the labels */
	for _, ins := range self {
		if ins.isBranch() {
			if ins.op() != _OP_switch { // single jump target in vi
				tab[ins.vi()] = true
			} else { // switch: every table entry is a target
				for _, v := range ins.vs() {
					tab[v] = true
				}
			}
		}
	}
	/* disassemble each instruction */
	for i, ins := range self {
		if !tab[i] {
			ret = append(ret, "\t"+ins.disassemble())
		} else {
			ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
		}
	}
	/* add the last label, if needed */
	if tab[nb] {
		ret = append(ret, fmt.Sprintf("L_%d:", nb))
	}
	/* add an "end" indicator, and join all the strings */
	return strings.Join(append(ret, "\tend"), "\n")
}
// _Compiler lowers reflect.Types into _Programs.
type _Compiler struct {
	opts option.CompileOptions  // inlining / recursion limits
	tab  map[reflect.Type]bool  // types currently on the compile stack (cycle detection)
	rec  map[reflect.Type]bool  // types deferred to recursive compilation
}
  441. func newCompiler() *_Compiler {
  442. return &_Compiler {
  443. opts: option.DefaultCompileOptions(),
  444. tab: map[reflect.Type]bool{},
  445. rec: map[reflect.Type]bool{},
  446. }
  447. }
// apply overrides the compile options and returns the receiver for chaining.
func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
	self.opts = opts
	return self
}
  452. func (self *_Compiler) rescue(ep *error) {
  453. if val := recover(); val != nil {
  454. if err, ok := val.(error); ok {
  455. *ep = err
  456. } else {
  457. panic(val)
  458. }
  459. }
  460. }
// compile is the entry point: it compiles vt into a program, converting any
// error panic raised during compilation into the returned err.
func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) {
	defer self.rescue(&err)
	self.compileOne(&ret, 0, vt)
	return
}
// checkMarshaler emits unmarshaler-dispatch opcodes if vt (or *vt) implements
// json.Unmarshaler or encoding.TextUnmarshaler, returning true if it did.
// Pointer-receiver implementations are checked first, mirroring the priority
// encoding/json uses when the value is addressable.
func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type) bool {
	pt := reflect.PtrTo(vt)
	/* check for `json.Unmarshaler` with pointer receiver */
	if pt.Implements(jsonUnmarshalerType) {
		p.rtt(_OP_unmarshal_p, pt)
		return true
	}
	/* check for `json.Unmarshaler` */
	if vt.Implements(jsonUnmarshalerType) {
		p.add(_OP_lspace)
		self.compileUnmarshalJson(p, vt)
		return true
	}
	/* check for `encoding.TextUnmarshaler` with pointer receiver */
	if pt.Implements(encodingTextUnmarshalerType) {
		p.add(_OP_lspace)
		self.compileUnmarshalTextPtr(p, pt)
		return true
	}
	/* check for `encoding.TextUnmarshaler` */
	if vt.Implements(encodingTextUnmarshalerType) {
		p.add(_OP_lspace)
		self.compileUnmarshalText(p, vt)
		return true
	}
	return false
}
// compileOne compiles a single type at save-stack depth sp. A type already
// on the compile stack is emitted as _OP_recurse to break the cycle;
// otherwise user unmarshalers take priority over the structural compiler.
func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
	/* check for recursive nesting */
	ok := self.tab[vt]
	if ok {
		p.rtt(_OP_recurse, vt)
		return
	}
	if self.checkMarshaler(p, vt) {
		return
	}
	/* enter the recursion */
	p.add(_OP_lspace)
	self.tab[vt] = true
	self.compileOps(p, sp, vt)
	delete(self.tab, vt)
}
// compileOps dispatches on the reflect.Kind of vt to the kind-specific
// compiler. Unsupported kinds (chan, func, complex, ...) panic with an
// *json.UnmarshalTypeError, which compile() converts to an error.
func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
	switch vt.Kind() {
	case reflect.Bool      : self.compilePrimitive (vt, p, _OP_bool)
	case reflect.Int       : self.compilePrimitive (vt, p, _OP_int())
	case reflect.Int8      : self.compilePrimitive (vt, p, _OP_i8)
	case reflect.Int16     : self.compilePrimitive (vt, p, _OP_i16)
	case reflect.Int32     : self.compilePrimitive (vt, p, _OP_i32)
	case reflect.Int64     : self.compilePrimitive (vt, p, _OP_i64)
	case reflect.Uint      : self.compilePrimitive (vt, p, _OP_uint())
	case reflect.Uint8     : self.compilePrimitive (vt, p, _OP_u8)
	case reflect.Uint16    : self.compilePrimitive (vt, p, _OP_u16)
	case reflect.Uint32    : self.compilePrimitive (vt, p, _OP_u32)
	case reflect.Uint64    : self.compilePrimitive (vt, p, _OP_u64)
	case reflect.Uintptr   : self.compilePrimitive (vt, p, _OP_uintptr())
	case reflect.Float32   : self.compilePrimitive (vt, p, _OP_f32)
	case reflect.Float64   : self.compilePrimitive (vt, p, _OP_f64)
	case reflect.String    : self.compileString    (p, vt)
	case reflect.Array     : self.compileArray     (p, sp, vt)
	case reflect.Interface : self.compileInterface (p, vt)
	case reflect.Map       : self.compileMap       (p, sp, vt)
	case reflect.Ptr       : self.compilePtr       (p, sp, vt)
	case reflect.Slice     : self.compileSlice     (p, sp, vt)
	case reflect.Struct    : self.compileStruct    (p, sp, vt)
	default                : panic                 (&json.UnmarshalTypeError{Type: vt})
	}
}
// compileMap chooses the key-decoding opcode for a map type: keys with a
// TextUnmarshaler implementation (pointer receiver first) use the utext ops,
// everything else falls through to the kind-based selection.
func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
	if reflect.PtrTo(vt.Key()).Implements(encodingTextUnmarshalerType) {
		self.compileMapOp(p, sp, vt, _OP_map_key_utext_p)
	} else if vt.Key().Implements(encodingTextUnmarshalerType) {
		self.compileMapOp(p, sp, vt, _OP_map_key_utext)
	} else {
		self.compileMapUt(p, sp, vt)
	}
}
// compileMapUt selects the map-key opcode by the key's reflect.Kind for keys
// without a TextUnmarshaler. Unsupported key kinds panic with an
// *json.UnmarshalTypeError.
func (self *_Compiler) compileMapUt(p *_Program, sp int, vt reflect.Type) {
	switch vt.Key().Kind() {
	case reflect.Int     : self.compileMapOp(p, sp, vt, _OP_map_key_int())
	case reflect.Int8    : self.compileMapOp(p, sp, vt, _OP_map_key_i8)
	case reflect.Int16   : self.compileMapOp(p, sp, vt, _OP_map_key_i16)
	case reflect.Int32   : self.compileMapOp(p, sp, vt, _OP_map_key_i32)
	case reflect.Int64   : self.compileMapOp(p, sp, vt, _OP_map_key_i64)
	case reflect.Uint    : self.compileMapOp(p, sp, vt, _OP_map_key_uint())
	case reflect.Uint8   : self.compileMapOp(p, sp, vt, _OP_map_key_u8)
	case reflect.Uint16  : self.compileMapOp(p, sp, vt, _OP_map_key_u16)
	case reflect.Uint32  : self.compileMapOp(p, sp, vt, _OP_map_key_u32)
	case reflect.Uint64  : self.compileMapOp(p, sp, vt, _OP_map_key_u64)
	case reflect.Uintptr : self.compileMapOp(p, sp, vt, _OP_map_key_uintptr())
	case reflect.Float32 : self.compileMapOp(p, sp, vt, _OP_map_key_f32)
	case reflect.Float64 : self.compileMapOp(p, sp, vt, _OP_map_key_f64)
	case reflect.String  : self.compileMapOp(p, sp, vt, _OP_map_key_str)
	default              : panic(&json.UnmarshalTypeError{Type: vt})
	}
}
// compileMapOp emits the decode loop for a map type, using op to decode each
// key and compiling the element type recursively for each value. The first
// key/value pair is handled separately from the ','-separated loop so the
// empty-object and trailing-'}' cases each have a single check site.
// "null" input stores a nil map via _OP_nil_1.
func (self *_Compiler) compileMapOp(p *_Program, sp int, vt reflect.Type, op _Op) {
	i := p.pc()
	p.add(_OP_is_null)                    // "null" -> nil map
	p.tag(sp + 1)
	skip := self.checkIfSkip(p, vt, '{')
	p.add(_OP_save)
	p.add(_OP_map_init)
	p.add(_OP_save)
	p.add(_OP_lspace)
	j := p.pc()
	p.chr(_OP_check_char, '}')            // empty object fast path
	p.chr(_OP_match_char, '"')
	skip2 := p.pc()
	p.rtt(op, vt)                         // decode the first key
	/* match the value separator */
	p.add(_OP_lspace)
	p.chr(_OP_match_char, ':')
	self.compileOne(p, sp + 2, vt.Elem()) // decode the first value
	p.pin(skip2)
	p.add(_OP_load)
	k0 := p.pc()                          // top of the pair loop
	p.add(_OP_lspace)
	k1 := p.pc()
	p.chr(_OP_check_char, '}')            // end of object
	p.chr(_OP_match_char, ',')
	p.add(_OP_lspace)
	p.chr(_OP_match_char, '"')
	skip3 := p.pc()
	p.rtt(op, vt)                         // decode the next key
	/* match the value separator */
	p.add(_OP_lspace)
	p.chr(_OP_match_char, ':')
	self.compileOne(p, sp + 2, vt.Elem()) // decode the next value
	p.pin(skip3)
	p.add(_OP_load)
	p.int(_OP_goto, k0)                   // loop for more pairs
	p.pin(j)
	p.pin(k1)
	p.add(_OP_drop_2)                     // pop the two saved stack slots
	x := p.pc()
	p.add(_OP_goto)
	p.pin(i)
	p.add(_OP_nil_1)                      // store nil for "null"
	p.pin(skip)
	p.pin(x)
}
// compilePtr compiles a pointer type: it dereferences through all pointer
// levels (checking each level for user unmarshalers), then compiles the
// pointee. A "null" input sets the pointer itself to nil via _OP_nil_1.
func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
	i := p.pc()
	p.add(_OP_is_null)
	/* dereference all the way down */
	for et.Kind() == reflect.Ptr {
		if self.checkMarshaler(p, et) {
			return
		}
		et = et.Elem()
		p.rtt(_OP_deref, et)
	}
	/* check for recursive nesting */
	ok := self.tab[et]
	if ok {
		p.rtt(_OP_recurse, et)
	} else {
		/* enter the recursion */
		p.add(_OP_lspace)
		self.tab[et] = true
		/* not inline the pointer type
		 * recursing the defined pointer type's elem will casue issue379.
		 */
		self.compileOps(p, sp, et)
	}
	delete(self.tab, et)
	j := p.pc()
	p.add(_OP_goto)
	// set val pointer as nil
	p.pin(i)
	p.add(_OP_nil_1)
	// nothing todo
	p.pin(j)
}
// compileArray compiles a fixed-size array: one unrolled decode per element
// (each followed by a ']'-or-',' check), any surplus JSON items are skipped,
// and if the JSON array is shorter than vt the remaining elements are zeroed
// with array_clear (or array_clear_p when the element type contains pointers,
// so the GC-visible slots are cleared properly).
func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type) {
	x := p.pc()
	p.add(_OP_is_null)
	p.tag(sp)
	skip := self.checkIfSkip(p, vt, '[')
	p.add(_OP_save)
	p.add(_OP_lspace)
	v := []int{p.pc()}               // collects every ']'-check pc
	p.chr(_OP_check_char, ']')
	/* decode every item */
	for i := 1; i <= vt.Len(); i++ {
		self.compileOne(p, sp + 1, vt.Elem())
		p.add(_OP_load)
		p.int(_OP_index, i * int(vt.Elem().Size())) // advance to element i
		p.add(_OP_lspace)
		v = append(v, p.pc())
		p.chr(_OP_check_char, ']')
		p.chr(_OP_match_char, ',')
	}
	/* drop rest of the array */
	p.add(_OP_array_skip)
	w := p.pc()
	p.add(_OP_goto)
	p.rel(v)                         // all ']' checks land here
	/* check for pointer data */
	if rt.UnpackType(vt.Elem()).PtrData == 0 {
		p.int(_OP_array_clear, int(vt.Size()))
	} else {
		p.int(_OP_array_clear_p, int(vt.Size()))
	}
	/* restore the stack */
	p.pin(w)
	p.add(_OP_drop)
	p.pin(skip)
	p.pin(x)
}
// compileSlice routes byte slices to the base64/array dual-format decoder
// and every other element type to the generic list decoder.
func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
	if vt.Elem().Kind() == byteType.Kind() {
		self.compileSliceBin(p, sp, vt)
	} else {
		self.compileSliceList(p, sp, vt)
	}
}
// compileSliceBin compiles a []byte-kind slice, which accepts two JSON
// forms: a base64-encoded string (decoded by _OP_bin) or a regular JSON
// array of numbers (decoded by the shared slice body). "null" and any other
// leading character store an empty slice via _OP_nil_3.
func (self *_Compiler) compileSliceBin(p *_Program, sp int, vt reflect.Type) {
	i := p.pc()
	p.add(_OP_is_null)
	j := p.pc()
	p.chr(_OP_check_char, '[')           // '[' -> array form
	skip := self.checkIfSkip(p, vt, '"')
	k := p.pc()
	p.chr(_OP_check_char, '"')           // empty string guard
	p.add(_OP_bin)                       // base64 decode
	x := p.pc()
	p.add(_OP_goto)
	p.pin(j)
	self.compileSliceBody(p, sp, vt.Elem())
	y := p.pc()
	p.add(_OP_goto)
	p.pin(i)
	p.pin(k)
	p.add(_OP_nil_3)                     // nil slice for "null"/empty
	p.pin(x)
	p.pin(skip)
	p.pin(y)
}
// compileSliceList compiles a generic slice: a '[' opens the shared slice
// body, while "null" stores a nil slice via _OP_nil_3.
func (self *_Compiler) compileSliceList(p *_Program, sp int, vt reflect.Type) {
	i := p.pc()
	p.add(_OP_is_null)
	p.tag(sp)
	skip := self.checkIfSkip(p, vt, '[')
	self.compileSliceBody(p, sp, vt.Elem())
	x := p.pc()
	p.add(_OP_goto)
	p.pin(i)
	p.add(_OP_nil_3)
	p.pin(x)
	p.pin(skip)
}
// compileSliceBody emits the element loop shared by both slice decoders:
// init the slice, decode the first element, then append-and-decode in a
// ','-separated loop until ']'. The empty-array check happens before init.
func (self *_Compiler) compileSliceBody(p *_Program, sp int, et reflect.Type) {
	p.add(_OP_lspace)
	j := p.pc()
	p.chr(_OP_check_empty, ']')       // "[]" -> empty (but non-nil) slice
	p.rtt(_OP_slice_init, et)
	p.add(_OP_save)
	p.rtt(_OP_slice_append, et)       // make room for the first element
	self.compileOne(p, sp + 1, et)
	p.add(_OP_load)
	k0 := p.pc()                      // top of the element loop
	p.add(_OP_lspace)
	k1 := p.pc()
	p.chr(_OP_check_char, ']')        // end of array
	p.chr(_OP_match_char, ',')
	p.rtt(_OP_slice_append, et)
	self.compileOne(p, sp + 1, et)
	p.add(_OP_load)
	p.int(_OP_goto, k0)
	p.pin(k1)
	p.add(_OP_drop)
	p.pin(j)
}
// compileString compiles a string-kind type; json.Number gets the numeric
// opcode instead of the string body.
func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
	if vt == jsonNumberType {
		self.compilePrimitive(vt, p, _OP_num)
	} else {
		self.compileStringBody(vt, p)
	}
}
// compileStringBody emits a plain string decode: null check, leading-quote
// check (with dismatch recovery), then _OP_str.
func (self *_Compiler) compileStringBody(vt reflect.Type, p *_Program) {
	i := p.pc()
	p.add(_OP_is_null)
	skip := self.checkIfSkip(p, vt, '"')
	p.add(_OP_str)
	p.pin(i)
	p.pin(skip)
}
// compileStruct decides between inlining the struct body and deferring to a
// separately-compiled subroutine (_OP_recurse). Deferral happens when the
// inline depth, program size, or (for nested structs) field count exceeds
// the configured cutoffs; such types are remembered in self.rec.
func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
	if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
		p.rtt(_OP_recurse, vt)
		if self.opts.RecursiveDepth > 0 {
			self.rec[vt] = true
		}
	} else {
		self.compileStructBody(p, sp, vt)
	}
}
// compileStructBody emits the object-decoding loop for a struct: each key is
// hashed through the field map (_OP_struct_field) and dispatched via
// _OP_switch into per-field decode stubs filled in by the trailing loop;
// unknown keys fall through to _OP_object_next. The first pair and the
// ','-separated remainder are emitted separately, like compileMapOp.
func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
	fv := resolver.ResolveStruct(vt)
	fm, sw := caching.CreateFieldMap(len(fv)), make([]int, len(fv)) // sw = switch table, patched below
	/* start of object */
	p.tag(sp)
	n := p.pc()
	p.add(_OP_is_null)
	skip := self.checkIfSkip(p, vt, '{')
	p.add(_OP_save)
	p.add(_OP_lspace)
	x := p.pc()
	p.chr(_OP_check_char, '}')      // empty object fast path
	p.chr(_OP_match_char, '"')
	p.fmv(_OP_struct_field, fm)     // look up the first key
	p.add(_OP_lspace)
	p.chr(_OP_match_char, ':')
	p.tab(_OP_switch, sw)           // jump to the matched field's stub
	p.add(_OP_object_next)          // unknown key: skip its value
	y0 := p.pc()                    // loop head for subsequent pairs
	p.add(_OP_lspace)
	y1 := p.pc()
	p.chr(_OP_check_char, '}')
	p.chr(_OP_match_char, ',')
	/* special case of an empty struct */
	if len(fv) == 0 {
		p.add(_OP_object_skip)
		goto end_of_object
	}
	/* match the remaining fields */
	p.add(_OP_lspace)
	p.chr(_OP_match_char, '"')
	p.fmv(_OP_struct_field, fm)
	p.add(_OP_lspace)
	p.chr(_OP_match_char, ':')
	p.tab(_OP_switch, sw)
	p.add(_OP_object_next)
	p.int(_OP_goto, y0)
	/* process each field */
	for i, f := range fv {
		sw[i] = p.pc()              // switch target for field i
		fm.Set(f.Name, i)
		/* index to the field */
		for _, o := range f.Path {
			if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
				p.rtt(_OP_deref, o.Type)
			}
		}
		/* check for "stringnize" option */
		if (f.Opts & resolver.F_stringize) == 0 {
			self.compileOne(p, sp + 1, f.Type)
		} else {
			self.compileStructFieldStr(p, sp + 1, f.Type)
		}
		/* load the state, and try next field */
		p.add(_OP_load)
		p.int(_OP_goto, y0)
	}
end_of_object:
	p.pin(x)
	p.pin(y1)
	p.add(_OP_drop)
	p.pin(n)
	p.pin(skip)
}
// compileStructFieldStr compiles a struct field tagged with the ",string"
// option: the value arrives quoted, so the program matches an outer '"',
// decodes the primitive inside it, then matches the closing quote. Types
// that cannot be stringized fall back to the regular path; "null" handling
// differs between pointer fields (store nil) and value fields (leave as-is).
func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
	n1 := -1
	ft := vt
	sv := false
	/* dereference the pointer if needed */
	if ft.Kind() == reflect.Ptr {
		ft = ft.Elem()
	}
	/* check if it can be stringized */
	switch ft.Kind() {
	case reflect.Bool    : sv = true
	case reflect.Int     : sv = true
	case reflect.Int8    : sv = true
	case reflect.Int16   : sv = true
	case reflect.Int32   : sv = true
	case reflect.Int64   : sv = true
	case reflect.Uint    : sv = true
	case reflect.Uint8   : sv = true
	case reflect.Uint16  : sv = true
	case reflect.Uint32  : sv = true
	case reflect.Uint64  : sv = true
	case reflect.Uintptr : sv = true
	case reflect.Float32 : sv = true
	case reflect.Float64 : sv = true
	case reflect.String  : sv = true
	}
	/* if it's not, ignore the "string" and follow the regular path */
	if !sv {
		self.compileOne(p, sp, vt)
		return
	}
	/* remove the leading space, and match the leading quote */
	vk := vt.Kind()
	p.add(_OP_lspace)
	n0 := p.pc()
	p.add(_OP_is_null)
	skip := self.checkIfSkip(p, stringType, '"')
	/* also check for inner "null" */
	n1 = p.pc()
	p.add(_OP_is_null_quote)
	/* dereference the pointer only when it is not null */
	if vk == reflect.Ptr {
		vt = vt.Elem()
		p.rtt(_OP_deref, vt)
	}
	n2 := p.pc()
	p.chr(_OP_check_char_0, '"')  // empty string -> dismatch error below
	/* string opcode selector */
	_OP_string := func() _Op {
		if ft == jsonNumberType {
			return _OP_num
		} else {
			return _OP_unquote
		}
	}
	/* compile for each type */
	switch vt.Kind() {
	case reflect.Bool    : p.add(_OP_bool)
	case reflect.Int     : p.add(_OP_int())
	case reflect.Int8    : p.add(_OP_i8)
	case reflect.Int16   : p.add(_OP_i16)
	case reflect.Int32   : p.add(_OP_i32)
	case reflect.Int64   : p.add(_OP_i64)
	case reflect.Uint    : p.add(_OP_uint())
	case reflect.Uint8   : p.add(_OP_u8)
	case reflect.Uint16  : p.add(_OP_u16)
	case reflect.Uint32  : p.add(_OP_u32)
	case reflect.Uint64  : p.add(_OP_u64)
	case reflect.Uintptr : p.add(_OP_uintptr())
	case reflect.Float32 : p.add(_OP_f32)
	case reflect.Float64 : p.add(_OP_f64)
	case reflect.String  : p.add(_OP_string())
	default              : panic("not reachable")
	}
	/* the closing quote is not needed when parsing a pure string */
	if vt == jsonNumberType || vt.Kind() != reflect.String {
		p.chr(_OP_match_char, '"')
	}
	/* pin the `is_null_quote` jump location */
	if n1 != -1 && vk != reflect.Ptr {
		p.pin(n1)
	}
	/* "null" but not a pointer, act as if the field is not present */
	if vk != reflect.Ptr {
		pc2 := p.pc()
		p.add(_OP_goto)
		p.pin(n2)
		p.rtt(_OP_dismatch_err, vt)  // mismatched leading char -> record error
		p.int(_OP_add, 1)            // step past the offending byte
		p.pin(pc2)
		p.pin(n0)
		return
	}
	/* the "null" case of the pointer */
	pc := p.pc()
	p.add(_OP_goto)
	p.pin(n0) // `is_null` jump location
	p.pin(n1) // `is_null_quote` jump location
	p.add(_OP_nil_1)
	pc2 := p.pc()
	p.add(_OP_goto)
	p.pin(n2)
	p.rtt(_OP_dismatch_err, vt)
	p.int(_OP_add, 1)
	p.pin(pc)
	p.pin(pc2)
	p.pin(skip)
}
// compileInterface compiles an interface value: the empty interface decodes
// any JSON value (_OP_any); a non-empty interface dispatches on the dynamic
// type (_OP_dyn). "null" stores a nil interface via _OP_nil_2.
func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
	i := p.pc()
	p.add(_OP_is_null)
	/* check for empty interface */
	if vt.NumMethod() == 0 {
		p.add(_OP_any)
	} else {
		p.rtt(_OP_dyn, vt)
	}
	/* finish the OpCode */
	j := p.pc()
	p.add(_OP_goto)
	p.pin(i)
	p.add(_OP_nil_2)
	p.pin(j)
}
// compilePrimitive emits a null check followed by the scalar decode opcode;
// on "null" the decode is skipped and the destination is left unchanged.
// (The commented-out checkPrimitive call is intentionally preserved.)
func (self *_Compiler) compilePrimitive(vt reflect.Type, p *_Program, op _Op) {
	i := p.pc()
	p.add(_OP_is_null)
	// skip := self.checkPrimitive(p, vt)
	p.add(op)
	p.pin(i)
	// p.pin(skip)
}
// compileUnmarshalEnd finishes an unmarshaler call sequence whose null check
// sits at pc i: non-pointer targets simply pin the check past the call,
// while pointer targets get an extra branch that stores nil on "null".
func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int) {
	j := p.pc()
	k := vt.Kind()
	/* not a pointer */
	if k != reflect.Ptr {
		p.pin(i)
		return
	}
	/* it seems that in Go JSON library, "null" takes priority over any kind of unmarshaler */
	p.add(_OP_goto)
	p.pin(i)
	p.add(_OP_nil_1)
	p.pin(j)
}
// compileUnmarshalJson emits a call to the type's json.Unmarshaler. An
// interface target uses _OP_dyn instead so the dynamic type's unmarshaler
// is resolved at decode time.
func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
	i := p.pc()
	v := _OP_unmarshal
	p.add(_OP_is_null)
	/* check for dynamic interface */
	if vt.Kind() == reflect.Interface {
		v = _OP_dyn
	}
	/* call the unmarshaler */
	p.rtt(v, vt)
	self.compileUnmarshalEnd(p, vt, i)
}
// compileUnmarshalText emits a call to the type's encoding.TextUnmarshaler.
// Non-interface targets must start with a '"' (text unmarshalers only accept
// JSON strings); interfaces dispatch dynamically via _OP_dyn.
func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
	i := p.pc()
	v := _OP_unmarshal_text
	p.add(_OP_is_null)
	/* check for dynamic interface */
	if vt.Kind() == reflect.Interface {
		v = _OP_dyn
	} else {
		p.chr(_OP_match_char, '"')
	}
	/* call the unmarshaler */
	p.rtt(v, vt)
	self.compileUnmarshalEnd(p, vt, i)
}
// compileUnmarshalTextPtr emits a pointer-receiver TextUnmarshaler call:
// null check, mandatory opening '"', then the unmarshal_text_p opcode.
func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
	i := p.pc()
	p.add(_OP_is_null)
	p.chr(_OP_match_char, '"')
	p.rtt(_OP_unmarshal_text_p, vt)
	p.pin(i)
}
// checkIfSkip emits the "expected character c" guard: if the next byte is c
// the program advances past it; otherwise a dismatch error for vt is
// recorded and control jumps to the go_skip stub whose pc is returned (the
// caller pins it to the end of the value).
func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
	j := p.pc()
	p.chr(_OP_check_char_0, c)
	p.rtt(_OP_dismatch_err, vt)
	s := p.pc()
	p.add(_OP_go_skip)
	p.pin(j)
	p.int(_OP_add, 1) // consume the matched character
	return s
}