// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder

import (
    `encoding/json`
    `fmt`
    `math`
    `reflect`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
)
/** Register Allocations
 *
 *  State Registers:
 *
 *      %rbx : stack base
 *      %r12 : input pointer
 *      %r13 : input length
 *      %r14 : input cursor
 *      %r15 : value pointer
 *
 *  Error Registers:
 *
 *      %r10 : error type register
 *      %r11 : error pointer register
 */
/** Function Prototype & Stack Map
 *
 *  func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
 *
 *  s.buf  :   (FP)
 *  s.len  :  8(FP)
 *  ic     : 16(FP)
 *  vp     : 24(FP)
 *  sb     : 32(FP)
 *  fv     : 40(FP)
 *  sv.p   : 48(FP)
 *  sv.n   : 56(FP)
 *  rc     : 72(FP)
 *  err.vt : 80(FP)
 *  err.vp : 88(FP)
 */
const (
    _FP_args   = 96  // 96 bytes to pass arguments and return values for this function
    _FP_fargs  = 80  // 80 bytes for passing arguments to other Go functions
    _FP_saves  = 40  // 40 bytes for saving the registers before CALL instructions
    _FP_locals = 144 // 144 bytes for local variables
)

const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
    _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
    _FP_base = _FP_size + 8 // 8 bytes for the return address
)
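
// A quick sanity check on the frame layout (all values in bytes):
//
//     _FP_offs = _FP_fargs + _FP_saves + _FP_locals = 80 + 40 + 144 = 264
//     _FP_size = _FP_offs + 8 = 272   // plus the saved frame pointer
//     _FP_base = _FP_size + 8 = 280   // plus the return address
//
// so after the prologue executes SUBQ $_FP_size, SP, the first incoming
// argument (s.buf) sits at _FP_base(SP) = 280(SP).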
const (
    _IM_null = 0x6c6c756e // 'null'
    _IM_true = 0x65757274 // 'true'
    _IM_alse = 0x65736c61 // 'alse' ('false' without the 'f')
)
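
// The immediates above are the little-endian byte sequences of the literals:
// "null" is 'n'(0x6e) 'u'(0x75) 'l'(0x6c) 'l'(0x6c), which reads as the
// 32-bit word 0x6c6c756e. A single CMPL against the input buffer therefore
// matches four characters at once.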
const (
    _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
)
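
// _BM_space packs the four JSON whitespace characters into one 64-bit mask:
// a byte c (with c < 64) is whitespace iff bit c of the mask is set, so a
// single BT instruction classifies it. For example, ' ' is 0x20, so bit 32
// of the mask is set.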
const (
    _MODE_JSON = 1 << 3 // JSON mode flag passed to the base64 decoder
)
const (
    _LB_error           = "_error"
    _LB_im_error        = "_im_error"
    _LB_eof_error       = "_eof_error"
    _LB_type_error      = "_type_error"
    _LB_field_error     = "_field_error"
    _LB_range_error     = "_range_error"
    _LB_stack_error     = "_stack_error"
    _LB_base64_error    = "_base64_error"
    _LB_unquote_error   = "_unquote_error"
    _LB_parsing_error   = "_parsing_error"
    _LB_parsing_error_v = "_parsing_error_v"
    _LB_mismatch_error  = "_mismatch_error"
)

const (
    _LB_char_0_error  = "_char_0_error"
    _LB_char_1_error  = "_char_1_error"
    _LB_char_2_error  = "_char_2_error"
    _LB_char_3_error  = "_char_3_error"
    _LB_char_4_error  = "_char_4_error"
    _LB_char_m2_error = "_char_m2_error"
    _LB_char_m3_error = "_char_m3_error"
)

const (
    _LB_skip_one       = "_skip_one"
    _LB_skip_key_value = "_skip_key_value"
)

var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
    _X0 = jit.Reg("X0")
    _X1 = jit.Reg("X1")
)

var (
    _ST = jit.Reg("BX")
    _IP = jit.Reg("R12")
    _IL = jit.Reg("R13")
    _IC = jit.Reg("R14")
    _VP = jit.Reg("R15")
)

var (
    _R10 = jit.Reg("R10") // used for gcWriteBarrier
    _DF  = jit.Reg("R10") // reuse R10 in generic decoder for flags
    _ET  = jit.Reg("R10")
    _EP  = jit.Reg("R11")
)

var (
    _ARG_s  = _ARG_sp
    _ARG_sp = jit.Ptr(_SP, _FP_base)
    _ARG_sl = jit.Ptr(_SP, _FP_base+8)
    _ARG_ic = jit.Ptr(_SP, _FP_base+16)
    _ARG_vp = jit.Ptr(_SP, _FP_base+24)
    _ARG_sb = jit.Ptr(_SP, _FP_base+32)
    _ARG_fv = jit.Ptr(_SP, _FP_base+40)
)

var (
    _VAR_sv   = _VAR_sv_p
    _VAR_sv_p = jit.Ptr(_SP, _FP_base+48)
    _VAR_sv_n = jit.Ptr(_SP, _FP_base+56)
    _VAR_vk   = jit.Ptr(_SP, _FP_base+64)
)

var (
    _RET_rc = jit.Ptr(_SP, _FP_base+72)
    _RET_et = jit.Ptr(_SP, _FP_base+80)
    _RET_ep = jit.Ptr(_SP, _FP_base+88)
)

var (
    _VAR_st = _VAR_st_Vt
    _VAR_sr = jit.Ptr(_SP, _FP_fargs+_FP_saves)
)

var (
    _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs+_FP_saves+0)
    _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs+_FP_saves+8)
    _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs+_FP_saves+16)
    _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs+_FP_saves+24)
    _VAR_st_Db = jit.Ptr(_SP, _FP_fargs+_FP_saves+32)
    _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs+_FP_saves+40)
)

var (
    _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs+_FP_saves+48)
    _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs+_FP_saves+56)
    _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs+_FP_saves+64)
    _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs+_FP_saves+72)
    _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs+_FP_saves+80)
)

var (
    _VAR_bs_p  = jit.Ptr(_SP, _FP_fargs+_FP_saves+88)
    _VAR_bs_n  = jit.Ptr(_SP, _FP_fargs+_FP_saves+96)
    _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs+_FP_saves+104)
)

var _VAR_fl = jit.Ptr(_SP, _FP_fargs+_FP_saves+112)
var (
    _VAR_et = jit.Ptr(_SP, _FP_fargs+_FP_saves+120) // save the mismatched type
    _VAR_ic = jit.Ptr(_SP, _FP_fargs+_FP_saves+128) // save the mismatched position
    _VAR_pc = jit.Ptr(_SP, _FP_fargs+_FP_saves+136) // save the skip return pc
)
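
// _VAR_et / _VAR_ic implement deferred type-mismatch reporting: when an
// opcode sees a value of the wrong type, it records the expected type and
// the position here, skips the value, and keeps decoding. The epilogue
// checks _VAR_et and only then materializes a MismatchTypeError, so one
// bad field does not abort the whole decode.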
type _Assembler struct {
    jit.BaseAssembler
    p    _Program
    name string
}

func newAssembler(p _Program) *_Assembler {
    return new(_Assembler).Init(p)
}

/** Assembler Interface **/

func (self *_Assembler) Load() _Decoder {
    return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}

func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.copy_string()
    self.escape_string()
    self.escape_string_twice()
    self.skip_one()
    self.skip_key_value()
    self.mismatch_error()
    self.type_error()
    self.field_error()
    self.range_error()
    self.stack_error()
    self.base64_error()
    self.parsing_error()
}
/** Assembler Stages **/

var _OpFuncTab = [256]func(*_Assembler, *_Instr){
    _OP_any              : (*_Assembler)._asm_OP_any,
    _OP_dyn              : (*_Assembler)._asm_OP_dyn,
    _OP_str              : (*_Assembler)._asm_OP_str,
    _OP_bin              : (*_Assembler)._asm_OP_bin,
    _OP_bool             : (*_Assembler)._asm_OP_bool,
    _OP_num              : (*_Assembler)._asm_OP_num,
    _OP_i8               : (*_Assembler)._asm_OP_i8,
    _OP_i16              : (*_Assembler)._asm_OP_i16,
    _OP_i32              : (*_Assembler)._asm_OP_i32,
    _OP_i64              : (*_Assembler)._asm_OP_i64,
    _OP_u8               : (*_Assembler)._asm_OP_u8,
    _OP_u16              : (*_Assembler)._asm_OP_u16,
    _OP_u32              : (*_Assembler)._asm_OP_u32,
    _OP_u64              : (*_Assembler)._asm_OP_u64,
    _OP_f32              : (*_Assembler)._asm_OP_f32,
    _OP_f64              : (*_Assembler)._asm_OP_f64,
    _OP_unquote          : (*_Assembler)._asm_OP_unquote,
    _OP_nil_1            : (*_Assembler)._asm_OP_nil_1,
    _OP_nil_2            : (*_Assembler)._asm_OP_nil_2,
    _OP_nil_3            : (*_Assembler)._asm_OP_nil_3,
    _OP_deref            : (*_Assembler)._asm_OP_deref,
    _OP_index            : (*_Assembler)._asm_OP_index,
    _OP_is_null          : (*_Assembler)._asm_OP_is_null,
    _OP_is_null_quote    : (*_Assembler)._asm_OP_is_null_quote,
    _OP_map_init         : (*_Assembler)._asm_OP_map_init,
    _OP_map_key_i8       : (*_Assembler)._asm_OP_map_key_i8,
    _OP_map_key_i16      : (*_Assembler)._asm_OP_map_key_i16,
    _OP_map_key_i32      : (*_Assembler)._asm_OP_map_key_i32,
    _OP_map_key_i64      : (*_Assembler)._asm_OP_map_key_i64,
    _OP_map_key_u8       : (*_Assembler)._asm_OP_map_key_u8,
    _OP_map_key_u16      : (*_Assembler)._asm_OP_map_key_u16,
    _OP_map_key_u32      : (*_Assembler)._asm_OP_map_key_u32,
    _OP_map_key_u64      : (*_Assembler)._asm_OP_map_key_u64,
    _OP_map_key_f32      : (*_Assembler)._asm_OP_map_key_f32,
    _OP_map_key_f64      : (*_Assembler)._asm_OP_map_key_f64,
    _OP_map_key_str      : (*_Assembler)._asm_OP_map_key_str,
    _OP_map_key_utext    : (*_Assembler)._asm_OP_map_key_utext,
    _OP_map_key_utext_p  : (*_Assembler)._asm_OP_map_key_utext_p,
    _OP_array_skip       : (*_Assembler)._asm_OP_array_skip,
    _OP_array_clear      : (*_Assembler)._asm_OP_array_clear,
    _OP_array_clear_p    : (*_Assembler)._asm_OP_array_clear_p,
    _OP_slice_init       : (*_Assembler)._asm_OP_slice_init,
    _OP_slice_append     : (*_Assembler)._asm_OP_slice_append,
    _OP_object_skip      : (*_Assembler)._asm_OP_object_skip,
    _OP_object_next      : (*_Assembler)._asm_OP_object_next,
    _OP_struct_field     : (*_Assembler)._asm_OP_struct_field,
    _OP_unmarshal        : (*_Assembler)._asm_OP_unmarshal,
    _OP_unmarshal_p      : (*_Assembler)._asm_OP_unmarshal_p,
    _OP_unmarshal_text   : (*_Assembler)._asm_OP_unmarshal_text,
    _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
    _OP_lspace           : (*_Assembler)._asm_OP_lspace,
    _OP_match_char       : (*_Assembler)._asm_OP_match_char,
    _OP_check_char       : (*_Assembler)._asm_OP_check_char,
    _OP_load             : (*_Assembler)._asm_OP_load,
    _OP_save             : (*_Assembler)._asm_OP_save,
    _OP_drop             : (*_Assembler)._asm_OP_drop,
    _OP_drop_2           : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse          : (*_Assembler)._asm_OP_recurse,
    _OP_goto             : (*_Assembler)._asm_OP_goto,
    _OP_switch           : (*_Assembler)._asm_OP_switch,
    _OP_check_char_0     : (*_Assembler)._asm_OP_check_char_0,
    _OP_dismatch_err     : (*_Assembler)._asm_OP_dismatch_err,
    _OP_go_skip          : (*_Assembler)._asm_OP_go_skip,
    _OP_add              : (*_Assembler)._asm_OP_add,
    _OP_check_empty      : (*_Assembler)._asm_OP_check_empty,
}
func (self *_Assembler) instr(v *_Instr) {
    if fn := _OpFuncTab[v.op()]; fn != nil {
        fn(self, v)
    } else {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
}

func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _EP, _EP)                    // XORL EP, EP
    self.Emit("MOVQ", _VAR_et, _ET)                // MOVQ VAR_et, ET
    self.Emit("TESTQ", _ET, _ET)                   // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_mismatch_error)           // JNZ _mismatch_error
    self.Link(_LB_error)                           // _error:
    self.Emit("MOVQ", _IC, _RET_rc)                // MOVQ IC, rc<>+72(FP)
    self.Emit("MOVQ", _ET, _RET_et)                // MOVQ ET, et<>+80(FP)
    self.Emit("MOVQ", _EP, _RET_ep)                // MOVQ EP, ep<>+88(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)      // ADDQ $_FP_size, SP
    self.Emit("RET")                               // RET
}
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)      // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
    self.Emit("MOVQ", _ARG_sp, _IP)                // MOVQ s.p<>+0(FP), IP
    self.Emit("MOVQ", _ARG_sl, _IL)                // MOVQ s.l<>+8(FP), IL
    self.Emit("MOVQ", _ARG_ic, _IC)                // MOVQ ic<>+16(FP), IC
    self.Emit("MOVQ", _ARG_vp, _VP)                // MOVQ vp<>+24(FP), VP
    self.Emit("MOVQ", _ARG_sb, _ST)                // MOVQ sb<>+32(FP), ST

    /* initialize the digital buffer first */
    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)     // LEAQ _DbufOffset(ST), AX
    self.Emit("MOVQ", _AX, _VAR_st_Db)                    // MOVQ AX, ss.Dbuf
    self.Emit("XORL", _AX, _AX)                           // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_et)                       // MOVQ AX, VAR_et
}
/** Function Calling Helpers **/

var _REG_go = []obj.Addr{
    _ST,
    _VP,
    _IP,
    _IL,
    _IC,
}

func (self *_Assembler) save(r ...obj.Addr) {
    for i, v := range r {
        if i > _FP_saves/8-1 {
            panic("too many registers to save")
        } else {
            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
        }
    }
}

func (self *_Assembler) load(r ...obj.Addr) {
    for i, v := range r {
        if i > _FP_saves/8-1 {
            panic("too many registers to load")
        } else {
            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
        }
    }
}

func (self *_Assembler) call(fn obj.Addr) {
    self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
    self.Rjmp("CALL", _AX)     // CALL AX
}

func (self *_Assembler) call_go(fn obj.Addr) {
    self.save(_REG_go...) // SAVE $REG_go
    self.call(fn)         // CALL ${fn}
    self.load(_REG_go...) // LOAD $REG_go
}
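
// call_go wraps a call into Go code under the pre-go1.17 stack-based ABI:
// arguments must already be spilled to 0(SP), 8(SP), ... and results are
// read back from the stack after the call. The state registers are saved
// and restored around the call because the callee may trigger a GC or a
// stack growth that clobbers them. A typical call site looks like this
// (where _F_somefunc is a placeholder, not a real symbol in this file):
//
//     self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))  // spill arg0 to 0(SP)
//     self.call_go(_F_somefunc)                // save regs, CALL, restore
//     self.Emit("MOVQ", jit.Ptr(_SP, 8), _AX)  // load ret0 from 8(SP)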
func (self *_Assembler) call_sf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)                   // LEAQ s<>+0(FP), DI
    self.Emit("MOVQ", _IC, _ARG_ic)                  // MOVQ IC, ic<>+16(FP)
    self.Emit("LEAQ", _ARG_ic, _SI)                  // LEAQ ic<>+16(FP), SI
    self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX
    self.Emit("MOVQ", _ARG_fv, _CX)                  // MOVQ fv, CX
    self.call(fn)                                    // CALL ${fn}
    self.Emit("MOVQ", _ARG_ic, _IC)                  // MOVQ ic<>+16(FP), IC
}

func (self *_Assembler) call_vf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)  // LEAQ s<>+0(FP), DI
    self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)
    self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI
    self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX
    self.call(fn)                   // CALL ${fn}
    self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
}
/** Assembler Error Handlers **/

var (
    _F_convT64        = jit.Func(convT64)
    _F_error_wrap     = jit.Func(error_wrap)
    _F_error_type     = jit.Func(error_type)
    _F_error_field    = jit.Func(error_field)
    _F_error_value    = jit.Func(error_value)
    _F_error_mismatch = jit.Func(error_mismatch)
)

var (
    _I_int8, _T_int8       = rtype(reflect.TypeOf(int8(0)))
    _I_int16, _T_int16     = rtype(reflect.TypeOf(int16(0)))
    _I_int32, _T_int32     = rtype(reflect.TypeOf(int32(0)))
    _I_uint8, _T_uint8     = rtype(reflect.TypeOf(uint8(0)))
    _I_uint16, _T_uint16   = rtype(reflect.TypeOf(uint16(0)))
    _I_uint32, _T_uint32   = rtype(reflect.TypeOf(uint32(0)))
    _I_float32, _T_float32 = rtype(reflect.TypeOf(float32(0)))
)

var (
    _T_error                    = rt.UnpackType(errorType)
    _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
)

var (
    _V_stackOverflow              = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
    _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
    _I_json_MismatchTypeError     = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
)
func (self *_Assembler) type_error() {
    self.Link(_LB_type_error)                // _type_error:
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0))  // MOVQ ET, (SP)
    self.call_go(_F_error_type)              // CALL_GO error_type
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET)  // MOVQ 8(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP) // MOVQ 16(SP), EP
    self.Sjmp("JMP", _LB_error)              // JMP _error
}

func (self *_Assembler) mismatch_error() {
    self.Link(_LB_mismatch_error)                     // _mismatch_error:
    self.Emit("MOVQ", _VAR_et, _ET)                   // MOVQ VAR_et, ET
    self.Emit("MOVQ", _VAR_ic, _EP)                   // MOVQ VAR_ic, EP
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
    self.Emit("CMPQ", _ET, _AX)                       // CMPQ ET, AX
    self.Sjmp("JE", _LB_error)                        // JE _error
    self.Emit("MOVQ", _ARG_sp, _AX)                   // MOVQ sp, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))           // MOVQ AX, (SP)
    self.Emit("MOVQ", _ARG_sl, _CX)                   // MOVQ sl, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))           // MOVQ CX, 8(SP)
    self.Emit("MOVQ", _VAR_ic, _AX)                   // MOVQ VAR_ic, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))          // MOVQ AX, 16(SP)
    self.Emit("MOVQ", _VAR_et, _CX)                   // MOVQ VAR_et, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))          // MOVQ CX, 24(SP)
    self.call_go(_F_error_mismatch)                   // CALL_GO error_mismatch
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)          // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)          // MOVQ 40(SP), EP
    self.Sjmp("JMP", _LB_error)                       // JMP _error
}
func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
    self.Emit("MOVQ", _IC, _VAR_ic)          // MOVQ IC, VAR_ic
    self.Emit("MOVQ", jit.Type(p.vt()), _ET) // MOVQ ${p.vt()}, ET
    self.Emit("MOVQ", _ET, _VAR_et)          // MOVQ ET, VAR_et
}

func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
    self.Byte(0x4c, 0x8d, 0x0d)     // LEAQ (PC), R9
    self.Xref(p.vi(), 4)
    self.Emit("MOVQ", _R9, _VAR_pc) // MOVQ R9, VAR_pc
    self.Sjmp("JMP", _LB_skip_one)  // JMP _skip_one
}
func (self *_Assembler) skip_one() {
    self.Link(_LB_skip_one)              // _skip_one:
    self.Emit("MOVQ", _VAR_ic, _IC)      // MOVQ VAR_ic, IC
    self.call_sf(_F_skip_one)            // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)         // TESTQ AX, AX
    self.Sjmp("JS", _LB_parsing_error_v) // JS _parsing_error_v
    self.Emit("MOVQ", _VAR_pc, _R9)      // MOVQ pc, R9
    self.Rjmp("JMP", _R9)                // JMP (R9)
}

func (self *_Assembler) skip_key_value() {
    self.Link(_LB_skip_key_value)        // _skip_key_value:

    /* skip the key */
    self.Emit("MOVQ", _VAR_ic, _IC)      // MOVQ VAR_ic, IC
    self.call_sf(_F_skip_one)            // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)         // TESTQ AX, AX
    self.Sjmp("JS", _LB_parsing_error_v) // JS _parsing_error_v

    /* match the ':' separator */
    self.lspace("_global_1")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':')) // CMPB (IP)(IC), $':'
    self.Sjmp("JNE", _LB_parsing_error_v)                    // JNE _parsing_error_v
    self.Emit("ADDQ", jit.Imm(1), _IC)                       // ADDQ $1, IC
    self.lspace("_global_2")

    /* skip the value */
    self.call_sf(_F_skip_one)            // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)         // TESTQ AX, AX
    self.Sjmp("JS", _LB_parsing_error_v) // JS _parsing_error_v

    /* jump back to the specified address */
    self.Emit("MOVQ", _VAR_pc, _R9)      // MOVQ pc, R9
    self.Rjmp("JMP", _R9)                // JMP (R9)
}
func (self *_Assembler) field_error() {
    self.Link(_LB_field_error)               // _field_error:
    self.Emit("MOVOU", _VAR_sv, _X0)         // MOVOU sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP)
    self.call_go(_F_error_field)             // CALL_GO error_field
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP
    self.Sjmp("JMP", _LB_error)              // JMP _error
}

func (self *_Assembler) range_error() {
    self.Link(_LB_range_error)               // _range_error:
    self.slice_from(_VAR_st_Ep, 0)           // SLICE st.Ep, $0
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0))  // MOVQ DI, (SP)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8))  // MOVQ SI, 8(SP)
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16)) // MOVQ ET, 16(SP)
    self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP)
    self.call_go(_F_error_value)             // CALL_GO error_value
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
    self.Sjmp("JMP", _LB_error)              // JMP _error
}

func (self *_Assembler) stack_error() {
    self.Link(_LB_stack_error)                            // _stack_error:
    self.Emit("MOVQ", _V_stackOverflow, _EP)              // MOVQ ${_V_stackOverflow}, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET
    self.Sjmp("JMP", _LB_error)                           // JMP _error
}

func (self *_Assembler) base64_error() {
    self.Link(_LB_base64_error)                         // _base64_error:
    self.Emit("NEGQ", _AX)                              // NEGQ AX
    self.Emit("SUBQ", jit.Imm(1), _AX)                  // SUBQ $1, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))             // MOVQ AX, (SP)
    self.call_go(_F_convT64)                            // CALL_GO convT64
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP)             // MOVQ 8(SP), EP
    self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ ${itab(base64.CorruptInputError)}, ET
    self.Sjmp("JMP", _LB_error)                         // JMP _error
}
func (self *_Assembler) parsing_error() {
    self.Link(_LB_eof_error)                               // _eof_error:
    self.Emit("MOVQ", _IL, _IC)                            // MOVQ IL, IC
    self.Emit("MOVL", jit.Imm(int64(types.ERR_EOF)), _EP)  // MOVL ${types.ERR_EOF}, EP
    self.Sjmp("JMP", _LB_parsing_error)                    // JMP _parsing_error
    self.Link(_LB_unquote_error)                           // _unquote_error:
    self.Emit("SUBQ", _VAR_sr, _SI)                        // SUBQ sr, SI
    self.Emit("SUBQ", _SI, _IC)                            // SUBQ SI, IC
    self.Link(_LB_parsing_error_v)                         // _parsing_error_v:
    self.Emit("MOVQ", _AX, _EP)                            // MOVQ AX, EP
    self.Emit("NEGQ", _EP)                                 // NEGQ EP
    self.Sjmp("JMP", _LB_parsing_error)                    // JMP _parsing_error
    self.Link(_LB_char_m3_error)                           // _char_m3_error:
    self.Emit("SUBQ", jit.Imm(1), _IC)                     // SUBQ $1, IC
    self.Link(_LB_char_m2_error)                           // _char_m2_error:
    self.Emit("SUBQ", jit.Imm(2), _IC)                     // SUBQ $2, IC
    self.Sjmp("JMP", _LB_char_0_error)                     // JMP _char_0_error
    self.Link(_LB_im_error)                                // _im_error:
    self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 0))        // CMPB CX, (IP)(IC)
    self.Sjmp("JNE", _LB_char_0_error)                     // JNE _char_0_error
    self.Emit("SHRL", jit.Imm(8), _CX)                     // SHRL $8, CX
    self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 1))        // CMPB CX, 1(IP)(IC)
    self.Sjmp("JNE", _LB_char_1_error)                     // JNE _char_1_error
    self.Emit("SHRL", jit.Imm(8), _CX)                     // SHRL $8, CX
    self.Emit("CMPB", _CX, jit.Sib(_IP, _IC, 1, 2))        // CMPB CX, 2(IP)(IC)
    self.Sjmp("JNE", _LB_char_2_error)                     // JNE _char_2_error
    self.Sjmp("JMP", _LB_char_3_error)                     // JMP _char_3_error
    self.Link(_LB_char_4_error)                            // _char_4_error:
    self.Emit("ADDQ", jit.Imm(1), _IC)                     // ADDQ $1, IC
    self.Link(_LB_char_3_error)                            // _char_3_error:
    self.Emit("ADDQ", jit.Imm(1), _IC)                     // ADDQ $1, IC
    self.Link(_LB_char_2_error)                            // _char_2_error:
    self.Emit("ADDQ", jit.Imm(1), _IC)                     // ADDQ $1, IC
    self.Link(_LB_char_1_error)                            // _char_1_error:
    self.Emit("ADDQ", jit.Imm(1), _IC)                     // ADDQ $1, IC
    self.Link(_LB_char_0_error)                            // _char_0_error:
    self.Emit("MOVL", jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL ${types.ERR_INVALID_CHAR}, EP
    self.Link(_LB_parsing_error)                           // _parsing_error:
    self.Emit("MOVOU", _ARG_s, _X0)                        // MOVOU s, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))               // MOVOU X0, (SP)
    self.Emit("MOVQ", _IC, jit.Ptr(_SP, 16))               // MOVQ IC, 16(SP)
    self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24))               // MOVQ EP, 24(SP)
    self.call_go(_F_error_wrap)                            // CALL_GO error_wrap
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)               // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)               // MOVQ 40(SP), EP
    self.Sjmp("JMP", _LB_error)                            // JMP _error
}
/** Memory Management Routines **/

var (
    _T_byte     = jit.Type(byteType)
    _F_mallocgc = jit.Func(mallocgc)
)

func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) {
    self.Emit("XORL", _AX, _AX)              // XORL AX, AX
    self.Emit("MOVQ", _T_byte, _CX)          // MOVQ ${type(byte)}, CX
    self.Emit("MOVQ", nb, jit.Ptr(_SP, 0))   // MOVQ ${nb}, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))  // MOVQ CX, 8(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
    self.call_go(_F_mallocgc)                // CALL_GO mallocgc
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret}
}

func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ ${vt.Size()}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))           // MOVQ AX, (SP)
    self.Emit("MOVQ", jit.Type(vt), _AX)              // MOVQ ${vt}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))           // MOVQ AX, 8(SP)
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16))   // MOVB $1, 16(SP)
    self.call_go(_F_mallocgc)                         // CALL_GO mallocgc
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)          // MOVQ 24(SP), ${ret}
}

func (self *_Assembler) vfollow(vt reflect.Type) {
    self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)    // MOVQ (VP), AX
    self.Emit("TESTQ", _AX, _AX)               // TESTQ AX, AX
    self.Sjmp("JNZ", "_end_{n}")               // JNZ _end_{n}
    self.valloc(vt, _AX)                       // VALLOC ${vt}, AX
    self.WritePtrAX(1, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
    self.Link("_end_{n}")                      // _end_{n}:
    self.Emit("MOVQ", _AX, _VP)                // MOVQ AX, VP
}
/** Value Parsing Routines **/

var (
    _F_vstring   = jit.Imm(int64(native.S_vstring))
    _F_vnumber   = jit.Imm(int64(native.S_vnumber))
    _F_vsigned   = jit.Imm(int64(native.S_vsigned))
    _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
)

func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _VAR_st_Vt, _AX) // MOVQ st.Vt, AX
    self.Emit("TESTQ", _AX, _AX)       // TESTQ AX, AX

    /* on error, record the mismatch and try to skip the value instead of failing */
    if vt != nil {
        self.Sjmp("JNS", "_check_err_{n}")   // JNS _check_err_{n}
        self.Emit("MOVQ", jit.Type(vt), _ET) // MOVQ ${vt}, ET
        self.Emit("MOVQ", _ET, _VAR_et)      // MOVQ ET, VAR_et
        if pin2 != -1 {
            self.Emit("SUBQ", jit.Imm(1), _BP) // SUBQ $1, BP
            self.Emit("MOVQ", _BP, _VAR_ic)    // MOVQ BP, VAR_ic
            self.Byte(0x4c, 0x8d, 0x0d)        // LEAQ (PC), R9
            self.Xref(pin2, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)    // MOVQ R9, VAR_pc
            self.Sjmp("JMP", _LB_skip_key_value)
        } else {
            self.Emit("MOVQ", _BP, _VAR_ic) // MOVQ BP, VAR_ic
            self.Byte(0x4c, 0x8d, 0x0d)     // LEAQ (PC), R9
            self.Sref(pin, 4)
            self.Emit("MOVQ", _R9, _VAR_pc) // MOVQ R9, VAR_pc
            self.Sjmp("JMP", _LB_skip_one)
        }
        self.Link("_check_err_{n}")
    } else {
        self.Sjmp("JS", _LB_parsing_error_v) // JS _parsing_error_v
    }
}
func (self *_Assembler) check_eof(d int64) {
    if d == 1 {
        self.Emit("CMPQ", _IC, _IL)             // CMPQ IC, IL
        self.Sjmp("JAE", _LB_eof_error)         // JAE _eof_error
    } else {
        self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX
        self.Emit("CMPQ", _AX, _IL)             // CMPQ AX, IL
        self.Sjmp("JA", _LB_eof_error)          // JA _eof_error
    }
}
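
// For d == 1 this degenerates into a plain bounds check (CMPQ IC, IL);
// for larger lookaheads it first computes IC+d into AX, so that, e.g.,
// a check_eof(4) would reject inputs with fewer than four bytes remaining
// before any of the multi-byte literal comparisons run.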
// parse_string takes a validate flag as its last parameter.
func (self *_Assembler) parse_string() {
    self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ fv, CX
    self.call_vf(_F_vstring)
    self.check_err(nil, "", -1)
}

func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP) // save the starting position for mismatch reporting
    self.call_vf(_F_vnumber)    // call vnumber
    self.check_err(vt, pin, pin2)
}

func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vsigned)
    self.check_err(vt, pin, pin2)
}

func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vunsigned)
    self.check_err(vt, pin, pin2)
}
// Pointer: DI, Size: SI, Return: R9
func (self *_Assembler) copy_string() {
    self.Link("_copy_string")
    self.Emit("MOVQ", _DI, _VAR_bs_p)
    self.Emit("MOVQ", _SI, _VAR_bs_n)
    self.Emit("MOVQ", _R9, _VAR_bs_LR)
    self.malloc(_SI, _AX)
    self.Emit("MOVQ", _AX, _VAR_sv_p)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", _VAR_bs_p, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16))
    self.call_go(_F_memmove)
    self.Emit("MOVQ", _VAR_sv_p, _DI)
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}

// Pointer: DI, Size: SI, Return: R9
func (self *_Assembler) escape_string() {
    self.Link("_escape_string")
    self.Emit("MOVQ", _DI, _VAR_bs_p)
    self.Emit("MOVQ", _SI, _VAR_bs_n)
    self.Emit("MOVQ", _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX)                                     // MALLOC SI, DX
    self.Emit("MOVQ", _DX, _VAR_sv_p)
    self.Emit("MOVQ", _VAR_bs_p, _DI)
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("LEAQ", _VAR_sr, _CX)                           // LEAQ sr, CX
    self.Emit("XORL", _R8, _R8)                               // XORL R8, R8
    self.Emit("BTQ", jit.Imm(_F_disable_urc), _ARG_fv)        // BTQ ${_F_disable_urc}, fv
    self.Emit("SETCC", _R8)                                   // SETCC R8
    self.Emit("SHLQ", jit.Imm(types.B_UNICODE_REPLACE), _R8)  // SHLQ ${types.B_UNICODE_REPLACE}, R8
    self.call(_F_unquote)                                     // CALL unquote
    self.Emit("MOVQ", _VAR_bs_n, _SI)                         // MOVQ ${n}, SI
    self.Emit("ADDQ", jit.Imm(1), _SI)                        // ADDQ $1, SI
    self.Emit("TESTQ", _AX, _AX)                              // TESTQ AX, AX
    self.Sjmp("JS", _LB_unquote_error)                        // JS _unquote_error
    self.Emit("MOVQ", _AX, _SI)
    self.Emit("MOVQ", _VAR_sv_p, _DI)
    self.Emit("MOVQ", _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
func (self *_Assembler) escape_string_twice() {
    self.Link("_escape_string_twice")
    self.Emit("MOVQ", _DI, _VAR_bs_p)
    self.Emit("MOVQ", _SI, _VAR_bs_n)
    self.Emit("MOVQ", _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX)                                     // MALLOC SI, DX
    self.Emit("MOVQ", _DX, _VAR_sv_p)
    self.Emit("MOVQ", _VAR_bs_p, _DI)
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("LEAQ", _VAR_sr, _CX)                           // LEAQ sr, CX
    self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)   // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
    self.Emit("BTQ", jit.Imm(_F_disable_urc), _ARG_fv)        // BTQ ${_F_disable_urc}, fv
    self.Emit("XORL", _AX, _AX)                               // XORL AX, AX
    self.Emit("SETCC", _AX)                                   // SETCC AX
    self.Emit("SHLQ", jit.Imm(types.B_UNICODE_REPLACE), _AX)  // SHLQ ${types.B_UNICODE_REPLACE}, AX
    self.Emit("ORQ", _AX, _R8)                                // ORQ AX, R8
    self.call(_F_unquote)                                     // CALL unquote
    self.Emit("MOVQ", _VAR_bs_n, _SI)                         // MOVQ ${n}, SI
    self.Emit("ADDQ", jit.Imm(3), _SI)                        // ADDQ $3, SI
    self.Emit("TESTQ", _AX, _AX)                              // TESTQ AX, AX
    self.Sjmp("JS", _LB_unquote_error)                        // JS _unquote_error
    self.Emit("MOVQ", _AX, _SI)
    self.Emit("MOVQ", _VAR_sv_p, _DI)
    self.Emit("MOVQ", _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
/** Range Checking Routines **/

var (
    _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
    _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
)

var (
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

func init() {
    *_Vp_max_f32 = math.MaxFloat32
    *_Vp_min_f32 = -math.MaxFloat32
}
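
// The float32 bounds live behind pointers rather than in immediates because
// SSE comparisons such as UCOMISS take a register or memory operand, never a
// floating-point immediate; range_single below loads a bound's address into
// AX and compares against (AX).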
func (self *_Assembler) range_single() {
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)        // CVTSD2SS st.Dv, X0
    self.Emit("MOVQ", _V_max_f32, _AX)            // MOVQ _max_f32, AX
    self.Emit("MOVQ", jit.Gitab(_I_float32), _ET) // MOVQ ${itab(float32)}, ET
    self.Emit("MOVQ", jit.Gtype(_T_float32), _EP) // MOVQ ${type(float32)}, EP
    self.Emit("UCOMISS", jit.Ptr(_AX, 0), _X0)    // UCOMISS (AX), X0
    self.Sjmp("JA", _LB_range_error)              // JA _range_error
    self.Emit("MOVQ", _V_min_f32, _AX)            // MOVQ _min_f32, AX
    self.Emit("UCOMISS", jit.Ptr(_AX, 0), _X0)    // UCOMISS (AX), X0
    self.Sjmp("JB", _LB_range_error)              // JB _range_error
}

func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
    self.Emit("MOVQ", _VAR_st_Iv, _AX)   // MOVQ st.Iv, AX
    self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET
    self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP
    self.Emit("CMPQ", _AX, jit.Imm(a))   // CMPQ AX, ${a}
    self.Sjmp("JL", _LB_range_error)     // JL _range_error
    self.Emit("CMPQ", _AX, jit.Imm(b))   // CMPQ AX, ${b}
    self.Sjmp("JG", _LB_range_error)     // JG _range_error
}

func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) {
    self.Emit("MOVQ", _VAR_st_Iv, _AX)        // MOVQ st.Iv, AX
    self.Emit("MOVQ", jit.Gitab(i), _ET)      // MOVQ ${i}, ET
    self.Emit("MOVQ", jit.Gtype(t), _EP)      // MOVQ ${t}, EP
    self.Emit("TESTQ", _AX, _AX)              // TESTQ AX, AX
    self.Sjmp("JS", _LB_range_error)          // JS _range_error
    self.Emit("CMPQ", _AX, jit.Imm(int64(v))) // CMPQ AX, ${v}
    self.Sjmp("JA", _LB_range_error)          // JA _range_error
}
/** String Manipulating Routines **/

var (
    _F_unquote = jit.Imm(int64(native.S_unquote))
)

func (self *_Assembler) slice_from(p obj.Addr, d int64) {
    self.Emit("MOVQ", p, _SI) // MOVQ ${p}, SI
    self.slice_from_r(_SI, d) // SLICE_R SI, ${d}
}

func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
    self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI
    self.Emit("NEGQ", p)                          // NEGQ ${p}
    self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI
}
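
// Worked example: with p holding the start offset of a token and the cursor
// IC one past its end, slice_from_r computes
//
//     DI = IP + p         // pointer to the first byte of the token
//     SI = IC - p + d     // token length, biased by d
//
// so slice_from(_VAR_st_Iv, -1) yields the string body without its closing
// quote.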
func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
    self.slice_from(_VAR_st_Iv, -1)            // SLICE st.Iv, $-1
    self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1
    self.Sjmp("JE", "_noescape_{n}")           // JE _noescape_{n}
    self.Byte(0x4c, 0x8d, 0x0d)                // LEAQ (PC), R9
    self.Sref("_unquote_once_write_{n}", 4)
    self.Sjmp("JMP", "_escape_string")
    self.Link("_noescape_{n}")                 // _noescape_{n}:
    if copy {
        self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
        self.Sjmp("JNC", "_unquote_once_write_{n}")
        self.Byte(0x4c, 0x8d, 0x0d)            // LEAQ (PC), R9
        self.Sref("_unquote_once_write_{n}", 4)
        self.Sjmp("JMP", "_copy_string")
    }
    self.Link("_unquote_once_write_{n}")
    self.Emit("MOVQ", _SI, n)                  // MOVQ SI, ${n}
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(10, _DI, p, false, false)
    }
}

func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
    self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1))                 // CMPQ st.Ep, $-1
    self.Sjmp("JE", _LB_eof_error)                             // JE _eof_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB -3(IP)(IC), $'\\'
    self.Sjmp("JNE", _LB_char_m3_error)                        // JNE _char_m3_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))  // CMPB -2(IP)(IC), $'"'
    self.Sjmp("JNE", _LB_char_m2_error)                        // JNE _char_m2_error
    self.slice_from(_VAR_st_Iv, -3)                            // SLICE st.Iv, $-3
    self.Emit("MOVQ", _SI, _AX)                                // MOVQ SI, AX
    self.Emit("ADDQ", _VAR_st_Iv, _AX)                         // ADDQ st.Iv, AX
    self.Emit("CMPQ", _VAR_st_Ep, _AX)                         // CMPQ st.Ep, AX
    self.Sjmp("JE", "_noescape_{n}")                           // JE _noescape_{n}
    self.Byte(0x4c, 0x8d, 0x0d)                                // LEAQ (PC), R9
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP", "_escape_string_twice")
    self.Link("_noescape_{n}")                                 // _noescape_{n}:
    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_unquote_twice_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)                                // LEAQ (PC), R9
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_unquote_twice_write_{n}")
    self.Emit("MOVQ", _SI, n)                                  // MOVQ SI, ${n}
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(12, _DI, p, false, false)
    }
}
/** Memory Clearing Routines **/

var (
    _F_memclrHasPointers    = jit.Func(memclrHasPointers)
    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)

func (self *_Assembler) mem_clear_fn(ptrfree bool) {
    if !ptrfree {
        self.call_go(_F_memclrHasPointers)
    } else {
        self.call_go(_F_memclrNoHeapPointers)
    }
}

func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
    self.Emit("MOVQ", jit.Imm(size), _CX)           // MOVQ ${size}, CX
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)         // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX) // MOVQ (ST)(AX), AX
    self.Emit("SUBQ", _VP, _AX)                     // SUBQ VP, AX
    self.Emit("ADDQ", _AX, _CX)                     // ADDQ AX, CX
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0))         // MOVQ VP, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))         // MOVQ CX, 8(SP)
    self.mem_clear_fn(ptrfree)                      // CALL_GO memclr{Has,NoHeap}Pointers
}
/** Map Assigning Routines **/

var (
    _F_mapassign           = jit.Func(mapassign)
    _F_mapassign_fast32    = jit.Func(mapassign_fast32)
    _F_mapassign_faststr   = jit.Func(mapassign_faststr)
    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)

var (
    _F_decodeJsonUnmarshaler obj.Addr
    _F_decodeTextUnmarshaler obj.Addr
)

func init() {
    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}

func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
    if rt.MapType(rt.UnpackType(t)).IndirectElem() {
        self.vfollow(t.Elem())
    }
}

func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
    self.Emit("LEAQ", v, _AX)            // LEAQ ${v}, AX
    self.mapassign_call(t, _F_mapassign) // MAPASSIGN ${t}, mapassign
}

func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _AX)      // MOVQ ${t}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))  // MOVQ AX, (SP)
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))  // MOVQ VP, 8(SP)
    self.Emit("MOVQ", p, jit.Ptr(_SP, 16))   // MOVQ ${p}, 16(SP)
    self.Emit("MOVQ", n, jit.Ptr(_SP, 24))   // MOVQ ${n}, 24(SP)
    self.call_go(_F_mapassign_faststr)       // CALL_GO mapassign_faststr
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP) // MOVQ 32(SP), VP
    self.mapaccess_ptr(t)
}

func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _SI)      // MOVQ ${t}, SI
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0))  // MOVQ SI, (SP)
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))  // MOVQ VP, 8(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
    self.call_go(fn)                         // CALL_GO ${fn}
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP) // MOVQ 24(SP), VP
}
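
// The layout above mirrors runtime.mapassign's stack ABI under go1.16:
// (rtype, hmap, key pointer) go in at 0/8/16(SP) and the returned element
// pointer comes back at 24(SP), which becomes the new VP that subsequent
// opcodes write the decoded value through.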
func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
    self.mapassign_call(t, fn)
    self.mapaccess_ptr(t)
}

func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
    pv := false
    vk := t.Key()
    tk := t.Key()

    /* deref pointer if needed */
    if vk.Kind() == reflect.Ptr {
        pv = true
        vk = vk.Elem()
    }

    /* addressable value with pointer receiver */
    if addressable {
        pv = false
        tk = reflect.PtrTo(tk)
    }

    /* allocate the key, and call the unmarshaler */
    self.valloc(vk, _DI)                      // VALLOC ${vk}, DI
    /* must spill the vk pointer, since the next call_go may trigger a GC */
    self.Emit("MOVQ", _DI, _VAR_vk)           // MOVQ DI, VAR_vk
    self.Emit("MOVQ", jit.Type(tk), _AX)      // MOVQ ${tk}, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))   // MOVQ AX, (SP)
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))   // MOVQ DI, 8(SP)
    self.Emit("MOVOU", _VAR_sv, _X0)          // MOVOU sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP)
    self.call_go(_F_decodeTextUnmarshaler)    // CALL_GO decodeTextUnmarshaler
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)  // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)  // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)               // JNZ _error
    self.Emit("MOVQ", _VAR_vk, _AX)           // MOVQ VAR_vk, AX

    /* select the correct assignment function */
    if !pv {
        self.mapassign_call(t, _F_mapassign)
    } else {
        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
    }
}
/** External Unmarshaler Routines **/

var (
    _F_skip_one    = jit.Imm(int64(native.S_skip_one))
    _F_skip_number = jit.Imm(int64(native.S_skip_number))
)

func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
    self.call_sf(_F_skip_one)                               // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX)                            // TESTQ AX, AX
    self.Sjmp("JS", _LB_parsing_error_v)                    // JS _parsing_error_v
    self.slice_from_r(_AX, 0)                               // SLICE_R AX, $0
    self.Emit("MOVQ", _DI, _VAR_sv_p)                       // MOVQ DI, sv.p
    self.Emit("MOVQ", _SI, _VAR_sv_n)                       // MOVQ SI, sv.n
    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
}

func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
    self.parse_string()                                     // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)     // UNQUOTE once, sv.p, sv.n
    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref}
}

func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
    pt := t
    vk := t.Kind()

    /* allocate the field if needed */
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ", _VP, _AX)                // MOVQ VP, AX
        self.Emit("MOVQ", jit.Ptr(_AX, 0), _AX)    // MOVQ (AX), AX
        self.Emit("TESTQ", _AX, _AX)               // TESTQ AX, AX
        self.Sjmp("JNZ", "_deref_{n}")             // JNZ _deref_{n}
        self.valloc(t.Elem(), _AX)                 // VALLOC ${t.Elem()}, AX
        self.WritePtrAX(3, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
        self.Link("_deref_{n}")                    // _deref_{n}:
    }

    /* set value type */
    self.Emit("MOVQ", jit.Type(pt), _CX)    // MOVQ ${pt}, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0)) // MOVQ CX, (SP)

    /* set value pointer */
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
    } else {
        self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
    }

    /* set the source string and call the unmarshaler */
    self.Emit("MOVOU", _VAR_sv, _X0)          // MOVOU sv, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP)
    self.call_go(fn)                          // CALL_GO ${fn}
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)  // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)  // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)               // JNZ _error
}
/** Dynamic Decoding Routine **/

var (
    _F_decodeTypedPointer obj.Addr
)

func init() {
    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}

func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
    self.Emit("MOVQ", _ARG_fv, _CX)                   // MOVQ fv, CX
    self.Emit("MOVOU", _ARG_sp, _X0)                  // MOVOU sp, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))          // MOVOU X0, (SP)
    self.Emit("MOVQ", _IC, jit.Ptr(_SP, 16))          // MOVQ IC, 16(SP)
    self.Emit("MOVQ", vt, jit.Ptr(_SP, 24))           // MOVQ ${vt}, 24(SP)
    self.Emit("MOVQ", vp, jit.Ptr(_SP, 32))           // MOVQ ${vp}, 32(SP)
    self.Emit("MOVQ", _ST, jit.Ptr(_SP, 40))          // MOVQ ST, 40(SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 48))          // MOVQ CX, 48(SP)
    self.call_go(_F_decodeTypedPointer)               // CALL_GO decodeTypedPointer
    self.Emit("MOVQ", jit.Ptr(_SP, 64), _ET)          // MOVQ 64(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 72), _EP)          // MOVQ 72(SP), EP
    self.Emit("MOVQ", jit.Ptr(_SP, 56), _IC)          // MOVQ 56(SP), IC
    self.Emit("TESTQ", _ET, _ET)                      // TESTQ ET, ET
    self.Sjmp("JE", "_decode_dynamic_end_{n}")        // JE _decode_dynamic_end_{n}
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
    self.Emit("CMPQ", _ET, _AX)                       // CMPQ ET, AX
    self.Sjmp("JNE", _LB_error)                       // JNE _error
    self.Emit("MOVQ", _EP, _VAR_ic)                   // MOVQ EP, VAR_ic
    self.Emit("MOVQ", _ET, _VAR_et)                   // MOVQ ET, VAR_et
    self.Link("_decode_dynamic_end_{n}")              // _decode_dynamic_end_{n}:
}
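
// Note the error protocol here: a MismatchTypeError returned by the
// recursive decoder is not fatal. Its itab is compared against ET; on a
// match the error is downgraded into the _VAR_et/_VAR_ic slots (EP carries
// the mismatch position in that case) and decoding continues, while any
// other error jumps straight to _error.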
/** OpCode Assembler Functions **/

var (
    _F_memequal         = jit.Func(memequal)
    _F_memmove          = jit.Func(memmove)
    _F_growslice        = jit.Func(growslice)
    _F_makeslice        = jit.Func(makeslice)
    _F_makemap_small    = jit.Func(makemap_small)
    _F_mapassign_fast64 = jit.Func(mapassign_fast64)
)

var (
    _F_lspace  = jit.Imm(int64(native.S_lspace))
    _F_strhash = jit.Imm(int64(caching.S_strhash))
)

var (
    _F_b64decode   = jit.Imm(int64(_subr__b64decode))
    _F_decodeValue = jit.Imm(int64(_subr_decode_value))
)

var (
    _F_skip_array  = jit.Imm(int64(native.S_skip_array))
    _F_skip_object = jit.Imm(int64(native.S_skip_object))
)

var (
    _F_FieldMap_GetCaseInsensitive obj.Addr
    _Empty_Slice                   = make([]byte, 0)
    _Zero_Base                     = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
)

const (
    _MODE_AVX2 = 1 << 2
)

const (
    _Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
    _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
    _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
)

const (
    _Vk_Ptr       = int64(reflect.Ptr)
    _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
)

func init() {
    _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
}
func (self *_Assembler) _asm_OP_any(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_VP, 8), _CX)                 // MOVQ 8(VP), CX
    self.Emit("TESTQ", _CX, _CX)                            // TESTQ CX, CX
    self.Sjmp("JZ", "_decode_{n}")                          // JZ _decode_{n}
    self.Emit("CMPQ", _CX, _VP)                             // CMPQ CX, VP
    self.Sjmp("JE", "_decode_{n}")                          // JE _decode_{n}
    self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)                 // MOVQ (VP), AX
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
    self.Emit("ANDL", jit.Imm(rt.F_kind_mask), _DX)         // ANDL ${F_kind_mask}, DX
    self.Emit("CMPL", _DX, jit.Imm(_Vk_Ptr))                // CMPL DX, ${reflect.Ptr}
    self.Sjmp("JNE", "_decode_{n}")                         // JNE _decode_{n}
    self.Emit("LEAQ", jit.Ptr(_VP, 8), _DI)                 // LEAQ 8(VP), DI
    self.decode_dynamic(_AX, _DI)                           // DECODE AX, DI
    self.Sjmp("JMP", "_decode_end_{n}")                     // JMP _decode_end_{n}
    self.Link("_decode_{n}")                                // _decode_{n}:
    self.Emit("MOVQ", _ARG_fv, _DF)                         // MOVQ fv, DF
    self.Emit("MOVQ", _ST, jit.Ptr(_SP, 0))                 // MOVQ ST, (SP)
    self.call(_F_decodeValue)                               // CALL decodeValue
    self.Emit("TESTQ", _EP, _EP)                            // TESTQ EP, EP
    self.Sjmp("JNZ", _LB_parsing_error)                     // JNZ _parsing_error
    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
}

func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
    self.Emit("MOVQ", jit.Type(p.vt()), _ET)                // MOVQ ${p.vt()}, ET
    self.Emit("CMPQ", jit.Ptr(_VP, 8), jit.Imm(0))          // CMPQ 8(VP), $0
    self.Sjmp("JE", _LB_type_error)                         // JE _type_error
    self.Emit("MOVQ", jit.Ptr(_VP, 0), _AX)                 // MOVQ (VP), AX
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _AX)                 // MOVQ 8(AX), AX
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
    self.Emit("ANDL", jit.Imm(rt.F_kind_mask), _DX)         // ANDL ${F_kind_mask}, DX
    self.Emit("CMPL", _DX, jit.Imm(_Vk_Ptr))                // CMPL DX, ${reflect.Ptr}
    self.Sjmp("JNE", _LB_type_error)                        // JNE _type_error
    self.Emit("LEAQ", jit.Ptr(_VP, 8), _DI)                 // LEAQ 8(VP), DI
    self.decode_dynamic(_AX, _DI)                           // DECODE AX, DI
    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
}
  1096. func (self *_Assembler) _asm_OP_str(_ *_Instr) {
  1097. self.parse_string() // PARSE STRING
  1098. self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true) // UNQUOTE once, (VP), 8(VP)
  1099. }
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.parse_string() // PARSE STRING
    self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1
    self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0)) // MOVQ DI, (VP)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP)
    self.Emit("SHRQ" , jit.Imm(2), _SI) // SHRQ $2, SI
    self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI) // LEAQ (SI)(SI*2), SI
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP)
    self.malloc(_SI, _SI) // MALLOC SI, SI
    // TODO: due to a bug in base64x, only AVX mode is used for now
    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX) // MOVL $_MODE_JSON, CX
    /* call the decoder */
    self.Emit("XORL" , _DX, _DX) // XORL DX, DX
    self.Emit("MOVQ" , _VP, _DI) // MOVQ VP, DI
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9) // MOVQ (VP), R9
    self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false) // XCHGQ SI, (VP)
    self.Emit("MOVQ" , _R9, _SI) // MOVQ R9, SI
    self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8)) // XCHGQ DX, 8(VP)
    self.call(_F_b64decode) // CALL b64decode
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_base64_error) // JS _base64_error
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
}

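// _asm_OP_bool matches the literals "true" and "false" byte-for-byte. For
// anything else it records the cursor, the expected bool type (_T_bool) and
// a resume PC, then jumps to the skip-one path so the value is skipped and
// reported as a type mismatch.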
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX
    self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
    self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f')) // CMPB (IP)(IC), $'f'
    self.Sjmp("JE" , "_false_{n}") // JE _false_{n}
    self.Emit("MOVL", jit.Imm(_IM_true), _CX) // MOVL $"true", CX
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
    self.Sjmp("JE" , "_bool_true_{n}") // JE _bool_true_{n}
    /* try to skip the value */
    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", _T_bool, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
    self.Sref("_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)
    self.Link("_bool_true_{n}") // _bool_true_{n}:
    self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0)) // MOVB $1, (VP)
    self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
    self.Link("_false_{n}") // _false_{n}:
    self.Emit("ADDQ", jit.Imm(1), _AX) // ADDQ $1, AX
    self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
    self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
    self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
    self.Emit("MOVL", jit.Imm(_IM_alse), _CX) // MOVL $"alse", CX
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
    self.Sjmp("JNE" , _LB_im_error) // JNE _im_error
    self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
    self.Emit("XORL", _AX, _AX) // XORL AX, AX
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
    self.Link("_end_{n}") // _end_{n}:
}

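// _asm_OP_num captures a number as its raw text (json.Number). _VAR_fl
// records whether the value was quoted so the closing '"' can be consumed
// (or a character error raised) after the digits; a malformed number stashes
// the cursor, the expected type and a resume PC, then defers to the skip-one
// path just like the bool case above.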
func (self *_Assembler) _asm_OP_num(_ *_Instr) {
    self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Emit("MOVQ", _IC, _BP)
    self.Sjmp("JNE", "_skip_number_{n}")
    self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_skip_number_{n}")
    /* call skip_number */
    self.call_sf(_F_skip_number) // CALL_SF skip_number
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JNS" , "_num_next_{n}")
    /* call skip one */
    self.Emit("MOVQ", _BP, _VAR_ic)
    self.Emit("MOVQ", _T_number, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
    self.Sref("_num_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)
    /* assign string */
    self.Link("_num_next_{n}")
    self.slice_from_r(_AX, 0)
    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_num_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
    self.Sref("_num_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_num_write_{n}")
    self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP)
    self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
    /* check if quoted */
    self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
    self.Sjmp("JNE", "_num_end_{n}")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Sjmp("JNE", _LB_char_0_error)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_num_end_{n}")
}

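// The sized integer ops below all follow one pattern: parse the literal,
// range-check it against the target width (range_signed / range_unsigned),
// then store it with a width-matched MOV. The 64-bit variants need no range
// check and store st.Iv directly. The "pin" label marks the end of each op
// and is handed to the parser, which apparently uses it as the resume point
// when a mismatched value has to be skipped instead.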
func (self *_Assembler) _asm_OP_i8(ins *_Instr) {
    var pin = "_i8_end_{n}"
    self.parse_signed(int8Type, pin, -1) // PARSE int8
    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_i16(ins *_Instr) {
    var pin = "_i16_end_{n}"
    self.parse_signed(int16Type, pin, -1) // PARSE int16
    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16
    self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_i32(ins *_Instr) {
    var pin = "_i32_end_{n}"
    self.parse_signed(int32Type, pin, -1) // PARSE int32
    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32
    self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_i64(ins *_Instr) {
    var pin = "_i64_end_{n}"
    self.parse_signed(int64Type, pin, -1) // PARSE int64
    self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u8(ins *_Instr) {
    var pin = "_u8_end_{n}"
    self.parse_unsigned(uint8Type, pin, -1) // PARSE uint8
    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u16(ins *_Instr) {
    var pin = "_u16_end_{n}"
    self.parse_unsigned(uint16Type, pin, -1) // PARSE uint16
    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16
    self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u32(ins *_Instr) {
    var pin = "_u32_end_{n}"
    self.parse_unsigned(uint32Type, pin, -1) // PARSE uint32
    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32
    self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_u64(ins *_Instr) {
    var pin = "_u64_end_{n}"
    self.parse_unsigned(uint64Type, pin, -1) // PARSE uint64
    self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_f32(ins *_Instr) {
    var pin = "_f32_end_{n}"
    self.parse_number(float32Type, pin, -1) // PARSE NUMBER
    self.range_single() // RANGE float32
    self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0)) // MOVSS X0, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_f64(ins *_Instr) {
    var pin = "_f64_end_{n}"
    self.parse_number(float64Type, pin, -1) // PARSE NUMBER
    self.Emit("MOVSD", _VAR_st_Dv, _X0) // MOVSD st.Dv, X0
    self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0)) // MOVSD X0, (VP)
    self.Link(pin)
}

func (self *_Assembler) _asm_OP_unquote(ins *_Instr) {
    self.check_eof(2)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\')) // CMPB (IP)(IC), $'\\'
    self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"')) // CMPB 1(IP)(IC), $'"'
    self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error
    self.Emit("ADDQ", jit.Imm(2), _IC) // ADDQ $2, IC
    self.parse_string() // PARSE STRING
    self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP)
}

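// The OP_nil_* ops zero the destination in increasing widths: nil_1 clears
// a single machine word, nil_2 a 16-byte two-word header (string- or
// interface-shaped), and nil_3 a 24-byte three-word header (slice-shaped).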
func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
    self.Emit("XORL", _AX, _AX) // XORL AX, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
}

func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
    self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
}

func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
    self.Emit("XORL" , _AX, _AX) // XORL AX, AX
    self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16)) // MOVQ AX, 16(VP)
}

func (self *_Assembler) _asm_OP_deref(p *_Instr) {
    self.vfollow(p.vt())
}

func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ ${p.i64()}, AX
    self.Emit("ADDQ", _AX, _VP) // ADDQ AX, VP
}

func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
    self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX
    self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
    self.Sjmp("JA" , "_not_null_{n}") // JA _not_null_{n}
    self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null"
    self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
    self.Xjmp("JE" , p.vi()) // JE {p.vi()}
    self.Link("_not_null_{n}") // _not_null_{n}:
}

func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
    self.Emit("LEAQ" , jit.Ptr(_IC, 5), _AX) // LEAQ 5(IC), AX
    self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
    self.Sjmp("JA" , "_not_null_quote_{n}") // JA _not_null_quote_{n}
    self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null"
    self.Sjmp("JNE" , "_not_null_quote_{n}") // JNE _not_null_quote_{n}
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"')) // CMPB 4(IP)(IC), $'"'
    self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
    self.Xjmp("JE" , p.vi()) // JE {p.vi()}
    self.Link("_not_null_quote_{n}") // _not_null_quote_{n}:
}

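// _asm_OP_map_init allocates the destination map on first use: if *(VP) is
// nil it calls makemap_small and publishes the new map pointer through a
// write barrier; either way VP is then advanced to the map itself for the
// key/value ops that follow.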
func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n}
    self.call_go(_F_makemap_small) // CALL_GO makemap_small
    self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
    self.WritePtrAX(6, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
    self.Link("_end_{n}") // _end_{n}:
    self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP
}

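// The map-key ops below parse a quoted key, range-check it like the value
// ops above, then insert it. 32- and 64-bit keys take the runtime fast paths
// (mapassign_fast32 / mapassign_fast64) when mapfast(vt) permits; all other
// shapes go through the generic mapassign with the key spilled to st.Iv or
// st.Dv.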
func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
    self.parse_signed(int8Type, "", p.vi()) // PARSE int8
    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int8, mapassign, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
    self.parse_signed(int16Type, "", p.vi()) // PARSE int16
    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int16, mapassign, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
    self.parse_signed(int32Type, "", p.vi()) // PARSE int32
    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int32, mapassign, st.Iv
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN int32, mapassign_fast32
    }
}

func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
    self.parse_signed(int64Type, "", p.vi()) // PARSE int64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int64, mapassign, st.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN int64, mapassign_fast64
    }
}

func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
    self.parse_unsigned(uint8Type, "", p.vi()) // PARSE uint8
    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint8, mapassign, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
    self.parse_unsigned(uint16Type, "", p.vi()) // PARSE uint16
    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint16, mapassign, st.Iv
}

func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
    self.parse_unsigned(uint32Type, "", p.vi()) // PARSE uint32
    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint32, mapassign, st.Iv
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN uint32, mapassign_fast32
    }
}

func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
    self.parse_unsigned(uint64Type, "", p.vi()) // PARSE uint64
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint64, mapassign, st.Iv
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
        self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN uint64, mapassign_fast64
    }
}

func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
    self.parse_number(float32Type, "", p.vi()) // PARSE NUMBER
    self.range_single() // RANGE float32
    self.Emit("MOVSS", _X0, _VAR_st_Dv) // MOVSS X0, st.Dv
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}

func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
    self.parse_number(float64Type, "", p.vi()) // PARSE NUMBER
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
}

func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
    self.parse_string() // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n
    if vt := p.vt(); !mapfast(vt) {
        self.valloc(vt.Key(), _DI)
        self.Emit("MOVOU", _VAR_sv, _X0)
        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
        self.mapassign_std(vt, jit.Ptr(_DI, 0))
    } else {
        self.Emit("MOVQ", _VAR_sv_p, _DI) // MOVQ sv.p, DI
        self.Emit("MOVQ", _VAR_sv_n, _SI) // MOVQ sv.n, SI
        self.mapassign_str_fast(vt, _DI, _SI) // MAPASSIGN string, DI, SI
    }
}

func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
    self.parse_string() // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n
    self.mapassign_utext(p.vt(), false) // MAPASSIGN utext, ${p.vt()}, false
}

func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
    self.parse_string() // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE once, sv.p, sv.n
    self.mapassign_utext(p.vt(), true) // MAPASSIGN utext, ${p.vt()}, true
}

func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
    self.call_sf(_F_skip_array) // CALL_SF skip_array
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
}

func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
    self.mem_clear_rem(p.i64(), true)
}

func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
    self.mem_clear_rem(p.i64(), false)
}

func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
    self.Emit("XORL" , _AX, _AX) // XORL AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JNZ" , "_done_{n}") // JNZ _done_{n}
    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX) // MOVQ ${_MinSlice}, CX
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16)) // MOVQ CX, 16(VP)
    self.Emit("MOVQ" , jit.Type(p.vt()), _DX) // MOVQ ${p.vt()}, DX
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16)) // MOVQ CX, 16(SP)
    self.call_go(_F_makeslice) // CALL_GO makeslice
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX
    self.WritePtrAX(7, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
    self.Link("_done_{n}") // _done_{n}:
    self.Emit("XORL" , _AX, _AX) // XORL AX, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
}

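// _asm_OP_check_empty peeks one character ahead for an immediately closed
// array: on ']' it installs a non-nil zero-length slice (base _Zero_Base,
// len = cap = 0) and jumps past the element loop entirely. Only the array
// form is implemented, hence the panic below for any other bracket.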
func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
    rbracket := p.vb()
    if rbracket == ']' {
        self.check_eof(1)
        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX
        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB (IP)(IC), $']'
        self.Sjmp("JNE" , "_not_empty_array_{n}") // JNE _not_empty_array_{n}
        self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
        self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8)) // MOVOU X0, 8(VP)
        self.Xjmp("JMP" , p.vi()) // JMP {p.vi()}
        self.Link("_not_empty_array_{n}") // _not_empty_array_{n}:
    } else {
        panic("_asm_OP_check_empty: only the empty-array check is implemented")
    }
}

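// _asm_OP_slice_append bumps the slice length, growing through growslice
// (with doubled capacity) once len reaches cap. Since growslice leaves the
// spare region uninitialized for pointer-free element types, that tail is
// zeroed manually in the PtrData == 0 branch below; either way VP ends up
// pointing at the slot reserved for the incoming element.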
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX) // MOVQ 8(VP), AX
    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16)) // CMPQ AX, 16(VP)
    self.Sjmp("JB" , "_index_{n}") // JB _index_{n}
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0) // MOVOU (VP), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
    self.Emit("SHLQ" , jit.Imm(1), _AX) // SHLQ $1, AX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
    self.call_go(_F_growslice) // CALL_GO growslice
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVQ 40(SP), DI
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX) // MOVQ 48(SP), AX
    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI) // MOVQ 56(SP), SI
    self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true) // MOVQ DI, (VP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP)
    // growslice does not zero the region between the new length and the new
    // capacity when the element type carries no pointer data, so we must
    // zero it ourselves to keep stale bytes from being decoded as values.
    if rt.UnpackType(p.vt()).PtrData == 0 {
        self.Emit("SUBQ" , _AX, _SI) // SUBQ AX, SI
        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP)
        self.Emit("MOVQ" , _DI, _VP) // MOVQ DI, VP
        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX
        self.From("MULQ" , _CX) // MULQ CX
        self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP
        self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX
        self.From("MULQ" , _CX) // MULQ CX
        self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
        self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0)) // MOVQ VP, (SP)
        self.mem_clear_fn(true) // CALL_GO memclr{Has,NoHeap}Pointers
        self.Sjmp("JMP", "_append_slice_end_{n}") // JMP _append_slice_end_{n}
    }
    self.Link("_index_{n}") // _index_{n}:
    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP)
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP) // MOVQ (VP), VP
    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX
    self.From("MULQ" , _CX) // MULQ CX
    self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP
    self.Link("_append_slice_end_{n}")
}

func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
    self.call_sf(_F_skip_object) // CALL_SF skip_object
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
}

func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
    self.call_sf(_F_skip_one) // CALL_SF skip_one
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
}

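// _asm_OP_struct_field resolves an object key to a field index. The emitted
// code hashes the key with strhash, then probes the frozen FieldMap bucket
// array (32-byte entries, slot = hash % N, advancing linearly on collision):
// an empty slot diverts to the case-insensitive fallback, and a candidate is
// confirmed by comparing hash, then length, then the bytes via memequal.
// The matched index (or -1 for an unknown key) is left in _VAR_sr for the
// OP_switch dispatch that follows; unknown keys fail hard only when
// _F_disable_unknown is set in the decoder flags.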
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
    self.Emit("MOVQ" , jit.Imm(-1), _AX) // MOVQ $-1, AX
    self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, sr
    self.parse_string() // PARSE STRING
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE once, sv.p, sv.n
    self.Emit("LEAQ" , _VAR_sv, _AX) // LEAQ sv, AX
    self.Emit("XORL" , _CX, _CX) // XORL CX, CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
    self.call_go(_F_strhash) // CALL_GO strhash
    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX) // MOVQ 16(SP), AX
    self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9
    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX) // MOVQ ${p.vf()}, CX
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI) // MOVQ FieldMap.b(CX), SI
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX) // MOVQ FieldMap.N(CX), CX
    self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX
    self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n}
    self.Link("_loop_{n}") // _loop_{n}:
    self.Emit("XORL" , _DX, _DX) // XORL DX, DX
    self.From("DIVQ" , _CX) // DIVQ CX
    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX) // LEAQ 1(DX), AX
    self.Emit("SHLQ" , jit.Imm(5), _DX) // SHLQ $5, DX
    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI) // LEAQ (SI)(DX), DI
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8) // MOVQ FieldEntry.Hash(DI), R8
    self.Emit("TESTQ", _R8, _R8) // TESTQ R8, R8
    self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n}
    self.Emit("CMPQ" , _R8, _R9) // CMPQ R8, R9
    self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX) // MOVQ FieldEntry.Name+8(DI), DX
    self.Emit("CMPQ" , _DX, _VAR_sv_n) // CMPQ DX, sv.n
    self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n}
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8) // MOVQ FieldEntry.ID(DI), R8
    self.Emit("MOVQ" , _AX, _VAR_ss_AX) // MOVQ AX, ss.AX
    self.Emit("MOVQ" , _CX, _VAR_ss_CX) // MOVQ CX, ss.CX
    self.Emit("MOVQ" , _SI, _VAR_ss_SI) // MOVQ SI, ss.SI
    self.Emit("MOVQ" , _R8, _VAR_ss_R8) // MOVQ R8, ss.R8
    self.Emit("MOVQ" , _R9, _VAR_ss_R9) // MOVQ R9, ss.R9
    self.Emit("MOVQ" , _VAR_sv_p, _AX) // MOVQ sv.p, AX
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX) // MOVQ FieldEntry.Name(DI), CX
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16)) // MOVQ DX, 16(SP)
    self.call_go(_F_memequal) // CALL_GO memequal
    self.Emit("MOVQ" , _VAR_ss_AX, _AX) // MOVQ ss.AX, AX
    self.Emit("MOVQ" , _VAR_ss_CX, _CX) // MOVQ ss.CX, CX
    self.Emit("MOVQ" , _VAR_ss_SI, _SI) // MOVQ ss.SI, SI
    self.Emit("MOVQ" , _VAR_ss_R9, _R9) // MOVQ ss.R9, R9
    self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX) // MOVB 24(SP), DX
    self.Emit("TESTB", _DX, _DX) // TESTB DX, DX
    self.Sjmp("JZ" , "_loop_{n}") // JZ _loop_{n}
    self.Emit("MOVQ" , _VAR_ss_R8, _R8) // MOVQ ss.R8, R8
    self.Emit("MOVQ" , _R8, _VAR_sr) // MOVQ R8, sr
    self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
    self.Link("_try_lowercase_{n}") // _try_lowercase_{n}:
    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX) // MOVQ ${p.vf()}, AX
    self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
    self.call_go(_F_FieldMap_GetCaseInsensitive) // CALL_GO FieldMap::GetCaseInsensitive
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX
    self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, sr
    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JNS" , "_end_{n}") // JNS _end_{n}
    self.Emit("BTQ" , jit.Imm(_F_disable_unknown), _ARG_fv) // BTQ ${_F_disable_unknown}, fv
    self.Sjmp("JC" , _LB_field_error) // JC _field_error
    self.Link("_end_{n}") // _end_{n}:
}

func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
    self.unmarshal_json(p.vt(), true)
}

func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
    self.unmarshal_json(p.vt(), false)
}

func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
    self.unmarshal_text(p.vt(), true)
}

func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
    self.unmarshal_text(p.vt(), false)
}

func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
    self.lspace("_{n}")
}

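// lspace advances the cursor past JSON whitespace: up to four characters are
// tested inline against the _BM_space bitmap (one BTQ per byte), and longer
// whitespace runs are handed over to the native lspace helper, whose result
// is then checked for parsing errors and end-of-input.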
func (self *_Assembler) lspace(suffix string) {
    var label = "_lspace" + suffix
    self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
    self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
    self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
    self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
    self.Sjmp("JA" , label) // JA _lspace_{n}
    self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
    self.Sjmp("JNC" , label) // JNC _lspace_{n}
    /* test up to 4 characters */
    for i := 0; i < 3; i++ {
        self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
        self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
        self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
        self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
        self.Sjmp("JA" , label) // JA _lspace_{n}
        self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
        self.Sjmp("JNC" , label) // JNC _lspace_{n}
    }
    /* hand over to the native function */
    self.Emit("MOVQ" , _IP, _DI) // MOVQ IP, DI
    self.Emit("MOVQ" , _IL, _SI) // MOVQ IL, SI
    self.Emit("MOVQ" , _IC, _DX) // MOVQ IC, DX
    self.call(_F_lspace) // CALL lspace
    self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
    self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
    self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
    self.Emit("MOVQ" , _AX, _IC) // MOVQ AX, IC
    self.Link(label) // _lspace_{n}:
}

func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
    self.match_char(p.vb())
}

func (self *_Assembler) match_char(char byte) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char))) // CMPB (IP)(IC), ${char}
    self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
    self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
}

func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
    self.check_eof(1)
    self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}
    self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
    self.Xjmp("JE" , p.vi()) // JE {p.vi()}
}

func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}
    self.Xjmp("JE" , p.vi()) // JE {p.vi()}
}

func (self *_Assembler) _asm_OP_add(p *_Instr) {
    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC) // ADDQ ${p.vi()}, IC
}

func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP) // MOVQ (ST)(AX), VP
}

func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX
    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes)) // CMPQ CX, ${_MaxStackBytes}
    self.Sjmp("JAE" , _LB_stack_error) // JAE _stack_error
    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)
    self.Emit("ADDQ", jit.Imm(8), _CX) // ADDQ $8, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0)) // MOVQ CX, (ST)
}

func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
    self.Emit("SUBQ", jit.Imm(8), _AX) // SUBQ $8, AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
    self.Emit("XORL", _ET, _ET) // XORL ET, ET
    self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8)) // MOVQ ET, 8(ST)(AX)
}

func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
    self.Emit("SUBQ" , jit.Imm(16), _AX) // SUBQ $16, AX
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
    self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)
}

func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.Emit("MOVQ", jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX
    self.decode_dynamic(_AX, _VP) // DECODE AX, VP
}

func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}

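// _asm_OP_switch dispatches on the field index that OP_struct_field left in
// _VAR_sr: in-range indexes jump through a table of 32-bit PC-relative
// offsets, while anything else (notably -1 for unknown fields) falls through
// to the default case.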
func (self *_Assembler) _asm_OP_switch(p *_Instr) {
    self.Emit("MOVQ", _VAR_sr, _AX) // MOVQ sr, AX
    self.Emit("CMPQ", _AX, jit.Imm(p.i64())) // CMPQ AX, ${len(p.vs())}
    self.Sjmp("JAE" , "_default_{n}") // JAE _default_{n}
    /* jump table selector */
    self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
    self.Sref("_switch_table_{n}", 4) // .... &_switch_table_{n}
    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
    self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
    self.Rjmp("JMP" , _AX) // JMP AX
    self.Link("_switch_table_{n}") // _switch_table_{n}:
    /* generate the jump table */
    for i, v := range p.vs() {
        self.Xref(v, int64(-i) * 4)
    }
    /* default case */
    self.Link("_default_{n}")
    self.NOP()
}

func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16)) // MOVQ $(p2.op()), 16(SP)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP)
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP)
    self.call_go(_F_println)
}