assembler_stkabi_amd64.go

// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package encoder

import (
    `fmt`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/cpu`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Register Allocations
 *
 *  State Registers:
 *
 *      %rbx : stack base
 *      %rdi : result pointer
 *      %rsi : result length
 *      %rdx : result capacity
 *      %r12 : sp->p
 *      %r13 : sp->q
 *      %r14 : sp->x
 *      %r15 : sp->f
 *
 *  Error Registers:
 *
 *      %r10 : error type register
 *      %r11 : error pointer register
 */

/** Function Prototype & Stack Map
 *
 *  func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
 *
 *  buf    :   (FP)
 *  p      :  8(FP)
 *  sb     : 16(FP)
 *  fv     : 24(FP)
 *  err.vt : 32(FP)
 *  err.vp : 40(FP)
 */
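
// For reference, the prototype above corresponds to a function value of the
// package's _Encoder type, which Load() below obtains by reinterpreting the
// JIT-assembled code through ptoenc. A sketch of the shape (the actual
// definition lives elsewhere in this package):
//
//     type _Encoder func(rb *[]byte, vp unsafe.Pointer, sb *_Stack, fv uint64) error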

const (
    _S_cond = iota
    _S_init
)

const (
    _FP_args   = 48 // 48 bytes for passing arguments to this function
    _FP_fargs  = 64 // 64 bytes for passing arguments to other Go functions
    _FP_saves  = 64 // 64 bytes for saving the registers before CALL instructions
    _FP_locals = 24 // 24 bytes for local variables
)

const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
    _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
    _FP_base = _FP_size + 8 // 8 bytes for the return address
)
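
// Working out the frame arithmetic: _FP_offs = 64 + 64 + 24 = 152, so the
// caller's frame pointer is saved at 152(SP); _FP_size = 160 bytes are
// reserved by the prologue; and incoming arguments start at _FP_base =
// 168(SP), just past the return address pushed by CALL. All the _ARG_* and
// _RET_* addresses below are derived from this base.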

const (
    _FM_exp32 = 0x7f800000
    _FM_exp64 = 0x7ff0000000000000
)

const (
    _IM_null   = 0x6c6c756e // 'null'
    _IM_true   = 0x65757274 // 'true'
    _IM_fals   = 0x736c6166 // 'fals' ('false' without the 'e')
    _IM_open   = 0x00225c22 // '"\"' with a trailing NUL byte
    _IM_array  = 0x5d5b     // '[]'
    _IM_object = 0x7d7b     // '{}'
    _IM_mulv   = -0x5555555555555555
)
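
// The _IM_* immediates are character sequences packed little-endian so that a
// single MOVW/MOVL can emit several bytes at once: 'null' is the bytes 'n'
// (0x6e), 'u' (0x75), 'l' (0x6c), 'l' (0x6c), which read least-significant
// byte first as 0x6c6c756e. _IM_mulv (0xAAAAAAAAAAAAAAAB as a uint64) is a
// magic multiplier for dividing by 3; see the base64 sizing in _asm_OP_bin.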

const (
    _LB_more_space        = "_more_space"
    _LB_more_space_return = "_more_space_return_"
)

const (
    _LB_error                 = "_error"
    _LB_error_too_deep        = "_error_too_deep"
    _LB_error_invalid_number  = "_error_invalid_number"
    _LB_error_nan_or_infinite = "_error_nan_or_infinite"
    _LB_panic                 = "_panic"
)

var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
)

var (
    _X0 = jit.Reg("X0")
    _Y0 = jit.Reg("Y0")
)

var (
    _ST = jit.Reg("BX")
    _RP = jit.Reg("DI")
    _RL = jit.Reg("SI")
    _RC = jit.Reg("DX")
)

var (
    _LR  = jit.Reg("R9")
    _R10 = jit.Reg("R10") // used for gcWriteBarrier
    _ET  = jit.Reg("R10")
    _EP  = jit.Reg("R11")
)

var (
    _SP_p = jit.Reg("R12")
    _SP_q = jit.Reg("R13")
    _SP_x = jit.Reg("R14")
    _SP_f = jit.Reg("R15")
)

var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)
    _ARG_vp = jit.Ptr(_SP, _FP_base+8)
    _ARG_sb = jit.Ptr(_SP, _FP_base+16)
    _ARG_fv = jit.Ptr(_SP, _FP_base+24)
)

var (
    _RET_et = jit.Ptr(_SP, _FP_base+32)
    _RET_ep = jit.Ptr(_SP, _FP_base+40)
)

var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs+_FP_saves)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs+_FP_saves+8)
    _VAR_vp = jit.Ptr(_SP, _FP_fargs+_FP_saves+16)
)

var (
    _REG_ffi = []obj.Addr{_RP, _RL, _RC}
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
    _REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
)
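
// Each save set corresponds to one kind of call site: _REG_ffi covers the
// output-buffer registers clobbered by native (C) calls, _REG_enc is spilled
// around nested encoder calls (RP and RC are reloaded from the buffer header
// afterwards via load_buffer), _REG_jsr additionally preserves the link
// register for the _more_space subroutine, and _REG_all is everything that
// must survive an arbitrary Go call.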

type _Assembler struct {
    jit.BaseAssembler
    p    _Program
    x    int
    name string
}

func newAssembler(p _Program) *_Assembler {
    return new(_Assembler).Init(p)
}

/** Assembler Interface **/

func (self *_Assembler) Load() _Encoder {
    return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}

func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}

/** Assembler Stages **/

var _OpFuncTab = [256]func(*_Assembler, *_Instr){
    _OP_null           : (*_Assembler)._asm_OP_null,
    _OP_empty_arr      : (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj      : (*_Assembler)._asm_OP_empty_obj,
    _OP_bool           : (*_Assembler)._asm_OP_bool,
    _OP_i8             : (*_Assembler)._asm_OP_i8,
    _OP_i16            : (*_Assembler)._asm_OP_i16,
    _OP_i32            : (*_Assembler)._asm_OP_i32,
    _OP_i64            : (*_Assembler)._asm_OP_i64,
    _OP_u8             : (*_Assembler)._asm_OP_u8,
    _OP_u16            : (*_Assembler)._asm_OP_u16,
    _OP_u32            : (*_Assembler)._asm_OP_u32,
    _OP_u64            : (*_Assembler)._asm_OP_u64,
    _OP_f32            : (*_Assembler)._asm_OP_f32,
    _OP_f64            : (*_Assembler)._asm_OP_f64,
    _OP_str            : (*_Assembler)._asm_OP_str,
    _OP_bin            : (*_Assembler)._asm_OP_bin,
    _OP_quote          : (*_Assembler)._asm_OP_quote,
    _OP_number         : (*_Assembler)._asm_OP_number,
    _OP_eface          : (*_Assembler)._asm_OP_eface,
    _OP_iface          : (*_Assembler)._asm_OP_iface,
    _OP_byte           : (*_Assembler)._asm_OP_byte,
    _OP_text           : (*_Assembler)._asm_OP_text,
    _OP_deref          : (*_Assembler)._asm_OP_deref,
    _OP_index          : (*_Assembler)._asm_OP_index,
    _OP_load           : (*_Assembler)._asm_OP_load,
    _OP_save           : (*_Assembler)._asm_OP_save,
    _OP_drop           : (*_Assembler)._asm_OP_drop,
    _OP_drop_2         : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse        : (*_Assembler)._asm_OP_recurse,
    _OP_is_nil         : (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1      : (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1      : (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2      : (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4      : (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8      : (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map    : (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto           : (*_Assembler)._asm_OP_goto,
    _OP_map_iter       : (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop       : (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key  : (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key  : (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len      : (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next     : (*_Assembler)._asm_OP_slice_next,
    _OP_marshal        : (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p      : (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text   : (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set       : (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc     : (*_Assembler)._asm_OP_cond_testc,
}

func (self *_Assembler) instr(v *_Instr) {
    if fn := _OpFuncTab[v.op()]; fn != nil {
        fn(self, v)
    } else {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
}

func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}

func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}

func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _ET, _ET)
    self.Emit("XORL", _EP, _EP)
    self.Link(_LB_error)
    self.Emit("MOVQ", _ARG_rb, _AX)                // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))        // MOVQ RL, 8(AX)
    self.Emit("MOVQ", _ET, _RET_et)                // MOVQ ET, et<>+32(FP)
    self.Emit("MOVQ", _EP, _RET_ep)                // MOVQ EP, ep<>+40(FP)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)      // ADDQ $_FP_size, SP
    self.Emit("RET")                               // RET
}

func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)      // SUBQ $_FP_size, SP
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
    self.load_buffer()                             // LOAD {buf}
    self.Emit("MOVQ", _ARG_vp, _SP_p)              // MOVQ vp<>+8(FP), SP.p
    self.Emit("MOVQ", _ARG_sb, _ST)                // MOVQ sb<>+16(FP), ST
    self.Emit("XORL", _SP_x, _SP_x)                // XORL SP.x, SP.x
    self.Emit("XORL", _SP_f, _SP_f)                // XORL SP.f, SP.f
    self.Emit("XORL", _SP_q, _SP_q)                // XORL SP.q, SP.q
}

/** Assembler Inline Functions **/

func (self *_Assembler) xsave(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves/8-1 {
            panic("too many registers to save")
        } else {
            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
        }
    }
}

func (self *_Assembler) xload(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves/8-1 {
            panic("too many registers to load")
        } else {
            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
        }
    }
}
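
// xsave and xload spill registers into the _FP_saves area, one 8-byte slot
// per register starting at _FP_fargs(SP). With _FP_saves = 64 there is room
// for exactly 8 registers, which is the size of the largest set, _REG_all.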

func (self *_Assembler) rbuf_di() {
    if _RP.Reg != x86.REG_DI {
        panic("register allocation messed up: RP != DI")
    } else {
        self.Emit("ADDQ", _RL, _RP)
    }
}

func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()                          // SAVE $C_regs
    self.rbuf_di()                         // MOVQ RP, DI
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI
    self.call_c(fn)                        // CALL_C $fn
    self.Emit("ADDQ", _AX, _RL)            // ADDQ AX, RL
}

func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    /* 8-byte stores */
    for i <= len(m)-8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)       // MOVQ $s[i:], AX
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL)
        i += 8
    }

    /* 4-byte stores */
    if i <= len(m)-4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL)
        i += 4
    }

    /* 2-byte stores */
    if i <= len(m)-2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL)
        i += 2
    }

    /* last byte */
    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i], i(RP)(RL)
    }
}
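
// store_str unrolls a constant string into the widest immediate stores
// available. For example, a 7-byte literal is emitted as one MOVL, one MOVW
// and one MOVB instead of a memmove call; callers must have reserved enough
// buffer space (via check_size) beforehand.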

func (self *_Assembler) check_size(n int) {
    self.check_size_rl(jit.Ptr(_RL, int64(n)))
}

func (self *_Assembler) check_size_r(r obj.Addr, d int) {
    self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}

func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    /* the following code relies on LR == R9 to work */
    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    /* check for buffer capacity */
    self.x++
    self.Emit("LEAQ", v, _AX)   // LEAQ $v, AX
    self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC
    self.Sjmp("JBE", key)       // JBE _more_space_return_{n}
    self.slice_grow_ax(key)     // GROW $key
    self.Link(key)              // _more_space_return_{n}:
}

func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)      // LEAQ ?(PC), R9
    self.Sref(ret, 4)                // .... &ret
    self.Sjmp("JMP", _LB_more_space) // JMP _more_space
}
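
// The raw bytes 0x4c 0x8d 0x0d encode `LEAQ disp32(PC), R9`: 0x4C is a REX.WR
// prefix, 0x8D is LEA, and ModRM 0x0D selects R9 with RIP-relative
// addressing. Sref patches the 4-byte displacement to the return label, so
// _more_space can resume the caller with a plain `JMP R9` instead of a
// CALL/RET pair, keeping buffer checks off the call stack.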

/** State Stack Helpers **/

const (
    _StateSize  = int64(unsafe.Sizeof(_State{}))
    _StackLimit = _MaxStack * _StateSize
)

func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)            // MOVQ (ST), CX
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8)   // LEAQ _StateSize(CX), R8
    self.Emit("CMPQ", _R8, jit.Imm(_StackLimit))       // CMPQ R8, $_StackLimit
    self.Sjmp("JAE", _LB_error_too_deep)               // JAE _error_too_deep
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))  // MOVQ SP.x, 8(ST)(CX)
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))  // MOVQ SP.p, 24(ST)(CX)
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))  // MOVQ SP.q, 32(ST)(CX)
    self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0))            // MOVQ R8, (ST)
}

func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)            // MOVQ (ST), AX
    self.Emit("SUBQ", jit.Imm(decr), _AX)              // SUBQ $decr, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))            // MOVQ AX, (ST)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _SP_x)  // MOVQ 8(ST)(AX), SP.x
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q
    self.Emit("PXOR", _X0, _X0)                        // PXOR X0, X0
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))   // MOVOU X0, 8(ST)(AX)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))  // MOVOU X0, 24(ST)(AX)
}
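
// After popping, drop_state zeroes the vacated slot with two 16-byte MOVOU
// stores. The slot held live pointers (sp.p and sp.q were written through
// WritePtr), so clearing it keeps stale references out of the _Stack and lets
// the garbage collector reclaim whatever they pointed to.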

/** Buffer Helpers **/

func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(1), _RL)                             // ADDQ $1, RL
}

func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL)
    self.Emit("ADDQ", jit.Imm(n), _RL)                             // ADDQ $n, RL
}

func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)                              // TEXT $ss
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL
}

func (self *_Assembler) prep_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)         // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
}

func (self *_Assembler) prep_buffer_c() {
    self.Emit("MOVQ", _ARG_rb, _DI)         // MOVQ rb<>+0(FP), DI
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ RL, 8(DI)
}

func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)          // MOVQ rb<>+0(FP), CX
    self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))  // MOVQ RP, (CX)
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))  // MOVQ RL, 8(CX)
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX)
}

func (self *_Assembler) load_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)          // MOVQ rb<>+0(FP), AX
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)  // MOVQ (AX), RP
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)  // MOVQ 8(AX), RL
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC
}
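
// The three buffer registers shadow the output slice header behind *rb: the
// data pointer at offset 0 maps to RP, the length at offset 8 to RL, and the
// capacity at offset 16 to RC. save_buffer and load_buffer synchronize the
// two around any call that may reallocate the buffer.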

/** Function Interface Helpers **/

func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _AX) // MOVQ $pc, AX
    self.Rjmp("CALL", _AX)     // CALL AX
}

func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...) // SAVE $REG_ffi
}

func (self *_Assembler) call_c(pc obj.Addr) {
    self.call(pc)           // CALL $pc
    self.xload(_REG_ffi...) // LOAD $REG_ffi
}

func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...) // SAVE $REG_all
    self.call(pc)           // CALL $pc
    self.xload(_REG_all...) // LOAD $REG_all
}

func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...) // SAVE $REG_enc
    self.call(pc)           // CALL $pc
    self.xload(_REG_enc...) // LOAD $REG_enc
    self.load_buffer()      // LOAD {buf}
}

func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
    switch vt.Kind() {
    case reflect.Interface        : self.call_marshaler_i(fn, it)
    case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true)
    /* a struct or array of a single direct-interface type can be passed directly */
    default                       : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
    }
}

func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ", jit.Gtype(it), _AX)     // MOVQ $it, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))   // MOVQ AX, (SP)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX
    self.Emit("TESTQ", _AX, _AX)              // TESTQ AX, AX
    self.Sjmp("JZ", "_null_{n}")              // JZ _null_{n}
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))   // MOVQ AX, 8(SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 16))  // MOVQ CX, 16(SP)
    self.call_go(_F_assertI2I)                // CALL_GO assertI2I
    self.prep_buffer()                        // MOVE {buf}, (SP)
    self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0) // MOVOU 24(SP), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))  // MOVOU X0, 8(SP)
    self.Emit("MOVQ", _ARG_fv, _CX)           // MOVQ ARG.fv, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))  // MOVQ CX, 24(SP)
    self.call_encoder(fn)                     // CALL $fn
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)  // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)  // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)              // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)               // JNZ _error
    self.Sjmp("JMP", "_done_{n}")             // JMP _done_{n}
    self.Link("_null_{n}")                    // _null_{n}:
    self.check_size(4)                        // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)        // ADDQ $4, RL
    self.Link("_done_{n}")                    // _done_{n}:
}

func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer()                       // MOVE {buf}, (SP)
    self.Emit("MOVQ", jit.Itab(it, vt), _AX) // MOVQ $(itab(it, vt)), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))  // MOVQ AX, 8(SP)

    /* dereference the pointer if needed */
    if !deref {
        self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16)) // MOVQ SP.p, 16(SP)
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))  // MOVQ AX, 16(SP)
    }

    /* call the encoder, and perform error checks */
    self.Emit("MOVQ", _ARG_fv, _CX)          // MOVQ ARG.fv, CX
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP)
    self.call_encoder(fn)                    // CALL $fn
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)             // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)              // JNZ _error
}

/** Builtin: _more_space **/

var (
    _T_byte      = jit.Type(byteType)
    _F_growslice = jit.Func(growslice)
)

func (self *_Assembler) more_space() {
    self.Link(_LB_more_space)
    self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8))  // MOVQ RP, 8(SP)
    self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16)) // MOVQ RL, 16(SP)
    self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24)) // MOVQ RC, 24(SP)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
    self.Emit("MOVQ", _T_byte, _AX)          // MOVQ $_T_byte, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))  // MOVQ AX, (SP)
    self.xsave(_REG_jsr...)                  // SAVE $REG_jsr
    self.call(_F_growslice)                  // CALL growslice
    self.xload(_REG_jsr...)                  // LOAD $REG_jsr
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP) // MOVQ 40(SP), RP
    self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL) // MOVQ 48(SP), RL
    self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC) // MOVQ 56(SP), RC
    self.save_buffer()                       // SAVE {buf}
    self.Rjmp("JMP", _LR)                    // JMP LR
}
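
// The stack slots around the call follow runtime.growslice's ABI0 layout in
// this Go version, func growslice(et *_type, old slice, cap int) slice: the
// element type at (SP), the old (ptr, len, cap) triple at 8/16/24(SP), and
// the required capacity (already computed into AX by check_size_rl) at
// 32(SP); the returned slice header lands at 40/48/56(SP). Note that AX must
// be stored at 32(SP) before it is clobbered with the type pointer. Since
// _more_space is entered by a JMP with the resume address in R9, it spills
// _REG_jsr (which includes _LR) and leaves with `JMP LR` rather than RET.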

/** Builtin Errors **/

var (
    _V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)

func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)               // MOVQ $_V_ERR_too_deep, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP", _LB_error)                           // JMP _error
}

func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.call_go(_F_error_number)            // CALL_GO error_number
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP
    self.Sjmp("JMP", _LB_error)              // JMP _error
}

func (self *_Assembler) error_nan_or_infinite() {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)        // MOVQ $_V_ERR_nan_or_infinite, EP
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValueError, ET
    self.Sjmp("JMP", _LB_error)                           // JMP _error
}

/** String Encoding Routine **/

var (
    _F_quote = jit.Imm(int64(native.S_quote))
    _F_panic = jit.Func(goPanic)
)

func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8))
    self.call_go(_F_panic)
}

func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
    self.Emit("TESTQ", _AX, _AX)              // TESTQ AX, AX
    self.Sjmp("JZ", "_str_empty_{n}")         // JZ _str_empty_{n}
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE", "_str_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
    self.Sjmp("JMP", _LB_panic)
    self.Link("_str_next_{n}")

    /* opening quote, check for double quote */
    if !doubleQuote {
        self.check_size_r(_AX, 2)  // SIZE $2
        self.add_char('"')         // CHAR $'"'
    } else {
        self.check_size_r(_AX, 6)  // SIZE $6
        self.add_long(_IM_open, 3) // TEXT $'"\"'
    }

    /* quoting loop */
    self.Emit("XORL", _AX, _AX)     // XORL AX, AX
    self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp
    self.Link("_str_loop_{n}")      // _str_loop_{n}:
    self.save_c()                   // SAVE $REG_ffi

    /* load the output buffer first, and then the input buffer,
     * because the parameter registers collide with RP / RL / RC */
    self.Emit("MOVQ", _RC, _CX)                    // MOVQ RC, CX
    self.Emit("SUBQ", _RL, _CX)                    // SUBQ RL, CX
    self.Emit("MOVQ", _CX, _VAR_dn)                // MOVQ CX, dn
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
    self.Emit("LEAQ", _VAR_dn, _CX)                // LEAQ dn, CX
    self.Emit("MOVQ", _VAR_sp, _AX)                // MOVQ sp, AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)      // MOVQ (SP.p), DI
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)      // MOVQ 8(SP.p), SI
    self.Emit("ADDQ", _AX, _DI)                    // ADDQ AX, DI
    self.Emit("SUBQ", _AX, _SI)                    // SUBQ AX, SI

    /* set the flags based on `doubleQuote` */
    if !doubleQuote {
        self.Emit("XORL", _R8, _R8) // XORL R8, R8
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
    }

    /* call the native quoter */
    self.call_c(_F_quote)             // CALL quote
    self.Emit("ADDQ", _VAR_dn, _RL)   // ADDQ dn, RL
    self.Emit("TESTQ", _AX, _AX)      // TESTQ AX, AX
    self.Sjmp("JS", "_str_space_{n}") // JS _str_space_{n}

    /* close the string, check for double quote */
    if !doubleQuote {
        self.check_size(1)                 // SIZE $1
        self.add_char('"')                 // CHAR $'"'
        self.Sjmp("JMP", "_str_end_{n}")   // JMP _str_end_{n}
    } else {
        self.check_size(3)                 // SIZE $3
        self.add_text("\\\"\"")            // TEXT $'\""'
        self.Sjmp("JMP", "_str_end_{n}")   // JMP _str_end_{n}
    }

    /* not enough space to contain the quoted string */
    self.Link("_str_space_{n}")                     // _str_space_{n}:
    self.Emit("NOTQ", _AX)                          // NOTQ AX
    self.Emit("ADDQ", _AX, _VAR_sp)                 // ADDQ AX, sp
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX
    self.slice_grow_ax("_str_loop_{n}")             // GROW _str_loop_{n}

    /* empty string, check for double quote */
    if !doubleQuote {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(2)              // SIZE $2
        self.add_text("\"\"")           // TEXT $'""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    } else {
        self.Link("_str_empty_{n}")     // _str_empty_{n}:
        self.check_size(6)              // SIZE $6
        self.add_text("\"\\\"\\\"\"")   // TEXT $'"\"\""'
        self.Link("_str_end_{n}")       // _str_end_{n}:
    }
}
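
// encode_string quotes the input incrementally: sp (_VAR_sp) tracks how many
// source bytes have been consumed and dn (_VAR_dn) holds the space left in
// the output buffer. When the native quote routine runs out of room it
// returns the consumed count encoded as its bitwise complement, so NOTQ AX
// recovers it; the buffer is then grown to roughly double its capacity (the
// LEAQ (RC)(RC) trick) and the loop re-enters _str_loop_{n} to finish the
// remaining bytes.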

/** OpCode Assembler Functions **/

var (
    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

var (
    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)

var (
    _F_memmove       = jit.Func(memmove)
    _F_error_number  = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)

var (
    _F_iteratorStop  = jit.Func(iteratorStop)
    _F_iteratorNext  = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)

var (
    _F_encodeTypedPointer  obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)

const (
    _MODE_AVX2 = 1 << 2
)

func init() {
    _F_encodeTypedPointer = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}

func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
}

func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_arr_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_arr_end_{n}")
    self.Link("_empty_arr_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_arr_end_{n}")
}

func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_obj_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_obj_end_{n}")
    self.Link("_empty_obj_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_obj_end_{n}")
}

func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
    self.Sjmp("JE", "_false_{n}")                    // JE _false_{n}
    self.check_size(4)                               // SIZE $4
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(4), _RL)               // ADDQ $4, RL
    self.Sjmp("JMP", "_end_{n}")                     // JMP _end_{n}
    self.Link("_false_{n}")                          // _false_{n}:
    self.check_size(5)                               // SIZE $5
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))      // MOVB $'e', 4(RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(5), _RL)               // ADDQ $5, RL
    self.Link("_end_{n}")                            // _end_{n}:
}

func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}

func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}

func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}

func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}

func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}

func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}

func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}
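
// The size passed to store_int reserves worst-case digit counts before the
// native itoa runs: 4 bytes covers "-128" for int8, 6 covers "-32768" for
// int16, and 21 covers "-9223372036854775808" for int64; the 32-bit cases
// (17 and 16) are padded slightly beyond their 11- and 10-character maxima.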

func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVL", jit.Ptr(_SP_p, 0), _AX)   // MOVL (SP.p), AX
    self.Emit("ANDL", jit.Imm(_FM_exp32), _AX)  // ANDL $_FM_exp32, AX
    self.Emit("XORL", jit.Imm(_FM_exp32), _AX)  // XORL $_FM_exp32, AX
    self.Sjmp("JZ", _LB_error_nan_or_infinite)  // JZ _error_nan_or_infinite
    self.save_c()                               // SAVE $C_regs
    self.rbuf_di()                              // MOVQ RP, DI
    self.Emit("MOVSS", jit.Ptr(_SP_p, 0), _X0)  // MOVSS (SP.p), X0
    self.call_c(_F_f32toa)                      // CALL_C f32toa
    self.Emit("ADDQ", _AX, _RL)                 // ADDQ AX, RL
}

func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)   // MOVQ (SP.p), AX
    self.Emit("MOVQ", jit.Imm(_FM_exp64), _CX)  // MOVQ $_FM_exp64, CX
    self.Emit("ANDQ", _CX, _AX)                 // ANDQ CX, AX
    self.Emit("XORQ", _CX, _AX)                 // XORQ CX, AX
    self.Sjmp("JZ", _LB_error_nan_or_infinite)  // JZ _error_nan_or_infinite
    self.save_c()                               // SAVE $C_regs
    self.rbuf_di()                              // MOVQ RP, DI
    self.Emit("MOVSD", jit.Ptr(_SP_p, 0), _X0)  // MOVSD (SP.p), X0
    self.call_c(_F_f64toa)                      // CALL_C f64toa
    self.Emit("ADDQ", _AX, _RL)                 // ADDQ AX, RL
}
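
// Both float paths guard against non-finite values via the exponent field:
// ANDing with _FM_exp32/_FM_exp64 isolates the exponent bits, and XORing with
// the same mask yields zero exactly when every exponent bit is set, which in
// IEEE 754 means NaN or +/-Inf. The JZ then raises the same
// UnsupportedValueError that encoding/json produces for such values.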

func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}

func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)       // MOVQ 8(SP.p), AX
    self.Emit("ADDQ", jit.Imm(2), _AX)              // ADDQ $2, AX
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)       // MOVQ $_IM_mulv, CX
    self.Emit("MOVQ", _DX, _R8)                     // MOVQ DX, R8
    self.From("MULQ", _CX)                          // MULQ CX
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX
    self.Emit("ORQ", jit.Imm(2), _AX)               // ORQ $2, AX
    self.Emit("MOVQ", _R8, _DX)                     // MOVQ R8, DX
    self.check_size_r(_AX, 0)                       // SIZE AX
    self.add_char('"')                              // CHAR $'"'
    self.save_c()                                   // SAVE $REG_ffi
    self.prep_buffer_c()                            // MOVE {buf}, DI
    self.Emit("MOVQ", _SP_p, _SI)                   // MOVQ SP.p, SI

    /* check for AVX2 support */
    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX) // XORL DX, DX
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX
    }

    /* call the encoder */
    self.call_c(_F_b64encode) // CALL b64encode
    self.load_buffer()        // LOAD {buf}
    self.add_char('"')        // CHAR $'"'
}
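
// The MULQ sequence sizes the base64 output without a division: multiplying
// len+2 by _IM_mulv (0xAAAAAAAAAAAAAAAB, a magic reciprocal of 3) leaves
// about 2*(len+2)/3 in DX, and LEAQ 1(DX)(DX*1) followed by ORQ $2 turns that
// into an upper bound on the encoded length including both quotes. DX is
// parked in R8 across the MULQ because MULQ overwrites DX with the high half
// of the product.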

func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}

func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX
    self.Emit("TESTQ", _CX, _CX)              // TESTQ CX, CX
    self.Sjmp("JZ", "_empty_{n}")             // JZ _empty_{n}
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)              // TESTQ AX, AX
    self.Sjmp("JNZ", "_number_next_{n}")      // JNZ _number_next_{n}
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
    self.Sjmp("JMP", _LB_panic)
    self.Link("_number_next_{n}")
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))         // MOVQ AX, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))         // MOVQ CX, 8(SP)
    self.call_go(_F_isValidNumber)                  // CALL_GO isValidNumber
    self.Emit("CMPB", jit.Ptr(_SP, 16), jit.Imm(0)) // CMPB 16(SP), $0
    self.Sjmp("JE", _LB_error_invalid_number)       // JE _error_invalid_number
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)       // MOVQ 8(SP.p), AX
    self.check_size_r(_AX, 0)                       // SIZE AX
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX
    self.Emit("ADDQ", jit.Ptr(_SP_p, 8), _RL)       // ADDQ 8(SP.p), RL
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))         // MOVQ AX, (SP)
    self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0)      // MOVOU (SP.p), X0
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))        // MOVOU X0, 8(SP)
    self.call_go(_F_memmove)                        // CALL_GO memmove
    self.Sjmp("JMP", "_done_{n}")                   // JMP _done_{n}
    self.Link("_empty_{n}")                         // _empty_{n}:
    self.check_size(1)                              // SIZE $1
    self.add_char('0')                              // CHAR $'0'
    self.Link("_done_{n}")                          // _done_{n}:
}

func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer()                         // MOVE {buf}, (SP)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)  // MOVQ (SP.p), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))    // MOVQ AX, 8(SP)
    self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _AX)  // LEAQ 8(SP.p), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))   // MOVQ AX, 16(SP)
    self.Emit("MOVQ", _ST, jit.Ptr(_SP, 24))   // MOVQ ST, 24(SP)
    self.Emit("MOVQ", _ARG_fv, _AX)            // MOVQ fv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))   // MOVQ AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)   // CALL encodeTypedPointer
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _ET)   // MOVQ 40(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 48), _EP)   // MOVQ 48(SP), EP
    self.Emit("TESTQ", _ET, _ET)               // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)                // JNZ _error
}

func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer()                         // MOVE {buf}, (SP)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)  // MOVQ (SP.p), AX
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _AX)    // MOVQ 8(AX), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))    // MOVQ AX, 8(SP)
    self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _AX)  // LEAQ 8(SP.p), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))   // MOVQ AX, 16(SP)
    self.Emit("MOVQ", _ST, jit.Ptr(_SP, 24))   // MOVQ ST, 24(SP)
    self.Emit("MOVQ", _ARG_fv, _AX)            // MOVQ fv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))   // MOVQ AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer)   // CALL encodeTypedPointer
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _ET)   // MOVQ 40(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 48), _EP)   // MOVQ 48(SP), EP
    self.Emit("TESTQ", _ET, _ET)               // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)                // JNZ _error
}

func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVB $p.i64(), (RP)(RL*1)
    self.Emit("ADDQ", jit.Imm(1), _RL)                           // ADDQ $1, RL
}

func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs())) // SIZE ${len(p.vs())}
    self.add_text(p.vs())        // TEXT ${p.vs()}
}

func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
}

func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX
    self.Emit("ADDQ", _AX, _SP_p)            // ADDQ AX, SP.p
}

func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)  // MOVQ -8(ST)(AX), SP.p
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)   // MOVQ (ST)(AX), SP.q
}

func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}

func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}

func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)                   // DROP $(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX)
}

func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer() // MOVE {buf}, (SP)
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _AX)    // MOVQ $(type(p.vt())), AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)

    /* check for indirection */
    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _AX) // MOVQ SP.p, AX
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, vp
        self.Emit("LEAQ", _VAR_vp, _AX)   // LEAQ vp, AX
    }

    /* call the encoder */
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
    self.Emit("MOVQ", _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP)
    self.Emit("MOVQ", _ARG_fv, _AX)          // MOVQ fv, AX
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX) // BTCQ $bitPointerValue, AX
    }
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
    self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP
    self.Emit("TESTQ", _ET, _ET)             // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)              // JNZ _error
}

func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
    self.Xjmp("JE", p.vi())                          // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0
    self.Xjmp("JE", p.vi())                          // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
    self.Xjmp("JE", p.vi())                          // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0
    self.Xjmp("JE", p.vi())                          // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0
    self.Xjmp("JE", p.vi())                          // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
    self.Xjmp("JE", p.vi())                          // JE p.vi()
}

func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)      // MOVQ (SP.p), AX
    self.Emit("TESTQ", _AX, _AX)                   // TESTQ AX, AX
    self.Xjmp("JZ", p.vi())                        // JZ p.vi()
    self.Emit("CMPQ", jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0
    self.Xjmp("JE", p.vi())                        // JE p.vi()
}
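
// A map counts as zero if either the hmap pointer itself is nil (the
// TESTQ/JZ above) or the pointer is valid but the map's element count, the
// first word of the runtime hmap header, is zero (the CMPQ against (AX)).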

func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}

func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ", jit.Type(p.vt()), _AX)    // MOVQ $p.vt(), AX
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX)   // MOVQ (SP.p), CX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ AX, (SP)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))     // MOVQ CX, 8(SP)
    self.Emit("MOVQ", _ARG_fv, _AX)             // MOVQ fv, AX
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ AX, 16(SP)
    self.call_go(_F_iteratorStart)              // CALL_GO iteratorStart
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _SP_q)  // MOVQ 24(SP), SP.q
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)    // MOVQ 32(SP), ET
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)    // MOVQ 40(SP), EP
    self.Emit("TESTQ", _ET, _ET)                // TESTQ ET, ET
    self.Sjmp("JNZ", _LB_error)                 // JNZ _error
}

func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ SP.q, 0(SP)
    self.call_go(_F_iteratorStop)             // CALL_GO iteratorStop
    self.Emit("XORL", _SP_q, _SP_q)           // XORL SP.q, SP.q
}

func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p
    self.Emit("TESTQ", _SP_p, _SP_p)            // TESTQ SP.p, SP.p
    self.Xjmp("JZ", p.vi())                     // JZ p.vi()
}

func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv
    self.Sjmp("JNC", "_unordered_key_{n}")             // JNC _unordered_key_{n}
    self.encode_string(false)                          // STR $false
    self.Xjmp("JMP", p.vi())                           // JMP ${p.vi()}
    self.Link("_unordered_key_{n}")                    // _unordered_key_{n}:
}

func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))   // MOVQ SP.q, (SP)
    self.call_go(_F_iteratorNext)               // CALL_GO iteratorNext
}

func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SP_x)  // MOVQ 8(SP.p), SP.x
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)  // MOVQ (SP.p), SP.p
    self.Emit("ORQ", jit.Imm(1<<_S_init), _SP_f) // ORQ $(1<<_S_init), SP.f
}

func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ", _SP_x, _SP_x)                         // TESTQ SP.x, SP.x
    self.Xjmp("JZ", p.vi())                                  // JZ p.vi()
    self.Emit("SUBQ", jit.Imm(1), _SP_x)                     // SUBQ $1, SP.x
    self.Emit("BTRQ", jit.Imm(_S_init), _SP_f)               // BTRQ $_S_init, SP.f
    self.Emit("LEAQ", jit.Ptr(_SP_p, int64(p.vlen())), _AX)  // LEAQ $(p.vlen())(SP.p), AX
    self.Emit("CMOVQCC", _AX, _SP_p)                         // CMOVQCC AX, SP.p
}
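
// _S_init marks the first pass over a slice: _asm_OP_slice_len sets the bit
// and BTRQ both copies it into CF and clears it. CMOVQCC moves the advanced
// pointer into SP.p only when the carry is clear, i.e. on every iteration
// after the first, so the loop visits element 0 without a special case.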

func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_text_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1<<_S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f
}

func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f
    self.Xjmp("JC", p.vi())
}

func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16)) // MOVQ $(p2.op()), 16(SP)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8))  // MOVQ $(p1.op()), 8(SP)
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))        // MOVQ $(i), (SP)
    self.call_go(_F_println)
}