loic boulet преди 1 месец
ревизия
ec23f007bc
променени са 100 файла, в които са добавени 20643 реда и са изтрити 0 реда
  1. 1 0
      .gitignore
  2. 221 0
      app/server.go
  3. 27 0
      auth/auth.go
  4. 58 0
      auth/jwt.go
  5. 96 0
      auth/memory_ts.go
  6. 3 0
      cloud/aws.go
  7. 3 0
      cloud/digitalocean.go
  8. 572 0
      cloud/ovh.go
  9. 154 0
      cloud/provider.go
  10. 46 0
      config.sample.yml
  11. 177 0
      config/config.go
  12. 12 0
      dbmanager/database.go
  13. 7 0
      dbmanager/entity.go
  14. 45 0
      dbmanager/factory.go
  15. 139 0
      dbmanager/memory.go
  16. 10 0
      dbmanager/repository.go
  17. 56 0
      dbstore/store.go
  18. 3 0
      docker/builder.go
  19. 3 0
      docker/compose.go
  20. 56 0
      go.mod
  21. 129 0
      go.sum
  22. 67 0
      handlers/auth.go
  23. 250 0
      handlers/autodeploy.go
  24. 154 0
      handlers/clients.go
  25. 181 0
      handlers/deployments.go
  26. 129 0
      handlers/monitoring.go
  27. 78 0
      handlers/providers.go
  28. 108 0
      handlers/templates.go
  29. 139 0
      handlers/tickets.go
  30. 67 0
      main.go
  31. 110 0
      middleware/auth.go
  32. 35 0
      middleware/logging.go
  33. 86 0
      middleware/metrics.go
  34. 49 0
      models/autodeploy.go
  35. 22 0
      models/client.go
  36. 41 0
      models/deployment.go
  37. 22 0
      models/metrics.go
  38. 12 0
      models/provider.go
  39. 21 0
      models/template.go
  40. 12 0
      models/ticket.go
  41. 19 0
      models/user.go
  42. 54 0
      services/auth.go
  43. 3 0
      services/autodeploy.go
  44. 3 0
      services/clients.go
  45. 3 0
      services/deployments.go
  46. 3 0
      services/monitoring.go
  47. 3 0
      services/providers.go
  48. 3 0
      services/templates.go
  49. 3 0
      services/tickets.go
  50. 20 0
      vendor/github.com/beorn7/perks/LICENSE
  51. 2388 0
      vendor/github.com/beorn7/perks/quantile/exampledata.txt
  52. 316 0
      vendor/github.com/beorn7/perks/quantile/stream.go
  53. 52 0
      vendor/github.com/bytedance/sonic/.gitignore
  54. 6 0
      vendor/github.com/bytedance/sonic/.gitmodules
  55. 24 0
      vendor/github.com/bytedance/sonic/.licenserc.yaml
  56. 128 0
      vendor/github.com/bytedance/sonic/CODE_OF_CONDUCT.md
  57. 63 0
      vendor/github.com/bytedance/sonic/CONTRIBUTING.md
  58. 0 0
      vendor/github.com/bytedance/sonic/CREDITS
  59. 201 0
      vendor/github.com/bytedance/sonic/LICENSE
  60. 471 0
      vendor/github.com/bytedance/sonic/README.md
  61. 469 0
      vendor/github.com/bytedance/sonic/README_ZH_CN.md
  62. 214 0
      vendor/github.com/bytedance/sonic/api.go
  63. 135 0
      vendor/github.com/bytedance/sonic/ast/api.go
  64. 114 0
      vendor/github.com/bytedance/sonic/ast/api_compat.go
  65. 0 0
      vendor/github.com/bytedance/sonic/ast/asm.s
  66. 31 0
      vendor/github.com/bytedance/sonic/ast/b64_amd64.go
  67. 31 0
      vendor/github.com/bytedance/sonic/ast/b64_compat.go
  68. 409 0
      vendor/github.com/bytedance/sonic/ast/buffer.go
  69. 618 0
      vendor/github.com/bytedance/sonic/ast/decode.go
  70. 259 0
      vendor/github.com/bytedance/sonic/ast/encode.go
  71. 130 0
      vendor/github.com/bytedance/sonic/ast/error.go
  72. 203 0
      vendor/github.com/bytedance/sonic/ast/iterator.go
  73. 1824 0
      vendor/github.com/bytedance/sonic/ast/node.go
  74. 660 0
      vendor/github.com/bytedance/sonic/ast/parser.go
  75. 138 0
      vendor/github.com/bytedance/sonic/ast/search.go
  76. 55 0
      vendor/github.com/bytedance/sonic/ast/stubs_go115.go
  77. 55 0
      vendor/github.com/bytedance/sonic/ast/stubs_go120.go
  78. 315 0
      vendor/github.com/bytedance/sonic/ast/visitor.go
  79. 131 0
      vendor/github.com/bytedance/sonic/compat.go
  80. 68 0
      vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go
  81. 194 0
      vendor/github.com/bytedance/sonic/decoder/decoder_compat.go
  82. 117 0
      vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go
  83. 261 0
      vendor/github.com/bytedance/sonic/encoder/encoder_compat.go
  84. 9 0
      vendor/github.com/bytedance/sonic/go.work
  85. 0 0
      vendor/github.com/bytedance/sonic/internal/caching/asm.s
  86. 115 0
      vendor/github.com/bytedance/sonic/internal/caching/fcache.go
  87. 40 0
      vendor/github.com/bytedance/sonic/internal/caching/hashing.go
  88. 173 0
      vendor/github.com/bytedance/sonic/internal/caching/pcache.go
  89. 40 0
      vendor/github.com/bytedance/sonic/internal/cpu/features.go
  90. 0 0
      vendor/github.com/bytedance/sonic/internal/decoder/asm.s
  91. 130 0
      vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go
  92. 126 0
      vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go
  93. 132 0
      vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go
  94. 1930 0
      vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go
  95. 1950 0
      vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go
  96. 1158 0
      vendor/github.com/bytedance/sonic/internal/decoder/compiler.go
  97. 70 0
      vendor/github.com/bytedance/sonic/internal/decoder/debug.go
  98. 255 0
      vendor/github.com/bytedance/sonic/internal/decoder/decoder.go
  99. 191 0
      vendor/github.com/bytedance/sonic/internal/decoder/errors.go
  100. 729 0
      vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go

+ 1 - 0
.gitignore

@@ -0,0 +1 @@
+config.yaml

+ 221 - 0
app/server.go

@@ -0,0 +1,221 @@
+package app
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+	"os"
+	"os/signal"
+	"strconv"
+	"syscall"
+	"time"
+
+	"git.linuxforward.com/byop/byop-engine/auth"
+	"git.linuxforward.com/byop/byop-engine/cloud"
+	"git.linuxforward.com/byop/byop-engine/config"
+	"git.linuxforward.com/byop/byop-engine/dbmanager"
+	"git.linuxforward.com/byop/byop-engine/handlers"
+	mw "git.linuxforward.com/byop/byop-engine/middleware"
+	"github.com/gin-gonic/gin"
+	"github.com/pkg/errors"
+	"github.com/sirupsen/logrus"
+)
+
// App wires together configuration, the HTTP router, database access,
// services, and HTTP handlers for the byop-engine server.
type App struct {
	entry      *logrus.Entry      // component-scoped logger
	cnf        *config.Config     // loaded application configuration
	ctx        context.Context    // app lifetime context, cancelled on shutdown
	cancelFunc context.CancelFunc // cancels ctx
	rtr        *gin.Engine        // HTTP router; routes registered in setupRoutes
	// Database
	dbManager dbmanager.DbManager[dbmanager.Entity]
	dbFactory *dbmanager.DbManagerFactory // creates per-entity managers lazily

	// Services
	authService auth.Service    // token generation/validation
	tokenStore  auth.TokenStore // blacklist backing store for revoked tokens

	// Common Handlers
	authHandler   *handlers.AuthHandler
	clientHandler *handlers.ClientHandler

	// Resource Handlers
	providerHandler   *handlers.ProviderHandler
	deploymentHandler *handlers.DeploymentHandler
	templateHandler   *handlers.TemplateHandler
	ticketHandler     *handlers.TicketHandler
	monitoringHandler *handlers.MonitoringHandler
}
+
// NewApp constructs a fully wired App from the given configuration:
// router (gin mode chosen by cnf.Debug), database factory, services,
// handlers, and routes. The initialization order below matters:
// database -> services -> handlers -> routes, because each stage consumes
// what the previous one produced.
func NewApp(cnf *config.Config) (*App, error) {
	ctx, cancelFunc := context.WithCancel(context.Background())

	app := &App{
		entry:      logrus.WithField("component", "app"),
		cnf:        cnf,
		ctx:        ctx,
		cancelFunc: cancelFunc,
	}

	// Initialize router first
	if cnf.Debug {
		gin.SetMode(gin.DebugMode)
	} else {
		// Set gin to release mode for production
		// This will disable debug logs and enable performance optimizations
		gin.SetMode(gin.ReleaseMode)
	}
	app.rtr = gin.New()
	app.rtr.Use(gin.Recovery())
	app.rtr.Use(mw.Logger)

	// Initialize database connection
	if err := app.initDatabase(); err != nil {
		return nil, errors.Wrap(err, "initialize database")
	}

	// Initialize services and handlers
	if err := app.initServices(); err != nil {
		return nil, errors.Wrap(err, "initialize services")
	}

	if err := app.initHandlers(); err != nil {
		return nil, errors.Wrap(err, "initialize handlers")
	}

	// Set up routes after all handlers are initialized
	app.setupRoutes()

	return app, nil
}
+
+func (a *App) Run() error {
+
+	srv := &http.Server{
+		Addr:    fmt.Sprintf(":%s", strconv.Itoa(a.cnf.Server.Port)),
+		Handler: a.rtr,
+	}
+
+	go func() {
+		a.entry.WithField("address", srv.Addr).Info("Starting server...")
+		// Handle TLS if configured
+		if a.cnf.Server.Tls.Enabled {
+			a.entry.Info("Starting server with TLS...")
+			err := srv.ListenAndServeTLS(a.cnf.Server.Tls.CertFile, a.cnf.Server.Tls.KeyFile)
+			if err != nil && err != http.ErrServerClosed {
+				a.entry.WithError(err).Fatal("Failed to start server")
+			}
+		} else {
+			err := srv.ListenAndServe()
+			if err != nil && err != http.ErrServerClosed {
+				a.entry.WithError(err).Fatal("Failed to start server")
+			}
+		}
+		a.entry.Info("Server stopped")
+	}()
+
+	quit := make(chan os.Signal, 1)
+	signal.Notify(quit, os.Interrupt, syscall.SIGTERM)
+	<-quit
+
+	a.entry.Info("Stopping server...")
+	ctxTimeout, cancelFunc := context.WithTimeout(context.Background(), 30*time.Second)
+	defer cancelFunc()
+	err := srv.Shutdown(ctxTimeout)
+	if err != nil {
+		return fmt.Errorf("shutdown server: %w", err)
+	}
+
+	a.entry.Info("Server stopped successfully")
+	return nil
+}
+
// initServices builds the token store and JWT auth service, then initializes
// every configured cloud provider. Must run before initHandlers, which
// consumes a.authService.
func (a *App) initServices() error {
	// Initialize token store
	// NOTE(review): time.Duration(x) interprets x as nanoseconds. If
	// Auth.CleanupInterval / Auth.TokenDuration are stored as seconds in the
	// config, these should be multiplied by time.Second — confirm the config
	// field types.
	a.tokenStore = auth.NewMemoryTokenStore(time.Duration(a.cnf.Auth.CleanupInterval))

	// Initialize authentication service
	a.authService = auth.NewJWTService(
		[]byte(a.cnf.Auth.PrivateKey),
		time.Duration(a.cnf.Auth.TokenDuration),
		a.tokenStore,
	)

	// Initialize providers
	if err := a.loadProviders(); err != nil {
		return errors.Wrap(err, "load providers")
	}

	a.entry.Info("Services initialized successfully")
	return nil
}
+
+func (a *App) initHandlers() error {
+	// Initialize authentication handler
+	a.authHandler = handlers.NewAuthHandler(a.authService)
+
+	// Initialize resource handlers
+	a.providerHandler = handlers.NewProviderHandler()
+
+	// Create managers for each entity type
+	clientDbManager, err := a.dbFactory.CreateClientManager()
+	if err != nil {
+		return fmt.Errorf("create client db manager: %w", err)
+	}
+
+	if err := clientDbManager.Connect(); err != nil {
+		return fmt.Errorf("connect to client database: %w", err)
+	}
+
+	a.clientHandler = handlers.NewClientHandler(clientDbManager)
+	// Initialize other handlers...
+
+	a.entry.Info("Handlers initialized successfully")
+	return nil
+}
+
+func (a *App) loadProviders() error {
+	for name, config := range a.cnf.Providers {
+		provider, ok := cloud.GetProvider(name)
+		if !ok {
+			return fmt.Errorf("provider %s not found", name)
+		}
+		err := provider.Initialize(config)
+		if err != nil {
+			return fmt.Errorf("initialize provider %s: %w", name, err)
+		}
+		a.entry.WithField("provider", name).Info("Provider initialized")
+	}
+	a.entry.Info("All providers loaded successfully")
+
+	return nil
+}
+
+func (a *App) setupRoutes() {
+	// API version group
+	v1 := a.rtr.Group("/api/v1")
+
+	// Auth routes - no middleware required
+	a.authHandler.RegisterRoutes(v1)
+
+	// Protected routes - require authentication
+	protected := v1.Group("/")
+	protected.Use(mw.Auth(a.authService)) // Auth middleware with service dependency
+
+	// Register resource routes
+	providers := protected.Group("/providers")
+	a.providerHandler.RegisterRoutes(providers)
+
+	clients := protected.Group("/clients")
+	a.clientHandler.RegisterRoutes(clients)
+
+	// Register other resource routes...
+
+	a.entry.Info("Routes configured successfully")
+}
+
+func (a *App) initDatabase() error {
+	a.dbFactory = dbmanager.NewDbManagerFactory(a.cnf.Database.Type)
+	a.entry.Info("Database factory initialized successfully")
+	return nil
+}

+ 27 - 0
auth/auth.go

@@ -0,0 +1,27 @@
+package auth
+
+import (
+	"context"
+	"errors"
+)
+
// Sentinel errors returned by token validation; callers should compare with
// errors.Is.
var (
	ErrTokenExpired     = errors.New("token has expired")
	ErrInvalidToken     = errors.New("token is invalid")
	ErrTokenBlacklisted = errors.New("token has been revoked")
)

// Service defines the interface for authentication operations.
// Implementations (e.g. JWTService) issue, verify, refresh, and revoke
// client tokens.
type Service interface {
	// GenerateToken creates a new authentication token for a user.
	GenerateToken(ctx context.Context, clientID string) (string, error)

	// ValidateToken verifies a token and returns the client ID if valid.
	ValidateToken(ctx context.Context, token string) (string, error)

	// RefreshToken creates a new token based on an existing valid token.
	RefreshToken(ctx context.Context, token string) (string, error)

	// Logout invalidates a token.
	Logout(ctx context.Context, token string) error
}

+ 58 - 0
auth/jwt.go

@@ -0,0 +1,58 @@
+package auth
+
import (
	"context"
	"errors"
	"time"

	"github.com/golang-jwt/jwt"
)
+
+// Claims represents the JWT claims structure
+type Claims struct {
+	jwt.StandardClaims
+	ClientID string `json:"client_id"`
+	Role     string `json:"role"`
+}
+
+// JWTService implements the auth.Service interface using JWT tokens
+type JWTService struct {
+	privateKey    []byte
+	tokenDuration time.Duration
+	tokenStore    TokenStore // Interface for blacklist storage
+}
+
+// TokenStore defines storage operations for token management
+type TokenStore interface {
+	IsBlacklisted(ctx context.Context, token string) (bool, error)
+	Blacklist(ctx context.Context, token string, expiry time.Time) error
+}
+
+// NewJWTService creates a new JWT-based auth service
+func NewJWTService(privateKey []byte, tokenDuration time.Duration, store TokenStore) Service {
+	return &JWTService{
+		privateKey:    privateKey,
+		tokenDuration: tokenDuration,
+		tokenStore:    store,
+	}
+}
+
+// Implementation of Service interface methods...
+func (s *JWTService) GenerateToken(ctx context.Context, clientID string) (string, error) {
+	// Implementation here
+	return "", nil
+}
+
+func (s *JWTService) ValidateToken(ctx context.Context, token string) (string, error) {
+	// Implementation here
+	return "", nil
+}
+
+func (s *JWTService) RefreshToken(ctx context.Context, token string) (string, error) {
+	// Implementation here
+	return "", nil
+}
+
+func (s *JWTService) Logout(ctx context.Context, token string) error {
+	// Implementation here
+	return nil
+}

+ 96 - 0
auth/memory_ts.go

@@ -0,0 +1,96 @@
+package auth
+
+import (
+	"context"
+	"sync"
+	"time"
+)
+
+// MemoryTokenStore implements TokenStore with in-memory storage
+type MemoryTokenStore struct {
+	blacklist map[string]time.Time
+	mutex     sync.RWMutex
+	// Optional cleaner to remove expired tokens
+	cleanupInterval time.Duration
+	stopCleanup     chan struct{}
+}
+
+// NewMemoryTokenStore creates a new in-memory token store
+func NewMemoryTokenStore(cleanupInterval time.Duration) *MemoryTokenStore {
+	store := &MemoryTokenStore{
+		blacklist:       make(map[string]time.Time),
+		mutex:           sync.RWMutex{},
+		cleanupInterval: cleanupInterval,
+		stopCleanup:     make(chan struct{}),
+	}
+
+	// Start background cleanup if interval > 0
+	if cleanupInterval > 0 {
+		go store.startCleanupRoutine()
+	}
+
+	return store
+}
+
+// IsBlacklisted checks if a token is in the blacklist
+func (s *MemoryTokenStore) IsBlacklisted(ctx context.Context, token string) (bool, error) {
+	s.mutex.RLock()
+	defer s.mutex.RUnlock()
+
+	expiry, exists := s.blacklist[token]
+	if !exists {
+		return false, nil
+	}
+
+	// If token has expired, we can remove it
+	if time.Now().After(expiry) {
+		// Don't remove here to avoid write lock during read operation
+		return false, nil
+	}
+
+	return true, nil
+}
+
+// Blacklist adds a token to the blacklist
+func (s *MemoryTokenStore) Blacklist(ctx context.Context, token string, expiry time.Time) error {
+	s.mutex.Lock()
+	defer s.mutex.Unlock()
+
+	s.blacklist[token] = expiry
+	return nil
+}
+
+// startCleanupRoutine periodically removes expired tokens
+func (s *MemoryTokenStore) startCleanupRoutine() {
+	ticker := time.NewTicker(s.cleanupInterval)
+	defer ticker.Stop()
+
+	for {
+		select {
+		case <-ticker.C:
+			s.cleanup()
+		case <-s.stopCleanup:
+			return
+		}
+	}
+}
+
+// cleanup removes expired tokens from the blacklist
+func (s *MemoryTokenStore) cleanup() {
+	now := time.Now()
+	s.mutex.Lock()
+	defer s.mutex.Unlock()
+
+	for token, expiry := range s.blacklist {
+		if now.After(expiry) {
+			delete(s.blacklist, token)
+		}
+	}
+}
+
+// Close stops the cleanup goroutine
+func (s *MemoryTokenStore) Close() {
+	if s.cleanupInterval > 0 {
+		s.stopCleanup <- struct{}{}
+	}
+}

+ 3 - 0
cloud/aws.go

@@ -0,0 +1,3 @@
+package cloud
+
+// TODO: Implement aws cloud provider

+ 3 - 0
cloud/digitalocean.go

@@ -0,0 +1,3 @@
+package cloud
+
+// TODO: Implement digitalocean cloud provider

+ 572 - 0
cloud/ovh.go

@@ -0,0 +1,572 @@
+package cloud
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"time"
+
+	"github.com/ovh/go-ovh/ovh"
+)
+
// OVHProvider implements the Provider interface for OVH Cloud.
type OVHProvider struct {
	client     *ovh.Client // authenticated OVH API client, set by Initialize
	projectID  string      // OVH public-cloud project all API paths are scoped to
	region     string      // optional default region used when callers pass ""
	configured bool        // true once Initialize has succeeded
}

// NewOVHProvider creates a new, unconfigured OVH provider.
func NewOVHProvider() Provider {
	return &OVHProvider{}
}

// init registers this provider under the name "ovh" in the package-level
// registry so it can be resolved via cloud.GetProvider("ovh").
func init() {
	RegisterProvider("ovh", NewOVHProvider)
}
+
+// Initialize sets up the OVH provider with credentials and configuration
+func (p *OVHProvider) Initialize(config map[string]string) error {
+	// Check required configuration
+	requiredKeys := []string{"application_key", "application_secret", "consumer_key", "project_id"}
+	for _, key := range requiredKeys {
+		if _, ok := config[key]; !ok {
+			return fmt.Errorf("missing required configuration key: %s", key)
+		}
+	}
+
+	// Create OVH client
+	client, err := ovh.NewClient(
+		"ovh-eu", // Endpoint (can be configurable)
+		config["application_key"],
+		config["application_secret"],
+		config["consumer_key"],
+	)
+	if err != nil {
+		return fmt.Errorf("failed to create OVH client: %w", err)
+	}
+
+	p.client = client
+	p.projectID = config["project_id"]
+
+	// Set default region if provided
+	if region, ok := config["region"]; ok {
+		p.region = region
+	}
+
+	p.configured = true
+	return nil
+}
+
+// Validate checks if the OVH provider credentials are valid
+func (p *OVHProvider) Validate(ctx context.Context) (bool, error) {
+	if !p.configured {
+		return false, errors.New("provider not configured")
+	}
+
+	// Try to get project info to verify credentials
+	path := fmt.Sprintf("/cloud/project/%s", p.projectID)
+	var project map[string]interface{}
+
+	err := p.client.Get(path, &project)
+	if err != nil {
+		return false, fmt.Errorf("validation failed: %w", err)
+	}
+
+	return true, nil
+}
+
// ListRegions lists all available OVH regions for the configured project.
// It first fetches the region IDs, then one detail record per region;
// regions whose detail fetch fails are silently skipped (best-effort).
func (p *OVHProvider) ListRegions(ctx context.Context) ([]Region, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	path := fmt.Sprintf("/cloud/project/%s/region", p.projectID)
	var regionIDs []string

	err := p.client.Get(path, &regionIDs)
	if err != nil {
		return nil, fmt.Errorf("failed to list regions: %w", err)
	}

	regions := make([]Region, 0, len(regionIDs))
	for _, id := range regionIDs {
		// Shape of the per-region detail payload returned by the OVH API.
		var regionDetails struct {
			Name          string `json:"name"`
			ContinentCode string `json:"continentCode"`
			Status        string `json:"status"`
		}

		regionPath := fmt.Sprintf("/cloud/project/%s/region/%s", p.projectID, id)
		err := p.client.Get(regionPath, &regionDetails)
		if err != nil {
			continue // Skip this region if we can't get details
		}

		// The continent code is surfaced as the Zone of the generic model.
		regions = append(regions, Region{
			ID:   id,
			Name: regionDetails.Name,
			Zone: regionDetails.ContinentCode,
		})
	}

	return regions, nil
}
+
// ListInstanceSizes lists available VM sizes (flavors) in OVH for the given
// region. An empty region falls back to the provider's default region; if
// neither is set, an error is returned.
func (p *OVHProvider) ListInstanceSizes(ctx context.Context, region string) ([]InstanceSize, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	if region == "" {
		region = p.region
	}

	if region == "" {
		return nil, errors.New("region must be specified")
	}

	path := fmt.Sprintf("/cloud/project/%s/flavor?region=%s", p.projectID, region)
	// Shape of the flavor payload returned by the OVH API.
	var flavors []struct {
		ID          string  `json:"id"`
		Name        string  `json:"name"`
		Vcpus       int     `json:"vcpus"`
		RAM         int     `json:"ram"`  // in MB
		Disk        int     `json:"disk"` // in GB
		Type        string  `json:"type"`
		HourlyPrice float64 `json:"hourlyPrice"`
	}

	err := p.client.Get(path, &flavors)
	if err != nil {
		return nil, fmt.Errorf("failed to list flavors: %w", err)
	}

	sizes := make([]InstanceSize, 0, len(flavors))
	for _, flavor := range flavors {
		sizes = append(sizes, InstanceSize{
			ID:       flavor.ID,
			Name:     flavor.Name,
			CPUCores: flavor.Vcpus,
			// Integer division truncates: flavors with less than 1024 MB of
			// RAM report MemoryGB == 0.
			MemoryGB: flavor.RAM / 1024, // Convert MB to GB
			DiskGB:   flavor.Disk,
			Price:    flavor.HourlyPrice,
		})
	}

	return sizes, nil
}
+
// ListInstances lists all instances in the OVH project, mapped into the
// provider-neutral Instance model (statuses normalized via mapOVHStatus,
// IPv4 addresses split into public/private).
func (p *OVHProvider) ListInstances(ctx context.Context) ([]Instance, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	path := fmt.Sprintf("/cloud/project/%s/instance", p.projectID)
	// Shape of the instance payload returned by the OVH API.
	var ovhInstances []struct {
		ID          string    `json:"id"`
		Name        string    `json:"name"`
		Status      string    `json:"status"`
		Created     time.Time `json:"created"`
		Region      string    `json:"region"`
		FlavorID    string    `json:"flavorId"`
		ImageID     string    `json:"imageId"`
		IPAddresses []struct {
			IP      string `json:"ip"`
			Type    string `json:"type"`    // public or private
			Version int    `json:"version"` // 4 or 6
		} `json:"ipAddresses"`
	}

	err := p.client.Get(path, &ovhInstances)
	if err != nil {
		return nil, fmt.Errorf("failed to list instances: %w", err)
	}

	instances := make([]Instance, 0, len(ovhInstances))
	for _, ovhInstance := range ovhInstances {
		instance := Instance{
			ID:        ovhInstance.ID,
			Name:      ovhInstance.Name,
			Region:    ovhInstance.Region,
			Size:      ovhInstance.FlavorID,
			ImageID:   ovhInstance.ImageID,
			Status:    mapOVHStatus(ovhInstance.Status),
			CreatedAt: ovhInstance.Created,
		}

		// Extract IP addresses; IPv6 entries are ignored. If several IPv4
		// addresses of the same type exist, the last one wins.
		for _, ip := range ovhInstance.IPAddresses {
			if ip.Version == 4 { // Only use IPv4 for now
				if ip.Type == "public" {
					instance.IPAddress = ip.IP
				} else {
					instance.PrivateIP = ip.IP
				}
			}
		}

		instances = append(instances, instance)
	}

	return instances, nil
}
+
// GetInstance gets a specific instance by ID, mapped into the
// provider-neutral Instance model (same normalization as ListInstances).
func (p *OVHProvider) GetInstance(ctx context.Context, id string) (*Instance, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	path := fmt.Sprintf("/cloud/project/%s/instance/%s", p.projectID, id)
	// Shape of the single-instance payload returned by the OVH API.
	var ovhInstance struct {
		ID          string    `json:"id"`
		Name        string    `json:"name"`
		Status      string    `json:"status"`
		Created     time.Time `json:"created"`
		Region      string    `json:"region"`
		FlavorID    string    `json:"flavorId"`
		ImageID     string    `json:"imageId"`
		IPAddresses []struct {
			IP      string `json:"ip"`
			Type    string `json:"type"`    // public or private
			Version int    `json:"version"` // 4 or 6
		} `json:"ipAddresses"`
	}

	err := p.client.Get(path, &ovhInstance)
	if err != nil {
		return nil, fmt.Errorf("failed to get instance: %w", err)
	}

	instance := &Instance{
		ID:        ovhInstance.ID,
		Name:      ovhInstance.Name,
		Region:    ovhInstance.Region,
		Size:      ovhInstance.FlavorID,
		ImageID:   ovhInstance.ImageID,
		Status:    mapOVHStatus(ovhInstance.Status),
		CreatedAt: ovhInstance.Created,
	}

	// Extract IP addresses; IPv6 entries are ignored.
	for _, ip := range ovhInstance.IPAddresses {
		if ip.Version == 4 { // Only use IPv4 for now
			if ip.Type == "public" {
				instance.IPAddress = ip.IP
			} else {
				instance.PrivateIP = ip.IP
			}
		}
	}

	return instance, nil
}
+
// CreateInstance creates a new instance in OVH and then re-fetches it to
// return the fully populated Instance (the create response only carries
// id/status/created).
//
// NOTE(review): opts.Tags and opts.SecurityGroups are accepted by the
// interface but never forwarded in the request below, and the request's
// Networks field is never populated — confirm whether that is intentional.
func (p *OVHProvider) CreateInstance(ctx context.Context, opts InstanceCreateOpts) (*Instance, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	// Prepare create request
	path := fmt.Sprintf("/cloud/project/%s/instance", p.projectID)
	request := struct {
		Name     string   `json:"name"`
		Region   string   `json:"region"`
		FlavorID string   `json:"flavorId"`
		ImageID  string   `json:"imageId"`
		SSHKeyID []string `json:"sshKeyId,omitempty"`
		UserData string   `json:"userData,omitempty"`
		Networks []string `json:"networks,omitempty"`
	}{
		Name:     opts.Name,
		Region:   opts.Region,
		FlavorID: opts.Size,
		ImageID:  opts.ImageID,
		SSHKeyID: opts.SSHKeyIDs,
		UserData: opts.UserData,
	}

	// Minimal shape of the create response; full details are fetched below.
	var result struct {
		ID      string `json:"id"`
		Status  string `json:"status"`
		Created string `json:"created"`
	}

	err := p.client.Post(path, request, &result)
	if err != nil {
		return nil, fmt.Errorf("failed to create instance: %w", err)
	}

	// Fetch the full instance details
	createdInstance, err := p.GetInstance(ctx, result.ID)
	if err != nil {
		return nil, fmt.Errorf("instance created but failed to retrieve details: %w", err)
	}

	return createdInstance, nil
}
+
+// DeleteInstance deletes an instance in OVH
+func (p *OVHProvider) DeleteInstance(ctx context.Context, id string) error {
+	if !p.configured {
+		return errors.New("provider not configured")
+	}
+
+	path := fmt.Sprintf("/cloud/project/%s/instance/%s", p.projectID, id)
+	err := p.client.Delete(path, nil)
+	if err != nil {
+		return fmt.Errorf("failed to delete instance: %w", err)
+	}
+
+	return nil
+}
+
+// StartInstance starts an instance in OVH
+func (p *OVHProvider) StartInstance(ctx context.Context, id string) error {
+	if !p.configured {
+		return errors.New("provider not configured")
+	}
+
+	path := fmt.Sprintf("/cloud/project/%s/instance/%s/start", p.projectID, id)
+	err := p.client.Post(path, nil, nil)
+	if err != nil {
+		return fmt.Errorf("failed to start instance: %w", err)
+	}
+
+	return nil
+}
+
+// StopInstance stops an instance in OVH
+func (p *OVHProvider) StopInstance(ctx context.Context, id string) error {
+	if !p.configured {
+		return errors.New("provider not configured")
+	}
+
+	path := fmt.Sprintf("/cloud/project/%s/instance/%s/stop", p.projectID, id)
+	err := p.client.Post(path, nil, nil)
+	if err != nil {
+		return fmt.Errorf("failed to stop instance: %w", err)
+	}
+
+	return nil
+}
+
+// RestartInstance restarts an instance in OVH
+func (p *OVHProvider) RestartInstance(ctx context.Context, id string) error {
+	if !p.configured {
+		return errors.New("provider not configured")
+	}
+
+	path := fmt.Sprintf("/cloud/project/%s/instance/%s/reboot", p.projectID, id)
+	err := p.client.Post(path, nil, nil)
+	if err != nil {
+		return fmt.Errorf("failed to restart instance: %w", err)
+	}
+
+	return nil
+}
+
// ListImages lists available OS images in the OVH project, mapped into the
// provider-neutral Image model.
func (p *OVHProvider) ListImages(ctx context.Context) ([]Image, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	// Get all images
	path := fmt.Sprintf("/cloud/project/%s/image", p.projectID)
	// Shape of the image payload returned by the OVH API.
	var ovhImages []struct {
		ID           string    `json:"id"`
		Name         string    `json:"name"`
		Region       string    `json:"region"`
		Visibility   string    `json:"visibility"`
		Type         string    `json:"type"`
		Status       string    `json:"status"`
		CreationDate time.Time `json:"creationDate"`
		MinDisk      int       `json:"minDisk"`
		Size         int       `json:"size"`
	}

	err := p.client.Get(path, &ovhImages)
	if err != nil {
		return nil, fmt.Errorf("failed to list images: %w", err)
	}

	images := make([]Image, 0, len(ovhImages))
	for _, ovhImage := range ovhImages {
		// The image Type doubles as Description in the generic model.
		images = append(images, Image{
			ID:          ovhImage.ID,
			Name:        ovhImage.Name,
			Description: ovhImage.Type,
			Type:        ovhImage.Type,
			Status:      ovhImage.Status,
			CreatedAt:   ovhImage.CreationDate,
			MinDiskGB:   ovhImage.MinDisk,
			// Integer division truncates sub-GB sizes to 0.
			// NOTE(review): assumes Size is reported in bytes — confirm
			// against the OVH API; minDisk is treated as GB above.
			SizeGB: ovhImage.Size / (1024 * 1024 * 1024), // Convert bytes to GB
		})
	}

	return images, nil
}
+
// ListSSHKeys lists the SSH keys registered in the OVH project.
func (p *OVHProvider) ListSSHKeys(ctx context.Context) ([]SSHKey, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	path := fmt.Sprintf("/cloud/project/%s/sshkey", p.projectID)
	// Shape of the SSH-key payload returned by the OVH API.
	var ovhKeys []struct {
		ID          string    `json:"id"`
		Name        string    `json:"name"`
		PublicKey   string    `json:"publicKey"`
		Fingerprint string    `json:"fingerprint"`
		CreatedAt   time.Time `json:"creationDate"`
	}

	err := p.client.Get(path, &ovhKeys)
	if err != nil {
		return nil, fmt.Errorf("failed to list SSH keys: %w", err)
	}

	keys := make([]SSHKey, 0, len(ovhKeys))
	for _, ovhKey := range ovhKeys {
		keys = append(keys, SSHKey{
			ID:          ovhKey.ID,
			Name:        ovhKey.Name,
			PublicKey:   ovhKey.PublicKey,
			Fingerprint: ovhKey.Fingerprint,
			CreatedAt:   ovhKey.CreatedAt,
		})
	}

	return keys, nil
}
+
// CreateSSHKey registers a new SSH public key in the OVH project under the
// given name and returns the created key. The provider's default region, if
// set, is attached to the key.
func (p *OVHProvider) CreateSSHKey(ctx context.Context, name, publicKey string) (*SSHKey, error) {
	if !p.configured {
		return nil, errors.New("provider not configured")
	}

	path := fmt.Sprintf("/cloud/project/%s/sshkey", p.projectID)
	request := struct {
		Name      string `json:"name"`
		PublicKey string `json:"publicKey"`
		Region    string `json:"region,omitempty"`
	}{
		Name:      name,
		PublicKey: publicKey,
		Region:    p.region, // Optional region
	}

	// Shape of the create response returned by the OVH API.
	var result struct {
		ID          string    `json:"id"`
		Name        string    `json:"name"`
		PublicKey   string    `json:"publicKey"`
		Fingerprint string    `json:"fingerprint"`
		CreatedAt   time.Time `json:"creationDate"`
	}

	err := p.client.Post(path, request, &result)
	if err != nil {
		return nil, fmt.Errorf("failed to create SSH key: %w", err)
	}

	return &SSHKey{
		ID:          result.ID,
		Name:        result.Name,
		PublicKey:   result.PublicKey,
		Fingerprint: result.Fingerprint,
		CreatedAt:   result.CreatedAt,
	}, nil
}
+
+// DeleteSSHKey deletes an SSH key in OVH
+func (p *OVHProvider) DeleteSSHKey(ctx context.Context, id string) error {
+	if !p.configured {
+		return errors.New("provider not configured")
+	}
+
+	path := fmt.Sprintf("/cloud/project/%s/sshkey/%s", p.projectID, id)
+	err := p.client.Delete(path, nil)
+	if err != nil {
+		return fmt.Errorf("failed to delete SSH key: %w", err)
+	}
+
+	return nil
+}
+
+// GetInstanceStatus gets the current status of an instance in OVH
+func (p *OVHProvider) GetInstanceStatus(ctx context.Context, id string) (string, error) {
+	instance, err := p.GetInstance(ctx, id)
+	if err != nil {
+		return "", err
+	}
+
+	return instance.Status, nil
+}
+
+// WaitForInstanceStatus waits for an instance to reach a specific status
+func (p *OVHProvider) WaitForInstanceStatus(ctx context.Context, id, status string, timeout time.Duration) error {
+	deadline := time.Now().Add(timeout)
+
+	for time.Now().Before(deadline) {
+		currentStatus, err := p.GetInstanceStatus(ctx, id)
+		if err != nil {
+			return err
+		}
+
+		if currentStatus == status {
+			return nil
+		}
+
+		select {
+		case <-ctx.Done():
+			return ctx.Err()
+		case <-time.After(5 * time.Second):
+			// Wait 5 seconds before next check
+		}
+	}
+
+	return fmt.Errorf("timeout waiting for instance %s to reach status %s", id, status)
+}
+
// mapOVHStatus translates an OVH-specific instance status into the
// provider-neutral vocabulary used by the Instance model (Running, Creating,
// Stopped, Terminated, Restarting, Stopping, Error). Unrecognized statuses
// are passed through unchanged.
func mapOVHStatus(ovhStatus string) string {
	switch ovhStatus {
	case "ACTIVE", "RESCUE":
		return "Running"
	case "BUILD", "BUILDING":
		return "Creating"
	case "SHUTOFF", "PAUSED", "SUSPENDED":
		return "Stopped"
	case "DELETED", "SOFT_DELETED":
		return "Terminated"
	case "HARD_REBOOT", "REBOOT":
		return "Restarting"
	case "ERROR":
		return "Error"
	case "STOPPING":
		return "Stopping"
	default:
		return ovhStatus
	}
}

+ 154 - 0
cloud/provider.go

@@ -0,0 +1,154 @@
+package cloud
+
import (
	"context"
	"sort"
	"time"
)
+
// InstanceSize represents the size configuration for a VM instance.
type InstanceSize struct {
	ID       string  `json:"id"`
	Name     string  `json:"name"`
	CPUCores int     `json:"cpu_cores"`
	MemoryGB int     `json:"memory_gb"`
	DiskGB   int     `json:"disk_gb"`
	Price    float64 `json:"price"` // Hourly price
}

// Region represents a geographic region offered by a provider.
type Region struct {
	ID   string `json:"id"`
	Name string `json:"name"`
	Zone string `json:"zone"`
}

// Instance represents a virtual machine instance.
type Instance struct {
	ID             string            `json:"id"`
	Name           string            `json:"name"`
	Region         string            `json:"region"`
	Size           string            `json:"size"`
	ImageID        string            `json:"image_id"`
	IPAddress      string            `json:"ip_address"`
	PrivateIP      string            `json:"private_ip,omitempty"`
	Status         string            `json:"status"` // Creating, Running, Stopping, Stopped, Restarting, Terminated
	CreatedAt      time.Time         `json:"created_at"`
	Tags           map[string]string `json:"tags,omitempty"`
	SecurityGroups []string          `json:"security_groups,omitempty"`
}

// InstanceCreateOpts are options to configure a new instance.
type InstanceCreateOpts struct {
	Name           string            `json:"name"`
	Region         string            `json:"region"`
	Size           string            `json:"size"`
	ImageID        string            `json:"image_id"`
	SSHKeyIDs      []string          `json:"ssh_key_ids,omitempty"`
	UserData       string            `json:"user_data,omitempty"` // Cloud-init user data passed to the instance
	Tags           map[string]string `json:"tags,omitempty"`
	SecurityGroups []string          `json:"security_groups,omitempty"`
}

// SSHKey represents an SSH key registered with a provider.
type SSHKey struct {
	ID          string    `json:"id"`
	Name        string    `json:"name"`
	Fingerprint string    `json:"fingerprint"`
	PublicKey   string    `json:"public_key"`
	CreatedAt   time.Time `json:"created_at"`
}

// Image represents an operating system image.
type Image struct {
	ID          string    `json:"id"`
	Name        string    `json:"name"`
	Description string    `json:"description"`
	Type        string    `json:"type"` // base, snapshot, backup
	Status      string    `json:"status"`
	CreatedAt   time.Time `json:"created_at"`
	MinDiskGB   int       `json:"min_disk_gb,omitempty"`
	SizeGB      int       `json:"size_gb,omitempty"`
}
+
// Provider defines the interface that all cloud providers must implement.
// Every operation accepts a context for cancellation and deadlines, and
// Initialize must succeed before the other methods are usable (concrete
// implementations reject calls on an unconfigured provider).
type Provider interface {
	// Initialize sets up the provider with credentials and configuration
	Initialize(config map[string]string) error

	// Validate checks if the provider credentials are valid
	Validate(ctx context.Context) (bool, error)

	// ListRegions lists all available regions
	ListRegions(ctx context.Context) ([]Region, error)

	// ListInstanceSizes lists available VM sizes
	ListInstanceSizes(ctx context.Context, region string) ([]InstanceSize, error)

	// ListInstances lists all instances
	ListInstances(ctx context.Context) ([]Instance, error)

	// GetInstance gets a specific instance by ID
	GetInstance(ctx context.Context, id string) (*Instance, error)

	// CreateInstance creates a new instance
	CreateInstance(ctx context.Context, opts InstanceCreateOpts) (*Instance, error)

	// DeleteInstance deletes an instance
	DeleteInstance(ctx context.Context, id string) error

	// StartInstance starts an instance
	StartInstance(ctx context.Context, id string) error

	// StopInstance stops an instance
	StopInstance(ctx context.Context, id string) error

	// RestartInstance restarts an instance
	RestartInstance(ctx context.Context, id string) error

	// ListImages lists available OS images
	ListImages(ctx context.Context) ([]Image, error)

	// ListSSHKeys lists SSH keys
	ListSSHKeys(ctx context.Context) ([]SSHKey, error)

	// CreateSSHKey creates a new SSH key
	CreateSSHKey(ctx context.Context, name, publicKey string) (*SSHKey, error)

	// DeleteSSHKey deletes an SSH key
	DeleteSSHKey(ctx context.Context, id string) error

	// GetInstanceStatus gets the current status of an instance
	GetInstanceStatus(ctx context.Context, id string) (string, error)

	// WaitForInstanceStatus waits for an instance to reach a specific status
	WaitForInstanceStatus(ctx context.Context, id, status string, timeout time.Duration) error
}
+
// ProviderFactory is a function that creates a new provider instance.
type ProviderFactory func() Provider

// providers holds a map of provider factories keyed by provider name.
// NOTE(review): the map is not mutex-protected; RegisterProvider should
// only be called during package initialization, before concurrent reads.
var providers = make(map[string]ProviderFactory)

// RegisterProvider registers a new provider factory under name,
// overwriting any previous registration for the same name.
func RegisterProvider(name string, factory ProviderFactory) {
	providers[name] = factory
}
+
+// GetProvider returns a provider by name
+func GetProvider(name string) (Provider, bool) {
+	factory, ok := providers[name]
+	if !ok {
+		return nil, false
+	}
+	return factory(), true
+}
+
+// GetSupportedProviders returns a list of supported provider names
+func GetSupportedProviders() []string {
+	var names []string
+	for name := range providers {
+		names = append(names, name)
+	}
+	return names
+}

+ 46 - 0
config.sample.yml

@@ -0,0 +1,46 @@
+# Server configuration
+server:
+  host: "0.0.0.0"  # Listen on all interfaces
+  port: 8080       # HTTP port to listen on
+  tls:             # TLS/HTTPS configuration
+    enabled: false  # Set to true to enable HTTPS
+    cert_file: "/path/to/cert.pem"
+    key_file: "/path/to/key.pem"
+
# Database configuration (shape must match config.Database: a "type"
# selector plus a nested "sql" block when type is "sql")
database:
  type: "sql"  # Options: sql, memory
  sql:
    host: "localhost"
    port: 5432
    username: "byop_user"
    password: "secure_password"
    name: "byop_db"
    ssl_mode: "disable"  # Options: disable, require, verify-ca, verify-full
+
# Authentication configuration
auth:
  private_key: "your-jwt-signing-key-here"  # Used to sign JWT tokens
  token_duration: 3600      # Token validity duration in seconds (1 hour)
  cleanup_interval: 86400   # Token cleanup interval in seconds (24 hours)
+
+# Cloud providers configuration
+providers:
+  # OVH configuration
+  ovh:
+    application_key: "your-ovh-app-key"
+    application_secret: "your-ovh-app-secret"
+    consumer_key: "your-ovh-consumer-key"
+    project_id: "your-ovh-project-id"
+    region: "GRA7"  # Optional default region
+
+  # AWS configuration
+  aws:
+    access_key: "your-aws-access-key"
+    secret_key: "your-aws-secret-key"
+    region: "us-east-1"
+
+  # Azure configuration
+  azure:
+    subscription_id: "your-azure-subscription-id"
+    tenant_id: "your-azure-tenant-id"
+    client_id: "your-azure-client-id"
+    client_secret: "your-azure-client-secret"

+ 177 - 0
config/config.go

@@ -0,0 +1,177 @@
+package config
+
+import (
+	"fmt"
+	"os"
+
+	"gopkg.in/yaml.v3"
+)
+
// Config holds application configuration loaded from YAML. Each section
// validates itself via its own UnmarshalYAML hook, so a successfully
// loaded Config has passed all section-level checks.
type Config struct {
	Server    *Server                      `yaml:"server"`
	Database  *Database                    `yaml:"database"`
	Auth      *Auth                        `yaml:"auth"`
	Providers map[string]map[string]string `yaml:"providers"` // provider name -> settings key/value pairs
	Debug     bool                         `yaml:"debug"`
}
+
+func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	type plain Config
+	err := unmarshal((*plain)(c))
+	if err != nil {
+		return err
+	}
+	if c.Server == nil {
+		return fmt.Errorf("server configuration is required")
+	}
+	if c.Database == nil {
+		return fmt.Errorf("database configuration is required")
+	}
+	if c.Auth == nil {
+		return fmt.Errorf("auth configuration is required")
+	}
+	if c.Providers == nil {
+		return fmt.Errorf("at least one provider configuration is required")
+	}
+	return nil
+}
+
// Server holds server configuration.
type Server struct {
	Host string     `yaml:"host"` // Listen address (required)
	Port int        `yaml:"port"` // Listen port; defaults to 443 when omitted
	Tls  *TlsConfig `yaml:"tls"`  // TLS settings; the block itself is mandatory
}
+
// UnmarshalYAML decodes the server section and validates it. The TLS
// block must be present even when TLS is disabled (enabled: false).
func (c *Server) UnmarshalYAML(unmarshal func(interface{}) error) error {
	// Alias type prevents infinite recursion into this method.
	type plain Server
	err := unmarshal((*plain)(c))
	if err != nil {
		return err
	}
	if c.Host == "" {
		return fmt.Errorf("host is required")
	}
	if c.Port == 0 {
		// NOTE(review): defaults to 443 here, while the sample config
		// documents 8080 — confirm the intended default.
		c.Port = 443
	}
	if c.Tls == nil {
		return fmt.Errorf("TLS configuration is required")
	}
	return nil
}
+
// TlsConfig holds TLS configuration.
type TlsConfig struct {
	Enabled  bool   `yaml:"enabled"`   // Serve HTTPS when true
	CertFile string `yaml:"cert_file"` // Path to the PEM certificate; required when Enabled
	KeyFile  string `yaml:"key_file"`  // Path to the PEM private key; required when Enabled
}

// UnmarshalYAML decodes the TLS section and, when TLS is enabled,
// validates that both the certificate and key paths are present.
func (c *TlsConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
	type plain TlsConfig
	if err := unmarshal((*plain)(c)); err != nil {
		return err
	}
	if !c.Enabled {
		return nil
	}
	// Report the exact missing field instead of a single combined error
	// that always blames the cert file.
	if c.CertFile == "" {
		return fmt.Errorf("TLS cert file is required")
	}
	if c.KeyFile == "" {
		return fmt.Errorf("TLS key file is required")
	}
	return nil
}
+
// Database holds database configuration.
type Database struct {
	Type string `yaml:"type"` // Backend kind: "sql" or "memory"
	Sql  *Sql   `yaml:"sql"`  // SQL settings; required when Type == "sql"
}

// UnmarshalYAML decodes the database section and validates the backend
// type, requiring the SQL sub-section for the "sql" backend.
func (c *Database) UnmarshalYAML(unmarshal func(interface{}) error) error {
	type plain Database
	if err := unmarshal((*plain)(c)); err != nil {
		return err
	}
	switch c.Type {
	case "":
		return fmt.Errorf("database type is required")
	case "memory":
		// No extra settings needed for the in-memory backend.
	case "sql":
		if c.Sql == nil {
			return fmt.Errorf("SQL database configuration is required")
		}
	default:
		return fmt.Errorf("unsupported database type: %s", c.Type)
	}
	return nil
}

// Sql holds SQL database configuration.
type Sql struct {
	Host     string `yaml:"host"`
	Port     int    `yaml:"port"` // Defaults to 5432 when omitted
	Username string `yaml:"username"`
	Password string `yaml:"password"`
	Name     string `yaml:"name"`
}

// UnmarshalYAML decodes the SQL section, applies the default port, and
// validates that every mandatory field is present.
func (c *Sql) UnmarshalYAML(unmarshal func(interface{}) error) error {
	type plain Sql
	if err := unmarshal((*plain)(c)); err != nil {
		return err
	}
	if c.Host == "" {
		return fmt.Errorf("host is required")
	}
	if c.Port == 0 {
		c.Port = 5432 // Default PostgreSQL port
	}
	switch {
	case c.Username == "":
		return fmt.Errorf("username is required")
	case c.Password == "":
		return fmt.Errorf("password is required")
	case c.Name == "":
		return fmt.Errorf("database name is required")
	}
	return nil
}
+
// Auth holds authentication configuration.
//
// NOTE(review): the 3600 == "1 hour" defaults imply these durations are
// seconds, but the sample config documents them as nanoseconds — confirm
// the unit and align the sample.
type Auth struct {
	PrivateKey      string `yaml:"private_key"`      // JWT signing key (mandatory)
	TokenDuration   int    `yaml:"token_duration"`   // Token validity; defaults to 3600
	CleanupInterval int    `yaml:"cleanup_interval"` // Expired-token sweep period; defaults to 3600
}

// UnmarshalYAML decodes the auth section, enforcing the signing key and
// filling in one-hour defaults for the durations.
func (c *Auth) UnmarshalYAML(unmarshal func(interface{}) error) error {
	// Alias type prevents infinite recursion into this method.
	type plain Auth
	err := unmarshal((*plain)(c))
	if err != nil {
		return err
	}
	if c.PrivateKey == "" {
		return fmt.Errorf("private key is required")
	}
	if c.TokenDuration == 0 {
		c.TokenDuration = 3600 // Default to 1 hour
	}
	if c.CleanupInterval == 0 {
		c.CleanupInterval = 3600 // Default to 1 hour
	}
	return nil
}
+
+func Load(configPath string) (*Config, error) {
+	cnf := &Config{}
+	b, err := os.ReadFile(configPath)
+	if err != nil {
+		return nil, err
+	}
+	err = yaml.Unmarshal(b, cnf)
+	if err != nil {
+		return nil, err
+	}
+	return cnf, nil
+}

+ 12 - 0
dbmanager/database.go

@@ -0,0 +1,12 @@
+package dbmanager
+
+// DbManager defines the interface for database operations
+type DbManager[T Entity] interface {
+	Connect() error
+	Disconnect() error
+	Create(entityType string, entity T) error
+	GetByID(entityType string, id string) (T, error)
+	Update(entityType string, entity T) error
+	Delete(entityType string, id string) error
+	List(entityType string, filter map[string]interface{}) ([]T, error)
+}

+ 7 - 0
dbmanager/entity.go

@@ -0,0 +1,7 @@
+package dbmanager
+
+// Entity represents a database entity with an ID
+type Entity interface {
+	GetID() string
+	SetID(id string)
+}

+ 45 - 0
dbmanager/factory.go

@@ -0,0 +1,45 @@
+// In dbmanager/factory.go
+package dbmanager
+
+import (
+	"fmt"
+
+	"git.linuxforward.com/byop/byop-engine/models"
+)
+
// DbManagerFactory builds type-safe database managers for a configured
// backend.
type DbManagerFactory struct {
	dbType string // Backend identifier: "memory" or "sql"
}

// NewDbManagerFactory returns a factory producing managers for dbType.
func NewDbManagerFactory(dbType string) *DbManagerFactory {
	f := &DbManagerFactory{}
	f.dbType = dbType
	return f
}
+
+// CreateEntityManager creates a DbManager for the generic Entity interface
+func (f *DbManagerFactory) CreateEntityManager() (DbManager[Entity], error) {
+	switch f.dbType {
+	case "memory":
+		return NewMemoryDbManager[Entity](), nil
+	case "sql":
+		return nil, fmt.Errorf("SQL database type is not implemented yet")
+	default:
+		return nil, fmt.Errorf("unsupported database type: %s", f.dbType)
+	}
+}
+
// CreateClientManager creates a DbManager specifically for Client
// entities. Only the "memory" backend is currently implemented; "sql"
// is recognized but returns a not-implemented error.
func (f *DbManagerFactory) CreateClientManager() (DbManager[*models.Client], error) {
	switch f.dbType {
	case "memory":
		return NewMemoryDbManager[*models.Client](), nil
	case "sql":
		return nil, fmt.Errorf("SQL database type is not implemented yet")
	default:
		return nil, fmt.Errorf("unsupported database type: %s", f.dbType)
	}
}

+ 139 - 0
dbmanager/memory.go

@@ -0,0 +1,139 @@
+package dbmanager
+
+import (
+	"fmt"
+	"reflect"
+)
+
// MemoryDbManager implements DbManager using in-memory storage.
//
// NOTE(review): access to collections is not synchronized; concurrent
// use from multiple goroutines requires external locking.
type MemoryDbManager[T Entity] struct {
	// collections maps entityType -> (entity ID -> entity).
	collections map[string]map[string]T
}

// NewMemoryDbManager creates a new memory-based database manager with an
// empty set of collections.
func NewMemoryDbManager[T Entity]() *MemoryDbManager[T] {
	return &MemoryDbManager[T]{
		collections: make(map[string]map[string]T),
	}
}
+
// Connect initializes the memory database. It is a no-op for the
// in-memory implementation.
func (db *MemoryDbManager[T]) Connect() error {
	// Nothing to do for memory implementation
	return nil
}

// Disconnect cleans up the memory database by replacing every collection
// with a fresh empty map, making the old entities garbage-collectable.
func (db *MemoryDbManager[T]) Disconnect() error {
	// Clear collections to free memory
	db.collections = make(map[string]map[string]T)
	return nil
}
+
+// getCollection returns the collection for the given entity type
+func (db *MemoryDbManager[T]) getCollection(entityType string) map[string]T {
+	if _, exists := db.collections[entityType]; !exists {
+		db.collections[entityType] = make(map[string]T)
+	}
+	return db.collections[entityType]
+}
+
+// Create creates a new entity in memory
+func (db *MemoryDbManager[T]) Create(entityType string, entity T) error {
+	collection := db.getCollection(entityType)
+	id := entity.GetID()
+
+	if _, exists := collection[id]; exists {
+		return fmt.Errorf("entity with ID %s already exists in %s", id, entityType)
+	}
+
+	collection[id] = entity
+	return nil
+}
+
+// GetByID retrieves an entity by ID from memory
+func (db *MemoryDbManager[T]) GetByID(entityType string, id string) (T, error) {
+	collection := db.getCollection(entityType)
+
+	if entity, exists := collection[id]; exists {
+		return entity, nil
+	}
+
+	var zero T
+	return zero, fmt.Errorf("entity with ID %s not found in %s", id, entityType)
+}
+
+// Update updates an existing entity in memory
+func (db *MemoryDbManager[T]) Update(entityType string, entity T) error {
+	collection := db.getCollection(entityType)
+	id := entity.GetID()
+
+	if _, exists := collection[id]; !exists {
+		return fmt.Errorf("entity with ID %s does not exist in %s", id, entityType)
+	}
+
+	collection[id] = entity
+	return nil
+}
+
+// Delete deletes an entity from memory
+func (db *MemoryDbManager[T]) Delete(entityType string, id string) error {
+	collection := db.getCollection(entityType)
+
+	if _, exists := collection[id]; !exists {
+		return fmt.Errorf("entity with ID %s does not exist in %s", id, entityType)
+	}
+
+	delete(collection, id)
+	return nil
+}
+
+// List retrieves entities based on a filter from memory
+func (db *MemoryDbManager[T]) List(entityType string, filter map[string]interface{}) ([]T, error) {
+	collection := db.getCollection(entityType)
+	result := make([]T, 0)
+
+	// If no filter is provided, return all entities
+	if len(filter) == 0 {
+		for _, entity := range collection {
+			result = append(result, entity)
+		}
+		return result, nil
+	}
+
+	// Filter entities based on field values
+	// Note: This is a simple implementation that relies on reflection
+	// and might not be efficient for large datasets
+	for _, entity := range collection {
+		matches := true
+		entityValue := reflect.ValueOf(entity)
+
+		// For structs, we need to get the underlying value
+		if entityValue.Kind() == reflect.Ptr {
+			entityValue = entityValue.Elem()
+		}
+
+		for field, value := range filter {
+			// Find the field in the struct
+			structField := entityValue.FieldByName(field)
+			if !structField.IsValid() {
+				matches = false
+				break
+			}
+
+			// Compare values
+			fieldValue := structField.Interface()
+			if fieldValue != value {
+				matches = false
+				break
+			}
+		}
+
+		if matches {
+			result = append(result, entity)
+		}
+	}
+
+	return result, nil
+}

+ 10 - 0
dbmanager/repository.go

@@ -0,0 +1,10 @@
+package dbmanager
+
+// Repository defines generic CRUD operations for any entity type
+type Repository[T Entity] interface {
+	Create(entity T) error
+	GetByID(id string) (T, error)
+	Update(entity T) error
+	Delete(id string) error
+	List(filter map[string]interface{}) ([]T, error)
+}

+ 56 - 0
dbstore/store.go

@@ -0,0 +1,56 @@
+package dbstore
+
+import (
+	"git.linuxforward.com/byop/byop-engine/dbmanager"
+)
+
// Store provides a generic implementation of Repository by delegating
// every operation to a DbManager with a fixed collection name.
type Store[T dbmanager.Entity] struct {
	Db         dbmanager.DbManager[T] // Underlying database manager
	EntityType string                 // Collection name passed to every Db call
}

// NewStore creates a new generic store bound to db and entityType.
func NewStore[T dbmanager.Entity](db dbmanager.DbManager[T], entityType string) *Store[T] {
	return &Store[T]{
		Db:         db,
		EntityType: entityType,
	}
}
+
// Create inserts a new entity into this store's collection.
func (gs *Store[T]) Create(entity T) error {
	return gs.Db.Create(gs.EntityType, entity)
}
+
+// GetByID retrieves an entity by ID
+func (gs *Store[T]) GetByID(id string) (T, error) {
+	entity, err := gs.Db.GetByID(gs.EntityType, id)
+	var zero T
+	if err != nil {
+		return zero, err
+	}
+	// No need for nil check and type assertion if Db.GetByID returns T directly
+	return entity, nil
+}
+
// Update replaces an existing entity in this store's collection.
func (gs *Store[T]) Update(entity T) error {
	return gs.Db.Update(gs.EntityType, entity)
}

// Delete removes the entity with the given ID from this store's collection.
func (gs *Store[T]) Delete(id string) error {
	return gs.Db.Delete(gs.EntityType, id)
}
+
// List retrieves entities from this store's collection that match the
// field-name -> value filter; an empty filter returns everything.
func (gs *Store[T]) List(filter map[string]interface{}) ([]T, error) {
	entities, err := gs.Db.List(gs.EntityType, filter)
	if err != nil {
		return nil, err
	}

	// No type assertions needed if Db.List returns []T directly
	return entities, nil
}

+ 3 - 0
docker/builder.go

@@ -0,0 +1,3 @@
+package docker
+
+// TODO: Implement builder functionality

+ 3 - 0
docker/compose.go

@@ -0,0 +1,3 @@
+package docker
+
+// TODO: Implement compose functionality

+ 56 - 0
go.mod

@@ -0,0 +1,56 @@
+module git.linuxforward.com/byop/byop-engine
+
+go 1.22
+
+toolchain go1.23.0
+
+require (
+	github.com/prometheus/client_golang v1.22.0
+	gopkg.in/yaml.v3 v3.0.1
+)
+
+require (
+	github.com/bytedance/sonic v1.11.6 // indirect
+	github.com/bytedance/sonic/loader v0.1.1 // indirect
+	github.com/cloudwego/base64x v0.1.4 // indirect
+	github.com/cloudwego/iasm v0.2.0 // indirect
+	github.com/gabriel-vasile/mimetype v1.4.3 // indirect
+	github.com/gin-contrib/sse v0.1.0 // indirect
+	github.com/go-playground/locales v0.14.1 // indirect
+	github.com/go-playground/universal-translator v0.18.1 // indirect
+	github.com/go-playground/validator/v10 v10.20.0 // indirect
+	github.com/goccy/go-json v0.10.2 // indirect
+	github.com/json-iterator/go v1.1.12 // indirect
+	github.com/klauspost/cpuid/v2 v2.2.7 // indirect
+	github.com/kr/text v0.2.0 // indirect
+	github.com/leodido/go-urn v1.4.0 // indirect
+	github.com/mattn/go-isatty v0.0.20 // indirect
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.2 // indirect
+	github.com/pelletier/go-toml/v2 v2.2.2 // indirect
+	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
+	github.com/ugorji/go/codec v1.2.12 // indirect
+	golang.org/x/arch v0.8.0 // indirect
+	golang.org/x/crypto v0.31.0 // indirect
+	golang.org/x/net v0.33.0 // indirect
+	golang.org/x/oauth2 v0.24.0 // indirect
+	golang.org/x/text v0.21.0 // indirect
+	gopkg.in/ini.v1 v1.67.0 // indirect
+)
+
+require (
+	github.com/beorn7/perks v1.0.1 // indirect
+	github.com/cespare/xxhash/v2 v2.3.0 // indirect
+	github.com/gin-gonic/gin v1.10.0
	github.com/golang-jwt/jwt v3.2.2+incompatible // TODO: legacy v3 module; migrate remaining uses to jwt/v5 (required below) and drop this
+	github.com/golang-jwt/jwt/v5 v5.2.2
+	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
+	github.com/ovh/go-ovh v1.7.0
+	github.com/pkg/errors v0.9.1
+	github.com/prometheus/client_model v0.6.1 // indirect
+	github.com/prometheus/common v0.62.0 // indirect
+	github.com/prometheus/procfs v0.15.1 // indirect
+	github.com/sirupsen/logrus v1.9.3
+	golang.org/x/sys v0.30.0 // indirect
+	google.golang.org/protobuf v1.36.5 // indirect
+)

+ 129 - 0
go.sum

@@ -0,0 +1,129 @@
+github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
+github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
+github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
+github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
+github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
+github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
+github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
+github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
+github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
+github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
+github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
+github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
+github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
+github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
+github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
+github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
+github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
+github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
+github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
+github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
+github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
+github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
+github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
+github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
+github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/jarcoal/httpmock v1.3.0 h1:2RJ8GP0IIaWwcC9Fp2BmVi8Kog3v2Hn7VXM3fTd+nuc=
+github.com/jarcoal/httpmock v1.3.0/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
+github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
+github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
+github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
+github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/maxatome/go-testdeep v1.12.0 h1:Ql7Go8Tg0C1D/uMMX59LAoYK7LffeJQ6X2T04nTH68g=
+github.com/maxatome/go-testdeep v1.12.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/ovh/go-ovh v1.7.0 h1:V14nF7FwDjQrZt9g7jzcvAAQ3HN6DNShRFRMC3jLoPw=
+github.com/ovh/go-ovh v1.7.0/go.mod h1:cTVDnl94z4tl8pP1uZ/8jlVxntjSIf09bNcQ5TJSC7c=
+github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
+github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
+github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
+github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
+github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
+github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
+github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
+github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
+github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
+github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
+github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
+github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
+github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
+github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
+github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
+golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
+golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
+golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
+golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
+golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
+golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
+golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE=
+golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
+golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
+golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
+google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
+google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
+gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
+rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

+ 67 - 0
handlers/auth.go

@@ -0,0 +1,67 @@
+package handlers
+
+import (
+	"net/http"
+
+	"git.linuxforward.com/byop/byop-engine/auth"
+	"github.com/gin-gonic/gin"
+)
+
+// AuthHandler handles authentication-related operations
+// (login, token refresh, logout) and delegates token work to the
+// injected auth.Service.
+type AuthHandler struct {
+	// authService issues and validates tokens; see auth package.
+	authService auth.Service
+}
+
+// NewAuthHandler creates a new AuthHandler with the given auth service.
+func NewAuthHandler(authService auth.Service) *AuthHandler {
+	return &AuthHandler{
+		authService: authService,
+	}
+}
+
+// RegisterRoutes registers authentication routes on the given group.
+func (h *AuthHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.POST("/login", h.Login)
+	r.POST("/refresh-token", h.RefreshToken)
+	r.POST("/logout", h.Logout)
+}
+
+// Login handles user authentication.
+//
+// SECURITY TODO: the password is bound but never verified — a token is
+// issued for any email the service accepts. Wire this to a real
+// credential check (e.g. bcrypt compare) before production use.
+func (h *AuthHandler) Login(c *gin.Context) {
+	var credentials struct {
+		Email    string `json:"email"`
+		Password string `json:"password"`
+	}
+
+	if err := c.ShouldBindJSON(&credentials); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// Reject obviously malformed requests before touching the service.
+	if credentials.Email == "" || credentials.Password == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Email and password are required"})
+		return
+	}
+
+	resp, err := h.authService.GenerateToken(c, credentials.Email)
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate token"})
+		return
+	}
+
+	c.JSON(http.StatusOK, resp)
+}
+
+// RefreshToken handles token refresh.
+// NOTE(review): this is a stub — it returns a hardcoded dummy token
+// with 200 OK and does not validate the presented token. Must be
+// implemented (or return 501) before exposure.
+func (h *AuthHandler) RefreshToken(c *gin.Context) {
+	// TODO: Implement token refresh logic
+
+	resp := gin.H{
+		"token": "new-dummy-token",
+	}
+
+	c.JSON(http.StatusOK, resp)
+}
+
+// Logout handles user logout.
+// NOTE(review): no token/session invalidation happens yet; clients only
+// receive 204.
+func (h *AuthHandler) Logout(c *gin.Context) {
+	// TODO: Implement logout logic
+
+	c.Status(http.StatusNoContent)
+}

+ 250 - 0
handlers/autodeploy.go

@@ -0,0 +1,250 @@
+package handlers
+
+import (
+	"crypto/subtle"
+	"encoding/json"
+	"fmt"
+	"log"
+	"net/http"
+	"sync"
+	"time"
+
+	"git.linuxforward.com/byop/byop-engine/models"
+	"github.com/gin-gonic/gin"
+)
+
+// AutoDeployHandler handles auto-deployment related operations.
+// autoDeploySettings is guarded by settingsMutex; deploymentQueue is a
+// bounded buffer consumed asynchronously (consumer not in this file —
+// TODO confirm where it is drained).
+type AutoDeployHandler struct {
+	autoDeploySettings *models.AutoDeploySettings
+	settingsMutex      sync.RWMutex
+	deploymentQueue    chan *models.AutoDeployRequest
+}
+
+// defaultDeploymentQueueSize bounds pending auto-deploy requests before
+// callers are told to retry later.
+const defaultDeploymentQueueSize = 100
+
+// NewAutoDeployHandler creates a new AutoDeployHandler.
+//
+// Bug fix: the previous version returned a zero struct, leaving
+// autoDeploySettings nil (every handler then panicked dereferencing
+// h.autoDeploySettings.Enabled) and deploymentQueue nil (sends always
+// fell through to the "queue full" branch). Both fields are now
+// initialized.
+func NewAutoDeployHandler() *AutoDeployHandler {
+	return &AutoDeployHandler{
+		autoDeploySettings: &models.AutoDeploySettings{},
+		deploymentQueue:    make(chan *models.AutoDeployRequest, defaultDeploymentQueueSize),
+	}
+}
+
+// RegisterRoutes registers routes for auto-deployment operations:
+// per-client deployment, webhook ingestion, and settings read/update.
+func (h *AutoDeployHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.POST("/client", h.AutoDeployClient)
+	r.POST("/webhook", h.AutoDeployWebhook)
+	r.PUT("/settings", h.UpdateAutoDeploySettings)
+	r.GET("/settings", h.GetAutoDeploySettings)
+}
+
+// AutoDeployClient handles requests to deploy for a client.
+// It validates the request, checks that auto-deployment is enabled,
+// and queues the request for asynchronous processing (202 Accepted).
+func (h *AutoDeployHandler) AutoDeployClient(c *gin.Context) {
+	// Parse the request
+	var request *models.AutoDeployRequest
+	if err := c.ShouldBindJSON(&request); err != nil || request == nil {
+		c.JSON(http.StatusBadRequest, gin.H{
+			"error": "Invalid request body",
+		})
+		return
+	}
+
+	// A deployment without a client is meaningless; reject early.
+	if request.ClientID == "" {
+		c.JSON(http.StatusBadRequest, gin.H{
+			"error": "client_id is required",
+		})
+		return
+	}
+
+	// Snapshot settings under the read lock. The nil guard protects
+	// against a handler constructed without initialized settings.
+	h.settingsMutex.RLock()
+	settings := h.autoDeploySettings
+	h.settingsMutex.RUnlock()
+
+	if settings == nil || !settings.Enabled {
+		c.JSON(http.StatusServiceUnavailable, gin.H{
+			"error": "Auto-deployment is currently disabled",
+		})
+		return
+	}
+
+	// Queue the deployment request without blocking; a send on a full
+	// (or nil) channel falls through to the default branch.
+	select {
+	case h.deploymentQueue <- request:
+		// Successfully queued
+	default:
+		c.JSON(http.StatusServiceUnavailable, gin.H{
+			"error": "Deployment queue is full, try again later",
+		})
+		return
+	}
+
+	// Return a success response
+	c.JSON(http.StatusAccepted, gin.H{
+		"status":   "queued",
+		"message":  "Deployment request has been queued",
+		"clientId": request.ClientID,
+		"queuedAt": time.Now(),
+	})
+}
+
+// AutoDeployWebhook handles webhook triggers for auto-deployment.
+// Supported events: "client.created" (optionally auto-deploys new
+// clients, best-effort) and "deployment.requested" (queues an explicit
+// deployment). Any other event yields 400.
+func (h *AutoDeployHandler) AutoDeployWebhook(c *gin.Context) {
+	// Parse the webhook payload
+	var payload *models.WebhookPayload
+	if err := c.ShouldBindJSON(&payload); err != nil || payload == nil {
+		c.JSON(http.StatusBadRequest, gin.H{
+			"error": "Invalid webhook payload",
+		})
+		return
+	}
+
+	// Snapshot settings once under the read lock so signature check,
+	// enabled flag, and auto-deploy flag come from one consistent view
+	// (the old code re-locked per field and could see mixed versions).
+	h.settingsMutex.RLock()
+	settings := h.autoDeploySettings
+	h.settingsMutex.RUnlock()
+	if settings == nil {
+		c.JSON(http.StatusServiceUnavailable, gin.H{
+			"error": "Auto-deployment is currently disabled",
+		})
+		return
+	}
+
+	// Validate the webhook signature in constant time; the previous
+	// string != comparison leaked the secret through timing.
+	// TODO: replace the shared-secret header with an HMAC of the body.
+	sig := c.GetHeader("X-Webhook-Signature")
+	if subtle.ConstantTimeCompare([]byte(sig), []byte(settings.WebhookSecret)) != 1 {
+		c.JSON(http.StatusUnauthorized, gin.H{
+			"error": "Invalid webhook signature",
+		})
+		return
+	}
+
+	if !settings.Enabled {
+		c.JSON(http.StatusServiceUnavailable, gin.H{
+			"error": "Auto-deployment is currently disabled",
+		})
+		return
+	}
+
+	// Handle different event types
+	switch payload.Event {
+	case "client.created":
+		// Auto-deploy new clients when enabled in settings.
+		if settings.AutoDeployNewClients && payload.ClientID != "" {
+			// Queue a deployment request
+			request := &models.AutoDeployRequest{
+				ClientID: payload.ClientID,
+				// Use defaults for other fields
+			}
+
+			select {
+			case h.deploymentQueue <- request:
+				// Successfully queued
+			default:
+				// Queue is full: this event is best-effort, so log and
+				// still acknowledge the webhook below.
+				log.Printf("Deployment queue is full, skipping auto-deployment for client %s", payload.ClientID)
+			}
+		}
+
+	case "deployment.requested":
+		// Handle explicit deployment requests
+		if payload.ClientID == "" {
+			c.JSON(http.StatusBadRequest, gin.H{
+				"error": "client_id is required for deployment requests",
+			})
+			return
+		}
+
+		// Parse the deployment data
+		var deployData struct {
+			TemplateID string            `json:"template_id"`
+			ProviderID string            `json:"provider_id"`
+			Region     string            `json:"region"`
+			Tags       map[string]string `json:"tags"`
+		}
+
+		if err := json.Unmarshal(payload.Data, &deployData); err != nil {
+			c.JSON(http.StatusBadRequest, gin.H{
+				"error": "Invalid deployment data",
+			})
+			return
+		}
+
+		// Queue the deployment
+		request := &models.AutoDeployRequest{
+			ClientID:   payload.ClientID,
+			TemplateID: deployData.TemplateID,
+			ProviderID: deployData.ProviderID,
+			Region:     deployData.Region,
+			Tags:       deployData.Tags,
+		}
+
+		select {
+		case h.deploymentQueue <- request:
+			// Successfully queued
+		default:
+			// Queue is full
+			c.JSON(http.StatusServiceUnavailable, gin.H{
+				"error": "Deployment queue is full, try again later",
+			})
+			return
+		}
+
+	default:
+		// Unknown event type
+		c.JSON(http.StatusBadRequest, gin.H{
+			"error": fmt.Sprintf("Unsupported event type: %s", payload.Event),
+		})
+		return
+	}
+
+	// Return a success response
+	c.JSON(http.StatusOK, gin.H{
+		"status":    "received",
+		"event":     payload.Event,
+		"timestamp": time.Now(),
+		"requestId": payload.RequestID,
+	})
+}
+
+// UpdateAutoDeploySettings updates the auto-deployment settings.
+// The webhook secret is carried over from the previous settings when
+// the request omits it, and is never echoed back to the caller.
+func (h *AutoDeployHandler) UpdateAutoDeploySettings(c *gin.Context) {
+	// Parse the settings
+	var settings *models.AutoDeploySettings
+	if err := c.ShouldBindJSON(&settings); err != nil || settings == nil {
+		c.JSON(http.StatusBadRequest, gin.H{
+			"error": "Invalid settings format",
+		})
+		return
+	}
+
+	// Validate settings
+	if settings.DefaultProviderID == "" {
+		c.JSON(http.StatusBadRequest, gin.H{
+			"error": "default_provider_id is required",
+		})
+		return
+	}
+
+	if settings.DefaultTemplateID == "" {
+		c.JSON(http.StatusBadRequest, gin.H{
+			"error": "default_template_id is required",
+		})
+		return
+	}
+
+	// Swap in the new settings under the write lock.
+	h.settingsMutex.Lock()
+	// Keep the webhook secret if not provided (nil guard for a handler
+	// whose settings were never initialized).
+	if settings.WebhookSecret == "" && h.autoDeploySettings != nil {
+		settings.WebhookSecret = h.autoDeploySettings.WebhookSecret
+	}
+	h.autoDeploySettings = settings
+	// Bug fix: the old code copied the *pointer* after unlocking and
+	// then overwrote WebhookSecret on it — destroying the stored secret
+	// and writing shared state while holding only a read lock. Mask a
+	// value copy instead.
+	masked := *settings
+	h.settingsMutex.Unlock()
+
+	masked.WebhookSecret = "********" // Hide the actual secret
+	c.JSON(http.StatusOK, &masked)
+}
+
+// GetAutoDeploySettings returns the current auto-deployment settings
+// with the webhook secret masked.
+func (h *AutoDeployHandler) GetAutoDeploySettings(c *gin.Context) {
+	h.settingsMutex.RLock()
+	if h.autoDeploySettings == nil {
+		// Settings were never initialized; report an empty object
+		// rather than panicking.
+		h.settingsMutex.RUnlock()
+		c.JSON(http.StatusOK, gin.H{})
+		return
+	}
+	// Bug fix: the old code copied only the pointer and then mutated
+	// WebhookSecret — a write under RLock that wiped the stored secret.
+	// Copy the struct value before masking.
+	settings := *h.autoDeploySettings
+	h.settingsMutex.RUnlock()
+
+	settings.WebhookSecret = "********" // Hide the actual secret
+	c.JSON(http.StatusOK, &settings)
+}

+ 154 - 0
handlers/clients.go

@@ -0,0 +1,154 @@
+package handlers
+
+import (
+	"fmt"
+	"net/http"
+
+	"git.linuxforward.com/byop/byop-engine/dbmanager"
+	"git.linuxforward.com/byop/byop-engine/dbstore"
+	"git.linuxforward.com/byop/byop-engine/models"
+	"github.com/gin-gonic/gin"
+)
+
+// ClientHandler handles client-related operations
+type ClientHandler struct {
+	// store provides CRUD access to the "clients" collection.
+	store *dbstore.Store[*models.Client]
+}
+
+// NewClientHandler creates a new ClientHandler backed by the given
+// database manager; all operations go through a dbstore.Store bound to
+// the "clients" collection.
+func NewClientHandler(db dbmanager.DbManager[*models.Client]) *ClientHandler {
+	// Initialize the store for client operations
+	store := dbstore.NewStore(db, "clients")
+	return &ClientHandler{
+		store: store,
+	}
+}
+
+// RegisterRoutes registers routes for client operations.
+// NOTE(review): gin redirects between "" and "/" on group roots —
+// confirm the mount point matches client expectations.
+func (h *ClientHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.GET("/", h.ListClients)
+	r.POST("/", h.CreateClient)
+	r.GET("/:id", h.GetClient)
+	r.PUT("/:id", h.UpdateClient)
+	r.DELETE("/:id", h.DeleteClient)
+	r.GET("/:id/deployments", h.GetClientDeployments)
+}
+
+// ListClients returns all clients.
+// Passing nil to List presumably means "no filter" — TODO confirm
+// against dbstore.Store semantics.
+func (h *ClientHandler) ListClients(c *gin.Context) {
+	clients, err := h.store.List(nil)
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Failed to fetch clients: %v", err)})
+		return
+	}
+
+	c.JSON(http.StatusOK, clients)
+}
+
+// CreateClient creates a new client from the JSON request body and
+// returns it with 201 Created.
+func (h *ClientHandler) CreateClient(c *gin.Context) {
+	var client models.Client
+
+	if err := c.ShouldBindJSON(&client); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// Create a pointer to the client for the repository
+	clientPtr := &client
+
+	if err := h.store.Create(clientPtr); err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Failed to create client: %v", err)})
+		return
+	}
+
+	c.JSON(http.StatusCreated, client)
+}
+
+// GetClient returns a specific client by its :id path parameter.
+// A nil result from the store (no error) is treated as not-found.
+func (h *ClientHandler) GetClient(c *gin.Context) {
+	id := c.Param("id")
+
+	client, err := h.store.GetByID(id)
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Failed to fetch client: %v", err)})
+		return
+	}
+
+	if client == nil {
+		c.JSON(http.StatusNotFound, gin.H{"error": "Client not found"})
+		return
+	}
+
+	c.JSON(http.StatusOK, client)
+}
+
+// UpdateClient updates a client: verifies existence, binds the new
+// data, and forces the ID from the URL so the body cannot retarget a
+// different record.
+func (h *ClientHandler) UpdateClient(c *gin.Context) {
+	id := c.Param("id")
+
+	// Check if client exists
+	existingClient, err := h.store.GetByID(id)
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Failed to fetch client: %v", err)})
+		return
+	}
+
+	if existingClient == nil {
+		c.JSON(http.StatusNotFound, gin.H{"error": "Client not found"})
+		return
+	}
+
+	// Parse updated client data
+	var updatedClient models.Client
+	if err := c.ShouldBindJSON(&updatedClient); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// Set the ID to ensure it matches the URL parameter
+	updatedClient.ID = id
+	clientPtr := &updatedClient
+
+	if err := h.store.Update(clientPtr); err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Failed to update client: %v", err)})
+		return
+	}
+
+	c.JSON(http.StatusOK, updatedClient)
+}
+
+// DeleteClient deletes a client.
+// NOTE(review): a missing client is not distinguished from a real
+// failure here — consider returning 404 like GetClient does.
+func (h *ClientHandler) DeleteClient(c *gin.Context) {
+	id := c.Param("id")
+
+	if err := h.store.Delete(id); err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Failed to delete client: %v", err)})
+		return
+	}
+
+	c.Status(http.StatusNoContent)
+}
+
+// GetClientDeployments returns all deployments for a client.
+// Currently always returns an empty list after verifying the client
+// exists.
+func (h *ClientHandler) GetClientDeployments(c *gin.Context) {
+	id := c.Param("id")
+
+	// Check if client exists
+	client, err := h.store.GetByID(id)
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Failed to fetch client: %v", err)})
+		return
+	}
+
+	if client == nil {
+		c.JSON(http.StatusNotFound, gin.H{"error": "Client not found"})
+		return
+	}
+
+	// TODO: Retrieve deployments - this likely requires a separate repository or service
+	// For now, return an empty list
+	deployments := []models.Deployment{}
+
+	c.JSON(http.StatusOK, deployments)
+}

+ 181 - 0
handlers/deployments.go

@@ -0,0 +1,181 @@
+package handlers
+
+import (
+	"net/http"
+
+	"git.linuxforward.com/byop/byop-engine/models"
+	"github.com/gin-gonic/gin"
+)
+
+// DeploymentHandler handles deployment-related operations.
+// All handlers below are stubs returning placeholder data until the
+// persistence/orchestration layers are wired in.
+type DeploymentHandler struct {
+	// Add any dependencies needed for deployment operations
+}
+
+// NewDeploymentHandler creates a new DeploymentHandler
+func NewDeploymentHandler() *DeploymentHandler {
+	return &DeploymentHandler{}
+}
+
+// RegisterRoutes registers routes for deployment operations:
+// CRUD plus lifecycle (start/stop/restart/scale) and observability
+// (logs/metrics) endpoints.
+func (h *DeploymentHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.GET("/", h.ListDeployments)
+	r.POST("/", h.CreateDeployment)
+	r.GET("/:id", h.GetDeployment)
+	r.PUT("/:id", h.UpdateDeployment)
+	r.DELETE("/:id", h.DeleteDeployment)
+	r.POST("/:id/start", h.StartDeployment)
+	r.POST("/:id/stop", h.StopDeployment)
+	r.POST("/:id/restart", h.RestartDeployment)
+	r.GET("/:id/logs", h.GetDeploymentLogs)
+	r.GET("/:id/metrics", h.GetDeploymentMetrics)
+	r.POST("/:id/scale", h.ScaleDeployment)
+}
+
+// ListDeployments returns all deployments (stub: empty list).
+func (h *DeploymentHandler) ListDeployments(c *gin.Context) {
+	// TODO: Fetch deployments from database
+	deployments := []models.Deployment{}
+
+	c.JSON(http.StatusOK, deployments)
+}
+
+// CreateDeployment creates a new deployment (stub: echoes the bound
+// body with 201 without persisting).
+func (h *DeploymentHandler) CreateDeployment(c *gin.Context) {
+	var deployment models.Deployment
+
+	if err := c.ShouldBindJSON(&deployment); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// TODO: Save deployment to database
+
+	c.JSON(http.StatusCreated, deployment)
+}
+
+// GetDeployment returns a specific deployment (stub: echoes the ID).
+// NOTE(review): gin never yields an empty :id for a matched route, so
+// the guard is defensive only.
+func (h *DeploymentHandler) GetDeployment(c *gin.Context) {
+	id := c.Param("id")
+	if id == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Missing deployment ID"})
+		return
+	}
+
+	// TODO: Fetch deployment from database
+	deployment := models.Deployment{ID: id}
+
+	c.JSON(http.StatusOK, deployment)
+}
+
+// UpdateDeployment updates a deployment (stub: echoes the bound body
+// with the URL ID forced, without persisting).
+func (h *DeploymentHandler) UpdateDeployment(c *gin.Context) {
+	id := c.Param("id")
+	if id == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Missing deployment ID"})
+		return
+	}
+
+	var deployment models.Deployment
+	if err := c.ShouldBindJSON(&deployment); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	deployment.ID = id
+	// TODO: Update deployment in database
+
+	c.JSON(http.StatusOK, deployment)
+}
+
+// DeleteDeployment deletes a deployment.
+// Consistency fix: read and validate the :id parameter like every
+// sibling handler (the previous version ignored it entirely).
+func (h *DeploymentHandler) DeleteDeployment(c *gin.Context) {
+	id := c.Param("id")
+	if id == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Missing deployment ID"})
+		return
+	}
+
+	// TODO: Delete deployment `id` from database
+
+	c.Status(http.StatusNoContent)
+}
+
+// StartDeployment starts a deployment (stub: echoes a status map).
+func (h *DeploymentHandler) StartDeployment(c *gin.Context) {
+	id := c.Param("id")
+	if id == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Missing deployment ID"})
+		return
+	}
+
+	// TODO: Start deployment
+	result := map[string]string{"status": "started", "id": id}
+
+	c.JSON(http.StatusOK, result)
+}
+
+// StopDeployment stops a deployment (stub: echoes a status map).
+func (h *DeploymentHandler) StopDeployment(c *gin.Context) {
+	id := c.Param("id")
+	if id == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Missing deployment ID"})
+		return
+	}
+
+	// TODO: Stop deployment
+	result := map[string]string{"status": "stopped", "id": id}
+
+	c.JSON(http.StatusOK, result)
+}
+
+// RestartDeployment restarts a deployment (stub: echoes a status map).
+func (h *DeploymentHandler) RestartDeployment(c *gin.Context) {
+	id := c.Param("id")
+	if id == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Missing deployment ID"})
+		return
+	}
+
+	// TODO: Restart deployment
+	result := map[string]string{"status": "restarted", "id": id}
+
+	c.JSON(http.StatusOK, result)
+}
+
+// GetDeploymentLogs returns logs for a deployment.
+// NOTE(review): the :id parameter is not read yet; the stub returns
+// fixed sample lines for every deployment.
+func (h *DeploymentHandler) GetDeploymentLogs(c *gin.Context) {
+	// TODO: Fetch deployment logs
+	logs := []string{"Log entry 1", "Log entry 2"}
+
+	c.JSON(http.StatusOK, logs)
+}
+
+// GetDeploymentMetrics returns metrics for a deployment.
+// NOTE(review): the :id parameter is not read yet; the stub returns an
+// empty sample list.
+func (h *DeploymentHandler) GetDeploymentMetrics(c *gin.Context) {
+	// TODO: Fetch deployment metrics
+	metrics := []models.MetricSample{}
+
+	c.JSON(http.StatusOK, metrics)
+}
+
+// ScaleDeployment scales a deployment to the requested replica count
+// (stub: echoes the request). NOTE(review): negative replica counts are
+// not rejected yet.
+func (h *DeploymentHandler) ScaleDeployment(c *gin.Context) {
+	id := c.Param("id")
+	if id == "" {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Missing deployment ID"})
+		return
+	}
+
+	var scaleRequest struct {
+		Replicas int `json:"replicas"`
+	}
+
+	if err := c.ShouldBindJSON(&scaleRequest); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// TODO: Scale deployment
+	result := map[string]interface{}{
+		"id":       id,
+		"replicas": scaleRequest.Replicas,
+		"status":   "scaling",
+	}
+
+	c.JSON(http.StatusOK, result)
+}

+ 129 - 0
handlers/monitoring.go

@@ -0,0 +1,129 @@
+package handlers
+
+import (
+	"fmt"
+	"net/http"
+
+	"github.com/gin-gonic/gin"
+)
+
+// MonitoringHandler handles monitoring-related operations.
+// All handlers below return hardcoded placeholder data until a metrics
+// backend is wired in.
+type MonitoringHandler struct {
+	// Add any dependencies needed for monitoring operations
+}
+
+// NewMonitoringHandler creates a new MonitoringHandler
+func NewMonitoringHandler() *MonitoringHandler {
+	return &MonitoringHandler{}
+}
+
+// RegisterRoutes registers routes for monitoring operations:
+// system/deployment overviews and alert CRUD.
+func (h *MonitoringHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.GET("/overview", h.MonitoringOverview)
+	r.GET("/deployments/:id", h.MonitoringDeployment)
+	r.GET("/alerts", h.ListAlerts)
+	r.POST("/alerts", h.CreateAlert)
+	r.GET("/alerts/:id", h.GetAlert)
+	r.PUT("/alerts/:id", h.UpdateAlert)
+	r.DELETE("/alerts/:id", h.DeleteAlert)
+}
+
+// MonitoringOverview returns an overview of system monitoring.
+// Stub: the numbers below are fixed sample values.
+func (h *MonitoringHandler) MonitoringOverview(c *gin.Context) {
+	overview := map[string]interface{}{
+		"total_deployments":    10,
+		"active_deployments":   8,
+		"inactive_deployments": 2,
+		"alerts":               1,
+		"avg_cpu_usage":        45.2,
+		"avg_memory_usage":     60.1,
+	}
+
+	c.JSON(http.StatusOK, overview)
+}
+
+// MonitoringDeployment returns monitoring data for a deployment.
+// Stub: only the deployment_id reflects the request.
+func (h *MonitoringHandler) MonitoringDeployment(c *gin.Context) {
+	id := c.Param("id")
+
+	data := map[string]interface{}{
+		"deployment_id": id,
+		"cpu_usage":     42.5,
+		"memory_usage":  58.7,
+		"disk_usage":    30.2,
+		"network_in":    1024,
+		"network_out":   2048,
+		"uptime":        "10d 4h 30m",
+	}
+
+	c.JSON(http.StatusOK, data)
+}
+
+// ListAlerts returns all monitoring alerts (stub: one sample alert).
+func (h *MonitoringHandler) ListAlerts(c *gin.Context) {
+	alerts := []map[string]interface{}{
+		{
+			"id":            "alert-id",
+			"name":          "High CPU Usage",
+			"condition":     "cpu_usage > 80",
+			"deployment_id": "deployment-id",
+			"status":        "active",
+		},
+	}
+
+	c.JSON(http.StatusOK, alerts)
+}
+
+// CreateAlert creates a new monitoring alert.
+// Stub: assigns a fixed ID and echoes the body without validation or
+// persistence.
+func (h *MonitoringHandler) CreateAlert(c *gin.Context) {
+	var alert map[string]interface{}
+
+	if err := c.ShouldBindJSON(&alert); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	alert["id"] = "new-alert-id"
+
+	c.JSON(http.StatusCreated, alert)
+}
+
+// GetAlert returns a specific alert (stub: fixed sample fields).
+func (h *MonitoringHandler) GetAlert(c *gin.Context) {
+	id := c.Param("id")
+
+	alert := map[string]interface{}{
+		"id":            id,
+		"name":          "High CPU Usage",
+		"condition":     "cpu_usage > 80",
+		"deployment_id": "deployment-id",
+		"status":        "active",
+	}
+
+	c.JSON(http.StatusOK, alert)
+}
+
+// UpdateAlert updates a monitoring alert (stub: echoes the body with
+// the URL ID forced).
+func (h *MonitoringHandler) UpdateAlert(c *gin.Context) {
+	id := c.Param("id")
+
+	var alert map[string]interface{}
+	if err := c.ShouldBindJSON(&alert); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	alert["id"] = id
+
+	c.JSON(http.StatusOK, alert)
+}
+
+// DeleteAlert deletes a monitoring alert.
+// NOTE(review): fmt.Println writes to stdout; prefer the project's
+// logging middleware / a structured logger.
+func (h *MonitoringHandler) DeleteAlert(c *gin.Context) {
+	id := c.Param("id")
+
+	fmt.Println("Deleting alert ID:", id)
+
+	// Log deletion (optional)
+	c.Status(http.StatusNoContent)
+}

+ 78 - 0
handlers/providers.go

@@ -0,0 +1,78 @@
+package handlers
+
+import (
+	"fmt"
+	"net/http"
+
+	"git.linuxforward.com/byop/byop-engine/cloud"
+	"git.linuxforward.com/byop/byop-engine/models"
+	"github.com/gin-gonic/gin"
+)
+
+// ProviderHandler handles provider-related operations.
+// Mutating routes are commented out pending a provider store.
+type ProviderHandler struct {
+	// Add any dependencies needed for provider operations
+}
+
+// NewProviderHandler creates a new ProviderHandler
+func NewProviderHandler() *ProviderHandler {
+	return &ProviderHandler{}
+}
+
+// RegisterRoutes registers routes for provider operations.
+func (h *ProviderHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.GET("/", h.ListProviders)
+	// r.POST("/", h.CreateProvider)
+	r.GET("/:id", h.GetProvider)
+	// r.PUT("/:id", h.UpdateProvider)
+	// r.DELETE("/:id", h.DeleteProvider)
+	r.GET("/:id/regions", h.GetProviderRegions)
+	r.POST("/:id/validate", h.ValidateProvider)
+}
+
+// ListProviders returns all providers supported by the cloud package.
+func (h *ProviderHandler) ListProviders(c *gin.Context) {
+	// TODO: Fetch providers from database
+
+	providers := cloud.GetSupportedProviders()
+	if len(providers) == 0 {
+		c.JSON(http.StatusNotFound, gin.H{"error": "No providers found"})
+		return
+	}
+
+	c.JSON(http.StatusOK, providers)
+}
+
+// GetProvider returns a specific provider.
+// Stub: echoes an empty Provider with the requested ID — no existence
+// check yet.
+func (h *ProviderHandler) GetProvider(c *gin.Context) {
+	id := c.Param("id")
+
+	// TODO: Fetch provider from database
+	provider := models.Provider{ID: id}
+
+	c.JSON(http.StatusOK, provider)
+}
+
+// GetProviderRegions returns available regions for a provider.
+// Stub: fixed region list. NOTE(review): fmt.Printf to stdout should be
+// replaced by the project's logger.
+func (h *ProviderHandler) GetProviderRegions(c *gin.Context) {
+	id := c.Param("id")
+
+	fmt.Printf("Fetching regions for provider %s\n", id)
+
+	// TODO: Fetch regions from provider API
+	regions := []string{"us-east-1", "eu-west-1"}
+
+	c.JSON(http.StatusOK, regions)
+}
+
+// ValidateProvider validates provider credentials.
+// Stub: always reports valid=true — must not be trusted until real
+// credential checks are implemented.
+func (h *ProviderHandler) ValidateProvider(c *gin.Context) {
+	id := c.Param("id")
+
+	fmt.Printf("Validating provider %s\n", id)
+
+	// TODO: Validate provider credentials
+	result := map[string]bool{"valid": true}
+
+	c.JSON(http.StatusOK, result)
+}

+ 108 - 0
handlers/templates.go

@@ -0,0 +1,108 @@
+package handlers
+
+import (
+	"fmt"
+	"net/http"
+
+	"git.linuxforward.com/byop/byop-engine/models"
+	"github.com/gin-gonic/gin"
+)
+
+// TemplateHandler handles template-related operations.
+// All handlers are stubs pending a template store.
+type TemplateHandler struct {
+	// Add any dependencies needed for template operations
+}
+
+// NewTemplateHandler creates a new TemplateHandler
+func NewTemplateHandler() *TemplateHandler {
+	return &TemplateHandler{}
+}
+
+// RegisterRoutes registers routes for template operations:
+// CRUD plus a deploy action.
+func (h *TemplateHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.GET("/", h.ListTemplates)
+	r.POST("/", h.CreateTemplate)
+	r.GET("/:id", h.GetTemplate)
+	r.PUT("/:id", h.UpdateTemplate)
+	r.DELETE("/:id", h.DeleteTemplate)
+	r.POST("/:id/deploy", h.DeployTemplate)
+}
+
+// ListTemplates returns all templates (stub: empty list).
+func (h *TemplateHandler) ListTemplates(c *gin.Context) {
+	// Implementation for listing templates
+	templates := []models.Template{}
+	c.JSON(http.StatusOK, templates)
+}
+
+// CreateTemplate creates a new template (stub: echoes the bound body
+// with 201 without persisting).
+func (h *TemplateHandler) CreateTemplate(c *gin.Context) {
+	var template models.Template
+	if err := c.ShouldBindJSON(&template); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// Implementation for creating template
+	c.JSON(http.StatusCreated, template)
+}
+
+// GetTemplate returns a specific template (stub: echoes the ID, no
+// existence check yet).
+func (h *TemplateHandler) GetTemplate(c *gin.Context) {
+	id := c.Param("id")
+
+	// Implementation for getting a template
+	template := models.Template{ID: id}
+	c.JSON(http.StatusOK, template)
+}
+
+// UpdateTemplate updates a template (stub: echoes the body with the
+// URL ID forced, without persisting).
+func (h *TemplateHandler) UpdateTemplate(c *gin.Context) {
+	id := c.Param("id")
+
+	var template models.Template
+	if err := c.ShouldBindJSON(&template); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	template.ID = id
+	// Implementation for updating template
+
+	c.JSON(http.StatusOK, template)
+}
+
+// DeleteTemplate deletes a template.
+// NOTE(review): fmt.Println writes to stdout; route through the
+// project's logger instead.
+func (h *TemplateHandler) DeleteTemplate(c *gin.Context) {
+	id := c.Param("id")
+
+	fmt.Println("Deleting template with ID:", id)
+
+	// Implementation for deleting template
+
+	c.Status(http.StatusNoContent)
+}
+
+// DeployTemplate deploys a template (stub: returns a fixed deployment
+// ID with 202 Accepted).
+func (h *TemplateHandler) DeployTemplate(c *gin.Context) {
+	id := c.Param("id")
+
+	var deployRequest struct {
+		Name string `json:"name"`
+		// Other deployment parameters
+	}
+
+	if err := c.ShouldBindJSON(&deployRequest); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// Implementation for deploying template
+	result := map[string]interface{}{
+		"template_id":   id,
+		"deployment_id": "new-deployment-id",
+		"status":        "deploying",
+	}
+
+	c.JSON(http.StatusAccepted, result)
+}

+ 139 - 0
handlers/tickets.go

@@ -0,0 +1,139 @@
+package handlers
+
+import (
+	"fmt"
+	"net/http"
+
+	"git.linuxforward.com/byop/byop-engine/models"
+	"github.com/gin-gonic/gin"
+)
+
+// TicketHandler handles ticket-related operations.
+// All handlers are stubs pending a ticket store.
+type TicketHandler struct {
+	// Add any dependencies needed for ticket operations
+}
+
+// NewTicketHandler creates a new TicketHandler
+func NewTicketHandler() *TicketHandler {
+	return &TicketHandler{}
+}
+
+// RegisterRoutes registers routes for ticket operations:
+// CRUD (no delete), comments, and a resolve action.
+func (h *TicketHandler) RegisterRoutes(r *gin.RouterGroup) {
+	r.GET("/", h.ListTickets)
+	r.POST("/", h.CreateTicket)
+	r.GET("/:id", h.GetTicket)
+	r.PUT("/:id", h.UpdateTicket)
+	r.GET("/:id/comments", h.GetTicketComments)
+	r.POST("/:id/comments", h.AddTicketComment)
+	r.POST("/:id/resolve", h.ResolveTicket)
+}
+
+// ListTickets returns all tickets (stub: empty list).
+func (h *TicketHandler) ListTickets(c *gin.Context) {
+	// TODO: Fetch tickets from database
+	tickets := []models.Ticket{}
+
+	c.JSON(http.StatusOK, tickets)
+}
+
+// CreateTicket creates a new ticket (stub: echoes the bound body with
+// 201 without persisting).
+func (h *TicketHandler) CreateTicket(c *gin.Context) {
+	var ticket models.Ticket
+
+	if err := c.ShouldBindJSON(&ticket); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// TODO: Save ticket to database
+
+	c.JSON(http.StatusCreated, ticket)
+}
+
+// GetTicket returns a specific ticket (stub: echoes the ID, no
+// existence check yet).
+func (h *TicketHandler) GetTicket(c *gin.Context) {
+	id := c.Param("id")
+
+	// TODO: Fetch ticket from database
+	ticket := models.Ticket{ID: id}
+
+	c.JSON(http.StatusOK, ticket)
+}
+
+// UpdateTicket updates a ticket (stub: echoes the body with the URL ID
+// forced, without persisting).
+func (h *TicketHandler) UpdateTicket(c *gin.Context) {
+	id := c.Param("id")
+
+	var ticket models.Ticket
+	if err := c.ShouldBindJSON(&ticket); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	ticket.ID = id
+	// TODO: Update ticket in database
+
+	c.JSON(http.StatusOK, ticket)
+}
+
+// GetTicketComments returns comments for a ticket (stub: one fixed
+// sample comment). NOTE(review): fmt.Println should be replaced by the
+// project's logger.
+func (h *TicketHandler) GetTicketComments(c *gin.Context) {
+	id := c.Param("id")
+
+	fmt.Println("Fetching comments for ticket ID:", id)
+
+	// TODO: Fetch ticket comments
+	comments := []map[string]interface{}{
+		{
+			"id":      "comment-id",
+			"content": "Comment content",
+			"user":    "User name",
+			"created": "2023-01-01T12:00:00Z",
+		},
+	}
+
+	c.JSON(http.StatusOK, comments)
+}
+
+// AddTicketComment adds a comment to a ticket (stub: echoes the
+// content with fixed id/user/timestamp).
+func (h *TicketHandler) AddTicketComment(c *gin.Context) {
+	id := c.Param("id")
+
+	fmt.Println("Adding comment to ticket ID:", id)
+
+	var comment struct {
+		Content string `json:"content"`
+	}
+
+	if err := c.ShouldBindJSON(&comment); err != nil {
+		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
+		return
+	}
+
+	// TODO: Add comment to ticket
+
+	result := map[string]interface{}{
+		"id":      "new-comment-id",
+		"content": comment.Content,
+		"user":    "Current user",
+		"created": "2023-01-01T12:00:00Z",
+	}
+
+	c.JSON(http.StatusCreated, result)
+}
+
+// ResolveTicket resolves a ticket (stub: fixed resolved timestamp).
+func (h *TicketHandler) ResolveTicket(c *gin.Context) {
+	id := c.Param("id")
+
+	// TODO: Resolve ticket
+
+	result := map[string]interface{}{
+		"id":       id,
+		"status":   "Resolved",
+		"resolved": "2023-01-01T12:00:00Z",
+	}
+
+	c.JSON(http.StatusOK, result)
+}

+ 67 - 0
main.go

@@ -0,0 +1,67 @@
+package main
+
+import (
+	"flag"
+	"fmt"
+	"log"
+	"os"
+
+	"git.linuxforward.com/byop/byop-engine/app"
+	"git.linuxforward.com/byop/byop-engine/config"
+)
+
+// Version information, intended to be overridden at build time via
+// -ldflags "-X main.Version=... -X main.BuildTime=... -X main.GitCommit=...".
+var (
+	Version   = "0.1.0"
+	BuildTime = "unknown"
+	GitCommit = "unknown"
+)
+
+// printVersion writes the version/build/commit triple to stdout.
+func printVersion() {
+	fmt.Printf("BYOP Engine v%s\n", Version)
+	fmt.Printf("Build time: %s\n", BuildTime)
+	fmt.Printf("Git commit: %s\n", GitCommit)
+}
+
+// printHelp writes usage information and the registered flag defaults
+// to stdout.
+func printHelp() {
+	fmt.Printf("BYOP Engine - Bring Your Own Platform engine\n\n")
+	fmt.Printf("Usage: %s [options]\n\n", os.Args[0])
+	fmt.Println("Options:")
+	flag.PrintDefaults()
+}
+
+// main parses flags, loads configuration, builds the application, and
+// runs it; any startup failure is fatal.
+func main() {
+	// 1. Parse command line flags
+	configPath := flag.String("config", "./config.yaml", "Path to configuration file")
+	showVersion := flag.Bool("version", false, "Display version information and exit")
+	showHelp := flag.Bool("help", false, "Display help information and exit")
+	flag.Parse()
+
+	// Handle version and help flags
+	if *showVersion {
+		printVersion()
+		os.Exit(0)
+	}
+
+	if *showHelp {
+		printHelp()
+		os.Exit(0)
+	}
+
+	// 2. Parse configuration
+	// NOTE(review): the default path is ./config.yaml but the repo ships
+	// config.sample.yml — confirm the expected extension.
+	cfg, err := config.Load(*configPath)
+	if err != nil {
+		log.Fatalf("Failed to load configuration: %v", err)
+	}
+
+	// 3. Initialize application
+	app, err := app.NewApp(cfg)
+	if err != nil {
+		log.Fatalf("Failed to initialize application: %v", err)
+	}
+
+	// 4. Start application (blocks until the server exits)
+	if err := app.Run(); err != nil {
+		log.Fatalf("Failed to start application: %v", err)
+	}
+}

+ 110 - 0
middleware/auth.go

@@ -0,0 +1,110 @@
+package middleware
+
import (
	"context"
	"net/http"
	"strings"
	"time"

	"git.linuxforward.com/byop/byop-engine/auth"
	"github.com/gin-gonic/gin"
	"github.com/golang-jwt/jwt/v5"
)
+
// JWT secret key - in production, this should be loaded from environment variables
// SECURITY(review): a hard-coded signing key is checked into version control
// and shared by every deployment; anyone with repo access can mint valid
// tokens. Load this from configuration/environment before shipping.
var jwtSecret = []byte("your-secret-key-here")

// Claims represents the JWT claims
// carried by tokens minted via GenerateToken: the application user ID and
// role, plus the standard registered claims (exp/iat/sub).
type Claims struct {
	UserID string `json:"user_id"`
	Role   string `json:"role"`
	jwt.RegisteredClaims
}
+
+// Auth middleware that accepts the auth service as dependency
+func Auth(authService auth.Service) gin.HandlerFunc {
+	return func(c *gin.Context) {
+		// Get token from request
+		token := extractTokenFromHeader(c)
+		if token == "" {
+			c.JSON(http.StatusUnauthorized, gin.H{"error": "Authorization header required"})
+			c.Abort()
+			return
+		}
+
+		// Validate token using the auth service
+		clientID, err := authService.ValidateToken(c.Request.Context(), token)
+		if err != nil {
+			c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid token"})
+			c.Abort()
+			return
+		}
+
+		// Set client ID in context for later use
+		c.Set("clientID", clientID)
+		c.Next()
+	}
+}
+
+// GenerateToken creates a new JWT token for a user
+func GenerateToken(userID, role string) (string, error) {
+	// Set claims with expiration time
+	claims := &Claims{
+		UserID: userID,
+		Role:   role,
+		RegisteredClaims: jwt.RegisteredClaims{
+			ExpiresAt: jwt.NewNumericDate(time.Now().Add(24 * time.Hour)), // Token expires in 24 hours
+			IssuedAt:  jwt.NewNumericDate(time.Now()),
+			Subject:   userID,
+		},
+	}
+
+	// Create token with claims
+	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+	// Sign the token with the secret key
+	tokenString, err := token.SignedString(jwtSecret)
+	if err != nil {
+		return "", err
+	}
+
+	return tokenString, nil
+}
+
+// GetUserIDFromContext retrieves the user ID from the request context
+func GetUserIDFromContext(ctx context.Context) string {
+	userID, ok := ctx.Value("user_id").(string)
+	if !ok {
+		return ""
+	}
+	return userID
+}
+
+// GetRoleFromContext retrieves the user role from the request context
+func GetRoleFromContext(ctx context.Context) string {
+	role, ok := ctx.Value("role").(string)
+	if !ok {
+		return ""
+	}
+	return role
+}
+
+// IsAdmin checks if the user in the context has admin role
+func IsAdmin(ctx context.Context) bool {
+	role := GetRoleFromContext(ctx)
+	return role == "admin"
+}
+
+// extractTokenFromHeader gets the JWT token from the Authorization header
+func extractTokenFromHeader(c *gin.Context) string {
+	authHeader := c.GetHeader("Authorization")
+	if authHeader == "" {
+		return ""
+	}
+
+	// Check if the header has the format "Bearer <token>"
+	if len(authHeader) > 7 && authHeader[:7] == "Bearer " {
+		return authHeader[7:]
+	}
+
+	return ""
+}

+ 35 - 0
middleware/logging.go

@@ -0,0 +1,35 @@
+package middleware
+
+import (
+	"log"
+	"time"
+
+	"github.com/gin-gonic/gin"
+)
+
+// Logger is a middleware that logs HTTP requests
+func Logger(c *gin.Context) {
+	start := time.Now()
+
+	// Process request
+	c.Next()
+
+	// Log the request details
+	duration := time.Since(start)
+	statusCode := c.Writer.Status()
+	method := c.Request.Method
+	path := c.Request.URL.Path
+
+	log.Printf("%s %s %d %s %s", method, path, statusCode, duration, c.ClientIP())
+	if statusCode >= 400 {
+		log.Printf("Error: %s %s %d", method, path, statusCode)
+	}
+	// Log the response size
+	responseSize := c.Writer.Size()
+	log.Printf("Response size: %d bytes", responseSize)
+	// Log the request duration
+	log.Printf("Request duration: %s", duration)
+	// Log the client IP
+	clientIP := c.ClientIP()
+	log.Printf("Client IP: %s", clientIP)
+}

+ 86 - 0
middleware/metrics.go

@@ -0,0 +1,86 @@
+package middleware
+
+import (
+	"strconv"
+	"time"
+
+	"github.com/gin-gonic/gin"
+	"github.com/prometheus/client_golang/prometheus"
+	"github.com/prometheus/client_golang/prometheus/promauto"
+)
+
// Prometheus collectors, registered on the default registry at package init
// via promauto.
//
// NOTE(review): Metrics labels these with the raw c.Request.URL.Path; with
// parameterized routes (e.g. /tickets/:id) that makes label cardinality
// unbounded — consider using the route template instead. Confirm against
// the router setup.
var (
	// RequestsTotal counts the number of HTTP requests processed
	RequestsTotal = promauto.NewCounterVec(
		prometheus.CounterOpts{
			Name: "http_requests_total",
			Help: "Total number of HTTP requests",
		},
		[]string{"method", "path", "status"},
	)

	// RequestDuration observes the HTTP request duration
	RequestDuration = promauto.NewHistogramVec(
		prometheus.HistogramOpts{
			Name:    "http_request_duration_seconds",
			Help:    "HTTP request duration in seconds",
			Buckets: prometheus.DefBuckets,
		},
		[]string{"method", "path"},
	)

	// ResponseSize observes the HTTP response size
	ResponseSize = promauto.NewHistogramVec(
		prometheus.HistogramOpts{
			Name:    "http_response_size_bytes",
			Help:    "HTTP response size in bytes",
			Buckets: prometheus.ExponentialBuckets(100, 10, 8), // From 100B to 10GB
		},
		[]string{"method", "path"},
	)
)
+
+// Metrics is a middleware that collects Prometheus metrics for HTTP requests
+func Metrics(c *gin.Context) {
+	start := time.Now()
+
+	// Create a custom response writer to capture status code and response size
+	mrw := &metricsResponseWriter{ResponseWriter: c.Writer}
+	c.Writer = mrw
+
+	// Process the request
+	c.Next()
+
+	// Record metrics
+	duration := time.Since(start).Seconds()
+	statusCode := strconv.Itoa(mrw.statusCode)
+	method := c.Request.Method
+	path := c.Request.URL.Path
+
+	RequestsTotal.WithLabelValues(method, path, statusCode).Inc()
+	RequestDuration.WithLabelValues(method, path).Observe(duration)
+	ResponseSize.WithLabelValues(method, path).Observe(float64(mrw.responseSize))
+}
+
// metricsResponseWriter is a custom ResponseWriter that captures status code and response size.
// It wraps gin's writer so the Metrics middleware can read both values after
// the handler chain completes.
type metricsResponseWriter struct {
	gin.ResponseWriter
	statusCode   int // status recorded via WriteHeader
	responseSize int // bytes written through Write
}
+
// WriteHeader captures the status code
// before delegating to the wrapped gin writer.
func (mrw *metricsResponseWriter) WriteHeader(code int) {
	mrw.statusCode = code
	mrw.ResponseWriter.WriteHeader(code)
}
+
+// Write captures the response size
+func (mrw *metricsResponseWriter) Write(b []byte) (int, error) {
+	n, err := mrw.ResponseWriter.Write(b)
+	if err != nil {
+		return n, err
+	}
+	mrw.responseSize += n
+	return n, nil
+}

+ 49 - 0
models/autodeploy.go

@@ -0,0 +1,49 @@
+package models
+
+import (
+	"encoding/json"
+	"time"
+)
+
// AutoDeploySettings contains auto-deployment configuration:
// platform-wide defaults (provider, template, region, tags) applied when a
// client is deployed automatically, plus webhook and notification settings.
type AutoDeploySettings struct {
	Enabled               bool              `json:"enabled"`
	DefaultProviderID     string            `json:"default_provider_id"`
	DefaultTemplateID     string            `json:"default_template_id"`
	DefaultRegion         string            `json:"default_region"`
	AutoDeployNewClients  bool              `json:"auto_deploy_new_clients"`
	WebhookSecret         string            `json:"webhook_secret,omitempty"` // omitted from JSON when empty
	WebhookURL            string            `json:"webhook_url"`
	DefaultTags           map[string]string `json:"default_tags"`
	ResourceLimits        ResourceLimits    `json:"resource_limits"`
	NotificationEndpoints []string          `json:"notification_endpoints"`
}

// ResourceLimits defines resource constraints for auto-deployments.
type ResourceLimits struct {
	MaxCPUCores int `json:"max_cpu_cores"`
	MaxMemoryGB int `json:"max_memory_gb"`
	MaxDiskGB   int `json:"max_disk_gb"`
	MaxCount    int `json:"max_count"` // Maximum number of instances
}

// AutoDeployRequest represents a request to automatically deploy for a client.
// Optional fields fall back to the defaults in AutoDeploySettings when empty
// — presumably; confirm against the handler that consumes this.
type AutoDeployRequest struct {
	ClientID   string            `json:"client_id"`
	TemplateID string            `json:"template_id,omitempty"`
	ProviderID string            `json:"provider_id,omitempty"`
	Region     string            `json:"region,omitempty"`
	Tags       map[string]string `json:"tags,omitempty"`
	Priority   string            `json:"priority,omitempty"` // Low, Medium, High
}

// WebhookPayload represents data sent to or received from a webhook.
// Data is kept as json.RawMessage so event-specific decoding can be deferred
// to the consumer.
type WebhookPayload struct {
	Event      string          `json:"event"`
	Timestamp  time.Time       `json:"timestamp"`
	ClientID   string          `json:"client_id"`
	Data       json.RawMessage `json:"data"`
	Signature  string          `json:"signature,omitempty"`
	RequestID  string          `json:"request_id"`
	ProviderID string          `json:"provider_id,omitempty"`
}

+ 22 - 0
models/client.go

@@ -0,0 +1,22 @@
+package models
+
+import (
+	"time"
+)
+
// Client represents a client record; domain fields are still a stub.
type Client struct {
	ID string `json:"id"`
	// TODO: Add Client fields
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// GetID returns the client's ID.
func (c *Client) GetID() string {
	return c.ID
}

// SetID sets the client's ID.
func (c *Client) SetID(id string) {
	c.ID = id
}

+ 41 - 0
models/deployment.go

@@ -0,0 +1,41 @@
+package models
+
+import (
+	"time"
+)
+
// Deployment records a client workload deployed to a provider VPS.
//
// NOTE(review): `omitempty` on the time.Time fields has no effect —
// encoding/json does not treat a zero struct as empty, so unset timestamps
// marshal as "0001-01-01T00:00:00Z". Use *time.Time if omission is intended.
type Deployment struct {
	ID            string    `json:"id"`
	ClientID      string    `json:"client_id"`
	ProviderID    string    `json:"provider_id"`
	TemplateID    string    `json:"template_id"`
	Status        string    `json:"status"` // Pending, Running, Failed, Terminated
	Region        string    `json:"region"`
	VPSID         string    `json:"vps_id"` // ID assigned by provider
	IPAddress     string    `json:"ip_address"`
	Configuration string    `json:"configuration"` // JSON configuration
	CreatedAt     time.Time `json:"created_at"`
	UpdatedAt     time.Time `json:"updated_at"`
	DeployedAt    time.Time `json:"deployed_at,omitempty"`
	TerminatedAt  time.Time `json:"terminated_at,omitempty"`
}

// AutoDeployConfig describes an auto-deploy rule for a project branch.
type AutoDeployConfig struct {
	ID        string    `json:"id"`
	ProjectID string    `json:"project_id"`
	Branch    string    `json:"branch"`
	Trigger   string    `json:"trigger"`
	Enabled   bool      `json:"enabled"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// AutoDeployHistory is one recorded auto-deploy run and its outcome status.
type AutoDeployHistory struct {
	ID        string    `json:"id"`
	ProjectID string    `json:"project_id"`
	Branch    string    `json:"branch"`
	Trigger   string    `json:"trigger"`
	Status    string    `json:"status"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

+ 22 - 0
models/metrics.go

@@ -0,0 +1,22 @@
+package models
+
+import (
+	"time"
+)
+
// Metrics is a monitoring metric definition; domain fields are still a stub.
type Metrics struct {
	ID string `json:"id"`
	// TODO: Add Metrics fields
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// MetricSample is a single observed value of a metric at a point in time.
type MetricSample struct {
	ID         string    `json:"id"`
	MetricID   string    `json:"metric_id"`
	MetricName string    `json:"metric_name"`
	Value      float64   `json:"value"`
	Timestamp  time.Time `json:"timestamp"`
	CreatedAt  time.Time `json:"created_at"`
	UpdatedAt  time.Time `json:"updated_at"`
}

+ 12 - 0
models/provider.go

@@ -0,0 +1,12 @@
+package models
+
+import (
+	"time"
+)
+
// Provider is a cloud provider record; domain fields are still a stub.
type Provider struct {
	ID        string    `json:"id"`
	// TODO: Add Provider fields
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

+ 21 - 0
models/template.go

@@ -0,0 +1,21 @@
+package models
+
+import (
+	"time"
+)
+
// Template is a reusable deployment blueprint: Docker build/compose sources,
// a config template for client-specific configuration, and the resources the
// workload needs.
type Template struct {
	ID                   string `json:"id"`
	Name                 string `json:"name"`
	Description          string `json:"description"`
	Dockerfile           string `json:"dockerfile"`
	DockerCompose        string `json:"docker_compose"`
	ConfigTemplate       string `json:"config_template"` // For generating client-specific configs
	ResourceRequirements struct {
		CPU      int `json:"cpu"`
		MemoryMB int `json:"memory_mb"`
		DiskGB   int `json:"disk_gb"`
	} `json:"resource_requirements"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

+ 12 - 0
models/ticket.go

@@ -0,0 +1,12 @@
+package models
+
+import (
+	"time"
+)
+
// Ticket is a support ticket record; domain fields are still a stub.
type Ticket struct {
	ID        string    `json:"id"`
	// TODO: Add Ticket fields
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

+ 19 - 0
models/user.go

@@ -0,0 +1,19 @@
+package models
+
// User represents a user in the system
type User struct {
	ID       string
	Username string
	Email    string
	// Other user fields
}

// GetID returns the user's ID.
// NOTE(review): GetID/SetID presumably satisfy the dbmanager Entity
// interface — confirm; otherwise prefer the idiomatic ID()/SetID() naming.
func (u *User) GetID() string {
	return u.ID
}

// SetID sets the user's ID
func (u *User) SetID(id string) {
	u.ID = id
}

+ 54 - 0
services/auth.go

@@ -0,0 +1,54 @@
+package services
+
+// TODO: Implement auth service
+
+import (
+	"errors"
+
+	"github.com/golang-jwt/jwt"
+)
+
// Sentinel errors for token validation failures; callers should compare
// with errors.Is.
var (
	ErrTokenExpired     = errors.New("token has expired")
	ErrInvalidToken     = errors.New("token is invalid")
	ErrTokenBlacklisted = errors.New("token has been revoked")
)
+
// JwtService issues, validates, refreshes, and revokes JWTs for API clients.
// All of its methods are still TODO stubs.
type JwtService struct {
	PrivateKey []byte // signing key; name suggests asymmetric signing — TODO confirm intended algorithm
	TokenTTL   int    // token lifetime; units unspecified (seconds? minutes?) — TODO confirm
}
+
// Claims is the JWT payload for service-issued tokens.
// NOTE(review): this embeds StandardClaims from the legacy
// github.com/golang-jwt/jwt (v3) API while middleware/auth.go uses
// jwt/v5 RegisteredClaims — consider converging on v5.
type Claims struct {
	jwt.StandardClaims
	ClientId string `json:"client_id"`
	Role     string `json:"role"`
}
+
+func NewJwtService() *JwtService {
+	return &JwtService{}
+}
+
+func (s *JwtService) GenerateToken(clientId string) (string, error) {
+	//TODO: Implement token generation logic
+
+	return "", nil
+}
+
+func (s *JwtService) ValidateToken(token string) (string, error) {
+	// TODO: Implement token validation logic
+
+	return "", nil
+}
+
+func (s *JwtService) RefreshToken(token string) (string, error) {
+	// TODO: Implement token refresh logic
+
+	return "", nil
+}
+
+func (s *JwtService) Logout(token string) error {
+	// TODO: Implement logout logic
+
+	return nil
+}

+ 3 - 0
services/autodeploy.go

@@ -0,0 +1,3 @@
+package services
+
+// TODO: Implement autodeploy service

+ 3 - 0
services/clients.go

@@ -0,0 +1,3 @@
+package services
+
+// TODO: Implement clients service

+ 3 - 0
services/deployments.go

@@ -0,0 +1,3 @@
+package services
+
+// TODO: Implement deployments service

+ 3 - 0
services/monitoring.go

@@ -0,0 +1,3 @@
+package services
+
+// TODO: Implement monitoring service

+ 3 - 0
services/providers.go

@@ -0,0 +1,3 @@
+package services
+
+// TODO: Implement providers service

+ 3 - 0
services/templates.go

@@ -0,0 +1,3 @@
+package services
+
+// TODO: Implement templates service

+ 3 - 0
services/tickets.go

@@ -0,0 +1,3 @@
+package services
+
+// TODO: Implement tickets service

+ 20 - 0
vendor/github.com/beorn7/perks/LICENSE

@@ -0,0 +1,20 @@
+Copyright (C) 2013 Blake Mizerany
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 2388 - 0
vendor/github.com/beorn7/perks/quantile/exampledata.txt

@@ -0,0 +1,2388 @@
+8
+5
+26
+12
+5
+235
+13
+6
+28
+30
+3
+3
+3
+3
+5
+2
+33
+7
+2
+4
+7
+12
+14
+5
+8
+3
+10
+4
+5
+3
+6
+6
+209
+20
+3
+10
+14
+3
+4
+6
+8
+5
+11
+7
+3
+2
+3
+3
+212
+5
+222
+4
+10
+10
+5
+6
+3
+8
+3
+10
+254
+220
+2
+3
+5
+24
+5
+4
+222
+7
+3
+3
+223
+8
+15
+12
+14
+14
+3
+2
+2
+3
+13
+3
+11
+4
+4
+6
+5
+7
+13
+5
+3
+5
+2
+5
+3
+5
+2
+7
+15
+17
+14
+3
+6
+6
+3
+17
+5
+4
+7
+6
+4
+4
+8
+6
+8
+3
+9
+3
+6
+3
+4
+5
+3
+3
+660
+4
+6
+10
+3
+6
+3
+2
+5
+13
+2
+4
+4
+10
+4
+8
+4
+3
+7
+9
+9
+3
+10
+37
+3
+13
+4
+12
+3
+6
+10
+8
+5
+21
+2
+3
+8
+3
+2
+3
+3
+4
+12
+2
+4
+8
+8
+4
+3
+2
+20
+1
+6
+32
+2
+11
+6
+18
+3
+8
+11
+3
+212
+3
+4
+2
+6
+7
+12
+11
+3
+2
+16
+10
+6
+4
+6
+3
+2
+7
+3
+2
+2
+2
+2
+5
+6
+4
+3
+10
+3
+4
+6
+5
+3
+4
+4
+5
+6
+4
+3
+4
+4
+5
+7
+5
+5
+3
+2
+7
+2
+4
+12
+4
+5
+6
+2
+4
+4
+8
+4
+15
+13
+7
+16
+5
+3
+23
+5
+5
+7
+3
+2
+9
+8
+7
+5
+8
+11
+4
+10
+76
+4
+47
+4
+3
+2
+7
+4
+2
+3
+37
+10
+4
+2
+20
+5
+4
+4
+10
+10
+4
+3
+7
+23
+240
+7
+13
+5
+5
+3
+3
+2
+5
+4
+2
+8
+7
+19
+2
+23
+8
+7
+2
+5
+3
+8
+3
+8
+13
+5
+5
+5
+2
+3
+23
+4
+9
+8
+4
+3
+3
+5
+220
+2
+3
+4
+6
+14
+3
+53
+6
+2
+5
+18
+6
+3
+219
+6
+5
+2
+5
+3
+6
+5
+15
+4
+3
+17
+3
+2
+4
+7
+2
+3
+3
+4
+4
+3
+2
+664
+6
+3
+23
+5
+5
+16
+5
+8
+2
+4
+2
+24
+12
+3
+2
+3
+5
+8
+3
+5
+4
+3
+14
+3
+5
+8
+2
+3
+7
+9
+4
+2
+3
+6
+8
+4
+3
+4
+6
+5
+3
+3
+6
+3
+19
+4
+4
+6
+3
+6
+3
+5
+22
+5
+4
+4
+3
+8
+11
+4
+9
+7
+6
+13
+4
+4
+4
+6
+17
+9
+3
+3
+3
+4
+3
+221
+5
+11
+3
+4
+2
+12
+6
+3
+5
+7
+5
+7
+4
+9
+7
+14
+37
+19
+217
+16
+3
+5
+2
+2
+7
+19
+7
+6
+7
+4
+24
+5
+11
+4
+7
+7
+9
+13
+3
+4
+3
+6
+28
+4
+4
+5
+5
+2
+5
+6
+4
+4
+6
+10
+5
+4
+3
+2
+3
+3
+6
+5
+5
+4
+3
+2
+3
+7
+4
+6
+18
+16
+8
+16
+4
+5
+8
+6
+9
+13
+1545
+6
+215
+6
+5
+6
+3
+45
+31
+5
+2
+2
+4
+3
+3
+2
+5
+4
+3
+5
+7
+7
+4
+5
+8
+5
+4
+749
+2
+31
+9
+11
+2
+11
+5
+4
+4
+7
+9
+11
+4
+5
+4
+7
+3
+4
+6
+2
+15
+3
+4
+3
+4
+3
+5
+2
+13
+5
+5
+3
+3
+23
+4
+4
+5
+7
+4
+13
+2
+4
+3
+4
+2
+6
+2
+7
+3
+5
+5
+3
+29
+5
+4
+4
+3
+10
+2
+3
+79
+16
+6
+6
+7
+7
+3
+5
+5
+7
+4
+3
+7
+9
+5
+6
+5
+9
+6
+3
+6
+4
+17
+2
+10
+9
+3
+6
+2
+3
+21
+22
+5
+11
+4
+2
+17
+2
+224
+2
+14
+3
+4
+4
+2
+4
+4
+4
+4
+5
+3
+4
+4
+10
+2
+6
+3
+3
+5
+7
+2
+7
+5
+6
+3
+218
+2
+2
+5
+2
+6
+3
+5
+222
+14
+6
+33
+3
+2
+5
+3
+3
+3
+9
+5
+3
+3
+2
+7
+4
+3
+4
+3
+5
+6
+5
+26
+4
+13
+9
+7
+3
+221
+3
+3
+4
+4
+4
+4
+2
+18
+5
+3
+7
+9
+6
+8
+3
+10
+3
+11
+9
+5
+4
+17
+5
+5
+6
+6
+3
+2
+4
+12
+17
+6
+7
+218
+4
+2
+4
+10
+3
+5
+15
+3
+9
+4
+3
+3
+6
+29
+3
+3
+4
+5
+5
+3
+8
+5
+6
+6
+7
+5
+3
+5
+3
+29
+2
+31
+5
+15
+24
+16
+5
+207
+4
+3
+3
+2
+15
+4
+4
+13
+5
+5
+4
+6
+10
+2
+7
+8
+4
+6
+20
+5
+3
+4
+3
+12
+12
+5
+17
+7
+3
+3
+3
+6
+10
+3
+5
+25
+80
+4
+9
+3
+2
+11
+3
+3
+2
+3
+8
+7
+5
+5
+19
+5
+3
+3
+12
+11
+2
+6
+5
+5
+5
+3
+3
+3
+4
+209
+14
+3
+2
+5
+19
+4
+4
+3
+4
+14
+5
+6
+4
+13
+9
+7
+4
+7
+10
+2
+9
+5
+7
+2
+8
+4
+6
+5
+5
+222
+8
+7
+12
+5
+216
+3
+4
+4
+6
+3
+14
+8
+7
+13
+4
+3
+3
+3
+3
+17
+5
+4
+3
+33
+6
+6
+33
+7
+5
+3
+8
+7
+5
+2
+9
+4
+2
+233
+24
+7
+4
+8
+10
+3
+4
+15
+2
+16
+3
+3
+13
+12
+7
+5
+4
+207
+4
+2
+4
+27
+15
+2
+5
+2
+25
+6
+5
+5
+6
+13
+6
+18
+6
+4
+12
+225
+10
+7
+5
+2
+2
+11
+4
+14
+21
+8
+10
+3
+5
+4
+232
+2
+5
+5
+3
+7
+17
+11
+6
+6
+23
+4
+6
+3
+5
+4
+2
+17
+3
+6
+5
+8
+3
+2
+2
+14
+9
+4
+4
+2
+5
+5
+3
+7
+6
+12
+6
+10
+3
+6
+2
+2
+19
+5
+4
+4
+9
+2
+4
+13
+3
+5
+6
+3
+6
+5
+4
+9
+6
+3
+5
+7
+3
+6
+6
+4
+3
+10
+6
+3
+221
+3
+5
+3
+6
+4
+8
+5
+3
+6
+4
+4
+2
+54
+5
+6
+11
+3
+3
+4
+4
+4
+3
+7
+3
+11
+11
+7
+10
+6
+13
+223
+213
+15
+231
+7
+3
+7
+228
+2
+3
+4
+4
+5
+6
+7
+4
+13
+3
+4
+5
+3
+6
+4
+6
+7
+2
+4
+3
+4
+3
+3
+6
+3
+7
+3
+5
+18
+5
+6
+8
+10
+3
+3
+3
+2
+4
+2
+4
+4
+5
+6
+6
+4
+10
+13
+3
+12
+5
+12
+16
+8
+4
+19
+11
+2
+4
+5
+6
+8
+5
+6
+4
+18
+10
+4
+2
+216
+6
+6
+6
+2
+4
+12
+8
+3
+11
+5
+6
+14
+5
+3
+13
+4
+5
+4
+5
+3
+28
+6
+3
+7
+219
+3
+9
+7
+3
+10
+6
+3
+4
+19
+5
+7
+11
+6
+15
+19
+4
+13
+11
+3
+7
+5
+10
+2
+8
+11
+2
+6
+4
+6
+24
+6
+3
+3
+3
+3
+6
+18
+4
+11
+4
+2
+5
+10
+8
+3
+9
+5
+3
+4
+5
+6
+2
+5
+7
+4
+4
+14
+6
+4
+4
+5
+5
+7
+2
+4
+3
+7
+3
+3
+6
+4
+5
+4
+4
+4
+3
+3
+3
+3
+8
+14
+2
+3
+5
+3
+2
+4
+5
+3
+7
+3
+3
+18
+3
+4
+4
+5
+7
+3
+3
+3
+13
+5
+4
+8
+211
+5
+5
+3
+5
+2
+5
+4
+2
+655
+6
+3
+5
+11
+2
+5
+3
+12
+9
+15
+11
+5
+12
+217
+2
+6
+17
+3
+3
+207
+5
+5
+4
+5
+9
+3
+2
+8
+5
+4
+3
+2
+5
+12
+4
+14
+5
+4
+2
+13
+5
+8
+4
+225
+4
+3
+4
+5
+4
+3
+3
+6
+23
+9
+2
+6
+7
+233
+4
+4
+6
+18
+3
+4
+6
+3
+4
+4
+2
+3
+7
+4
+13
+227
+4
+3
+5
+4
+2
+12
+9
+17
+3
+7
+14
+6
+4
+5
+21
+4
+8
+9
+2
+9
+25
+16
+3
+6
+4
+7
+8
+5
+2
+3
+5
+4
+3
+3
+5
+3
+3
+3
+2
+3
+19
+2
+4
+3
+4
+2
+3
+4
+4
+2
+4
+3
+3
+3
+2
+6
+3
+17
+5
+6
+4
+3
+13
+5
+3
+3
+3
+4
+9
+4
+2
+14
+12
+4
+5
+24
+4
+3
+37
+12
+11
+21
+3
+4
+3
+13
+4
+2
+3
+15
+4
+11
+4
+4
+3
+8
+3
+4
+4
+12
+8
+5
+3
+3
+4
+2
+220
+3
+5
+223
+3
+3
+3
+10
+3
+15
+4
+241
+9
+7
+3
+6
+6
+23
+4
+13
+7
+3
+4
+7
+4
+9
+3
+3
+4
+10
+5
+5
+1
+5
+24
+2
+4
+5
+5
+6
+14
+3
+8
+2
+3
+5
+13
+13
+3
+5
+2
+3
+15
+3
+4
+2
+10
+4
+4
+4
+5
+5
+3
+5
+3
+4
+7
+4
+27
+3
+6
+4
+15
+3
+5
+6
+6
+5
+4
+8
+3
+9
+2
+6
+3
+4
+3
+7
+4
+18
+3
+11
+3
+3
+8
+9
+7
+24
+3
+219
+7
+10
+4
+5
+9
+12
+2
+5
+4
+4
+4
+3
+3
+19
+5
+8
+16
+8
+6
+22
+3
+23
+3
+242
+9
+4
+3
+3
+5
+7
+3
+3
+5
+8
+3
+7
+5
+14
+8
+10
+3
+4
+3
+7
+4
+6
+7
+4
+10
+4
+3
+11
+3
+7
+10
+3
+13
+6
+8
+12
+10
+5
+7
+9
+3
+4
+7
+7
+10
+8
+30
+9
+19
+4
+3
+19
+15
+4
+13
+3
+215
+223
+4
+7
+4
+8
+17
+16
+3
+7
+6
+5
+5
+4
+12
+3
+7
+4
+4
+13
+4
+5
+2
+5
+6
+5
+6
+6
+7
+10
+18
+23
+9
+3
+3
+6
+5
+2
+4
+2
+7
+3
+3
+2
+5
+5
+14
+10
+224
+6
+3
+4
+3
+7
+5
+9
+3
+6
+4
+2
+5
+11
+4
+3
+3
+2
+8
+4
+7
+4
+10
+7
+3
+3
+18
+18
+17
+3
+3
+3
+4
+5
+3
+3
+4
+12
+7
+3
+11
+13
+5
+4
+7
+13
+5
+4
+11
+3
+12
+3
+6
+4
+4
+21
+4
+6
+9
+5
+3
+10
+8
+4
+6
+4
+4
+6
+5
+4
+8
+6
+4
+6
+4
+4
+5
+9
+6
+3
+4
+2
+9
+3
+18
+2
+4
+3
+13
+3
+6
+6
+8
+7
+9
+3
+2
+16
+3
+4
+6
+3
+2
+33
+22
+14
+4
+9
+12
+4
+5
+6
+3
+23
+9
+4
+3
+5
+5
+3
+4
+5
+3
+5
+3
+10
+4
+5
+5
+8
+4
+4
+6
+8
+5
+4
+3
+4
+6
+3
+3
+3
+5
+9
+12
+6
+5
+9
+3
+5
+3
+2
+2
+2
+18
+3
+2
+21
+2
+5
+4
+6
+4
+5
+10
+3
+9
+3
+2
+10
+7
+3
+6
+6
+4
+4
+8
+12
+7
+3
+7
+3
+3
+9
+3
+4
+5
+4
+4
+5
+5
+10
+15
+4
+4
+14
+6
+227
+3
+14
+5
+216
+22
+5
+4
+2
+2
+6
+3
+4
+2
+9
+9
+4
+3
+28
+13
+11
+4
+5
+3
+3
+2
+3
+3
+5
+3
+4
+3
+5
+23
+26
+3
+4
+5
+6
+4
+6
+3
+5
+5
+3
+4
+3
+2
+2
+2
+7
+14
+3
+6
+7
+17
+2
+2
+15
+14
+16
+4
+6
+7
+13
+6
+4
+5
+6
+16
+3
+3
+28
+3
+6
+15
+3
+9
+2
+4
+6
+3
+3
+22
+4
+12
+6
+7
+2
+5
+4
+10
+3
+16
+6
+9
+2
+5
+12
+7
+5
+5
+5
+5
+2
+11
+9
+17
+4
+3
+11
+7
+3
+5
+15
+4
+3
+4
+211
+8
+7
+5
+4
+7
+6
+7
+6
+3
+6
+5
+6
+5
+3
+4
+4
+26
+4
+6
+10
+4
+4
+3
+2
+3
+3
+4
+5
+9
+3
+9
+4
+4
+5
+5
+8
+2
+4
+2
+3
+8
+4
+11
+19
+5
+8
+6
+3
+5
+6
+12
+3
+2
+4
+16
+12
+3
+4
+4
+8
+6
+5
+6
+6
+219
+8
+222
+6
+16
+3
+13
+19
+5
+4
+3
+11
+6
+10
+4
+7
+7
+12
+5
+3
+3
+5
+6
+10
+3
+8
+2
+5
+4
+7
+2
+4
+4
+2
+12
+9
+6
+4
+2
+40
+2
+4
+10
+4
+223
+4
+2
+20
+6
+7
+24
+5
+4
+5
+2
+20
+16
+6
+5
+13
+2
+3
+3
+19
+3
+2
+4
+5
+6
+7
+11
+12
+5
+6
+7
+7
+3
+5
+3
+5
+3
+14
+3
+4
+4
+2
+11
+1
+7
+3
+9
+6
+11
+12
+5
+8
+6
+221
+4
+2
+12
+4
+3
+15
+4
+5
+226
+7
+218
+7
+5
+4
+5
+18
+4
+5
+9
+4
+4
+2
+9
+18
+18
+9
+5
+6
+6
+3
+3
+7
+3
+5
+4
+4
+4
+12
+3
+6
+31
+5
+4
+7
+3
+6
+5
+6
+5
+11
+2
+2
+11
+11
+6
+7
+5
+8
+7
+10
+5
+23
+7
+4
+3
+5
+34
+2
+5
+23
+7
+3
+6
+8
+4
+4
+4
+2
+5
+3
+8
+5
+4
+8
+25
+2
+3
+17
+8
+3
+4
+8
+7
+3
+15
+6
+5
+7
+21
+9
+5
+6
+6
+5
+3
+2
+3
+10
+3
+6
+3
+14
+7
+4
+4
+8
+7
+8
+2
+6
+12
+4
+213
+6
+5
+21
+8
+2
+5
+23
+3
+11
+2
+3
+6
+25
+2
+3
+6
+7
+6
+6
+4
+4
+6
+3
+17
+9
+7
+6
+4
+3
+10
+7
+2
+3
+3
+3
+11
+8
+3
+7
+6
+4
+14
+36
+3
+4
+3
+3
+22
+13
+21
+4
+2
+7
+4
+4
+17
+15
+3
+7
+11
+2
+4
+7
+6
+209
+6
+3
+2
+2
+24
+4
+9
+4
+3
+3
+3
+29
+2
+2
+4
+3
+3
+5
+4
+6
+3
+3
+2
+4

+ 316 - 0
vendor/github.com/beorn7/perks/quantile/stream.go

@@ -0,0 +1,316 @@
+// Package quantile computes approximate quantiles over an unbounded data
+// stream within low memory and CPU bounds.
+//
+// A small amount of accuracy is traded to achieve the above properties.
+//
+// Multiple streams can be merged before calling Query to generate a single set
+// of results. This is meaningful when the streams represent the same type of
+// data. See Merge and Samples.
+//
+// For more detailed information about the algorithm used, see:
+//
+// Effective Computation of Biased Quantiles over Data Streams
+//
+// http://www.cs.rutgers.edu/~muthu/bquant.pdf
+package quantile
+
+import (
+	"math"
+	"sort"
+)
+
+// Sample holds an observed value and meta information for compression. JSON
+// tags have been added for convenience.
+type Sample struct {
+	Value float64 `json:",string"`
+	Width float64 `json:",string"`
+	Delta float64 `json:",string"`
+}
+
+// Samples represents a slice of samples. It implements sort.Interface.
+type Samples []Sample
+
+func (a Samples) Len() int           { return len(a) }
+func (a Samples) Less(i, j int) bool { return a[i].Value < a[j].Value }
+func (a Samples) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
+
+type invariant func(s *stream, r float64) float64
+
+// NewLowBiased returns an initialized Stream for low-biased quantiles
+// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but
+// error guarantees can still be given even for the lower ranks of the data
+// distribution.
+//
+// The provided epsilon is a relative error, i.e. the true quantile of a value
+// returned by a query is guaranteed to be within (1±Epsilon)*Quantile.
+//
+// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error
+// properties.
+func NewLowBiased(epsilon float64) *Stream {
+	ƒ := func(s *stream, r float64) float64 {
+		return 2 * epsilon * r
+	}
+	return newStream(ƒ)
+}
+
+// NewHighBiased returns an initialized Stream for high-biased quantiles
+// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but
+// error guarantees can still be given even for the higher ranks of the data
+// distribution.
+//
+// The provided epsilon is a relative error, i.e. the true quantile of a value
+// returned by a query is guaranteed to be within 1-(1±Epsilon)*(1-Quantile).
+//
+// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error
+// properties.
+func NewHighBiased(epsilon float64) *Stream {
+	ƒ := func(s *stream, r float64) float64 {
+		return 2 * epsilon * (s.n - r)
+	}
+	return newStream(ƒ)
+}
+
+// NewTargeted returns an initialized Stream concerned with a particular set of
+// quantile values that are supplied a priori. Knowing these a priori reduces
+// space and computation time. The targets map maps the desired quantiles to
+// their absolute errors, i.e. the true quantile of a value returned by a query
+// is guaranteed to be within (Quantile±Epsilon).
+//
+// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error properties.
+func NewTargeted(targetMap map[float64]float64) *Stream {
+	// Convert map to slice to avoid slow iterations on a map.
+	// ƒ is called on the hot path, so converting the map to a slice
+	// beforehand results in significant CPU savings.
+	targets := targetMapToSlice(targetMap)
+
+	ƒ := func(s *stream, r float64) float64 {
+		var m = math.MaxFloat64
+		var f float64
+		for _, t := range targets {
+			if t.quantile*s.n <= r {
+				f = (2 * t.epsilon * r) / t.quantile
+			} else {
+				f = (2 * t.epsilon * (s.n - r)) / (1 - t.quantile)
+			}
+			if f < m {
+				m = f
+			}
+		}
+		return m
+	}
+	return newStream(ƒ)
+}
+
+type target struct {
+	quantile float64
+	epsilon  float64
+}
+
+func targetMapToSlice(targetMap map[float64]float64) []target {
+	targets := make([]target, 0, len(targetMap))
+
+	for quantile, epsilon := range targetMap {
+		t := target{
+			quantile: quantile,
+			epsilon:  epsilon,
+		}
+		targets = append(targets, t)
+	}
+
+	return targets
+}
+
+// Stream computes quantiles for a stream of float64s. It is not thread-safe by
+// design. Take care when using across multiple goroutines.
+type Stream struct {
+	*stream
+	b      Samples
+	sorted bool
+}
+
+func newStream(ƒ invariant) *Stream {
+	x := &stream{ƒ: ƒ}
+	return &Stream{x, make(Samples, 0, 500), true}
+}
+
+// Insert inserts v into the stream.
+func (s *Stream) Insert(v float64) {
+	s.insert(Sample{Value: v, Width: 1})
+}
+
+func (s *Stream) insert(sample Sample) {
+	s.b = append(s.b, sample)
+	s.sorted = false
+	if len(s.b) == cap(s.b) {
+		s.flush()
+	}
+}
+
+// Query returns the computed qth percentiles value. If s was created with
+// NewTargeted, and q is not in the set of quantiles provided a priori, Query
+// will return an unspecified result.
+func (s *Stream) Query(q float64) float64 {
+	if !s.flushed() {
+		// Fast path when there hasn't been enough data for a flush;
+		// this also yields better accuracy for small sets of data.
+		l := len(s.b)
+		if l == 0 {
+			return 0
+		}
+		i := int(math.Ceil(float64(l) * q))
+		if i > 0 {
+			i -= 1
+		}
+		s.maybeSort()
+		return s.b[i].Value
+	}
+	s.flush()
+	return s.stream.query(q)
+}
+
+// Merge merges samples into the underlying streams samples. This is handy when
+// merging multiple streams from separate threads, database shards, etc.
+//
+// ATTENTION: This method is broken and does not yield correct results. The
+// underlying algorithm is not capable of merging streams correctly.
+func (s *Stream) Merge(samples Samples) {
+	sort.Sort(samples)
+	s.stream.merge(samples)
+}
+
+// Reset reinitializes and clears the list reusing the samples buffer memory.
+func (s *Stream) Reset() {
+	s.stream.reset()
+	s.b = s.b[:0]
+}
+
+// Samples returns stream samples held by s.
+func (s *Stream) Samples() Samples {
+	if !s.flushed() {
+		return s.b
+	}
+	s.flush()
+	return s.stream.samples()
+}
+
+// Count returns the total number of samples observed in the stream
+// since initialization.
+func (s *Stream) Count() int {
+	return len(s.b) + s.stream.count()
+}
+
+func (s *Stream) flush() {
+	s.maybeSort()
+	s.stream.merge(s.b)
+	s.b = s.b[:0]
+}
+
+func (s *Stream) maybeSort() {
+	if !s.sorted {
+		s.sorted = true
+		sort.Sort(s.b)
+	}
+}
+
+func (s *Stream) flushed() bool {
+	return len(s.stream.l) > 0
+}
+
+type stream struct {
+	n float64
+	l []Sample
+	ƒ invariant
+}
+
+func (s *stream) reset() {
+	s.l = s.l[:0]
+	s.n = 0
+}
+
+func (s *stream) insert(v float64) {
+	s.merge(Samples{{v, 1, 0}})
+}
+
+func (s *stream) merge(samples Samples) {
+	// TODO(beorn7): This tries to merge not only individual samples, but
+	// whole summaries. The paper doesn't mention merging summaries at
+	// all. Unittests show that the merging is inaccurate. Find out how to
+	// do merges properly.
+	var r float64
+	i := 0
+	for _, sample := range samples {
+		for ; i < len(s.l); i++ {
+			c := s.l[i]
+			if c.Value > sample.Value {
+				// Insert at position i.
+				s.l = append(s.l, Sample{})
+				copy(s.l[i+1:], s.l[i:])
+				s.l[i] = Sample{
+					sample.Value,
+					sample.Width,
+					math.Max(sample.Delta, math.Floor(s.ƒ(s, r))-1),
+					// TODO(beorn7): How to calculate delta correctly?
+				}
+				i++
+				goto inserted
+			}
+			r += c.Width
+		}
+		s.l = append(s.l, Sample{sample.Value, sample.Width, 0})
+		i++
+	inserted:
+		s.n += sample.Width
+		r += sample.Width
+	}
+	s.compress()
+}
+
+func (s *stream) count() int {
+	return int(s.n)
+}
+
+func (s *stream) query(q float64) float64 {
+	t := math.Ceil(q * s.n)
+	t += math.Ceil(s.ƒ(s, t) / 2)
+	p := s.l[0]
+	var r float64
+	for _, c := range s.l[1:] {
+		r += p.Width
+		if r+c.Width+c.Delta > t {
+			return p.Value
+		}
+		p = c
+	}
+	return p.Value
+}
+
+func (s *stream) compress() {
+	if len(s.l) < 2 {
+		return
+	}
+	x := s.l[len(s.l)-1]
+	xi := len(s.l) - 1
+	r := s.n - 1 - x.Width
+
+	for i := len(s.l) - 2; i >= 0; i-- {
+		c := s.l[i]
+		if c.Width+x.Width+x.Delta <= s.ƒ(s, r) {
+			x.Width += c.Width
+			s.l[xi] = x
+			// Remove element at i.
+			copy(s.l[i:], s.l[i+1:])
+			s.l = s.l[:len(s.l)-1]
+			xi -= 1
+		} else {
+			x = c
+			xi = i
+		}
+		r -= c.Width
+	}
+}
+
+func (s *stream) samples() Samples {
+	samples := make(Samples, len(s.l))
+	copy(samples, s.l)
+	return samples
+}

+ 52 - 0
vendor/github.com/bytedance/sonic/.gitignore

@@ -0,0 +1,52 @@
+*.o
+*.swp
+*.swm
+*.swn
+*.a
+*.so
+_obj
+_test
+*.[568vq]
+[568vq].out
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+_testmain.go
+*.exe
+*.exe~
+*.test
+*.prof
+*.rar
+*.zip
+*.gz
+*.psd
+*.bmd
+*.cfg
+*.pptx
+*.log
+*nohup.out
+*settings.pyc
+*.sublime-project
+*.sublime-workspace
+.DS_Store
+/.idea/
+/.vscode/
+/output/
+/vendor/
+/Gopkg.lock
+/Gopkg.toml
+coverage.html
+coverage.out
+coverage.xml
+junit.xml
+*.profile
+*.svg
+*.out
+ast/test.out
+ast/bench.sh
+
+!testdata/*.json.gz
+fuzz/testdata
+*__debug_bin

+ 6 - 0
vendor/github.com/bytedance/sonic/.gitmodules

@@ -0,0 +1,6 @@
+[submodule "cloudwego"]
+	path = tools/asm2asm
+	url = https://github.com/cloudwego/asm2asm.git
+[submodule "tools/simde"]
+	path = tools/simde
+	url = https://github.com/simd-everywhere/simde.git

+ 24 - 0
vendor/github.com/bytedance/sonic/.licenserc.yaml

@@ -0,0 +1,24 @@
+header:
+  license:
+    spdx-id: Apache-2.0
+    copyright-owner: ByteDance Inc.
+
+  paths:
+    - '**/*.go'
+    - '**/*.s'
+
+  paths-ignore:
+    - 'ast/asm.s'                                   # empty file
+    - 'decoder/asm.s'                               # empty file
+    - 'encoder/asm.s'                               # empty file
+    - 'internal/caching/asm.s'                      # empty file
+    - 'internal/jit/asm.s'                          # empty file
+    - 'internal/native/avx/native_amd64.s'          # auto-generated by asm2asm
+    - 'internal/native/avx/native_subr_amd64.go'    # auto-generated by asm2asm
+    - 'internal/native/avx2/native_amd64.s'         # auto-generated by asm2asm
+    - 'internal/native/avx2/native_subr_amd64.go'   # auto-generated by asm2asm
+    - 'internal/resolver/asm.s'                     # empty file
+    - 'internal/rt/asm.s'                           # empty file
+    - 'internal/loader/asm.s'                       # empty file
+
+  comment: on-failure

+ 128 - 0
vendor/github.com/bytedance/sonic/CODE_OF_CONDUCT.md

@@ -0,0 +1,128 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+  and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the
+  overall community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or
+  advances of any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email
+  address, without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+wudi.daniel@bytedance.com.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or
+permanent ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior,  harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within
+the community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.0, available at
+https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
+
+Community Impact Guidelines were inspired by [Mozilla's code of conduct
+enforcement ladder](https://github.com/mozilla/diversity).
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see the FAQ at
+https://www.contributor-covenant.org/faq. Translations are available at
+https://www.contributor-covenant.org/translations.

+ 63 - 0
vendor/github.com/bytedance/sonic/CONTRIBUTING.md

@@ -0,0 +1,63 @@
+# How to Contribute
+
+## Your First Pull Request
+We use GitHub for our codebase. You can start by reading [How To Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests).
+
+## Without Semantic Versioning
+We keep the stable code in branch `main`, like `golang.org/x`. Development is based on branch `develop`. We promise **forward compatibility** by adding a new package directory with the suffix `v2`/`v3` when the code has breaking changes.
+
+## Branch Organization
+We use [git-flow](https://nvie.com/posts/a-successful-git-branching-model/) as our branch organization, as known as [FDD](https://en.wikipedia.org/wiki/Feature-driven_development)
+
+
+## Bugs
+### 1. How to Find Known Issues
+We are using [Github Issues](https://github.com/bytedance/sonic/issues) for our public bugs. We keep a close eye on this and try to make it clear when we have an internal fix in progress. Before filing a new task, try to make sure your problem doesn’t already exist.
+
+### 2. Reporting New Issues
+Providing reduced test code is the recommended way to report an issue. It can be placed in:
+- Just in issues
+- [Golang Playground](https://play.golang.org/)
+
+### 3. Security Bugs
+Please do not report security bugs in public issues. Instead, contact us via the [support email](mailto:sonic@bytedance.com).
+
+## How to Get in Touch
+- [Email](mailto:wudi.daniel@bytedance.com)
+
+## Submit a Pull Request
+Before you submit your Pull Request (PR) consider the following guidelines:
+1. Search [GitHub](https://github.com/bytedance/sonic/pulls) for an open or closed PR that relates to your submission. You don't want to duplicate existing efforts.
+2. Be sure that an issue describes the problem you're fixing, or documents the design for the feature you'd like to add. Discussing the design upfront helps to ensure that we're ready to accept your work.
+3. [Fork](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo) the bytedance/sonic repo.
+4. In your forked repository, make your changes in a new git branch:
+    ```
+    git checkout -b bugfix/security_bug develop
+    ```
+5. Create your patch, including appropriate test cases.
+6. Follow our [Style Guides](#code-style-guides).
+7. Commit your changes using a descriptive commit message that follows [AngularJS Git Commit Message Conventions](https://docs.google.com/document/d/1QrDFcIiPjSLDn3EL15IJygNPiHORgU1_OOAqWjiDU5Y/edit).
+   Adherence to these conventions is necessary because release notes will be automatically generated from these messages.
+8. Push your branch to GitHub:
+    ```
+    git push origin bugfix/security_bug
+    ```
+9. In GitHub, send a pull request to `sonic:main`
+
+Note: you must use one of `optimize/feature/bugfix/doc/ci/test/refactor` following a slash(`/`) as the branch prefix.
+
+Your pr title and commit message should follow https://www.conventionalcommits.org/.
+
+## Contribution Prerequisites
+- Our development environment keeps up with [Go Official](https://golang.org/project/).
+- You need to fully check your code with lint tools before submitting your pull request: [gofmt](https://golang.org/pkg/cmd/gofmt/) & [golangci-lint](https://github.com/golangci/golangci-lint)
+- You are familiar with [Github](https://github.com) 
+- Maybe you need familiar with [Actions](https://github.com/features/actions)(our default workflow tool).
+
+## Code Style Guides
+See [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments).
+
+Good resources:
+- [Effective Go](https://golang.org/doc/effective_go)
+- [Pingcap General advice](https://pingcap.github.io/style-guide/general.html)
+- [Uber Go Style Guide](https://github.com/uber-go/guide/blob/master/style.md)

+ 0 - 0
vendor/github.com/bytedance/sonic/CREDITS


+ 201 - 0
vendor/github.com/bytedance/sonic/LICENSE

@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

+ 471 - 0
vendor/github.com/bytedance/sonic/README.md

@@ -0,0 +1,471 @@
+# Sonic
+
+English | [中文](README_ZH_CN.md)
+
+A blazingly fast JSON serializing & deserializing library, accelerated by JIT (just-in-time compiling) and SIMD (single-instruction-multiple-data).
+
+## Requirement
+
+- Go 1.16~1.22
+- Linux / MacOS / Windows (requires Go 1.17 or above)
+- Amd64 ARCH
+
+## Features
+
+- Runtime object binding without code generation
+- Complete APIs for JSON value manipulation
+- Fast, fast, fast!
+
+## APIs
+
+see [go.dev](https://pkg.go.dev/github.com/bytedance/sonic)
+
+## Benchmarks
+
+For **all sizes** of json and **all scenarios** of usage, **Sonic performs best**.
+
+- [Medium](https://github.com/bytedance/sonic/blob/main/decoder/testdata_test.go#L19) (13KB, 300+ key, 6 layers)
+
+```powershell
+goversion: 1.17.1
+goos: darwin
+goarch: amd64
+cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
+BenchmarkEncoder_Generic_Sonic-16                      32393 ns/op         402.40 MB/s       11965 B/op          4 allocs/op
+BenchmarkEncoder_Generic_Sonic_Fast-16                 21668 ns/op         601.57 MB/s       10940 B/op          4 allocs/op
+BenchmarkEncoder_Generic_JsonIter-16                   42168 ns/op         309.12 MB/s       14345 B/op        115 allocs/op
+BenchmarkEncoder_Generic_GoJson-16                     65189 ns/op         199.96 MB/s       23261 B/op         16 allocs/op
+BenchmarkEncoder_Generic_StdLib-16                    106322 ns/op         122.60 MB/s       49136 B/op        789 allocs/op
+BenchmarkEncoder_Binding_Sonic-16                       6269 ns/op        2079.26 MB/s       14173 B/op          4 allocs/op
+BenchmarkEncoder_Binding_Sonic_Fast-16                  5281 ns/op        2468.16 MB/s       12322 B/op          4 allocs/op
+BenchmarkEncoder_Binding_JsonIter-16                   20056 ns/op         649.93 MB/s        9488 B/op          2 allocs/op
+BenchmarkEncoder_Binding_GoJson-16                      8311 ns/op        1568.32 MB/s        9481 B/op          1 allocs/op
+BenchmarkEncoder_Binding_StdLib-16                     16448 ns/op         792.52 MB/s        9479 B/op          1 allocs/op
+BenchmarkEncoder_Parallel_Generic_Sonic-16              6681 ns/op        1950.93 MB/s       12738 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Generic_Sonic_Fast-16         4179 ns/op        3118.99 MB/s       10757 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Generic_JsonIter-16           9861 ns/op        1321.84 MB/s       14362 B/op        115 allocs/op
+BenchmarkEncoder_Parallel_Generic_GoJson-16            18850 ns/op         691.52 MB/s       23278 B/op         16 allocs/op
+BenchmarkEncoder_Parallel_Generic_StdLib-16            45902 ns/op         283.97 MB/s       49174 B/op        789 allocs/op
+BenchmarkEncoder_Parallel_Binding_Sonic-16              1480 ns/op        8810.09 MB/s       13049 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Binding_Sonic_Fast-16         1209 ns/op        10785.23 MB/s      11546 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Binding_JsonIter-16           6170 ns/op        2112.58 MB/s        9504 B/op          2 allocs/op
+BenchmarkEncoder_Parallel_Binding_GoJson-16             3321 ns/op        3925.52 MB/s        9496 B/op          1 allocs/op
+BenchmarkEncoder_Parallel_Binding_StdLib-16             3739 ns/op        3486.49 MB/s        9480 B/op          1 allocs/op
+
+BenchmarkDecoder_Generic_Sonic-16                      66812 ns/op         195.10 MB/s       57602 B/op        723 allocs/op
+BenchmarkDecoder_Generic_Sonic_Fast-16                 54523 ns/op         239.07 MB/s       49786 B/op        313 allocs/op
+BenchmarkDecoder_Generic_StdLib-16                    124260 ns/op         104.90 MB/s       50869 B/op        772 allocs/op
+BenchmarkDecoder_Generic_JsonIter-16                   91274 ns/op         142.81 MB/s       55782 B/op       1068 allocs/op
+BenchmarkDecoder_Generic_GoJson-16                     88569 ns/op         147.17 MB/s       66367 B/op        973 allocs/op
+BenchmarkDecoder_Binding_Sonic-16                      32557 ns/op         400.38 MB/s       28302 B/op        137 allocs/op
+BenchmarkDecoder_Binding_Sonic_Fast-16                 28649 ns/op         455.00 MB/s       24999 B/op         34 allocs/op
+BenchmarkDecoder_Binding_StdLib-16                    111437 ns/op         116.97 MB/s       10576 B/op        208 allocs/op
+BenchmarkDecoder_Binding_JsonIter-16                   35090 ns/op         371.48 MB/s       14673 B/op        385 allocs/op
+BenchmarkDecoder_Binding_GoJson-16                     28738 ns/op         453.59 MB/s       22039 B/op         49 allocs/op
+BenchmarkDecoder_Parallel_Generic_Sonic-16             12321 ns/op        1057.91 MB/s       57233 B/op        723 allocs/op
+BenchmarkDecoder_Parallel_Generic_Sonic_Fast-16        10644 ns/op        1224.64 MB/s       49362 B/op        313 allocs/op
+BenchmarkDecoder_Parallel_Generic_StdLib-16            57587 ns/op         226.35 MB/s       50874 B/op        772 allocs/op
+BenchmarkDecoder_Parallel_Generic_JsonIter-16          38666 ns/op         337.12 MB/s       55789 B/op       1068 allocs/op
+BenchmarkDecoder_Parallel_Generic_GoJson-16            30259 ns/op         430.79 MB/s       66370 B/op        974 allocs/op
+BenchmarkDecoder_Parallel_Binding_Sonic-16              5965 ns/op        2185.28 MB/s       27747 B/op        137 allocs/op
+BenchmarkDecoder_Parallel_Binding_Sonic_Fast-16         5170 ns/op        2521.31 MB/s       24715 B/op         34 allocs/op
+BenchmarkDecoder_Parallel_Binding_StdLib-16            27582 ns/op         472.58 MB/s       10576 B/op        208 allocs/op
+BenchmarkDecoder_Parallel_Binding_JsonIter-16          13571 ns/op         960.51 MB/s       14685 B/op        385 allocs/op
+BenchmarkDecoder_Parallel_Binding_GoJson-16            10031 ns/op        1299.51 MB/s       22111 B/op         49 allocs/op
+
+BenchmarkGetOne_Sonic-16                                3276 ns/op        3975.78 MB/s          24 B/op          1 allocs/op
+BenchmarkGetOne_Gjson-16                                9431 ns/op        1380.81 MB/s           0 B/op          0 allocs/op
+BenchmarkGetOne_Jsoniter-16                            51178 ns/op         254.46 MB/s       27936 B/op        647 allocs/op
+BenchmarkGetOne_Parallel_Sonic-16                      216.7 ns/op       60098.95 MB/s          24 B/op          1 allocs/op
+BenchmarkGetOne_Parallel_Gjson-16                       1076 ns/op        12098.62 MB/s          0 B/op          0 allocs/op
+BenchmarkGetOne_Parallel_Jsoniter-16                   17741 ns/op         734.06 MB/s       27945 B/op        647 allocs/op
+BenchmarkSetOne_Sonic-16                               9571 ns/op         1360.61 MB/s        1584 B/op         17 allocs/op
+BenchmarkSetOne_Sjson-16                               36456 ns/op         357.22 MB/s       52180 B/op          9 allocs/op
+BenchmarkSetOne_Jsoniter-16                            79475 ns/op         163.86 MB/s       45862 B/op        964 allocs/op
+BenchmarkSetOne_Parallel_Sonic-16                      850.9 ns/op       15305.31 MB/s        1584 B/op         17 allocs/op
+BenchmarkSetOne_Parallel_Sjson-16                      18194 ns/op         715.77 MB/s       52247 B/op          9 allocs/op
+BenchmarkSetOne_Parallel_Jsoniter-16                   33560 ns/op         388.05 MB/s       45892 B/op        964 allocs/op
+BenchmarkLoadNode/LoadAll()-16                         11384 ns/op        1143.93 MB/s        6307 B/op         25 allocs/op
+BenchmarkLoadNode_Parallel/LoadAll()-16                 5493 ns/op        2370.68 MB/s        7145 B/op         25 allocs/op
+BenchmarkLoadNode/Interface()-16                       17722 ns/op         734.85 MB/s       13323 B/op         88 allocs/op
+BenchmarkLoadNode_Parallel/Interface()-16              10330 ns/op        1260.70 MB/s       15178 B/op         88 allocs/op
+```
+
+- [Small](https://github.com/bytedance/sonic/blob/main/testdata/small.go) (400B, 11 keys, 3 layers)
+![small benchmarks](./docs/imgs/bench-small.png)
+- [Large](https://github.com/bytedance/sonic/blob/main/testdata/twitter.json) (635KB, 10000+ key, 6 layers)
+![large benchmarks](./docs/imgs/bench-large.png)
+
+See [bench.sh](https://github.com/bytedance/sonic/blob/main/scripts/bench.sh) for benchmark codes.
+
+## How it works
+
+See [INTRODUCTION.md](./docs/INTRODUCTION.md).
+
+## Usage
+
+### Marshal/Unmarshal
+
+Default behaviors are mostly consistent with `encoding/json`, except HTML escaping form (see [Escape HTML](https://github.com/bytedance/sonic/blob/main/README.md#escape-html)) and `SortKeys` feature (optional support see [Sort Keys](https://github.com/bytedance/sonic/blob/main/README.md#sort-keys)) that is **NOT** in conformity to [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259).
+
+ ```go
+import "github.com/bytedance/sonic"
+
+var data YourSchema
+// Marshal
+output, err := sonic.Marshal(&data)
+// Unmarshal
+err := sonic.Unmarshal(output, &data)
+ ```
+
+### Streaming IO
+
+Sonic supports decoding json from `io.Reader` or encoding objects into `io.Writer`, aims at handling multiple values as well as reducing memory consumption.
+
+- encoder
+
+```go
+var o1 = map[string]interface{}{
+    "a": "b",
+}
+var o2 = 1
+var w = bytes.NewBuffer(nil)
+var enc = sonic.ConfigDefault.NewEncoder(w)
+enc.Encode(o1)
+enc.Encode(o2)
+fmt.Println(w.String())
+// Output:
+// {"a":"b"}
+// 1
+```
+
+- decoder
+
+```go
+var o =  map[string]interface{}{}
+var r = strings.NewReader(`{"a":"b"}{"1":"2"}`)
+var dec = sonic.ConfigDefault.NewDecoder(r)
+dec.Decode(&o)
+dec.Decode(&o)
+fmt.Printf("%+v", o)
+// Output:
+// map[1:2 a:b]
+```
+
+### Use Number/Use Int64
+
+ ```go
+import "github.com/bytedance/sonic/decoder"
+
+var input = `1`
+var data interface{}
+
+// default float64
+dc := decoder.NewDecoder(input)
+dc.Decode(&data) // data == float64(1)
+// use json.Number
+dc = decoder.NewDecoder(input)
+dc.UseNumber()
+dc.Decode(&data) // data == json.Number("1")
+// use int64
+dc = decoder.NewDecoder(input)
+dc.UseInt64()
+dc.Decode(&data) // data == int64(1)
+
+root, err := sonic.GetFromString(input)
+// Get json.Number
+jn := root.Number()
+jm := root.InterfaceUseNumber().(json.Number) // jn == jm
+// Get float64
+fn := root.Float64()
+fm := root.Interface().(float64) // fn == fm
+ ```
+
+### Sort Keys
+
+On account of the performance loss from sorting (roughly 10%), sonic doesn't enable this feature by default. If your component depends on it to work (like [zstd](https://github.com/facebook/zstd)), Use it like this:
+
+```go
+import "github.com/bytedance/sonic"
+import "github.com/bytedance/sonic/encoder"
+
+// Binding map only
+m := map[string]interface{}{}
+v, err := encoder.Encode(m, encoder.SortMapKeys)
+
+// Or ast.Node.SortKeys() before marshal
+root, _ := sonic.Get(JSON)
+err := root.SortKeys()
+```
+
+### Escape HTML
+
+On account of the performance loss (roughly 15%), sonic doesn't enable this feature by default. You can use `encoder.EscapeHTML` option to open this feature (align with `encoding/json.HTMLEscape`).
+
+```go
+import "github.com/bytedance/sonic"
+import "github.com/bytedance/sonic/encoder"
+
+v := map[string]string{"&&":"<>"}
+ret, err := encoder.Encode(v, encoder.EscapeHTML) // ret == `{"\u0026\u0026":"\u003c\u003e"}`
+```
+
+### Compact Format
+
+Sonic encodes primitive objects (struct/map...) as compact-format JSON by default, except marshaling `json.RawMessage` or `json.Marshaler`: sonic ensures validating their output JSON but **DOES NOT** compact them, for performance concerns. We provide the option `encoder.CompactMarshaler` to add a compacting process.
+
+### Print Error
+
+If there is invalid syntax in the input JSON, sonic will return `decoder.SyntaxError`, which supports pretty-printing of the error position
+
+```go
+import "github.com/bytedance/sonic"
+import "github.com/bytedance/sonic/decoder"
+
+var data interface{}
+err := sonic.UnmarshalString("[[[}]]", &data)
+if err != nil {
+    /* One line by default */
+    println(err.Error()) // "Syntax error at index 3: invalid char\n\n\t[[[}]]\n\t...^..\n"
+    /* Pretty print */
+    if e, ok := err.(decoder.SyntaxError); ok {
+        /*Syntax error at index 3: invalid char
+
+            [[[}]]
+            ...^..
+        */
+        print(e.Description())
+    } else if me, ok := err.(*decoder.MismatchTypeError); ok {
+        // decoder.MismatchTypeError is new to Sonic v1.6.0
+        print(me.Description())
+    }
+}
+```
+
+#### Mismatched Types [Sonic v1.6.0]
+
+If there is a **mismatch-typed** value for a given key, sonic will report `decoder.MismatchTypeError` (if there are many, it reports the last one), but will still skip the wrong value and keep decoding the next JSON.
+
+```go
+import "github.com/bytedance/sonic"
+import "github.com/bytedance/sonic/decoder"
+
+var data = struct{
+    A int
+    B int
+}{}
+err := sonic.UnmarshalString(`{"A":"1","B":1}`, &data)
+println(err.Error())    // Mismatch type int with value string "at index 5: mismatched type with value\n\n\t{\"A\":\"1\",\"B\":1}\n\t.....^.........\n"
+fmt.Printf("%+v", data) // {A:0 B:1}
+```
+
+### Ast.Node
+
+Sonic/ast.Node is a completely self-contained AST for JSON. It implements serialization and deserialization both and provides robust APIs for obtaining and modification of generic data.
+
+#### Get/Index
+
+Search partial JSON by given paths, which must be non-negative integer or string, or nil
+
+```go
+import "github.com/bytedance/sonic"
+
+input := []byte(`{"key1":[{},{"key2":{"key3":[1,2,3]}}]}`)
+
+// no path, returns entire json
+root, err := sonic.Get(input)
+raw := root.Raw() // == string(input)
+
+// multiple paths
+root, err := sonic.Get(input, "key1", 1, "key2")
+sub := root.Get("key3").Index(2).Int64() // == 3
+```
+
+**Tip**: since `Index()` uses offset to locate data, which is much faster than scanning like `Get()`, we suggest you use it as much as possible. And sonic also provides another API `IndexOrGet()` to underlying use offset as well as ensure the key is matched.
+
+#### Set/Unset
+
+Modify the json content by Set()/Unset()
+
+```go
+import "github.com/bytedance/sonic"
+
+// Set
+exist, err := root.Set("key4", NewBool(true)) // exist == false
+alias1 := root.Get("key4")
+println(alias1.Valid()) // true
+alias2 := root.Index(1)
+println(alias1 == alias2) // true
+
+// Unset
+exist, err := root.UnsetByIndex(1) // exist == true
+println(root.Get("key4").Check()) // "value not exist"
+```
+
+#### Serialize
+
+To encode `ast.Node` as json, use `MarshalJson()` or `json.Marshal()` (MUST pass the node's pointer)
+
+```go
+import (
+    "encoding/json"
+    "github.com/bytedance/sonic"
+)
+
+buf, err := root.MarshalJson()
+println(string(buf))                // {"key1":[{},{"key2":{"key3":[1,2,3]}}]}
+exp, err := json.Marshal(&root)     // WARN: use pointer
+println(string(buf) == string(exp)) // true
+```
+
+#### APIs
+
+- validation: `Check()`, `Error()`, `Valid()`, `Exist()`
+- searching: `Index()`, `Get()`, `IndexPair()`, `IndexOrGet()`, `GetByPath()`
+- go-type casting: `Int64()`, `Float64()`, `String()`, `Number()`, `Bool()`, `Map[UseNumber|UseNode]()`, `Array[UseNumber|UseNode]()`, `Interface[UseNumber|UseNode]()`
+- go-type packing: `NewRaw()`, `NewNumber()`, `NewNull()`, `NewBool()`, `NewString()`, `NewObject()`, `NewArray()`
+- iteration: `Values()`, `Properties()`, `ForEach()`, `SortKeys()`
+- modification: `Set()`, `SetByIndex()`, `Add()`
+
+### Ast.Visitor
+
+Sonic provides an advanced API for fully parsing JSON into non-standard types (neither `struct` nor `map[string]interface{}`) without using any intermediate representation (`ast.Node` or `interface{}`). For example, you might have the following types which are like `interface{}` but actually not `interface{}`:
+
+```go
+type UserNode interface {}
+
+// the following types implement the UserNode interface.
+type (
+    UserNull    struct{}
+    UserBool    struct{ Value bool }
+    UserInt64   struct{ Value int64 }
+    UserFloat64 struct{ Value float64 }
+    UserString  struct{ Value string }
+    UserObject  struct{ Value map[string]UserNode }
+    UserArray   struct{ Value []UserNode }
+)
+```
+
+Sonic provides the following API to return **the preorder traversal of a JSON AST**. The `ast.Visitor` is a SAX style interface which is used in some C++ JSON library. You should implement `ast.Visitor` by yourself and pass it to `ast.Preorder()` method. In your visitor you can make your custom types to represent JSON values. There may be an O(n) space container (such as stack) in your visitor to record the object / array hierarchy.
+
+```go
+func Preorder(str string, visitor Visitor, opts *VisitorOptions) error
+
+type Visitor interface {
+    OnNull() error
+    OnBool(v bool) error
+    OnString(v string) error
+    OnInt64(v int64, n json.Number) error
+    OnFloat64(v float64, n json.Number) error
+    OnObjectBegin(capacity int) error
+    OnObjectKey(key string) error
+    OnObjectEnd() error
+    OnArrayBegin(capacity int) error
+    OnArrayEnd() error
+}
+```
+
+See [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) for detailed usage. We also implement a demo visitor for `UserNode` in [ast/visitor_test.go](https://github.com/bytedance/sonic/blob/main/ast/visitor_test.go).
+
+## Compatibility
+
+Sonic **DOES NOT** ensure to support all environments, due to the difficulty of developing high-performance codes. For developers who use sonic to build their applications in different environments, we have the following suggestions:
+
+- Developing on **Mac M1**: Make sure you have Rosetta 2 installed on your machine, and set `GOARCH=amd64` when building your application. Rosetta 2 can automatically translate x86 binaries to arm64 binaries and run x86 applications on Mac M1.
+- Developing on **Linux arm64**: You can install qemu and use the `qemu-x86_64 -cpu max` command to run x86 binaries on arm64 for applications built with sonic. The qemu can achieve a similar transfer effect to Rosetta 2 on Mac M1.
+
+For developers who want to use sonic on Linux arm64 without qemu, or those who want to handle JSON strictly consistent with `encoding/json`, we provide some compatible APIs as `sonic.API`
+
+- `ConfigDefault`: the sonic's default config (`EscapeHTML=false`,`SortKeys=false`...) to run on sonic-supporting environment. It will fall back to `encoding/json` with the corresponding config, and some options like `SortKeys=false` will be invalid.
+- `ConfigStd`: the std-compatible config (`EscapeHTML=true`,`SortKeys=true`...) to run on sonic-supporting environment. It will fall back to `encoding/json`.
+- `ConfigFastest`: the fastest config (`NoQuoteTextMarshaler=true`) to run on sonic-supporting environment. It will fall back to `encoding/json` with the corresponding config, and some options will be invalid.
+
+## Tips
+
+### Pretouch
+
+Since Sonic uses [golang-asm](https://github.com/twitchyliquid64/golang-asm) as a JIT assembler, which is NOT very suitable for runtime compiling, first-hit running of a huge schema may cause request-timeout or even process-OOM. For better stability, we advise **using `Pretouch()` for huge-schema or compact-memory applications** before `Marshal()/Unmarshal()`.
+
+```go
+import (
+    "reflect"
+    "github.com/bytedance/sonic"
+    "github.com/bytedance/sonic/option"
+)
+
+func init() {
+    var v HugeStruct
+
+    // For most large types (nesting depth <= option.DefaultMaxInlineDepth)
+    err := sonic.Pretouch(reflect.TypeOf(v))
+
+    // with more CompileOption...
+    err := sonic.Pretouch(reflect.TypeOf(v),
+        // If the type is too deep nesting (nesting depth > option.DefaultMaxInlineDepth),
+        // you can set compile recursive loops in Pretouch for better stability in JIT.
+        option.WithCompileRecursiveDepth(loop),
+        // For a large nested struct, try to set a smaller depth to reduce compiling time.
+        option.WithCompileMaxInlineDepth(depth),
+    )
+}
+```
+
+### Copy string
+
+When decoding **string values without any escaped characters**, sonic references them from the origin JSON buffer instead of mallocing a new buffer to copy. This helps a lot for CPU performance but may leave the whole JSON buffer in memory as long as the decoded objects are being used. In practice, we found the extra memory introduced by referring JSON buffer is usually 20% ~ 80% of decoded objects. Once an application holds these objects for a long time (for example, cache the decoded objects for reusing), its in-use memory on the server may go up. - `Config.CopyString`/`decoder.CopyString()`: We provide the option for `Decode()` / `Unmarshal()` users to choose not to reference the JSON buffer, which may cause a decline in CPU performance to some degree.
+
+- `GetFromStringNoCopy()`: For memory safety, `sonic.Get()` / `sonic.GetFromString()` now copies return JSON. If users want to get json more quickly and not care about memory usage, you can use `GetFromStringNoCopy()` to return a JSON directly referenced from source.
+
+### Pass string or []byte?
+
+For alignment to `encoding/json`, we provide API to pass `[]byte` as an argument, but the string-to-bytes copy is conducted at the same time considering safety, which may lose performance when the origin JSON is huge. Therefore, you can use `UnmarshalString()` and `GetFromString()` to pass a string, as long as your origin data is a string or **nocopy-cast** is safe for your []byte. We also provide API `MarshalString()` for convenient **nocopy-cast** of encoded JSON []byte, which is safe since sonic's output bytes is always duplicated and unique.
+
+### Accelerate `encoding.TextMarshaler`
+
+To ensure data security, sonic.Encoder quotes and escapes string values from `encoding.TextMarshaler` interfaces by default, which may degrade performance much if most of your data is in form of them. We provide `encoder.NoQuoteTextMarshaler` to skip these operations, which means you **MUST** ensure their output string escaped and quoted following [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259).
+
+### Better performance for generic data
+
+In **fully-parsed** scenario, `Unmarshal()` performs better than `Get()`+`Node.Interface()`. But if you only have a part of the schema for specific json, you can combine `Get()` and `Unmarshal()` together:
+
+```go
+import "github.com/bytedance/sonic"
+
+node, err := sonic.GetFromString(_TwitterJson, "statuses", 3, "user")
+var user User // your partial schema...
+err = sonic.UnmarshalString(node.Raw(), &user)
+```
+
+Even if you don't have any schema, use `ast.Node` as the container of generic values instead of `map` or `interface`:
+
+```go
+import "github.com/bytedance/sonic"
+
+root, err := sonic.GetFromString(_TwitterJson)
+user := root.GetByPath("statuses", 3, "user")  // === root.Get("statuses").Index(3).Get("user")
+err = user.Check()
+
+// err = user.LoadAll() // only call this when you want to use 'user' concurrently...
+go someFunc(user)
+```
+
+Why? Because `ast.Node` stores its children using `array`:
+
+- `Array`'s performance is **much better** than `Map` when Inserting (Deserialize) and Scanning (Serialize) data;
+- **Hashing** (`map[x]`) is not as efficient as **Indexing** (`array[x]`), which `ast.Node` can conduct on **both array and object**;
+- Using `Interface()`/`Map()` means Sonic must parse all the underlying values, while `ast.Node` can parse them **on demand**.
+
+**CAUTION:** `ast.Node` **DOESN'T** ensure concurrent security directly, due to its **lazy-load** design. However, you can call `Node.Load()`/`Node.LoadAll()` to achieve that, which may bring performance reduction while it still works faster than converting to `map` or `interface{}`
+
+### Ast.Node or Ast.Visitor?
+
+For generic data, `ast.Node` should be enough for your needs in most cases.
+
+However, `ast.Node` is designed for partially processing a JSON string. It has some special designs such as lazy-load which might not be suitable for directly parsing the whole JSON string like `Unmarshal()`. Although `ast.Node` is better than `map` or `interface{}`, it's still a kind of intermediate representation after all if your final types are customized, and you have to convert the above types to your custom types after parsing.
+
+For better performance, in previous case the `ast.Visitor` will be the better choice. It performs JSON decoding like `Unmarshal()` and you can directly use your final types to represents a JSON AST without any intermediate representations.
+
+But `ast.Visitor` is not a very handy API. You might need to write a lot of code to implement your visitor and carefully maintain the tree hierarchy during decoding. Please read the comments in [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) carefully if you decide to use this API.
+
+## Community
+
+Sonic is a subproject of [CloudWeGo](https://www.cloudwego.io/). We are committed to building a cloud native ecosystem.

+ 469 - 0
vendor/github.com/bytedance/sonic/README_ZH_CN.md

@@ -0,0 +1,469 @@
+# Sonic
+
+[English](README.md) | 中文
+
+一个速度奇快的 JSON 序列化/反序列化库,由 JIT (即时编译)和 SIMD (单指令流多数据流)加速。
+
+## 依赖
+
+- Go 1.16~1.22
+- Linux / MacOS / Windows(需要 Go1.17 以上)
+- Amd64 架构
+
+## 接口
+
+详见 [go.dev](https://pkg.go.dev/github.com/bytedance/sonic)
+
+## 特色
+
+- 运行时对象绑定,无需代码生成
+- 完备的 JSON 操作 API
+- 快,更快,还要更快!
+
+## 基准测试
+
+对于**所有大小**的 json 和**所有使用场景**, **Sonic 表现均为最佳**。
+
+- [中型](https://github.com/bytedance/sonic/blob/main/decoder/testdata_test.go#L19) (13kB, 300+ 键, 6 层)
+
+```powershell
+goversion: 1.17.1
+goos: darwin
+goarch: amd64
+cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
+BenchmarkEncoder_Generic_Sonic-16                      32393 ns/op         402.40 MB/s       11965 B/op          4 allocs/op
+BenchmarkEncoder_Generic_Sonic_Fast-16                 21668 ns/op         601.57 MB/s       10940 B/op          4 allocs/op
+BenchmarkEncoder_Generic_JsonIter-16                   42168 ns/op         309.12 MB/s       14345 B/op        115 allocs/op
+BenchmarkEncoder_Generic_GoJson-16                     65189 ns/op         199.96 MB/s       23261 B/op         16 allocs/op
+BenchmarkEncoder_Generic_StdLib-16                    106322 ns/op         122.60 MB/s       49136 B/op        789 allocs/op
+BenchmarkEncoder_Binding_Sonic-16                       6269 ns/op        2079.26 MB/s       14173 B/op          4 allocs/op
+BenchmarkEncoder_Binding_Sonic_Fast-16                  5281 ns/op        2468.16 MB/s       12322 B/op          4 allocs/op
+BenchmarkEncoder_Binding_JsonIter-16                   20056 ns/op         649.93 MB/s        9488 B/op          2 allocs/op
+BenchmarkEncoder_Binding_GoJson-16                      8311 ns/op        1568.32 MB/s        9481 B/op          1 allocs/op
+BenchmarkEncoder_Binding_StdLib-16                     16448 ns/op         792.52 MB/s        9479 B/op          1 allocs/op
+BenchmarkEncoder_Parallel_Generic_Sonic-16              6681 ns/op        1950.93 MB/s       12738 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Generic_Sonic_Fast-16         4179 ns/op        3118.99 MB/s       10757 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Generic_JsonIter-16           9861 ns/op        1321.84 MB/s       14362 B/op        115 allocs/op
+BenchmarkEncoder_Parallel_Generic_GoJson-16            18850 ns/op         691.52 MB/s       23278 B/op         16 allocs/op
+BenchmarkEncoder_Parallel_Generic_StdLib-16            45902 ns/op         283.97 MB/s       49174 B/op        789 allocs/op
+BenchmarkEncoder_Parallel_Binding_Sonic-16              1480 ns/op        8810.09 MB/s       13049 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Binding_Sonic_Fast-16         1209 ns/op        10785.23 MB/s      11546 B/op          4 allocs/op
+BenchmarkEncoder_Parallel_Binding_JsonIter-16           6170 ns/op        2112.58 MB/s        9504 B/op          2 allocs/op
+BenchmarkEncoder_Parallel_Binding_GoJson-16             3321 ns/op        3925.52 MB/s        9496 B/op          1 allocs/op
+BenchmarkEncoder_Parallel_Binding_StdLib-16             3739 ns/op        3486.49 MB/s        9480 B/op          1 allocs/op
+
+BenchmarkDecoder_Generic_Sonic-16                      66812 ns/op         195.10 MB/s       57602 B/op        723 allocs/op
+BenchmarkDecoder_Generic_Sonic_Fast-16                 54523 ns/op         239.07 MB/s       49786 B/op        313 allocs/op
+BenchmarkDecoder_Generic_StdLib-16                    124260 ns/op         104.90 MB/s       50869 B/op        772 allocs/op
+BenchmarkDecoder_Generic_JsonIter-16                   91274 ns/op         142.81 MB/s       55782 B/op       1068 allocs/op
+BenchmarkDecoder_Generic_GoJson-16                     88569 ns/op         147.17 MB/s       66367 B/op        973 allocs/op
+BenchmarkDecoder_Binding_Sonic-16                      32557 ns/op         400.38 MB/s       28302 B/op        137 allocs/op
+BenchmarkDecoder_Binding_Sonic_Fast-16                 28649 ns/op         455.00 MB/s       24999 B/op         34 allocs/op
+BenchmarkDecoder_Binding_StdLib-16                    111437 ns/op         116.97 MB/s       10576 B/op        208 allocs/op
+BenchmarkDecoder_Binding_JsonIter-16                   35090 ns/op         371.48 MB/s       14673 B/op        385 allocs/op
+BenchmarkDecoder_Binding_GoJson-16                     28738 ns/op         453.59 MB/s       22039 B/op         49 allocs/op
+BenchmarkDecoder_Parallel_Generic_Sonic-16             12321 ns/op        1057.91 MB/s       57233 B/op        723 allocs/op
+BenchmarkDecoder_Parallel_Generic_Sonic_Fast-16        10644 ns/op        1224.64 MB/s       49362 B/op        313 allocs/op
+BenchmarkDecoder_Parallel_Generic_StdLib-16            57587 ns/op         226.35 MB/s       50874 B/op        772 allocs/op
+BenchmarkDecoder_Parallel_Generic_JsonIter-16          38666 ns/op         337.12 MB/s       55789 B/op       1068 allocs/op
+BenchmarkDecoder_Parallel_Generic_GoJson-16            30259 ns/op         430.79 MB/s       66370 B/op        974 allocs/op
+BenchmarkDecoder_Parallel_Binding_Sonic-16              5965 ns/op        2185.28 MB/s       27747 B/op        137 allocs/op
+BenchmarkDecoder_Parallel_Binding_Sonic_Fast-16         5170 ns/op        2521.31 MB/s       24715 B/op         34 allocs/op
+BenchmarkDecoder_Parallel_Binding_StdLib-16            27582 ns/op         472.58 MB/s       10576 B/op        208 allocs/op
+BenchmarkDecoder_Parallel_Binding_JsonIter-16          13571 ns/op         960.51 MB/s       14685 B/op        385 allocs/op
+BenchmarkDecoder_Parallel_Binding_GoJson-16            10031 ns/op        1299.51 MB/s       22111 B/op         49 allocs/op
+
+BenchmarkGetOne_Sonic-16                                3276 ns/op        3975.78 MB/s          24 B/op          1 allocs/op
+BenchmarkGetOne_Gjson-16                                9431 ns/op        1380.81 MB/s           0 B/op          0 allocs/op
+BenchmarkGetOne_Jsoniter-16                            51178 ns/op         254.46 MB/s       27936 B/op        647 allocs/op
+BenchmarkGetOne_Parallel_Sonic-16                      216.7 ns/op       60098.95 MB/s          24 B/op          1 allocs/op
+BenchmarkGetOne_Parallel_Gjson-16                       1076 ns/op        12098.62 MB/s          0 B/op          0 allocs/op
+BenchmarkGetOne_Parallel_Jsoniter-16                   17741 ns/op         734.06 MB/s       27945 B/op        647 allocs/op
+BenchmarkSetOne_Sonic-16                               9571 ns/op         1360.61 MB/s        1584 B/op         17 allocs/op
+BenchmarkSetOne_Sjson-16                               36456 ns/op         357.22 MB/s       52180 B/op          9 allocs/op
+BenchmarkSetOne_Jsoniter-16                            79475 ns/op         163.86 MB/s       45862 B/op        964 allocs/op
+BenchmarkSetOne_Parallel_Sonic-16                      850.9 ns/op       15305.31 MB/s        1584 B/op         17 allocs/op
+BenchmarkSetOne_Parallel_Sjson-16                      18194 ns/op         715.77 MB/s       52247 B/op          9 allocs/op
+BenchmarkSetOne_Parallel_Jsoniter-16                   33560 ns/op         388.05 MB/s       45892 B/op        964 allocs/op
+BenchmarkLoadNode/LoadAll()-16                         11384 ns/op        1143.93 MB/s        6307 B/op         25 allocs/op
+BenchmarkLoadNode_Parallel/LoadAll()-16                 5493 ns/op        2370.68 MB/s        7145 B/op         25 allocs/op
+BenchmarkLoadNode/Interface()-16                       17722 ns/op         734.85 MB/s       13323 B/op         88 allocs/op
+BenchmarkLoadNode_Parallel/Interface()-16              10330 ns/op        1260.70 MB/s       15178 B/op         88 allocs/op
+```
+
+- [小型](https://github.com/bytedance/sonic/blob/main/testdata/small.go) (400B, 11 个键, 3 层)
+![small benchmarks](./docs/imgs/bench-small.png)
+- [大型](https://github.com/bytedance/sonic/blob/main/testdata/twitter.json) (635kB, 10000+ 个键, 6 层)
+![large benchmarks](./docs/imgs/bench-large.png)
+
+要查看基准测试代码,请参阅 [bench.sh](https://github.com/bytedance/sonic/blob/main/scripts/bench.sh) 。
+
+## 工作原理
+
+请参阅 [INTRODUCTION_ZH_CN.md](./docs/INTRODUCTION_ZH_CN.md).
+
+## 使用方式
+
+### 序列化/反序列化
+
+默认的行为基本上与 `encoding/json` 相一致,除了 HTML 转义形式(参见 [Escape HTML](https://github.com/bytedance/sonic/blob/main/README.md#escape-html)) 和 `SortKeys` 功能(参见 [Sort Keys](https://github.com/bytedance/sonic/blob/main/README.md#sort-keys))**没有**遵循 [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259) 。
+
+ ```go
+import "github.com/bytedance/sonic"
+
+var data YourSchema
+// Marshal
+output, err := sonic.Marshal(&data)
+// Unmarshal
+err := sonic.Unmarshal(output, &data)
+ ```
+
+### 流式输入输出
+
+Sonic 支持解码 `io.Reader` 中输入的 json,或将对象编码为 json 后输出至 `io.Writer`,以处理多个值并减少内存消耗。
+
+- 编码器
+
+```go
+var o1 = map[string]interface{}{
+    "a": "b",
+}
+var o2 = 1
+var w = bytes.NewBuffer(nil)
+var enc = sonic.ConfigDefault.NewEncoder(w)
+enc.Encode(o1)
+enc.Encode(o2)
+fmt.Println(w.String())
+// Output:
+// {"a":"b"}
+// 1
+```
+
+- 解码器
+
+```go
+var o =  map[string]interface{}{}
+var r = strings.NewReader(`{"a":"b"}{"1":"2"}`)
+var dec = sonic.ConfigDefault.NewDecoder(r)
+dec.Decode(&o)
+dec.Decode(&o)
+fmt.Printf("%+v", o)
+// Output:
+// map[1:2 a:b]
+```
+
+### 使用 `Number` / `int64`
+
+```go
+import "github.com/bytedance/sonic/decoder"
+
+var input = `1`
+var data interface{}
+
+// default float64
+dc := decoder.NewDecoder(input)
+dc.Decode(&data) // data == float64(1)
+// use json.Number
+dc = decoder.NewDecoder(input)
+dc.UseNumber()
+dc.Decode(&data) // data == json.Number("1")
+// use int64
+dc = decoder.NewDecoder(input)
+dc.UseInt64()
+dc.Decode(&data) // data == int64(1)
+
+root, err := sonic.GetFromString(input)
+// Get json.Number
+jn := root.Number()
+jm := root.InterfaceUseNumber().(json.Number) // jn == jm
+// Get float64
+fn := root.Float64()
+fm := root.Interface().(float64) // fn == fm
+ ```
+
+### 对键排序
+
+考虑到排序带来的性能损失(约 10% ), sonic 默认不会启用这个功能。如果你的组件依赖这个行为(如 [zstd](https://github.com/facebook/zstd)) ,可以仿照下面的例子:
+
+```go
+import "github.com/bytedance/sonic"
+import "github.com/bytedance/sonic/encoder"
+
+// Binding map only
+m := map[string]interface{}{}
+v, err := encoder.Encode(m, encoder.SortMapKeys)
+
+// Or ast.Node.SortKeys() before marshal
+root, err := sonic.Get(JSON)
+err = root.SortKeys()
+```
+
+### HTML 转义
+
+考虑到性能损失(约15%), sonic 默认不会启用这个功能。你可以使用 `encoder.EscapeHTML` 选项来开启(与 `encoding/json.HTMLEscape` 行为一致)。
+
+```go
+import "github.com/bytedance/sonic"
+
+v := map[string]string{"&&":"<>"}
+ret, err := Encode(v, EscapeHTML) // ret == `{"\u0026\u0026":"\u003c\u003e"}`
+```
+
+### 紧凑格式
+
+Sonic 默认将基本类型( `struct` , `map` 等)编码为紧凑格式的 JSON ,除非使用 `json.RawMessage` or `json.Marshaler` 进行编码: sonic 确保输出的 JSON 合法,但出于性能考虑,**不会**加工成紧凑格式。我们提供选项 `encoder.CompactMarshaler` 来添加此过程。
+
+### 打印错误
+
+如果输入的 JSON 存在无效的语法,sonic 将返回 `decoder.SyntaxError`,该错误支持错误位置的美化输出。
+
+```go
+import "github.com/bytedance/sonic"
+import "github.com/bytedance/sonic/decoder"
+
+var data interface{}
+err := sonic.UnmarshalString("[[[}]]", &data)
+if err != nil {
+    /* One line by default */
+    println(err.Error()) // "Syntax error at index 3: invalid char\n\n\t[[[}]]\n\t...^..\n"
+    /* Pretty print */
+    if e, ok := err.(decoder.SyntaxError); ok {
+        /*Syntax error at index 3: invalid char
+
+            [[[}]]
+            ...^..
+        */
+        print(e.Description())
+    } else if me, ok := err.(*decoder.MismatchTypeError); ok {
+        // decoder.MismatchTypeError is new to Sonic v1.6.0
+        print(me.Description())
+    }
+}
+```
+
+#### 类型不匹配 [Sonic v1.6.0]
+
+如果给定键中存在**类型不匹配**的值, sonic 会抛出 `decoder.MismatchTypeError` (如果有多个,只会报告最后一个),但仍会跳过错误的值并解码下一个 JSON 。
+
+```go
+import "github.com/bytedance/sonic"
+import "github.com/bytedance/sonic/decoder"
+
+var data = struct{
+    A int
+    B int
+}{}
+err := sonic.UnmarshalString(`{"A":"1","B":1}`, &data)
+println(err.Error())    // Mismatch type int with value string "at index 5: mismatched type with value\n\n\t{\"A\":\"1\",\"B\":1}\n\t.....^.........\n"
+fmt.Printf("%+v", data) // {A:0 B:1}
+```
+
+### `Ast.Node`
+
+Sonic/ast.Node 是完全独立的 JSON 抽象语法树库。它实现了序列化和反序列化,并提供了获取和修改通用数据的鲁棒的 API。
+
+#### 查找/索引
+
+通过给定的路径搜索 JSON 片段,路径必须为非负整数,字符串或 `nil` 。
+
+```go
+import "github.com/bytedance/sonic"
+
+input := []byte(`{"key1":[{},{"key2":{"key3":[1,2,3]}}]}`)
+
+// no path, returns entire json
+root, err := sonic.Get(input)
+raw := root.Raw() // == string(input)
+
+// multiple paths
+root, err := sonic.Get(input, "key1", 1, "key2")
+sub := root.Get("key3").Index(2).Int64() // == 3
+```
+
+**注意**:由于 `Index()` 使用偏移量来定位数据,比使用扫描的 `Get()` 要快的多,建议尽可能的使用 `Index` 。 Sonic 也提供了另一个 API, `IndexOrGet()` ,以偏移量为基础并且也确保键的匹配。
+
+#### 修改
+
+使用 `Set()` / `Unset()` 修改 json 的内容
+
+```go
+import "github.com/bytedance/sonic"
+
+// Set
+exist, err := root.Set("key4", NewBool(true)) // exist == false
+alias1 := root.Get("key4")
+println(alias1.Valid()) // true
+alias2 := root.Index(1)
+println(alias1 == alias2) // true
+
+// Unset
+exist, err := root.UnsetByIndex(1) // exist == true
+println(root.Get("key4").Check()) // "value not exist"
+```
+
+#### 序列化
+
+要将 `ast.Node` 编码为 json ,使用 `MarshalJson()` 或者 `json.Marshal()` (必须传递指向节点的指针)
+
+```go
+import (
+    "encoding/json"
+    "github.com/bytedance/sonic"
+)
+
+buf, err := root.MarshalJson()
+println(string(buf))                // {"key1":[{},{"key2":{"key3":[1,2,3]}}]}
+exp, err := json.Marshal(&root)     // WARN: use pointer
+println(string(buf) == string(exp)) // true
+```
+
+#### APIs
+
+- 合法性检查: `Check()`, `Error()`, `Valid()`, `Exist()`
+- 索引: `Index()`, `Get()`, `IndexPair()`, `IndexOrGet()`, `GetByPath()`
+- 转换至 go 内置类型: `Int64()`, `Float64()`, `String()`, `Number()`, `Bool()`, `Map[UseNumber|UseNode]()`, `Array[UseNumber|UseNode]()`, `Interface[UseNumber|UseNode]()`
+- go 类型打包: `NewRaw()`, `NewNumber()`, `NewNull()`, `NewBool()`, `NewString()`, `NewObject()`, `NewArray()`
+- 迭代: `Values()`, `Properties()`, `ForEach()`, `SortKeys()`
+- 修改: `Set()`, `SetByIndex()`, `Add()`
+
+### `Ast.Visitor`
+
+Sonic 提供了一个高级的 API 用于直接全量解析 JSON 到非标准容器里 (既不是 `struct` 也不是 `map[string]interface{}`) 且不需要借助任何中间表示 (`ast.Node` 或 `interface{}`)。举个例子,你可能定义了下述的类型,它们看起来像 `interface{}`,但实际上并不是:
+
+```go
+type UserNode interface {}
+
+// the following types implement the UserNode interface.
+type (
+    UserNull    struct{}
+    UserBool    struct{ Value bool }
+    UserInt64   struct{ Value int64 }
+    UserFloat64 struct{ Value float64 }
+    UserString  struct{ Value string }
+    UserObject  struct{ Value map[string]UserNode }
+    UserArray   struct{ Value []UserNode }
+)
+```
+
+Sonic 提供了下述的 API 来返回 **“对 JSON AST 的前序遍历”**。`ast.Visitor` 是一个 SAX 风格的接口,这在某些 C++ 的 JSON 解析库中被使用到。你需要自己实现一个 `ast.Visitor`,将它传递给 `ast.Preorder()` 方法。在你的实现中你可以使用自定义的类型来表示 JSON 的值。在你的 `ast.Visitor` 中,可能需要有一个 O(n) 空间复杂度的容器(比如说栈)来记录 object / array 的层级。
+
+```go
+func Preorder(str string, visitor Visitor, opts *VisitorOptions) error
+
+type Visitor interface {
+    OnNull() error
+    OnBool(v bool) error
+    OnString(v string) error
+    OnInt64(v int64, n json.Number) error
+    OnFloat64(v float64, n json.Number) error
+    OnObjectBegin(capacity int) error
+    OnObjectKey(key string) error
+    OnObjectEnd() error
+    OnArrayBegin(capacity int) error
+    OnArrayEnd() error
+}
+```
+
+详细用法参看 [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go),我们还为 `UserNode` 实现了一个示例 `ast.Visitor`,你可以在 [ast/visitor_test.go](https://github.com/bytedance/sonic/blob/main/ast/visitor_test.go) 中找到它。
+
+## 兼容性
+
+由于开发高性能代码的困难性, Sonic **不**保证对所有环境的支持。对于在不同环境中使用 Sonic 构建应用程序的开发者,我们有以下建议:
+
+- 在 **Mac M1** 上开发:确保在您的计算机上安装了 Rosetta 2,并在构建时设置 `GOARCH=amd64` 。 Rosetta 2 可以自动将 x86 二进制文件转换为 arm64 二进制文件,并在 Mac M1 上运行 x86 应用程序。
+- 在 **Linux arm64** 上开发:您可以安装 qemu 并使用 `qemu-x86_64 -cpu max` 命令来将 x86 二进制文件转换为 arm64 二进制文件。qemu可以实现与Mac M1上的Rosetta 2类似的转换效果。
+
+对于希望在不使用 qemu 下使用 sonic 的开发者,或者希望处理 JSON 时与 `encoding/JSON` 严格保持一致的开发者,我们在 `sonic.API` 中提供了一些兼容性 API
+
+- `ConfigDefault`: 在支持 sonic 的环境下 sonic 的默认配置(`EscapeHTML=false`,`SortKeys=false`等)。行为与具有相应配置的 `encoding/json` 一致,一些选项,如 `SortKeys=false` 将无效。
+- `ConfigStd`: 在支持 sonic 的环境下与标准库兼容的配置(`EscapeHTML=true`,`SortKeys=true`等)。行为与 `encoding/json` 一致。
+- `ConfigFastest`: 在支持 sonic 的环境下运行最快的配置(`NoQuoteTextMarshaler=true`)。行为与具有相应配置的 `encoding/json` 一致,某些选项将无效。
+
+## 注意事项
+
+### 预热
+
+由于 Sonic 使用 [golang-asm](https://github.com/twitchyliquid64/golang-asm) 作为 JIT 汇编器,这个库并不适用于运行时编译,第一次运行一个大型模式可能会导致请求超时甚至进程内存溢出。为了更好地稳定性,我们建议在运行大型模式或在内存有限的应用中,在使用 `Marshal()/Unmarshal()` 前运行 `Pretouch()`。
+
+```go
+import (
+    "reflect"
+    "github.com/bytedance/sonic"
+    "github.com/bytedance/sonic/option"
+)
+
+func init() {
+    var v HugeStruct
+
+    // For most large types (nesting depth <= option.DefaultMaxInlineDepth)
+    err := sonic.Pretouch(reflect.TypeOf(v))
+
+    // with more CompileOption...
+    err := sonic.Pretouch(reflect.TypeOf(v),
+        // If the type is too deep nesting (nesting depth > option.DefaultMaxInlineDepth),
+        // you can set compile recursive loops in Pretouch for better stability in JIT.
+        option.WithCompileRecursiveDepth(loop),
+        // For a large nested struct, try to set a smaller depth to reduce compiling time.
+        option.WithCompileMaxInlineDepth(depth),
+    )
+}
+```
+
+### 拷贝字符串
+
+当解码 **没有转义字符的字符串**时, sonic 会从原始的 JSON 缓冲区内引用而不是复制到新的一个缓冲区中。这对 CPU 的性能方面很有帮助,但是可能因此在解码后对象仍在使用的时候将整个 JSON 缓冲区保留在内存中。实践中我们发现,通过引用 JSON 缓冲区引入的额外内存通常是解码后对象的 20% 至 80% ,一旦应用长期保留这些对象(如缓存以备重用),服务器所使用的内存可能会增加。我们提供了选项 `decoder.CopyString()` 供用户选择,不引用 JSON 缓冲区。这可能在一定程度上降低 CPU 性能。
+
+### 传递字符串还是字节数组?
+
+为了和 `encoding/json` 保持一致,我们提供了传递 `[]byte` 作为参数的 API ,但考虑到安全性,字符串到字节的复制是同时进行的,这在原始 JSON 非常大时可能会导致性能损失。因此,你可以使用 `UnmarshalString()` 和 `GetFromString()` 来传递字符串,只要你的原始数据是字符串,或**零拷贝类型转换**对于你的字节数组是安全的。我们也提供了 `MarshalString()` 的 API ,以便对编码的 JSON 字节数组进行**零拷贝类型转换**,因为 sonic 输出的字节始终是重复并且唯一的,所以这样是安全的。
+
+### 加速 `encoding.TextMarshaler`
+
+为了保证数据安全性, `sonic.Encoder` 默认会对来自 `encoding.TextMarshaler` 接口的字符串进行引用和转义,如果大部分数据都是这种形式那可能会导致很大的性能损失。我们提供了 `encoder.NoQuoteTextMarshaler` 选项来跳过这些操作,但你**必须**保证他们的输出字符串依照 [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259) 进行了转义和引用。
+
+### 泛型的性能优化
+
+在 **完全解析**的场景下, `Unmarshal()` 表现得比 `Get()`+`Node.Interface()` 更好。但是如果你只有特定 JSON 的部分模式,你可以将 `Get()` 和 `Unmarshal()` 结合使用:
+
+```go
+import "github.com/bytedance/sonic"
+
+node, err := sonic.GetFromString(_TwitterJson, "statuses", 3, "user")
+var user User // your partial schema...
+err = sonic.UnmarshalString(node.Raw(), &user)
+```
+
+甚至如果你没有任何模式,可以用 `ast.Node` 代替 `map` 或 `interface` 作为泛型的容器:
+
+```go
+import "github.com/bytedance/sonic"
+
+root, err := sonic.GetFromString(_TwitterJson)
+user := root.GetByPath("statuses", 3, "user")  // === root.Get("statuses").Index(3).Get("user")
+err = user.Check()
+
+// err = user.LoadAll() // only call this when you want to use 'user' concurrently...
+go someFunc(user)
+```
+
+为什么?因为 `ast.Node` 使用 `array` 来存储其子节点:
+
+- 在插入(反序列化)和扫描(序列化)数据时,`Array` 的性能比 `Map` **好得多**;
+- **哈希**(`map[x]`)的效率不如**索引**(`array[x]`)高效,而 `ast.Node` 可以在数组和对象上使用索引;
+- 使用 `Interface()` / `Map()` 意味着 sonic 必须解析所有的底层值,而 `ast.Node` 可以**按需解析**它们。
+
+**注意**:由于 `ast.Node` 的惰性加载设计,其**不能**直接保证并发安全性,但你可以调用 `Node.Load()` / `Node.LoadAll()` 来实现并发安全。尽管可能会带来性能损失,但仍比转换成 `map` 或 `interface{}` 更为高效。
+
+### 使用 `ast.Node` 还是 `ast.Visitor`?
+
+对于泛型数据的解析,`ast.Node` 在大多数场景上应该能够满足你的需求。
+
+然而,`ast.Node` 是一种针对部分解析 JSON 而设计的泛型容器,它包含一些特殊设计,比如惰性加载,如果你希望像 `Unmarshal()` 那样直接解析整个 JSON,这些设计可能并不合适。尽管 `ast.Node` 相较于 `map` 或 `interface{}` 来说是更好的一种泛型容器,但它毕竟也是一种中间表示,如果你的最终类型是自定义的,你还得在解析完成后将上述类型转化成你自定义的类型。
+
+在上述场景中,如果想要有更极致的性能,`ast.Visitor` 会是更好的选择。它采用和 `Unmarshal()` 类似的形式解析 JSON,并且你可以直接使用你的最终类型去表示 JSON AST,而不需要经过额外的任何中间表示。
+
+但是,`ast.Visitor` 并不是一个很易用的 API。你可能需要写大量的代码去实现自己的 `ast.Visitor`,并且需要在解析过程中仔细维护树的层级。如果你决定要使用这个 API,请先仔细阅读 [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) 中的注释。
+
+## 社区
+
+Sonic 是 [CloudWeGo](https://www.cloudwego.io/) 下的一个子项目。我们致力于构建云原生生态系统。

+ 214 - 0
vendor/github.com/bytedance/sonic/api.go

@@ -0,0 +1,214 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package sonic
+
+import (
+    `io`
+
+    `github.com/bytedance/sonic/ast`
+    `github.com/bytedance/sonic/internal/rt`
+)
+
+// Config is a combination of sonic/encoder.Options and sonic/decoder.Options
+type Config struct {
+    // EscapeHTML indicates encoder to escape all HTML characters 
+    // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
+    // WARNING: This hurts performance A LOT, USE WITH CARE.
+    EscapeHTML                    bool
+
+    // SortMapKeys indicates encoder that the keys of a map needs to be sorted 
+    // before serializing into JSON.
+    // WARNING: This hurts performance A LOT, USE WITH CARE.
+    SortMapKeys                   bool
+
+    // CompactMarshaler indicates encoder that the output JSON from json.Marshaler 
+    // is always compact and needs no validation 
+    CompactMarshaler              bool
+
+    // NoQuoteTextMarshaler indicates encoder that the output text from encoding.TextMarshaler 
+    // is always escaped string and needs no quoting
+    NoQuoteTextMarshaler          bool
+
+    // NoNullSliceOrMap indicates encoder that all empty Array or Object are encoded as '[]' or '{}',
+    // instead of 'null'
+    NoNullSliceOrMap              bool
+
+    // UseInt64 indicates decoder to unmarshal an integer into an interface{} as an
+    // int64 instead of as a float64.
+    UseInt64                      bool
+
+    // UseNumber indicates decoder to unmarshal a number into an interface{} as a
+    // json.Number instead of as a float64.
+    UseNumber                     bool
+
+    // UseUnicodeErrors indicates decoder to return an error when encounter invalid
+    // UTF-8 escape sequences.
+    UseUnicodeErrors              bool
+
+    // DisallowUnknownFields indicates decoder to return an error when the destination
+    // is a struct and the input contains object keys which do not match any
+    // non-ignored, exported fields in the destination.
+    DisallowUnknownFields         bool
+
+    // CopyString indicates decoder to decode string values by copying instead of referring.
+    CopyString                    bool
+
+    // ValidateString indicates decoder and encoder to valid string values: decoder will return errors 
+    // when unescaped control chars(\u0000-\u001f) in the string value of JSON.
+    ValidateString                bool
+
+    // NoValidateJSONMarshaler indicates that the encoder should not validate the output string
+    // after encoding the JSONMarshaler to JSON.
+    NoValidateJSONMarshaler       bool
+    
+    // NoEncoderNewline indicates that the encoder should not add a newline after every message
+    NoEncoderNewline bool
+}
+ 
+var (
+    // ConfigDefault is the default config of APIs, aiming at efficiency and safety.
+    ConfigDefault = Config{}.Froze()
+ 
+    // ConfigStd is the standard config of APIs, aiming at being compatible with encoding/json.
+    ConfigStd = Config{
+        EscapeHTML : true,
+        SortMapKeys: true,
+        CompactMarshaler: true,
+        CopyString : true,
+        ValidateString : true,
+    }.Froze()
+ 
+    // ConfigFastest is the fastest config of APIs, aiming at speed.
+    ConfigFastest = Config{
+        NoQuoteTextMarshaler: true,
+        NoValidateJSONMarshaler: true,
+    }.Froze()
+)
+ 
+ 
+// API is a binding of specific config.
+// This interface is inspired by github.com/json-iterator/go,
+// and has same behaviors under equivalent config.
+type API interface {
+    // MarshalToString returns the JSON encoding string of v
+    MarshalToString(v interface{}) (string, error)
+    // Marshal returns the JSON encoding bytes of v.
+    Marshal(v interface{}) ([]byte, error)
+    // MarshalIndent returns the JSON encoding bytes with indent and prefix.
+    MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
+    // UnmarshalFromString parses the JSON-encoded string and stores the result in the value pointed to by v.
+    UnmarshalFromString(str string, v interface{}) error
+    // Unmarshal parses the JSON-encoded bytes and stores the result in the value pointed to by v.
+    Unmarshal(data []byte, v interface{}) error
+    // NewEncoder create a Encoder holding writer
+    NewEncoder(writer io.Writer) Encoder
+    // NewDecoder create a Decoder holding reader
+    NewDecoder(reader io.Reader) Decoder
+    // Valid validates the JSON-encoded bytes and reports if it is valid
+    Valid(data []byte) bool
+}
+
+// Encoder encodes JSON into io.Writer
+type Encoder interface {
+    // Encode writes the JSON encoding of v to the stream, followed by a newline character.
+    Encode(val interface{}) error
+    // SetEscapeHTML specifies whether problematic HTML characters 
+    // should be escaped inside JSON quoted strings. 
+    // The default behavior NOT ESCAPE 
+    SetEscapeHTML(on bool)
+    // SetIndent instructs the encoder to format each subsequent encoded value 
+    // as if indented by the package-level function Indent(dst, src, prefix, indent).
+    // Calling SetIndent("", "") disables indentation
+    SetIndent(prefix, indent string)
+}
+
+// Decoder decodes JSON from io.Reader
+type Decoder interface {
+    // Decode reads the next JSON-encoded value from its input and stores it in the value pointed to by v.
+    Decode(val interface{}) error
+    // Buffered returns a reader of the data remaining in the Decoder's buffer.
+    // The reader is valid until the next call to Decode.
+    Buffered() io.Reader
+    // DisallowUnknownFields causes the Decoder to return an error when the destination is a struct 
+    // and the input contains object keys which do not match any non-ignored, exported fields in the destination.
+    DisallowUnknownFields()
+    // More reports whether there is another element in the current array or object being parsed.
+    More() bool
+    // UseNumber causes the Decoder to unmarshal a number into an interface{} as a Number instead of as a float64.
+    UseNumber()
+}
+
+// Marshal returns the JSON encoding bytes of v.
+func Marshal(val interface{}) ([]byte, error) {
+    return ConfigDefault.Marshal(val)
+}
+
+// MarshalString returns the JSON encoding string of v.
+func MarshalString(val interface{}) (string, error) {
+    return ConfigDefault.MarshalToString(val)
+}
+
+// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
+// NOTICE: This API copies given buffer by default,
+// if you want to pass JSON more efficiently, use UnmarshalString instead.
+func Unmarshal(buf []byte, val interface{}) error {
+    return ConfigDefault.Unmarshal(buf, val)
+}
+
+// UnmarshalString is like Unmarshal, except buf is a string.
+func UnmarshalString(buf string, val interface{}) error {
+    return ConfigDefault.UnmarshalFromString(buf, val)
+}
+
+// Get searches and locates the given path from src json,
+// and returns a ast.Node representing the partially json.
+//
+// Each path arg must be integer or string:
+//     - Integer is target index(>=0), means searching current node as array.
+//     - String is target key, means searching current node as object.
+//
+// 
+// Notice: It expects the src json is **Well-formed** and **Immutable** when calling,
+// otherwise it may return unexpected result. 
+// Considering memory safety, the returned JSON is **Copied** from the input
+func Get(src []byte, path ...interface{}) (ast.Node, error) {
+    return GetCopyFromString(rt.Mem2Str(src), path...)
+}
+
+// GetFromString is same with Get except src is string.
+//
+// WARNING: The returned JSON is **Referenced** from the input. 
+// Caching or long-time holding the returned node may cause OOM.
+// If your src is big, consider use GetFromStringCopy().
+func GetFromString(src string, path ...interface{}) (ast.Node, error) {
+    return ast.NewSearcher(src).GetByPath(path...)
+}
+
+// GetCopyFromString is same with Get except src is string
+func GetCopyFromString(src string, path ...interface{}) (ast.Node, error) {
+    return ast.NewSearcher(src).GetByPathCopy(path...)
+}
+
+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+    return ConfigDefault.Valid(data)
+}
+
+// ValidString reports whether data is a valid JSON encoding.
+func ValidString(data string) bool {
+    return ConfigDefault.Valid(rt.Str2Mem(data))
+}

+ 135 - 0
vendor/github.com/bytedance/sonic/ast/api.go

@@ -0,0 +1,135 @@
+//go:build (amd64 && go1.16 && !go1.23) || (arm64 && go1.20 && !go1.23)
+// +build amd64,go1.16,!go1.23 arm64,go1.20,!go1.23
+
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `runtime`
+    `unsafe`
+
+    `github.com/bytedance/sonic/encoder`
+    `github.com/bytedance/sonic/internal/native`
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+    uq `github.com/bytedance/sonic/unquote`
+    `github.com/bytedance/sonic/utf8`
+)
+
+var typeByte = rt.UnpackEface(byte(0)).Type
+
+//go:nocheckptr
+func quote(buf *[]byte, val string) {
+    *buf = append(*buf, '"')
+    if len(val) == 0 {
+        *buf = append(*buf, '"')
+        return
+    }
+
+    sp := rt.IndexChar(val, 0)
+    nb := len(val)
+    b := (*rt.GoSlice)(unsafe.Pointer(buf))
+
+    // input buffer
+    for nb > 0 {
+        // output buffer
+        dp := unsafe.Pointer(uintptr(b.Ptr) + uintptr(b.Len))
+        dn := b.Cap - b.Len
+        // call native.Quote, dn is byte count it outputs
+        ret := native.Quote(sp, nb, dp, &dn, 0)
+        // update *buf length
+        b.Len += dn
+
+        // no need more output
+        if ret >= 0 {
+            break
+        }
+
+        // double buf size
+        *b = growslice(typeByte, *b, b.Cap*2)
+        // ret is the complement of consumed input
+        ret = ^ret
+        // update input buffer
+        nb -= ret
+        sp = unsafe.Pointer(uintptr(sp) + uintptr(ret))
+    }
+
+    runtime.KeepAlive(buf)
+    runtime.KeepAlive(sp)
+    *buf = append(*buf, '"')
+}
+
+func unquote(src string) (string, types.ParsingError) {
+    return uq.String(src)
+}
+
+func (self *Parser) decodeValue() (val types.JsonState) {
+    sv := (*rt.GoString)(unsafe.Pointer(&self.s))
+    flag := types.F_USE_NUMBER
+    if self.dbuf != nil {
+        flag = 0
+        val.Dbuf = self.dbuf
+        val.Dcap = types.MaxDigitNums
+    }
+    self.p = native.Value(sv.Ptr, sv.Len, self.p, &val, uint64(flag))
+    return
+}
+
+func (self *Parser) skip() (int, types.ParsingError) {
+    fsm := types.NewStateMachine()
+    start := native.SkipOne(&self.s, &self.p, fsm, 0)
+    types.FreeStateMachine(fsm)
+
+    if start < 0 {
+        return self.p, types.ParsingError(-start)
+    }
+    return start, 0
+}
+
+func (self *Node) encodeInterface(buf *[]byte) error {
+    //WARN: NOT compatible with json.Encoder
+    return encoder.EncodeInto(buf, self.packAny(), encoder.NoEncoderNewline)
+}
+
+func (self *Parser) skipFast() (int, types.ParsingError) {
+    start := native.SkipOneFast(&self.s, &self.p)
+    if start < 0 {
+        return self.p, types.ParsingError(-start)
+    }
+    return start, 0
+}
+
+func (self *Parser) getByPath(validate bool, path ...interface{}) (int, types.ParsingError) {
+    var fsm *types.StateMachine
+    if validate {
+        fsm = types.NewStateMachine()
+    }
+    start := native.GetByPath(&self.s, &self.p, &path, fsm)
+    if validate {
+        types.FreeStateMachine(fsm)
+    }
+    runtime.KeepAlive(path)
+    if start < 0 {
+        return self.p, types.ParsingError(-start)
+    }
+    return start, 0
+}
+
+func validate_utf8(str string) bool {
+    return utf8.ValidateString(str)
+}

+ 114 - 0
vendor/github.com/bytedance/sonic/ast/api_compat.go

@@ -0,0 +1,114 @@
+// +build !amd64,!arm64 go1.23 !go1.16 arm64,!go1.20
+
+/*
+* Copyright 2022 ByteDance Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package ast
+
+import (
+    `encoding/json`
+    `unicode/utf8`
+
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+)
+
// init emits a one-time startup warning: this file is the pure-Go fallback,
// selected only when the Go version / architecture is outside sonic's
// optimized range.
func init() {
    println("WARNING:(ast) sonic only supports Go1.16~1.22, but your environment is not suitable")
}
+
// quote appends val to buf as a double-quoted, escaped JSON string.
func quote(buf *[]byte, val string) {
    quoteString(buf, val)
}
+
// unquote unescapes an internal JSON string (src does not include the quotes
// at the beginning and end). It widens the view by one byte on each side so
// unquoteBytes also sees the surrounding quote characters.
func unquote(src string) (string, types.ParsingError) {
    sp := rt.IndexChar(src, -1)
    out, ok := unquoteBytes(rt.BytesFrom(sp, len(src)+2, len(src)+2))
    if !ok {
        return "", types.ERR_INVALID_ESCAPE
    }
    return rt.Mem2Str(out), 0
}
+
+
// decodeValue scans the next JSON value with the pure-Go decoder, advancing
// self.p on success. With no digit buffer (dbuf == nil) numbers are only
// skipped, not parsed.
func (self *Parser) decodeValue() (val types.JsonState) {
    e, v := decodeValue(self.s, self.p, self.dbuf == nil)
    if e < 0 {
        // v already carries the error state; leave self.p unchanged
        return v
    }
    self.p = e
    return v
}
+
// skip advances self.p past one fully-validated JSON value and returns the
// offset where that value starts.
func (self *Parser) skip() (int, types.ParsingError) {
    e, s := skipValue(self.s, self.p)
    if e < 0 {
        return self.p, types.ParsingError(-e)
    }
    self.p = e
    return s, 0
}
+
// skipFast advances self.p past one JSON value with only structural checks
// (no deep validation) and returns the value's start offset.
func (self *Parser) skipFast() (int, types.ParsingError) {
    e, s := skipValueFast(self.s, self.p)
    if e < 0 {
        return self.p, types.ParsingError(-e)
    }
    self.p = e
    return s, 0
}
+
+func (self *Node) encodeInterface(buf *[]byte) error {
+    out, err := json.Marshal(self.packAny())
+    if err != nil {
+        return err
+    }
+    *buf = append(*buf, out...)
+    return nil
+}
+
// getByPath walks the parser down path — non-negative int elements index
// into arrays, string elements look up object keys — then skips the
// addressed value (validating when requested) and returns its start offset.
// Panics on any other path element type.
func (self *Parser) getByPath(validate bool, path ...interface{}) (int, types.ParsingError) {
    for _, p := range path {
        if idx, ok := p.(int); ok && idx >= 0 {
            if err := self.searchIndex(idx); err != 0 {
                return self.p, err
            }
        } else if key, ok := p.(string); ok {
            if err := self.searchKey(key); err != 0 {
                return self.p, err
            }
        } else {
            panic("path must be either int(>=0) or string")
        }
    }

    var start int
    var e types.ParsingError
    if validate {
        start, e = self.skip()
    } else {
        start, e = self.skipFast()
    }
    if e != 0 {
        return self.p, e
    }
    return start, 0
}
+
// validate_utf8 reports whether str consists entirely of well-formed UTF-8
// runes (equivalent to utf8.ValidString).
func validate_utf8(str string) bool {
    for i := 0; i < len(str); {
        r, size := utf8.DecodeRuneInString(str[i:])
        if r == utf8.RuneError && size <= 1 {
            // a genuine decoding failure, not a literal U+FFFD in the input
            return false
        }
        i += size
    }
    return true
}

+ 0 - 0
vendor/github.com/bytedance/sonic/ast/asm.s


+ 31 - 0
vendor/github.com/bytedance/sonic/ast/b64_amd64.go

@@ -0,0 +1,31 @@
+// +build amd64,go1.16
+
+/**
+ * Copyright 2023 ByteDance Inc.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast 
+
+import (
+    `github.com/cloudwego/base64x`
+)
+
// decodeBase64 decodes a standard-alphabet base64 string using the
// SIMD-accelerated base64x implementation.
func decodeBase64(src string) ([]byte, error) {
    return base64x.StdEncoding.DecodeString(src)
}
+
// encodeBase64 encodes src to a standard-alphabet base64 string using the
// SIMD-accelerated base64x implementation.
func encodeBase64(src []byte) string {
    return base64x.StdEncoding.EncodeToString(src)
}

+ 31 - 0
vendor/github.com/bytedance/sonic/ast/b64_compat.go

@@ -0,0 +1,31 @@
+// +build !amd64 !go1.16
+
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `encoding/base64`
+)
+
// decodeBase64 decodes src as standard (padded) base64 into a fresh byte
// slice, with the same result and error behavior as
// base64.StdEncoding.DecodeString.
func decodeBase64(src string) ([]byte, error) {
    dst := make([]byte, base64.StdEncoding.DecodedLen(len(src)))
    n, err := base64.StdEncoding.Decode(dst, []byte(src))
    return dst[:n], err
}
+
// encodeBase64 encodes src as standard (padded) base64, identical in output
// to base64.StdEncoding.EncodeToString.
func encodeBase64(src []byte) string {
    dst := make([]byte, base64.StdEncoding.EncodedLen(len(src)))
    base64.StdEncoding.Encode(dst, src)
    return string(dst)
}

+ 409 - 0
vendor/github.com/bytedance/sonic/ast/buffer.go

@@ -0,0 +1,409 @@
+/**
+ * Copyright 2023 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `sort`
+    `unsafe`
+)
+
// nodeChunk is one fixed-size slab of nodes; linkedNodes chains slabs so the
// container can grow without copying existing elements.
type nodeChunk [_DEFAULT_NODE_CAP]Node

// linkedNodes stores Nodes in an inline head chunk plus a slice of
// heap-allocated tail chunks; size counts the live elements.
type linkedNodes struct {
    head   nodeChunk
    tail   []*nodeChunk
    size   int
}
+
+func (self *linkedNodes) Cap() int {
+    if self == nil {
+        return 0
+    }
+    return (len(self.tail)+1)*_DEFAULT_NODE_CAP 
+}
+
+func (self *linkedNodes) Len() int {
+    if self == nil {
+        return 0
+    }
+    return self.size 
+}
+
// At returns a pointer to element i, or nil when i is out of range (or the
// receiver is nil). Indices below _DEFAULT_NODE_CAP live in the inline head
// chunk; larger indices map to tail chunk i/_DEFAULT_NODE_CAP-1, slot
// i%_DEFAULT_NODE_CAP.
func (self *linkedNodes) At(i int) (*Node) {
    if self == nil {
        return nil
    }
    if i >= 0 && i<self.size && i < _DEFAULT_NODE_CAP {
        return &self.head[i]
    } else if i >= _DEFAULT_NODE_CAP && i<self.size  {
        a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
        if a < len(self.tail) {
            return &self.tail[a][b]
        }
    }
    return nil
}
+
// MoveOne moves the element at index source to index target, shifting every
// element between them by one slot. Equal or out-of-range indices are
// silently ignored.
func (self *linkedNodes) MoveOne(source int,  target int) {
    if source == target {
        return
    }
    if source < 0 || source >= self.size || target < 0 || target >= self.size {
        return
    }
    // reserve source
    n := *self.At(source)
    if source < target {
        // move every element (source,target] one step back
        for i:=source; i<target; i++ {
            *self.At(i) = *self.At(i+1)
        }
    } else {
        // move every element [target,source) one step forward
        for i:=source; i>target; i-- {
            *self.At(i) = *self.At(i-1)
        }
    }
    // set target
    *self.At(target) = n
}
+
+func (self *linkedNodes) Pop() {
+    if self == nil || self.size == 0 {
+        return
+    }
+    self.Set(self.size-1, Node{})
+    self.size--
+}
+
+func (self *linkedPairs) Pop() {
+    if self == nil || self.size == 0 {
+        return
+    }
+    self.Set(self.size-1, Pair{})
+    self.size--
+}
+
+func (self *linkedNodes) Push(v Node) {
+    self.Set(self.size, v)
+}
+
// Set writes v at index i, allocating tail chunks on demand and extending
// size when writing at or past the current end.
func (self *linkedNodes) Set(i int, v Node) {
    // fast path: index falls inside the inline head chunk
    if i < _DEFAULT_NODE_CAP {
        self.head[i] = v
        if self.size <= i {
            self.size = i+1
        }
        return
    }
    // tail chunk a, slot b (a < 0 cannot happen here but mirrors At's math)
    a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
    if a < 0 {
        self.head[b] = v
    } else {
        self.growTailLength(a+1)
        var n = &self.tail[a]
        if *n == nil {
            *n = new(nodeChunk)
        }
        (*n)[b] = v
    }
    if self.size <= i {
        self.size = i+1
    }
}
+
// growTailLength ensures at least l tail-chunk slots exist, reusing the
// backing array when capacity already suffices and otherwise growing it
// geometrically (c += 1 + c>>_APPEND_GROW_SHIFT).
func (self *linkedNodes) growTailLength(l int) {
    if l <= len(self.tail) {
        return
    }
    c := cap(self.tail)
    for c < l {
        c += 1 + c>>_APPEND_GROW_SHIFT
    }
    if c == cap(self.tail) {
        // capacity already sufficient: just extend the length
        self.tail = self.tail[:l]
        return
    }
    tmp := make([]*nodeChunk, l, c)
    copy(tmp, self.tail)
    self.tail = tmp
}
+
// ToSlice copies all elements into con, which must hold at least size
// elements; a shorter destination is silently ignored.
func (self *linkedNodes) ToSlice(con []Node) {
    if len(con) < self.size {
        return
    }
    // a/b locate the chunk and slot of the last element
    i := (self.size-1)
    a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
    if a < 0 {
        // everything fits in the head chunk
        copy(con, self.head[:b+1])
        return
    } else {
        copy(con, self.head[:])
        con = con[_DEFAULT_NODE_CAP:]
    }

    for i:=0; i<a; i++ {
        copy(con, self.tail[i][:])
        con = con[_DEFAULT_NODE_CAP:]
    }
    copy(con, self.tail[a][:b+1])
}
+
// FromSlice loads the container from con, filling the inline head chunk
// first and allocating fresh tail chunks for the remainder.
// NOTE(review): assumes con is non-empty — an empty slice would index the
// head chunk with b = -1.
func (self *linkedNodes) FromSlice(con []Node) {
    self.size = len(con)
    i := self.size-1
    a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
    if a < 0 {
        copy(self.head[:b+1], con)
        return
    } else {
        copy(self.head[:], con)
        con = con[_DEFAULT_NODE_CAP:]
    }

    if cap(self.tail) <= a {
        c := (a+1) + (a+1)>>_APPEND_GROW_SHIFT
        self.tail = make([]*nodeChunk, a+1, c)
    }
    self.tail = self.tail[:a+1]

    for i:=0; i<a; i++ {
        self.tail[i] = new(nodeChunk)
        copy(self.tail[i][:], con)
        con = con[_DEFAULT_NODE_CAP:]
    }

    self.tail[a] = new(nodeChunk)
    copy(self.tail[a][:b+1], con)
}
+
// pairChunk is one fixed-size slab of key/value pairs.
type pairChunk [_DEFAULT_NODE_CAP]Pair

// linkedPairs mirrors linkedNodes for Pair elements: an inline head chunk
// plus heap-allocated tail chunks, with size counting live entries.
type linkedPairs struct {
    head pairChunk
    tail []*pairChunk
    size int
}
+
+func (self *linkedPairs) Cap() int {
+    if self == nil {
+        return 0
+    }
+    return (len(self.tail)+1)*_DEFAULT_NODE_CAP 
+}
+
+func (self *linkedPairs) Len() int {
+    if self == nil {
+        return 0
+    }
+    return self.size 
+}
+
// At returns a pointer to pair i, or nil when i is out of range (or the
// receiver is nil); same chunk/slot mapping as linkedNodes.At.
func (self *linkedPairs) At(i int) *Pair {
    if self == nil {
        return nil
    }
    if i >= 0 && i < _DEFAULT_NODE_CAP && i<self.size {
        return &self.head[i]
    } else if i >= _DEFAULT_NODE_CAP && i<self.size {
        a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
        if a < len(self.tail) {
            return &self.tail[a][b]
        }
    }
    return nil
}
+
+func (self *linkedPairs) Push(v Pair) {
+    self.Set(self.size, v)
+}
+
// Set writes v at index i, allocating tail chunks on demand and extending
// size when writing at or past the current end.
func (self *linkedPairs) Set(i int, v Pair) {
    // fast path: index falls inside the inline head chunk
    if i < _DEFAULT_NODE_CAP {
        self.head[i] = v
        if self.size <= i {
            self.size = i+1
        }
        return
    }
    a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
    if a < 0 {
        self.head[b] = v
    } else {
        self.growTailLength(a+1)
        var n = &self.tail[a]
        if *n == nil {
            *n = new(pairChunk)
        }
        (*n)[b] = v
    }
    if self.size <= i {
        self.size = i+1
    }
}
+
// growTailLength ensures at least l tail-chunk slots exist; same geometric
// growth policy as linkedNodes.growTailLength.
func (self *linkedPairs) growTailLength(l int) {
    if l <= len(self.tail) {
        return
    }
    c := cap(self.tail)
    for c < l {
        c += 1 + c>>_APPEND_GROW_SHIFT
    }
    if c == cap(self.tail) {
        // capacity already sufficient: just extend the length
        self.tail = self.tail[:l]
        return
    }
    tmp := make([]*pairChunk, l, c)
    copy(tmp, self.tail)
    self.tail = tmp
}
+
+// linear search
+func (self *linkedPairs) Get(key string) (*Pair, int) {
+    for i:=0; i<self.size; i++ {
+        if n := self.At(i); n.Key == key {
+            return n, i
+        }
+    }
+    return nil, -1
+}
+
// ToSlice copies all pairs into con, which must hold at least size elements;
// a shorter destination is silently ignored.
func (self *linkedPairs) ToSlice(con []Pair) {
    if len(con) < self.size {
        return
    }
    // a/b locate the chunk and slot of the last pair
    i := self.size-1
    a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP

    if a < 0 {
        copy(con, self.head[:b+1])
        return
    } else {
        copy(con, self.head[:])
        con = con[_DEFAULT_NODE_CAP:]
    }

    for i:=0; i<a; i++ {
        copy(con, self.tail[i][:])
        con = con[_DEFAULT_NODE_CAP:]
    }
    copy(con, self.tail[a][:b+1])
}
+
+func (self *linkedPairs) ToMap(con map[string]Node) {
+    for i:=0; i<self.size; i++ {
+        n := self.At(i)
+        con[n.Key] = n.Value
+    }
+}
+
// FromSlice loads the container from con, filling the inline head chunk
// first and allocating fresh tail chunks for the remainder.
// NOTE(review): assumes con is non-empty — an empty slice would index the
// head chunk with b = -1.
func (self *linkedPairs) FromSlice(con []Pair) {
    self.size = len(con)
    i := self.size-1
    a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP
    if a < 0 {
        copy(self.head[:b+1], con)
        return
    } else {
        copy(self.head[:], con)
        con = con[_DEFAULT_NODE_CAP:]
    }

    if cap(self.tail) <= a {
        c := (a+1) + (a+1)>>_APPEND_GROW_SHIFT
        self.tail = make([]*pairChunk, a+1, c)
    }
    self.tail = self.tail[:a+1]

    for i:=0; i<a; i++ {
        self.tail[i] = new(pairChunk)
        copy(self.tail[i][:], con)
        con = con[_DEFAULT_NODE_CAP:]
    }

    self.tail[a] = new(pairChunk)
    copy(self.tail[a][:b+1], con)
}
+
+func (self *linkedPairs) Less(i, j int) bool {
+    return lessFrom(self.At(i).Key, self.At(j).Key, 0)
+}
+
+func (self *linkedPairs) Swap(i, j int) {
+    a, b := self.At(i), self.At(j)
+    *a, *b = *b, *a
+}
+
// Sort orders the pairs by key; sort.Stable keeps equal keys in insertion
// order.
func (self *linkedPairs) Sort() {
    sort.Stable(self)
}
+
+// Compare two strings from the pos d.
+func lessFrom(a, b string, d int) bool {
+    l := len(a)
+    if l > len(b) {
+        l = len(b)
+    }
+    for i := d; i < l; i++ {
+        if a[i] == b[i] {
+            continue
+        }
+        return a[i] < b[i]
+    }
+    return len(a) < len(b)
+}
+
// parseObjectStack snapshots a parser plus the pairs decoded so far for a
// lazily-parsed JSON object.
type parseObjectStack struct {
    parser Parser
    v      linkedPairs
}

// parseArrayStack snapshots a parser plus the nodes decoded so far for a
// lazily-parsed JSON array.
type parseArrayStack struct {
    parser Parser
    v      linkedNodes
}
+
// newLazyArray wraps a copy of p in a _V_ARRAY_LAZY node whose elements are
// only parsed on first access.
func newLazyArray(p *Parser) Node {
    s := new(parseArrayStack)
    s.parser = *p
    return Node{
        t: _V_ARRAY_LAZY,
        p: unsafe.Pointer(s),
    }
}
+
// newLazyObject wraps a copy of p in a _V_OBJECT_LAZY node whose pairs are
// only parsed on first access.
func newLazyObject(p *Parser) Node {
    s := new(parseObjectStack)
    s.parser = *p
    return Node{
        t: _V_OBJECT_LAZY,
        p: unsafe.Pointer(s),
    }
}
+
+func (self *Node) getParserAndArrayStack() (*Parser, *parseArrayStack) {
+    stack := (*parseArrayStack)(self.p)
+    return &stack.parser, stack
+}
+
+func (self *Node) getParserAndObjectStack() (*Parser, *parseObjectStack) {
+    stack := (*parseObjectStack)(self.p)
+    return &stack.parser, stack
+}
+

+ 618 - 0
vendor/github.com/bytedance/sonic/ast/decode.go

@@ -0,0 +1,618 @@
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `encoding/base64`
+    `runtime`
+    `strconv`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+)
+
// _blankCharsMask has one bit set for each JSON whitespace byte
// (' ', '\t', '\r', '\n'); isSpace tests membership with a single shift.
const _blankCharsMask = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')

// Literal tokens matched during scanning.
const (
    bytesNull   = "null"
    bytesTrue   = "true"
    bytesFalse  = "false"
    bytesObject = "{}"
    bytesArray  = "[]"
)
+
+func isSpace(c byte) bool {
+    return (int(1<<c) & _blankCharsMask) != 0
+}
+
// skipBlank returns the offset of the first non-whitespace byte at or after
// pos, or -ERR_EOF when only whitespace remains. It walks raw pointers for
// speed; KeepAlive pins src until the scan finishes.
//go:nocheckptr
func skipBlank(src string, pos int) int {
    se := uintptr(rt.IndexChar(src, len(src)))
    sp := uintptr(rt.IndexChar(src, pos))

    for sp < se {
        if !isSpace(*(*byte)(unsafe.Pointer(sp))) {
            break
        }
        sp += 1
    }
    if sp >= se {
        return -int(types.ERR_EOF)
    }
    runtime.KeepAlive(src)
    return int(sp - uintptr(rt.IndexChar(src, 0)))
}
+
+func decodeNull(src string, pos int) (ret int) {
+    ret = pos + 4
+    if ret > len(src) {
+        return -int(types.ERR_EOF)
+    }
+    if src[pos:ret] == bytesNull {
+        return ret
+    } else {
+        return -int(types.ERR_INVALID_CHAR)
+    }
+}
+
+func decodeTrue(src string, pos int) (ret int) {
+    ret = pos + 4
+    if ret > len(src) {
+        return -int(types.ERR_EOF)
+    }
+    if src[pos:ret] == bytesTrue {
+        return ret
+    } else {
+        return -int(types.ERR_INVALID_CHAR)
+    }
+
+}
+
+func decodeFalse(src string, pos int) (ret int) {
+    ret = pos + 5
+    if ret > len(src) {
+        return -int(types.ERR_EOF)
+    }
+    if src[pos:ret] == bytesFalse {
+        return ret
+    }
+    return -int(types.ERR_INVALID_CHAR)
+}
+
+//go:nocheckptr
+func decodeString(src string, pos int) (ret int, v string) {
+    ret, ep := skipString(src, pos)
+    if ep == -1 {
+        (*rt.GoString)(unsafe.Pointer(&v)).Ptr = rt.IndexChar(src, pos+1)
+        (*rt.GoString)(unsafe.Pointer(&v)).Len = ret - pos - 2
+        return ret, v
+    }
+
+    vv, ok := unquoteBytes(rt.Str2Mem(src[pos:ret]))
+    if !ok {
+        return -int(types.ERR_INVALID_CHAR), ""
+    }
+
+    runtime.KeepAlive(src)
+    return ret, rt.Mem2Str(vv)
+}
+
+func decodeBinary(src string, pos int) (ret int, v []byte) {
+    var vv string
+    ret, vv = decodeString(src, pos)
+    if ret < 0 {
+        return ret, nil
+    }
+    var err error
+    v, err = base64.StdEncoding.DecodeString(vv)
+    if err != nil {
+        return -int(types.ERR_INVALID_CHAR), nil
+    }
+    return ret, v
+}
+
// isDigit reports whether c is an ASCII decimal digit.
func isDigit(c byte) bool {
    return '0' <= c && c <= '9'
}
+
+//go:nocheckptr
+func decodeInt64(src string, pos int) (ret int, v int64, err error) {
+    sp := uintptr(rt.IndexChar(src, pos))
+    ss := uintptr(sp)
+    se := uintptr(rt.IndexChar(src, len(src)))
+    if uintptr(sp) >= se {
+        return -int(types.ERR_EOF), 0, nil
+    }
+
+    if c := *(*byte)(unsafe.Pointer(sp)); c == '-' {
+        sp += 1
+    }
+    if sp == se {
+        return -int(types.ERR_EOF), 0, nil
+    }
+
+    for ; sp < se; sp += uintptr(1) {
+        if !isDigit(*(*byte)(unsafe.Pointer(sp))) {
+            break
+        }
+    }
+
+    if sp < se {
+        if c := *(*byte)(unsafe.Pointer(sp)); c == '.' || c == 'e' || c == 'E' {
+            return -int(types.ERR_INVALID_NUMBER_FMT), 0, nil
+        }
+    }
+
+    var vv string
+    ret = int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr))
+    (*rt.GoString)(unsafe.Pointer(&vv)).Ptr = unsafe.Pointer(ss)
+    (*rt.GoString)(unsafe.Pointer(&vv)).Len = ret - pos
+
+    v, err = strconv.ParseInt(vv, 10, 64)
+    if err != nil {
+        //NOTICE: allow overflow here
+        if err.(*strconv.NumError).Err == strconv.ErrRange {
+            return ret, 0, err
+        }
+        return -int(types.ERR_INVALID_CHAR), 0, err
+    }
+
+    runtime.KeepAlive(src)
+    return ret, v, nil
+}
+
// isNumberChars reports whether c may appear inside a JSON number literal:
// a digit, a sign, an exponent marker, or the decimal point.
func isNumberChars(c byte) bool {
    switch {
    case c >= '0' && c <= '9':
        return true
    case c == '+' || c == '-' || c == '.':
        return true
    default:
        return c == 'e' || c == 'E'
    }
}
+
+//go:nocheckptr
+func decodeFloat64(src string, pos int) (ret int, v float64, err error) {
+    sp := uintptr(rt.IndexChar(src, pos))
+    ss := uintptr(sp)
+    se := uintptr(rt.IndexChar(src, len(src)))
+    if uintptr(sp) >= se {
+        return -int(types.ERR_EOF), 0, nil
+    }
+
+    if c := *(*byte)(unsafe.Pointer(sp)); c == '-' {
+        sp += 1
+    }
+    if sp == se {
+        return -int(types.ERR_EOF), 0, nil
+    }
+
+    for ; sp < se; sp += uintptr(1) {
+        if !isNumberChars(*(*byte)(unsafe.Pointer(sp))) {
+            break
+        }
+    }
+
+    var vv string
+    ret = int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr))
+    (*rt.GoString)(unsafe.Pointer(&vv)).Ptr = unsafe.Pointer(ss)
+    (*rt.GoString)(unsafe.Pointer(&vv)).Len = ret - pos
+
+    v, err = strconv.ParseFloat(vv, 64)
+    if err != nil {
+        //NOTICE: allow overflow here
+        if err.(*strconv.NumError).Err == strconv.ErrRange {
+            return ret, 0, err
+        }
+        return -int(types.ERR_INVALID_CHAR), 0, err
+    }
+
+    runtime.KeepAlive(src)
+    return ret, v, nil
+}
+
// decodeValue scans the next JSON value at pos and returns the offset just
// past it plus a JsonState describing it. Containers are not descended into:
// '{' / '[' return immediately with V_OBJECT / V_ARRAY. With skipnum set,
// numbers are validated but left as text (V_DOUBLE with Ep marking the
// start); otherwise they are parsed, int-first with a float fallback.
// Negative returns encode a types error.
func decodeValue(src string, pos int, skipnum bool) (ret int, v types.JsonState) {
    pos = skipBlank(src, pos)
    if pos < 0 {
        return pos, types.JsonState{Vt: types.ValueType(pos)}
    }
    switch c := src[pos]; c {
    case 'n':
        ret = decodeNull(src, pos)
        if ret < 0 {
            return ret, types.JsonState{Vt: types.ValueType(ret)}
        }
        return ret, types.JsonState{Vt: types.V_NULL}
    case '"':
        var ep int
        ret, ep = skipString(src, pos)
        if ret < 0 {
            return ret, types.JsonState{Vt: types.ValueType(ret)}
        }
        // Iv is the offset of the first content byte, Ep that of the first escape
        return ret, types.JsonState{Vt: types.V_STRING, Iv: int64(pos + 1), Ep: ep}
    case '{':
        return pos + 1, types.JsonState{Vt: types.V_OBJECT}
    case '[':
        return pos + 1, types.JsonState{Vt: types.V_ARRAY}
    case 't':
        ret = decodeTrue(src, pos)
        if ret < 0 {
            return ret, types.JsonState{Vt: types.ValueType(ret)}
        }
        return ret, types.JsonState{Vt: types.V_TRUE}
    case 'f':
        ret = decodeFalse(src, pos)
        if ret < 0 {
            return ret, types.JsonState{Vt: types.ValueType(ret)}
        }
        return ret, types.JsonState{Vt: types.V_FALSE}
    case '-', '+', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
        if skipnum {
            ret = skipNumber(src, pos)
            if ret >= 0 {
                return ret, types.JsonState{Vt: types.V_DOUBLE, Iv: 0, Ep: pos}
            } else {
                return ret, types.JsonState{Vt: types.ValueType(ret)}
            }
        } else {
            // try integer first; ERR_INVALID_NUMBER_FMT means "it's a float"
            var iv int64
            ret, iv, _ = decodeInt64(src, pos)
            if ret >= 0 {
                return ret, types.JsonState{Vt: types.V_INTEGER, Iv: iv, Ep: pos}
            } else if ret != -int(types.ERR_INVALID_NUMBER_FMT) {
                return ret, types.JsonState{Vt: types.ValueType(ret)}
            }
            var fv float64
            ret, fv, _ = decodeFloat64(src, pos)
            if ret >= 0 {
                return ret, types.JsonState{Vt: types.V_DOUBLE, Dv: fv, Ep: pos}
            } else {
                return ret, types.JsonState{Vt: types.ValueType(ret)}
            }
        }

    default:
        return -int(types.ERR_INVALID_CHAR), types.JsonState{Vt:-types.ValueType(types.ERR_INVALID_CHAR)}
    }
}
+
+//go:nocheckptr
+func skipNumber(src string, pos int) (ret int) {
+    sp := uintptr(rt.IndexChar(src, pos))
+    se := uintptr(rt.IndexChar(src, len(src)))
+    if uintptr(sp) >= se {
+        return -int(types.ERR_EOF)
+    }
+
+    if c := *(*byte)(unsafe.Pointer(sp)); c == '-' {
+        sp += 1
+    }
+    ss := sp
+
+    var pointer bool
+    var exponent bool
+    var lastIsDigit bool
+    var nextNeedDigit = true
+
+    for ; sp < se; sp += uintptr(1) {
+        c := *(*byte)(unsafe.Pointer(sp))
+        if isDigit(c) {
+            lastIsDigit = true
+            nextNeedDigit = false
+            continue
+        } else if nextNeedDigit {
+            return -int(types.ERR_INVALID_CHAR)
+        } else if c == '.' {
+            if !lastIsDigit || pointer || exponent || sp == ss {
+                return -int(types.ERR_INVALID_CHAR)
+            }
+            pointer = true
+            lastIsDigit = false
+            nextNeedDigit = true
+            continue
+        } else if c == 'e' || c == 'E' {
+            if !lastIsDigit || exponent {
+                return -int(types.ERR_INVALID_CHAR)
+            }
+            if sp == se-1 {
+                return -int(types.ERR_EOF)
+            }
+            exponent = true
+            lastIsDigit = false
+            nextNeedDigit = false
+            continue
+        } else if c == '-' || c == '+' {
+            if prev := *(*byte)(unsafe.Pointer(sp - 1)); prev != 'e' && prev != 'E' {
+                return -int(types.ERR_INVALID_CHAR)
+            }
+            lastIsDigit = false
+            nextNeedDigit = true
+            continue
+        } else {
+            break
+        }
+    }
+
+    if nextNeedDigit {
+        return -int(types.ERR_EOF)
+    }
+
+    runtime.KeepAlive(src)
+    return int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr))
+}
+
+//go:nocheckptr
+func skipString(src string, pos int) (ret int, ep int) {
+    if pos+1 >= len(src) {
+        return -int(types.ERR_EOF), -1
+    }
+
+    sp := uintptr(rt.IndexChar(src, pos))
+    se := uintptr(rt.IndexChar(src, len(src)))
+
+    // not start with quote
+    if *(*byte)(unsafe.Pointer(sp)) != '"' {
+        return -int(types.ERR_INVALID_CHAR), -1
+    }
+    sp += 1
+
+    ep = -1
+    for sp < se {
+        c := *(*byte)(unsafe.Pointer(sp))
+        if c == '\\' {
+            if ep == -1 {
+                ep = int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr))
+            }
+            sp += 2
+            continue
+        }
+        sp += 1
+        if c == '"' {
+            return int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr)), ep
+        }
+    }
+
+    runtime.KeepAlive(src)
+    // not found the closed quote until EOF
+    return -int(types.ERR_EOF), -1
+}
+
+//go:nocheckptr
+func skipPair(src string, pos int, lchar byte, rchar byte) (ret int) {
+    if pos+1 >= len(src) {
+        return -int(types.ERR_EOF)
+    }
+
+    sp := uintptr(rt.IndexChar(src, pos))
+    se := uintptr(rt.IndexChar(src, len(src)))
+
+    if *(*byte)(unsafe.Pointer(sp)) != lchar {
+        return -int(types.ERR_INVALID_CHAR)
+    }
+
+    sp += 1
+    nbrace := 1
+    inquote := false
+
+    for sp < se {
+        c := *(*byte)(unsafe.Pointer(sp))
+        if c == '\\' {
+            sp += 2
+            continue
+        } else if c == '"' {
+            inquote = !inquote
+        } else if c == lchar {
+            if !inquote {
+                nbrace += 1
+            }
+        } else if c == rchar {
+            if !inquote {
+                nbrace -= 1
+                if nbrace == 0 {
+                    sp += 1
+                    break
+                }
+            }
+        }
+        sp += 1
+    }
+
+    if nbrace != 0 {
+        return -int(types.ERR_INVALID_CHAR)
+    }
+
+    runtime.KeepAlive(src)
+    return int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr))
+}
+
// skipValueFast skips one JSON value with only structural checks: containers
// are bracket-matched via skipPair rather than fully validated. Returns the
// end offset and the value's start offset.
func skipValueFast(src string, pos int) (ret int, start int) {
    pos = skipBlank(src, pos)
    if pos < 0 {
        return pos, -1
    }
    switch c := src[pos]; c {
    case 'n':
        ret = decodeNull(src, pos)
    case '"':
        ret, _ = skipString(src, pos)
    case '{':
        ret = skipPair(src, pos, '{', '}')
    case '[':
        ret = skipPair(src, pos, '[', ']')
    case 't':
        ret = decodeTrue(src, pos)
    case 'f':
        ret = decodeFalse(src, pos)
    case '-', '+', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
        ret = skipNumber(src, pos)
    default:
        ret = -int(types.ERR_INVALID_CHAR)
    }
    return ret, pos
}
+
// skipValue skips one JSON value with full validation: objects and arrays
// are recursively checked via skipObject/skipArray. Returns the end offset
// and the value's start offset.
func skipValue(src string, pos int) (ret int, start int) {
    pos = skipBlank(src, pos)
    if pos < 0 {
        return pos, -1
    }
    switch c := src[pos]; c {
    case 'n':
        ret = decodeNull(src, pos)
    case '"':
        ret, _ = skipString(src, pos)
    case '{':
        ret, _ = skipObject(src, pos)
    case '[':
        ret, _ = skipArray(src, pos)
    case 't':
        ret = decodeTrue(src, pos)
    case 'f':
        ret = decodeFalse(src, pos)
    case '-', '+', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
        ret = skipNumber(src, pos)
    default:
        ret = -int(types.ERR_INVALID_CHAR)
    }
    return ret, pos
}
+
// skipObject validates and skips a complete JSON object starting at (or
// after leading whitespace from) pos, checking the "key" : value {, ...}
// grammar member by member. Returns the offset just past the closing '}'
// and the offset of the opening '{'.
func skipObject(src string, pos int) (ret int, start int) {
    start = skipBlank(src, pos)
    if start < 0 {
        return start, -1
    }

    if src[start] != '{' {
        return -int(types.ERR_INVALID_CHAR), -1
    }

    pos = start + 1
    pos = skipBlank(src, pos)
    if pos < 0 {
        return pos, -1
    }
    // empty object
    if src[pos] == '}' {
        return pos + 1, start
    }

    for {
        // member key
        pos, _ = skipString(src, pos)
        if pos < 0 {
            return pos, -1
        }

        pos = skipBlank(src, pos)
        if pos < 0 {
            return pos, -1
        }
        if src[pos] != ':' {
            return -int(types.ERR_INVALID_CHAR), -1
        }

        // member value
        pos++
        pos, _ = skipValue(src, pos)
        if pos < 0 {
            return pos, -1
        }

        pos = skipBlank(src, pos)
        if pos < 0 {
            return pos, -1
        }
        if src[pos] == '}' {
            return pos + 1, start
        }
        if src[pos] != ',' {
            return -int(types.ERR_INVALID_CHAR), -1
        }

        pos++
        pos = skipBlank(src, pos)
        if pos < 0 {
            return pos, -1
        }

    }
}
+
// skipArray validates and skips a complete JSON array starting at (or after
// leading whitespace from) pos, element by element. Returns the offset just
// past the closing ']' and the offset of the opening '['.
func skipArray(src string, pos int) (ret int, start int) {
    start = skipBlank(src, pos)
    if start < 0 {
        return start, -1
    }

    if src[start] != '[' {
        return -int(types.ERR_INVALID_CHAR), -1
    }

    pos = start + 1
    pos = skipBlank(src, pos)
    if pos < 0 {
        return pos, -1
    }
    // empty array
    if src[pos] == ']' {
        return pos + 1, start
    }

    for {
        pos, _ = skipValue(src, pos)
        if pos < 0 {
            return pos, -1
        }

        pos = skipBlank(src, pos)
        if pos < 0 {
            return pos, -1
        }
        if src[pos] == ']' {
            return pos + 1, start
        }
        if src[pos] != ',' {
            return -int(types.ERR_INVALID_CHAR), -1
        }
        pos++
    }
}
+
// _DecodeString decodes a JSON string from pos and returns a Go string.
//   - needEsc indicates whether escape sequences should be unescaped
//   - hasEsc tells if the returned string contained escape sequences
//   - validStr enables UTF-8 validation of the raw string
// On failure, ret is the negated error code.
func _DecodeString(src string, pos int, needEsc bool, validStr bool) (v string, ret int, hasEsc bool) {
    p := NewParserObj(src)
    p.p = pos
    switch val := p.decodeValue(); val.Vt {
    case types.V_STRING:
        // raw contents between the quotes
        str := p.s[val.Iv : p.p-1]
        if validStr && !validate_utf8(str) {
           return "", -int(types.ERR_INVALID_UTF8), false
        }
        /* fast path: no escape sequence */
        if val.Ep == -1 {
            return str, p.p, false
        } else if !needEsc {
            return str, p.p, true
        }
        /* unquote the string */
        out, err := unquote(str)
        /* check for errors */
        if err != 0 {
            return "", -int(err), true
        } else {
            return out, p.p, true
        }
    default:
        return "", -int(_ERR_UNSUPPORT_TYPE), false
    }
}
+ 259 - 0
vendor/github.com/bytedance/sonic/ast/encode.go

@@ -0,0 +1,259 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `sync`
+    `unicode/utf8`
+)
+
+const (
+    _MaxBuffer = 1024    // 1KB buffer size
+)
+
// quoteString appends the JSON-quoted form of s to *e: it wraps s in double
// quotes and escapes '"', '\\', control characters, and the JS-hostile
// U+2028/U+2029 line separators. Runs of safe bytes are copied in bulk.
// NOTE(review): safeSet and hex are package-level tables defined elsewhere
// in this package — presumably mirroring encoding/json's safeSet; confirm.
func quoteString(e *[]byte, s string) {
    *e = append(*e, '"')
    start := 0
    for i := 0; i < len(s); {
        if b := s[i]; b < utf8.RuneSelf {
            if safeSet[b] {
                i++
                continue
            }
            // flush the pending run of safe bytes, then emit the escape
            if start < i {
                *e = append(*e, s[start:i]...)
            }
            *e = append(*e, '\\')
            switch b {
            case '\\', '"':
                *e = append(*e, b)
            case '\n':
                *e = append(*e, 'n')
            case '\r':
                *e = append(*e, 'r')
            case '\t':
                *e = append(*e, 't')
            default:
                // This encodes bytes < 0x20 except for \t, \n and \r.
                // If escapeHTML is set, it also escapes <, >, and &
                // because they can lead to security holes when
                // user-controlled strings are rendered into JSON
                // and served to some browsers.
                *e = append(*e, `u00`...)
                *e = append(*e, hex[b>>4])
                *e = append(*e, hex[b&0xF])
            }
            i++
            start = i
            continue
        }
        c, size := utf8.DecodeRuneInString(s[i:])
        // if c == utf8.RuneError && size == 1 {
        //     if start < i {
        //         e.Write(s[start:i])
        //     }
        //     e.WriteString(`\ufffd`)
        //     i += size
        //     start = i
        //     continue
        // }
        // U+2028/U+2029 are valid JSON but break JS string literals
        if c == '\u2028' || c == '\u2029' {
            if start < i {
                *e = append(*e, s[start:i]...)
            }
            *e = append(*e, `\u202`...)
            *e = append(*e, hex[c&0xF])
            i += size
            start = i
            continue
        }
        i += size
    }
    // flush the final run of unescaped bytes
    if start < len(s) {
        *e = append(*e, s[start:]...)
    }
    *e = append(*e, '"')
}
+
+var bytesPool   = sync.Pool{}
+
+func (self *Node) MarshalJSON() ([]byte, error) {
+    buf := newBuffer()
+    err := self.encode(buf)
+    if err != nil {
+        freeBuffer(buf)
+        return nil, err
+    }
+
+    ret := make([]byte, len(*buf))
+    copy(ret, *buf)
+    freeBuffer(buf)
+    return ret, err
+}
+
+func newBuffer() *[]byte {
+    if ret := bytesPool.Get(); ret != nil {
+        return ret.(*[]byte)
+    } else {
+        buf := make([]byte, 0, _MaxBuffer)
+        return &buf
+    }
+}
+
+func freeBuffer(buf *[]byte) {
+    *buf = (*buf)[:0]
+    bytesPool.Put(buf)
+}
+
+func (self *Node) encode(buf *[]byte) error {
+    if self.IsRaw() {
+        return self.encodeRaw(buf)
+    }
+    switch self.Type() {
+        case V_NONE  : return ErrNotExist
+        case V_ERROR : return self.Check()
+        case V_NULL  : return self.encodeNull(buf)
+        case V_TRUE  : return self.encodeTrue(buf)
+        case V_FALSE : return self.encodeFalse(buf)
+        case V_ARRAY : return self.encodeArray(buf)
+        case V_OBJECT: return self.encodeObject(buf)
+        case V_STRING: return self.encodeString(buf)
+        case V_NUMBER: return self.encodeNumber(buf)
+        case V_ANY   : return self.encodeInterface(buf)
+        default      : return ErrUnsupportType 
+    }
+}
+
+func (self *Node) encodeRaw(buf *[]byte) error {
+    raw, err := self.Raw()
+    if err != nil {
+        return err
+    }
+    *buf = append(*buf, raw...)
+    return nil
+}
+
// encodeNull appends the literal `null` (bytesNull, defined elsewhere).
func (self *Node) encodeNull(buf *[]byte) error {
    *buf = append(*buf, bytesNull...)
    return nil
}

// encodeTrue appends the literal `true`.
func (self *Node) encodeTrue(buf *[]byte) error {
    *buf = append(*buf, bytesTrue...)
    return nil
}

// encodeFalse appends the literal `false`.
func (self *Node) encodeFalse(buf *[]byte) error {
    *buf = append(*buf, bytesFalse...)
    return nil
}
+
// encodeNumber appends the number's textual representation verbatim.
func (self *Node) encodeNumber(buf *[]byte) error {
    str := self.toString()
    *buf = append(*buf, str...)
    return nil
}

// encodeString appends the JSON-quoted string value. Empty strings take a
// fast path that emits `""` directly; otherwise the text is escaped via
// quote (defined elsewhere in this package).
func (self *Node) encodeString(buf *[]byte) error {
    if self.l == 0 {
        *buf = append(*buf, '"', '"')
        return nil
    }

    quote(buf, self.toString())
    return nil
}
+
+func (self *Node) encodeArray(buf *[]byte) error {
+    if self.isLazy() {
+        if err := self.skipAllIndex(); err != nil {
+            return err
+        }
+    }
+
+    nb := self.len()
+    if nb == 0 {
+        *buf = append(*buf, bytesArray...)
+        return nil
+    }
+    
+    *buf = append(*buf, '[')
+
+    var started bool
+    for i := 0; i < nb; i++ {
+        n := self.nodeAt(i)
+        if !n.Exists() {
+            continue
+        }
+        if started {
+            *buf = append(*buf, ',')
+        }
+        started = true
+        if err := n.encode(buf); err != nil {
+            return err
+        }
+    }
+
+    *buf = append(*buf, ']')
+    return nil
+}
+
// encode appends `"key":value` for this object member to buf.
// NOTE(review): the len(*buf) == 0 fast path emits an EMPTY key (`"":`)
// without consulting self.Key; when called from encodeObject the buffer
// already holds '{' so that branch never fires there — confirm whether any
// other caller relies on it.
func (self *Pair) encode(buf *[]byte) error {
    if len(*buf) == 0 {
        *buf = append(*buf, '"', '"', ':')
        return self.Value.encode(buf)
    }

    quote(buf, self.Key)
    *buf = append(*buf, ':')

    return self.Value.encode(buf)
}
+
+func (self *Node) encodeObject(buf *[]byte) error {
+    if self.isLazy() {
+        if err := self.skipAllKey(); err != nil {
+            return err
+        }
+    }
+    
+    nb := self.len()
+    if nb == 0 {
+        *buf = append(*buf, bytesObject...)
+        return nil
+    }
+    
+    *buf = append(*buf, '{')
+
+    var started bool
+    for i := 0; i < nb; i++ {
+        n := self.pairAt(i)
+        if n == nil || !n.Value.Exists() {
+            continue
+        }
+        if started {
+            *buf = append(*buf, ',')
+        }
+        started = true
+        if err := n.encode(buf); err != nil {
+            return err
+        }
+    }
+
+    *buf = append(*buf, '}')
+    return nil
+}

+ 130 - 0
vendor/github.com/bytedance/sonic/ast/error.go

@@ -0,0 +1,130 @@
+package ast
+
+import (
+    `fmt`
+    `strings`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/native/types`
+)
+
+
// newError builds an error Node: the parsing-error code is stored in l and
// a pointer to the message string in p.
func newError(err types.ParsingError, msg string) *Node {
    return &Node{
        t: V_ERROR,
        l: uint(err),
        p: unsafe.Pointer(&msg),
    }
}
+
+// Error returns error message if the node is invalid
+func (self Node) Error() string {
+    if self.t != V_ERROR {
+        return ""
+    } else {
+        return *(*string)(self.p)
+    } 
+}
+
// newSyntaxError converts a SyntaxError into an error Node, storing the
// parsing-error code in l and the rendered description in p.
func newSyntaxError(err SyntaxError) *Node {
    msg := err.Description()
    return &Node{
        t: V_ERROR,
        l: uint(err.Code),
        p: unsafe.Pointer(&msg),
    }
}

// syntaxError builds a SyntaxError at the parser's current position.
func (self *Parser) syntaxError(err types.ParsingError) SyntaxError {
    return SyntaxError{
        Pos : self.p,
        Src : self.s,
        Code: err,
    }
}
+
+func unwrapError(err error) *Node {
+    if se, ok := err.(*Node); ok {
+        return se
+    }else if sse, ok := err.(Node); ok {
+        return &sse
+    } else {
+        msg := err.Error()
+        return &Node{
+            t: V_ERROR,
+            p: unsafe.Pointer(&msg),
+        }
+    }
+}
+
// SyntaxError describes a JSON parsing failure: the byte offset (Pos)
// within the source text (Src), the machine-readable Code, and an optional
// Msg that overrides the code's default text in Message().
type SyntaxError struct {
    Pos  int
    Src  string
    Code types.ParsingError
    Msg  string
}
+
// Error implements the error interface; the description is quoted so the
// embedded newlines stay on one log line.
func (self SyntaxError) Error() string {
    return fmt.Sprintf("%q", self.Description())
}

// Description returns the full human-readable report, including a source
// excerpt with a caret marking the error position (see description).
func (self SyntaxError) Description() string {
    return "Syntax error " + self.description()
}
+
// description renders a context window of up to 16 bytes on either side of
// the error position, followed by a dotted caret line pointing at the
// offending byte. The window is clamped to the source bounds while keeping
// the caret aligned.
func (self SyntaxError) description() string {
    i := 16
    p := self.Pos - i
    q := self.Pos + i

    /* check for empty source */
    if self.Src == "" {
        return fmt.Sprintf("no sources available: %#v", self)
    }

    /* prevent slicing before the beginning */
    if p < 0 {
        p, q, i = 0, q - p, i + p
    }

    /* prevent slicing beyond the end */
    if n := len(self.Src); q > n {
        n = q - n
        q = len(self.Src)

        /* move the left bound if possible */
        if p > n {
            i += n
            p -= n
        }
    }

    /* left and right length */
    x := clamp_zero(i)
    y := clamp_zero(q - p - i - 1)

    /* compose the error description */
    return fmt.Sprintf(
        "at index %d: %s\n\n\t%s\n\t%s^%s\n",
        self.Pos,
        self.Message(),
        self.Src[p:q],
        strings.Repeat(".", x),
        strings.Repeat(".", y),
    )
}
+
+func (self SyntaxError) Message() string {
+    if self.Msg == "" {
+        return self.Code.Message()
+    }
+    return self.Msg
+}
+
// clamp_zero floors v at zero: negative inputs map to 0, everything else
// passes through unchanged.
func clamp_zero(v int) int {
    if v >= 0 {
        return v
    }
    return 0
}

+ 203 - 0
vendor/github.com/bytedance/sonic/ast/iterator.go

@@ -0,0 +1,203 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `fmt`
+
+    `github.com/bytedance/sonic/internal/native/types`
+)
+
// Pair is one key/value member of a JSON object node.
type Pair struct {
    Key   string
    Value Node
}
+
// Values returns iterator for array's children traversal
func (self *Node) Values() (ListIterator, error) {
    if err := self.should(types.V_ARRAY, "an array"); err != nil {
        return ListIterator{}, err
    }
    return self.values(), nil
}

// values builds the iterator without type-checking the node.
func (self *Node) values() ListIterator {
    return ListIterator{Iterator{p: self}}
}

// Properties returns iterator for object's children traversal
func (self *Node) Properties() (ObjectIterator, error) {
    if err := self.should(types.V_OBJECT, "an object"); err != nil {
        return ObjectIterator{}, err
    }
    return self.properties(), nil
}

// properties builds the iterator without type-checking the node.
func (self *Node) properties() ObjectIterator {
    return ObjectIterator{Iterator{p: self}}
}
+
// Iterator is a cursor (i) over the children of a container node (p).
type Iterator struct {
    i int
    p *Node
}

// Pos returns the cursor's current child index.
func (self *Iterator) Pos() int {
    return self.i
}

// Len returns the number of children parsed so far.
func (self *Iterator) Len() int {
    return self.p.len()
}
+
// HasNext reports if it is the end of iteration or has error.
// For fully parsed containers this is a bounds check; for lazy containers
// it advances the underlying parse by one element (skipNextNode /
// skipNextPair are defined elsewhere — presumably they parse and append
// the next child on demand).
func (self *Iterator) HasNext() bool {
    if !self.p.isLazy() {
        return self.p.Valid() && self.i < self.p.len()
    } else if self.p.t == _V_ARRAY_LAZY {
        return self.p.skipNextNode().Valid()
    } else if self.p.t == _V_OBJECT_LAZY {
        pair := self.p.skipNextPair()
        if pair == nil {
            return false
        }
        return pair.Value.Valid()
    }
    return false
}
+
// ListIterator is specialized iterator for V_ARRAY
type ListIterator struct {
    Iterator
}

// ObjectIterator is specialized iterator for V_OBJECT
type ObjectIterator struct {
    Iterator
}
+
+func (self *ListIterator) next() *Node {
+next_start:
+    if !self.HasNext() {
+        return nil
+    } else {
+        n := self.p.nodeAt(self.i)
+        self.i++
+        if !n.Exists() {
+            goto next_start
+        }
+        return n
+    }
+}
+
+// Next scans through children of underlying V_ARRAY, 
+// copies each child to v, and returns .HasNext().
+func (self *ListIterator) Next(v *Node) bool {
+    n := self.next()
+    if n == nil {
+        return false
+    }
+    *v = *n
+    return true
+}
+
+func (self *ObjectIterator) next() *Pair {
+next_start:
+    if !self.HasNext() {
+        return nil
+    } else {
+        n := self.p.pairAt(self.i)
+        self.i++
+        if n == nil || !n.Value.Exists() {
+            goto next_start
+        }
+        return n
+    }
+}
+
+// Next scans through children of underlying V_OBJECT, 
+// copies each child to v, and returns .HasNext().
+func (self *ObjectIterator) Next(p *Pair) bool {
+    n := self.next()
+    if n == nil {
+        return false
+    }
+    *p = *n
+    return true
+}
+
// Sequence represents the scanning path of single-layer nodes.
// Index is the value's order within its parent (both V_ARRAY and V_OBJECT).
// Key is the member name for V_OBJECT parents; otherwise it is nil.
type Sequence struct {
    Index int
    Key *string
    // Level int
}

// String renders the sequence as `Sequence(<index>, "<key>")`; a nil key
// prints as the empty string.
func (s Sequence) String() string {
    key := ""
    if s.Key != nil {
        key = *s.Key
    }
    return fmt.Sprintf("Sequence(%d, %q)", s.Index, key)
}
+
// Scanner is the callback for ForEach; returning false stops the walk.
type Scanner func(path Sequence, node *Node) bool

// ForEach scans one V_OBJECT node's children from JSON head to tail,
// and pass the Sequence and Node of corresponding JSON value.
//
// Especially, if the node is not V_ARRAY or V_OBJECT,
// the node itself will be returned and Sequence.Index == -1.
//
// NOTICE: An unset node WON'T trigger sc, but its index still counts into Path.Index
func (self *Node) ForEach(sc Scanner) error {
    switch self.itype() {
    case types.V_ARRAY:
        iter, err := self.Values()
        if err != nil {
            return err
        }
        v := iter.next()
        for v != nil {
            // iter.i was already advanced past v, hence i-1
            if !sc(Sequence{iter.i-1, nil}, v) {
                return nil
            }
            v = iter.next()
        }
    case types.V_OBJECT:
        iter, err := self.Properties()
        if err != nil {
            return err
        }
        v := iter.next()
        for v != nil {
            if !sc(Sequence{iter.i-1, &v.Key}, &v.Value) {
                return nil
            }
            v = iter.next()
        }
    default:
        // scalar: invalid nodes are returned as the error (*Node implements
        // error); otherwise the node itself is visited with Index == -1
        if self.Check() != nil {
            return self
        }
        sc(Sequence{-1, nil}, self)
    }
    return nil
}

+ 1824 - 0
vendor/github.com/bytedance/sonic/ast/node.go

@@ -0,0 +1,1824 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `encoding/json`
+    `fmt`
+    `strconv`
+    `unsafe`
+    
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+)
+
// Internal value-type flags: _V_LAZY marks containers whose children are
// parsed on demand, _V_RAW marks nodes still holding unparsed JSON text,
// and the _MASK_* values strip those flags to recover the base type.
const (
    _V_NONE         types.ValueType = 0
    _V_NODE_BASE    types.ValueType = 1 << 5
    _V_LAZY         types.ValueType = 1 << 7
    _V_RAW          types.ValueType = 1 << 8
    _V_NUMBER                       = _V_NODE_BASE + 1
    _V_ANY                          = _V_NODE_BASE + 2
    _V_ARRAY_LAZY                   = _V_LAZY | types.V_ARRAY
    _V_OBJECT_LAZY                  = _V_LAZY | types.V_OBJECT
    _MASK_LAZY                      = _V_LAZY - 1
    _MASK_RAW                       = _V_RAW - 1
)

// Public node type codes; where applicable they mirror types.ValueType.
const (
    V_NONE   = 0
    V_ERROR  = 1
    V_NULL   = int(types.V_NULL)
    V_TRUE   = int(types.V_TRUE)
    V_FALSE  = int(types.V_FALSE)
    V_ARRAY  = int(types.V_ARRAY)
    V_OBJECT = int(types.V_OBJECT)
    V_STRING = int(types.V_STRING)
    V_NUMBER = int(_V_NUMBER)
    V_ANY    = int(_V_ANY)
)
+
// Node is a handle to one JSON value: t encodes the value type plus the
// lazy/raw flags, l is a length or error code depending on the type (see
// len/newError), and p points at the type-specific payload.
// NOTE(review): the exact payload layout per type is established elsewhere
// in this package — do not assume beyond what the accessors show.
type Node struct {
    t types.ValueType
    l uint
    p unsafe.Pointer
}
+
// UnmarshalJSON is just an adapter to json.Unmarshaler.
// If you want better performance, use Searcher.GetByPath() directly
// (NewRaw, defined elsewhere, stores the raw text and defers parsing;
// Check only validates the node itself).
func (self *Node) UnmarshalJSON(data []byte) (err error) {
    *self = NewRaw(string(data))
    return self.Check()
}
+
/** Node Type Accessor **/

// Type returns json type represented by the node
// It will be one of belows:
//    V_NONE   = 0 (empty node, key not exists)
//    V_ERROR  = 1 (error node)
//    V_NULL   = 2 (json value `null`, key exists)
//    V_TRUE   = 3 (json value `true`)
//    V_FALSE  = 4 (json value `false`)
//    V_ARRAY  = 5 (json value array)
//    V_OBJECT = 6 (json value object)
//    V_STRING = 7 (json value string)
//    V_NUMBER = 33 (json value number )
//    V_ANY    = 34 (golang interface{})
//
// The lazy and raw flag bits are masked off before returning.
func (self Node) Type() int {
    return int(self.t & _MASK_LAZY & _MASK_RAW)
}

// itype is Type without the int conversion, keeping types.ValueType.
func (self Node) itype() types.ValueType {
    return self.t & _MASK_LAZY & _MASK_RAW
}
+
+// Exists returns false only if the self is nil or empty node V_NONE
+func (self *Node) Exists() bool {
+    return self.Valid() && self.t != _V_NONE
+}
+
+// Valid reports if self is NOT V_ERROR or nil
+func (self *Node) Valid() bool {
+    if self == nil {
+        return false
+    }
+    return self.t != V_ERROR
+}
+
// Check checks if the node itself is valid, and return:
//   - ErrNotExist If the node is nil
//   - Its underlying error If the node is V_ERROR
//
// Returning self works because *Node implements the error interface
// (see Error()).
func (self *Node)  Check() error {
    if self == nil {
        return ErrNotExist
    } else if self.t != V_ERROR {
        return nil
    } else {
        return self
    }
}
+
// IsRaw returns true if node's underlying value is raw json
func (self Node) IsRaw() bool {
    return self.t&_V_RAW != 0
}

// isLazy reports whether the node is a container whose children are parsed
// on demand (_V_LAZY flag set).
func (self *Node) isLazy() bool {
    return self != nil && self.t&_V_LAZY != 0
}

// isAny reports whether the node wraps a Go interface{} value.
func (self *Node) isAny() bool {
    return self != nil && self.t == _V_ANY
}
+
/** Simple Value Methods **/

// Raw returns json representation of the node.
// Already-parsed nodes are re-serialized via MarshalJSON (rt.Mem2Str is
// assumed to be a zero-copy []byte→string conversion — defined elsewhere).
func (self *Node) Raw() (string, error) {
    if self == nil {
        return "", ErrNotExist
    }
    if !self.IsRaw() {
        buf, err := self.MarshalJSON()
        return rt.Mem2Str(buf), err
    }
    return self.toString(), nil
}

// checkRaw validates the node and, if it still holds raw JSON text,
// materializes it via parseRaw (defined elsewhere) before re-checking.
func (self *Node) checkRaw() error {
    if err := self.Check(); err != nil {
        return err
    }
    if self.IsRaw() {
        self.parseRaw(false)
    }
    return self.Check()
}
+
// Bool returns bool value represented by this node, 
// including types.V_TRUE|V_FALSE|V_NUMBER|V_STRING|V_ANY|V_NULL, 
// V_NONE will return error.
// Numbers map to (value != 0); strings go through strconv.ParseBool.
func (self *Node) Bool() (bool, error) {
    if err := self.checkRaw(); err != nil {
        return false, err
    }
    switch self.t {
        case types.V_TRUE  : return true , nil
        case types.V_FALSE : return false, nil
        case types.V_NULL  : return false, nil
        case _V_NUMBER     : 
            // try integer first, then fall back to float
            if i, err := self.toInt64(); err == nil {
                return i != 0, nil
            } else if f, err := self.toFloat64(); err == nil {
                return f != 0, nil
            } else {
                return false, err
            }
        case types.V_STRING: return strconv.ParseBool(self.toString())
        case _V_ANY        :   
            any := self.packAny()     
            switch v := any.(type) {
                case bool   : return v, nil
                case int    : return v != 0, nil
                case int8   : return v != 0, nil
                case int16  : return v != 0, nil
                case int32  : return v != 0, nil
                case int64  : return v != 0, nil
                case uint   : return v != 0, nil
                case uint8  : return v != 0, nil
                case uint16 : return v != 0, nil
                case uint32 : return v != 0, nil
                case uint64 : return v != 0, nil
                case float32: return v != 0, nil
                case float64: return v != 0, nil
                case string : return strconv.ParseBool(v)
                case json.Number: 
                    if i, err := v.Int64(); err == nil {
                        return i != 0, nil
                    } else if f, err := v.Float64(); err == nil {
                        return f != 0, nil
                    } else {
                        return false, err
                    }
                default: return false, ErrUnsupportType
            }
        default            : return false, ErrUnsupportType
    }
}
+
// Int64 casts the node to int64 value, 
// including V_NUMBER|V_TRUE|V_FALSE|V_ANY|V_STRING,
// V_NONE it will return error.
// Floating-point inputs are truncated via int64(f).
func (self *Node) Int64() (int64, error) {
    if err := self.checkRaw(); err != nil {
        return 0, err
    }
    switch self.t {
        case _V_NUMBER, types.V_STRING :
            // integer parse first, then float with truncation
            if i, err := self.toInt64(); err == nil {
                return i, nil
            } else if f, err := self.toFloat64(); err == nil {
                return int64(f), nil
            } else {
                return 0, err
            }
        case types.V_TRUE     : return 1, nil
        case types.V_FALSE    : return 0, nil
        case types.V_NULL     : return 0, nil
        case _V_ANY           :  
            any := self.packAny()
            switch v := any.(type) {
                case bool   : if v { return 1, nil } else { return 0, nil }
                case int    : return int64(v), nil
                case int8   : return int64(v), nil
                case int16  : return int64(v), nil
                case int32  : return int64(v), nil
                case int64  : return int64(v), nil
                case uint   : return int64(v), nil
                case uint8  : return int64(v), nil
                case uint16 : return int64(v), nil
                case uint32 : return int64(v), nil
                case uint64 : return int64(v), nil
                case float32: return int64(v), nil
                case float64: return int64(v), nil
                case string : 
                    if i, err := strconv.ParseInt(v, 10, 64); err == nil {
                        return i, nil
                    } else if f, err := strconv.ParseFloat(v, 64); err == nil {
                        return int64(f), nil
                    } else {
                        return 0, err
                    }
                case json.Number: 
                    if i, err := v.Int64(); err == nil {
                        return i, nil
                    } else if f, err := v.Float64(); err == nil {
                        return int64(f), nil
                    } else {
                        return 0, err
                    }
                default: return 0, ErrUnsupportType
            }
        default               : return 0, ErrUnsupportType
    }
}
+
// StrictInt64 exports underlying int64 value, including V_NUMBER, V_ANY.
// Unlike Int64 it performs no bool/string/float coercion.
func (self *Node) StrictInt64() (int64, error) {
    if err := self.checkRaw(); err != nil {
        return 0, err
    }
    switch self.t {
        case _V_NUMBER        : return self.toInt64()
        case _V_ANY           :  
            any := self.packAny()
            switch v := any.(type) {
                case int   : return int64(v), nil
                case int8  : return int64(v), nil
                case int16 : return int64(v), nil
                case int32 : return int64(v), nil
                case int64 : return int64(v), nil
                case uint  : return int64(v), nil
                case uint8 : return int64(v), nil
                case uint16: return int64(v), nil
                case uint32: return int64(v), nil
                case uint64: return int64(v), nil
                case json.Number: 
                    if i, err := v.Int64(); err == nil {
                        return i, nil
                    } else {
                        return 0, err
                    }
                default: return 0, ErrUnsupportType
            }
        default               : return 0, ErrUnsupportType
    }
}
+
+func castNumber(v bool) json.Number {
+    if v {
+        return json.Number("1")
+    } else {
+        return json.Number("0")
+    }
+}
+
// Number casts node to json.Number, 
// including V_NUMBER|V_TRUE|V_FALSE|V_ANY|V_STRING|V_NULL,
// V_NONE it will return error.
// NOTE(review): for V_ANY numeric values the result is castNumber(v != 0),
// i.e. any non-zero number collapses to "1" — the magnitude is lost.
// Confirm this is intended before relying on it.
func (self *Node) Number() (json.Number, error) {
    if err := self.checkRaw(); err != nil {
        return json.Number(""), err
    }
    switch self.t {
        case _V_NUMBER        : return self.toNumber(), nil
        case types.V_STRING : 
            // accept the string only if it parses as int or float
            if _, err := self.toInt64(); err == nil {
                return self.toNumber(), nil
            } else if _, err := self.toFloat64(); err == nil {
                return self.toNumber(), nil
            } else {
                return json.Number(""), err
            }
        case types.V_TRUE     : return json.Number("1"), nil
        case types.V_FALSE    : return json.Number("0"), nil
        case types.V_NULL     : return json.Number("0"), nil
        case _V_ANY           :        
            any := self.packAny()
            switch v := any.(type) {
                case bool   : return castNumber(v), nil
                case int    : return castNumber(v != 0), nil
                case int8   : return castNumber(v != 0), nil
                case int16  : return castNumber(v != 0), nil
                case int32  : return castNumber(v != 0), nil
                case int64  : return castNumber(v != 0), nil
                case uint   : return castNumber(v != 0), nil
                case uint8  : return castNumber(v != 0), nil
                case uint16 : return castNumber(v != 0), nil
                case uint32 : return castNumber(v != 0), nil
                case uint64 : return castNumber(v != 0), nil
                case float32: return castNumber(v != 0), nil
                case float64: return castNumber(v != 0), nil
                case string : 
                    if _, err := strconv.ParseFloat(v, 64); err == nil {
                        return json.Number(v), nil
                    } else {
                        return json.Number(""), err
                    }
                case json.Number: return v, nil
                default: return json.Number(""), ErrUnsupportType
            }
        default               : return json.Number(""), ErrUnsupportType
    }
}
+
// StrictNumber exports the underlying json.Number, including V_NUMBER and
// V_ANY holding a json.Number; everything else returns ErrUnsupportType.
func (self *Node) StrictNumber() (json.Number, error) {
    if err := self.checkRaw(); err != nil {
        return json.Number(""), err
    }
    switch self.t {
        case _V_NUMBER        : return self.toNumber()  , nil
        case _V_ANY        :        
            if v, ok := self.packAny().(json.Number); ok {
                return v, nil
            } else {
                return json.Number(""), ErrUnsupportType
            }
        default               : return json.Number(""), ErrUnsupportType
    }
}
+
+// String cast node to string, 
+// including V_NUMBER|V_TRUE|V_FALSE|V_ANY|V_STRING|V_NULL,
+// V_NONE it will return error
+func (self *Node) String() (string, error) {
+    if err := self.checkRaw(); err != nil {
+        return "", err
+    }
+    switch self.t {
+        case types.V_NULL    : return "" , nil
+        case types.V_TRUE    : return "true" , nil
+        case types.V_FALSE   : return "false", nil
+        case types.V_STRING, _V_NUMBER  : return self.toString(), nil
+        case _V_ANY          :        
+        any := self.packAny()
+        switch v := any.(type) {
+            case bool   : return strconv.FormatBool(v), nil
+            case int    : return strconv.Itoa(v), nil
+            case int8   : return strconv.Itoa(int(v)), nil
+            case int16  : return strconv.Itoa(int(v)), nil
+            case int32  : return strconv.Itoa(int(v)), nil
+            case int64  : return strconv.Itoa(int(v)), nil
+            case uint   : return strconv.Itoa(int(v)), nil
+            case uint8  : return strconv.Itoa(int(v)), nil
+            case uint16 : return strconv.Itoa(int(v)), nil
+            case uint32 : return strconv.Itoa(int(v)), nil
+            case uint64 : return strconv.Itoa(int(v)), nil
+            case float32: return strconv.FormatFloat(float64(v), 'g', -1, 64), nil
+            case float64: return strconv.FormatFloat(float64(v), 'g', -1, 64), nil
+            case string : return v, nil 
+            case json.Number: return v.String(), nil
+            default: return "", ErrUnsupportType
+        }
+        default              : return ""     , ErrUnsupportType
+    }
+}
+
// StrictString returns string value (unescaped), including V_STRING, V_ANY of string.
// In other cases, it will return ErrUnsupportType.
func (self *Node) StrictString() (string, error) {
    if err := self.checkRaw(); err != nil {
        return "", err
    }
    switch self.t {
        case types.V_STRING  : return self.toString(), nil
        case _V_ANY          :        
            if v, ok := self.packAny().(string); ok {
                return v, nil
            } else {
                return "", ErrUnsupportType
            }
        default              : return "", ErrUnsupportType
    }
}
+
// Float64 casts the node to float64, 
// including V_NUMBER|V_TRUE|V_FALSE|V_ANY|V_STRING|V_NULL,
// V_NONE it will return error
func (self *Node) Float64() (float64, error) {
    if err := self.checkRaw(); err != nil {
        return 0.0, err
    }
    switch self.t {
        case _V_NUMBER, types.V_STRING : return self.toFloat64()
        case types.V_TRUE    : return 1.0, nil
        case types.V_FALSE   : return 0.0, nil
        case types.V_NULL    : return 0.0, nil
        case _V_ANY          :        
            any := self.packAny()
            switch v := any.(type) {
                case bool    : 
                    if v {
                        return 1.0, nil
                    } else {
                        return 0.0, nil
                    }
                case int    : return float64(v), nil
                case int8   : return float64(v), nil
                case int16  : return float64(v), nil
                case int32  : return float64(v), nil
                case int64  : return float64(v), nil
                case uint   : return float64(v), nil
                case uint8  : return float64(v), nil
                case uint16 : return float64(v), nil
                case uint32 : return float64(v), nil
                case uint64 : return float64(v), nil
                case float32: return float64(v), nil
                case float64: return float64(v), nil
                case string : 
                    if f, err := strconv.ParseFloat(v, 64); err == nil {
                        return float64(f), nil
                    } else {
                        return 0, err
                    }
                case json.Number: 
                    if f, err := v.Float64(); err == nil {
                        return float64(f), nil
                    } else {
                        return 0, err
                    }
                default     : return 0, ErrUnsupportType
            }
        default             : return 0.0, ErrUnsupportType
    }
}
+
// StrictFloat64 exports the underlying float value, including V_NUMBER and
// V_ANY holding float32/float64; no bool/string/int coercion is applied.
func (self *Node) StrictFloat64() (float64, error) {
    if err := self.checkRaw(); err != nil {
        return 0.0, err
    }
    switch self.t {
        case _V_NUMBER       : return self.toFloat64()
        case _V_ANY        :        
            any := self.packAny()
            switch v := any.(type) {
                case float32 : return float64(v), nil
                case float64 : return float64(v), nil
                default      : return 0, ErrUnsupportType
            }
        default              : return 0.0, ErrUnsupportType
    }
}
+
+/** Sequential Value Methods **/
+
+// Len returns children count of an array|object|string node.
+// For V_NONE and V_NULL it returns 0; other types return ErrUnsupportType.
+// WARN: For partially loaded node, it also works but only counts the parsed children
+func (self *Node) Len() (int, error) {
+    if err := self.checkRaw(); err != nil {
+        return 0, err
+    }
+    if self.t == types.V_ARRAY || self.t == types.V_OBJECT || self.t == _V_ARRAY_LAZY || self.t == _V_OBJECT_LAZY || self.t == types.V_STRING {
+        return int(self.l), nil
+    } else if self.t == _V_NONE || self.t == types.V_NULL {
+        return 0, nil
+    } else {
+        return 0, ErrUnsupportType
+    }
+}
+
+// len returns the parsed-children count (self.l) without any type checking.
+func (self Node) len() int {
+    return int(self.l)
+}
+
+// Cap returns the allocated capacity for children of an array|object node,
+// including lazily parsed ones; V_NONE/V_NULL report 0.
+func (self *Node) Cap() (int, error) {
+    if err := self.checkRaw(); err != nil {
+        return 0, err
+    }
+    switch self.t {
+    case types.V_ARRAY: return (*linkedNodes)(self.p).Cap(), nil
+    case types.V_OBJECT: return (*linkedPairs)(self.p).Cap(), nil
+    case _V_ARRAY_LAZY: return (*parseArrayStack)(self.p).v.Cap(), nil
+    case _V_OBJECT_LAZY: return (*parseObjectStack)(self.p).v.Cap(), nil
+    case _V_NONE, types.V_NULL: return 0, nil
+    default: return 0, ErrUnsupportType
+    }
+}
+
+// Set sets the node of given key under self, and reports if the key has existed.
+//
+// If self is V_NONE or V_NULL, it becomes V_OBJECT and sets the node at the key.
+func (self *Node) Set(key string, node Node) (bool, error) {
+    if err := self.Check(); err != nil {
+        return false, err
+    }
+    if err := node.Check(); err != nil {
+        return false, err 
+    }
+    
+    if self.t == _V_NONE || self.t == types.V_NULL {
+        *self = NewObject([]Pair{{key, node}})
+        return false, nil
+    } else if self.itype() != types.V_OBJECT {
+        return false, ErrUnsupportType
+    }
+
+    // Get fully parses children while searching, so self is loaded afterwards
+    p := self.Get(key)
+
+    if !p.Exists() {
+        // self must be fully-loaded here
+        if self.len() == 0 {
+            *self = newObject(new(linkedPairs))
+        }
+        s := (*linkedPairs)(self.p)
+        s.Push(Pair{key, node})
+        self.l++ // keep parsed-children count in sync with the appended pair
+        return false, nil
+
+    } else if err := p.Check(); err != nil {
+        return false, err
+    } 
+
+    // key already present: overwrite the value in place
+    *p = node
+    return true, nil
+}
+
+// SetAny wraps val with V_ANY node, and Set() the node.
+func (self *Node) SetAny(key string, val interface{}) (bool, error) {
+    return self.Set(key, NewAny(val))
+}
+
+// Unset removes (softly) the node of given key under object parent, and reports if the key has existed.
+func (self *Node) Unset(key string) (bool, error) {
+    if err := self.should(types.V_OBJECT, "an object"); err != nil {
+        return false, err
+    }
+    // NOTICE: must get accurate length before deduct
+    if err := self.skipAllKey(); err != nil {
+        return false, err
+    }
+    p, i := self.skipKey(key)
+    if !p.Exists() {
+        return false, nil
+    } else if err := p.Check(); err != nil {
+        return false, err
+    }
+    // soft-delete: the pair slot is zeroed, not compacted away
+    self.removePairAt(i)
+    return true, nil
+}
+
+// SetByIndex sets the node of given index, and reports if the key has existed.
+//
+// The index must be within self's children; otherwise ErrNotExist is returned.
+func (self *Node) SetByIndex(index int, node Node) (bool, error) {
+    if err := self.Check(); err != nil {
+        return false, err 
+    }
+    if err := node.Check(); err != nil {
+        return false, err 
+    }
+
+    // setting index 0 on an empty (none/null) node turns it into a 1-element array
+    if index == 0 && (self.t == _V_NONE || self.t == types.V_NULL) {
+        *self = NewArray([]Node{node})
+        return false, nil
+    }
+
+    p := self.Index(index)
+    if !p.Exists() {
+        return false, ErrNotExist
+    } else if err := p.Check(); err != nil {
+        return false, err
+    }
+
+    *p = node
+    return true, nil
+}
+
+// SetAnyByIndex wraps val with V_ANY node, and SetByIndex() the node.
+func (self *Node) SetAnyByIndex(index int, val interface{}) (bool, error) {
+    return self.SetByIndex(index, NewAny(val))
+}
+
+// UnsetByIndex removes (softly) the node of given index.
+//
+// WARN: this will change address of elements, which is a dangerous action.
+// Use Unset() for object or Pop() for array instead.
+func (self *Node) UnsetByIndex(index int) (bool, error) {
+    if err := self.checkRaw(); err != nil {
+        return false, err
+    }
+
+    var p *Node
+    it := self.itype()
+
+    // fully load children first so indexes and lengths are stable
+    if it == types.V_ARRAY {
+        if err := self.skipAllIndex(); err != nil {
+            return false, err
+        }
+        p = self.nodeAt(index)
+    } else if it == types.V_OBJECT {
+        if err := self.skipAllKey(); err != nil {
+            return false, err
+        }
+        pr := self.pairAt(index)
+        if pr == nil {
+           return false, ErrNotExist
+        }
+        p = &pr.Value
+    } else {
+        return false, ErrUnsupportType
+    }
+
+    if !p.Exists() {
+        return false, ErrNotExist
+    }
+
+    // last elem
+    if index == self.len() - 1 {
+        return true, self.Pop()
+    }
+
+    // not last elem, self.len() change but linked-chunk not change
+    if it == types.V_ARRAY {
+        self.removeNode(index)
+    }else if it == types.V_OBJECT {
+        self.removePair(index)
+    }
+    return true, nil
+}
+
+// Add appends the given node under self.
+//
+// If self is V_NONE or V_NULL, it becomes V_ARRAY and sets the node at index 0.
+func (self *Node) Add(node Node) error {
+    if err := self.Check(); err != nil {
+        return err
+    }
+
+    if self != nil && (self.t == _V_NONE || self.t == types.V_NULL) {
+        *self = NewArray([]Node{node})
+        return nil
+    }
+    if err := self.should(types.V_ARRAY, "an array"); err != nil {
+        return err
+    }
+
+    // unsafeArray fully loads children and returns the backing chunk list
+    s, err := self.unsafeArray()
+    if err != nil {
+        return err
+    }
+
+    // Notice: array won't have unset node in tail
+    s.Push(node)
+    self.l++
+    return nil
+}
+
+// Pop removes the last child of the V_ARRAY or V_OBJECT node.
+// It also discards any unset (soft-removed) trailing slots before the
+// last existing child, keeping the backing storage compact at the tail.
+func (self *Node) Pop() error {
+    if err := self.checkRaw(); err != nil {
+        return err
+    }
+
+    if it := self.itype(); it == types.V_ARRAY {
+        s, err := self.unsafeArray()
+        if err != nil {
+            return err
+        }
+        // remove tail unset nodes
+        for i := s.Len()-1; i >= 0; i-- {
+            if s.At(i).Exists() {
+                s.Pop()
+                self.l--
+                break
+            }
+            s.Pop()
+        }
+
+    } else if it == types.V_OBJECT {
+        s, err := self.unsafeMap()
+        if err != nil {
+            return err
+        }
+        // remove tail unset nodes
+        for i := s.Len()-1; i >= 0; i-- {
+            if p := s.At(i); p != nil && p.Value.Exists() {
+                s.Pop()
+                self.l--
+                break
+            }
+            s.Pop()
+        }
+
+    } else {
+        return ErrUnsupportType
+    }
+
+    return nil
+}
+
+// Move moves the child at src index to dst index,
+// meanwhile slides siblings from src+1 to dst.
+// 
+// WARN: this will change address of elements, which is a dangerous action.
+func (self *Node) Move(dst, src int) error {
+    if err := self.should(types.V_ARRAY, "an array"); err != nil {
+        return err
+    }
+
+    s, err := self.unsafeArray()
+    if err != nil {
+        return err
+    }
+
+    // check if any unset node exists
+    if l :=  s.Len(); self.len() != l {
+        di, si := dst, src
+        // find real pos of src and dst: logical indexes count only existing
+        // nodes, so walk the chunk and translate them to physical positions
+        for i := 0; i < l; i++ {
+            if s.At(i).Exists() {
+                di--
+                si--
+            }
+            if di == -1 {
+                dst = i
+                di--
+            } 
+            if si == -1 {
+                src = i
+                si--
+            }
+            // both translated (-2 marks "already resolved"): stop early
+            if di == -2 && si == -2 {
+                break
+            }
+        }
+    }
+
+    s.MoveOne(src, dst)
+    return nil
+}
+
+// AddAny wraps val with V_ANY node, and Add() the node.
+func (self *Node) AddAny(val interface{}) error {
+    return self.Add(NewAny(val))
+}
+
+// GetByPath loads given path on demands,
+// which only ensure nodes before this path got parsed.
+// Each path element must be an int (array index) or string (object key);
+// any other type panics.
+//
+// Note, the api expects the json is well-formed at least,
+// otherwise it may return unexpected result.
+func (self *Node) GetByPath(path ...interface{}) *Node {
+    if !self.Valid() {
+        return self
+    }
+    var s = self
+    for _, p := range path {
+        switch p := p.(type) {
+        case int:
+            s = s.Index(p)
+            if !s.Valid() {
+                return s
+            }
+        case string:
+            s = s.Get(p)
+            if !s.Valid() {
+                return s
+            }
+        default:
+            panic("path must be either int or string")
+        }
+    }
+    return s
+}
+
+// Get loads given key of an object node on demands.
+// On type mismatch or parse failure it returns an error node (never nil).
+func (self *Node) Get(key string) *Node {
+    if err := self.should(types.V_OBJECT, "an object"); err != nil {
+        return unwrapError(err)
+    }
+    n, _ := self.skipKey(key)
+    return n
+}
+
+// Index indexes node at given idx,
+// node type CAN be either V_OBJECT or V_ARRAY.
+// For objects it returns the idx-th pair's value.
+func (self *Node) Index(idx int) *Node {
+    if err := self.checkRaw(); err != nil {
+        return unwrapError(err)
+    }
+
+    it := self.itype()
+    if it == types.V_ARRAY {
+        return self.skipIndex(idx)
+
+    }else if it == types.V_OBJECT {
+        pr := self.skipIndexPair(idx)
+        if pr == nil {
+           return newError(_ERR_NOT_FOUND, "value not exists")
+        }
+        return &pr.Value
+
+    } else {
+        return newError(_ERR_UNSUPPORT_TYPE, fmt.Sprintf("unsupported type: %v", self.itype()))
+    }
+}
+
+// IndexPair indexes pair at given idx,
+// node type MUST be V_OBJECT; otherwise nil is returned.
+func (self *Node) IndexPair(idx int) *Pair {
+    if err := self.should(types.V_OBJECT, "an object"); err != nil {
+        return nil
+    }
+    return self.skipIndexPair(idx)
+}
+
+// indexOrGet tries the pair at idx first (fast path when the caller's hint
+// is right); if its key differs, it falls back to a search by key.
+func (self *Node) indexOrGet(idx int, key string) (*Node, int) {
+    if err := self.should(types.V_OBJECT, "an object"); err != nil {
+        return unwrapError(err), idx
+    }
+
+    pr := self.skipIndexPair(idx)
+    if pr != nil && pr.Key == key {
+        return &pr.Value, idx
+    }
+
+    return self.skipKey(key)
+}
+
+// IndexOrGet firstly use idx to index a value and check if its key matches.
+// If not, then use the key to search value.
+func (self *Node) IndexOrGet(idx int, key string) *Node {
+    node, _ := self.indexOrGet(idx, key)
+    return node
+}
+
+// IndexOrGetWithIdx attempts to retrieve a node by index and key, returning the node and its correct index.
+// If the key does not match at the given index, it searches by key and returns the node with its updated index.
+func (self *Node) IndexOrGetWithIdx(idx int, key string) (*Node, int) {
+    return self.indexOrGet(idx, key)
+}
+
+/** Generic Value Converters **/
+
+// Map loads all keys of an object node into a map[string]interface{}.
+// A V_ANY node must already hold a map[string]interface{} to succeed.
+func (self *Node) Map() (map[string]interface{}, error) {
+    if self.isAny() {
+        any := self.packAny()
+        if v, ok := any.(map[string]interface{}); ok {
+            return v, nil
+        } else {
+            return nil, ErrUnsupportType
+        }
+    }
+    if err := self.should(types.V_OBJECT, "an object"); err != nil {
+        return nil, err
+    }
+    if err := self.loadAllKey(); err != nil {
+        return nil, err
+    }
+    return self.toGenericObject()
+}
+
+// MapUseNumber loads all keys of an object node, with numeric nodes cast to json.Number.
+func (self *Node) MapUseNumber() (map[string]interface{}, error) {
+    if self.isAny() {
+        any := self.packAny()
+        if v, ok := any.(map[string]interface{}); ok {
+            return v, nil
+        } else {
+            return nil, ErrUnsupportType
+        }
+    }
+    if err := self.should(types.V_OBJECT, "an object"); err != nil {
+        return nil, err
+    }
+    if err := self.loadAllKey(); err != nil {
+        return nil, err
+    }
+    return self.toGenericObjectUseNumber()
+}
+
+// MapUseNode scans both parsed and non-parsed children nodes, 
+// and map them by their keys.
+func (self *Node) MapUseNode() (map[string]Node, error) {
+    if self.isAny() {
+        any := self.packAny()
+        if v, ok := any.(map[string]Node); ok {
+            return v, nil
+        } else {
+            return nil, ErrUnsupportType
+        }
+    }
+    if err := self.should(types.V_OBJECT, "an object"); err != nil {
+        return nil, err
+    }
+    // skipAllKey (not loadAllKey) keeps child values raw/unparsed
+    if err := self.skipAllKey(); err != nil {
+        return nil, err
+    }
+    return self.toGenericObjectUseNode()
+}
+
+// MapUnsafe exports the underlying pointer to its children map
+// WARN: don't use it unless you know what you are doing
+//
+// Deprecated: this API now returns copied nodes instead of direct references,
+// which has no difference with MapUseNode
+// func (self *Node) UnsafeMap() ([]Pair, error) {
+//     if err := self.should(types.V_OBJECT, "an object"); err != nil {
+//         return nil, err
+//     }
+//     if err := self.skipAllKey(); err != nil {
+//         return nil, err
+//     }
+//     return self.toGenericObjectUsePair()
+// }
+
+// unsafeMap fully skips (parses boundaries of) all keys, then returns the
+// node's backing linkedPairs, materializing an empty one if p is nil.
+//go:nocheckptr
+func (self *Node) unsafeMap() (*linkedPairs, error) {
+    if err := self.skipAllKey(); err != nil {
+        return nil, err
+    }
+    if self.p == nil {
+        *self = newObject(new(linkedPairs))
+    }
+    return (*linkedPairs)(self.p), nil
+}
+
+// SortKeys sorts children of a V_OBJECT node in ascending key-order.
+// If recurse is true, it recursively sorts children's children as long as a V_OBJECT node is found.
+// Arrays are traversed so that any nested objects get sorted too.
+func (self *Node) SortKeys(recurse bool) error {
+    // check raw node first
+    if err := self.checkRaw(); err != nil {
+        return err
+    }
+    if self.itype() == types.V_OBJECT {
+        return self.sortKeys(recurse)
+    } else if self.itype() == types.V_ARRAY {
+        var err error
+        err2 := self.ForEach(func(path Sequence, node *Node) bool {
+            it := node.itype()
+            if it == types.V_ARRAY || it == types.V_OBJECT {
+                err = node.SortKeys(recurse)
+                if err != nil {
+                    return false
+                }
+            }
+            return true
+        })
+        if err != nil {
+            return err
+        }
+        return err2
+    } else {
+        // scalar types: nothing to sort
+        return nil
+    }
+}
+
+// sortKeys sorts this object's pairs in place and, when recurse is set,
+// walks into nested objects/arrays via a self-referencing Scanner.
+func (self *Node) sortKeys(recurse bool) (err error) {
+    // check raw node first
+    if err := self.checkRaw(); err != nil {
+        return err
+    }
+    ps, err := self.unsafeMap()
+    if err != nil {
+        return err
+    }
+    ps.Sort()
+    if recurse {
+        var sc Scanner
+        sc = func(path Sequence, node *Node) bool {
+            if node.itype() == types.V_OBJECT {
+                if err := node.sortKeys(recurse); err != nil {
+                    return false
+                }
+            }
+            if node.itype() == types.V_ARRAY {
+                if err := node.ForEach(sc); err != nil {
+                    return false
+                }
+            }
+            return true
+        }
+        if err := self.ForEach(sc); err != nil {
+            return err
+        }
+    }
+    return nil
+}
+
+// Array loads all indexes of an array node into a []interface{}.
+// A V_ANY node must already hold a []interface{} to succeed.
+func (self *Node) Array() ([]interface{}, error) {
+    if self.isAny() {
+        any := self.packAny()
+        if v, ok := any.([]interface{}); ok {
+            return v, nil
+        } else {
+            return nil, ErrUnsupportType
+        }
+    }
+    if err := self.should(types.V_ARRAY, "an array"); err != nil {
+        return nil, err
+    }
+    if err := self.loadAllIndex(); err != nil {
+        return nil, err
+    }
+    return self.toGenericArray()
+}
+
+// ArrayUseNumber loads all indexes of an array node, with numeric nodes cast to json.Number.
+func (self *Node) ArrayUseNumber() ([]interface{}, error) {
+    if self.isAny() {
+        any := self.packAny()
+        if v, ok := any.([]interface{}); ok {
+            return v, nil
+        } else {
+            return nil, ErrUnsupportType
+        }
+    }
+    if err := self.should(types.V_ARRAY, "an array"); err != nil {
+        return nil, err
+    }
+    if err := self.loadAllIndex(); err != nil {
+        return nil, err
+    }
+    return self.toGenericArrayUseNumber()
+}
+
+// ArrayUseNode copies both parsed and non-parsed children nodes, 
+// and indexes them by original order.
+func (self *Node) ArrayUseNode() ([]Node, error) {
+    if self.isAny() {
+        any := self.packAny()
+        if v, ok := any.([]Node); ok {
+            return v, nil
+        } else {
+            return nil, ErrUnsupportType
+        }
+    }
+    if err := self.should(types.V_ARRAY, "an array"); err != nil {
+        return nil, err
+    }
+    // skipAllIndex (not loadAllIndex) keeps child values raw/unparsed
+    if err := self.skipAllIndex(); err != nil {
+        return nil, err
+    }
+    return self.toGenericArrayUseNode()
+}
+
+// ArrayUnsafe exports the underlying pointer to its children array
+// WARN: don't use it unless you know what you are doing
+//
+// Deprecated: this API now returns copied nodes instead of direct references,
+// which has no difference with ArrayUseNode
+// func (self *Node) UnsafeArray() ([]Node, error) {
+//     if err := self.should(types.V_ARRAY, "an array"); err != nil {
+//         return nil, err
+//     }
+//     if err := self.skipAllIndex(); err != nil {
+//         return nil, err
+//     }
+//     return self.toGenericArrayUseNode()
+// }
+
+// unsafeArray fully skips all indexes, then returns the node's backing
+// linkedNodes, materializing an empty one if p is nil.
+func (self *Node) unsafeArray() (*linkedNodes, error) {
+    if err := self.skipAllIndex(); err != nil {
+        return nil, err
+    }
+    if self.p == nil {
+        *self = newArray(new(linkedNodes))
+    }
+    return (*linkedNodes)(self.p), nil
+}
+
+// Interface loads all children under all paths from this node,
+// and converts itself as generic type.
+// WARN: all numeric nodes are cast to float64
+func (self *Node) Interface() (interface{}, error) {
+    if err := self.checkRaw(); err != nil {
+        return nil, err
+    }
+    switch self.t {
+        case V_ERROR         : return nil, self.Check()
+        case types.V_NULL    : return nil, nil
+        case types.V_TRUE    : return true, nil
+        case types.V_FALSE   : return false, nil
+        case types.V_ARRAY   : return self.toGenericArray()
+        case types.V_OBJECT  : return self.toGenericObject()
+        case types.V_STRING  : return self.toString(), nil
+        case _V_NUMBER       : 
+            v, err := self.toFloat64()
+            if err != nil {
+                return nil, err
+            }
+            return v, nil
+        case _V_ARRAY_LAZY   :
+            if err := self.loadAllIndex(); err != nil {
+                return nil, err
+            }
+            return self.toGenericArray()
+        case _V_OBJECT_LAZY  :
+            if err := self.loadAllKey(); err != nil {
+                return nil, err
+            }
+            return self.toGenericObject()
+        case _V_ANY:
+            // a boxed Node/*Node is converted recursively; anything else passes through
+            switch v := self.packAny().(type) {
+                case Node : return v.Interface()
+                case *Node: return v.Interface()
+                default   : return v, nil
+            }
+        default              : return nil,  ErrUnsupportType
+    }
+}
+
+// packAny unboxes the interface{} value stored behind self.p for _V_ANY nodes.
+func (self *Node) packAny() interface{} {
+    return *(*interface{})(self.p)
+}
+
+// InterfaceUseNumber works same with Interface()
+// except numeric nodes are cast to json.Number.
+func (self *Node) InterfaceUseNumber() (interface{}, error) {
+    if err := self.checkRaw(); err != nil {
+        return nil, err
+    }
+    switch self.t {
+        case V_ERROR         : return nil, self.Check()
+        case types.V_NULL    : return nil, nil
+        case types.V_TRUE    : return true, nil
+        case types.V_FALSE   : return false, nil
+        case types.V_ARRAY   : return self.toGenericArrayUseNumber()
+        case types.V_OBJECT  : return self.toGenericObjectUseNumber()
+        case types.V_STRING  : return self.toString(), nil
+        case _V_NUMBER       : return self.toNumber(), nil
+        case _V_ARRAY_LAZY   :
+            if err := self.loadAllIndex(); err != nil {
+                return nil, err
+            }
+            return self.toGenericArrayUseNumber()
+        case _V_OBJECT_LAZY  :
+            if err := self.loadAllKey(); err != nil {
+                return nil, err
+            }
+            return self.toGenericObjectUseNumber()
+        case _V_ANY          : return self.packAny(), nil
+        default              : return nil, ErrUnsupportType
+    }
+}
+
+// InterfaceUseNode clones itself as a new node, 
+// or its children as map[string]Node (or []Node).
+func (self *Node) InterfaceUseNode() (interface{}, error) {
+    if err := self.checkRaw(); err != nil {
+        return nil, err
+    }
+    switch self.t {
+        case types.V_ARRAY   : return self.toGenericArrayUseNode()
+        case types.V_OBJECT  : return self.toGenericObjectUseNode()
+        case _V_ARRAY_LAZY   :
+            if err := self.skipAllIndex(); err != nil {
+                return nil, err
+            }
+            return self.toGenericArrayUseNode()
+        case _V_OBJECT_LAZY  :
+            if err := self.skipAllKey(); err != nil {
+                return nil, err
+            }
+            return self.toGenericObjectUseNode()
+        // scalar types are returned as a value copy of the node itself
+        default              : return *self, self.Check()
+    }
+}
+
+// LoadAll loads all the node's children and children's children as parsed.
+// After calling it, the node can be safely used on concurrency
+func (self *Node) LoadAll() error {
+    if self.IsRaw() {
+        // parseRaw(true) parses the whole raw JSON eagerly (no lazy children)
+        self.parseRaw(true)
+        return self.Check()
+    }
+
+    switch self.itype() {
+    case types.V_ARRAY:
+        e := self.len()
+        if err := self.loadAllIndex(); err != nil {
+            return err
+        }
+        for i := 0; i < e; i++ {
+            n := self.nodeAt(i)
+            if n.IsRaw() {
+                n.parseRaw(true)
+            }
+            if err := n.Check(); err != nil {
+                return err
+            }
+        }
+        return nil
+    case types.V_OBJECT:
+        e := self.len()
+        if err := self.loadAllKey(); err != nil {
+            return err
+        }
+        for i := 0; i < e; i++ {
+            n := self.pairAt(i)
+            if n.Value.IsRaw() {
+                n.Value.parseRaw(true)
+            }
+            if err := n.Value.Check(); err != nil {
+                return err
+            }
+        }
+        return nil
+    default:
+        return self.Check()
+    }
+}
+
+// Load loads the node's children as parsed.
+// After calling it, only the node itself can be used on concurrency (not include its children)
+func (self *Node) Load() error {
+    if err := self.checkRaw(); err != nil {
+        return err
+    }
+
+    switch self.t {
+    case _V_ARRAY_LAZY:
+        return self.skipAllIndex()
+    case _V_OBJECT_LAZY:
+        return self.skipAllKey()
+    default:
+        return self.Check()
+    }
+}
+
+/**---------------------------------- Internal Helper Methods ----------------------------------**/
+
+// should verifies that the node (after resolving raw JSON) has the expected
+// internal type t; s is a human-readable type name kept for the call sites.
+func (self *Node) should(t types.ValueType, s string) error {
+    if err := self.checkRaw(); err != nil {
+        return err
+    }
+    if  self.itype() != t {
+        return ErrUnsupportType
+    }
+    return nil
+}
+
+// nodeAt returns the i-th existing child of an array node. When some
+// children were soft-removed (chunk length != self.len()), it walks the
+// chunk and skips unset slots so that i counts only existing nodes.
+func (self *Node) nodeAt(i int) *Node {
+    var p *linkedNodes
+    if self.isLazy() {
+        _, stack := self.getParserAndArrayStack()
+        p = &stack.v
+    } else {
+        p = (*linkedNodes)(self.p)
+        if l := p.Len(); l != self.len() {
+            // some nodes got unset, iterate to skip them
+            for j:=0; j<l; j++ {
+                v := p.At(j)
+                if v.Exists() {
+                    i--
+                }
+                if i < 0 {
+                    return v
+                }
+            }
+            return nil
+        } 
+    }
+    return p.At(i)
+}
+
+// pairAt returns the i-th existing pair of an object node, skipping
+// soft-removed slots the same way nodeAt does for arrays.
+func (self *Node) pairAt(i int) *Pair {
+    var p *linkedPairs
+    if self.isLazy() {
+        _, stack := self.getParserAndObjectStack()
+        p = &stack.v
+    } else {
+        p = (*linkedPairs)(self.p)
+        if l := p.Len(); l != self.len() {
+            // some nodes got unset, iterate to skip them
+            for j:=0; j<l; j++ {
+                v := p.At(j)
+                if v != nil && v.Value.Exists() {
+                    i--
+                }
+                if i < 0 {
+                    return v
+                }
+            }
+           return nil
+       } 
+    }
+    return p.At(i)
+}
+
+// skipAllIndex finishes lazy array parsing by skipping (not fully parsing)
+// every remaining element; afterwards the node is no longer lazy.
+func (self *Node) skipAllIndex() error {
+    if !self.isLazy() {
+        return nil
+    }
+    var err types.ParsingError
+    parser, stack := self.getParserAndArrayStack()
+    parser.skipValue = true
+    parser.noLazy = true
+    *self, err = parser.decodeArray(&stack.v)
+    if err != 0 {
+        return parser.ExportError(err)
+    }
+    return nil
+}
+
+// skipAllKey finishes lazy object parsing by skipping (not fully parsing)
+// every remaining pair; afterwards the node is no longer lazy.
+func (self *Node) skipAllKey() error {
+    if !self.isLazy() {
+        return nil
+    }
+    var err types.ParsingError
+    parser, stack := self.getParserAndObjectStack()
+    parser.skipValue = true
+    parser.noLazy = true
+    *self, err = parser.decodeObject(&stack.v)
+    if err != 0 {
+        return parser.ExportError(err)
+    }
+    return nil
+}
+
+// skipKey searches the already-parsed pairs for key; if not found and the
+// node is lazy, it keeps parsing pairs one at a time until the key appears
+// or input ends. Returns the value node and its index, or (nil, -1).
+func (self *Node) skipKey(key string) (*Node, int) {
+    nb := self.len()
+    lazy := self.isLazy()
+
+    if nb > 0 {
+        /* linear search */
+        var p *Pair
+        var i int
+        if lazy {
+            s := (*parseObjectStack)(self.p)
+            p, i = s.v.Get(key)
+        } else {
+            p, i = (*linkedPairs)(self.p).Get(key)
+        }
+
+        if p != nil {
+            return &p.Value, i
+        }
+    }
+
+    /* not found */
+    if !lazy {
+        return nil, -1
+    }
+
+    // lazy load
+    for last, i := self.skipNextPair(), nb; last != nil; last, i = self.skipNextPair(), i+1 {
+        if last.Value.Check() != nil {
+            // propagate the error node; index -1 signals failure
+            return &last.Value, -1
+        }
+        if last.Key == key {
+            return &last.Value, i
+        }
+    }
+
+    return nil, -1
+}
+
+// skipIndex returns the node at index, lazily parsing further elements
+// until the index is reached; returns nil when out of range.
+func (self *Node) skipIndex(index int) *Node {
+    nb := self.len()
+    if nb > index {
+        v := self.nodeAt(index)
+        return v
+    }
+    if !self.isLazy() {
+        return nil
+    }
+
+    // lazy load
+    for last := self.skipNextNode(); last != nil; last = self.skipNextNode(){
+        if last.Check() != nil {
+            // propagate the error node to the caller
+            return last
+        }
+        if self.len() > index {
+            return last
+        }
+    }
+
+    return nil
+}
+
+// skipIndexPair returns the pair at index, lazily parsing further pairs
+// until the index is reached; returns nil when out of range.
+func (self *Node) skipIndexPair(index int) *Pair {
+    nb := self.len()
+    if nb > index {
+        return self.pairAt(index)
+    }
+    if !self.isLazy() {
+        return nil
+    }
+
+    // lazy load
+    for last := self.skipNextPair(); last != nil; last = self.skipNextPair(){
+        if last.Value.Check() != nil {
+            // propagate the error pair to the caller
+            return last
+        }
+        if self.len() > index {
+            return last
+        }
+    }
+
+    return nil
+}
+
+// loadAllIndex fully parses all remaining elements of a lazy array
+// (unlike skipAllIndex, children are parsed, not just skipped).
+func (self *Node) loadAllIndex() error {
+    if !self.isLazy() {
+        return nil
+    }
+    var err types.ParsingError
+    parser, stack := self.getParserAndArrayStack()
+    parser.noLazy = true
+    *self, err = parser.decodeArray(&stack.v)
+    if err != 0 {
+        return parser.ExportError(err)
+    }
+    return nil
+}
+
+// loadAllKey fully parses all remaining pairs of a lazy object
+// (unlike skipAllKey, children are parsed, not just skipped).
+func (self *Node) loadAllKey() error {
+    if !self.isLazy() {
+        return nil
+    }
+    var err types.ParsingError
+    parser, stack := self.getParserAndObjectStack()
+    parser.noLazy = true
+    *self, err = parser.decodeObject(&stack.v)
+    if err != 0 {
+        return parser.ExportError(err)
+    }
+    return nil
+}
+
+// removeNode soft-deletes the i-th array child by zeroing its slot.
+func (self *Node) removeNode(i int) {
+    node := self.nodeAt(i)
+    if node == nil {
+        return
+    }
+    *node = Node{}
+    // NOTICE: self.l becomes smaller than linkedNodes.Len() after this
+    self.l--
+}
+
+// removePair soft-deletes the i-th object pair by zeroing its slot;
+// i counts existing pairs only (unset slots are skipped by pairAt).
+func (self *Node) removePair(i int) {
+    last := self.pairAt(i)
+    if last == nil {
+        return
+    }
+    *last = Pair{}
+    // NOTICE: should be consistent with linkedPair.Len()
+    self.l--
+}
+
+// removePairAt soft-deletes the pair at physical chunk position i
+// (no skipping of unset slots, unlike removePair).
+func (self *Node) removePairAt(i int) {
+    p := (*linkedPairs)(self.p).At(i)
+    if p == nil {
+        return
+    }
+    *p = Pair{}
+    // NOTICE: should be consistent with linkedPair.Len()
+    self.l--
+}
+
+// toGenericArray converts a fully-loaded array node into []interface{}
+// by recursively calling Interface() on each child.
+func (self *Node) toGenericArray() ([]interface{}, error) {
+    nb := self.len()
+    if nb == 0 {
+        return []interface{}{}, nil
+    }
+    ret := make([]interface{}, 0, nb)
+    
+    /* convert each item */
+    it := self.values()
+    for v := it.next(); v != nil; v = it.next() {
+        vv, err := v.Interface()
+        if err != nil {
+            return nil, err
+        }
+        ret = append(ret, vv)
+    }
+
+    /* all done */
+    return ret, nil
+}
+
+// toGenericArrayUseNumber is toGenericArray with numbers kept as json.Number.
+func (self *Node) toGenericArrayUseNumber() ([]interface{}, error) {
+    nb := self.len()
+    if nb == 0 {
+        return []interface{}{}, nil
+    }
+    ret := make([]interface{}, 0, nb)
+
+    /* convert each item */
+    it := self.values()
+    for v := it.next(); v != nil; v = it.next() {
+        vv, err := v.InterfaceUseNumber()
+        if err != nil {
+            return nil, err
+        }
+        ret = append(ret, vv)
+    }
+
+    /* all done */
+    return ret, nil
+}
+
+// toGenericArrayUseNode copies the children of an array node into a []Node
+// without converting them (raw children stay raw).
+func (self *Node) toGenericArrayUseNode() ([]Node, error) {
+    var nb = self.len()
+    if nb == 0 {
+        return []Node{}, nil
+    }
+
+    var s = (*linkedNodes)(self.p)
+    var out = make([]Node, nb)
+    s.ToSlice(out)
+
+    return out, nil
+}
+
+// toGenericObject converts a fully-loaded object node into
+// map[string]interface{} by recursively calling Interface() on each value.
+func (self *Node) toGenericObject() (map[string]interface{}, error) {
+    nb := self.len()
+    if nb == 0 {
+        return map[string]interface{}{}, nil
+    }
+    ret := make(map[string]interface{}, nb)
+
+    /* convert each item */
+    it := self.properties()
+    for v := it.next(); v != nil; v = it.next() {
+        vv, err := v.Value.Interface()
+        if err != nil {
+            return nil, err
+        }
+        ret[v.Key] = vv
+    }
+
+    /* all done */
+    return ret, nil
+}
+
+
+// toGenericObjectUseNumber is toGenericObject with numbers kept as json.Number.
+func (self *Node) toGenericObjectUseNumber() (map[string]interface{}, error) {
+    nb := self.len()
+    if nb == 0 {
+        return map[string]interface{}{}, nil
+    }
+    ret := make(map[string]interface{}, nb)
+
+    /* convert each item */
+    it := self.properties()
+    for v := it.next(); v != nil; v = it.next() {
+        vv, err := v.Value.InterfaceUseNumber()
+        if err != nil {
+            return nil, err
+        }
+        ret[v.Key] = vv
+    }
+
+    /* all done */
+    return ret, nil
+}
+
+// toGenericObjectUseNode copies the pairs of an object node into a
+// map[string]Node without converting them (raw children stay raw).
+func (self *Node) toGenericObjectUseNode() (map[string]Node, error) {
+    var nb = self.len()
+    if nb == 0 {
+        return map[string]Node{}, nil
+    }
+
+    var s = (*linkedPairs)(self.p)
+    var out = make(map[string]Node, nb)
+    s.ToMap(out)
+
+    /* all done */
+    return out, nil
+}
+
+/**------------------------------------ Factory Methods ------------------------------------**/
+
+// Shared singleton nodes for the three JSON literals (null/true/false).
+var (
+    nullNode  = Node{t: types.V_NULL}
+    trueNode  = Node{t: types.V_TRUE}
+    falseNode = Node{t: types.V_FALSE}
+)
+
+// NewRaw creates a node of raw json.
+// If the input json is invalid, NewRaw returns an error Node.
+func NewRaw(json string) Node {
+    parser := NewParserObj(json)
+    // skip() validates boundaries and returns where the value starts
+    start, err := parser.skip()
+    if err != 0 {
+        return *newError(err, err.Message()) 
+    }
+    it := switchRawType(parser.s[start])
+    if it == _V_NONE {
+        return Node{}
+    }
+    return newRawNode(parser.s[start:parser.p], it)
+}
+
// NewAny creates a node of type V_ANY if any's type isn't Node or *Node,
// which stores interface{} and can be only used for `.Interface()`\`.MarshalJSON()`.
func NewAny(any interface{}) Node {
    switch n := any.(type) {
    case Node:
        // already a node: return it unchanged
        return n
    case *Node:
        return *n
    default:
        // store a pointer to the boxed interface value itself
        return Node{
            t: _V_ANY,
            p: unsafe.Pointer(&any),
        }
    }
}
+
// NewBytes encodes given src with Base64 (RFC 4648), and creates a node
// of type V_STRING. It panics when src is empty.
func NewBytes(src []byte) Node {
    if len(src) == 0 {
        panic("empty src bytes")
    }
    out := encodeBase64(src)
    return NewString(out)
}
+
+// NewNull creates a node of type V_NULL
+func NewNull() Node {
+    return Node{
+        p: nil,
+        t: types.V_NULL,
+    }
+}
+
+// NewBool creates a node of type bool:
+//  If v is true, returns V_TRUE node
+//  If v is false, returns V_FALSE node
+func NewBool(v bool) Node {
+    var t = types.V_FALSE
+    if v {
+        t = types.V_TRUE
+    }
+    return Node{
+        p: nil,
+        t: t,
+    }
+}
+
+// NewNumber creates a json.Number node
+// v must be a decimal string complying with RFC8259
+func NewNumber(v string) Node {
+    return Node{
+        l: uint(len(v)),
+        p: rt.StrPtr(v),
+        t: _V_NUMBER,
+    }
+}
+
+func (node Node) toNumber() json.Number {
+    return json.Number(rt.StrFrom(node.p, int64(node.l)))
+}
+
+func (self Node) toString() string {
+    return rt.StrFrom(self.p, int64(self.l))
+}
+
+func (node Node) toFloat64() (float64, error) {
+    ret, err := node.toNumber().Float64()
+    if err != nil {
+        return 0, err
+    }
+    return ret, nil
+}
+
+func (node Node) toInt64() (int64, error) {
+    ret,err := node.toNumber().Int64()
+    if err != nil {
+        return 0, err
+    }
+    return ret, nil
+}
+
+func newBytes(v []byte) Node {
+    return Node{
+        t: types.V_STRING,
+        p: mem2ptr(v),
+        l: uint(len(v)),
+    }
+}
+
+// NewString creates a node of type V_STRING. 
+// v is considered to be a valid UTF-8 string,
+// which means it won't be validated and unescaped.
+// when the node is encoded to json, v will be escaped.
+func NewString(v string) Node {
+    return Node{
+        t: types.V_STRING,
+        p: rt.StrPtr(v),
+        l: uint(len(v)),
+    }
+}
+
+// NewArray creates a node of type V_ARRAY,
+// using v as its underlying children
+func NewArray(v []Node) Node {
+    s := new(linkedNodes)
+    s.FromSlice(v)
+    return newArray(s)
+}
+
+func newArray(v *linkedNodes) Node {
+    return Node{
+        t: types.V_ARRAY,
+        l: uint(v.Len()),
+        p: unsafe.Pointer(v),
+    }
+}
+
+func (self *Node) setArray(v *linkedNodes) {
+    self.t = types.V_ARRAY
+    self.l = uint(v.Len())
+    self.p = unsafe.Pointer(v)
+}
+
+// NewObject creates a node of type V_OBJECT,
+// using v as its underlying children
+func NewObject(v []Pair) Node {
+    s := new(linkedPairs)
+    s.FromSlice(v)
+    return newObject(s)
+}
+
+func newObject(v *linkedPairs) Node {
+    return Node{
+        t: types.V_OBJECT,
+        l: uint(v.Len()),
+        p: unsafe.Pointer(v),
+    }
+}
+
+func (self *Node) setObject(v *linkedPairs) {
+    self.t = types.V_OBJECT
+    self.l = uint(v.Len())
+    self.p = unsafe.Pointer(v)
+}
+
+func newRawNode(str string, typ types.ValueType) Node {
+    return Node{
+        t: _V_RAW | typ,
+        p: rt.StrPtr(str),
+        l: uint(len(str)),
+    }
+}
+
// parseRaw re-parses a raw node's text in place. When full is set the
// parser decodes eagerly (no lazy containers, no value skipping);
// otherwise a lazy top-level parse is performed. On failure the node is
// replaced by a syntax-error node.
func (self *Node) parseRaw(full bool) {
    raw := self.toString()
    parser := NewParserObj(raw)
    if full {
        parser.noLazy = true
        parser.skipValue = false
    }
    var e types.ParsingError
    *self, e = parser.Parse()
    if e != 0 {
        *self = *newSyntaxError(parser.syntaxError(e))
    }
}
+
// typeJumpTable maps the first byte of a JSON value to its value type;
// bytes that cannot start a value map to the zero entry (_V_NONE).
var typeJumpTable = [256]types.ValueType{
    '"' : types.V_STRING,
    '-' : _V_NUMBER,
    '0' : _V_NUMBER,
    '1' : _V_NUMBER,
    '2' : _V_NUMBER,
    '3' : _V_NUMBER,
    '4' : _V_NUMBER,
    '5' : _V_NUMBER,
    '6' : _V_NUMBER,
    '7' : _V_NUMBER,
    '8' : _V_NUMBER,
    '9' : _V_NUMBER,
    '[' : types.V_ARRAY,
    'f' : types.V_FALSE,
    'n' : types.V_NULL,
    't' : types.V_TRUE,
    '{' : types.V_OBJECT,
}
+
// switchRawType classifies a JSON leading byte via typeJumpTable,
// returning _V_NONE for bytes that cannot start a value.
func switchRawType(c byte) types.ValueType {
    return typeJumpTable[c]
}

+ 660 - 0
vendor/github.com/bytedance/sonic/ast/parser.go

@@ -0,0 +1,660 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `fmt`
+
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+)
+
const (
    // NOTE(review): both constants appear to size AST containers defined
    // elsewhere in this package — confirm against their users.
    _DEFAULT_NODE_CAP int = 8
    _APPEND_GROW_SHIFT = 1
)

const (
    // internal parsing error codes, mapped to std errors by ExportError
    _ERR_NOT_FOUND      types.ParsingError = 33
    _ERR_UNSUPPORT_TYPE types.ParsingError = 34
)
+
var (
    // ErrNotExist means both the key and the value do not exist.
    ErrNotExist error = newError(_ERR_NOT_FOUND, "value not exists")

    // ErrUnsupportType means the API called on the node is unsupported.
    ErrUnsupportType error = newError(_ERR_UNSUPPORT_TYPE, "unsupported type")
)
+
// Parser is a lazy JSON parser over an in-memory string.
type Parser struct {
    p           int    // current read offset into s
    s           string // the json text being parsed
    noLazy      bool   // fully parse containers instead of building lazy nodes
    skipValue   bool   // fast-skip values and record them as raw nodes
    dbuf        *byte  // scratch buffer for number decoding (see decodeNumber)
}
+
+/** Parser Private Methods **/
+
+func (self *Parser) delim() types.ParsingError {
+    n := len(self.s)
+    p := self.lspace(self.p)
+
+    /* check for EOF */
+    if p >= n {
+        return types.ERR_EOF
+    }
+
+    /* check for the delimtier */
+    if self.s[p] != ':' {
+        return types.ERR_INVALID_CHAR
+    }
+
+    /* update the read pointer */
+    self.p = p + 1
+    return 0
+}
+
+func (self *Parser) object() types.ParsingError {
+    n := len(self.s)
+    p := self.lspace(self.p)
+
+    /* check for EOF */
+    if p >= n {
+        return types.ERR_EOF
+    }
+
+    /* check for the delimtier */
+    if self.s[p] != '{' {
+        return types.ERR_INVALID_CHAR
+    }
+
+    /* update the read pointer */
+    self.p = p + 1
+    return 0
+}
+
+func (self *Parser) array() types.ParsingError {
+    n := len(self.s)
+    p := self.lspace(self.p)
+
+    /* check for EOF */
+    if p >= n {
+        return types.ERR_EOF
+    }
+
+    /* check for the delimtier */
+    if self.s[p] != '[' {
+        return types.ERR_INVALID_CHAR
+    }
+
+    /* update the read pointer */
+    self.p = p + 1
+    return 0
+}
+
+func (self *Parser) lspace(sp int) int {
+    ns := len(self.s)
+    for ; sp<ns && isSpace(self.s[sp]); sp+=1 {}
+
+    return sp
+}
+
// decodeArray parses the remaining elements of an array whose '[' has
// already been consumed, appending every element to ret. When skipValue
// is set, elements are fast-skipped and stored as raw nodes instead of
// being decoded.
func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) {
    sp := self.p
    ns := len(self.s)

    /* check for EOF */
    if self.p = self.lspace(sp); self.p >= ns {
        return Node{}, types.ERR_EOF
    }

    /* check for empty array */
    if self.s[self.p] == ']' {
        self.p++
        return Node{t: types.V_ARRAY}, 0
    }

    /* allocate array space and parse every element */
    for {
        var val Node
        var err types.ParsingError

        if self.skipValue {
            /* skip the value, then wrap its raw text classified by leading byte */
            var start int
            if start, err = self.skipFast(); err != 0 {
                return Node{}, err
            }
            if self.p > ns {
                return Node{}, types.ERR_EOF
            }
            t := switchRawType(self.s[start])
            if t == _V_NONE {
                return Node{}, types.ERR_INVALID_CHAR
            }
            val = newRawNode(self.s[start:self.p], t)
        }else{
            /* decode the value */
            if val, err = self.Parse(); err != 0 {
                return Node{}, err
            }
        }

        /* add the value to result */
        ret.Push(val)
        self.p = self.lspace(self.p)

        /* check for EOF */
        if self.p >= ns {
            return Node{}, types.ERR_EOF
        }

        /* check for the next character */
        switch self.s[self.p] {
            case ',' : self.p++
            case ']' : self.p++; return newArray(ret), 0
            default:
                // if val.isLazy() {
                //     return newLazyArray(self, ret), 0
                // }
                return Node{}, types.ERR_INVALID_CHAR
        }
    }
}
+
// decodeObject parses the remaining members of an object whose '{' has
// already been consumed, appending every key/value pair to ret. When
// skipValue is set, values are fast-skipped and stored as raw nodes
// instead of being decoded.
func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) {
    sp := self.p
    ns := len(self.s)

    /* check for EOF */
    if self.p = self.lspace(sp); self.p >= ns {
        return Node{}, types.ERR_EOF
    }

    /* check for empty object */
    if self.s[self.p] == '}' {
        self.p++
        return Node{t: types.V_OBJECT}, 0
    }

    /* decode each pair */
    for {
        var val Node
        var njs types.JsonState
        var err types.ParsingError

        /* decode the key; it must scan as a string */
        if njs = self.decodeValue(); njs.Vt != types.V_STRING {
            return Node{}, types.ERR_INVALID_CHAR
        }

        /* extract the key (self.p is one past the closing quote) */
        idx := self.p - 1
        key := self.s[njs.Iv:idx]

        /* check for escape sequence; Ep == -1 means no escapes */
        if njs.Ep != -1 {
            if key, err = unquote(key); err != 0 {
                return Node{}, err
            }
        }

        /* expect a ':' delimiter */
        if err = self.delim(); err != 0 {
            return Node{}, err
        }

        if self.skipValue {
            /* skip the value, then wrap its raw text classified by leading byte */
            var start int
            if start, err = self.skipFast(); err != 0 {
                return Node{}, err
            }
            if self.p > ns {
                return Node{}, types.ERR_EOF
            }
            t := switchRawType(self.s[start])
            if t == _V_NONE {
                return Node{}, types.ERR_INVALID_CHAR
            }
            val = newRawNode(self.s[start:self.p], t)
        } else {
            /* decode the value */
            if val, err = self.Parse(); err != 0 {
                return Node{}, err
            }
        }

        /* add the value to result */
        // FIXME: ret's address may change here, thus previous referred node in ret may be invalid !!
        ret.Push(Pair{Key: key, Value: val})
        self.p = self.lspace(self.p)

        /* check for EOF */
        if self.p >= ns {
            return Node{}, types.ERR_EOF
        }

        /* check for the next character */
        switch self.s[self.p] {
            case ',' : self.p++
            case '}' : self.p++; return newObject(ret), 0
        default:
            // if val.isLazy() {
            //     return newLazyObject(self, ret), 0
            // }
            return Node{}, types.ERR_INVALID_CHAR
        }
    }
}
+
+func (self *Parser) decodeString(iv int64, ep int) (Node, types.ParsingError) {
+    p := self.p - 1
+    s := self.s[iv:p]
+
+    /* fast path: no escape sequence */
+    if ep == -1 {
+        return NewString(s), 0
+    }
+
+    /* unquote the string */
+    out, err := unquote(s)
+
+    /* check for errors */
+    if err != 0 {
+        return Node{}, err
+    } else {
+        return newBytes(rt.Str2Mem(out)), 0
+    }
+}
+
+/** Parser Interface **/
+
// Pos returns the parser's current read offset into the input.
func (self *Parser) Pos() int {
    return self.p
}
+
// Parse decodes the next JSON value from the input. Containers are
// returned as lazy nodes unless noLazy is set; numbers are returned as
// raw-text nodes (_V_NUMBER) rather than decoded.
func (self *Parser) Parse() (Node, types.ParsingError) {
    switch val := self.decodeValue(); val.Vt {
        case types.V_EOF     : return Node{}, types.ERR_EOF
        case types.V_NULL    : return nullNode, 0
        case types.V_TRUE    : return trueNode, 0
        case types.V_FALSE   : return falseNode, 0
        case types.V_STRING  : return self.decodeString(val.Iv, val.Ep)
        case types.V_ARRAY:
            // fast path for an immediately-closed array. skipBlank
            // returns a negative error code on failure, so p >= self.p
            // implies the probe succeeded.
            if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == ']' {
                self.p = p + 1
                return Node{t: types.V_ARRAY}, 0
            }
            if self.noLazy {
                return self.decodeArray(new(linkedNodes))
            }
            return newLazyArray(self), 0
        case types.V_OBJECT:
            // fast path for an immediately-closed object (see above)
            if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == '}' {
                self.p = p + 1
                return Node{t: types.V_OBJECT}, 0
            }
            if self.noLazy {
                return self.decodeObject(new(linkedPairs))
            }
            return newLazyObject(self), 0
        case types.V_DOUBLE  : return NewNumber(self.s[val.Ep:self.p]), 0
        case types.V_INTEGER : return NewNumber(self.s[val.Ep:self.p]), 0
        default              : return Node{}, types.ParsingError(-val.Vt)
    }
}
+
// searchKey advances the parser into the current object until it is
// positioned at the value of the member whose key equals match,
// fast-skipping every non-matching value. It returns _ERR_NOT_FOUND when
// the object ends without a matching key.
func (self *Parser) searchKey(match string) types.ParsingError {
    ns := len(self.s)
    if err := self.object(); err != 0 {
        return err
    }

    /* check for EOF */
    if self.p = self.lspace(self.p); self.p >= ns {
        return types.ERR_EOF
    }

    /* check for empty object */
    if self.s[self.p] == '}' {
        self.p++
        return _ERR_NOT_FOUND
    }

    var njs types.JsonState
    var err types.ParsingError
    /* decode each pair */
    for {

        /* decode the key; it must scan as a string */
        if njs = self.decodeValue(); njs.Vt != types.V_STRING {
            return types.ERR_INVALID_CHAR
        }

        /* extract the key (self.p is one past the closing quote) */
        idx := self.p - 1
        key := self.s[njs.Iv:idx]

        /* check for escape sequence */
        if njs.Ep != -1 {
            if key, err = unquote(key); err != 0 {
                return err
            }
        }

        /* expect a ':' delimiter */
        if err = self.delim(); err != 0 {
            return err
        }

        /* skip value */
        if key != match {
            if _, err = self.skipFast(); err != 0 {
                return err
            }
        } else {
            /* found: parser is now positioned at the start of the value */
            return 0
        }

        /* check for EOF */
        self.p = self.lspace(self.p)
        if self.p >= ns {
            return types.ERR_EOF
        }

        /* check for the next character */
        switch self.s[self.p] {
        case ',':
            self.p++
        case '}':
            self.p++
            return _ERR_NOT_FOUND
        default:
            return types.ERR_INVALID_CHAR
        }
    }
}
+
// searchIndex advances the parser into the current array until it is
// positioned at element idx, fast-skipping the idx preceding elements.
// It returns _ERR_NOT_FOUND when the array ends before idx is reached.
func (self *Parser) searchIndex(idx int) types.ParsingError {
    ns := len(self.s)
    if err := self.array(); err != 0 {
        return err
    }

    /* check for EOF */
    if self.p = self.lspace(self.p); self.p >= ns {
        return types.ERR_EOF
    }

    /* check for empty array */
    if self.s[self.p] == ']' {
        self.p++
        return _ERR_NOT_FOUND
    }

    var err types.ParsingError
    /* skip every element before idx */
    for i := 0; i < idx; i++ {

        /* fast-skip the element */
        if _, err = self.skipFast(); err != 0 {
            return err
        }

        /* check for EOF */
        self.p = self.lspace(self.p)
        if self.p >= ns {
            return types.ERR_EOF
        }

        /* check for the next character */
        switch self.s[self.p] {
        case ',':
            self.p++
        case ']':
            self.p++
            return _ERR_NOT_FOUND
        default:
            return types.ERR_INVALID_CHAR
        }
    }

    return 0
}
+
// skipNextNode incrementally extends a lazy array node: it fast-skips the
// next element, pushes it onto the backing stack as a raw node and
// returns a pointer to it. It returns nil when the node is not lazy or
// when the array is complete (sealing self as a plain V_ARRAY), and a
// syntax-error node on malformed input.
func (self *Node) skipNextNode() *Node {
    if !self.isLazy() {
        return nil
    }

    parser, stack := self.getParserAndArrayStack()
    ret := &stack.v
    sp := parser.p
    ns := len(parser.s)

    /* check for EOF */
    if parser.p = parser.lspace(sp); parser.p >= ns {
        return newSyntaxError(parser.syntaxError(types.ERR_EOF))
    }

    /* check for empty array */
    if parser.s[parser.p] == ']' {
        parser.p++
        self.setArray(ret)
        return nil
    }

    var val Node
    /* skip the value and wrap its raw text classified by leading byte */
    if start, err := parser.skipFast(); err != 0 {
        return newSyntaxError(parser.syntaxError(err))
    } else {
        t := switchRawType(parser.s[start])
        if t == _V_NONE {
            return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))
        }
        val = newRawNode(parser.s[start:parser.p], t)
    }

    /* add the value to result; keep the lazy node's length in sync */
    ret.Push(val)
    self.l++
    parser.p = parser.lspace(parser.p)

    /* check for EOF */
    if parser.p >= ns {
        return newSyntaxError(parser.syntaxError(types.ERR_EOF))
    }

    /* check for the next character */
    switch parser.s[parser.p] {
    case ',':
        parser.p++
        return ret.At(ret.Len()-1)
    case ']':
        /* array exhausted: seal self as a plain V_ARRAY */
        parser.p++
        self.setArray(ret)
        return ret.At(ret.Len()-1)
    default:
        return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))
    }
}
+
// skipNextPair incrementally extends a lazy object node: it decodes the
// next key, fast-skips its value, pushes the pair onto the backing stack
// and returns a pointer to it. It returns nil when the node is not lazy
// or when the object is complete (sealing self as a plain V_OBJECT), and
// a pair carrying a syntax-error node on malformed input.
func (self *Node) skipNextPair() (*Pair) {
    if !self.isLazy() {
        return nil
    }

    parser, stack := self.getParserAndObjectStack()
    ret := &stack.v
    sp := parser.p
    ns := len(parser.s)

    /* check for EOF */
    if parser.p = parser.lspace(sp); parser.p >= ns {
        return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_EOF))}
    }

    /* check for empty object */
    if parser.s[parser.p] == '}' {
        parser.p++
        self.setObject(ret)
        return nil
    }

    /* decode one pair */
    var val Node
    var njs types.JsonState
    var err types.ParsingError

    /* decode the key; it must scan as a string */
    if njs = parser.decodeValue(); njs.Vt != types.V_STRING {
        return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
    }

    /* extract the key (parser.p is one past the closing quote) */
    idx := parser.p - 1
    key := parser.s[njs.Iv:idx]

    /* check for escape sequence */
    if njs.Ep != -1 {
        if key, err = unquote(key); err != 0 {
            return &Pair{key, *newSyntaxError(parser.syntaxError(err))}
        }
    }

    /* expect a ':' delimiter */
    if err = parser.delim(); err != 0 {
        return &Pair{key, *newSyntaxError(parser.syntaxError(err))}
    }

    /* skip the value and wrap its raw text classified by leading byte */
    if start, err := parser.skipFast(); err != 0 {
        return &Pair{key, *newSyntaxError(parser.syntaxError(err))}
    } else {
        t := switchRawType(parser.s[start])
        if t == _V_NONE {
            return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
        }
        val = newRawNode(parser.s[start:parser.p], t)
    }

    /* add the value to result; keep the lazy node's length in sync */
    ret.Push(Pair{Key: key, Value: val})
    self.l++
    parser.p = parser.lspace(parser.p)

    /* check for EOF */
    if parser.p >= ns {
        return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_EOF))}
    }

    /* check for the next character */
    switch parser.s[parser.p] {
    case ',':
        parser.p++
        return ret.At(ret.Len()-1)
    case '}':
        /* object exhausted: seal self as a plain V_OBJECT */
        parser.p++
        self.setObject(ret)
        return ret.At(ret.Len()-1)
    default:
        return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
    }
}
+
+
+/** Parser Factory **/
+
+// Loads parse all json into interface{}
+func Loads(src string) (int, interface{}, error) {
+    ps := &Parser{s: src}
+    np, err := ps.Parse()
+
+    /* check for errors */
+    if err != 0 {
+        return 0, nil, ps.ExportError(err)
+    } else {
+        x, err := np.Interface()
+        if err != nil {
+            return 0, nil, err
+        }
+        return ps.Pos(), x, nil
+    }
+}
+
+// LoadsUseNumber parse all json into interface{}, with numeric nodes casted to json.Number
+func LoadsUseNumber(src string) (int, interface{}, error) {
+    ps := &Parser{s: src}
+    np, err := ps.Parse()
+
+    /* check for errors */
+    if err != 0 {
+        return 0, nil, err
+    } else {
+        x, err := np.InterfaceUseNumber()
+        if err != nil {
+            return 0, nil, err
+        }
+        return ps.Pos(), x, nil
+    }
+}
+
// NewParser returns a pointer to a newly allocated parser over src.
func NewParser(src string) *Parser {
    return &Parser{s: src}
}
+
// NewParserObj returns a new parser value over src.
func NewParserObj(src string) Parser {
    return Parser{s: src}
}
+
+// decodeNumber controls if parser decodes the number values instead of skip them
+//   WARN: once you set decodeNumber(true), please set decodeNumber(false) before you drop the parser 
+//   otherwise the memory CANNOT be reused
+func (self *Parser) decodeNumber(decode bool) {
+    if !decode && self.dbuf != nil {
+        types.FreeDbuf(self.dbuf)
+        self.dbuf = nil
+        return
+    }
+    if decode && self.dbuf == nil {
+        self.dbuf = types.NewDbuf()
+    }
+}
+
// ExportError converts a types.ParsingError to a std error. The internal
// not-found code maps to the ErrNotExist sentinel; any other code is
// rendered as a quoted syntax-error description at the current position.
func (self *Parser) ExportError(err types.ParsingError) error {
    if err == _ERR_NOT_FOUND {
        return ErrNotExist
    }
    return fmt.Errorf("%q", SyntaxError{
        Pos : self.p,
        Src : self.s,
        Code: err,
    }.Description())
}
+
+func backward(src string, i int) int {
+    for ; i>=0 && isSpace(src[i]); i-- {}
+    return i
+}

+ 138 - 0
vendor/github.com/bytedance/sonic/ast/search.go

@@ -0,0 +1,138 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/bytedance/sonic/internal/native/types`
+)
+
// Searcher performs path-based lookups over a json text, reusing one
// embedded Parser across calls.
type Searcher struct {
    parser Parser
}
+
+func NewSearcher(str string) *Searcher {
+    return &Searcher{
+        parser: Parser{
+            s:      str,
+            noLazy: false,
+        },
+    }
+}
+
// GetByPathCopy searches in depth from the top json and returns a
// **Copied** json node at the path location.
func (self *Searcher) GetByPathCopy(path ...interface{}) (Node, error) {
    return self.getByPath(true, true, path...)
}
+
// GetByPath searches in depth from the top json and returns a
// **Referenced** json node at the path location.
//
// WARN: the result directly references a slice of the top json, which is
// faster but may keep the whole input string alive (consuming more memory).
func (self *Searcher) GetByPath(path ...interface{}) (Node, error) {
    return self.getByPath(false, true, path...)
}
+
// getByPath runs the parser's path search from offset 0 and wraps the
// located span as a raw node. copystring controls whether the raw text is
// copied out of the parent json; validate enables syntax validation
// during the search.
func (self *Searcher) getByPath(copystring bool, validate bool, path ...interface{}) (Node, error) {
    var err types.ParsingError
    var start int

    self.parser.p = 0
    start, err = self.parser.getByPath(validate, path...)
    if err != 0 {
        // for compatibility with old version
        if err == types.ERR_NOT_FOUND {
            return Node{}, ErrNotExist
        }
        if err == types.ERR_UNSUPPORT_TYPE {
            panic("path must be either int(>=0) or string")
        }
        return Node{}, self.parser.syntaxError(err)
    }

    t := switchRawType(self.parser.s[start])
    if t == _V_NONE {
        // NOTE(review): err is 0 on this path, so ExportError receives a
        // zero code — looks like this should report an invalid-char
        // error instead; confirm upstream
        return Node{}, self.parser.ExportError(err)
    }

    // copy string to reduce memory usage
    var raw string
    if copystring {
        raw = rt.Mem2Str([]byte(self.parser.s[start:self.parser.p]))
    } else {
        raw = self.parser.s[start:self.parser.p]
    }
    return newRawNode(raw, t), nil
}
+
+// GetByPath searches a path and returns relaction and types of target
+func _GetByPath(src string, path ...interface{}) (start int, end int, typ int, err error) {
+	p := NewParserObj(src)
+	s, e := p.getByPath(false, path...)
+	if e != 0 {
+		// for compatibility with old version
+		if e == types.ERR_NOT_FOUND {
+			return -1, -1, 0, ErrNotExist
+		}
+		if e == types.ERR_UNSUPPORT_TYPE {
+			panic("path must be either int(>=0) or string")
+		}
+		return -1, -1, 0, p.syntaxError(e)
+	}
+
+	t := switchRawType(p.s[s])
+	if t == _V_NONE {
+		return -1, -1, 0, ErrNotExist
+	}
+    if t == _V_NUMBER {
+        p.p = 1 + backward(p.s, p.p-1)
+    }
+	return s, p.p, int(t), nil
+}
+
+// ValidSyntax check if a json has a valid JSON syntax,
+// while not validate UTF-8 charset
+func _ValidSyntax(json string) bool {
+	p := NewParserObj(json)
+    _, e := p.skip()
+	if e != 0 {
+        return false
+    }
+   if skipBlank(p.s, p.p) != -int(types.ERR_EOF) {
+        return false
+   }
+   return true
+}
+
+// SkipFast skip a json value in fast-skip algs, 
+// while not strictly validate JSON syntax and UTF-8 charset.
+func _SkipFast(src string, i int) (int, int, error) {
+    p := NewParserObj(src)
+    p.p = i
+    s, e := p.skipFast()
+    if e != 0 {
+        return -1, -1, p.ExportError(e)
+    }
+    t := switchRawType(p.s[s])
+	if t == _V_NONE {
+		return -1, -1, ErrNotExist
+	}
+    if t == _V_NUMBER {
+        p.p = 1 + backward(p.s, p.p-1)
+    }
+    return s, p.p, nil
+}

+ 55 - 0
vendor/github.com/bytedance/sonic/ast/stubs_go115.go

@@ -0,0 +1,55 @@
+// +build !go1.20
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `unsafe`
+    `unicode/utf8`
+
+    `github.com/bytedance/sonic/internal/rt`
+)
+
// Runtime/reflect/encoding-json internals linked in via //go:linkname
// (pre-go1.20 variants; see stubs_go120.go for the go1.20+ versions).

// memmove is the runtime's raw memory copy (declaration only; body is linked).
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)

// unsafe_NewArray allocates an array of n elements of the given type.
//go:linkname unsafe_NewArray reflect.unsafe_NewArray
//goland:noinspection GoUnusedParameter
func unsafe_NewArray(typ *rt.GoType, n int) unsafe.Pointer

// growslice grows a slice to the requested capacity (runtime variant).
//go:linkname growslice runtime.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice

// mem2ptr returns the data pointer of a byte slice without allocating.
//go:nosplit
func mem2ptr(s []byte) unsafe.Pointer {
    return (*rt.GoSlice)(unsafe.Pointer(&s)).Ptr
}

var (
    // safeSet mirrors encoding/json's table of bytes that need no escaping.
    //go:linkname safeSet encoding/json.safeSet
    safeSet [utf8.RuneSelf]bool

    // hex mirrors encoding/json's hex-digit table.
    //go:linkname hex encoding/json.hex
    hex string
)

// unquoteBytes is encoding/json's string-literal unquoter.
//go:linkname unquoteBytes encoding/json.unquoteBytes
func unquoteBytes(s []byte) (t []byte, ok bool)

+ 55 - 0
vendor/github.com/bytedance/sonic/ast/stubs_go120.go

@@ -0,0 +1,55 @@
+// +build go1.20
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `unsafe`
+    `unicode/utf8`
+
+    `github.com/bytedance/sonic/internal/rt`
+)
+
// Runtime/reflect/encoding-json internals linked in via //go:linkname
// (go1.20+ variants; note growslice is linked against reflect.growslice
// here, unlike the pre-1.20 file).

// memmove is the runtime's raw memory copy (declaration only; body is linked).
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)

// unsafe_NewArray allocates an array of n elements of the given type.
//go:linkname unsafe_NewArray reflect.unsafe_NewArray
//goland:noinspection GoUnusedParameter
func unsafe_NewArray(typ *rt.GoType, n int) unsafe.Pointer

// growslice grows a slice to the requested capacity (reflect variant).
//go:linkname growslice reflect.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice

// mem2ptr returns the data pointer of a byte slice without allocating.
//go:nosplit
func mem2ptr(s []byte) unsafe.Pointer {
    return (*rt.GoSlice)(unsafe.Pointer(&s)).Ptr
}

var (
    // safeSet mirrors encoding/json's table of bytes that need no escaping.
    //go:linkname safeSet encoding/json.safeSet
    safeSet [utf8.RuneSelf]bool

    // hex mirrors encoding/json's hex-digit table.
    //go:linkname hex encoding/json.hex
    hex string
)

// unquoteBytes is encoding/json's string-literal unquoter.
//go:linkname unquoteBytes encoding/json.unquoteBytes
func unquoteBytes(s []byte) (t []byte, ok bool)

+ 315 - 0
vendor/github.com/bytedance/sonic/ast/visitor.go

@@ -0,0 +1,315 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ast
+
+import (
+    `encoding/json`
+
+    `github.com/bytedance/sonic/internal/native/types`
+)
+
+// Visitor handles the callbacks during preorder traversal of a JSON AST.
+//
+// According to the JSON RFC8259, a JSON AST can be defined by
+// the following rules without separator / whitespace tokens.
+//
+//  JSON-AST  = value
+//  value     = false / null / true / object / array / number / string
+//  object    = begin-object [ member *( member ) ] end-object
+//  member    = string value
+//  array     = begin-array [ value *( value ) ] end-array
+//
+type Visitor interface {
+
+    // OnNull handles a JSON null value.
+    OnNull() error
+
+    // OnBool handles a JSON true / false value.
+    OnBool(v bool) error
+
+    // OnString handles a JSON string value.
+    OnString(v string) error
+
+    // OnInt64 handles a JSON number value with int64 type.
+    OnInt64(v int64, n json.Number) error
+
+    // OnFloat64 handles a JSON number value with float64 type.
+    OnFloat64(v float64, n json.Number) error
+
+    // OnObjectBegin handles the beginning of a JSON object value with a
+    // suggested capacity that can be used to make your custom object container.
+    //
+    // After this point the visitor will receive a sequence of callbacks like
+    // [string, value, string, value, ......, ObjectEnd].
+    //
+    // Note:
+    // 1. This is a recursive definition which means the value can
+    // also be a JSON object / array described by a sequence of callbacks.
+    // 2. The suggested capacity will be 0 if current object is empty.
+    // 3. Currently sonic use a fixed capacity for non-empty object (keep in
+    // sync with ast.Node) which might not be very suitable. This may be
+    // improved in future version.
+    OnObjectBegin(capacity int) error
+
+    // OnObjectKey handles a JSON object key string in member.
+    OnObjectKey(key string) error
+
+    // OnObjectEnd handles the ending of a JSON object value.
+    OnObjectEnd() error
+
+    // OnArrayBegin handles the beginning of a JSON array value with a
+    // suggested capacity that can be used to make your custom array container.
+    //
+    // After this point the visitor will receive a sequence of callbacks like
+    // [value, value, value, ......, ArrayEnd].
+    //
+    // Note:
+    // 1. This is a recursive definition which means the value can
+    // also be a JSON object / array described by a sequence of callbacks.
+    // 2. The suggested capacity will be 0 if current array is empty.
+    // 3. Currently sonic use a fixed capacity for non-empty array (keep in
+    // sync with ast.Node) which might not be very suitable. This may be
+    // improved in future version.
+    OnArrayBegin(capacity int) error
+
+    // OnArrayEnd handles the ending of a JSON array value.
+    OnArrayEnd() error
+}
+
+// VisitorOptions contains all Visitor's options. The default value is an
+// empty VisitorOptions{}.
+type VisitorOptions struct {
+    // OnlyNumber indicates parser to directly return number value without
+    // conversion, then the first argument of OnInt64 / OnFloat64 will always
+    // be zero.
+    OnlyNumber bool
+}
+
+var defaultVisitorOptions = &VisitorOptions{}
+
+// Preorder decodes the whole JSON string and callbacks each AST node to visitor
+// during preorder traversal. Any visitor method with an error returned will
+// break the traversal and the given error will be directly returned. The opts
+// argument can be reused after every call.
+func Preorder(str string, visitor Visitor, opts *VisitorOptions) error {
+    if opts == nil {
+        opts = defaultVisitorOptions
+    }
+    // process VisitorOptions first to guarantee that all options will be
+    // constant during decoding and make options more readable.
+    var (
+        optDecodeNumber = !opts.OnlyNumber
+    )
+
+    tv := &traverser{
+        parser: Parser{
+            s:         str,
+            noLazy:    true,
+            skipValue: false,
+        },
+        visitor: visitor,
+    }
+
+    if optDecodeNumber {
+        tv.parser.decodeNumber(true)
+    }
+
+    err := tv.decodeValue()
+
+    if optDecodeNumber {
+        tv.parser.decodeNumber(false)
+    }
+    return err
+}
+
+type traverser struct {
+    parser  Parser
+    visitor Visitor
+}
+
+// NOTE: keep in sync with (*Parser).Parse method.
+func (self *traverser) decodeValue() error {
+    switch val := self.parser.decodeValue(); val.Vt {
+    case types.V_EOF:
+        return types.ERR_EOF
+    case types.V_NULL:
+        return self.visitor.OnNull()
+    case types.V_TRUE:
+        return self.visitor.OnBool(true)
+    case types.V_FALSE:
+        return self.visitor.OnBool(false)
+    case types.V_STRING:
+        return self.decodeString(val.Iv, val.Ep)
+    case types.V_DOUBLE:
+        return self.visitor.OnFloat64(val.Dv,
+            json.Number(self.parser.s[val.Ep:self.parser.p]))
+    case types.V_INTEGER:
+        return self.visitor.OnInt64(val.Iv,
+            json.Number(self.parser.s[val.Ep:self.parser.p]))
+    case types.V_ARRAY:
+        return self.decodeArray()
+    case types.V_OBJECT:
+        return self.decodeObject()
+    default:
+        return types.ParsingError(-val.Vt)
+    }
+}
+
+// NOTE: keep in sync with (*Parser).decodeArray method.
+func (self *traverser) decodeArray() error {
+    sp := self.parser.p
+    ns := len(self.parser.s)
+
+    /* check for EOF */
+    self.parser.p = self.parser.lspace(sp)
+    if self.parser.p >= ns {
+        return types.ERR_EOF
+    }
+
+    /* check for empty array */
+    if self.parser.s[self.parser.p] == ']' {
+        self.parser.p++
+        if err := self.visitor.OnArrayBegin(0); err != nil {
+            return err
+        }
+        return self.visitor.OnArrayEnd()
+    }
+
+    /* allocate array space and parse every element */
+    if err := self.visitor.OnArrayBegin(_DEFAULT_NODE_CAP); err != nil {
+        return err
+    }
+    for {
+        /* decode the value */
+        if err := self.decodeValue(); err != nil {
+            return err
+        }
+        self.parser.p = self.parser.lspace(self.parser.p)
+
+        /* check for EOF */
+        if self.parser.p >= ns {
+            return types.ERR_EOF
+        }
+
+        /* check for the next character */
+        switch self.parser.s[self.parser.p] {
+        case ',':
+            self.parser.p++
+        case ']':
+            self.parser.p++
+            return self.visitor.OnArrayEnd()
+        default:
+            return types.ERR_INVALID_CHAR
+        }
+    }
+}
+
+// NOTE: keep in sync with (*Parser).decodeObject method.
+func (self *traverser) decodeObject() error {
+    sp := self.parser.p
+    ns := len(self.parser.s)
+
+    /* check for EOF */
+    self.parser.p = self.parser.lspace(sp)
+    if self.parser.p >= ns {
+        return types.ERR_EOF
+    }
+
+    /* check for empty object */
+    if self.parser.s[self.parser.p] == '}' {
+        self.parser.p++
+        if err := self.visitor.OnObjectBegin(0); err != nil {
+            return err
+        }
+        return self.visitor.OnObjectEnd()
+    }
+
+    /* allocate object space and decode each pair */
+    if err := self.visitor.OnObjectBegin(_DEFAULT_NODE_CAP); err != nil {
+        return err
+    }
+    for {
+        var njs types.JsonState
+        var err types.ParsingError
+
+        /* decode the key */
+        if njs = self.parser.decodeValue(); njs.Vt != types.V_STRING {
+            return types.ERR_INVALID_CHAR
+        }
+
+        /* extract the key */
+        idx := self.parser.p - 1
+        key := self.parser.s[njs.Iv:idx]
+
+        /* check for escape sequence */
+        if njs.Ep != -1 {
+            if key, err = unquote(key); err != 0 {
+                return err
+            }
+        }
+
+        if err := self.visitor.OnObjectKey(key); err != nil {
+            return err
+        }
+
+        /* expect a ':' delimiter */
+        if err = self.parser.delim(); err != 0 {
+            return err
+        }
+
+        /* decode the value */
+        if err := self.decodeValue(); err != nil {
+            return err
+        }
+
+        self.parser.p = self.parser.lspace(self.parser.p)
+
+        /* check for EOF */
+        if self.parser.p >= ns {
+            return types.ERR_EOF
+        }
+
+        /* check for the next character */
+        switch self.parser.s[self.parser.p] {
+        case ',':
+            self.parser.p++
+        case '}':
+            self.parser.p++
+            return self.visitor.OnObjectEnd()
+        default:
+            return types.ERR_INVALID_CHAR
+        }
+    }
+}
+
+// NOTE: keep in sync with (*Parser).decodeString method.
+func (self *traverser) decodeString(iv int64, ep int) error {
+    p := self.parser.p - 1
+    s := self.parser.s[iv:p]
+
+    /* fast path: no escape sequence */
+    if ep == -1 {
+        return self.visitor.OnString(s)
+    }
+
+    /* unquote the string */
+    out, err := unquote(s)
+    if err != 0 {
+        return err
+    }
+    return self.visitor.OnString(out)
+}

+ 131 - 0
vendor/github.com/bytedance/sonic/compat.go

@@ -0,0 +1,131 @@
+// +build !amd64 !go1.16 go1.23
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package sonic
+
+import (
+    `bytes`
+    `encoding/json`
+    `io`
+    `reflect`
+
+    `github.com/bytedance/sonic/option`
+)
+
+type frozenConfig struct {
+    Config
+}
+
+// Froze convert the Config to API
+func (cfg Config) Froze() API {
+    api := &frozenConfig{Config: cfg}
+    return api
+}
+
+func (cfg frozenConfig) marshalOptions(val interface{}, prefix, indent string) ([]byte, error) {
+    w := bytes.NewBuffer([]byte{})
+    enc := json.NewEncoder(w)
+    enc.SetEscapeHTML(cfg.EscapeHTML)
+    enc.SetIndent(prefix, indent)
+    err := enc.Encode(val)
+	out := w.Bytes()
+
+	// json.Encoder always appends '\n' after encoding,
+	// which is not same with json.Marshal()
+	if len(out) > 0 && out[len(out)-1] == '\n' {
+		out = out[:len(out)-1]
+	}
+	return out, err
+}
+
+// Marshal is implemented by sonic
+func (cfg frozenConfig) Marshal(val interface{}) ([]byte, error) {
+    if !cfg.EscapeHTML {
+        return cfg.marshalOptions(val, "", "")
+    }
+    return json.Marshal(val)
+}
+
+// MarshalToString is implemented by sonic
+func (cfg frozenConfig) MarshalToString(val interface{}) (string, error) {
+    out, err := cfg.Marshal(val)
+    return string(out), err
+}
+
+// MarshalIndent is implemented by sonic
+func (cfg frozenConfig) MarshalIndent(val interface{}, prefix, indent string) ([]byte, error) {
+    if !cfg.EscapeHTML {
+        return cfg.marshalOptions(val, prefix, indent)
+    }
+    return json.MarshalIndent(val, prefix, indent)
+}
+
+// UnmarshalFromString is implemented by sonic
+func (cfg frozenConfig) UnmarshalFromString(buf string, val interface{}) error {
+    r := bytes.NewBufferString(buf)
+    dec := json.NewDecoder(r)
+    if cfg.UseNumber {
+        dec.UseNumber()
+    }
+    if cfg.DisallowUnknownFields {
+        dec.DisallowUnknownFields()
+    }
+    return dec.Decode(val)
+}
+
+// Unmarshal is implemented by sonic
+func (cfg frozenConfig) Unmarshal(buf []byte, val interface{}) error {
+    return cfg.UnmarshalFromString(string(buf), val)
+}
+
+// NewEncoder is implemented by sonic
+func (cfg frozenConfig) NewEncoder(writer io.Writer) Encoder {
+    enc := json.NewEncoder(writer)
+    if !cfg.EscapeHTML {
+        enc.SetEscapeHTML(cfg.EscapeHTML)
+    }
+    return enc
+}
+
+// NewDecoder is implemented by sonic
+func (cfg frozenConfig) NewDecoder(reader io.Reader) Decoder {
+    dec := json.NewDecoder(reader)
+    if cfg.UseNumber {
+        dec.UseNumber()
+    }
+    if cfg.DisallowUnknownFields {
+        dec.DisallowUnknownFields()
+    }
+    return dec
+}
+
+// Valid is implemented by sonic
+func (cfg frozenConfig) Valid(data []byte) bool {
+    return json.Valid(data)
+}
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency at **amd64** Arch.
+// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+// a compile option to set the depth of recursive compile for the nested struct type.
+// * This is a no-op implementation for non-amd64 builds.
+// It is useful for developers working on non-amd64 architectures, such as Apple M1.
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
+    return nil
+}
+

+ 68 - 0
vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go

@@ -0,0 +1,68 @@
+// +build amd64,go1.16,!go1.23
+
+/*
+* Copyright 2023 ByteDance Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package decoder
+
+import (
+    `github.com/bytedance/sonic/internal/decoder`
+)
+
+// Decoder is the decoder context object
+type Decoder = decoder.Decoder
+
+// SyntaxError represents json syntax error
+type SyntaxError = decoder.SyntaxError
+
+// MismatchTypeError represents a type mismatch between the JSON input and the target object
+type MismatchTypeError = decoder.MismatchTypeError
+
+// Options for decode.
+type Options = decoder.Options
+
+const (
+    OptionUseInt64         Options = decoder.OptionUseInt64
+    OptionUseNumber        Options = decoder.OptionUseNumber
+    OptionUseUnicodeErrors Options = decoder.OptionUseUnicodeErrors
+    OptionDisableUnknown   Options = decoder.OptionDisableUnknown
+    OptionCopyString       Options = decoder.OptionCopyString
+    OptionValidateString   Options = decoder.OptionValidateString
+)
+
+// StreamDecoder is the decoder context object for streaming input.
+type StreamDecoder = decoder.StreamDecoder
+
+var (
+    // NewDecoder creates a new decoder instance.
+    NewDecoder = decoder.NewDecoder
+
+    // NewStreamDecoder adapts to encoding/json.NewDecoder API.
+    //
+    // NewStreamDecoder returns a new decoder that reads from r.
+    NewStreamDecoder = decoder.NewStreamDecoder
+
+    // Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+    // order to reduce the first-hit latency.
+    //
+    // Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+    // a compile option to set the depth of recursive compile for the nested struct type.
+    Pretouch = decoder.Pretouch
+    
+    // Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid.
+    // Otherwise, returns negative error code using start and invalid character position using end
+    Skip = decoder.Skip
+)

+ 194 - 0
vendor/github.com/bytedance/sonic/decoder/decoder_compat.go

@@ -0,0 +1,194 @@
+// +build !amd64 !go1.16 go1.23
+
+/*
+* Copyright 2023 ByteDance Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+ */
+
+package decoder
+
+import (
+    `bytes`
+    `encoding/json`
+    `io`
+    `reflect`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/option`
+)
+
+func init() {
+     println("WARNING: sonic only supports Go1.16~1.22 && CPU amd64, but your environment is not suitable")
+}
+
+const (
+     _F_use_int64       = 0
+     _F_disable_urc     = 2
+     _F_disable_unknown = 3
+     _F_copy_string     = 4
+ 
+     _F_use_number      = types.B_USE_NUMBER
+     _F_validate_string = types.B_VALIDATE_STRING
+     _F_allow_control   = types.B_ALLOW_CONTROL
+)
+
+type Options uint64
+
+const (
+     OptionUseInt64         Options = 1 << _F_use_int64
+     OptionUseNumber        Options = 1 << _F_use_number
+     OptionUseUnicodeErrors Options = 1 << _F_disable_urc
+     OptionDisableUnknown   Options = 1 << _F_disable_unknown
+     OptionCopyString       Options = 1 << _F_copy_string
+     OptionValidateString   Options = 1 << _F_validate_string
+)
+
+func (self *Decoder) SetOptions(opts Options) {
+     if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) {
+         panic("can't set OptionUseInt64 and OptionUseNumber both!")
+     }
+     self.f = uint64(opts)
+}
+
+
+// Decoder is the decoder context object
+type Decoder struct {
+     i int
+     f uint64
+     s string
+}
+
+// NewDecoder creates a new decoder instance.
+func NewDecoder(s string) *Decoder {
+     return &Decoder{s: s}
+}
+
+// Pos returns the current decoding position.
+func (self *Decoder) Pos() int {
+     return self.i
+}
+
+func (self *Decoder) Reset(s string) {
+     self.s = s
+     self.i = 0
+     // self.f = 0
+}
+
+// NOTE: api fallback do nothing
+func (self *Decoder) CheckTrailings() error {
+     pos := self.i
+     buf := self.s
+     /* skip all the trailing spaces */
+     if pos != len(buf) {
+         for pos < len(buf) && (types.SPACE_MASK & (1 << buf[pos])) != 0 {
+             pos++
+         }
+     }
+
+     /* then it must be at EOF */
+     if pos == len(buf) {
+         return nil
+     }
+
+     /* junk after JSON value */
+     return nil
+}
+
+
+// Decode parses the JSON-encoded data from current position and stores the result
+// in the value pointed to by val.
+func (self *Decoder) Decode(val interface{}) error {
+    r := bytes.NewBufferString(self.s)
+   dec := json.NewDecoder(r)
+   if (self.f & uint64(OptionUseNumber)) != 0  {
+       dec.UseNumber()
+   }
+   if (self.f & uint64(OptionDisableUnknown)) != 0  {
+       dec.DisallowUnknownFields()
+   }
+   return dec.Decode(val)
+}
+
+// UseInt64 indicates the Decoder to unmarshal an integer into an interface{} as an
+// int64 instead of as a float64.
+func (self *Decoder) UseInt64() {
+     self.f  |= 1 << _F_use_int64
+     self.f &^= 1 << _F_use_number
+}
+
+// UseNumber indicates the Decoder to unmarshal a number into an interface{} as a
+// json.Number instead of as a float64.
+func (self *Decoder) UseNumber() {
+     self.f &^= 1 << _F_use_int64
+     self.f  |= 1 << _F_use_number
+}
+
+// UseUnicodeErrors indicates the Decoder to return an error when encounter invalid
+// UTF-8 escape sequences.
+func (self *Decoder) UseUnicodeErrors() {
+     self.f |= 1 << _F_disable_urc
+}
+
+// DisallowUnknownFields indicates the Decoder to return an error when the destination
+// is a struct and the input contains object keys which do not match any
+// non-ignored, exported fields in the destination.
+func (self *Decoder) DisallowUnknownFields() {
+     self.f |= 1 << _F_disable_unknown
+}
+
+// CopyString indicates the Decoder to decode string values by copying instead of referring.
+func (self *Decoder) CopyString() {
+     self.f |= 1 << _F_copy_string
+}
+
+// ValidateString causes the Decoder to validate string values when decoding string value 
+// in JSON. Validation is that, returning error when unescaped control chars(0x00-0x1f) or
+// invalid UTF-8 chars in the string value of JSON.
+func (self *Decoder) ValidateString() {
+     self.f |= 1 << _F_validate_string
+}
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+// a compile option to set the depth of recursive compile for the nested struct type.
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
+     return nil
+}
+
+type StreamDecoder = json.Decoder
+
+// NewStreamDecoder adapts to encoding/json.NewDecoder API.
+//
+// NewStreamDecoder returns a new decoder that reads from r.
+func NewStreamDecoder(r io.Reader) *StreamDecoder {
+   return json.NewDecoder(r)
+}
+
+// SyntaxError represents json syntax error
+type SyntaxError json.SyntaxError
+
+// Description
+func (s SyntaxError) Description() string {
+     return (*json.SyntaxError)(unsafe.Pointer(&s)).Error()
+}
+// Error
+func (s SyntaxError) Error() string {
+     return (*json.SyntaxError)(unsafe.Pointer(&s)).Error()
+}
+
+// MismatchTypeError represents a type mismatch between the JSON input and the target object
+type MismatchTypeError json.UnmarshalTypeError

+ 117 - 0
vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go

@@ -0,0 +1,117 @@
+// +build amd64,go1.16,!go1.23
+
+/*
+ * Copyright 2023 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+import (
+    `github.com/bytedance/sonic/internal/encoder`
+)
+
+// EnableFallback indicates if encoder use fallback
+const EnableFallback = false
+
+// Encoder represents a specific set of encoder configurations.
+type Encoder = encoder.Encoder
+
+// StreamEncoder uses io.Writer as input.
+type StreamEncoder = encoder.StreamEncoder
+
+// Options is a set of encoding options.
+type Options = encoder.Options
+
+const (
+    // SortMapKeys indicates that the keys of a map needs to be sorted
+    // before serializing into JSON.
+    // WARNING: This hurts performance A LOT, USE WITH CARE.
+    SortMapKeys Options = encoder.SortMapKeys
+
+    // EscapeHTML indicates encoder to escape all HTML characters
+    // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
+    // WARNING: This hurts performance A LOT, USE WITH CARE.
+    EscapeHTML Options = encoder.EscapeHTML
+
+    // CompactMarshaler indicates that the output JSON from json.Marshaler
+    // is always compact and needs no validation
+    CompactMarshaler Options = encoder.CompactMarshaler
+
+    // NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
+    // is always escaped string and needs no quoting
+    NoQuoteTextMarshaler Options = encoder.NoQuoteTextMarshaler
+
+    // NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
+    // instead of 'null'
+    NoNullSliceOrMap Options = encoder.NoNullSliceOrMap
+
+    // ValidateString indicates that encoder should validate the input string
+    // before encoding it into JSON.
+    ValidateString Options = encoder.ValidateString
+
+    // NoValidateJSONMarshaler indicates that the encoder should not validate the output string
+    // after encoding the JSONMarshaler to JSON.
+    NoValidateJSONMarshaler Options = encoder.NoValidateJSONMarshaler
+
+    // NoEncoderNewline indicates that the encoder should not add a newline after every message
+    NoEncoderNewline Options = encoder.NoEncoderNewline
+
+    // CompatibleWithStd is used to be compatible with std encoder.
+    CompatibleWithStd Options = encoder.CompatibleWithStd
+)
+
+
+var (
+    // Encode returns the JSON encoding of val, encoded with opts.
+    Encode = encoder.Encode
+
+    // EncodeIndented is like Encode but applies Indent to format the output.
+    // Each JSON element in the output will begin on a new line beginning with prefix
+    // followed by one or more copies of indent according to the indentation nesting.
+    EncodeIndented = encoder.EncodeIndented
+
+    // EncodeInto is like Encode but uses a user-supplied buffer instead of allocating a new one.
+    EncodeInto = encoder.EncodeInto
+
+    // HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
+    // characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
+    // so that the JSON will be safe to embed inside HTML <script> tags.
+    // For historical reasons, web browsers don't honor standard HTML
+    // escaping within <script> tags, so an alternative JSON encoding must
+    // be used.
+    HTMLEscape = encoder.HTMLEscape
+
+    // Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+    // order to reduce the first-hit latency.
+    //
+    // Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+    // a compile option to set the depth of recursive compile for the nested struct type.
+    Pretouch = encoder.Pretouch
+
+    // Quote returns the JSON-quoted version of s.
+    Quote = encoder.Quote
+
+    // Valid validates json and returns first non-blank character position,
+    // if it is only one valid json value.
+    // Otherwise returns invalid character position using start.
+    //
+    // Note: it does not check for the invalid UTF-8 characters.
+    Valid = encoder.Valid
+
+    // NewStreamEncoder adapts to the encoding/json.NewEncoder API.
+    //
+    // NewStreamEncoder returns a new encoder that writes to w.
+    NewStreamEncoder = encoder.NewStreamEncoder
+)

+ 261 - 0
vendor/github.com/bytedance/sonic/encoder/encoder_compat.go

@@ -0,0 +1,261 @@
+// +build !amd64 !go1.16 go1.23
+
+/*
+* Copyright 2023 ByteDance Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package encoder
+
+import (
+   `io`
+    `bytes`
+    `encoding/json`
+    `reflect`
+
+    `github.com/bytedance/sonic/option`
+)
+
+func init() {
+    println("WARNING:(encoder) sonic only supports Go1.16~1.22 && CPU amd64, but your environment is not suitable")
+}
+
+// EnableFallback indicates if encoder use fallback
+const EnableFallback = true
+
+// Options is a set of encoding options.
+type Options uint64
+
+const (
+    bitSortMapKeys          = iota
+    bitEscapeHTML          
+    bitCompactMarshaler
+    bitNoQuoteTextMarshaler
+    bitNoNullSliceOrMap
+    bitValidateString
+    bitNoValidateJSONMarshaler
+    bitNoEncoderNewline
+
+    // used for recursive compile
+    bitPointerValue = 63
+)
+
+const (
+    // SortMapKeys indicates that the keys of a map needs to be sorted 
+    // before serializing into JSON.
+    // WARNING: This hurts performance A LOT, USE WITH CARE.
+    SortMapKeys          Options = 1 << bitSortMapKeys
+
+    // EscapeHTML indicates encoder to escape all HTML characters 
+    // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
+    // WARNING: This hurts performance A LOT, USE WITH CARE.
+    EscapeHTML           Options = 1 << bitEscapeHTML
+
+    // CompactMarshaler indicates that the output JSON from json.Marshaler 
+    // is always compact and needs no validation 
+    CompactMarshaler     Options = 1 << bitCompactMarshaler
+
+    // NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler 
+    // is always escaped string and needs no quoting
+    NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler
+
+    // NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
+    // instead of 'null'
+    NoNullSliceOrMap     Options = 1 << bitNoNullSliceOrMap
+
+    // ValidateString indicates that encoder should validate the input string
+    // before encoding it into JSON.
+    ValidateString       Options = 1 << bitValidateString
+
+    // NoValidateJSONMarshaler indicates that the encoder should not validate the output string
+    // after encoding the JSONMarshaler to JSON.
+    NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler
+
+    // NoEncoderNewline indicates that the encoder should not add a newline after every message
+    NoEncoderNewline Options = 1 << bitNoEncoderNewline
+  
+    // CompatibleWithStd is used to be compatible with std encoder.
+    CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
+)
+
+// Encoder represents a specific set of encoder configurations.
+type Encoder struct {
+    Opts Options
+    prefix string
+    indent string
+}
+
+// Encode returns the JSON encoding of v.
+func (self *Encoder) Encode(v interface{}) ([]byte, error) {
+    if self.indent != "" || self.prefix != "" { 
+        return EncodeIndented(v, self.prefix, self.indent, self.Opts)
+    }
+    return Encode(v, self.Opts)
+}
+
+// SortKeys enables the SortMapKeys option.
+func (self *Encoder) SortKeys() *Encoder {
+    self.Opts |= SortMapKeys
+    return self
+}
+
+// SetEscapeHTML specifies if option EscapeHTML opens
+func (self *Encoder) SetEscapeHTML(f bool) {
+    if f {
+        self.Opts |= EscapeHTML
+    } else {
+        self.Opts &= ^EscapeHTML
+    }
+}
+
+// SetValidateString specifies if option ValidateString opens
+func (self *Encoder) SetValidateString(f bool) {
+    if f {
+        self.Opts |= ValidateString
+    } else {
+        self.Opts &= ^ValidateString
+    }
+}
+
+// SetNoValidateJSONMarshaler specifies if option NoValidateJSONMarshaler opens
+func (self *Encoder) SetNoValidateJSONMarshaler(f bool) {
+    if f {
+        self.Opts |= NoValidateJSONMarshaler
+    } else {
+        self.Opts &= ^NoValidateJSONMarshaler
+    }
+}
+
+// SetNoEncoderNewline specifies if option NoEncoderNewline opens
+func (self *Encoder) SetNoEncoderNewline(f bool) {
+    if f {
+        self.Opts |= NoEncoderNewline
+    } else {
+        self.Opts &= ^NoEncoderNewline
+    }
+}
+
+// SetCompactMarshaler specifies if option CompactMarshaler opens
+func (self *Encoder) SetCompactMarshaler(f bool) {
+    if f {
+        self.Opts |= CompactMarshaler
+    } else {
+        self.Opts &= ^CompactMarshaler
+    }
+}
+
+// SetNoQuoteTextMarshaler specifies if option NoQuoteTextMarshaler opens
+func (self *Encoder) SetNoQuoteTextMarshaler(f bool) {
+    if f {
+        self.Opts |= NoQuoteTextMarshaler
+    } else {
+        self.Opts &= ^NoQuoteTextMarshaler
+    }
+}
+
+// SetIndent instructs the encoder to format each subsequent encoded
+// value as if indented by the package-level function EncodeIndent().
+// Calling SetIndent("", "") disables indentation.
+func (enc *Encoder) SetIndent(prefix, indent string) {
+    enc.prefix = prefix
+    enc.indent = indent
+}
+
+// Quote returns the JSON-quoted version of s.
+func Quote(s string) string {
+    /* check for empty string */
+    if s == "" {
+        return `""`
+    }
+
+    out, _ := json.Marshal(s)
+    return string(out)
+}
+
+// Encode returns the JSON encoding of val, encoded with opts.
+func Encode(val interface{}, opts Options) ([]byte, error) {
+   return json.Marshal(val)
+}
+
+// EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
+// a new one.
+func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
+    if buf == nil {
+        panic("user-supplied buffer buf is nil")
+    }
+    w := bytes.NewBuffer(*buf)
+    enc := json.NewEncoder(w)
+    enc.SetEscapeHTML((opts & EscapeHTML) != 0)
+    err := enc.Encode(val)
+    *buf = w.Bytes()
+    l := len(*buf)
+    if l > 0 && (opts & NoEncoderNewline != 0) && (*buf)[l-1] == '\n' {
+        *buf = (*buf)[:l-1]
+    }
+    return err
+}
+
+// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
+// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
+// so that the JSON will be safe to embed inside HTML <script> tags.
+// For historical reasons, web browsers don't honor standard HTML
+// escaping within <script> tags, so an alternative JSON encoding must
+// be used.
+func HTMLEscape(dst []byte, src []byte) []byte {
+   d := bytes.NewBuffer(dst)
+   json.HTMLEscape(d, src)
+   return d.Bytes()
+}
+
+// EncodeIndented is like Encode but applies Indent to format the output.
+// Each JSON element in the output will begin on a new line beginning with prefix
+// followed by one or more copies of indent according to the indentation nesting.
+func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) {
+   w := bytes.NewBuffer([]byte{})
+   enc := json.NewEncoder(w)
+   enc.SetEscapeHTML((opts & EscapeHTML) != 0)
+   enc.SetIndent(prefix, indent)
+   err := enc.Encode(val)
+   out := w.Bytes()
+   return out, err
+}
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+// a compile option to set the depth of recursive compile for the nested struct type.
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
+   return nil
+}
+
+// Valid validates json and returns first non-blank character position,
+// if it is only one valid json value.
+// Otherwise returns invalid character position using start.
+//
+// Note: it does not check for the invalid UTF-8 characters.
+func Valid(data []byte) (ok bool, start int) {
+   return json.Valid(data), 0
+}
+
+// StreamEncoder uses io.Writer as input.
+type StreamEncoder = json.Encoder
+
+// NewStreamEncoder adapts to the encoding/json.NewEncoder API.
+//
+// NewStreamEncoder returns a new encoder that writes to w.
+func NewStreamEncoder(w io.Writer) *StreamEncoder {
+   return json.NewEncoder(w)
+}
+

+ 9 - 0
vendor/github.com/bytedance/sonic/go.work

@@ -0,0 +1,9 @@
+go 1.18
+
+use (
+	.
+	./external_jsonlib_test
+	./fuzz
+	./generic_test
+	./loader
+)

+ 0 - 0
vendor/github.com/bytedance/sonic/internal/caching/asm.s


+ 115 - 0
vendor/github.com/bytedance/sonic/internal/caching/fcache.go

@@ -0,0 +1,115 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package caching
+
+import (
+    `strings`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/rt`
+)
+
+type FieldMap struct {
+    N uint64
+    b unsafe.Pointer
+    m map[string]int
+}
+
+type FieldEntry struct {
+    ID   int
+    Name string
+    Hash uint64
+}
+
+const (
+    FieldMap_N     = int64(unsafe.Offsetof(FieldMap{}.N))
+    FieldMap_b     = int64(unsafe.Offsetof(FieldMap{}.b))
+	FieldEntrySize = int64(unsafe.Sizeof(FieldEntry{}))
+)
+
+func newBucket(n int) unsafe.Pointer {
+    v := make([]FieldEntry, n)
+    return (*rt.GoSlice)(unsafe.Pointer(&v)).Ptr
+}
+
+func CreateFieldMap(n int) *FieldMap {
+    return &FieldMap {
+        N: uint64(n * 2),
+        b: newBucket(n * 2),    // LoadFactor = 0.5
+        m: make(map[string]int, n * 2),
+    }
+}
+
+func (self *FieldMap) At(p uint64) *FieldEntry {
+    off := uintptr(p) * uintptr(FieldEntrySize)
+    return (*FieldEntry)(unsafe.Pointer(uintptr(self.b) + off))
+}
+
+// Get searches FieldMap by name. JIT generated assembly does NOT call this
+// function, rather it implements its own version directly in assembly. So
+// we must ensure this function stays in sync with the JIT generated one.
+func (self *FieldMap) Get(name string) int {
+    h := StrHash(name)
+    p := h % self.N
+    s := self.At(p)
+
+    /* find the element;
+     * the hash map is never full, so the loop will always terminate */
+    for s.Hash != 0 {
+        if s.Hash == h && s.Name == name {
+            return s.ID
+        } else {
+            p = (p + 1) % self.N
+            s = self.At(p)
+        }
+    }
+
+    /* not found */
+    return -1
+}
+
+func (self *FieldMap) Set(name string, i int) {
+    h := StrHash(name)
+    p := h % self.N
+    s := self.At(p)
+
+    /* searching for an empty slot;
+     * the hash map is never full, so the loop will always terminate */
+    for s.Hash != 0 {
+        p = (p + 1) % self.N
+        s = self.At(p)
+    }
+
+    /* set the value */
+    s.ID   = i
+    s.Hash = h
+    s.Name = name
+
+    /* add the case-insensitive version, prefer the one with smaller field ID */
+    key := strings.ToLower(name)
+    if v, ok := self.m[key]; !ok || i < v {
+        self.m[key] = i
+    }
+}
+
+func (self *FieldMap) GetCaseInsensitive(name string) int {
+    if i, ok := self.m[strings.ToLower(name)]; ok {
+        return i
+    } else {
+        return -1
+    }
+}

+ 40 - 0
vendor/github.com/bytedance/sonic/internal/caching/hashing.go

@@ -0,0 +1,40 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package caching
+
+import (
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/rt`
+)
+
+var (
+    V_strhash = rt.UnpackEface(strhash)
+    S_strhash = *(*uintptr)(V_strhash.Value)
+)
+
+//go:noescape
+//go:linkname strhash runtime.strhash
+func strhash(_ unsafe.Pointer, _ uintptr) uintptr
+
+func StrHash(s string) uint64 {
+    if v := strhash(unsafe.Pointer(&s), 0); v == 0 {
+        return 1
+    } else {
+        return uint64(v)
+    }
+}

+ 173 - 0
vendor/github.com/bytedance/sonic/internal/caching/pcache.go

@@ -0,0 +1,173 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package caching
+
+import (
+    `sync`
+    `sync/atomic`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/rt`
+)
+
+/** Program Map **/
+
+const (
+    _LoadFactor   = 0.5
+    _InitCapacity = 4096    // must be a power of 2
+)
+
+type _ProgramMap struct {
+    n uint64
+    m uint32
+    b []_ProgramEntry
+}
+
+type _ProgramEntry struct {
+    vt *rt.GoType
+    fn interface{}
+}
+
+func newProgramMap() *_ProgramMap {
+    return &_ProgramMap {
+        n: 0,
+        m: _InitCapacity - 1,
+        b: make([]_ProgramEntry, _InitCapacity),
+    }
+}
+
+func (self *_ProgramMap) copy() *_ProgramMap {
+    fork := &_ProgramMap{
+        n: self.n,
+        m: self.m,
+        b: make([]_ProgramEntry, len(self.b)),
+    }
+    for i, f := range self.b {
+        fork.b[i] = f
+    }
+    return fork
+}
+
+func (self *_ProgramMap) get(vt *rt.GoType) interface{} {
+    i := self.m + 1
+    p := vt.Hash & self.m
+
+    /* linear probing */
+    for ; i > 0; i-- {
+        if b := self.b[p]; b.vt == vt {
+            return b.fn
+        } else if b.vt == nil {
+            break
+        } else {
+            p = (p + 1) & self.m
+        }
+    }
+
+    /* not found */
+    return nil
+}
+
+func (self *_ProgramMap) add(vt *rt.GoType, fn interface{}) *_ProgramMap {
+    p := self.copy()
+    f := float64(atomic.LoadUint64(&p.n) + 1) / float64(p.m + 1)
+
+    /* check for load factor */
+    if f > _LoadFactor {
+        p = p.rehash()
+    }
+
+    /* insert the value */
+    p.insert(vt, fn)
+    return p
+}
+
+func (self *_ProgramMap) rehash() *_ProgramMap {
+    c := (self.m + 1) << 1
+    r := &_ProgramMap{m: c - 1, b: make([]_ProgramEntry, int(c))}
+
+    /* rehash every entry */
+    for i := uint32(0); i <= self.m; i++ {
+        if b := self.b[i]; b.vt != nil {
+            r.insert(b.vt, b.fn)
+        }
+    }
+
+    /* rebuild successful */
+    return r
+}
+
+func (self *_ProgramMap) insert(vt *rt.GoType, fn interface{}) {
+    h := vt.Hash
+    p := h & self.m
+
+    /* linear probing */
+    for i := uint32(0); i <= self.m; i++ {
+        if b := &self.b[p]; b.vt != nil {
+            p += 1
+            p &= self.m
+        } else {
+            b.vt = vt
+            b.fn = fn
+            atomic.AddUint64(&self.n, 1)
+            return
+        }
+    }
+
+    /* should never happens */
+    panic("no available slots")
+}
+
+/** RCU Program Cache **/
+
+type ProgramCache struct {
+    m sync.Mutex
+    p unsafe.Pointer
+}
+
+func CreateProgramCache() *ProgramCache {
+    return &ProgramCache {
+        m: sync.Mutex{},
+        p: unsafe.Pointer(newProgramMap()),
+    }
+}
+
+func (self *ProgramCache) Get(vt *rt.GoType) interface{} {
+    return (*_ProgramMap)(atomic.LoadPointer(&self.p)).get(vt)
+}
+
+func (self *ProgramCache) Compute(vt *rt.GoType, compute func(*rt.GoType, ... interface{}) (interface{}, error), ex ...interface{}) (interface{}, error) {
+    var err error
+    var val interface{}
+
+    /* use defer to prevent inlining of this function */
+    self.m.Lock()
+    defer self.m.Unlock()
+
+    /* double check with write lock held */
+    if val = self.Get(vt); val != nil {
+        return val, nil
+    }
+
+    /* compute the value */
+    if val, err = compute(vt, ex...); err != nil {
+        return nil, err
+    }
+
+    /* update the RCU cache */
+    atomic.StorePointer(&self.p, unsafe.Pointer((*_ProgramMap)(atomic.LoadPointer(&self.p)).add(vt, val)))
+    return val, nil
+}

+ 40 - 0
vendor/github.com/bytedance/sonic/internal/cpu/features.go

@@ -0,0 +1,40 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package cpu
+
+import (
+    `fmt`
+    `os`
+
+    `github.com/klauspost/cpuid/v2`
+)
+
+var (
+    HasAVX  = cpuid.CPU.Has(cpuid.AVX)
+    HasAVX2 = cpuid.CPU.Has(cpuid.AVX2)
+    HasSSE = cpuid.CPU.Has(cpuid.SSE)
+)
+
+func init() {
+    switch v := os.Getenv("SONIC_MODE"); v {
+        case ""       : break
+        case "auto"   : break
+        case "noavx"  : HasAVX = false; fallthrough
+        case "noavx2" : HasAVX2 = false
+        default       : panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v))
+    }
+}

+ 0 - 0
vendor/github.com/bytedance/sonic/internal/decoder/asm.s


+ 130 - 0
vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go

@@ -0,0 +1,130 @@
+// +build go1.16,!go1.17
+
+// Copyright 2023 CloudWeGo Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package decoder
+
+import (
+    `strconv`
+    _ `unsafe`
+
+    `github.com/bytedance/sonic/internal/jit`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/twitchyliquid64/golang-asm/obj`
+    `github.com/twitchyliquid64/golang-asm/obj/x86`
+)
+
+var _runtime_writeBarrier uintptr = rt.GcwbAddr()
+
+//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
+func gcWriteBarrierAX()
+
+var (
+    _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))
+
+    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
+)
+
+func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R10)
+    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
+    self.Rjmp("CALL", _R10)  
+    if saveDI {
+        self.load(_DI)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}
+
+func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _R10)
+    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveAX {
+        self.Emit("XCHGQ", ptr, _AX)
+    } else {
+        self.Emit("MOVQ", ptr, _AX)
+    }
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
+    self.Rjmp("CALL", _R10)  
+    if saveDI {
+        self.load(_DI)
+    } 
+    if saveAX {
+        self.Emit("XCHGQ", ptr, _AX)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}
+
+
+func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R10)
+    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
+    self.Rjmp("CALL", _R10)  
+    if saveDI {
+        self.load(_DI)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}
+
+func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _R10)
+    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, _AX)
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
+    self.Rjmp("CALL", _R10)  
+    if saveDI {
+        self.load(_DI)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}

+ 126 - 0
vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go

@@ -0,0 +1,126 @@
+// +build go1.17,!go1.21
+
+// Copyright 2023 CloudWeGo Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package decoder
+
+import (
+    `strconv`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/jit`
+    `github.com/twitchyliquid64/golang-asm/obj`
+    `github.com/twitchyliquid64/golang-asm/obj/x86`
+)
+
+//go:linkname _runtime_writeBarrier runtime.writeBarrier
+var _runtime_writeBarrier uintptr
+
+//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
+func gcWriteBarrierAX()
+
+var (
+    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
+
+    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
+)
+
+func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.call(_F_gcWriteBarrierAX)  
+    if saveDI {
+        self.load(_DI)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}
+
+func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveAX {
+        self.Emit("XCHGQ", ptr, _AX)
+    } else {
+        self.Emit("MOVQ", ptr, _AX)
+    }
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.call(_F_gcWriteBarrierAX) 
+    if saveDI {
+        self.load(_DI)
+    } 
+    if saveAX {
+        self.Emit("XCHGQ", ptr, _AX)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}
+
+
+func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.call(_F_gcWriteBarrierAX)
+    if saveDI {
+        self.load(_DI)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}
+
+func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _AX)
+    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, _AX)
+    if saveDI {
+        self.save(_DI)
+    }
+    self.Emit("LEAQ", rec, _DI)
+    self.call(_F_gcWriteBarrierAX)
+    if saveDI {
+        self.load(_DI)
+    }    
+    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
+}

+ 132 - 0
vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go

@@ -0,0 +1,132 @@
+// +build go1.21,!go1.23
+
+// Copyright 2023 CloudWeGo Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package decoder
+
+import (
+    `strconv`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/jit`
+    `github.com/twitchyliquid64/golang-asm/obj`
+    `github.com/twitchyliquid64/golang-asm/obj/x86`
+)
+
+//go:linkname _runtime_writeBarrier runtime.writeBarrier
+var _runtime_writeBarrier uintptr
+
+//go:nosplit
+//go:linkname gcWriteBarrier2 runtime.gcWriteBarrier2
+func gcWriteBarrier2()
+
+// Notice: gcWriteBarrier must use R11 register!!
+var _R11 = _IC
+
+var (
+    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
+
+    _F_gcWriteBarrier2 = jit.Func(gcWriteBarrier2)
+)
+
+func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI, _R11)
+    } else {
+        self.save(_R11)
+    }
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)  
+    self.Rjmp("CALL", _R11)  
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _DI)
+    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
+    if saveDI {
+        self.load(_DI, _R11)
+    } else {
+        self.load(_R11)
+    }   
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+}
+
+func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveAX {
+        self.save(_AX, _R11)
+    } else {
+        self.save(_R11)
+    }
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)  
+    self.Rjmp("CALL", _R11)  
+    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _AX)
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
+    if saveAX {
+        self.load(_AX, _R11)
+    } else {
+        self.load(_R11)
+    }   
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+}
+
+func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI, _R11)
+    } else {
+        self.save(_R11)
+    }
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)  
+    self.Rjmp("CALL", _R11)   
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _DI)
+    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
+    if saveDI {
+        self.load(_DI, _R11)
+    } else {
+        self.load(_R11)
+    }   
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+}
+
+func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _AX)
+    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.save(_R11)
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)  
+    self.Rjmp("CALL", _R11)     
+    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _AX)
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
+    self.load(_R11)  
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+}

+ 1930 - 0
vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go

@@ -0,0 +1,1930 @@
+// +build go1.17,!go1.23
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package decoder
+
+import (
+    `encoding/json`
+    `fmt`
+    `math`
+    `reflect`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/caching`
+    `github.com/bytedance/sonic/internal/jit`
+    `github.com/bytedance/sonic/internal/native`
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/twitchyliquid64/golang-asm/obj`
+)
+
+/** Register Allocations
+ *
+ *  State Registers:
+ *
+ *      %r13 : stack base
+ *      %r10 : input pointer
+ *      %r12 : input length
+ *      %r11 : input cursor
+ *      %r15 : value pointer
+ *
+ *  Error Registers:
+ *
+ *      %rax : error type register
+ *      %rbx : error pointer register
+ */
+
+/** Function Prototype & Stack Map
+ *
+ *  func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
+ *
+ *  s.buf  :   (FP)
+ *  s.len  :  8(FP)
+ *  ic     : 16(FP)
+ *  vp     : 24(FP)
+ *  sb     : 32(FP)
+ *  fv     : 40(FP)
+ *  sv     : 56(FP)
+ *  err.vt : 72(FP)
+ *  err.vp : 80(FP)
+ */
+
+const (
+    _FP_args   = 72     // 72 bytes to pass and spill register arguements
+    _FP_fargs  = 80     // 80 bytes for passing arguments to other Go functions
+    _FP_saves  = 48     // 48 bytes for saving the registers before CALL instructions
+    _FP_locals = 144    // 144 bytes for local variables
+)
+
+const (
+    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
+    _FP_size = _FP_offs + 8     // 8 bytes for the parent frame pointer
+    _FP_base = _FP_size + 8     // 8 bytes for the return address
+)
+
+const (
+    _IM_null = 0x6c6c756e   // 'null'
+    _IM_true = 0x65757274   // 'true'
+    _IM_alse = 0x65736c61   // 'alse' ('false' without the 'f')
+)
+
+const (
+    _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
+)
+
+const (
+    _MODE_JSON = 1 << 3 // base64 mode
+)
+
+const (
+    _LB_error           = "_error"
+    _LB_im_error        = "_im_error"
+    _LB_eof_error       = "_eof_error"
+    _LB_type_error      = "_type_error"
+    _LB_field_error     = "_field_error"
+    _LB_range_error     = "_range_error"
+    _LB_stack_error     = "_stack_error"
+    _LB_base64_error    = "_base64_error"
+    _LB_unquote_error   = "_unquote_error"
+    _LB_parsing_error   = "_parsing_error"
+    _LB_parsing_error_v = "_parsing_error_v"
+    _LB_mismatch_error   = "_mismatch_error"
+)
+
+const (
+    _LB_char_0_error  = "_char_0_error"
+    _LB_char_1_error  = "_char_1_error"
+    _LB_char_2_error  = "_char_2_error"
+    _LB_char_3_error  = "_char_3_error"
+    _LB_char_4_error  = "_char_4_error"
+    _LB_char_m2_error = "_char_m2_error"
+    _LB_char_m3_error = "_char_m3_error"
+)
+
+const (
+    _LB_skip_one = "_skip_one"
+    _LB_skip_key_value = "_skip_key_value"
+)
+
+var (
+    _AX = jit.Reg("AX")
+    _BX = jit.Reg("BX")
+    _CX = jit.Reg("CX")
+    _DX = jit.Reg("DX")
+    _DI = jit.Reg("DI")
+    _SI = jit.Reg("SI")
+    _BP = jit.Reg("BP")
+    _SP = jit.Reg("SP")
+    _R8 = jit.Reg("R8")
+    _R9 = jit.Reg("R9")
+    _X0 = jit.Reg("X0")
+    _X1 = jit.Reg("X1")
+    _X15 = jit.Reg("X15")
+)
+
+var (
+    _IP = jit.Reg("R10")  // saved on BP when callc
+    _IC = jit.Reg("R11")  // saved on BX when call_c
+    _IL = jit.Reg("R12")
+    _ST = jit.Reg("R13")
+    _VP = jit.Reg("R15")
+)
+
+var (
+    _DF = jit.Reg("AX")    // reuse AX in generic decoder for flags
+    _ET = jit.Reg("AX")
+    _EP = jit.Reg("BX")
+)
+
+
+
+var (
+    _ARG_s  = _ARG_sp
+    _ARG_sp = jit.Ptr(_SP, _FP_base + 0)
+    _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
+    _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
+    _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
+    _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
+    _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
+)
+
+var (
+    _ARG_sv   = _ARG_sv_p
+    _ARG_sv_p = jit.Ptr(_SP, _FP_base + 48)
+    _ARG_sv_n = jit.Ptr(_SP, _FP_base + 56)
+    _ARG_vk   = jit.Ptr(_SP, _FP_base + 64)
+)
+
+var (
+    _VAR_st = _VAR_st_Vt
+    _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
+)
+
+var (
+    _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
+    _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
+    _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
+    _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
+    _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
+    _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
+)
+
+var (
+    _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
+    _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
+    _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
+    _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
+    _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
+)
+
+var (
+    _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
+    _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
+    _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
+)
+
+var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)
+
+var (
+    _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save dismatched type
+    _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save skip return pc
+    _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save dismatched position
+)
+
+type _Assembler struct {
+    jit.BaseAssembler
+    p _Program
+    name string
+}
+
+func newAssembler(p _Program) *_Assembler {
+    return new(_Assembler).Init(p)
+}
+
+/** Assembler Interface **/
+
+func (self *_Assembler) Load() _Decoder {
+    return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
+}
+
+func (self *_Assembler) Init(p _Program) *_Assembler {
+    self.p = p
+    self.BaseAssembler.Init(self.compile)
+    return self
+}
+
+func (self *_Assembler) compile() {
+    self.prologue()
+    self.instrs()
+    self.epilogue()
+    self.copy_string()
+    self.escape_string()
+    self.escape_string_twice()
+    self.skip_one()
+    self.skip_key_value()
+    self.type_error()
+    self.mismatch_error()
+    self.field_error()
+    self.range_error()
+    self.stack_error()
+    self.base64_error()
+    self.parsing_error()
+}
+
+/** Assembler Stages **/
+
+// _OpFuncTab maps each decoder IR opcode to the assembler stage that emits
+// its machine code. Slots not listed stay nil; instr() panics on them.
+var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
+    _OP_any              : (*_Assembler)._asm_OP_any,
+    _OP_dyn              : (*_Assembler)._asm_OP_dyn,
+    _OP_str              : (*_Assembler)._asm_OP_str,
+    _OP_bin              : (*_Assembler)._asm_OP_bin,
+    _OP_bool             : (*_Assembler)._asm_OP_bool,
+    _OP_num              : (*_Assembler)._asm_OP_num,
+    _OP_i8               : (*_Assembler)._asm_OP_i8,
+    _OP_i16              : (*_Assembler)._asm_OP_i16,
+    _OP_i32              : (*_Assembler)._asm_OP_i32,
+    _OP_i64              : (*_Assembler)._asm_OP_i64,
+    _OP_u8               : (*_Assembler)._asm_OP_u8,
+    _OP_u16              : (*_Assembler)._asm_OP_u16,
+    _OP_u32              : (*_Assembler)._asm_OP_u32,
+    _OP_u64              : (*_Assembler)._asm_OP_u64,
+    _OP_f32              : (*_Assembler)._asm_OP_f32,
+    _OP_f64              : (*_Assembler)._asm_OP_f64,
+    _OP_unquote          : (*_Assembler)._asm_OP_unquote,
+    _OP_nil_1            : (*_Assembler)._asm_OP_nil_1,
+    _OP_nil_2            : (*_Assembler)._asm_OP_nil_2,
+    _OP_nil_3            : (*_Assembler)._asm_OP_nil_3,
+    _OP_deref            : (*_Assembler)._asm_OP_deref,
+    _OP_index            : (*_Assembler)._asm_OP_index,
+    _OP_is_null          : (*_Assembler)._asm_OP_is_null,
+    _OP_is_null_quote    : (*_Assembler)._asm_OP_is_null_quote,
+    _OP_map_init         : (*_Assembler)._asm_OP_map_init,
+    _OP_map_key_i8       : (*_Assembler)._asm_OP_map_key_i8,
+    _OP_map_key_i16      : (*_Assembler)._asm_OP_map_key_i16,
+    _OP_map_key_i32      : (*_Assembler)._asm_OP_map_key_i32,
+    _OP_map_key_i64      : (*_Assembler)._asm_OP_map_key_i64,
+    _OP_map_key_u8       : (*_Assembler)._asm_OP_map_key_u8,
+    _OP_map_key_u16      : (*_Assembler)._asm_OP_map_key_u16,
+    _OP_map_key_u32      : (*_Assembler)._asm_OP_map_key_u32,
+    _OP_map_key_u64      : (*_Assembler)._asm_OP_map_key_u64,
+    _OP_map_key_f32      : (*_Assembler)._asm_OP_map_key_f32,
+    _OP_map_key_f64      : (*_Assembler)._asm_OP_map_key_f64,
+    _OP_map_key_str      : (*_Assembler)._asm_OP_map_key_str,
+    _OP_map_key_utext    : (*_Assembler)._asm_OP_map_key_utext,
+    _OP_map_key_utext_p  : (*_Assembler)._asm_OP_map_key_utext_p,
+    _OP_array_skip       : (*_Assembler)._asm_OP_array_skip,
+    _OP_array_clear      : (*_Assembler)._asm_OP_array_clear,
+    _OP_array_clear_p    : (*_Assembler)._asm_OP_array_clear_p,
+    _OP_slice_init       : (*_Assembler)._asm_OP_slice_init,
+    _OP_slice_append     : (*_Assembler)._asm_OP_slice_append,
+    _OP_object_skip      : (*_Assembler)._asm_OP_object_skip,
+    _OP_object_next      : (*_Assembler)._asm_OP_object_next,
+    _OP_struct_field     : (*_Assembler)._asm_OP_struct_field,
+    _OP_unmarshal        : (*_Assembler)._asm_OP_unmarshal,
+    _OP_unmarshal_p      : (*_Assembler)._asm_OP_unmarshal_p,
+    _OP_unmarshal_text   : (*_Assembler)._asm_OP_unmarshal_text,
+    _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
+    _OP_lspace           : (*_Assembler)._asm_OP_lspace,
+    _OP_match_char       : (*_Assembler)._asm_OP_match_char,
+    _OP_check_char       : (*_Assembler)._asm_OP_check_char,
+    _OP_load             : (*_Assembler)._asm_OP_load,
+    _OP_save             : (*_Assembler)._asm_OP_save,
+    _OP_drop             : (*_Assembler)._asm_OP_drop,
+    _OP_drop_2           : (*_Assembler)._asm_OP_drop_2,
+    _OP_recurse          : (*_Assembler)._asm_OP_recurse,
+    _OP_goto             : (*_Assembler)._asm_OP_goto,
+    _OP_switch           : (*_Assembler)._asm_OP_switch,
+    _OP_check_char_0     : (*_Assembler)._asm_OP_check_char_0,
+    _OP_dismatch_err     : (*_Assembler)._asm_OP_dismatch_err,
+    _OP_go_skip          : (*_Assembler)._asm_OP_go_skip,
+    _OP_add              : (*_Assembler)._asm_OP_add,
+    _OP_check_empty      : (*_Assembler)._asm_OP_check_empty,
+    _OP_debug            : (*_Assembler)._asm_OP_debug,
+}
+
+// _asm_OP_debug emits an INT3 (0xCC) breakpoint so the generated code traps
+// into a debugger at this point.
+func (self *_Assembler) _asm_OP_debug(_ *_Instr) {
+    self.Byte(0xcc)     // INT3
+}
+
+// instr dispatches a single IR instruction to its assembler stage via
+// _OpFuncTab, panicking on an opcode with no registered stage.
+func (self *_Assembler) instr(v *_Instr) {
+    fn := _OpFuncTab[v.op()]
+    if fn == nil {
+        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
+    }
+    fn(self, v)
+}
+
+// instrs assembles the whole program, marking every instruction index so
+// branch targets can be resolved, then emitting and (optionally) tracing it.
+func (self *_Assembler) instrs() {
+    for i := 0; i < len(self.p); i++ {
+        v := self.p[i]   // operate on a copy, exactly as the range form did
+        self.Mark(i)
+        self.instr(&v)
+        self.debug_instr(i, &v)
+    }
+}
+
+// epilogue emits the common function exit: on a pending mismatch it diverts
+// to the mismatch handler, otherwise it moves (ET, EP, IC) into the ABI
+// result registers, clears pointer-holding spill slots for the GC, restores
+// the frame and returns.
+func (self *_Assembler) epilogue() {
+    self.Mark(len(self.p))
+    self.Emit("XORL", _EP, _EP)                     // XORL EP, EP
+    self.Emit("MOVQ", _VAR_et, _ET)                 // MOVQ VAR_et, ET
+    self.Emit("TESTQ", _ET, _ET)                    // TESTQ ET, ET
+    self.Sjmp("JNZ", _LB_mismatch_error)            // JNZ _LB_mismatch_error
+    self.Link(_LB_error)                            // _error:
+    self.Emit("MOVQ", _EP, _CX)                     // MOVQ EP, CX
+    self.Emit("MOVQ", _ET, _BX)                     // MOVQ ET, BX
+    self.Emit("MOVQ", _IC, _AX)                     // MOVQ IC, AX
+    self.Emit("MOVQ", jit.Imm(0), _ARG_sp)          // MOVQ $0, s.p<>+0(FP)
+    self.Emit("MOVQ", jit.Imm(0), _ARG_vp)          // MOVQ $0, vp<>+24(FP)
+    self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)        // MOVQ $0, sv.p<>+48(FP)
+    self.Emit("MOVQ", jit.Imm(0), _ARG_vk)          // MOVQ $0, vk<>+64(FP)
+    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // MOVQ _FP_offs(SP), BP
+    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // ADDQ $_FP_size, SP
+    self.Emit("RET")                                // RET
+}
+
+// prologue emits the function entry: it reserves the frame, saves BP, copies
+// the ABI argument registers both into their frame slots (so error paths can
+// reload them) and into the decoder's dedicated working registers, zeroes the
+// scratch slots, and points the digit buffer at its area inside the stack
+// buffer (ST).
+func (self *_Assembler) prologue() {
+    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)       // SUBQ $_FP_size, SP
+    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))  // MOVQ BP, _FP_offs(SP)
+    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)  // LEAQ _FP_offs(SP), BP
+    self.Emit("MOVQ", _AX, _ARG_sp)                 // MOVQ AX, s.p<>+0(FP)
+    self.Emit("MOVQ", _AX, _IP)                     // MOVQ AX, IP
+    self.Emit("MOVQ", _BX, _ARG_sl)                 // MOVQ BX, s.l<>+8(FP)
+    self.Emit("MOVQ", _BX, _IL)                     // MOVQ BX, IL
+    self.Emit("MOVQ", _CX, _ARG_ic)                 // MOVQ CX, ic<>+16(FP)
+    self.Emit("MOVQ", _CX, _IC)                     // MOVQ CX, IC
+    self.Emit("MOVQ", _DI, _ARG_vp)                 // MOVQ DI, vp<>+24(FP)
+    self.Emit("MOVQ", _DI, _VP)                     // MOVQ DI, VP
+    self.Emit("MOVQ", _SI, _ARG_sb)                 // MOVQ SI, sb<>+32(FP)
+    self.Emit("MOVQ", _SI, _ST)                     // MOVQ SI, ST
+    self.Emit("MOVQ", _R8, _ARG_fv)                 // MOVQ R8, fv<>+40(FP)
+    self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)        // MOVQ $0, sv.p<>+48(FP)
+    self.Emit("MOVQ", jit.Imm(0), _ARG_sv_n)        // MOVQ $0, sv.n<>+56(FP)
+    self.Emit("MOVQ", jit.Imm(0), _ARG_vk)          // MOVQ $0, vk<>+64(FP)
+    self.Emit("MOVQ", jit.Imm(0), _VAR_et)          // MOVQ $0, et<>+120(FP)
+    // initialize digital buffer first
+    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc)    // MOVQ $_MaxDigitNums, ss.Dcap
+    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)        // LEAQ _DbufOffset(ST), AX
+    self.Emit("MOVQ", _AX, _VAR_st_Db)                       // MOVQ AX, ss.Dbuf
+}
+
+/** Function Calling Helpers **/
+
+var (
+    // Registers preserved across a call into Go code (see call_go).
+    _REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
+    // NOTE(review): _IL appears twice in this list, so one save slot is
+    // written twice — looks like the last entry was meant to be a different
+    // register; confirm against the register-save layout before changing.
+    _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL }
+)
+
+// save spills each register in r into the frame's register-save area,
+// panicking if more registers are given than the area has 8-byte slots.
+func (self *_Assembler) save(r ...obj.Addr) {
+    for i, v := range r {
+        if i >= _FP_saves/8 {
+            panic("too many registers to save")
+        }
+        self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
+    }
+}
+
+// load restores each register in r from the frame's register-save area,
+// panicking if more registers are given than the area has 8-byte slots.
+func (self *_Assembler) load(r ...obj.Addr) {
+    for i, v := range r {
+        if i >= _FP_saves/8 {
+            panic("too many registers to load")
+        }
+        self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
+    }
+}
+
+// call emits an indirect call to fn through the scratch register R9.
+func (self *_Assembler) call(fn obj.Addr) {
+    self.Emit("MOVQ", fn, _R9)  // MOVQ ${fn}, R9
+    self.Rjmp("CALL", _R9)      // CALL R9
+}
+
+// call_go calls a Go function, saving and restoring the decoder's working
+// registers (_REG_go) around the call since the callee may clobber them.
+func (self *_Assembler) call_go(fn obj.Addr) {
+    self.save(_REG_go...)   // SAVE $REG_go
+    self.call(fn)
+    self.load(_REG_go...)   // LOAD $REG_go
+}
+
+// callc calls a C-convention (native) function, preserving only IP around
+// the call. X15 is re-zeroed afterwards — presumably because Go's register
+// ABI reserves X15 as the zero register and the native code may clobber it.
+func (self *_Assembler) callc(fn obj.Addr) {
+    self.save(_IP)
+    self.call(fn)
+    self.Emit("XORPS", _X15, _X15)
+    self.load(_IP)
+}
+
+// call_c wraps callc with an IC<->BX exchange on both sides, stashing IC in
+// BX across the native call (and restoring it after) since _IC's register is
+// not preserved by callc.
+func (self *_Assembler) call_c(fn obj.Addr) {
+    self.Emit("XCHGQ", _IC, _BX)
+    self.callc(fn)
+    self.Emit("XCHGQ", _IC, _BX)
+}
+
+// call_sf calls a native skip-style function with the C argument registers
+// loaded as: DI = &s, SI = &ic, DX = &fsm (inside ST), CX = fv. IC is
+// written to its frame slot first so the callee can advance it, then
+// reloaded afterwards.
+func (self *_Assembler) call_sf(fn obj.Addr) {
+    self.Emit("LEAQ", _ARG_s, _DI)                      // LEAQ s<>+0(FP), DI
+    self.Emit("MOVQ", _IC, _ARG_ic)                     // MOVQ IC, ic<>+16(FP)
+    self.Emit("LEAQ", _ARG_ic, _SI)                     // LEAQ ic<>+16(FP), SI
+    self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX)    // LEAQ _FsmOffset(ST), DX
+    self.Emit("MOVQ", _ARG_fv, _CX)
+    self.callc(fn)
+    self.Emit("MOVQ", _ARG_ic, _IC)                     // MOVQ ic<>+16(FP), IC
+}
+
+// call_vf calls a native value-parsing function with DI = &s, SI = &ic and
+// DX = &st (the parser state value on the stack), syncing IC through its
+// frame slot across the call.
+func (self *_Assembler) call_vf(fn obj.Addr) {
+    self.Emit("LEAQ", _ARG_s, _DI)      // LEAQ s<>+0(FP), DI
+    self.Emit("MOVQ", _IC, _ARG_ic)     // MOVQ IC, ic<>+16(FP)
+    self.Emit("LEAQ", _ARG_ic, _SI)     // LEAQ ic<>+16(FP), SI
+    self.Emit("LEAQ", _VAR_st, _DX)     // LEAQ st, DX
+    self.callc(fn)
+    self.Emit("MOVQ", _ARG_ic, _IC)     // MOVQ ic<>+16(FP), IC
+}
+
+/** Assembler Error Handlers **/
+
+// Addresses of the Go error-construction helpers called from JIT code.
+var (
+    _F_convT64        = jit.Func(convT64)
+    _F_error_wrap     = jit.Func(error_wrap)
+    _F_error_type     = jit.Func(error_type)
+    _F_error_field    = jit.Func(error_field)
+    _F_error_value    = jit.Func(error_value)
+    _F_error_mismatch = jit.Func(error_mismatch)
+)
+
+// Cached itab/type pairs for the numeric types used by range checks.
+var (
+    _I_int8    , _T_int8    = rtype(reflect.TypeOf(int8(0)))
+    _I_int16   , _T_int16   = rtype(reflect.TypeOf(int16(0)))
+    _I_int32   , _T_int32   = rtype(reflect.TypeOf(int32(0)))
+    _I_uint8   , _T_uint8   = rtype(reflect.TypeOf(uint8(0)))
+    _I_uint16  , _T_uint16  = rtype(reflect.TypeOf(uint16(0)))
+    _I_uint32  , _T_uint32  = rtype(reflect.TypeOf(uint32(0)))
+    _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
+)
+
+// itab of base64.CorruptInputError as the error interface.
+var (
+    _T_error                    = rt.UnpackType(errorType)
+    _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
+)
+
+// Pre-built error values/itabs loaded directly into (ET, EP) on error paths.
+var (
+    _V_stackOverflow              = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
+    _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
+    _I_json_MismatchTypeError     = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
+)
+
+// type_error emits the _type_error trampoline: build a type error in Go and
+// jump to the common error exit.
+func (self *_Assembler) type_error() {
+    self.Link(_LB_type_error)                   // _type_error:
+    self.call_go(_F_error_type)                 // CALL_GO error_type
+    self.Sjmp("JMP" , _LB_error)                // JMP     _error
+}
+
+// mismatch_error emits the _mismatch_error trampoline. If the pending error
+// is already a MismatchTypeError it is returned as-is; otherwise a fresh one
+// is built from the saved source, position and expected type.
+func (self *_Assembler) mismatch_error() {
+    self.Link(_LB_mismatch_error)                     // _mismatch_error:
+    self.Emit("MOVQ", _VAR_et, _ET)                   // MOVQ _VAR_et, ET
+    self.Emit("MOVQ", _VAR_ic, _EP)                   // MOVQ _VAR_ic, EP
+    self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX
+    self.Emit("CMPQ", _ET, _CX)                       // CMPQ ET, CX
+    self.Sjmp("JE"  , _LB_error)                      // JE _LB_error
+    self.Emit("MOVQ", _ARG_sp, _AX)
+    self.Emit("MOVQ", _ARG_sl, _BX)
+    self.Emit("MOVQ", _VAR_ic, _CX)
+    self.Emit("MOVQ", _VAR_et, _DI)
+    self.call_go(_F_error_mismatch)             // CALL_GO error_mismatch
+    self.Sjmp("JMP" , _LB_error)                // JMP     _error
+}
+
+// field_error emits the _field_error trampoline: build a field error from the
+// saved field-name string (sv) and jump to the common error exit.
+func (self *_Assembler) field_error() {
+    self.Link(_LB_field_error)                  // _field_error:
+    self.Emit("MOVQ", _ARG_sv_p, _AX)           // MOVQ   sv.p, AX
+    self.Emit("MOVQ", _ARG_sv_n, _BX)           // MOVQ   sv.n, BX
+    self.call_go(_F_error_field)                // CALL_GO error_field
+    self.Sjmp("JMP" , _LB_error)                // JMP     _error
+}
+
+// range_error emits the _range_error trampoline: slice out the offending
+// literal (from st.Ep) and build a value error for the out-of-range number.
+func (self *_Assembler) range_error() {
+    self.Link(_LB_range_error)                  // _range_error:
+    self.Emit("MOVQ", _ET, _CX)                 // MOVQ    ET, CX
+    self.slice_from(_VAR_st_Ep, 0)              // SLICE   st.Ep, $0
+    self.Emit("MOVQ", _DI, _AX)                 // MOVQ    DI, AX
+    self.Emit("MOVQ", _EP, _DI)                 // MOVQ    EP, DI
+    self.Emit("MOVQ", _SI, _BX)                 // MOVQ    SI, BX
+    self.call_go(_F_error_value)                // CALL_GO error_value
+    self.Sjmp("JMP" , _LB_error)                // JMP     _error
+}
+
+// stack_error emits the _stack_error trampoline: return the pre-built
+// stack-overflow error (as json.UnsupportedValueError) without calling Go.
+func (self *_Assembler) stack_error() {
+    self.Link(_LB_stack_error)                              // _stack_error:
+    self.Emit("MOVQ", _V_stackOverflow, _EP)                // MOVQ ${_V_stackOverflow}, EP
+    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ ${_I_json_UnsupportedValueError}, ET
+    self.Sjmp("JMP" , _LB_error)                            // JMP  _error
+}
+
+// base64_error emits the _base64_error trampoline: AX holds the negative
+// decoder result; convert it to the (positive, zero-based) offset, box it via
+// convT64, and return it as a base64.CorruptInputError.
+func (self *_Assembler) base64_error() {
+    self.Link(_LB_base64_error)
+    self.Emit("NEGQ", _AX)                                  // NEGQ    AX
+    self.Emit("SUBQ", jit.Imm(1), _AX)                      // SUBQ    $1, AX
+    self.call_go(_F_convT64)                                // CALL_GO convT64
+    self.Emit("MOVQ", _AX, _EP)                             // MOVQ    AX, EP
+    self.Emit("MOVQ", _I_base64_CorruptInputError, _ET)     // MOVQ    ${itab(base64.CorruptInputError)}, ET
+    self.Sjmp("JMP" , _LB_error)                            // JMP     _error
+}
+
+// parsing_error emits the shared parsing-error tail: a set of entry labels
+// (EOF, unquote, invalid-character at various offsets) that each normalize
+// IC and the error code EP, funnel into _parsing_error, wrap the error in Go
+// and jump to the common error exit.
+func (self *_Assembler) parsing_error() {
+    self.Link(_LB_eof_error)                                            // _eof_error:
+    self.Emit("MOVQ" , _IL, _IC)                                        // MOVQ    IL, IC
+    self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP)              // MOVL    ${types.ERR_EOF}, EP
+    self.Sjmp("JMP"  , _LB_parsing_error)                               // JMP     _parsing_error
+    self.Link(_LB_unquote_error)                                        // _unquote_error:
+    self.Emit("SUBQ" , _VAR_sr, _SI)                                    // SUBQ    sr, SI
+    self.Emit("SUBQ" , _SI, _IC)                                        // SUBQ    SI, IC
+    self.Link(_LB_parsing_error_v)                                      // _parsing_error_v:
+    self.Emit("MOVQ" , _AX, _EP)                                        // MOVQ    AX, EP
+    self.Emit("NEGQ" , _EP)                                             // NEGQ    EP
+    self.Sjmp("JMP"  , _LB_parsing_error)                               // JMP     _parsing_error
+    // _char_m3_error falls through _char_m2_error, so IC is rewound by 3 in total.
+    self.Link(_LB_char_m3_error)                                        // _char_m3_error:
+    self.Emit("SUBQ" , jit.Imm(1), _IC)                                 // SUBQ    $1, IC
+    self.Link(_LB_char_m2_error)                                        // _char_m2_error:
+    self.Emit("SUBQ" , jit.Imm(2), _IC)                                 // SUBQ    $2, IC
+    self.Sjmp("JMP"  , _LB_char_0_error)                                // JMP     _char_0_error
+    // _im_error: compare the three expected bytes in CX against the input to
+    // find which character position actually mismatched.
+    self.Link(_LB_im_error)                                             // _im_error:
+    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0))                    // CMPB    CX, (IP)(IC)
+    self.Sjmp("JNE"  , _LB_char_0_error)                                // JNE     _char_0_error
+    self.Emit("SHRL" , jit.Imm(8), _CX)                                 // SHRL    $8, CX
+    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1))                    // CMPB    CX, 1(IP)(IC)
+    self.Sjmp("JNE"  , _LB_char_1_error)                                // JNE     _char_1_error
+    self.Emit("SHRL" , jit.Imm(8), _CX)                                 // SHRL    $8, CX
+    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2))                    // CMPB    CX, 2(IP)(IC)
+    self.Sjmp("JNE"  , _LB_char_2_error)                                // JNE     _char_2_error
+    self.Sjmp("JMP"  , _LB_char_3_error)                                // JMP     _char_3_error
+    // The _char_N_error labels cascade, each adding 1 to IC before the shared tail.
+    self.Link(_LB_char_4_error)                                         // _char_4_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_3_error)                                         // _char_3_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_2_error)                                         // _char_2_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_1_error)                                         // _char_1_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_0_error)                                         // _char_0_error:
+    self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP)     // MOVL    ${types.ERR_INVALID_CHAR}, EP
+    self.Link(_LB_parsing_error)                                        // _parsing_error:
+    self.Emit("MOVQ" , _EP, _DI)                                        // MOVQ    EP, DI
+    self.Emit("MOVQ",  _ARG_sp, _AX)                                     // MOVQ  sp, AX
+    self.Emit("MOVQ",  _ARG_sl, _BX)                                     // MOVQ  sl, BX
+    self.Emit("MOVQ" , _IC, _CX)                                        // MOVQ    IC, CX
+    self.call_go(_F_error_wrap)                                         // CALL_GO error_wrap
+    self.Sjmp("JMP"  , _LB_error)                                       // JMP     _error
+}
+
+// _asm_OP_dismatch_err records a (non-fatal) type mismatch: it saves the
+// current position and the expected type into the et/ic slots, which the
+// epilogue later turns into a MismatchTypeError.
+func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
+    self.Emit("MOVQ", _IC, _VAR_ic)      
+    self.Emit("MOVQ", jit.Type(p.vt()), _ET)     
+    self.Emit("MOVQ", _ET, _VAR_et)
+}
+
+// _asm_OP_go_skip stores a PC-relative resume address (patched via Xref to
+// the branch target p.vi()) into the pc slot, then jumps to the shared
+// _skip_one routine, which returns to that address when done.
+func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Xref(p.vi(), 4)
+    // self.Byte(0xcc)
+    self.Emit("MOVQ", _R9, _VAR_pc)
+    self.Sjmp("JMP"  , _LB_skip_one)            // JMP     _skip_one
+}
+
+// skip_one emits the shared _skip_one routine: restore IC from its slot,
+// skip a whole JSON value via the native skipper, raise a parsing error on
+// a negative result, then jump back to the resume address saved in pc.
+func (self *_Assembler) skip_one() {
+    self.Link(_LB_skip_one)                     // _skip:
+    self.Emit("MOVQ", _VAR_ic, _IC)             // MOVQ    _VAR_ic, IC
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+    self.Emit("MOVQ" , _VAR_pc, _R9)            // MOVQ    pc, R9
+    self.Rjmp("JMP"  , _R9)                     // JMP     (R9)
+}
+
+// skip_key_value emits the shared _skip_key_value routine: skip a full
+// `"key" : value` pair (key, surrounding whitespace, the ':' separator,
+// then the value), raising a parsing error on any failure, and jump back to
+// the resume address saved in pc.
+func (self *_Assembler) skip_key_value() {
+    self.Link(_LB_skip_key_value)               // _skip:
+    // skip the key
+    self.Emit("MOVQ", _VAR_ic, _IC)             // MOVQ    _VAR_ic, IC
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+    // match char ':'
+    self.lspace("_global_1")
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
+    self.Sjmp("JNE"  , _LB_parsing_error_v)     // JNE     _parse_error_v
+    self.Emit("ADDQ", jit.Imm(1), _IC)          // ADDQ    $1, IC
+    self.lspace("_global_2")
+    // skip the value
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+    // jump back to specified address
+    self.Emit("MOVQ" , _VAR_pc, _R9)            // MOVQ    pc, R9
+    self.Rjmp("JMP"  , _R9)                     // JMP     (R9)
+}
+
+
+/** Memory Management Routines **/
+
+// Runtime type of byte and address of runtime.mallocgc, used by the
+// allocation helpers below.
+var (
+    _T_byte     = jit.Type(byteType)
+    _F_mallocgc = jit.Func(mallocgc)
+)
+
+// malloc_AX emits a call to mallocgc(nb, byteType, false) — a pointer-free
+// allocation of nb bytes — and stores the returned pointer into ret.
+func (self *_Assembler) malloc_AX(nb obj.Addr, ret obj.Addr) {
+    self.Emit("MOVQ", nb, _AX)                  // MOVQ    ${nb}, AX
+    self.Emit("MOVQ", _T_byte, _BX)             // MOVQ    ${type(byte)}, BX
+    self.Emit("XORL", _CX, _CX)                 // XORL    CX, CX (needzero = false)
+    self.call_go(_F_mallocgc)                   // CALL_GO mallocgc
+    self.Emit("MOVQ", _AX, ret)                 // MOVQ    AX, ${ret}
+}
+
+// valloc emits a call to mallocgc(vt.Size(), vt, true) — a zeroed, typed
+// allocation of one vt value — and stores the returned pointer into ret.
+func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
+    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)   // MOVQ    ${vt.Size()}, AX
+    self.Emit("MOVQ", jit.Type(vt), _BX)                // MOVQ    ${vt}, BX
+    self.Emit("MOVB", jit.Imm(1), _CX)                  // MOVB    $1, CX (needzero = true)
+    self.call_go(_F_mallocgc)                           // CALL_GO mallocgc
+    self.Emit("MOVQ", _AX, ret)                         // MOVQ    AX, ${ret}
+}
+
+// valloc_AX is valloc without the final store: the new pointer is left in AX.
+func (self *_Assembler) valloc_AX(vt reflect.Type) {
+    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)   // MOVQ    ${vt.Size()}, AX
+    self.Emit("MOVQ", jit.Type(vt), _BX)                // MOVQ    ${vt}, BX
+    self.Emit("MOVB", jit.Imm(1), _CX)                  // MOVB    $1, CX (needzero = true)
+    self.call_go(_F_mallocgc)                           // CALL_GO mallocgc
+}
+
+// vfollow dereferences the pointer at (VP), allocating a fresh zeroed vt
+// value (with a write barrier) when it is nil, and makes VP point at the
+// followed value.
+func (self *_Assembler) vfollow(vt reflect.Type) {
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)    // MOVQ   (VP), AX
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ  AX, AX
+    self.Sjmp("JNZ"  , "_end_{n}")              // JNZ    _end_{n}
+    self.valloc_AX(vt)                          // VALLOC ${vt}, AX
+    self.WritePtrAX(1, jit.Ptr(_VP, 0), true)   // MOVQ   AX, (VP)
+    self.Link("_end_{n}")                       // _end_{n}:
+    self.Emit("MOVQ" , _AX, _VP)                // MOVQ   AX, VP
+}
+
+/** Value Parsing Routines **/
+
+// Entry addresses of the native value parsers (string / number / signed /
+// unsigned), called through call_vf.
+var (
+    _F_vstring   = jit.Imm(int64(native.S_vstring))
+    _F_vnumber   = jit.Imm(int64(native.S_vnumber))
+    _F_vsigned   = jit.Imm(int64(native.S_vsigned))
+    _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
+)
+
+// check_err tests the native parser result in st.Vt (negative = error).
+// With vt == nil a failure goes straight to _parsing_error_v. With a vt,
+// the failure is downgraded to a recorded mismatch (et/ic slots) and the
+// offending value (or key/value pair, when pin2 != -1) is skipped so
+// decoding can continue; pin / pin2 name the resume point.
+func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ" , _VAR_st_Vt, _AX)         // MOVQ st.Vt, AX
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
+    // try to skip the value
+    if vt != nil {
+        self.Sjmp("JNS" , "_check_err_{n}")        // JNS  _check_err_{n}
+        self.Emit("MOVQ", jit.Type(vt), _ET)         
+        self.Emit("MOVQ", _ET, _VAR_et)
+        if pin2 != -1 {
+            self.Emit("SUBQ", jit.Imm(1), _BX)
+            self.Emit("MOVQ", _BX, _VAR_ic)
+            self.Byte(0x4c  , 0x8d, 0x0d)         // LEAQ (PC), R9
+            self.Xref(pin2, 4)
+            self.Emit("MOVQ", _R9, _VAR_pc)
+            self.Sjmp("JMP" , _LB_skip_key_value)
+        } else {
+            self.Emit("MOVQ", _BX, _VAR_ic)
+            self.Byte(0x4c  , 0x8d, 0x0d)         // LEAQ (PC), R9
+            self.Sref(pin, 4)
+            self.Emit("MOVQ", _R9, _VAR_pc)
+            self.Sjmp("JMP" , _LB_skip_one)
+        }
+        self.Link("_check_err_{n}")
+    } else {
+        self.Sjmp("JS"   , _LB_parsing_error_v)     // JS   _parsing_error_v
+    }
+}
+
+// check_eof emits a bounds check that at least d more input bytes exist,
+// branching to _eof_error otherwise. d == 1 uses the cheaper direct
+// IC-vs-IL compare; other distances compute IC+d first.
+func (self *_Assembler) check_eof(d int64) {
+    if d != 1 {
+        self.Emit("LEAQ", jit.Ptr(_IC, d), _AX)     // LEAQ ${d}(IC), AX
+        self.Emit("CMPQ", _AX, _IL)                 // CMPQ AX, IL
+        self.Sjmp("JA"  , _LB_eof_error)            // JA   _eof_error
+        return
+    }
+    self.Emit("CMPQ", _IC, _IL)         // CMPQ IC, IL
+    self.Sjmp("JAE" , _LB_eof_error)    // JAE  _eof_error
+}
+
+
+// parse_string emits a call to the native string parser, passing the decode
+// flags in CX, and raises a parsing error on failure (no mismatch recovery).
+func (self *_Assembler) parse_string() {
+    self.Emit("MOVQ", _ARG_fv, _CX)
+    self.call_vf(_F_vstring)
+    self.check_err(nil, "", -1)
+}
+
+// parse_number emits a call to the native number parser; the pre-call IC is
+// stashed in BX so check_err can rewind for mismatch recovery.
+func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ", _IC, _BX)       // save ic when call native func    
+    self.call_vf(_F_vnumber)
+    self.check_err(vt, pin, pin2)
+}
+
+// parse_signed emits a call to the native signed-integer parser; the
+// pre-call IC is stashed in BX so check_err can rewind for mismatch recovery.
+func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ", _IC, _BX)       // save ic when call native func    
+    self.call_vf(_F_vsigned)
+    self.check_err(vt, pin, pin2)
+}
+
+// parse_unsigned emits a call to the native unsigned-integer parser; the
+// pre-call IC is stashed in BX so check_err can rewind for mismatch recovery.
+func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ", _IC, _BX)       // save ic when call native func    
+    self.call_vf(_F_vunsigned)
+    self.check_err(vt, pin, pin2)
+}
+
+// copy_string emits the shared _copy_string routine: allocate SI bytes,
+// memmove the (DI, SI) input slice into the fresh buffer (detaching it from
+// the source JSON), leave the copy in (DI, SI), and jump back via R9.
+// Pointer: DI, Size: SI, Return: R9  
+func (self *_Assembler) copy_string() {
+    self.Link("_copy_string")
+    self.Emit("MOVQ", _DI, _VAR_bs_p)
+    self.Emit("MOVQ", _SI, _VAR_bs_n)
+    self.Emit("MOVQ", _R9, _VAR_bs_LR)
+    self.malloc_AX(_SI, _ARG_sv_p)                              
+    self.Emit("MOVQ", _VAR_bs_p, _BX)
+    self.Emit("MOVQ", _VAR_bs_n, _CX)
+    self.call_go(_F_memmove)
+    self.Emit("MOVQ", _ARG_sv_p, _DI)
+    self.Emit("MOVQ", _VAR_bs_n, _SI)
+    self.Emit("MOVQ", _VAR_bs_LR, _R9)
+    self.Rjmp("JMP", _R9)
+}
+
+// escape_string emits the shared _escape_string routine: allocate a buffer,
+// run the native unquote over the (DI, SI) escaped slice (honoring the
+// disable-unicode-replacement flag), leave the unescaped result in (DI, SI),
+// and jump back via R9. A negative unquote result raises _unquote_error.
+// Pointer: DI, Size: SI, Return: R9
+func (self *_Assembler) escape_string() {
+    self.Link("_escape_string")
+    self.Emit("MOVQ" , _DI, _VAR_bs_p)
+    self.Emit("MOVQ" , _SI, _VAR_bs_n)
+    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
+    self.malloc_AX(_SI, _DX)                                    // MALLOC SI, DX
+    self.Emit("MOVQ" , _DX, _ARG_sv_p)
+    self.Emit("MOVQ" , _VAR_bs_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)                                  
+    self.Emit("LEAQ" , _VAR_sr, _CX)                            // LEAQ   sr, CX
+    self.Emit("XORL" , _R8, _R8)                                // XORL   R8, R8
+    self.Emit("BTQ"  , jit.Imm(_F_disable_urc), _ARG_fv)        // BTQ    ${_F_disable_urc}, fv
+    self.Emit("SETCC", _R8)                                     // SETCC  R8
+    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8)   // SHLQ   ${types.B_UNICODE_REPLACE}, R8
+    self.call_c(_F_unquote)                                       // CALL   unquote
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)                                  // MOVQ   ${n}, SI
+    self.Emit("ADDQ" , jit.Imm(1), _SI)                         // ADDQ   $1, SI
+    self.Emit("TESTQ", _AX, _AX)                                // TESTQ  AX, AX
+    self.Sjmp("JS"   , _LB_unquote_error)                       // JS     _unquote_error
+    self.Emit("MOVQ" , _AX, _SI)
+    self.Emit("MOVQ" , _ARG_sv_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
+    self.Rjmp("JMP", _R9)
+}
+
+// escape_string_twice emits the shared _escape_string_twice routine: like
+// escape_string, but runs unquote with F_DOUBLE_UNQUOTE for doubly-quoted
+// strings (a JSON string literal embedded inside another string).
+func (self *_Assembler) escape_string_twice() {
+    self.Link("_escape_string_twice")
+    self.Emit("MOVQ" , _DI, _VAR_bs_p)
+    self.Emit("MOVQ" , _SI, _VAR_bs_n)
+    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
+    self.malloc_AX(_SI, _DX)                                        // MALLOC SI, DX
+    self.Emit("MOVQ" , _DX, _ARG_sv_p)
+    self.Emit("MOVQ" , _VAR_bs_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)        
+    self.Emit("LEAQ" , _VAR_sr, _CX)                                // LEAQ   sr, CX
+    self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)        // MOVL   ${types.F_DOUBLE_UNQUOTE}, R8
+    self.Emit("BTQ"  , jit.Imm(_F_disable_urc), _ARG_fv)            // BTQ    ${_F_disable_urc}, fv
+    self.Emit("XORL" , _AX, _AX)                                    // XORL   AX, AX
+    self.Emit("SETCC", _AX)                                         // SETCC  AX
+    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX)       // SHLQ   ${types.B_UNICODE_REPLACE}, AX
+    self.Emit("ORQ"  , _AX, _R8)                                    // ORQ    AX, R8
+    self.call_c(_F_unquote)                                         // CALL   unquote
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)                              // MOVQ   ${n}, SI
+    self.Emit("ADDQ" , jit.Imm(3), _SI)                             // ADDQ   $3, SI
+    self.Emit("TESTQ", _AX, _AX)                                    // TESTQ  AX, AX
+    self.Sjmp("JS"   , _LB_unquote_error)                           // JS     _unquote_error
+    self.Emit("MOVQ" , _AX, _SI)
+    self.Emit("MOVQ" , _ARG_sv_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
+    self.Rjmp("JMP", _R9)
+}
+
+/** Range Checking Routines **/
+
+// Addresses of the float32 range bounds, loaded by range_single_X0.
+var (
+    _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
+    _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
+)
+
+// Heap-held float32 bounds; filled in by init below.
+var (
+    _Vp_max_f32 = new(float32)
+    _Vp_min_f32 = new(float32)
+)
+
+// init populates the float32 bounds with ±math.MaxFloat32.
+func init() {
+    *_Vp_max_f32 = math.MaxFloat32
+    *_Vp_min_f32 = -math.MaxFloat32
+}
+
+// range_single_X0 converts the parsed double in st.Dv to float32 in X0 and
+// branches to _range_error (with float32's itab/type preloaded in ET/EP)
+// when the value falls outside ±math.MaxFloat32.
+func (self *_Assembler) range_single_X0() {
+    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)              // CVTSD2SS _VAR_st_Dv, X0
+    self.Emit("MOVQ"    , _V_max_f32, _CX)              // MOVQ     _max_f32, CX
+    self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET)   // MOVQ     ${itab(float32)}, ET
+    self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP)   // MOVQ     ${type(float32)}, EP
+    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)         // UCOMISS  (CX), X0
+    self.Sjmp("JA"      , _LB_range_error)              // JA       _range_error
+    self.Emit("MOVQ"    , _V_min_f32, _CX)              // MOVQ     _min_f32, CX
+    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)         // UCOMISS  (CX), X0
+    self.Sjmp("JB"      , _LB_range_error)              // JB       _range_error
+}
+
+// range_signed_CX loads the parsed integer st.Iv into CX and branches to
+// _range_error (with i/t preloaded in ET/EP) when it lies outside [a, b].
+func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
+    self.Emit("MOVQ", _VAR_st_Iv, _CX)      // MOVQ st.Iv, CX
+    self.Emit("MOVQ", jit.Gitab(i), _ET)    // MOVQ ${i}, ET
+    self.Emit("MOVQ", jit.Gtype(t), _EP)    // MOVQ ${t}, EP
+    self.Emit("CMPQ", _CX, jit.Imm(a))      // CMPQ CX, ${a}
+    self.Sjmp("JL"  , _LB_range_error)      // JL   _range_error
+    self.Emit("CMPQ", _CX, jit.Imm(b))      // CMPQ CX, ${B}
+    self.Sjmp("JG"  , _LB_range_error)      // JG   _range_error
+}
+
+// range_unsigned_CX loads the parsed integer st.Iv into CX and branches to
+// _range_error (with i/t preloaded in ET/EP) when it is negative or exceeds v.
+func (self *_Assembler) range_unsigned_CX(i *rt.GoItab, t *rt.GoType, v uint64) {
+    self.Emit("MOVQ" , _VAR_st_Iv, _CX)         // MOVQ  st.Iv, CX
+    self.Emit("MOVQ" , jit.Gitab(i), _ET)       // MOVQ  ${i}, ET
+    self.Emit("MOVQ" , jit.Gtype(t), _EP)       // MOVQ  ${t}, EP
+    self.Emit("TESTQ", _CX, _CX)                // TESTQ CX, CX
+    self.Sjmp("JS"   , _LB_range_error)         // JS    _range_error
+    self.Emit("CMPQ" , _CX, jit.Imm(int64(v)))  // CMPQ  CX, ${v}
+    self.Sjmp("JA"   , _LB_range_error)         // JA    _range_error
+}
+
+/** String Manipulating Routines **/
+
+// Entry address of the native unquote routine.
+var (
+    _F_unquote = jit.Imm(int64(native.S_unquote))
+)
+
+// slice_from materializes the input sub-slice [p, IC+d) as (DI = pointer,
+// SI = length), loading the start offset p from memory first.
+func (self *_Assembler) slice_from(p obj.Addr, d int64) {
+    self.Emit("MOVQ", p, _SI)   // MOVQ    ${p}, SI
+    self.slice_from_r(_SI, d)   // SLICE_R SI, ${d}
+}
+
+// slice_from_r is slice_from with the start offset already in register p:
+// DI = IP + p, SI = IC - p + d (length). Note it negates p in place.
+func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
+    self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI)   // LEAQ (IP)(${p}), DI
+    self.Emit("NEGQ", p)                            // NEGQ ${p}
+    self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI)   // LEAQ d(IC)(${p}), SI
+}
+
+// unquote_once stores the just-parsed string into (p, n). Escape-free
+// strings (st.Ep == -1) are referenced in place — unless the copy-string
+// flag forces a detached copy — while escaped ones go through
+// _escape_string. The pointer store uses a write barrier unless the target
+// is on the stack.
+func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
+    self.slice_from(_VAR_st_Iv, -1)                             // SLICE  st.Iv, $-1
+    self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1))                 // CMPQ   st.Ep, $-1
+    self.Sjmp("JE"  , "_noescape_{n}")                         // JE     _noescape_{n}
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Sref("_unquote_once_write_{n}", 4)
+    self.Sjmp("JMP" , "_escape_string")
+    self.Link("_noescape_{n}")
+    if copy {
+        self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)    
+        self.Sjmp("JNC", "_unquote_once_write_{n}")
+        self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+        self.Sref("_unquote_once_write_{n}", 4)
+        self.Sjmp("JMP", "_copy_string")
+    }
+    self.Link("_unquote_once_write_{n}")
+    self.Emit("MOVQ", _SI, n)                                  // MOVQ   SI, ${n}
+    if stack {
+        self.Emit("MOVQ", _DI, p) 
+    } else {
+        self.WriteRecNotAX(10, _DI, p, false, false)
+    }
+}
+
+// unquote_twice stores a doubly-quoted string (`"\"...\""`) into (p, n).
+// It first validates the trailing `\"` pair (raising positioned character
+// errors otherwise), then either references the inner bytes in place, copies
+// them when the copy-string flag is set, or routes escaped content through
+// _escape_string_twice.
+func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
+    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))                     // CMPQ   st.Ep, $-1
+    self.Sjmp("JE"   , _LB_eof_error)                               // JE     _eof_error
+    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\'))     // CMPB   -3(IP)(IC), $'\\'
+    self.Sjmp("JNE"  , _LB_char_m3_error)                           // JNE    _char_m3_error
+    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))      // CMPB   -2(IP)(IC), $'"'
+    self.Sjmp("JNE"  , _LB_char_m2_error)                           // JNE    _char_m2_error
+    self.slice_from(_VAR_st_Iv, -3)                                 // SLICE  st.Iv, $-3
+    self.Emit("MOVQ" , _SI, _AX)                                    // MOVQ   SI, AX
+    self.Emit("ADDQ" , _VAR_st_Iv, _AX)                             // ADDQ   st.Iv, AX
+    self.Emit("CMPQ" , _VAR_st_Ep, _AX)                             // CMPQ   st.Ep, AX
+    self.Sjmp("JE"   , "_noescape_{n}")                             // JE     _noescape_{n}
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Sref("_unquote_twice_write_{n}", 4)
+    self.Sjmp("JMP" , "_escape_string_twice")
+    self.Link("_noescape_{n}")                                      // _noescape_{n}:
+    self.Emit("BTQ"  , jit.Imm(_F_copy_string), _ARG_fv)    
+    self.Sjmp("JNC", "_unquote_twice_write_{n}") 
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Sref("_unquote_twice_write_{n}", 4)
+    self.Sjmp("JMP", "_copy_string")
+    self.Link("_unquote_twice_write_{n}")
+    self.Emit("MOVQ" , _SI, n)                                      // MOVQ   SI, ${n}
+    if stack {
+        self.Emit("MOVQ", _DI, p) 
+    } else {
+        self.WriteRecNotAX(12, _DI, p, false, false)
+    }
+    self.Link("_unquote_twice_end_{n}")
+}
+
+/** Memory Clearing Routines **/
+
+var (
+    _F_memclrHasPointers    = jit.Func(memclrHasPointers)       // clears memory that may contain pointers
+    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)    // clears pointer-free memory
+)
+
+// mem_clear_fn emits a call to the matching memory-clearing helper:
+// the pointer-free variant when ptrfree is true, the pointer-aware
+// variant otherwise.
+func (self *_Assembler) mem_clear_fn(ptrfree bool) {
+    if !ptrfree {
+        self.call_go(_F_memclrHasPointers)
+    } else {
+        self.call_go(_F_memclrNoHeapPointers)
+    }
+}
+
+// mem_clear_rem clears the remainder of the current value: the byte count is
+// ${size} plus the distance from VP to the address loaded off the decoder
+// stack (ST) — presumably the saved end pointer of the enclosing container
+// (NOTE(review): confirm against the stack layout). AX = base, BX = length,
+// then the memclr helper selected by ptrfree is called.
+func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
+    self.Emit("MOVQ", jit.Imm(size), _BX)               // MOVQ    ${size}, BX
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ    (ST), AX
+    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX)     // MOVQ    (ST)(AX), AX
+    self.Emit("SUBQ", _VP, _AX)                         // SUBQ    VP, AX
+    self.Emit("ADDQ", _AX, _BX)                         // ADDQ    AX, BX
+    self.Emit("MOVQ", _VP, _AX)                         // MOVQ    VP, AX
+    self.mem_clear_fn(ptrfree)                          // CALL_GO memclr{Has,NoHeap}Pointers
+}
+
+/** Map Assigning Routines **/
+
+// runtime map-assignment entry points, wrapped as jit call targets.
+var (
+    _F_mapassign           = jit.Func(mapassign)
+    _F_mapassign_fast32    = jit.Func(mapassign_fast32)
+    _F_mapassign_faststr   = jit.Func(mapassign_faststr)
+    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
+)
+
+// adapters for json.Unmarshaler / encoding.TextUnmarshaler dispatch;
+// resolved in init() rather than at declaration time.
+var (
+    _F_decodeJsonUnmarshaler obj.Addr
+    _F_decodeTextUnmarshaler obj.Addr
+)
+
+func init() {
+    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
+    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
+}
+
+// mapaccess_ptr follows the element pointer after a map assignment when the
+// map stores its element indirectly, so VP ends up addressing the element
+// itself rather than the pointer slot.
+func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
+    if rt.MapType(rt.UnpackType(t)).IndirectElem() {
+        self.vfollow(t.Elem())
+    }
+}
+
+// mapassign_std assigns a key located at ${v} into the map via the generic
+// runtime mapassign (key passed by address in AX).
+func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
+    self.Emit("LEAQ", v, _AX)               // LEAQ      ${v}, AX
+    self.mapassign_call_from_AX(t, _F_mapassign)    // MAPASSIGN ${t}, mapassign
+}
+
+// mapassign_str_fast assigns a string key (ptr at ${p}, len at ${n}) via the
+// runtime's mapassign_faststr fast path. Arguments are passed in registers:
+// AX = map type, BX = map (VP), CX = key pointer, DI = key length. The value
+// slot returned in AX becomes the new VP, dereferenced if the map stores its
+// element indirectly.
+func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
+    self.Emit("MOVQ", jit.Type(t), _AX)         // MOVQ    ${t}, AX
+    self.Emit("MOVQ", _VP, _BX)                 // MOVQ    VP, BX
+    self.Emit("MOVQ", p, _CX)                   // MOVQ    ${p}, CX
+    self.Emit("MOVQ", n, _DI)                   // MOVQ    ${n}, DI
+    self.call_go(_F_mapassign_faststr)          // CALL_GO ${fn}
+    self.Emit("MOVQ", _AX, _VP)                 // MOVQ    AX, VP
+    self.mapaccess_ptr(t)
+}
+
+// mapassign_call_from_AX calls a map-assign function ${fn} with the key (or
+// key pointer) currently in AX: AX is moved to CX, then AX = map type and
+// BX = map (VP). The returned value slot in AX becomes the new VP.
+func (self *_Assembler) mapassign_call_from_AX(t reflect.Type, fn obj.Addr) {
+    self.Emit("MOVQ", _AX, _CX)
+    self.Emit("MOVQ", jit.Type(t), _AX)         // MOVQ    ${t}, AX
+    self.Emit("MOVQ", _VP, _BX)                 // MOVQ    VP, BX
+    self.call_go(fn)                            // CALL_GO ${fn}
+    self.Emit("MOVQ", _AX, _VP)                 // MOVQ    AX, VP
+}
+
+// mapassign_fastx is mapassign_call_from_AX plus the indirect-element follow,
+// for the runtime's fast-path assign variants.
+func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
+    self.mapassign_call_from_AX(t, fn)
+    self.mapaccess_ptr(t)
+}
+
+// mapassign_utext assigns a map key whose type implements
+// encoding.TextUnmarshaler: it allocates a fresh key value, invokes the
+// unmarshaler on the saved source string (sv.p / sv.n), then inserts the key.
+// addressable selects the pointer-receiver form (the key itself is passed by
+// address); otherwise a pointer-typed key goes through the fast64ptr assign.
+func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
+    pv := false
+    vk := t.Key()
+    tk := t.Key()
+
+    /* deref pointer if needed */
+    if vk.Kind() == reflect.Ptr {
+        pv = true
+        vk = vk.Elem()
+    }
+
+    /* addressable value with pointer receiver */
+    if addressable {
+        pv = false
+        tk = reflect.PtrTo(tk)
+    }
+
+    /* allocate the key, and call the unmarshaler */
+    self.valloc(vk, _BX)                        // VALLOC  ${vk}, BX
+    // must spill vk pointer since next call_go may invoke GC
+    self.Emit("MOVQ" , _BX, _ARG_vk)
+    self.Emit("MOVQ" , jit.Type(tk), _AX)       // MOVQ    ${tk}, AX
+    self.Emit("MOVQ" , _ARG_sv_p, _CX)          // MOVQ    sv.p, CX
+    self.Emit("MOVQ" , _ARG_sv_n, _DI)          // MOVQ    sv.n, DI
+    self.call_go(_F_decodeTextUnmarshaler)      // CALL_GO decodeTextUnmarshaler
+    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
+    self.Sjmp("JNZ"  , _LB_error)               // JNZ     _error
+    self.Emit("MOVQ" , _ARG_vk, _AX)            // MOVQ    VAR.vk, AX
+    self.Emit("MOVQ", jit.Imm(0), _ARG_vk)      // clear the spill slot so it no longer pins the key
+
+    /* select the correct assignment function */
+    if !pv {
+        self.mapassign_call_from_AX(t, _F_mapassign)
+    } else {
+        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
+    }
+}
+
+/** External Unmarshaler Routines **/
+
+// native value-skipping subroutine addresses (invoked via call_sf).
+var (
+    _F_skip_one = jit.Imm(int64(native.S_skip_one))
+    _F_skip_array  = jit.Imm(int64(native.S_skip_array))
+    _F_skip_object = jit.Imm(int64(native.S_skip_object))
+    _F_skip_number = jit.Imm(int64(native.S_skip_number))
+)
+
+// unmarshal_json skips one complete JSON value, captures its raw text into
+// sv.p / sv.n, and hands it to the type's json.Unmarshaler via unmarshal_func.
+func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
+    self.call_sf(_F_skip_one)                                   // CALL_SF   skip_one
+    self.Emit("TESTQ", _AX, _AX)                                // TESTQ     AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)                     // JS        _parse_error_v
+    self.slice_from_r(_AX, 0)                                   // SLICE_R   AX, $0
+    self.Emit("MOVQ" , _DI, _ARG_sv_p)                          // MOVQ      DI, sv.p
+    self.Emit("MOVQ" , _SI, _ARG_sv_n)                          // MOVQ      SI, sv.n
+    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref)     // UNMARSHAL json, ${t}, ${deref}
+}
+
+// unmarshal_text parses and unquotes a JSON string into sv.p / sv.n, then
+// hands it to the type's encoding.TextUnmarshaler via unmarshal_func.
+func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
+    self.parse_string()                                         // PARSE     STRING
+    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)        // UNQUOTE   once, sv.p, sv.n
+    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref)     // UNMARSHAL text, ${t}, ${deref}
+}
+
+// unmarshal_func invokes an unmarshaler adapter ${fn} for type ${t} on the
+// saved source string (sv.p / sv.n). When deref is set and the type is a
+// pointer, a nil pointee is allocated first so the unmarshaler always
+// receives a valid target. Arguments: AX = type, BX = value pointer,
+// CX = string pointer, DI = string length. A non-nil ET aborts to _error.
+func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
+    pt := t
+    vk := t.Kind()
+
+    /* allocate the field if needed */
+    if deref && vk == reflect.Ptr {
+        self.Emit("MOVQ" , _VP, _BX)                // MOVQ   VP, BX
+        self.Emit("MOVQ" , jit.Ptr(_BX, 0), _BX)    // MOVQ   (BX), BX
+        self.Emit("TESTQ", _BX, _BX)                // TESTQ  BX, BX
+        self.Sjmp("JNZ"  , "_deref_{n}")            // JNZ    _deref_{n}
+        self.valloc(t.Elem(), _BX)                  // VALLOC ${t.Elem()}, BX
+        self.WriteRecNotAX(3, _BX, jit.Ptr(_VP, 0), false, false)    // MOVQ   BX, (VP)
+        self.Link("_deref_{n}")                     // _deref_{n}:
+    } else {
+        /* set value pointer */
+        self.Emit("MOVQ", _VP, _BX)                 // MOVQ   VP, BX
+    }
+
+    /* set value type */
+    self.Emit("MOVQ", jit.Type(pt), _AX)        // MOVQ ${pt}, AX
+
+    /* set the source string and call the unmarshaler */
+    self.Emit("MOVQ" , _ARG_sv_p, _CX)          // MOVQ    sv.p, CX
+    self.Emit("MOVQ" , _ARG_sv_n, _DI)          // MOVQ    sv.n, DI
+    self.call_go(fn)                            // CALL_GO ${fn}
+    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
+    self.Sjmp("JNZ"  , _LB_error)               // JNZ     _error
+}
+
+/** Dynamic Decoding Routine **/
+
+// decodeTypedPointer performs a full dynamic decode for an arbitrary
+// (type, pointer) pair; resolved in init() rather than at declaration time.
+var (
+    _F_decodeTypedPointer obj.Addr
+)
+
+func init() {
+    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
+}
+
+// decode_dynamic calls decodeTypedPointer for the runtime type at ${vt} and
+// the value pointer at ${vp}, passing the source buffer (sp/sl), current
+// cursor IC, decoder stack ST and flags fv in registers. On return AX is the
+// new cursor and BX/CX carry the error itab/data. A MismatchTypeError is not
+// fatal here: its position and type are stashed in VAR_ic / VAR_et and
+// decoding continues; any other error jumps to _error. Runtime-allocated
+// registers are saved/restored around the raw CALL through R11 (_IL).
+func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
+    self.Emit("MOVQ" , vp, _SI)    // MOVQ    ${vp}, SI
+    self.Emit("MOVQ" , vt, _DI)    // MOVQ    ${vt}, DI
+    self.Emit("MOVQ", _ARG_sp, _AX)            // MOVQ    sp, AX
+    self.Emit("MOVQ", _ARG_sl, _BX)            // MOVQ    sl, BX
+    self.Emit("MOVQ" , _IC, _CX)                // MOVQ    IC, CX
+    self.Emit("MOVQ" , _ST, _R8)                // MOVQ    ST, R8 
+    self.Emit("MOVQ" , _ARG_fv, _R9)            // MOVQ    fv, R9
+    self.save(_REG_rt...)
+    self.Emit("MOVQ", _F_decodeTypedPointer, _IL)  // MOVQ ${fn}, R11
+    self.Rjmp("CALL", _IL)      // CALL R11
+    self.load(_REG_rt...)
+    self.Emit("MOVQ" , _AX, _IC)                // MOVQ    AX, IC
+    self.Emit("MOVQ" , _BX, _ET)                // MOVQ    BX, ET
+    self.Emit("MOVQ" , _CX, _EP)                // MOVQ    CX, EP
+    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
+    self.Sjmp("JE", "_decode_dynamic_end_{n}")  // JE, _decode_dynamic_end_{n}
+    self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX
+    self.Emit("CMPQ", _ET, _CX)                 // CMPQ ET, CX
+    self.Sjmp("JNE",  _LB_error)                // JNE  LB_error
+    self.Emit("MOVQ", _EP, _VAR_ic)             // MOVQ EP, VAR_ic
+    self.Emit("MOVQ", _ET, _VAR_et)             // MOVQ ET, VAR_et
+    self.Link("_decode_dynamic_end_{n}")
+}
+
+/** OpCode Assembler Functions **/
+
+// runtime helpers used by the opcode assemblers below.
+var (
+    _F_memequal         = jit.Func(memequal)
+    _F_memmove          = jit.Func(memmove)
+    _F_growslice        = jit.Func(growslice)
+    _F_makeslice        = jit.Func(makeslice)
+    _F_makemap_small    = jit.Func(makemap_small)
+    _F_mapassign_fast64 = jit.Func(mapassign_fast64)
+)
+
+// native / caching subroutine offsets.
+var (
+    _F_lspace  = jit.Imm(int64(native.S_lspace))
+    _F_strhash = jit.Imm(int64(caching.S_strhash))
+)
+
+var (
+    _F_b64decode   = jit.Imm(int64(_subr__b64decode))
+    _F_decodeValue = jit.Imm(int64(_subr_decode_value))
+)
+
+var (
+    _F_FieldMap_GetCaseInsensitive obj.Addr
+    // _Zero_Base is the address of a shared zero-length backing store, used
+    // so an empty array can decode to a non-nil, zero-length slice.
+    _Empty_Slice = []byte{}
+    _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
+)
+
+const (
+    _MODE_AVX2 = 1 << 2
+)
+
+// field offsets within caching.FieldEntry.
+const (
+    _Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
+    _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
+    _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
+)
+
+// reflect.Ptr kind value and the KindFlags offset within rt.GoType, used for
+// inline kind checks on runtime type descriptors.
+const (
+    _Vk_Ptr       = int64(reflect.Ptr)
+    _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
+)
+
+func init() {
+    _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
+}
+
+// _asm_OP_any decodes into an interface{} slot at VP. If the interface
+// already holds a non-nil pointer (and does not point at the slot itself),
+// the value is decoded in place via decode_dynamic; otherwise the generic
+// decodeValue subroutine builds a fresh value.
+func (self *_Assembler) _asm_OP_any(_ *_Instr) {
+    self.Emit("MOVQ"   , jit.Ptr(_VP, 8), _CX)              // MOVQ    8(VP), CX
+    self.Emit("TESTQ"  , _CX, _CX)                          // TESTQ   CX, CX
+    self.Sjmp("JZ"     , "_decode_{n}")                     // JZ      _decode_{n}
+    self.Emit("CMPQ"   , _CX, _VP)                          // CMPQ    CX, VP
+    self.Sjmp("JE"     , "_decode_{n}")                     // JE      _decode_{n}
+    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)              // MOVQ    (VP), AX
+    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
+    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
+    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
+    self.Sjmp("JNE"    , "_decode_{n}")                     // JNE     _decode_{n}
+    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
+    self.decode_dynamic(_AX, _DI)                           // DECODE  AX, DI
+    self.Sjmp("JMP"    , "_decode_end_{n}")                 // JMP     _decode_end_{n}
+    self.Link("_decode_{n}")                                // _decode_{n}:
+    self.Emit("MOVQ"   , _ARG_fv, _DF)                      // MOVQ    fv, DF
+    self.Emit("MOVQ"   , _ST, jit.Ptr(_SP, 0))              // MOVQ    _ST, (SP)
+    self.call(_F_decodeValue)                               // CALL    decodeValue
+    self.Emit("MOVQ"   , jit.Imm(0), jit.Ptr(_SP, 0))              // MOVQ    $0, (SP)
+    self.Emit("TESTQ"  , _EP, _EP)                          // TESTQ   EP, EP
+    self.Sjmp("JNZ"    , _LB_parsing_error)                 // JNZ     _parsing_error
+    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
+}
+
+// _asm_OP_dyn decodes into a non-empty interface at VP: the stored dynamic
+// type (taken from the itab at (VP)) must be a pointer kind, otherwise it is
+// a type error; the pointee is then decoded in place via decode_dynamic.
+func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
+    self.Emit("MOVQ"   , jit.Type(p.vt()), _ET)             // MOVQ    ${p.vt()}, ET
+    self.Emit("CMPQ"   , jit.Ptr(_VP, 8), jit.Imm(0))       // CMPQ    8(VP), $0
+    self.Sjmp("JE"     , _LB_type_error)                    // JE      _type_error
+    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _CX)              // MOVQ    (VP), CX
+    self.Emit("MOVQ"   , jit.Ptr(_CX, 8), _CX)              // MOVQ    8(CX), CX
+    self.Emit("MOVBLZX", jit.Ptr(_CX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(CX), DX
+    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
+    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
+    self.Sjmp("JNE"    , _LB_type_error)                    // JNE     _type_error
+    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
+    self.decode_dynamic(_CX, _DI)                           // DECODE  CX, DI
+    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
+}
+
+// _asm_OP_str decodes a JSON string into the Go string header at VP
+// (pointer at (VP), length at 8(VP)).
+func (self *_Assembler) _asm_OP_str(_ *_Instr) {
+    self.parse_string()                                     // PARSE   STRING
+    self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true)     // UNQUOTE once, (VP), 8(VP)
+}
+
+// _asm_OP_bin decodes a base64-encoded JSON string into the []byte at VP:
+// the raw string is sliced out, a buffer of cap = len/4*3 is allocated, and
+// the native b64decode subroutine fills it; the decoded length lands in
+// 8(VP). A negative return from b64decode jumps to _base64_error.
+func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
+    self.parse_string()                                 // PARSE  STRING
+    self.slice_from(_VAR_st_Iv, -1)                     // SLICE  st.Iv, $-1
+    self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0))            // MOVQ   DI, (VP)
+    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8))            // MOVQ   SI, 8(VP)
+    self.Emit("SHRQ" , jit.Imm(2), _SI)                 // SHRQ   $2, SI
+    self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI)    // LEAQ   (SI)(SI*2), SI
+    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))           // MOVQ   SI, 16(VP)
+    self.malloc_AX(_SI, _SI)                               // MALLOC SI, SI
+
+    // TODO: due to base64x's bug, only use AVX mode now
+    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)          //  MOVL $_MODE_JSON, CX
+
+    /* call the decoder */
+    self.Emit("XORL" , _DX, _DX)                // XORL  DX, DX
+    self.Emit("MOVQ" , _VP, _DI)                // MOVQ  VP, DI
+
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R8)    // MOVQ (VP), R8
+    self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false)    // XCHGQ SI, (VP) 
+    self.Emit("MOVQ" , _R8, _SI)
+
+    self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8))    // XCHGQ DX, 8(VP)
+    self.call_c(_F_b64decode)                     // CALL  b64decode
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
+    self.Sjmp("JS"   , _LB_base64_error)        // JS    _base64_error
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ  AX, 8(VP)
+}
+
+// _asm_OP_bool decodes a JSON boolean into the byte at (VP): "true" stores 1,
+// "false" stores 0. On a literal mismatch the value is skipped instead of
+// failing hard — the position and expected bool type are stashed in
+// VAR_ic / VAR_et and control transfers to _skip_one with a resume address
+// in VAR_pc.
+func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
+    self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                     // LEAQ 4(IC), AX
+    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
+    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f'))    // CMPB (IP)(IC), $'f'
+    self.Sjmp("JE"  , "_false_{n}")                             // JE   _false_{n}
+    self.Emit("MOVL", jit.Imm(_IM_true), _CX)                   // MOVL $"true", CX
+    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
+    self.Sjmp("JE" , "_bool_true_{n}")          
+    // try to skip the value
+    self.Emit("MOVQ", _IC, _VAR_ic)           
+    self.Emit("MOVQ", _T_bool, _ET)     
+    self.Emit("MOVQ", _ET, _VAR_et)
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Sref("_end_{n}", 4)
+    self.Emit("MOVQ", _R9, _VAR_pc)
+    self.Sjmp("JMP"  , _LB_skip_one) 
+
+    self.Link("_bool_true_{n}")
+    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
+    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))              // MOVB $1, (VP)
+    self.Sjmp("JMP" , "_end_{n}")                               // JMP  _end_{n}
+    self.Link("_false_{n}")                                     // _false_{n}:
+    self.Emit("ADDQ", jit.Imm(1), _AX)                          // ADDQ $1, AX
+    self.Emit("ADDQ", jit.Imm(1), _IC)                          // ADDQ $1, IC
+    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
+    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
+    self.Emit("MOVL", jit.Imm(_IM_alse), _CX)                   // MOVL $"alse", CX
+    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
+    self.Sjmp("JNE" , _LB_im_error)                             // JNE  _im_error
+    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
+    self.Emit("XORL", _AX, _AX)                                 // XORL AX, AX
+    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                     // MOVB AX, (VP)
+    self.Link("_end_{n}")                                       // _end_{n}:
+}
+
+// _asm_OP_num decodes a json.Number: a raw or quoted numeric literal is
+// validated by the native skip_number routine and the verbatim text is stored
+// as a string header at (VP)/8(VP), copying it first when the copy_string
+// flag is set. VAR_fl records whether the number was quoted so the closing
+// quote can be consumed. On an invalid number the value is skipped via
+// _skip_one with the mismatch recorded in VAR_ic / VAR_et.
+func (self *_Assembler) _asm_OP_num(_ *_Instr) {
+    self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
+    self.Emit("MOVQ", _IC, _BX)
+    self.Sjmp("JNE", "_skip_number_{n}")
+    self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
+    self.Emit("ADDQ", jit.Imm(1), _IC)
+    self.Link("_skip_number_{n}")
+
+    /* call skip_number */
+    self.Emit("LEAQ", _ARG_s, _DI)                      // LEAQ  s<>+0(FP), DI
+    self.Emit("MOVQ", _IC, _ARG_ic)                     // MOVQ  IC, ic<>+16(FP)
+    self.Emit("LEAQ", _ARG_ic, _SI)                     // LEAQ  ic<>+16(FP), SI
+    self.callc(_F_skip_number)                          // CALL  _F_skip_number
+    self.Emit("MOVQ", _ARG_ic, _IC)                     // MOVQ  ic<>+16(FP), IC
+    self.Emit("TESTQ", _AX, _AX)                        // TESTQ AX, AX
+    self.Sjmp("JNS"   , "_num_next_{n}")
+
+    /* call skip one */
+    self.Emit("MOVQ", _BX, _VAR_ic)           
+    self.Emit("MOVQ", _T_number, _ET)     
+    self.Emit("MOVQ", _ET, _VAR_et)
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Sref("_num_end_{n}", 4)
+    self.Emit("MOVQ", _R9, _VAR_pc)
+    self.Sjmp("JMP"  , _LB_skip_one)
+
+    /* assign string */
+    self.Link("_num_next_{n}")
+    self.slice_from_r(_AX, 0)
+    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
+    self.Sjmp("JNC", "_num_write_{n}")
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Sref("_num_write_{n}", 4)
+    self.Sjmp("JMP", "_copy_string")
+    self.Link("_num_write_{n}")
+    self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))     // MOVQ  SI, 8(VP)
+    self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
+    self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
+    self.Sjmp("JNE", "_num_end_{n}")
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
+    self.Sjmp("JNE", _LB_char_0_error)
+    self.Emit("ADDQ", jit.Imm(1), _IC)
+    self.Link("_num_end_{n}")
+}
+
+// _asm_OP_i8 parses a signed integer, range-checks it for int8, and stores
+// the low byte at (VP).
+func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
+    var pin = "_i8_end_{n}"
+    self.parse_signed(int8Type, pin, -1)                                                 // PARSE int8
+    self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)     // RANGE int8
+    self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))                             // MOVB  CX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_i16 parses a signed integer, range-checks it for int16, and stores
+// the low word at (VP).
+func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
+    var pin = "_i16_end_{n}"
+    self.parse_signed(int16Type, pin, -1)                                                     // PARSE int16
+    self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)     // RANGE int16
+    self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))                                 // MOVW  CX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_i32 parses a signed integer, range-checks it for int32, and stores
+// the low dword at (VP).
+func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
+    var pin = "_i32_end_{n}"
+    self.parse_signed(int32Type, pin, -1)                                                     // PARSE int32
+    self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)     // RANGE int32
+    self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))                                 // MOVL  CX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_i64 parses a signed integer and stores the full 64-bit value at
+// (VP); no range check is needed.
+func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
+    var pin = "_i64_end_{n}"
+    self.parse_signed(int64Type, pin, -1)                         // PARSE int64
+    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u8 parses an unsigned integer, range-checks it for uint8, and
+// stores the low byte at (VP).
+func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
+    var pin = "_u8_end_{n}"
+    self.parse_unsigned(uint8Type, pin, -1)                                   // PARSE uint8
+    self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)  // RANGE uint8
+    self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))                 // MOVB  CX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u16 parses an unsigned integer, range-checks it for uint16, and
+// stores the low word at (VP).
+func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
+    var pin = "_u16_end_{n}"
+    self.parse_unsigned(uint16Type, pin, -1)                                       // PARSE uint16
+    self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)   // RANGE uint16
+    self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))                     // MOVW  CX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u32 parses an unsigned integer, range-checks it for uint32, and
+// stores the low dword at (VP).
+func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
+    var pin = "_u32_end_{n}"
+    self.parse_unsigned(uint32Type, pin, -1)                                       // PARSE uint32
+    self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)   // RANGE uint32
+    self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))                     // MOVL  CX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u64 parses an unsigned integer and stores the full 64-bit value at
+// (VP); no range check is needed.
+func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
+    var pin = "_u64_end_{n}"
+    self.parse_unsigned(uint64Type, pin, -1)                       // PARSE uint64
+    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_f32 parses a number, range-checks it for float32 (result in X0),
+// and stores the single-precision value at (VP).
+func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
+    var pin = "_f32_end_{n}"
+    self.parse_number(float32Type, pin, -1)                         // PARSE NUMBER
+    self.range_single_X0()                         // RANGE float32
+    self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0))    // MOVSS X0, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_f64 parses a number and stores the double-precision value (st.Dv)
+// at (VP); no range check is needed.
+func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
+    var pin = "_f64_end_{n}"
+    self.parse_number(float64Type, pin, -1)                         // PARSE NUMBER
+    self.Emit("MOVSD", _VAR_st_Dv, _X0)         // MOVSD st.Dv, X0
+    self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0))    // MOVSD X0, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_unquote decodes a doubly-quoted string: it requires the literal
+// prefix `\"` at the cursor, consumes it, parses the inner string, and
+// unquotes it twice into the string header at (VP)/8(VP).
+func (self *_Assembler) _asm_OP_unquote(_ *_Instr) {
+    self.check_eof(2)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))   // CMPB    (IP)(IC), $'\\'
+    self.Sjmp("JNE" , _LB_char_0_error)                         // JNE     _char_0_error
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))    // CMPB    1(IP)(IC), $'"'
+    self.Sjmp("JNE" , _LB_char_1_error)                         // JNE     _char_1_error
+    self.Emit("ADDQ", jit.Imm(2), _IC)                          // ADDQ    $2, IC
+    self.parse_string()                                         // PARSE   STRING
+    self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false)        // UNQUOTE twice, (VP), 8(VP)
+}
+
+// _asm_OP_nil_1 zeroes one machine word at (VP).
+func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
+    self.Emit("XORL", _AX, _AX)                 // XORL AX, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ AX, (VP)
+}
+
+// _asm_OP_nil_2 zeroes two machine words at (VP) with a single 16-byte store.
+func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
+    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
+}
+
+// _asm_OP_nil_3 zeroes three machine words at (VP): a 16-byte store followed
+// by one word at 16(VP) (e.g. a slice header).
+func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
+    self.Emit("XORL" , _AX, _AX)                // XORL  AX, AX
+    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))   // MOVQ  AX, 16(VP)
+}
+
+// _asm_OP_deref follows (allocating if nil) the pointer at VP for the
+// instruction's type.
+func (self *_Assembler) _asm_OP_deref(p *_Instr) {
+    self.vfollow(p.vt())
+}
+
+// _asm_OP_index advances VP by the instruction's immediate byte offset.
+func (self *_Assembler) _asm_OP_index(p *_Instr) {
+    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)    // MOVQ ${p.i64()}, AX
+    self.Emit("ADDQ", _AX, _VP)                 // ADDQ AX, VP
+}
+
+// _asm_OP_is_null branches to instruction ${p.vi()} if the next four bytes
+// are the literal "null", consuming it; otherwise it falls through.
+func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
+    self.Emit("LEAQ"   , jit.Ptr(_IC, 4), _AX)                          // LEAQ    4(IC), AX
+    self.Emit("CMPQ"   , _AX, _IL)                                      // CMPQ    AX, IL
+    self.Sjmp("JA"     , "_not_null_{n}")                               // JA      _not_null_{n}
+    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL    (IP)(IC), $"null"
+    self.Emit("CMOVQEQ", _AX, _IC)                                      // CMOVQEQ AX, IC
+    self.Xjmp("JE"     , p.vi())                                        // JE      {p.vi()}
+    self.Link("_not_null_{n}")                                          // _not_null_{n}:
+}
+
+// _asm_OP_is_null_quote branches to instruction ${p.vi()} if the next five
+// bytes are `null"` (a null inside a quoted value), consuming the "null";
+// otherwise it falls through.
+func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
+    self.Emit("LEAQ"   , jit.Ptr(_IC, 5), _AX)                          // LEAQ    5(IC), AX
+    self.Emit("CMPQ"   , _AX, _IL)                                      // CMPQ    AX, IL
+    self.Sjmp("JA"     , "_not_null_quote_{n}")                         // JA      _not_null_quote_{n}
+    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL    (IP)(IC), $"null"
+    self.Sjmp("JNE"    , "_not_null_quote_{n}")                         // JNE     _not_null_quote_{n}
+    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))         // CMPB    4(IP)(IC), $'"'
+    self.Emit("CMOVQEQ", _AX, _IC)                                      // CMOVQEQ AX, IC
+    self.Xjmp("JE"     , p.vi())                                        // JE      {p.vi()}
+    self.Link("_not_null_quote_{n}")                                    // _not_null_quote_{n}:
+}
+
+// _asm_OP_map_init allocates a small map via makemap_small if the map at
+// (VP) is nil, writes it back, and makes VP point at the map itself.
+func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)    // MOVQ    (VP), AX
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JNZ"  , "_end_{n}")              // JNZ     _end_{n}
+    self.call_go(_F_makemap_small)              // CALL_GO makemap_small
+    self.WritePtrAX(6, jit.Ptr(_VP, 0), false)    // MOVQ    AX, (VP)
+    self.Link("_end_{n}")                       // _end_{n}:
+    self.Emit("MOVQ" , _AX, _VP)                // MOVQ    AX, VP
+}
+
+// _asm_OP_map_key_i8 parses and range-checks an int8 map key and inserts it
+// via the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
+    self.parse_signed(int8Type, "", p.vi())                                                 // PARSE     int8
+    self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)     // RANGE     int8
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                              // MAPASSIGN int8, mapassign, st.Iv
+}
+
+// _asm_OP_map_key_i16 parses and range-checks an int16 map key and inserts
+// it via the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
+    self.parse_signed(int16Type, "", p.vi())                                                     // PARSE     int16
+    self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)     // RANGE     int16
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                                  // MAPASSIGN int16, mapassign, st.Iv
+}
+
+// _asm_OP_map_key_i32 parses and range-checks an int32 map key and inserts
+// it, using the runtime's mapassign_fast32 fast path when the map layout
+// allows it (mapfast), else the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
+    self.parse_signed(int32Type, "", p.vi())                                                     // PARSE     int32
+    self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)     // RANGE     int32
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)                                  // MAPASSIGN int32, mapassign, st.Iv
+    } else {
+        self.Emit("MOVQ", _CX, _AX)                                         // MOVQ CX, AX
+        self.mapassign_fastx(vt, _F_mapassign_fast32)                       // MAPASSIGN int32, mapassign_fast32
+    }
+}
+
+// _asm_OP_map_key_i64 parses an int64 map key and inserts it, using the
+// runtime's mapassign_fast64 fast path when the map layout allows it
+// (mapfast), else the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
+    self.parse_signed(int64Type, "", p.vi())                                 // PARSE     int64
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)              // MAPASSIGN int64, mapassign, st.Iv
+    } else {
+        self.Emit("MOVQ", _VAR_st_Iv, _AX)              // MOVQ      st.Iv, AX
+        self.mapassign_fastx(vt, _F_mapassign_fast64)   // MAPASSIGN int64, mapassign_fast64
+    }
+}
+
+// _asm_OP_map_key_u8 parses and range-checks a uint8 map key and inserts it
+// via the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
+    self.parse_unsigned(uint8Type, "", p.vi())                                   // PARSE     uint8
+    self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)  // RANGE     uint8
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                    // MAPASSIGN uint8, vt.Iv
+}
+
+// _asm_OP_map_key_u16 parses and range-checks a uint16 map key and inserts
+// it via the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
+    self.parse_unsigned(uint16Type, "", p.vi())                                       // PARSE     uint16
+    self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)   // RANGE     uint16
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                      // MAPASSIGN uint16, vt.Iv
+}
+
+// _asm_OP_map_key_u32 parses and range-checks a uint32 map key and inserts
+// it, using the runtime's mapassign_fast32 fast path when the map layout
+// allows it (mapfast), else the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
+    self.parse_unsigned(uint32Type, "", p.vi())                                       // PARSE     uint32
+    self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)   // RANGE     uint32
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)                      // MAPASSIGN uint32, vt.Iv
+    } else {
+        self.Emit("MOVQ", _CX, _AX)                             // MOVQ CX, AX
+        self.mapassign_fastx(vt, _F_mapassign_fast32)           // MAPASSIGN uint32, mapassign_fast32
+    }
+}
+
+// _asm_OP_map_key_u64 parses a uint64 map key and inserts it, using the
+// runtime's mapassign_fast64 fast path when the map layout allows it
+// (mapfast), else the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
+    self.parse_unsigned(uint64Type, "", p.vi())                                       // PARSE     uint64
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)                      // MAPASSIGN uint64, vt.Iv
+    } else {
+        self.Emit("MOVQ", _VAR_st_Iv, _AX)                      // MOVQ      st.Iv, AX
+        self.mapassign_fastx(vt, _F_mapassign_fast64)           // MAPASSIGN uint64, mapassign_fast64
+    }
+}
+
+// _asm_OP_map_key_f32 parses a number, range-checks it for float32, writes
+// it back to st.Dv, and inserts it as a map key via the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
+    self.parse_number(float32Type, "", p.vi())                     // PARSE     NUMBER
+    self.range_single_X0()                     // RANGE     float32
+    self.Emit("MOVSS", _X0, _VAR_st_Dv)     // MOVSS     X0, st.Dv
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Dv)  // MAPASSIGN ${p.vt()}, mapassign, st.Dv
+}
+
+// _asm_OP_map_key_f64 parses a number and inserts st.Dv as a float64 map key
+// via the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
+    self.parse_number(float64Type, "", p.vi())                     // PARSE     NUMBER
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Dv)  // MAPASSIGN ${p.vt()}, mapassign, st.Dv
+}
+
+// _asm_OP_map_key_str parses and unquotes a string map key into sv.p / sv.n,
+// then inserts it via mapassign_faststr when the map layout allows it
+// (mapfast); otherwise the string header is materialized into a freshly
+// allocated key value and inserted via the generic mapassign.
+func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
+    self.parse_string()                          // PARSE     STRING
+    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)      // UNQUOTE   once, sv.p, sv.n
+    if vt := p.vt(); !mapfast(vt) {
+        self.valloc(vt.Key(), _DI)
+        self.Emit("MOVOU", _ARG_sv, _X0)
+        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
+        self.mapassign_std(vt, jit.Ptr(_DI, 0))        // MAPASSIGN string, DI, SI
+    } else {
+        self.mapassign_str_fast(vt, _ARG_sv_p, _ARG_sv_n)    // MAPASSIGN string, DI, SI
+    }
+}
+
+// _asm_OP_map_key_utext decodes a map key whose type implements
+// encoding.TextUnmarshaler (value receiver form).
+func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
+    self.parse_string()                         // PARSE     STRING
+    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
+    self.mapassign_utext(p.vt(), false)         // MAPASSIGN utext, ${p.vt()}, false
+}
+
+// _asm_OP_map_key_utext_p decodes a map key whose type implements
+// encoding.TextUnmarshaler with a pointer receiver (addressable form).
+func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
+    self.parse_string()                         // PARSE     STRING
+    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
+    self.mapassign_utext(p.vt(), true)          // MAPASSIGN utext, ${p.vt()}, true
+}
+
+// _asm_OP_array_skip skips the remainder of a JSON array using the native
+// skip_array subroutine; a negative return signals a parse error.
+func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
+    self.call_sf(_F_skip_array)                 // CALL_SF skip_array
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+}
+
+func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
+    self.mem_clear_rem(p.i64(), true)
+}
+
+func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
+    self.mem_clear_rem(p.i64(), false)
+}
+
+// _asm_OP_slice_init resets the slice length at 8(VP) to zero and, when the
+// capacity at 16(VP) is zero, allocates a minimal backing array via makeslice.
+func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
+    self.Emit("XORL" , _AX, _AX)                    // XORL    AX, AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // MOVQ    AX, 8(VP)
+    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _BX)       // MOVQ    16(VP), BX
+    self.Emit("TESTQ", _BX, _BX)                    // TESTQ   BX, BX
+    self.Sjmp("JNZ"  , "_done_{n}")                 // JNZ     _done_{n}
+    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX)     // MOVQ    ${_MinSlice}, CX
+    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))       // MOVQ    CX, 16(VP)
+    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)       // MOVQ    ${p.vt()}, AX
+    self.call_go(_F_makeslice)                      // CALL_GO makeslice
+    self.WritePtrAX(7, jit.Ptr(_VP, 0), false)      // MOVQ    AX, (VP)
+    self.Emit("XORL" , _AX, _AX)                    // XORL    AX, AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // MOVQ    AX, 8(VP)
+    self.Link("_done_{n}")                          // _done_{n}
+}
+
+// _asm_OP_check_empty fast-paths an empty JSON array: if the next byte is ']',
+// it stores the shared zero-base pointer with zero len/cap into the slice
+// header and jumps past the element-decoding loop. Only ']' is supported.
+func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
+    rbracket := p.vb()
+    if rbracket == ']' {
+        self.check_eof(1)
+        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
+        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB    (IP)(IC), ']'
+        self.Sjmp("JNE" , "_not_empty_array_{n}")                            // JNE     _not_empty_array_{n}
+        self.Emit("MOVQ", _AX, _IC)                                          // MOVQ    AX, IC
+        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
+        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
+        self.Emit("PXOR", _X0, _X0)                                          // PXOR    X0, X0
+        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))                             // MOVOU   X0, 8(VP)
+        self.Xjmp("JMP" , p.vi())                                            // JMP     {p.vi()}
+        self.Link("_not_empty_array_{n}")
+    } else {
+        panic("only implement check empty array here!")
+    }
+}
+
+// _asm_OP_slice_append bumps the slice length and points VP at the new
+// element. When len == cap it doubles capacity via growslice first; for
+// pointer-free element types it additionally zeroes the newly exposed
+// region {oldcap..newcap} so stale bytes are never decoded as values.
+func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
+    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)            // MOVQ    8(VP), AX
+    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))           // CMPQ    AX, 16(VP)
+    self.Sjmp("JB"   , "_index_{n}")                    // JB      _index_{n}
+    self.Emit("MOVQ" , _AX, _SI)                        // MOVQ    AX, SI
+    self.Emit("SHLQ" , jit.Imm(1), _SI)                 // SHLQ    $1, SI
+    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)           // MOVQ    ${p.vt()}, AX
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _BX)            // MOVQ   (VP), BX
+    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX)            // MOVQ    8(VP), CX
+    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _DI)           // MOVQ    16(VP), DI
+    self.call_go(_F_growslice)                          // CALL_GO growslice
+    self.WritePtrAX(8, jit.Ptr(_VP, 0), false)          // MOVQ    AX, (VP)
+    self.Emit("MOVQ" , _BX, jit.Ptr(_VP, 8))            // MOVQ    BX, 8(VP)
+    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))           // MOVQ    CX, 16(VP)
+
+    // because growslice not zero memory {oldcap, newlen} when append et not has ptrdata.
+    // but we should zero it, avoid decode it as random values.
+    if rt.UnpackType(p.vt()).PtrData == 0 {
+        self.Emit("MOVQ" , _CX, _DI)                        // MOVQ    CX, DI
+        self.Emit("SUBQ" , _BX, _DI)                        // SUBQ    BX, DI
+    
+        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))     // ADDQ    $1, 8(VP)
+        self.Emit("MOVQ" , _AX, _VP)                        // MOVQ    AX, VP
+        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)   // MOVQ    ${p.vlen()}, CX
+        self.Emit("MOVQ" , _BX, _AX)                        // MOVQ    BX, AX 
+        self.From("MULQ" , _CX)                             // MULQ    CX
+        self.Emit("ADDQ" , _AX, _VP)                        // ADDQ    AX, VP
+
+        self.Emit("MOVQ" , _DI, _AX)                        // MOVQ    DI, AX
+        self.From("MULQ" , _CX)                             // MULQ    CX
+        self.Emit("MOVQ" , _AX, _BX)                        // MOVQ    AX, BX
+        self.Emit("MOVQ" , _VP, _AX)                        // MOVQ    VP, AX
+        self.mem_clear_fn(true)                             // CALL_GO memclr{Has,NoHeap}
+        self.Sjmp("JMP", "_append_slice_end_{n}")
+    }
+
+    self.Emit("MOVQ" , _BX, _AX)                        // MOVQ    BX, AX
+    self.Link("_index_{n}")                             // _index_{n}:
+    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))     // ADDQ    $1, 8(VP)
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)            // MOVQ    (VP), VP
+    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)   // MOVQ    ${p.vlen()}, CX
+    self.From("MULQ" , _CX)                             // MULQ    CX
+    self.Emit("ADDQ" , _AX, _VP)                        // ADDQ    AX, VP
+    self.Link("_append_slice_end_{n}")
+}
+
+// _asm_OP_object_skip skips a whole JSON object via the native skip routine;
+// a negative return in AX indicates a parse error.
+func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
+    self.call_sf(_F_skip_object)                // CALL_SF skip_object
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+}
+
+// _asm_OP_object_next skips one JSON value (an unwanted member value).
+func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+}
+
+// _asm_OP_struct_field resolves a parsed member name to a field index in sr.
+// It parses and unquotes the key, hashes it, then probes the frozen FieldMap's
+// open-addressed hash table (32-byte entries), confirming hash hits with
+// memequal. On a miss it falls back to the case-insensitive lookup; if that
+// also fails and unknown fields are disabled (fv bit), it raises _field_error.
+// sr stays -1 when the field is unknown and tolerated.
+func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
+    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
+    self.Emit("MOVQ" , jit.Imm(-1), _AX)                        // MOVQ    $-1, AX
+    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, sr
+    self.parse_string()                                         // PARSE   STRING
+    self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, false)                     // UNQUOTE once, sv.p, sv.n
+    self.Emit("LEAQ" , _ARG_sv, _AX)                            // LEAQ    sv, AX
+    self.Emit("XORL" , _BX, _BX)                                // XORL    BX, BX
+    self.call_go(_F_strhash)                                    // CALL_GO strhash
+    self.Emit("MOVQ" , _AX, _R9)                                // MOVQ    AX, R9
+    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)      // MOVQ    ${p.vf()}, CX
+    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)   // MOVQ    FieldMap.b(CX), SI
+    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)   // MOVQ    FieldMap.N(CX), CX
+    self.Emit("TESTQ", _CX, _CX)                                // TESTQ   CX, CX
+    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
+    self.Link("_loop_{n}")                                      // _loop_{n}:
+    self.Emit("XORL" , _DX, _DX)                                // XORL    DX, DX
+    self.From("DIVQ" , _CX)                                     // DIVQ    CX
+    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)                    // LEAQ    1(DX), AX
+    self.Emit("SHLQ" , jit.Imm(5), _DX)                         // SHLQ    $5, DX
+    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)            // LEAQ    (SI)(DX), DI
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)             // MOVQ    FieldEntry.Hash(DI), R8
+    self.Emit("TESTQ", _R8, _R8)                                // TESTQ   R8, R8
+    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
+    self.Emit("CMPQ" , _R8, _R9)                                // CMPQ    R8, R9
+    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)         // MOVQ    FieldEntry.Name+8(DI), DX
+    self.Emit("CMPQ" , _DX, _ARG_sv_n)                          // CMPQ    DX, sv.n
+    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)               // MOVQ    FieldEntry.ID(DI), R8
+    self.Emit("MOVQ" , _AX, _VAR_ss_AX)                         // MOVQ    AX, ss.AX
+    self.Emit("MOVQ" , _CX, _VAR_ss_CX)                         // MOVQ    CX, ss.CX
+    self.Emit("MOVQ" , _SI, _VAR_ss_SI)                         // MOVQ    SI, ss.SI
+    self.Emit("MOVQ" , _R8, _VAR_ss_R8)                         // MOVQ    R8, ss.R8
+    self.Emit("MOVQ" , _R9, _VAR_ss_R9)                         // MOVQ    R9, ss.R9
+    self.Emit("MOVQ" , _ARG_sv_p, _AX)                          // MOVQ    _VAR_sv_p, AX
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)             // MOVQ    FieldEntry.Name(DI), CX
+    self.Emit("MOVQ" , _CX, _BX)                                // MOVQ    CX, BX
+    self.Emit("MOVQ" , _DX, _CX)                                // MOVQ    DX, CX
+    self.call_go(_F_memequal)                                   // CALL_GO memequal
+    self.Emit("MOVB" , _AX, _DX)                                // MOVB    AX, DX
+    self.Emit("MOVQ" , _VAR_ss_AX, _AX)                         // MOVQ    ss.AX, AX
+    self.Emit("MOVQ" , _VAR_ss_CX, _CX)                         // MOVQ    ss.CX, CX
+    self.Emit("MOVQ" , _VAR_ss_SI, _SI)                         // MOVQ    ss.SI, SI
+    self.Emit("MOVQ" , _VAR_ss_R9, _R9)                         // MOVQ    ss.R9, R9
+    self.Emit("TESTB", _DX, _DX)                                // TESTB   DX, DX
+    self.Sjmp("JZ"   , "_loop_{n}")                             // JZ      _loop_{n}
+    self.Emit("MOVQ" , _VAR_ss_R8, _R8)                         // MOVQ    ss.R8, R8
+    self.Emit("MOVQ" , _R8, _VAR_sr)                            // MOVQ    R8, sr
+    self.Sjmp("JMP"  , "_end_{n}")                              // JMP     _end_{n}
+    self.Link("_try_lowercase_{n}")                             // _try_lowercase_{n}:
+    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)   // MOVQ    ${p.vf()}, AX
+    self.Emit("MOVQ", _ARG_sv_p, _BX)                            // MOVQ   sv.p, BX
+    self.Emit("MOVQ", _ARG_sv_n, _CX)                            // MOVQ   sv.n, CX
+    self.call_go(_F_FieldMap_GetCaseInsensitive)                // CALL_GO FieldMap::GetCaseInsensitive
+    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, _VAR_sr
+    self.Emit("TESTQ", _AX, _AX)                                // TESTQ   AX, AX
+    self.Sjmp("JNS"  , "_end_{n}")                              // JNS     _end_{n}
+    self.Emit("BTQ"  , jit.Imm(_F_disable_unknown), _ARG_fv)    // BTQ     ${_F_disable_unknown}, fv
+    self.Sjmp("JC"   , _LB_field_error)                         // JC      _field_error
+    self.Link("_end_{n}")                                       // _end_{n}:
+}
+
+// _asm_OP_unmarshal invokes the value's json.Unmarshaler (value receiver).
+func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
+    self.unmarshal_json(p.vt(), true)
+}
+
+// _asm_OP_unmarshal_p invokes the value's json.Unmarshaler (pointer receiver).
+func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
+    self.unmarshal_json(p.vt(), false)
+}
+
+// _asm_OP_unmarshal_text invokes encoding.TextUnmarshaler (value receiver).
+func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
+    self.unmarshal_text(p.vt(), true)
+}
+
+// _asm_OP_unmarshal_text_p invokes encoding.TextUnmarshaler (pointer receiver).
+func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
+    self.unmarshal_text(p.vt(), false)
+}
+
+// _asm_OP_lspace skips leading JSON whitespace at the cursor.
+func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
+    self.lspace("_{n}")
+}
+
+// lspace emits whitespace skipping: it inlines up to 4 single-byte checks
+// against the _BM_space bitmap, then falls back to the native lspace routine
+// for longer runs. Jumps to _eof_error if the input ends inside whitespace.
+func (self *_Assembler) lspace(subfix string) {
+    var label = "_lspace" + subfix
+    self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
+    self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
+    self.Emit("MOVQ"   , jit.Imm(_BM_space), _DX)       // MOVQ    _BM_space, DX
+    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
+    self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
+    self.Sjmp("JA"     , label)                // JA      _lspace_{n}
+    self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX
+    self.Sjmp("JNC"    , label)                // JNC     _lspace_{n}
+
+    /* test up to 4 characters */
+    for i := 0; i < 3; i++ {
+        self.Emit("ADDQ"   , jit.Imm(1), _IC)               // ADDQ    $1, IC
+        self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
+        self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
+        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
+        self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
+        self.Sjmp("JA"     , label)                // JA      _lspace_{n}
+        self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX
+        self.Sjmp("JNC"    , label)                // JNC     _lspace_{n}
+    }
+
+    /* handle over to the native function */
+    self.Emit("MOVQ"   , _IP, _DI)                      // MOVQ    IP, DI
+    self.Emit("MOVQ"   , _IL, _SI)                      // MOVQ    IL, SI
+    self.Emit("MOVQ"   , _IC, _DX)                      // MOVQ    IC, DX
+    self.callc(_F_lspace)                                // CALL    lspace
+    self.Emit("TESTQ"  , _AX, _AX)                      // TESTQ   AX, AX
+    self.Sjmp("JS"     , _LB_parsing_error_v)           // JS      _parsing_error_v
+    self.Emit("CMPQ"   , _AX, _IL)                      // CMPQ    AX, IL
+    self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
+    self.Emit("MOVQ"   , _AX, _IC)                      // MOVQ    AX, IC
+    self.Link(label)                           // _lspace_{n}:
+}
+
+// _asm_OP_match_char requires the next input byte to equal p.vb().
+func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
+    self.match_char(p.vb())
+}
+
+// match_char emits a mandatory single-byte match: on mismatch it jumps to
+// _char_0_error, otherwise it advances the cursor past the byte.
+func (self *_Assembler) match_char(char byte) {
+    self.check_eof(1)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char)))  // CMPB (IP)(IC), ${p.vb()}
+    self.Sjmp("JNE" , _LB_char_0_error)                                 // JNE  _char_0_error
+    self.Emit("ADDQ", jit.Imm(1), _IC)                                  // ADDQ $1, IC
+}
+
+// _asm_OP_check_char branches to p.vi() if the next byte equals p.vb(),
+// consuming the byte on a match (via CMOVQEQ) and falling through otherwise.
+func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
+    self.check_eof(1)
+    self.Emit("LEAQ"   , jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
+    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))   // CMPB    (IP)(IC), ${p.vb()}
+    self.Emit("CMOVQEQ", _AX, _IC)                                          // CMOVQEQ AX, IC
+    self.Xjmp("JE"     , p.vi())                                            // JE      {p.vi()}
+}
+
+// _asm_OP_check_char_0 is like _asm_OP_check_char but does NOT consume the
+// byte on a match.
+func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
+    self.check_eof(1)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))   // CMPB    (IP)(IC), ${p.vb()}
+    self.Xjmp("JE"  , p.vi())                                            // JE      {p.vi()}
+}
+
+// _asm_OP_add advances the input cursor by a fixed amount.
+func (self *_Assembler) _asm_OP_add(p *_Instr) {
+    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC)  // ADDQ ${p.vi()}, IC
+}
+
+// _asm_OP_load reloads VP from the top of the decoder's pointer stack.
+func (self *_Assembler) _asm_OP_load(_ *_Instr) {
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
+    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP)     // MOVQ (ST)(AX), VP
+}
+
+// _asm_OP_save pushes VP onto the pointer stack, guarding against overflow
+// (jumps to _stack_error when the stack is full).
+func (self *_Assembler) _asm_OP_save(_ *_Instr) {
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // MOVQ (ST), CX
+    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))     // CMPQ CX, ${_MaxStackBytes}
+    self.Sjmp("JAE"  , _LB_stack_error)                  // JAE  _stack_error
+    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)
+    self.Emit("ADDQ", jit.Imm(8), _CX)                  // ADDQ $8, CX
+    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))             // MOVQ CX, (ST)
+}
+
+// _asm_OP_drop pops one pointer off the stack into VP and zeroes the freed
+// slot so the GC does not see a stale reference.
+func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
+    self.Emit("SUBQ", jit.Imm(8), _AX)                  // SUBQ $8, AX
+    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)     // MOVQ 8(ST)(AX), VP
+    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))             // MOVQ AX, (ST)
+    self.Emit("XORL", _BX, _BX)                         // XORL BX, BX
+    self.Emit("MOVQ", _BX, jit.Sib(_ST, _AX, 1, 8))     // MOVQ BX, 8(ST)(AX)
+}
+
+// _asm_OP_drop_2 pops two pointers at once, zeroing both freed slots with a
+// single 16-byte store.
+func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
+    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)            // MOVQ  (ST), AX
+    self.Emit("SUBQ" , jit.Imm(16), _AX)                // SUBQ  $16, AX
+    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP)    // MOVQ  8(ST)(AX), VP
+    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))            // MOVQ  AX, (ST)
+    self.Emit("PXOR" , _X0, _X0)                        // PXOR  X0, X0
+    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))    // MOVOU X0, 8(ST)(AX)
+}
+
+// _asm_OP_recurse decodes a value of type p.vt() through the generic
+// (dynamic) decoder path.
+func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
+    self.Emit("MOVQ", jit.Type(p.vt()), _AX)    // MOVQ   ${p.vt()}, AX
+    self.decode_dynamic(_AX, _VP)               // DECODE AX, VP
+}
+
+// _asm_OP_goto emits an unconditional branch to instruction p.vi().
+func (self *_Assembler) _asm_OP_goto(p *_Instr) {
+    self.Xjmp("JMP", p.vi())
+}
+
+// _asm_OP_switch emits a PC-relative jump table indexed by sr (the field
+// index resolved by _asm_OP_struct_field); out-of-range values fall through
+// to the default label.
+func (self *_Assembler) _asm_OP_switch(p *_Instr) {
+    self.Emit("MOVQ", _VAR_sr, _AX)             // MOVQ sr, AX
+    self.Emit("CMPQ", _AX, jit.Imm(p.i64()))    // CMPQ AX, ${len(p.vs())}
+    self.Sjmp("JAE" , "_default_{n}")           // JAE  _default_{n}
+
+    /* jump table selector */
+    self.Byte(0x48, 0x8d, 0x3d)                         // LEAQ    ?(PC), DI
+    self.Sref("_switch_table_{n}", 4)                   // ....    &_switch_table_{n}
+    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)  // MOVLQSX (DI)(AX*4), AX
+    self.Emit("ADDQ"   , _DI, _AX)                      // ADDQ    DI, AX
+    self.Rjmp("JMP"    , _AX)                           // JMP     AX
+    self.Link("_switch_table_{n}")                      // _switch_table_{n}:
+
+    /* generate the jump table */
+    for i, v := range p.vs() {
+        self.Xref(v, int64(-i) * 4)
+    }
+
+    /* default case */
+    self.Link("_default_{n}")
+    self.NOP()
+}
+
+// print_gc emits a debug println of (index, current op, next op); used only
+// when instruction tracing is enabled.
+func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
+    self.Emit("MOVQ", jit.Imm(int64(p2.op())),  _CX)// MOVQ $(p2.op()), CX
+    self.Emit("MOVQ", jit.Imm(int64(p1.op())),  _BX) // MOVQ $(p1.op()), BX
+    self.Emit("MOVQ", jit.Imm(int64(i)),  _AX)       // MOVQ $(i), AX
+    self.call_go(_F_println)
+}

+ 1950 - 0
vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go

@@ -0,0 +1,1950 @@
+// +build go1.16,!go1.17
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package decoder
+
+import (
+    `encoding/json`
+    `fmt`
+    `math`
+    `reflect`
+    `unsafe`
+    
+    `github.com/bytedance/sonic/internal/caching`
+    `github.com/bytedance/sonic/internal/jit`
+    `github.com/bytedance/sonic/internal/native`
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/twitchyliquid64/golang-asm/obj`
+)
+
+/** Register Allocations
+ *
+ *  State Registers:
+ *
+ *      %rbx : stack base
+ *      %r12 : input pointer
+ *      %r13 : input length
+ *      %r14 : input cursor
+ *      %r15 : value pointer
+ *
+ *  Error Registers:
+ *
+ *      %r10 : error type register
+ *      %r11 : error pointer register
+ */
+
+/** Function Prototype & Stack Map
+ *
+ *  func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
+ *
+ *  s.buf  :   (FP)
+ *  s.len  :  8(FP)
+ *  ic     : 16(FP)
+ *  vp     : 24(FP)
+ *  sb     : 32(FP)
+ *  fv     : 40(FP)
+ *  sv     : 56(FP)
+ *  err.vt : 72(FP)
+ *  err.vp : 80(FP)
+ */
+
+// Stack-frame layout constants for the generated decoder function.
+const (
+    _FP_args   = 96     // 96 bytes to pass arguments and return values for this function
+    _FP_fargs  = 80     // 80 bytes for passing arguments to other Go functions
+    _FP_saves  = 40     // 40 bytes for saving the registers before CALL instructions
+    _FP_locals = 144    // 144 bytes for local variables
+)
+
+const (
+    _FP_offs = _FP_fargs + _FP_saves + _FP_locals
+    _FP_size = _FP_offs + 8     // 8 bytes for the parent frame pointer
+    _FP_base = _FP_size + 8     // 8 bytes for the return address
+)
+
+// Little-endian immediates used to match JSON literals in 4-byte compares.
+const (
+    _IM_null = 0x6c6c756e   // 'null'
+    _IM_true = 0x65757274   // 'true'
+    _IM_alse = 0x65736c61   // 'alse' ('false' without the 'f')
+)
+
+// Bitmap of JSON whitespace bytes, tested with BTQ.
+const (
+    _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
+)
+
+const (
+    _MODE_JSON = 1 << 3 // base64 mode
+)
+
+// Shared error-handler label names referenced by the emitted branches.
+const (
+    _LB_error           = "_error"
+    _LB_im_error        = "_im_error"
+    _LB_eof_error       = "_eof_error"
+    _LB_type_error      = "_type_error"
+    _LB_field_error     = "_field_error"
+    _LB_range_error     = "_range_error"
+    _LB_stack_error     = "_stack_error"
+    _LB_base64_error    = "_base64_error"
+    _LB_unquote_error   = "_unquote_error"
+    _LB_parsing_error   = "_parsing_error"
+    _LB_parsing_error_v = "_parsing_error_v"
+    _LB_mismatch_error   = "_mismatch_error"
+)
+
+// Per-offset character-mismatch error labels.
+const (
+    _LB_char_0_error  = "_char_0_error"
+    _LB_char_1_error  = "_char_1_error"
+    _LB_char_2_error  = "_char_2_error"
+    _LB_char_3_error  = "_char_3_error"
+    _LB_char_4_error  = "_char_4_error"
+    _LB_char_m2_error = "_char_m2_error"
+    _LB_char_m3_error = "_char_m3_error"
+)
+
+// Shared subroutine label names.
+const (
+    _LB_skip_one = "_skip_one"
+    _LB_skip_key_value = "_skip_key_value"
+)
+
+// Scratch registers usable between emitted instructions.
+var (
+    _AX = jit.Reg("AX")
+    _CX = jit.Reg("CX")
+    _DX = jit.Reg("DX")
+    _DI = jit.Reg("DI")
+    _SI = jit.Reg("SI")
+    _BP = jit.Reg("BP")
+    _SP = jit.Reg("SP")
+    _R8 = jit.Reg("R8")
+    _R9 = jit.Reg("R9")
+    _X0 = jit.Reg("X0")
+    _X1 = jit.Reg("X1")
+)
+
+// Pinned state registers (see the register-allocation comment above):
+// stack base, input pointer/length/cursor, value pointer.
+var (
+    _ST = jit.Reg("BX")
+    _IP = jit.Reg("R12")
+    _IL = jit.Reg("R13")
+    _IC = jit.Reg("R14")
+    _VP = jit.Reg("R15")
+)
+
+var (
+    _R10 = jit.Reg("R10")    // used for gcWriteBarrier
+    _DF  = jit.Reg("R10")    // reuse R10 in generic decoder for flags
+    _ET  = jit.Reg("R10")
+    _EP  = jit.Reg("R11")
+)
+
+// Incoming argument slots on the caller's frame (see the prototype comment).
+var (
+    _ARG_s  = _ARG_sp
+    _ARG_sp = jit.Ptr(_SP, _FP_base)
+    _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
+    _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
+    _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
+    _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
+    _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
+)
+
+// Caller-frame scratch slots for the unquoted string view and map key.
+var (
+    _VAR_sv = _VAR_sv_p
+    _VAR_sv_p = jit.Ptr(_SP, _FP_base + 48)
+    _VAR_sv_n = jit.Ptr(_SP, _FP_base + 56)
+    _VAR_vk   = jit.Ptr(_SP, _FP_base + 64)
+)
+
+// Return-value slots on the caller's frame.
+var (
+    _RET_rc = jit.Ptr(_SP, _FP_base + 72)
+    _RET_et = jit.Ptr(_SP, _FP_base + 80)
+    _RET_ep = jit.Ptr(_SP, _FP_base + 88)
+)
+
+var (
+    _VAR_st = _VAR_st_Vt
+    _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
+)
+
+
+// Local slots mirroring the native value-parser's StateMachine struct.
+var (
+    _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
+    _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
+    _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
+    _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
+    _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
+    _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
+)
+
+// Register spill slots used across call_go in _asm_OP_struct_field.
+var (
+    _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
+    _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
+    _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
+    _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
+    _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
+)
+
+var (
+    _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
+    _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
+    _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
+)
+
+var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)
+
+var (
+    _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save dismatched type
+    _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save dismatched position
+    _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save skip return pc
+)
+
+// _Assembler compiles a decoder _Program into executable machine code.
+type _Assembler struct {
+    jit.BaseAssembler
+    p _Program    // instruction sequence to compile
+    name string   // suffix for the generated function's name
+}
+
+// newAssembler returns an assembler initialized with the given program.
+func newAssembler(p _Program) *_Assembler {
+    a := new(_Assembler)
+    return a.Init(p)
+}
+
+/** Assembler Interface **/
+
+// Load assembles the program (if not already done) and returns the generated
+// decoder entry point as a _Decoder.
+func (self *_Assembler) Load() _Decoder {
+    return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
+}
+
+// Init stores the program and registers compile as the lazy build callback.
+func (self *_Assembler) Init(p _Program) *_Assembler {
+    self.p = p
+    self.BaseAssembler.Init(self.compile)
+    return self
+}
+
+// compile emits the full function: prologue, the translated instruction
+// stream, epilogue, then all shared out-of-line subroutines and error
+// handlers that the instruction bodies branch to.
+func (self *_Assembler) compile() {
+    self.prologue()
+    self.instrs()
+    self.epilogue()
+    self.copy_string()
+    self.escape_string()
+    self.escape_string_twice()
+    self.skip_one()
+    self.skip_key_value()
+    self.mismatch_error()
+    self.type_error()
+    self.field_error()
+    self.range_error()
+    self.stack_error()
+    self.base64_error()
+    self.parsing_error()
+}
+
+/** Assembler Stages **/
+
+// _OpFuncTab maps each opcode to the method that emits its native code;
+// nil entries are invalid opcodes (see instr).
+var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
+    _OP_any              : (*_Assembler)._asm_OP_any,
+    _OP_dyn              : (*_Assembler)._asm_OP_dyn,
+    _OP_str              : (*_Assembler)._asm_OP_str,
+    _OP_bin              : (*_Assembler)._asm_OP_bin,
+    _OP_bool             : (*_Assembler)._asm_OP_bool,
+    _OP_num              : (*_Assembler)._asm_OP_num,
+    _OP_i8               : (*_Assembler)._asm_OP_i8,
+    _OP_i16              : (*_Assembler)._asm_OP_i16,
+    _OP_i32              : (*_Assembler)._asm_OP_i32,
+    _OP_i64              : (*_Assembler)._asm_OP_i64,
+    _OP_u8               : (*_Assembler)._asm_OP_u8,
+    _OP_u16              : (*_Assembler)._asm_OP_u16,
+    _OP_u32              : (*_Assembler)._asm_OP_u32,
+    _OP_u64              : (*_Assembler)._asm_OP_u64,
+    _OP_f32              : (*_Assembler)._asm_OP_f32,
+    _OP_f64              : (*_Assembler)._asm_OP_f64,
+    _OP_unquote          : (*_Assembler)._asm_OP_unquote,
+    _OP_nil_1            : (*_Assembler)._asm_OP_nil_1,
+    _OP_nil_2            : (*_Assembler)._asm_OP_nil_2,
+    _OP_nil_3            : (*_Assembler)._asm_OP_nil_3,
+    _OP_deref            : (*_Assembler)._asm_OP_deref,
+    _OP_index            : (*_Assembler)._asm_OP_index,
+    _OP_is_null          : (*_Assembler)._asm_OP_is_null,
+    _OP_is_null_quote    : (*_Assembler)._asm_OP_is_null_quote,
+    _OP_map_init         : (*_Assembler)._asm_OP_map_init,
+    _OP_map_key_i8       : (*_Assembler)._asm_OP_map_key_i8,
+    _OP_map_key_i16      : (*_Assembler)._asm_OP_map_key_i16,
+    _OP_map_key_i32      : (*_Assembler)._asm_OP_map_key_i32,
+    _OP_map_key_i64      : (*_Assembler)._asm_OP_map_key_i64,
+    _OP_map_key_u8       : (*_Assembler)._asm_OP_map_key_u8,
+    _OP_map_key_u16      : (*_Assembler)._asm_OP_map_key_u16,
+    _OP_map_key_u32      : (*_Assembler)._asm_OP_map_key_u32,
+    _OP_map_key_u64      : (*_Assembler)._asm_OP_map_key_u64,
+    _OP_map_key_f32      : (*_Assembler)._asm_OP_map_key_f32,
+    _OP_map_key_f64      : (*_Assembler)._asm_OP_map_key_f64,
+    _OP_map_key_str      : (*_Assembler)._asm_OP_map_key_str,
+    _OP_map_key_utext    : (*_Assembler)._asm_OP_map_key_utext,
+    _OP_map_key_utext_p  : (*_Assembler)._asm_OP_map_key_utext_p,
+    _OP_array_skip       : (*_Assembler)._asm_OP_array_skip,
+    _OP_array_clear      : (*_Assembler)._asm_OP_array_clear,
+    _OP_array_clear_p    : (*_Assembler)._asm_OP_array_clear_p,
+    _OP_slice_init       : (*_Assembler)._asm_OP_slice_init,
+    _OP_slice_append     : (*_Assembler)._asm_OP_slice_append,
+    _OP_object_skip      : (*_Assembler)._asm_OP_object_skip,
+    _OP_object_next      : (*_Assembler)._asm_OP_object_next,
+    _OP_struct_field     : (*_Assembler)._asm_OP_struct_field,
+    _OP_unmarshal        : (*_Assembler)._asm_OP_unmarshal,
+    _OP_unmarshal_p      : (*_Assembler)._asm_OP_unmarshal_p,
+    _OP_unmarshal_text   : (*_Assembler)._asm_OP_unmarshal_text,
+    _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
+    _OP_lspace           : (*_Assembler)._asm_OP_lspace,
+    _OP_match_char       : (*_Assembler)._asm_OP_match_char,
+    _OP_check_char       : (*_Assembler)._asm_OP_check_char,
+    _OP_load             : (*_Assembler)._asm_OP_load,
+    _OP_save             : (*_Assembler)._asm_OP_save,
+    _OP_drop             : (*_Assembler)._asm_OP_drop,
+    _OP_drop_2           : (*_Assembler)._asm_OP_drop_2,
+    _OP_recurse          : (*_Assembler)._asm_OP_recurse,
+    _OP_goto             : (*_Assembler)._asm_OP_goto,
+    _OP_switch           : (*_Assembler)._asm_OP_switch,
+    _OP_check_char_0     : (*_Assembler)._asm_OP_check_char_0,
+    _OP_dismatch_err     : (*_Assembler)._asm_OP_dismatch_err,
+    _OP_go_skip          : (*_Assembler)._asm_OP_go_skip,
+    _OP_add              : (*_Assembler)._asm_OP_add,
+    _OP_check_empty      : (*_Assembler)._asm_OP_check_empty,
+}
+
+// instr dispatches one instruction to its emitter via _OpFuncTab, panicking
+// on opcodes with no registered handler.
+func (self *_Assembler) instr(v *_Instr) {
+    fn := _OpFuncTab[v.op()]
+    if fn == nil {
+        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
+    }
+    fn(self, v)
+}
+
+// instrs emits every instruction of the program, marking each index so
+// branch targets can be resolved. Note v is the loop's copy; &v is passed
+// deliberately rather than &self.p[i].
+func (self *_Assembler) instrs() {
+    for i, v := range self.p {
+        self.Mark(i)
+        self.instr(&v)
+        self.debug_instr(i, &v)
+    }
+}
+
+// epilogue emits the success path (checking for a deferred mismatch error)
+// followed by the shared _error exit that stores rc/et/ep and unwinds the
+// frame. Error handlers jump to _LB_error with ET/EP already set.
+func (self *_Assembler) epilogue() {
+    self.Mark(len(self.p))
+    self.Emit("XORL", _EP, _EP)                     // XORL EP, EP
+    self.Emit("MOVQ", _VAR_et, _ET)                 // MOVQ VAR_et, ET
+    self.Emit("TESTQ", _ET, _ET)                    // TESTQ ET, ET
+    self.Sjmp("JNZ", _LB_mismatch_error)            // JNZ _LB_mismatch_error
+    self.Link(_LB_error)                            // _error:
+    self.Emit("MOVQ", _IC, _RET_rc)                 // MOVQ IC, rc
+    self.Emit("MOVQ", _ET, _RET_et)                 // MOVQ ET, et
+    self.Emit("MOVQ", _EP, _RET_ep)                 // MOVQ EP, ep
+    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // MOVQ _FP_offs(SP), BP
+    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // ADDQ $_FP_size, SP
+    self.Emit("RET")                                // RET
+}
+
+// prologue allocates the frame, saves BP, loads the pinned state registers
+// from the arguments, points the digit buffer at the stack area, and clears
+// the deferred-mismatch-error slot.
+func (self *_Assembler) prologue() {
+    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)       // SUBQ $_FP_size, SP
+    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))  // MOVQ BP, _FP_offs(SP)
+    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)  // LEAQ _FP_offs(SP), BP
+    self.Emit("MOVQ", _ARG_sp, _IP)                 // MOVQ s.p<>+0(FP), IP
+    self.Emit("MOVQ", _ARG_sl, _IL)                 // MOVQ s.l<>+8(FP), IL
+    self.Emit("MOVQ", _ARG_ic, _IC)                 // MOVQ ic<>+16(FP), IC
+    self.Emit("MOVQ", _ARG_vp, _VP)                 // MOVQ vp<>+24(FP), VP
+    self.Emit("MOVQ", _ARG_sb, _ST)                 // MOVQ sb<>+32(FP), ST
+    // initialize digital buffer first
+    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc)    // MOVQ $_MaxDigitNums, ss.Dcap
+    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)           // LEAQ _DbufOffset(ST), AX
+    self.Emit("MOVQ", _AX, _VAR_st_Db)                          // MOVQ AX, ss.Dbuf
+    self.Emit("XORL", _AX, _AX)                                 // XORL AX, AX
+    self.Emit("MOVQ", _AX, _VAR_et)                          // MOVQ AX, _VAR_et (clear mismatch error)
+}
+
+/** Function Calling Helpers **/
+
+// _REG_go lists the pinned state registers that must be spilled around
+// calls into Go code (see save/load and call_go).
+var _REG_go = []obj.Addr {
+    _ST,
+    _VP,
+    _IP,
+    _IL,
+    _IC,
+}
+
+// save spills each register in r into the register-save area of the frame
+// (8 bytes per slot starting at _FP_fargs). It panics if more registers are
+// passed than the _FP_saves area can hold.
+func (self *_Assembler) save(r ...obj.Addr) {
+    for i, v := range r {
+        if i > _FP_saves / 8 - 1 {
+            panic("too many registers to save")
+        } else {
+            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
+        }
+    }
+}
+
+// load restores each register in r from the register-save area of the frame,
+// mirroring save (same slot layout). It panics if more registers are passed
+// than the _FP_saves area can hold.
+func (self *_Assembler) load(r ...obj.Addr) {
+    for i, v := range r {
+        if i > _FP_saves / 8 - 1 {
+            panic("too many registers to load")
+        } else {
+            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
+        }
+    }
+}
+
+// call loads the target address into AX and performs an indirect CALL
+// through it. Note: AX is clobbered.
+func (self *_Assembler) call(fn obj.Addr) {
+    self.Emit("MOVQ", fn, _AX)  // MOVQ ${fn}, AX
+    self.Rjmp("CALL", _AX)      // CALL AX
+}
+
+// call_go calls a Go function, saving and restoring the pinned decoder
+// registers (_REG_go) around the call since Go code may clobber them.
+func (self *_Assembler) call_go(fn obj.Addr) {
+    self.save(_REG_go...)   // SAVE $REG_go
+    self.call(fn)           // CALL ${fn}
+    self.load(_REG_go...)   // LOAD $REG_go
+}
+
+// call_sf calls a native skip-function with the register-based ABI:
+// DI = &s (source string), SI = &ic (cursor, synced through the stack slot),
+// DX = &fsm (state machine inside the decoder stack), CX = fv flags.
+// The cursor is written back to IC after the call.
+func (self *_Assembler) call_sf(fn obj.Addr) {
+    self.Emit("LEAQ", _ARG_s, _DI)                      // LEAQ s<>+0(FP), DI
+    self.Emit("MOVQ", _IC, _ARG_ic)                     // MOVQ IC, ic<>+16(FP)
+    self.Emit("LEAQ", _ARG_ic, _SI)                     // LEAQ ic<>+16(FP), SI
+    self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX)    // LEAQ _FsmOffset(ST), DX
+    self.Emit("MOVQ", _ARG_fv, _CX)
+    self.call(fn)                                       // CALL ${fn}
+    self.Emit("MOVQ", _ARG_ic, _IC)                     // MOVQ ic<>+16(FP), IC
+}
+
+// call_vf calls a native value-parsing function with the register-based ABI:
+// DI = &s (source string), SI = &ic (cursor, synced through the stack slot),
+// DX = &st (parsed-value state on the frame). The cursor is written back to
+// IC after the call.
+func (self *_Assembler) call_vf(fn obj.Addr) {
+    self.Emit("LEAQ", _ARG_s, _DI)      // LEAQ s<>+0(FP), DI
+    self.Emit("MOVQ", _IC, _ARG_ic)     // MOVQ IC, ic<>+16(FP)
+    self.Emit("LEAQ", _ARG_ic, _SI)     // LEAQ ic<>+16(FP), SI
+    self.Emit("LEAQ", _VAR_st, _DX)     // LEAQ st, DX
+    self.call(fn)                       // CALL ${fn}
+    self.Emit("MOVQ", _ARG_ic, _IC)     // MOVQ ic<>+16(FP), IC
+}
+
+/** Assembler Error Handlers **/
+
+// Go function addresses used by the error handlers below.
+var (
+    _F_convT64        = jit.Func(convT64)
+    _F_error_wrap     = jit.Func(error_wrap)
+    _F_error_type     = jit.Func(error_type)
+    _F_error_field    = jit.Func(error_field)
+    _F_error_value    = jit.Func(error_value)
+    _F_error_mismatch = jit.Func(error_mismatch)
+)
+
+// Pre-resolved itab/type pairs for the fixed-width numeric types, used when
+// reporting range errors for those types.
+var (
+    _I_int8    , _T_int8    = rtype(reflect.TypeOf(int8(0)))
+    _I_int16   , _T_int16   = rtype(reflect.TypeOf(int16(0)))
+    _I_int32   , _T_int32   = rtype(reflect.TypeOf(int32(0)))
+    _I_uint8   , _T_uint8   = rtype(reflect.TypeOf(uint8(0)))
+    _I_uint16  , _T_uint16  = rtype(reflect.TypeOf(uint16(0)))
+    _I_uint32  , _T_uint32  = rtype(reflect.TypeOf(uint32(0)))
+    _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
+)
+
+// error interface type and the itab for base64.CorruptInputError.
+var (
+    _T_error                    = rt.UnpackType(errorType)
+    _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
+)
+
+// Stack-overflow sentinel address and itabs for the error types raised by
+// stack_error and mismatch_error.
+var (
+    _V_stackOverflow              = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
+    _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
+    _I_json_MismatchTypeError     = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
+)
+
+// type_error emits the _type_error handler: it wraps the type currently in
+// ET via error_type and jumps to the common _error exit with the resulting
+// (ET, EP) error pair.
+func (self *_Assembler) type_error() {
+    self.Link(_LB_type_error)                   // _type_error:
+    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0))     // MOVQ    ET, (SP)
+    self.call_go(_F_error_type)                 // CALL_GO error_type
+    self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET)     // MOVQ    8(SP), ET
+    self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP)    // MOVQ    16(SP), EP
+    self.Sjmp("JMP" , _LB_error)                // JMP     _error
+}
+
+
+// mismatch_error emits the _mismatch_error handler: if the stored error type
+// is already the MismatchTypeError itab it is returned as-is; otherwise
+// error_mismatch is called with the source string, the recorded position and
+// the recorded expected type to build the error.
+func (self *_Assembler) mismatch_error() {
+    self.Link(_LB_mismatch_error)                     // _mismatch_error:
+    self.Emit("MOVQ", _VAR_et, _ET)                   // MOVQ _VAR_et, ET
+    self.Emit("MOVQ", _VAR_ic, _EP)                   // MOVQ _VAR_ic, EP
+    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
+    self.Emit("CMPQ", _ET, _AX)                       // CMPQ ET, AX
+    self.Sjmp("JE"  , _LB_error)                      // JE _LB_error
+    self.Emit("MOVQ", _ARG_sp, _AX)
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ    AX, (SP)
+    self.Emit("MOVQ", _ARG_sl, _CX)
+    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))     // MOVQ    CX, 8(SP)
+    self.Emit("MOVQ", _VAR_ic, _AX)
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ    AX, 16(SP)
+    self.Emit("MOVQ", _VAR_et, _CX)
+    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))    // MOVQ    CX, 24(SP)
+    self.call_go(_F_error_mismatch)             // CALL_GO error_mismatch
+    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)    // MOVQ    32(SP), ET
+    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)    // MOVQ    40(SP), EP
+    self.Sjmp("JMP" , _LB_error)                // JMP     _error
+}
+
+// _asm_OP_dismatch_err records a type mismatch without aborting: it saves the
+// current cursor and the expected type into _VAR_ic/_VAR_et so the epilogue
+// can raise a MismatchTypeError after decoding finishes.
+func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
+    self.Emit("MOVQ", _IC, _VAR_ic)  
+    self.Emit("MOVQ", jit.Type(p.vt()), _ET)       
+    self.Emit("MOVQ", _ET, _VAR_et)
+}
+
+// _asm_OP_go_skip stores a return address (resolved via Xref to branch
+// target p.vi()) in _VAR_pc, then jumps to the shared _skip_one routine,
+// which skips the current value and jumps back through _VAR_pc.
+func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Xref(p.vi(), 4)
+    self.Emit("MOVQ", _R9, _VAR_pc)
+    self.Sjmp("JMP"  , _LB_skip_one)            // JMP     _skip_one
+}
+
+// skip_one emits the shared _skip_one routine: restore the cursor from
+// _VAR_ic, skip a single JSON value with the native skip_one, report a
+// parsing error on a negative result, then jump back through _VAR_pc.
+func (self *_Assembler) skip_one() {
+    self.Link(_LB_skip_one)                     // _skip:
+    self.Emit("MOVQ", _VAR_ic, _IC)             // MOVQ    _VAR_ic, IC
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+    self.Emit("MOVQ" , _VAR_pc, _R9)            // MOVQ    pc, R9
+    self.Rjmp("JMP"  , _R9)                     // JMP     (R9)
+}
+
+
+// skip_key_value emits the shared _skip_key_value routine: skip an object
+// key, match the ':' separator (with whitespace skipping on both sides),
+// skip the value, then jump back through the return address in _VAR_pc.
+func (self *_Assembler) skip_key_value() {
+    self.Link(_LB_skip_key_value)               // _skip:
+    // skip the key
+    self.Emit("MOVQ", _VAR_ic, _IC)             // MOVQ    _VAR_ic, IC
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+    // match char ':'
+    self.lspace("_global_1")
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
+    self.Sjmp("JNE"  , _LB_parsing_error_v)     // JNE     _parse_error_v
+    self.Emit("ADDQ", jit.Imm(1), _IC)          // ADDQ    $1, IC
+    self.lspace("_global_2")
+    // skip the value
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+    // jump back to specified address
+    self.Emit("MOVQ" , _VAR_pc, _R9)            // MOVQ    pc, R9
+    self.Rjmp("JMP"  , _R9)                     // JMP     (R9)
+}
+
+// field_error emits the _field_error handler: it passes the field name held
+// in the sv string variable to error_field and exits through _error with the
+// resulting (ET, EP) pair.
+func (self *_Assembler) field_error() {
+    self.Link(_LB_field_error)                  // _field_error:
+    self.Emit("MOVOU", _VAR_sv, _X0)            // MOVOU   sv, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))    // MOVOU   X0, (SP)
+    self.call_go(_F_error_field)                // CALL_GO error_field
+    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _ET)   // MOVQ    16(SP), ET
+    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _EP)   // MOVQ    24(SP), EP
+    self.Sjmp("JMP"  , _LB_error)               // JMP     _error
+}
+
+// range_error emits the _range_error handler: it slices the offending source
+// text (from st.Ep to the cursor), passes it together with the (ET, EP)
+// type/pointer pair to error_value, and exits through _error.
+func (self *_Assembler) range_error() {
+    self.Link(_LB_range_error)                  // _range_error:
+    self.slice_from(_VAR_st_Ep, 0)              // SLICE   st.Ep, $0
+    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0))     // MOVQ    DI, (SP)
+    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8))     // MOVQ    SI, 8(SP)
+    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16))    // MOVQ    ET, 16(SP)
+    self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24))    // MOVQ    EP, 24(SP)
+    self.call_go(_F_error_value)                // CALL_GO error_value
+    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)    // MOVQ    32(SP), ET
+    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)    // MOVQ    40(SP), EP
+    self.Sjmp("JMP" , _LB_error)                // JMP     _error
+}
+
+// stack_error emits the _stack_error handler: it loads the canned
+// stackOverflow value and the json.UnsupportedValueError itab into (EP, ET)
+// and exits through _error.
+func (self *_Assembler) stack_error() {
+    self.Link(_LB_stack_error)                              // _stack_error:
+    self.Emit("MOVQ", _V_stackOverflow, _EP)                // MOVQ ${_V_stackOverflow}, EP
+    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)   // MOVQ ${_I_json_UnsupportedValueError}, ET
+    self.Sjmp("JMP" , _LB_error)                            // JMP  _error
+}
+
+// base64_error emits the _base64_error handler: it converts the negative
+// return code in AX into the offending offset (-AX - 1), boxes it via
+// convT64, and raises it as a base64.CorruptInputError through _error.
+func (self *_Assembler) base64_error() {
+    self.Link(_LB_base64_error)
+    self.Emit("NEGQ", _AX)                                  // NEGQ    AX
+    self.Emit("SUBQ", jit.Imm(1), _AX)                      // SUBQ    $1, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                 // MOVQ    AX, (SP)
+    self.call_go(_F_convT64)                                // CALL_GO convT64
+    self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP)                 // MOVQ    8(SP), EP
+    self.Emit("MOVQ", _I_base64_CorruptInputError, _ET)     // MOVQ    ${itab(base64.CorruptInputError)}, ET
+    self.Sjmp("JMP" , _LB_error)                            // JMP     _error
+}
+
+// parsing_error emits the cluster of parsing-error entry points (_eof_error,
+// _unquote_error, _parsing_error_v, _char_{m3..4}_error, _im_error) that all
+// funnel into _parsing_error, which wraps the source string, final cursor
+// position and error code via error_wrap and exits through _error. The
+// _char_N_error labels fall through each other, adding $1 to IC per level,
+// to position the cursor on the offending byte.
+func (self *_Assembler) parsing_error() {
+    self.Link(_LB_eof_error)                                            // _eof_error:
+    self.Emit("MOVQ" , _IL, _IC)                                        // MOVQ    IL, IC
+    self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP)              // MOVL    ${types.ERR_EOF}, EP
+    self.Sjmp("JMP"  , _LB_parsing_error)                               // JMP     _parsing_error
+    self.Link(_LB_unquote_error)                                        // _unquote_error:
+    self.Emit("SUBQ" , _VAR_sr, _SI)                                    // SUBQ    sr, SI
+    self.Emit("SUBQ" , _SI, _IC)                                        // SUBQ    SI, IC
+    self.Link(_LB_parsing_error_v)                                      // _parsing_error_v:
+    self.Emit("MOVQ" , _AX, _EP)                                        // MOVQ    AX, EP
+    self.Emit("NEGQ" , _EP)                                             // NEGQ    EP
+    self.Sjmp("JMP"  , _LB_parsing_error)                               // JMP     _parsing_error
+    self.Link(_LB_char_m3_error)                                        // _char_m3_error:
+    self.Emit("SUBQ" , jit.Imm(1), _IC)                                 // SUBQ    $1, IC
+    self.Link(_LB_char_m2_error)                                        // _char_m2_error:
+    self.Emit("SUBQ" , jit.Imm(2), _IC)                                 // SUBQ    $2, IC
+    self.Sjmp("JMP"  , _LB_char_0_error)                                // JMP     _char_0_error
+    self.Link(_LB_im_error)                                             // _im_error:
+    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0))                    // CMPB    CX, (IP)(IC)
+    self.Sjmp("JNE"  , _LB_char_0_error)                                // JNE     _char_0_error
+    self.Emit("SHRL" , jit.Imm(8), _CX)                                 // SHRL    $8, CX
+    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1))                    // CMPB    CX, 1(IP)(IC)
+    self.Sjmp("JNE"  , _LB_char_1_error)                                // JNE     _char_1_error
+    self.Emit("SHRL" , jit.Imm(8), _CX)                                 // SHRL    $8, CX
+    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2))                    // CMPB    CX, 2(IP)(IC)
+    self.Sjmp("JNE"  , _LB_char_2_error)                                // JNE     _char_2_error
+    self.Sjmp("JMP"  , _LB_char_3_error)                                // JMP     _char_3_error
+    self.Link(_LB_char_4_error)                                         // _char_4_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_3_error)                                         // _char_3_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_2_error)                                         // _char_2_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_1_error)                                         // _char_1_error:
+    self.Emit("ADDQ" , jit.Imm(1), _IC)                                 // ADDQ    $1, IC
+    self.Link(_LB_char_0_error)                                         // _char_0_error:
+    self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP)     // MOVL    ${types.ERR_INVALID_CHAR}, EP
+    self.Link(_LB_parsing_error)                                        // _parsing_error:
+    self.Emit("MOVOU", _ARG_s, _X0)                                     // MOVOU   s, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))                            // MOVOU   X0, (SP)
+    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16))                           // MOVQ    IC, 16(SP)
+    self.Emit("MOVQ" , _EP, jit.Ptr(_SP, 24))                           // MOVQ    EP, 24(SP)
+    self.call_go(_F_error_wrap)                                         // CALL_GO error_wrap
+    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)                           // MOVQ    32(SP), ET
+    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)                           // MOVQ    40(SP), EP
+    self.Sjmp("JMP"  , _LB_error)                                       // JMP     _error
+}
+
+/** Memory Management Routines **/
+
+// byte type descriptor and the runtime allocator, used by malloc/valloc.
+var (
+    _T_byte     = jit.Type(byteType)
+    _F_mallocgc = jit.Func(mallocgc)
+)
+
+// malloc emits a call to runtime.mallocgc(nb, byteType, false) — a
+// pointer-free (noscan) allocation of nb bytes — and stores the returned
+// pointer into ret.
+func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) {
+    self.Emit("XORL", _AX, _AX)                 // XORL    AX, AX
+    self.Emit("MOVQ", _T_byte, _CX)             // MOVQ    ${type(byte)}, CX
+    self.Emit("MOVQ", nb, jit.Ptr(_SP, 0))      // MOVQ    ${nb}, (SP)
+    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))     // MOVQ    CX, 8(SP)
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ    AX, 16(SP)
+    self.call_go(_F_mallocgc)                   // CALL_GO mallocgc
+    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)    // MOVQ    24(SP), ${ret}
+}
+
+// valloc emits a call to runtime.mallocgc(vt.Size(), vt, true) — a typed,
+// zeroed allocation of one value of type vt — and stores the returned
+// pointer into ret.
+func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
+    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)   // MOVQ    ${vt.Size()}, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))             // MOVQ    AX, (SP)
+    self.Emit("MOVQ", jit.Type(vt), _AX)                // MOVQ    ${vt}, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))             // MOVQ    AX, 8(SP)
+    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16))     // MOVB    $1, 16(SP)
+    self.call_go(_F_mallocgc)                           // CALL_GO mallocgc
+    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)            // MOVQ    24(SP), ${ret}
+}
+
+// vfollow dereferences the pointer at (VP), allocating a fresh value of type
+// vt (with a write barrier on the store) if the pointer is nil, then makes
+// VP point at the pointee.
+func (self *_Assembler) vfollow(vt reflect.Type) {
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)    // MOVQ   (VP), AX
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ  AX, AX
+    self.Sjmp("JNZ"  , "_end_{n}")              // JNZ    _end_{n}
+    self.valloc(vt, _AX)                        // VALLOC ${vt}, AX
+    self.WritePtrAX(1, jit.Ptr(_VP, 0), false)    // MOVQ   AX, (VP)
+    self.Link("_end_{n}")                       // _end_{n}:
+    self.Emit("MOVQ" , _AX, _VP)                // MOVQ   AX, VP
+}
+
+/** Value Parsing Routines **/
+
+// Entry addresses of the native value parsers (string / number /
+// signed / unsigned), invoked through call_vf.
+var (
+    _F_vstring   = jit.Imm(int64(native.S_vstring))
+    _F_vnumber   = jit.Imm(int64(native.S_vnumber))
+    _F_vsigned   = jit.Imm(int64(native.S_vsigned))
+    _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
+)
+
+// check_err tests the parser result in st.Vt (negative means error). With a
+// nil vt the error aborts through _parsing_error_v. Otherwise the mismatch
+// is recorded (expected type and position — BP holds the pre-parse cursor
+// saved by the caller) and the offending value is skipped: via
+// _skip_key_value when pin2 is a branch-table target (pin2 != -1), else via
+// _skip_one returning to the label pin; decoding then continues.
+func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ" , _VAR_st_Vt, _AX)         // MOVQ st.Vt, AX
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
+    // try to skip the value
+    if vt != nil {
+        self.Sjmp("JNS" , "_check_err_{n}")        // JNS  _check_err_{n}
+        self.Emit("MOVQ", jit.Type(vt), _ET)         
+        self.Emit("MOVQ", _ET, _VAR_et)
+        if pin2 != -1 {
+            self.Emit("SUBQ", jit.Imm(1), _BP)
+            self.Emit("MOVQ", _BP, _VAR_ic)
+            self.Byte(0x4c  , 0x8d, 0x0d)         // LEAQ (PC), R9
+            self.Xref(pin2, 4)
+            self.Emit("MOVQ", _R9, _VAR_pc)
+            self.Sjmp("JMP" , _LB_skip_key_value)
+        } else {
+            self.Emit("MOVQ", _BP, _VAR_ic)
+            self.Byte(0x4c  , 0x8d, 0x0d)         // LEAQ (PC), R9
+            self.Sref(pin, 4)
+            self.Emit("MOVQ", _R9, _VAR_pc)
+            self.Sjmp("JMP" , _LB_skip_one)
+        }
+        self.Link("_check_err_{n}")
+    } else {
+        self.Sjmp("JS"   , _LB_parsing_error_v)     // JS   _parsing_error_v
+    }
+}
+
+// check_eof verifies that at least d more bytes remain in the input,
+// branching to _eof_error otherwise. The d == 1 case avoids the LEAQ by
+// comparing the cursor directly against the length.
+func (self *_Assembler) check_eof(d int64) {
+    if d == 1 {
+        self.Emit("CMPQ", _IC, _IL)         // CMPQ IC, IL
+        self.Sjmp("JAE" , _LB_eof_error)    // JAE  _eof_error
+    } else {
+        self.Emit("LEAQ", jit.Ptr(_IC, d), _AX)     // LEAQ ${d}(IC), AX
+        self.Emit("CMPQ", _AX, _IL)                 // CMPQ AX, IL
+        self.Sjmp("JA"  , _LB_eof_error)            // JA   _eof_error
+    }
+}
+
+// parse_string parses a JSON string with the native vstring parser; the fv
+// flags are passed in CX (vstring takes a validate flag as its last param).
+// Errors abort directly (no mismatch recovery).
+func (self *_Assembler) parse_string() {    // parse_string has a validate flag params in the last
+    self.Emit("MOVQ", _ARG_fv, _CX)
+    self.call_vf(_F_vstring)
+    self.check_err(nil, "", -1)
+}
+
+// parse_number parses a JSON number with the native vnumber parser, saving
+// the pre-parse cursor in BP so check_err can record/skip on mismatch.
+func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ", _IC, _BP)
+    self.call_vf(_F_vnumber)                               // call  vnumber
+    self.check_err(vt, pin, pin2)
+}
+
+// parse_signed parses a signed integer with the native vsigned parser,
+// saving the pre-parse cursor in BP for mismatch recovery in check_err.
+func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ", _IC, _BP)
+    self.call_vf(_F_vsigned)
+    self.check_err(vt, pin, pin2)
+}
+
+// parse_unsigned parses an unsigned integer with the native vunsigned
+// parser, saving the pre-parse cursor in BP for mismatch recovery.
+func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
+    self.Emit("MOVQ", _IC, _BP)
+    self.call_vf(_F_vunsigned)
+    self.check_err(vt, pin, pin2)
+}
+
+// copy_string emits the shared _copy_string routine, used when the
+// copy-string flag is set: it allocates a fresh buffer and memmoves the
+// input slice into it so the result does not alias the source.
+// ABI: pointer in DI, size in SI, return address in R9.
+func (self *_Assembler) copy_string() {
+    self.Link("_copy_string")
+    self.Emit("MOVQ", _DI, _VAR_bs_p)
+    self.Emit("MOVQ", _SI, _VAR_bs_n)
+    self.Emit("MOVQ", _R9, _VAR_bs_LR)
+    self.malloc(_SI, _AX)                              
+    self.Emit("MOVQ", _AX, _VAR_sv_p)                    
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))                    
+    self.Emit("MOVQ", _VAR_bs_p, _DI)
+    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))
+    self.Emit("MOVQ", _VAR_bs_n, _SI)
+    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16))
+    self.call_go(_F_memmove)
+    self.Emit("MOVQ", _VAR_sv_p, _DI)
+    self.Emit("MOVQ", _VAR_bs_n, _SI)
+    self.Emit("MOVQ", _VAR_bs_LR, _R9)
+    self.Rjmp("JMP", _R9)
+}
+
+// escape_string emits the shared _escape_string routine: it allocates a
+// destination buffer and unquotes the escaped input into it with the native
+// unquote function, honoring the disable-unicode-replacement flag in fv.
+// On success DI/SI hold the unquoted string; errors go to _unquote_error.
+// ABI: pointer in DI, size in SI, return address in R9.
+func (self *_Assembler) escape_string() {
+    self.Link("_escape_string")
+    self.Emit("MOVQ" , _DI, _VAR_bs_p)
+    self.Emit("MOVQ" , _SI, _VAR_bs_n)
+    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
+    self.malloc(_SI, _DX)                                    // MALLOC SI, DX
+    self.Emit("MOVQ" , _DX, _VAR_sv_p)
+    self.Emit("MOVQ" , _VAR_bs_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)                                  
+    self.Emit("LEAQ" , _VAR_sr, _CX)                            // LEAQ   sr, CX
+    self.Emit("XORL" , _R8, _R8)                                // XORL   R8, R8
+    self.Emit("BTQ"  , jit.Imm(_F_disable_urc), _ARG_fv)        // BTQ    ${_F_disable_urc}, fv
+    self.Emit("SETCC", _R8)                                     // SETCC  R8
+    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8)   // SHLQ   ${types.B_UNICODE_REPLACE}, R8
+    self.call(_F_unquote)                                       // CALL   unquote
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)                                  // MOVQ   ${n}, SI
+    self.Emit("ADDQ" , jit.Imm(1), _SI)                         // ADDQ   $1, SI
+    self.Emit("TESTQ", _AX, _AX)                                // TESTQ  AX, AX
+    self.Sjmp("JS"   , _LB_unquote_error)                       // JS     _unquote_error
+    self.Emit("MOVQ" , _AX, _SI)
+    self.Emit("MOVQ" , _VAR_sv_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
+    self.Rjmp("JMP", _R9)
+}
+
+// escape_string_twice emits the shared _escape_string_twice routine: like
+// escape_string, but it calls unquote with F_DOUBLE_UNQUOTE so the input is
+// unescaped twice (for string-encoded strings, i.e. `"\"...\""`), again
+// honoring the disable-unicode-replacement flag from fv.
+// ABI: pointer in DI, size in SI, return address in R9.
+func (self *_Assembler) escape_string_twice() {
+    self.Link("_escape_string_twice")
+    self.Emit("MOVQ" , _DI, _VAR_bs_p)
+    self.Emit("MOVQ" , _SI, _VAR_bs_n)
+    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
+    self.malloc(_SI, _DX)                                        // MALLOC SI, DX
+    self.Emit("MOVQ" , _DX, _VAR_sv_p)
+    self.Emit("MOVQ" , _VAR_bs_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)        
+    self.Emit("LEAQ" , _VAR_sr, _CX)                                // LEAQ   sr, CX
+    self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)        // MOVL   ${types.F_DOUBLE_UNQUOTE}, R8
+    self.Emit("BTQ"  , jit.Imm(_F_disable_urc), _ARG_fv)            // BTQ    ${_F_disable_urc}, AX
+    self.Emit("XORL" , _AX, _AX)                                    // XORL   AX, AX
+    self.Emit("SETCC", _AX)                                         // SETCC  AX
+    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX)       // SHLQ   ${types.B_UNICODE_REPLACE}, AX
+    self.Emit("ORQ"  , _AX, _R8)                                    // ORQ    AX, R8
+    self.call(_F_unquote)                                           // CALL   unquote
+    self.Emit("MOVQ" , _VAR_bs_n, _SI)                              // MOVQ   ${n}, SI
+    self.Emit("ADDQ" , jit.Imm(3), _SI)                             // ADDQ   $3, SI
+    self.Emit("TESTQ", _AX, _AX)                                    // TESTQ  AX, AX
+    self.Sjmp("JS"   , _LB_unquote_error)                           // JS     _unquote_error
+    self.Emit("MOVQ" , _AX, _SI)
+    self.Emit("MOVQ" , _VAR_sv_p, _DI)
+    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
+    self.Rjmp("JMP", _R9)
+}
+
+/** Range Checking Routines **/
+
+// Addresses of the float32 bound values below, loaded by range_single.
+var (
+    _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
+    _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
+)
+
+// Heap-resident float32 bounds (±math.MaxFloat32); kept as pointers so their
+// addresses can be baked into the generated code as immediates.
+var (
+    _Vp_max_f32 = new(float32)
+    _Vp_min_f32 = new(float32)
+)
+
+// init fills in the float32 range-check bounds.
+func init() {
+    *_Vp_max_f32 = math.MaxFloat32
+    *_Vp_min_f32 = -math.MaxFloat32
+}
+
+// range_single converts the parsed double in st.Dv to float32 and branches
+// to _range_error (with float32 itab/type preloaded in ET/EP) if it exceeds
+// ±math.MaxFloat32. The converted value is left in X0.
+func (self *_Assembler) range_single() {
+    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)              // CVTSD2SS st.Dv, X0
+    self.Emit("MOVQ"    , _V_max_f32, _AX)              // MOVQ     _max_f32, AX
+    self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET)   // MOVQ     ${itab(float32)}, ET
+    self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP)   // MOVQ     ${type(float32)}, EP
+    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)         // UCOMISS  (AX), X0
+    self.Sjmp("JA"      , _LB_range_error)              // JA       _range_error
+    self.Emit("MOVQ"    , _V_min_f32, _AX)              // MOVQ     _min_f32, AX
+    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)         // UCOMISS  (AX), X0
+    self.Sjmp("JB"      , _LB_range_error)              // JB       _range_error
+}
+
+// range_signed checks that the parsed integer in st.Iv lies within [a, b],
+// branching to _range_error with the given itab/type in ET/EP otherwise.
+// The value is left in AX.
+func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
+    self.Emit("MOVQ", _VAR_st_Iv, _AX)      // MOVQ st.Iv, AX
+    self.Emit("MOVQ", jit.Gitab(i), _ET)    // MOVQ ${i}, ET
+    self.Emit("MOVQ", jit.Gtype(t), _EP)    // MOVQ ${t}, EP
+    self.Emit("CMPQ", _AX, jit.Imm(a))      // CMPQ AX, ${a}
+    self.Sjmp("JL"  , _LB_range_error)      // JL   _range_error
+    self.Emit("CMPQ", _AX, jit.Imm(b))      // CMPQ AX, ${b}
+    self.Sjmp("JG"  , _LB_range_error)      // JG   _range_error
+}
+
+// range_unsigned checks that the parsed integer in st.Iv is non-negative and
+// at most v, branching to _range_error with the given itab/type in ET/EP
+// otherwise. The value is left in AX.
+func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) {
+    self.Emit("MOVQ" , _VAR_st_Iv, _AX)         // MOVQ  st.Iv, AX
+    self.Emit("MOVQ" , jit.Gitab(i), _ET)       // MOVQ  ${i}, ET
+    self.Emit("MOVQ" , jit.Gtype(t), _EP)       // MOVQ  ${t}, EP
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
+    self.Sjmp("JS"   , _LB_range_error)         // JS    _range_error
+    self.Emit("CMPQ" , _AX, jit.Imm(int64(v)))  // CMPQ  AX, ${v}
+    self.Sjmp("JA"   , _LB_range_error)         // JA    _range_error
+}
+
+/** String Manipulating Routines **/
+
+// Entry address of the native unquote routine.
+var (
+    _F_unquote = jit.Imm(int64(native.S_unquote))
+)
+
+// slice_from loads the start offset p into SI and delegates to slice_from_r
+// to materialize the (DI, SI) pointer/length pair.
+func (self *_Assembler) slice_from(p obj.Addr, d int64) {
+    self.Emit("MOVQ", p, _SI)   // MOVQ    ${p}, SI
+    self.slice_from_r(_SI, d)   // SLICE_R SI, ${d}
+}
+
+// slice_from_r builds a slice of the input from offset ${p} up to IC+d:
+// DI = IP + p (start pointer), SI = IC - p + d (length). Clobbers p.
+func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
+    self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI)   // LEAQ (IP)(${p}), DI
+    self.Emit("NEGQ", p)                            // NEGQ ${p}
+    self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI)   // LEAQ d(IC)(${p}), SI
+}
+
+// unquote_once stores the just-parsed string into (p, n). If the string
+// contains escapes (st.Ep != -1) it is unescaped via _escape_string; if
+// copy is set and the copy-string flag is on, unescaped strings are copied
+// out of the input via _copy_string. When stack is false the pointer store
+// goes through a write barrier (WriteRecNotAX).
+func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
+    self.slice_from(_VAR_st_Iv, -1)                             // SLICE  st.Iv, $-1
+    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))                 // CMPQ   st.Ep, $-1
+    self.Sjmp("JE"   , "_noescape_{n}")                         // JE     _noescape_{n}
+    self.Byte(0x4c, 0x8d, 0x0d)                                 // LEAQ (PC), R9
+    self.Sref("_unquote_once_write_{n}", 4)
+    self.Sjmp("JMP" , "_escape_string")
+    self.Link("_noescape_{n}")                                  // _noescape_{n}:
+    if copy {
+        self.Emit("BTQ"  , jit.Imm(_F_copy_string), _ARG_fv)    
+        self.Sjmp("JNC", "_unquote_once_write_{n}")
+        self.Byte(0x4c, 0x8d, 0x0d)                             // LEAQ (PC), R9
+        self.Sref("_unquote_once_write_{n}", 4)
+        self.Sjmp("JMP", "_copy_string")
+    }
+    self.Link("_unquote_once_write_{n}")
+    self.Emit("MOVQ" , _SI, n)                                  // MOVQ   SI, ${n}
+    if stack {
+        self.Emit("MOVQ", _DI, p) 
+    } else {
+        self.WriteRecNotAX(10, _DI, p, false, false)
+    }
+}
+
+// unquote_twice stores a doubly-quoted string (`"\"...\""`) into (p, n): it
+// first validates the trailing `\"` pair (raising char errors at the exact
+// offending offset), slices off both quote layers, then either writes the
+// raw slice, copies it (copy-string flag), or unescapes it twice via
+// _escape_string_twice when inner escapes remain. When stack is false the
+// pointer store goes through a write barrier.
+func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
+    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))                     // CMPQ   st.Ep, $-1
+    self.Sjmp("JE"   , _LB_eof_error)                               // JE     _eof_error
+    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\'))     // CMPB   -3(IP)(IC), $'\\'
+    self.Sjmp("JNE"  , _LB_char_m3_error)                           // JNE    _char_m3_error
+    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))      // CMPB   -2(IP)(IC), $'"'
+    self.Sjmp("JNE"  , _LB_char_m2_error)                           // JNE    _char_m2_error
+    self.slice_from(_VAR_st_Iv, -3)                                 // SLICE  st.Iv, $-3
+    self.Emit("MOVQ" , _SI, _AX)                                    // MOVQ   SI, AX
+    self.Emit("ADDQ" , _VAR_st_Iv, _AX)                             // ADDQ   st.Iv, AX
+    self.Emit("CMPQ" , _VAR_st_Ep, _AX)                             // CMPQ   st.Ep, AX
+    self.Sjmp("JE"   , "_noescape_{n}")                             // JE     _noescape_{n}
+    self.Byte(0x4c, 0x8d, 0x0d)                                     // LEAQ (PC), R9
+    self.Sref("_unquote_twice_write_{n}", 4)
+    self.Sjmp("JMP" , "_escape_string_twice")
+    self.Link("_noescape_{n}")                                      // _noescape_{n}:
+    self.Emit("BTQ"  , jit.Imm(_F_copy_string), _ARG_fv)    
+    self.Sjmp("JNC", "_unquote_twice_write_{n}") 
+    self.Byte(0x4c, 0x8d, 0x0d)                                     // LEAQ (PC), R9
+    self.Sref("_unquote_twice_write_{n}", 4)
+    self.Sjmp("JMP", "_copy_string")
+    self.Link("_unquote_twice_write_{n}")
+    self.Emit("MOVQ" , _SI, n)                                      // MOVQ   SI, ${n}
+    if stack {
+        self.Emit("MOVQ", _DI, p) 
+    } else {
+        self.WriteRecNotAX(12, _DI, p, false, false)
+    }
+}
+
+/** Memory Clearing Routines **/
+
+// Runtime memory-clearing helpers: the HasPointers variant is required when
+// the cleared region may contain pointers (write-barrier aware).
+var (
+    _F_memclrHasPointers    = jit.Func(memclrHasPointers)
+    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
+)
+
+// mem_clear_fn emits a call to the appropriate runtime memclr: the
+// pointer-aware variant unless the region is known pointer-free.
+func (self *_Assembler) mem_clear_fn(ptrfree bool) {
+    if !ptrfree {
+        self.call_go(_F_memclrHasPointers)
+    } else {
+        self.call_go(_F_memclrNoHeapPointers)
+    }
+}
+
+// mem_clear_rem clears the remainder of the current value: it computes the
+// byte count as size plus the distance from VP to the address stored at the
+// top of the decoder stack, then calls the matching runtime memclr on
+// (VP, count).
+func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
+    self.Emit("MOVQ", jit.Imm(size), _CX)               // MOVQ    ${size}, CX
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ    (ST), AX
+    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX)     // MOVQ    (ST)(AX), AX
+    self.Emit("SUBQ", _VP, _AX)                         // SUBQ    VP, AX
+    self.Emit("ADDQ", _AX, _CX)                         // ADDQ    AX, CX
+    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0))             // MOVQ    VP, (SP)
+    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))             // MOVQ    CX, 8(SP)
+    self.mem_clear_fn(ptrfree)                          // CALL_GO memclr{Has,NoHeap}Pointers
+}
+
+/** Map Assigning Routines **/
+
+// Runtime map-assignment entry points (generic plus the fast variants).
+var (
+    _F_mapassign           = jit.Func(mapassign)
+    _F_mapassign_fast32    = jit.Func(mapassign_fast32)
+    _F_mapassign_faststr   = jit.Func(mapassign_faststr)
+    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
+)
+
+// Addresses of the json.Unmarshaler / encoding.TextUnmarshaler dispatch
+// helpers; resolved in init below.
+var (
+    _F_decodeJsonUnmarshaler obj.Addr
+    _F_decodeTextUnmarshaler obj.Addr
+)
+
+// init resolves the unmarshaler dispatch helpers to code addresses.
+func init() {
+    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
+    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
+}
+
+// mapaccess_ptr follows the element pointer after a map assignment when the
+// map stores its elements indirectly (pointer-to-elem buckets).
+func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
+    if rt.MapType(rt.UnpackType(t)).IndirectElem() {
+        self.vfollow(t.Elem())
+    }
+}
+
+// mapassign_std assigns into the map via the generic runtime.mapassign,
+// passing the address of the key value v in AX.
+func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
+    self.Emit("LEAQ", v, _AX)               // LEAQ      ${v}, AX
+    self.mapassign_call(t, _F_mapassign)    // MAPASSIGN ${t}, mapassign
+}
+
+// mapassign_str_fast assigns into a string-keyed map via the runtime's
+// mapassign_faststr fast path, with the key given as a (p, n) pair; VP is
+// updated to the element slot (following indirect elements if needed).
+func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
+    self.Emit("MOVQ", jit.Type(t), _AX)         // MOVQ    ${t}, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))     // MOVQ    AX, (SP)
+    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))     // MOVQ    VP, 8(SP)
+    self.Emit("MOVQ", p, jit.Ptr(_SP, 16))      // MOVQ    ${p}, 16(SP)
+    self.Emit("MOVQ", n, jit.Ptr(_SP, 24))      // MOVQ    ${n}, 24(SP)
+    self.call_go(_F_mapassign_faststr)          // CALL_GO ${fn}
+    self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP)    // MOVQ    32(SP), VP
+    self.mapaccess_ptr(t)
+}
+
+// mapassign_call invokes a map-assignment function with (maptype, map, key)
+// — the map is in VP and the key (pointer or value, depending on fn) is in
+// AX — then replaces VP with the returned element slot.
+func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) {
+    self.Emit("MOVQ", jit.Type(t), _SI)         // MOVQ    ${t}, SI
+    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0))     // MOVQ    SI, (SP)
+    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))     // MOVQ    VP, 8(SP)
+    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))    // MOVQ    AX, 16(SP)
+    self.call_go(fn)                            // CALL_GO ${fn}
+    self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP)    // MOVQ    24(SP), VP
+}
+
+// mapassign_fastx performs a fast-path map assignment and then follows the
+// element pointer if the map stores its elements indirectly.
+func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
+    self.mapassign_call(t, fn)
+    self.mapaccess_ptr(t)
+}
+
+// mapassign_utext handles map keys whose type implements
+// encoding.TextUnmarshaler: it allocates a key value, decodes the parsed
+// string in sv into it via decodeTextUnmarshaler (aborting through _error
+// on failure), and assigns it into the map — using the generic mapassign,
+// or the fast64ptr path when the key type itself is a pointer.
+func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
+    pv := false
+    vk := t.Key()
+    tk := t.Key()
+
+    /* deref pointer if needed */
+    if vk.Kind() == reflect.Ptr {
+        pv = true
+        vk = vk.Elem()
+    }
+
+    /* addressable value with pointer receiver */
+    if addressable {
+        pv = false
+        tk = reflect.PtrTo(tk)
+    }
+
+    /* allocate the key, and call the unmarshaler */
+    self.valloc(vk, _DI)                        // VALLOC  ${vk}, DI
+    // must spill vk pointer since next call_go may invoke GC
+    self.Emit("MOVQ" , _DI, _VAR_vk)
+    self.Emit("MOVQ" , jit.Type(tk), _AX)       // MOVQ    ${tk}, AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))    // MOVQ    AX, (SP)
+    self.Emit("MOVQ" , _DI, jit.Ptr(_SP, 8))    // MOVQ    DI, 8(SP)
+    self.Emit("MOVOU", _VAR_sv, _X0)            // MOVOU   sv, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16))   // MOVOU   X0, 16(SP)
+    self.call_go(_F_decodeTextUnmarshaler)      // CALL_GO decodeTextUnmarshaler
+    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)   // MOVQ    32(SP), ET
+    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)   // MOVQ    40(SP), EP
+    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
+    self.Sjmp("JNZ"  , _LB_error)               // JNZ     _error
+    self.Emit("MOVQ" , _VAR_vk, _AX)
+
+    /* select the correct assignment function */
+    if !pv {
+        self.mapassign_call(t, _F_mapassign)
+    } else {
+        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
+    }
+}
+
+/** External Unmarshaler Routines **/
+
+// Native subroutine entry points used by the external-unmarshaler routines.
+var (
+    _F_skip_one = jit.Imm(int64(native.S_skip_one))
+    _F_skip_number = jit.Imm(int64(native.S_skip_number))
+)
+
+// unmarshal_json skips one complete JSON value, records the skipped span into
+// the sv slot, and dispatches it to the type's json.Unmarshaler.
+func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
+    self.call_sf(_F_skip_one)                                   // CALL_SF   skip_one
+    self.Emit("TESTQ", _AX, _AX)                                // TESTQ     AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)                     // JS        _parse_error_v
+    self.slice_from_r(_AX, 0)                                   // SLICE_R   AX, $0
+    self.Emit("MOVQ" , _DI, _VAR_sv_p)                          // MOVQ      DI, sv.p
+    self.Emit("MOVQ" , _SI, _VAR_sv_n)                          // MOVQ      SI, sv.n
+    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref)     // UNMARSHAL json, ${t}, ${deref}
+}
+
+// unmarshal_text parses and unquotes a JSON string into sv, then dispatches
+// it to the type's encoding.TextUnmarshaler.
+func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
+    self.parse_string()                                         // PARSE     STRING
+    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)        // UNQUOTE   once, sv.p, sv.n
+    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref)     // UNMARSHAL text, ${t}, ${deref}
+}
+
+// unmarshal_func calls the generic unmarshaler dispatcher fn with
+// (type, value pointer, sv string). When deref is set and the field is a
+// pointer, a nil pointee is allocated first so the unmarshaler always gets a
+// valid target; any returned error aborts via _LB_error.
+func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
+    pt := t
+    vk := t.Kind()
+
+    /* allocate the field if needed */
+    if deref && vk == reflect.Ptr {
+        self.Emit("MOVQ" , _VP, _AX)                // MOVQ   VP, AX
+        self.Emit("MOVQ" , jit.Ptr(_AX, 0), _AX)    // MOVQ   (AX), AX
+        self.Emit("TESTQ", _AX, _AX)                // TESTQ  AX, AX
+        self.Sjmp("JNZ"  , "_deref_{n}")            // JNZ    _deref_{n}
+        self.valloc(t.Elem(), _AX)                  // VALLOC ${t.Elem()}, AX
+        self.WritePtrAX(3, jit.Ptr(_VP, 0), false)    // MOVQ   AX, (VP)
+        self.Link("_deref_{n}")                     // _deref_{n}:
+    }
+
+    /* set value type */
+    self.Emit("MOVQ", jit.Type(pt), _CX)        // MOVQ ${pt}, CX
+    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0))     // MOVQ CX, (SP)
+
+    /* set value pointer */
+    if deref && vk == reflect.Ptr {
+        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))     // MOVQ AX, 8(SP)
+    } else {
+        self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))     // MOVQ VP, 8(SP)
+    }
+
+    /* set the source string and call the unmarshaler */
+    self.Emit("MOVOU", _VAR_sv, _X0)            // MOVOU   sv, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16))   // MOVOU   X0, 16(SP)
+    self.call_go(fn)                            // CALL_GO ${fn}
+    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)   // MOVQ    32(SP), ET
+    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)   // MOVQ    40(SP), EP
+    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
+    self.Sjmp("JNZ"  , _LB_error)               // JNZ     _error
+}
+
+/** Dynamic Decoding Routine **/
+
+// Entry point of the dynamic (interface-driven) decoder; resolved in init
+// because jit.Func cannot be evaluated at package-variable initialization
+// order here.
+var (
+    _F_decodeTypedPointer obj.Addr
+)
+
+func init() {
+    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
+}
+
+// decode_dynamic emits a call to decodeTypedPointer for a value whose
+// concrete type (vt) and storage pointer (vp) are only known at run time.
+// On return it restores the input cursor (IC) and checks the error: a
+// MismatchTypeError is recorded into VAR_ic/VAR_et and decoding continues;
+// any other error jumps to _LB_error.
+func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
+    self.Emit("MOVQ" , _ARG_fv, _CX)            // MOVQ    fv, CX
+    self.Emit("MOVOU", _ARG_sp, _X0)            // MOVOU   sp, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))    // MOVOU   X0, (SP)
+    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16))   // MOVQ    IC, 16(SP)
+    self.Emit("MOVQ" , vt, jit.Ptr(_SP, 24))    // MOVQ    ${vt}, 24(SP)
+    self.Emit("MOVQ" , vp, jit.Ptr(_SP, 32))    // MOVQ    ${vp}, 32(SP)
+    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 40))   // MOVQ    ST, 40(SP)
+    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 48))   // MOVQ    CX, 48(SP)
+    self.call_go(_F_decodeTypedPointer)         // CALL_GO decodeTypedPointer
+    self.Emit("MOVQ" , jit.Ptr(_SP, 64), _ET)   // MOVQ    64(SP), ET
+    self.Emit("MOVQ" , jit.Ptr(_SP, 72), _EP)   // MOVQ    72(SP), EP
+    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _IC)   // MOVQ    56(SP), IC
+    self.Emit("TESTQ", _ET, _ET)                // TESTQ   ET, ET
+    self.Sjmp("JE", "_decode_dynamic_end_{n}")  // JE, _decode_dynamic_end_{n}
+    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
+    self.Emit("CMPQ",  _ET, _AX)                // CMPQ ET, AX
+    self.Sjmp("JNE" , _LB_error)                // JNE  LB_error
+    self.Emit("MOVQ", _EP, _VAR_ic)             // MOVQ EP, VAR_ic
+    self.Emit("MOVQ", _ET, _VAR_et)             // MOVQ ET, VAR_et
+    self.Link("_decode_dynamic_end_{n}")
+    
+}
+
+/** OpCode Assembler Functions **/
+
+// Go runtime helpers called from generated code.
+var (
+    _F_memequal         = jit.Func(memequal)
+    _F_memmove          = jit.Func(memmove)
+    _F_growslice        = jit.Func(growslice)
+    _F_makeslice        = jit.Func(makeslice)
+    _F_makemap_small    = jit.Func(makemap_small)
+    _F_mapassign_fast64 = jit.Func(mapassign_fast64)
+)
+
+// Native scanner subroutines.
+var (
+    _F_lspace  = jit.Imm(int64(native.S_lspace))
+    _F_strhash = jit.Imm(int64(caching.S_strhash))
+)
+
+// Local assembly subroutines (see the subroutine table elsewhere in this file).
+var (
+    _F_b64decode   = jit.Imm(int64(_subr__b64decode))
+    _F_decodeValue = jit.Imm(int64(_subr_decode_value))
+)
+
+var (
+    _F_skip_array  = jit.Imm(int64(native.S_skip_array))
+    _F_skip_object = jit.Imm(int64(native.S_skip_object))
+)
+
+// _Zero_Base is the address of the shared zero-length backing array, stored
+// into empty slices so they compare non-nil.
+var (
+    _F_FieldMap_GetCaseInsensitive obj.Addr
+    _Empty_Slice = make([]byte, 0)
+    _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
+)
+
+const (
+    _MODE_AVX2 = 1 << 2
+)
+
+// Field offsets inside caching.FieldEntry, used for direct memory access
+// from generated code.
+const (
+    _Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
+    _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
+    _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
+)
+
+const (
+    _Vk_Ptr       = int64(reflect.Ptr)
+    _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
+)
+
+func init() {
+    _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
+}
+
+// _asm_OP_any decodes into an interface{} slot at VP. If the interface
+// already holds a non-nil pointer value (and is not self-referential), it
+// decodes in place through decode_dynamic; otherwise it falls back to the
+// generic decodeValue subroutine.
+func (self *_Assembler) _asm_OP_any(_ *_Instr) {
+    self.Emit("MOVQ"   , jit.Ptr(_VP, 8), _CX)              // MOVQ    8(VP), CX
+    self.Emit("TESTQ"  , _CX, _CX)                          // TESTQ   CX, CX
+    self.Sjmp("JZ"     , "_decode_{n}")                     // JZ      _decode_{n}
+    self.Emit("CMPQ"   , _CX, _VP)                          // CMPQ    CX, VP
+    self.Sjmp("JE"     , "_decode_{n}")                     // JE      _decode_{n}
+    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)              // MOVQ    (VP), AX
+    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
+    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
+    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
+    self.Sjmp("JNE"    , "_decode_{n}")                     // JNE     _decode_{n}
+    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
+    self.decode_dynamic(_AX, _DI)                           // DECODE  AX, DI
+    self.Sjmp("JMP"    , "_decode_end_{n}")                 // JMP     _decode_end_{n}
+    self.Link("_decode_{n}")                                // _decode_{n}:
+    self.Emit("MOVQ"   , _ARG_fv, _DF)                      // MOVQ    fv, DF
+    self.Emit("MOVQ"   , _ST, jit.Ptr(_SP, 0))              // MOVQ    _ST, (SP)
+    self.call(_F_decodeValue)                               // CALL    decodeValue
+    self.Emit("TESTQ"  , _EP, _EP)                          // TESTQ   EP, EP
+    self.Sjmp("JNZ"    , _LB_parsing_error)                 // JNZ     _parsing_error
+    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
+}
+
+// _asm_OP_dyn decodes into a non-empty interface at VP. The interface must
+// hold a non-nil value of pointer kind; otherwise a type error is raised.
+func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
+    self.Emit("MOVQ"   , jit.Type(p.vt()), _ET)             // MOVQ    ${p.vt()}, ET
+    self.Emit("CMPQ"   , jit.Ptr(_VP, 8), jit.Imm(0))       // CMPQ    8(VP), $0
+    self.Sjmp("JE"     , _LB_type_error)                    // JE      _type_error
+    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)              // MOVQ    (VP), AX
+    self.Emit("MOVQ"   , jit.Ptr(_AX, 8), _AX)              // MOVQ    8(AX), AX
+    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)  // MOVBLZX _Gt_KindFlags(AX), DX
+    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)      // ANDL    ${F_kind_mask}, DX
+    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))             // CMPL    DX, ${reflect.Ptr}
+    self.Sjmp("JNE"    , _LB_type_error)                    // JNE     _type_error
+    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)              // LEAQ    8(VP), DI
+    self.decode_dynamic(_AX, _DI)                           // DECODE  AX, DI
+    self.Link("_decode_end_{n}")                            // _decode_end_{n}:
+}
+
+// _asm_OP_str decodes a JSON string directly into the Go string at VP.
+func (self *_Assembler) _asm_OP_str(_ *_Instr) {
+    self.parse_string()                                     // PARSE   STRING
+    self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true)     // UNQUOTE once, (VP), 8(VP)
+}
+
+// _asm_OP_bin decodes a base64-encoded JSON string into the []byte at VP:
+// it records the raw string, allocates an output buffer of len/4*3 bytes
+// (the maximum decoded size), swaps it into the slice header, and calls the
+// native b64decode subroutine, storing the actual decoded length afterwards.
+func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
+    self.parse_string()                                 // PARSE  STRING
+    self.slice_from(_VAR_st_Iv, -1)                     // SLICE  st.Iv, $-1
+    self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0))            // MOVQ   DI, (VP)
+    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8))            // MOVQ   SI, 8(VP)
+    self.Emit("SHRQ" , jit.Imm(2), _SI)                 // SHRQ   $2, SI
+    self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI)    // LEAQ   (SI)(SI*2), SI
+    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))           // MOVQ   SI, 16(VP)
+    self.malloc(_SI, _SI)                               // MALLOC SI, SI
+
+    // TODO: due to base64x's bug, only use AVX mode now
+    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)          //  MOVL $_MODE_JSON, CX
+
+    /* call the decoder */
+    self.Emit("XORL" , _DX, _DX)                // XORL  DX, DX
+    self.Emit("MOVQ" , _VP, _DI)                // MOVQ  VP, DI
+
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9)    // MOVQ (VP), R9
+    self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false)    // XCHGQ SI, (VP) 
+    self.Emit("MOVQ" , _R9, _SI)
+
+    self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8))    // XCHGQ DX, 8(VP)
+    self.call(_F_b64decode)                     // CALL  b64decode
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
+    self.Sjmp("JS"   , _LB_base64_error)        // JS    _base64_error
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))    // MOVQ  AX, 8(VP)
+}
+
+// _asm_OP_bool decodes "true"/"false" into the bool byte at (VP). A value
+// that is neither literal is skipped via _LB_skip_one, recording a mismatch
+// (type bool) and a resume address so decoding continues after the value.
+func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
+    self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)                     // LEAQ 4(IC), AX
+    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
+    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f'))    // CMPB (IP)(IC), $'f'
+    self.Sjmp("JE"  , "_false_{n}")                             // JE   _false_{n}
+    self.Emit("MOVL", jit.Imm(_IM_true), _CX)                   // MOVL $"true", CX
+    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
+    self.Sjmp("JE" , "_bool_true_{n}")  
+
+    // try to skip the value
+    self.Emit("MOVQ", _IC, _VAR_ic)           
+    self.Emit("MOVQ", _T_bool, _ET)         
+    self.Emit("MOVQ", _ET, _VAR_et)
+    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ (PC), R9
+    self.Sref("_end_{n}", 4)
+    self.Emit("MOVQ", _R9, _VAR_pc)
+    self.Sjmp("JMP"  , _LB_skip_one) 
+
+    self.Link("_bool_true_{n}")
+    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
+    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))              // MOVB $1, (VP)
+    self.Sjmp("JMP" , "_end_{n}")                               // JMP  _end_{n}
+    self.Link("_false_{n}")                                     // _false_{n}:
+    self.Emit("ADDQ", jit.Imm(1), _AX)                          // ADDQ $1, AX
+    self.Emit("ADDQ", jit.Imm(1), _IC)                          // ADDQ $1, IC
+    self.Emit("CMPQ", _AX, _IL)                                 // CMPQ AX, IL
+    self.Sjmp("JA"  , _LB_eof_error)                            // JA   _eof_error
+    self.Emit("MOVL", jit.Imm(_IM_alse), _CX)                   // MOVL $"alse", CX
+    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))             // CMPL CX, (IP)(IC)
+    self.Sjmp("JNE" , _LB_im_error)                             // JNE  _im_error
+    self.Emit("MOVQ", _AX, _IC)                                 // MOVQ AX, IC
+    self.Emit("XORL", _AX, _AX)                                 // XORL AX, AX
+    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                     // MOVB AX, (VP)
+    self.Link("_end_{n}")                                       // _end_{n}:
+}
+
+// _asm_OP_num decodes a json.Number: it accepts an optionally quoted number
+// (VAR_fl records whether a leading '"' was consumed), skips over the digits
+// with the native skip_number routine, stores the raw text as the string at
+// VP (copying it when the copy_string flag is set), and finally requires the
+// matching closing quote if the value was quoted.
+func (self *_Assembler) _asm_OP_num(_ *_Instr) {
+    self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
+    self.Emit("MOVQ", _IC, _BP)
+    self.Sjmp("JNE", "_skip_number_{n}")
+    self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
+    self.Emit("ADDQ", jit.Imm(1), _IC)
+    self.Link("_skip_number_{n}")
+
+    /* call skip_number */
+    self.call_sf(_F_skip_number)                   // CALL_SF skip_number
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JNS"   , "_num_next_{n}")
+
+    /* call skip one */
+    self.Emit("MOVQ", _BP, _VAR_ic)           
+    self.Emit("MOVQ", _T_number, _ET)       
+    self.Emit("MOVQ", _ET, _VAR_et)
+    self.Byte(0x4c, 0x8d, 0x0d)       
+    self.Sref("_num_end_{n}", 4)
+    self.Emit("MOVQ", _R9, _VAR_pc)
+    self.Sjmp("JMP"  , _LB_skip_one)
+
+    /* assign string */
+    self.Link("_num_next_{n}")
+    self.slice_from_r(_AX, 0)
+    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
+    self.Sjmp("JNC", "_num_write_{n}")
+    self.Byte(0x4c, 0x8d, 0x0d)                 // LEAQ (PC), R9
+    self.Sref("_num_write_{n}", 4)
+    self.Sjmp("JMP", "_copy_string")
+    self.Link("_num_write_{n}")
+    self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))     // MOVQ  SI, 8(VP)
+    self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)   
+    
+    /* check if quoted */
+    self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
+    self.Sjmp("JNE", "_num_end_{n}")
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
+    self.Sjmp("JNE", _LB_char_0_error)
+    self.Emit("ADDQ", jit.Imm(1), _IC)
+    self.Link("_num_end_{n}")
+}
+
+// _asm_OP_i8 parses a signed integer, range-checks it to int8 and stores a
+// single byte at (VP).
+func (self *_Assembler) _asm_OP_i8(ins *_Instr) {
+    var pin = "_i8_end_{n}"
+    self.parse_signed(int8Type, pin, -1)                                                 // PARSE int8
+    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)     // RANGE int8
+    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                             // MOVB  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_i16 parses a signed integer, range-checks it to int16 and stores a
+// 16-bit word at (VP).
+func (self *_Assembler) _asm_OP_i16(ins *_Instr) {
+    var pin = "_i16_end_{n}"
+    self.parse_signed(int16Type, pin, -1)                                                     // PARSE int16
+    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)     // RANGE int16
+    self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))                                 // MOVW  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_i32 parses a signed integer, range-checks it to int32 and stores a
+// 32-bit word at (VP).
+func (self *_Assembler) _asm_OP_i32(ins *_Instr) {
+    var pin = "_i32_end_{n}"
+    self.parse_signed(int32Type, pin, -1)                                                     // PARSE int32
+    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)     // RANGE int32
+    self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))                                 // MOVL  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_i64 parses a signed integer and stores the full 64-bit value at
+// (VP); no range check is needed for int64.
+func (self *_Assembler) _asm_OP_i64(ins *_Instr) {
+    var pin = "_i64_end_{n}"
+    self.parse_signed(int64Type, pin, -1)                         // PARSE int64
+    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u8 parses an unsigned integer, range-checks it to uint8 and stores
+// a single byte at (VP).
+func (self *_Assembler) _asm_OP_u8(ins *_Instr) {
+    var pin = "_u8_end_{n}"
+    self.parse_unsigned(uint8Type, pin, -1)                                   // PARSE uint8
+    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8)  // RANGE uint8
+    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))                 // MOVB  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u16 parses an unsigned integer, range-checks it to uint16 and
+// stores a 16-bit word at (VP).
+func (self *_Assembler) _asm_OP_u16(ins *_Instr) {
+    var pin = "_u16_end_{n}"
+    self.parse_unsigned(uint16Type, pin, -1)                                       // PARSE uint16
+    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16)   // RANGE uint16
+    self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))                     // MOVW  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u32 parses an unsigned integer, range-checks it to uint32 and
+// stores a 32-bit word at (VP).
+func (self *_Assembler) _asm_OP_u32(ins *_Instr) {
+    var pin = "_u32_end_{n}"
+    self.parse_unsigned(uint32Type, pin, -1)                                       // PARSE uint32
+    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32)   // RANGE uint32
+    self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))                     // MOVL  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_u64 parses an unsigned integer and stores the full 64-bit value at
+// (VP); no range check is needed for uint64.
+func (self *_Assembler) _asm_OP_u64(ins *_Instr) {
+    var pin = "_u64_end_{n}"
+    self.parse_unsigned(uint64Type, pin, -1)                       // PARSE uint64
+    self.Emit("MOVQ", _VAR_st_Iv, _AX)          // MOVQ  st.Iv, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ  AX, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_f32 parses a number, range-checks it against float32 and stores the
+// single-precision value at (VP).
+func (self *_Assembler) _asm_OP_f32(ins *_Instr) {
+    var pin = "_f32_end_{n}"
+    self.parse_number(float32Type, pin, -1)                         // PARSE NUMBER
+    self.range_single()                         // RANGE float32
+    self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0))    // MOVSS X0, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_f64 parses a number and stores the double-precision value at (VP).
+func (self *_Assembler) _asm_OP_f64(ins *_Instr) {
+    var pin = "_f64_end_{n}"
+    self.parse_number(float64Type, pin, -1)                         // PARSE NUMBER
+    self.Emit("MOVSD", _VAR_st_Dv, _X0)         // MOVSD st.Dv, X0
+    self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0))    // MOVSD X0, (VP)
+    self.Link(pin)
+}
+
+// _asm_OP_unquote decodes a doubly-quoted string (`\"...\"`): it requires the
+// literal prefix `\"`, then parses and unquotes the inner string twice into
+// the Go string at VP.
+func (self *_Assembler) _asm_OP_unquote(ins *_Instr) {
+    self.check_eof(2)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))   // CMPB    (IP)(IC), $'\\'
+    self.Sjmp("JNE" , _LB_char_0_error)                         // JNE     _char_0_error
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))    // CMPB    1(IP)(IC), $'"'
+    self.Sjmp("JNE" , _LB_char_1_error)                         // JNE     _char_1_error
+    self.Emit("ADDQ", jit.Imm(2), _IC)                          // ADDQ    $2, IC
+    self.parse_string()                                         // PARSE   STRING
+    self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false)        // UNQUOTE twice, (VP), 8(VP)
+}
+
+// _asm_OP_nil_1 zeroes one machine word at (VP) (e.g. a pointer or integer).
+func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
+    self.Emit("XORL", _AX, _AX)                 // XORL AX, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))     // MOVQ AX, (VP)
+}
+
+// _asm_OP_nil_2 zeroes two machine words at (VP) (e.g. a string or interface).
+func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
+    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
+}
+
+// _asm_OP_nil_3 zeroes three machine words at (VP) (e.g. a slice header).
+func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
+    self.Emit("XORL" , _AX, _AX)                // XORL  AX, AX
+    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))   // MOVQ  AX, 16(VP)
+}
+
+// _asm_OP_deref follows the pointer at VP, allocating the pointee if needed
+// (see vfollow).
+func (self *_Assembler) _asm_OP_deref(p *_Instr) {
+    self.vfollow(p.vt())
+}
+
+// _asm_OP_index advances VP by the instruction's immediate byte offset
+// (used to address a struct field or array element).
+func (self *_Assembler) _asm_OP_index(p *_Instr) {
+    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)    // MOVQ ${p.vi()}, AX
+    self.Emit("ADDQ", _AX, _VP)                 // ADDQ _AX, _VP
+}
+
+// _asm_OP_is_null tests for the literal "null" at the cursor; on a match the
+// cursor is advanced past it and control branches to instruction p.vi().
+func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
+    self.Emit("LEAQ"   , jit.Ptr(_IC, 4), _AX)                          // LEAQ    4(IC), AX
+    self.Emit("CMPQ"   , _AX, _IL)                                      // CMPQ    AX, IL
+    self.Sjmp("JA"     , "_not_null_{n}")                               // JA      _not_null_{n}
+    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL    (IP)(IC), $"null"
+    self.Emit("CMOVQEQ", _AX, _IC)                                      // CMOVQEQ AX, IC
+    self.Xjmp("JE"     , p.vi())                                        // JE      {p.vi()}
+    self.Link("_not_null_{n}")                                          // _not_null_{n}:
+}
+
+// _asm_OP_is_null_quote tests for the 5-byte sequence `null"` (a quoted null
+// inside a string field); on a match the cursor skips it and control
+// branches to instruction p.vi().
+func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
+    self.Emit("LEAQ"   , jit.Ptr(_IC, 5), _AX)                          // LEAQ    5(IC), AX
+    self.Emit("CMPQ"   , _AX, _IL)                                      // CMPQ    AX, IL
+    self.Sjmp("JA"     , "_not_null_quote_{n}")                         // JA      _not_null_quote_{n}
+    self.Emit("CMPL"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))    // CMPL    (IP)(IC), $"null"
+    self.Sjmp("JNE"    , "_not_null_quote_{n}")                         // JNE     _not_null_quote_{n}
+    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))         // CMPB    4(IP)(IC), $'"'
+    self.Emit("CMOVQEQ", _AX, _IC)                                      // CMOVQEQ AX, IC
+    self.Xjmp("JE"     , p.vi())                                        // JE      {p.vi()}
+    self.Link("_not_null_quote_{n}")                                    // _not_null_quote_{n}:
+}
+
+// _asm_OP_map_init ensures the map at (VP) exists — allocating it with
+// makemap_small when nil — and leaves the map pointer itself in VP for the
+// following map-key instructions.
+func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)    // MOVQ    (VP), AX
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JNZ"  , "_end_{n}")              // JNZ     _end_{n}
+    self.call_go(_F_makemap_small)              // CALL_GO makemap_small
+    self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX)    // MOVQ    (SP), AX
+    self.WritePtrAX(6, jit.Ptr(_VP, 0), false)    // MOVQ    AX, (VP)
+    self.Link("_end_{n}")                       // _end_{n}:
+    self.Emit("MOVQ" , _AX, _VP)                // MOVQ    AX, VP
+}
+
+// _asm_OP_map_key_i8 parses a quoted int8 map key and inserts it with the
+// generic mapassign path.
+func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
+    self.parse_signed(int8Type, "", p.vi())                                                 // PARSE     int8
+    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)     // RANGE     int8
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                              // MAPASSIGN int8, mapassign, st.Iv
+}
+
+// _asm_OP_map_key_i16 parses a quoted int16 map key and inserts it with the
+// generic mapassign path.
+func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
+    self.parse_signed(int16Type, "", p.vi())                                                     // PARSE     int16
+    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)     // RANGE     int16
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                                  // MAPASSIGN int16, mapassign, st.Iv
+}
+
+// _asm_OP_map_key_i32 parses a quoted int32 map key and inserts it, using
+// the runtime fast32 path when the map layout allows it.
+func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
+    self.parse_signed(int32Type, "", p.vi())                                                     // PARSE     int32
+    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)     // RANGE     int32
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)                                  // MAPASSIGN int32, mapassign, st.Iv
+    } else {
+        self.mapassign_fastx(vt, _F_mapassign_fast32)                       // MAPASSIGN int32, mapassign_fast32
+    }
+}
+
+// _asm_OP_map_key_i64 parses a quoted int64 map key and inserts it, using
+// the runtime fast64 path when the map layout allows it.
+func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
+    self.parse_signed(int64Type, "", p.vi())                                 // PARSE     int64
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)              // MAPASSIGN int64, mapassign, st.Iv
+    } else {
+        self.Emit("MOVQ", _VAR_st_Iv, _AX)              // MOVQ      st.Iv, AX
+        self.mapassign_fastx(vt, _F_mapassign_fast64)   // MAPASSIGN int64, mapassign_fast64
+    }
+}
+
+// _asm_OP_map_key_u8 parses a quoted uint8 map key and inserts it with the
+// generic mapassign path.
+func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
+    self.parse_unsigned(uint8Type, "", p.vi())                                   // PARSE     uint8
+    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8)  // RANGE     uint8
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                  // MAPASSIGN uint8, vt.Iv
+}
+
+// _asm_OP_map_key_u16 parses a quoted uint16 map key and inserts it with the
+// generic mapassign path.
+func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
+    self.parse_unsigned(uint16Type, "", p.vi())                                       // PARSE     uint16
+    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16)   // RANGE     uint16
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Iv)                      // MAPASSIGN uint16, vt.Iv
+}
+
+// _asm_OP_map_key_u32 parses a quoted uint32 map key and inserts it, using
+// the runtime fast32 path when the map layout allows it.
+func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
+    self.parse_unsigned(uint32Type, "", p.vi())                                       // PARSE     uint32
+    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32)   // RANGE     uint32
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)                      // MAPASSIGN uint32, vt.Iv
+    } else {
+        self.mapassign_fastx(vt, _F_mapassign_fast32)           // MAPASSIGN uint32, mapassign_fast32
+    }
+}
+
+// _asm_OP_map_key_u64 parses a quoted uint64 map key and inserts it, using
+// the runtime fast64 path when the map layout allows it.
+func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
+    self.parse_unsigned(uint64Type, "", p.vi())                                       // PARSE     uint64
+    self.match_char('"')
+    if vt := p.vt(); !mapfast(vt) {
+        self.mapassign_std(vt, _VAR_st_Iv)                      // MAPASSIGN uint64, vt.Iv
+    } else {
+        self.Emit("MOVQ", _VAR_st_Iv, _AX)                      // MOVQ      st.Iv, AX
+        self.mapassign_fastx(vt, _F_mapassign_fast64)           // MAPASSIGN uint64, mapassign_fast64
+    }
+}
+
+// _asm_OP_map_key_f32 parses a quoted float32 map key (spilled back into
+// st.Dv after the range check) and inserts it with the generic mapassign path.
+func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
+    self.parse_number(float32Type, "", p.vi())                     // PARSE     NUMBER
+    self.range_single()                     // RANGE     float32
+    self.Emit("MOVSS", _X0, _VAR_st_Dv)     // MOVSS     X0, st.Dv
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Dv)  // MAPASSIGN ${p.vt()}, mapassign, st.Dv
+}
+
+// _asm_OP_map_key_f64 parses a quoted float64 map key and inserts it with
+// the generic mapassign path.
+func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
+    self.parse_number(float64Type, "", p.vi())                     // PARSE     NUMBER
+    self.match_char('"')
+    self.mapassign_std(p.vt(), _VAR_st_Dv)  // MAPASSIGN ${p.vt()}, mapassign, st.Dv
+}
+
+// _asm_OP_map_key_str parses and unquotes a string map key. On the fast path
+// the (ptr, len) pair is passed straight to mapassign_faststr; otherwise the
+// string header is copied into a freshly allocated key before the generic
+// mapassign.
+func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
+    self.parse_string()                          // PARSE     STRING
+    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)      // UNQUOTE   once, sv.p, sv.n
+    if vt := p.vt(); !mapfast(vt) {
+        self.valloc(vt.Key(), _DI)
+        self.Emit("MOVOU", _VAR_sv, _X0)
+        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
+        self.mapassign_std(vt, jit.Ptr(_DI, 0))        
+    } else {
+        self.Emit("MOVQ", _VAR_sv_p, _DI)        // MOVQ      sv.p, DI
+        self.Emit("MOVQ", _VAR_sv_n, _SI)        // MOVQ      sv.n, SI
+        self.mapassign_str_fast(vt, _DI, _SI)    // MAPASSIGN string, DI, SI
+    }
+}
+
+// _asm_OP_map_key_utext decodes a map key through encoding.TextUnmarshaler
+// with a value receiver (non-addressable path).
+func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
+    self.parse_string()                         // PARSE     STRING
+    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)     // UNQUOTE   once, sv.p, sv.n
+    self.mapassign_utext(p.vt(), false)         // MAPASSIGN utext, ${p.vt()}, false
+}
+
+// _asm_OP_map_key_utext_p decodes a map key through encoding.TextUnmarshaler
+// with a pointer receiver (addressable path).
+func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
+    self.parse_string()                         // PARSE     STRING
+    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)     // UNQUOTE   once, sv.p, sv.n
+    self.mapassign_utext(p.vt(), true)          // MAPASSIGN utext, ${p.vt()}, true
+}
+
+// _asm_OP_array_skip skips over a complete JSON array with the native
+// skip_array routine; a negative return is a parse error.
+func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
+    self.call_sf(_F_skip_array)                 // CALL_SF skip_array
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+}
+
+// _asm_OP_array_clear zeroes the remaining (undecoded) tail of a fixed-size
+// array whose element type contains no pointers.
+func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
+    self.mem_clear_rem(p.i64(), true)
+}
+
+// _asm_OP_array_clear_p zeroes the remaining (undecoded) tail of a
+// fixed-size array whose element type contains pointers.
+func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
+    self.mem_clear_rem(p.i64(), false)
+}
+
+// _asm_OP_slice_init resets the slice at VP to length zero, allocating a
+// backing array of capacity _MinSlice via makeslice when the slice has no
+// capacity yet.
+func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
+    self.Emit("XORL" , _AX, _AX)                    // XORL    AX, AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // MOVQ    AX, 8(VP)
+    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)       // MOVQ    16(VP), AX
+    self.Emit("TESTQ", _AX, _AX)                    // TESTQ   AX, AX
+    self.Sjmp("JNZ"  , "_done_{n}")                 // JNZ     _done_{n}
+    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX)     // MOVQ    ${_MinSlice}, CX
+    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))       // MOVQ    CX, 16(VP)
+    self.Emit("MOVQ" , jit.Type(p.vt()), _DX)       // MOVQ    ${p.vt()}, DX
+    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0))        // MOVQ    DX, (SP)
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))        // MOVQ    AX, 8(SP)
+    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16))       // MOVQ    CX, 16(SP)
+    self.call_go(_F_makeslice)                      // CALL_GO makeslice
+    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)       // MOVQ    24(SP), AX
+    self.WritePtrAX(7, jit.Ptr(_VP, 0), false)      // MOVQ    AX, (VP)
+    self.Link("_done_{n}")                          // _done_{n}:
+    self.Emit("XORL" , _AX, _AX)                    // XORL    AX, AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // MOVQ    AX, 8(VP)
+}
+
+// _asm_OP_check_empty special-cases an immediately-closed array ("[]"): the
+// slice at VP is set to a non-nil empty slice (base pointer _Zero_Base,
+// len/cap zero) and control jumps past the element-decoding loop. Only the
+// ']' bracket is supported.
+func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
+    rbracket := p.vb()
+    if rbracket == ']' {
+        self.check_eof(1)
+        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
+        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB    (IP)(IC), ']'
+        self.Sjmp("JNE" , "_not_empty_array_{n}")                            // JNE     _not_empty_array_{n}
+        self.Emit("MOVQ", _AX, _IC)                                          // MOVQ    AX, IC
+        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
+        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
+        self.Emit("PXOR" , _X0, _X0)                                         // PXOR    X0, X0
+        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))                             // MOVOU   X0, 8(VP)
+        self.Xjmp("JMP" , p.vi())                                            // JMP     {p.vi()}
+        self.Link("_not_empty_array_{n}")
+    } else {
+        panic("only implement check empty array here!")
+    }
+}
+
+// _asm_OP_slice_append appends one element slot to the slice at VP: when
+// len == cap it grows the slice via growslice (doubling the capacity), then
+// increments the length and points VP at the new element
+// (base + len*elemsize) for the following decode instruction.
+func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
+    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)            // MOVQ    8(VP), AX
+    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))           // CMPQ    AX, 16(VP)
+    self.Sjmp("JB"   , "_index_{n}")                    // JB      _index_{n}
+    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)           // MOVQ    ${p.vt()}, AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))            // MOVQ    AX, (SP)
+    self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0)            // MOVOU   (VP), X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))            // MOVOU   X0, 8(SP)
+    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)           // MOVQ    16(VP), AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24))           // MOVQ    AX, 24(SP)
+    self.Emit("SHLQ" , jit.Imm(1), _AX)                 // SHLQ    $1, AX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))           // MOVQ    AX, 32(SP)
+    self.call_go(_F_growslice)                          // CALL_GO growslice
+    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI)           // MOVQ    40(SP), DI
+    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX)           // MOVQ    48(SP), AX
+    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI)           // MOVQ    56(SP), SI
+    self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true)// MOVQ    DI, (VP)
+    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))            // MOVQ    AX, 8(VP)
+    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))           // MOVQ    SI, 16(VP)
+
+    // because growslice not zero memory {oldcap, newlen} when append et not has ptrdata.
+    // but we should zero it, avoid decode it as random values.
+    if rt.UnpackType(p.vt()).PtrData == 0 {
+        self.Emit("SUBQ" , _AX, _SI)                        // SI = newcap - newlen (elements to zero)
+    
+        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))     // ADDQ    $1, 8(VP)
+        self.Emit("MOVQ" , _DI, _VP)                        // MOVQ    DI, VP
+        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)   // MOVQ    ${p.vlen()}, CX
+        self.From("MULQ" , _CX)                             // MULQ    CX
+        self.Emit("ADDQ" , _AX, _VP)                        // ADDQ    AX, VP
+
+        self.Emit("MOVQ" , _SI, _AX)                        // MOVQ    SI, AX
+        self.From("MULQ" , _CX)                             // MULQ    CX
+        self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))            // MOVQ    AX, 8(SP)
+
+        self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0))            // MOVQ    VP, (SP)
+        self.mem_clear_fn(true)                             // CALL_GO memclr{Has,NoHeap}
+        self.Sjmp("JMP", "_append_slice_end_{n}")           // JMP    _append_slice_end_{n}
+    }
+
+    self.Link("_index_{n}")                             // _index_{n}:
+    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))     // ADDQ    $1, 8(VP)
+    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)            // MOVQ    (VP), VP
+    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)   // MOVQ    ${p.vlen()}, CX
+    self.From("MULQ" , _CX)                             // MULQ    CX
+    self.Emit("ADDQ" , _AX, _VP)                        // ADDQ    AX, VP
+    self.Link("_append_slice_end_{n}")
+}
+
+// _asm_OP_object_skip skips over the remainder of the current JSON object via
+// the native skip_object helper; a negative return in AX signals a parse error.
+func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
+    self.call_sf(_F_skip_object)                // CALL_SF skip_object
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+}
+
+// _asm_OP_object_next skips one JSON value (the current member's value) via
+// the native skip_one helper; a negative return in AX signals a parse error.
+func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
+    self.call_sf(_F_skip_one)                   // CALL_SF skip_one
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ   AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)     // JS      _parse_error_v
+}
+
+// _asm_OP_struct_field resolves the just-parsed object key to a struct field
+// index: it parses and unquotes the string, hashes it with strhash, probes the
+// hash table embedded in the instruction's *caching.FieldMap (32-byte entries),
+// confirms hash matches with memequal, and falls back to a case-insensitive
+// lookup. The resulting field ID (or -1 for "not found") is left in the sr
+// stack variable; unknown fields raise _LB_field_error when the
+// _F_disable_unknown flag is set in fv.
+func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
+    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
+    self.Emit("MOVQ" , jit.Imm(-1), _AX)                        // MOVQ    $-1, AX
+    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, sr  (default: field not found)
+    self.parse_string()                                         // PARSE   STRING
+    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)                     // UNQUOTE once, sv.p, sv.n
+    self.Emit("LEAQ" , _VAR_sv, _AX)                            // LEAQ    sv, AX
+    self.Emit("XORL" , _CX, _CX)                                // XORL    CX, CX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                    // MOVQ    AX, (SP)
+    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))                    // MOVQ    CX, 8(SP)
+    self.call_go(_F_strhash)                                    // CALL_GO strhash
+    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX)                   // MOVQ    16(SP), AX
+    self.Emit("MOVQ" , _AX, _R9)                                // MOVQ    AX, R9  (R9 = hash of key)
+    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)      // MOVQ    ${p.vf()}, CX
+    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)   // MOVQ    FieldMap.b(CX), SI
+    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)   // MOVQ    FieldMap.N(CX), CX
+    self.Emit("TESTQ", _CX, _CX)                                // TESTQ   CX, CX
+    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
+    /* probe loop: AX holds the (incrementing) probe value, DX = AX mod N */
+    self.Link("_loop_{n}")                                      // _loop_{n}:
+    self.Emit("XORL" , _DX, _DX)                                // XORL    DX, DX
+    self.From("DIVQ" , _CX)                                     // DIVQ    CX
+    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)                    // LEAQ    1(DX), AX  (next probe)
+    self.Emit("SHLQ" , jit.Imm(5), _DX)                         // SHLQ    $5, DX     (slot * 32 bytes)
+    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)            // LEAQ    (SI)(DX), DI
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)             // MOVQ    FieldEntry.Hash(DI), R8
+    self.Emit("TESTQ", _R8, _R8)                                // TESTQ   R8, R8  (empty slot ends the probe)
+    self.Sjmp("JZ"   , "_try_lowercase_{n}")                    // JZ      _try_lowercase_{n}
+    self.Emit("CMPQ" , _R8, _R9)                                // CMPQ    R8, R9
+    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)         // MOVQ    FieldEntry.Name+8(DI), DX
+    self.Emit("CMPQ" , _DX, _VAR_sv_n)                          // CMPQ    DX, sv.n  (lengths must match)
+    self.Sjmp("JNE"  , "_loop_{n}")                             // JNE     _loop_{n}
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)               // MOVQ    FieldEntry.ID(DI), R8
+    /* spill probe state around the memequal call, which clobbers registers */
+    self.Emit("MOVQ" , _AX, _VAR_ss_AX)                         // MOVQ    AX, ss.AX
+    self.Emit("MOVQ" , _CX, _VAR_ss_CX)                         // MOVQ    CX, ss.CX
+    self.Emit("MOVQ" , _SI, _VAR_ss_SI)                         // MOVQ    SI, ss.SI
+    self.Emit("MOVQ" , _R8, _VAR_ss_R8)                         // MOVQ    R8, ss.R8
+    self.Emit("MOVQ" , _R9, _VAR_ss_R9)                         // MOVQ    R9, ss.R9
+    self.Emit("MOVQ" , _VAR_sv_p, _AX)                          // MOVQ    _VAR_sv_p, AX
+    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)             // MOVQ    FieldEntry.Name(DI), CX
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                    // MOVQ    AX, (SP)
+    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))                    // MOVQ    CX, 8(SP)
+    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16))                   // MOVQ    DX, 16(SP)
+    self.call_go(_F_memequal)                                   // CALL_GO memequal
+    self.Emit("MOVQ" , _VAR_ss_AX, _AX)                         // MOVQ    ss.AX, AX
+    self.Emit("MOVQ" , _VAR_ss_CX, _CX)                         // MOVQ    ss.CX, CX
+    self.Emit("MOVQ" , _VAR_ss_SI, _SI)                         // MOVQ    ss.SI, SI
+    self.Emit("MOVQ" , _VAR_ss_R9, _R9)                         // MOVQ    ss.R9, R9
+    self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX)                   // MOVB    24(SP), DX  (memequal result)
+    self.Emit("TESTB", _DX, _DX)                                // TESTB   DX, DX
+    self.Sjmp("JZ"   , "_loop_{n}")                             // JZ      _loop_{n}  (hash collision: keep probing)
+    self.Emit("MOVQ" , _VAR_ss_R8, _R8)                         // MOVQ    ss.R8, R8
+    self.Emit("MOVQ" , _R8, _VAR_sr)                            // MOVQ    R8, sr  (found: store field ID)
+    self.Sjmp("JMP"  , "_end_{n}")                              // JMP     _end_{n}
+    self.Link("_try_lowercase_{n}")                             // _try_lowercase_{n}:
+    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)   // MOVQ    ${p.vf()}, AX
+    self.Emit("MOVOU", _VAR_sv, _X0)                            // MOVOU   sv, X0
+    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))                    // MOVQ    AX, (SP)
+    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))                    // MOVOU   X0, 8(SP)
+    self.call_go(_F_FieldMap_GetCaseInsensitive)                // CALL_GO FieldMap::GetCaseInsensitive
+    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)                   // MOVQ    24(SP), AX
+    self.Emit("MOVQ" , _AX, _VAR_sr)                            // MOVQ    AX, _VAR_sr
+    self.Emit("TESTQ", _AX, _AX)                                // TESTQ   AX, AX
+    self.Sjmp("JNS"  , "_end_{n}")                              // JNS     _end_{n}
+    self.Emit("BTQ"  , jit.Imm(_F_disable_unknown), _ARG_fv)    // BTQ     ${_F_disable_unknown}, fv
+    self.Sjmp("JC"   , _LB_field_error)                         // JC      _field_error
+    self.Link("_end_{n}")                                       // _end_{n}:
+}
+
+// The four wrappers below dispatch to the shared unmarshal helpers. The bool
+// argument distinguishes the value-receiver variant (true: the helper must
+// take the address / deref as appropriate) from the pointer-receiver one.
+
+// _asm_OP_unmarshal invokes the type's json.Unmarshaler on the current value.
+func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
+    self.unmarshal_json(p.vt(), true)
+}
+
+// _asm_OP_unmarshal_p invokes json.Unmarshaler via the pointer receiver.
+func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
+    self.unmarshal_json(p.vt(), false)
+}
+
+// _asm_OP_unmarshal_text invokes encoding.TextUnmarshaler on the current value.
+func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
+    self.unmarshal_text(p.vt(), true)
+}
+
+// _asm_OP_unmarshal_text_p invokes encoding.TextUnmarshaler via the pointer receiver.
+func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
+    self.unmarshal_text(p.vt(), false)
+}
+
+// _asm_OP_lspace skips leading JSON whitespace at the current input cursor.
+func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
+    self.lspace("_{n}")
+}
+
+// lspace emits code that advances IC past whitespace (per the _BM_space
+// bitmap): it tests up to 4 characters inline as a fast path, then hands off
+// to the native lspace function for longer runs. Hitting end of input raises
++// _LB_eof_error; subfix disambiguates the emitted label per call site.
+func (self *_Assembler) lspace(subfix string) {
+    var label = "_lspace" + subfix
+
+    self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
+    self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
+    self.Emit("MOVQ"   , jit.Imm(_BM_space), _DX)       // MOVQ    _BM_space, DX
+    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
+    self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
+    self.Sjmp("JA"     , label)                // JA      _lspace{subfix}
+    self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX
+    self.Sjmp("JNC"    , label)                // JNC     _lspace{subfix}
+
+    /* test up to 4 characters */
+    for i := 0; i < 3; i++ {
+        self.Emit("ADDQ"   , jit.Imm(1), _IC)               // ADDQ    $1, IC
+        self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
+        self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
+        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
+        self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
+        self.Sjmp("JA"     , label)                // JA      _lspace{subfix}
+        self.Emit("BTQ"    , _AX, _DX)                      // BTQ     AX, DX
+        self.Sjmp("JNC"    , label)                // JNC     _lspace{subfix}
+    }
+
+    /* handle over to the native function */
+    self.Emit("MOVQ"   , _IP, _DI)                      // MOVQ    IP, DI
+    self.Emit("MOVQ"   , _IL, _SI)                      // MOVQ    IL, SI
+    self.Emit("MOVQ"   , _IC, _DX)                      // MOVQ    IC, DX
+    self.call(_F_lspace)                                // CALL    lspace
+    self.Emit("TESTQ"  , _AX, _AX)                      // TESTQ   AX, AX
+    self.Sjmp("JS"     , _LB_parsing_error_v)           // JS      _parsing_error_v
+    self.Emit("CMPQ"   , _AX, _IL)                      // CMPQ    AX, IL
+    self.Sjmp("JAE"    , _LB_eof_error)                 // JAE     _eof_error
+    self.Emit("MOVQ"   , _AX, _IC)                      // MOVQ    AX, IC
+    self.Link(label)                           // _lspace{subfix}:
+}
+
+// _asm_OP_match_char requires the next input byte to equal the instruction's
+// immediate character.
+func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
+    self.match_char(p.vb())
+}
+
+// match_char checks EOF, compares the byte at (IP)(IC) against char — raising
+// _LB_char_0_error on mismatch — and advances IC past it.
+func (self *_Assembler) match_char(char byte) {
+    self.check_eof(1)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char)))  // CMPB (IP)(IC), ${p.vb()}
+    self.Sjmp("JNE" , _LB_char_0_error)                               // JNE  _char_0_error
+    self.Emit("ADDQ", jit.Imm(1), _IC)                                // ADDQ $1, IC
+}
+
+// _asm_OP_check_char branches to instruction p.vi() when the next input byte
+// equals p.vb(), consuming the byte on match (via CMOVQEQ, no extra branch).
+func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
+    self.check_eof(1)
+    self.Emit("LEAQ"   , jit.Ptr(_IC, 1), _AX)                              // LEAQ    1(IC), AX
+    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))   // CMPB    (IP)(IC), ${p.vb()}
+    self.Emit("CMOVQEQ", _AX, _IC)                                          // CMOVQEQ AX, IC
+    self.Xjmp("JE"     , p.vi())                                            // JE      {p.vi()}
+}
+
+// _asm_OP_check_char_0 is the non-consuming variant: it branches to p.vi() on
+// a match but leaves IC unchanged.
+func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
+    self.check_eof(1)
+    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))   // CMPB    (IP)(IC), ${p.vb()}
+    self.Xjmp("JE"  , p.vi())                                            // JE      {p.vi()}
+}
+
+// _asm_OP_add advances the input cursor IC by the instruction's immediate.
+func (self *_Assembler) _asm_OP_add(p *_Instr) {
+    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC)  // ADDQ ${p.vi()}, IC
+}
+
+// _asm_OP_load restores VP from the top of the decoder's pointer stack
+// (ST holds the byte offset of the top at (ST)+0).
+func (self *_Assembler) _asm_OP_load(_ *_Instr) {
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
+    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP)     // MOVQ (ST)(AX), VP
+}
+
+// _asm_OP_save pushes VP onto the decoder's pointer stack, raising
+// _LB_stack_error when the stack is already at _MaxStackBytes.
+func (self *_Assembler) _asm_OP_save(_ *_Instr) {
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // MOVQ (ST), CX
+    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))          // CMPQ CX, ${_MaxStackBytes}
+    self.Sjmp("JAE"  , _LB_stack_error)                 // JAE  _stack_error
+    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)
+    self.Emit("ADDQ", jit.Imm(8), _CX)                  // ADDQ $8, CX
+    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))             // MOVQ CX, (ST)
+}
+
+// _asm_OP_drop pops one pointer off the stack into VP and zeroes the vacated
+// slot so the GC does not keep the old value alive.
+func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
+    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)             // MOVQ (ST), AX
+    self.Emit("SUBQ", jit.Imm(8), _AX)                  // SUBQ $8, AX
+    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)     // MOVQ 8(ST)(AX), VP
+    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))             // MOVQ AX, (ST)
+    self.Emit("XORL", _ET, _ET)                         // XORL ET, ET
+    self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8))     // MOVQ ET, 8(ST)(AX)
+}
+
+// _asm_OP_drop_2 pops two pointers at once (VP gets the lower one) and zeroes
+// both vacated slots with a single 16-byte MOVOU store.
+func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
+    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)            // MOVQ  (ST), AX
+    self.Emit("SUBQ" , jit.Imm(16), _AX)                // SUBQ  $16, AX
+    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP)    // MOVQ  8(ST)(AX), VP
+    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))            // MOVQ  AX, (ST)
+    self.Emit("PXOR" , _X0, _X0)                        // PXOR  X0, X0
+    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))    // MOVOU X0, 8(ST)(AX)
+}
+
+// _asm_OP_recurse decodes the value at VP dynamically (used for recursive or
+// deferred-compilation types).
+func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
+    self.Emit("MOVQ", jit.Type(p.vt()), _AX)    // MOVQ   ${p.vt()}, AX
+    self.decode_dynamic(_AX, _VP)               // DECODE AX, VP
+}
+
+// _asm_OP_goto is an unconditional jump to instruction p.vi().
+func (self *_Assembler) _asm_OP_goto(p *_Instr) {
+    self.Xjmp("JMP", p.vi())
+}
+
+// _asm_OP_switch implements a computed goto: it bounds-checks the selector in
+// the sr variable against the number of cases, then jumps through a
+// PC-relative jump table of 32-bit offsets emitted inline after the selector
+// code. Out-of-range selectors fall through to the default label.
+func (self *_Assembler) _asm_OP_switch(p *_Instr) {
+    self.Emit("MOVQ", _VAR_sr, _AX)             // MOVQ sr, AX
+    self.Emit("CMPQ", _AX, jit.Imm(p.i64()))    // CMPQ AX, ${len(p.vs())}
+    self.Sjmp("JAE" , "_default_{n}")           // JAE  _default_{n}
+
+    /* jump table selector */
+    self.Byte(0x48, 0x8d, 0x3d)                         // LEAQ    ?(PC), DI  (raw opcode; target patched via Sref)
+    self.Sref("_switch_table_{n}", 4)                   // ....    &_switch_table_{n}
+    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)  // MOVLQSX (DI)(AX*4), AX
+    self.Emit("ADDQ"   , _DI, _AX)                      // ADDQ    DI, AX
+    self.Rjmp("JMP"    , _AX)                           // JMP     AX
+    self.Link("_switch_table_{n}")                      // _switch_table_{n}:
+
+    /* generate the jump table: offsets are relative to the table base */
+    for i, v := range p.vs() {
+        self.Xref(v, int64(-i) * 4)
+    }
+
+    /* default case */
+    self.Link("_default_{n}")
+    self.NOP()
+}
+
+// print_gc emits a debug println of the instruction index and the opcodes of
+// two adjacent instructions (diagnostic aid; not part of normal decoding).
+func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
+    self.Emit("MOVQ", jit.Imm(int64(p2.op())),  jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP)
+    self.Emit("MOVQ", jit.Imm(int64(p1.op())),  jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP)
+    self.Emit("MOVQ", jit.Imm(int64(i)),  jit.Ptr(_SP, 0))       // MOVQ $(i), (SP)
+    self.call_go(_F_println)
+}

+ 1158 - 0
vendor/github.com/bytedance/sonic/internal/decoder/compiler.go

@@ -0,0 +1,1158 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package decoder
+
+import (
+    `encoding/json`
+    `fmt`
+    `reflect`
+    `sort`
+    `strconv`
+    `strings`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/caching`
+    `github.com/bytedance/sonic/internal/resolver`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/bytedance/sonic/option`
+)
+
+// _Op identifies one decoder VM instruction; it is stored in the top 8 bits
+// of _Instr.u (see packOp below).
+type _Op uint8
+
+// NOTE(review): the order of this iota block is load-bearing — _Op values are
+// used as indices into _OpNames (and presumably a parallel dispatch table in
+// the assembler; not visible here). Do not reorder or insert entries
+// mid-block; new ops are appended at the end.
+const (
+    _OP_any _Op = iota + 1
+    _OP_dyn
+    _OP_str
+    _OP_bin
+    _OP_bool
+    _OP_num
+    _OP_i8
+    _OP_i16
+    _OP_i32
+    _OP_i64
+    _OP_u8
+    _OP_u16
+    _OP_u32
+    _OP_u64
+    _OP_f32
+    _OP_f64
+    _OP_unquote
+    _OP_nil_1
+    _OP_nil_2
+    _OP_nil_3
+    _OP_deref
+    _OP_index
+    _OP_is_null
+    _OP_is_null_quote
+    _OP_map_init
+    _OP_map_key_i8
+    _OP_map_key_i16
+    _OP_map_key_i32
+    _OP_map_key_i64
+    _OP_map_key_u8
+    _OP_map_key_u16
+    _OP_map_key_u32
+    _OP_map_key_u64
+    _OP_map_key_f32
+    _OP_map_key_f64
+    _OP_map_key_str
+    _OP_map_key_utext
+    _OP_map_key_utext_p
+    _OP_array_skip
+    _OP_array_clear
+    _OP_array_clear_p
+    _OP_slice_init
+    _OP_slice_append
+    _OP_object_skip
+    _OP_object_next
+    _OP_struct_field
+    _OP_unmarshal
+    _OP_unmarshal_p
+    _OP_unmarshal_text
+    _OP_unmarshal_text_p
+    _OP_lspace
+    _OP_match_char
+    _OP_check_char
+    _OP_load
+    _OP_save
+    _OP_drop
+    _OP_drop_2
+    _OP_recurse
+    _OP_goto
+    _OP_switch
+    _OP_check_char_0
+    _OP_dismatch_err
+    _OP_go_skip
+    _OP_add
+    _OP_check_empty
+    _OP_debug
+)
+
+// Word sizes of the target platform, derived from the zero value so they are
+// compile-time constants (32 or 64 bits).
+const (
+    _INT_SIZE = 32 << (^uint(0) >> 63)
+    _PTR_SIZE = 32 << (^uintptr(0) >> 63)
+    _PTR_BYTE = unsafe.Sizeof(uintptr(0))
+)
+
+// Compilation limits guarding against pathological input types.
+const (
+    _MAX_ILBUF = 100000     // cutoff at 100k of IL instructions
+    _MAX_FIELDS = 50        // cutoff at 50 fields struct
+)
+
+// _OpNames maps each _Op value to its mnemonic for disassembly. Entries left
+// empty render as "<invalid>" via _Op.String.
+// NOTE(review): _OP_array_clear and _OP_array_clear_p have no entry here, so
+// they disassemble as "<invalid>" — verify against upstream whether these
+// entries were dropped by mistake.
+var _OpNames = [256]string {
+    _OP_any              : "any",
+    _OP_dyn              : "dyn",
+    _OP_str              : "str",
+    _OP_bin              : "bin",
+    _OP_bool             : "bool",
+    _OP_num              : "num",
+    _OP_i8               : "i8",
+    _OP_i16              : "i16",
+    _OP_i32              : "i32",
+    _OP_i64              : "i64",
+    _OP_u8               : "u8",
+    _OP_u16              : "u16",
+    _OP_u32              : "u32",
+    _OP_u64              : "u64",
+    _OP_f32              : "f32",
+    _OP_f64              : "f64",
+    _OP_unquote          : "unquote",
+    _OP_nil_1            : "nil_1",
+    _OP_nil_2            : "nil_2",
+    _OP_nil_3            : "nil_3",
+    _OP_deref            : "deref",
+    _OP_index            : "index",
+    _OP_is_null          : "is_null",
+    _OP_is_null_quote    : "is_null_quote",
+    _OP_map_init         : "map_init",
+    _OP_map_key_i8       : "map_key_i8",
+    _OP_map_key_i16      : "map_key_i16",
+    _OP_map_key_i32      : "map_key_i32",
+    _OP_map_key_i64      : "map_key_i64",
+    _OP_map_key_u8       : "map_key_u8",
+    _OP_map_key_u16      : "map_key_u16",
+    _OP_map_key_u32      : "map_key_u32",
+    _OP_map_key_u64      : "map_key_u64",
+    _OP_map_key_f32      : "map_key_f32",
+    _OP_map_key_f64      : "map_key_f64",
+    _OP_map_key_str      : "map_key_str",
+    _OP_map_key_utext    : "map_key_utext",
+    _OP_map_key_utext_p  : "map_key_utext_p",
+    _OP_array_skip       : "array_skip",
+    _OP_slice_init       : "slice_init",
+    _OP_slice_append     : "slice_append",
+    _OP_object_skip      : "object_skip",
+    _OP_object_next      : "object_next",
+    _OP_struct_field     : "struct_field",
+    _OP_unmarshal        : "unmarshal",
+    _OP_unmarshal_p      : "unmarshal_p",
+    _OP_unmarshal_text   : "unmarshal_text",
+    _OP_unmarshal_text_p : "unmarshal_text_p",
+    _OP_lspace           : "lspace",
+    _OP_match_char       : "match_char",
+    _OP_check_char       : "check_char",
+    _OP_load             : "load",
+    _OP_save             : "save",
+    _OP_drop             : "drop",
+    _OP_drop_2           : "drop_2",
+    _OP_recurse          : "recurse",
+    _OP_goto             : "goto",
+    _OP_switch           : "switch",
+    _OP_check_char_0     : "check_char_0",
+    _OP_dismatch_err     : "dismatch_err",
+    _OP_add              : "add",
+    _OP_go_skip          : "go_skip",
+    _OP_check_empty      : "check_empty",
+    _OP_debug            : "debug",
+}
+
+// String returns the mnemonic for the opcode, or "<invalid>" for values with
+// no _OpNames entry.
+func (self _Op) String() string {
+    if ret := _OpNames[self]; ret != "" {
+        return ret
+    } else {
+        return "<invalid>"
+    }
+}
+
+// The helpers below select the opcode matching the platform's word size
+// (int/uint/uintptr are 32- or 64-bit depending on the target).
+
+func _OP_int() _Op {
+    switch _INT_SIZE {
+        case 32: return _OP_i32
+        case 64: return _OP_i64
+        default: panic("unsupported int size")
+    }
+}
+
+func _OP_uint() _Op {
+    switch _INT_SIZE {
+        case 32: return _OP_u32
+        case 64: return _OP_u64
+        default: panic("unsupported uint size")
+    }
+}
+
+func _OP_uintptr() _Op {
+    switch _PTR_SIZE {
+        case 32: return _OP_u32
+        case 64: return _OP_u64
+        default: panic("unsupported pointer size")
+    }
+}
+
+func _OP_map_key_int() _Op {
+    switch _INT_SIZE {
+        case 32: return _OP_map_key_i32
+        case 64: return _OP_map_key_i64
+        default: panic("unsupported int size")
+    }
+}
+
+func _OP_map_key_uint() _Op {
+    switch _INT_SIZE {
+        case 32: return _OP_map_key_u32
+        case 64: return _OP_map_key_u64
+        default: panic("unsupported uint size")
+    }
+}
+
+func _OP_map_key_uintptr() _Op {
+    switch _PTR_SIZE {
+        case 32: return _OP_map_key_u32
+        case 64: return _OP_map_key_u64
+        default: panic("unsupported pointer size")
+    }
+}
+
+// _Instr is one decoder VM instruction, packed into 16 bytes: the opcode in
+// the top 8 bits of u, an optional byte operand in the next 8 bits, and an
+// integer operand in the low 48 bits; p carries a type-dependent pointer.
+type _Instr struct {
+    u uint64            // union {op: 8, vb: 8, vi: 48}, iv maybe int or len([]int)
+    p unsafe.Pointer    // maybe GoSlice.Data, *GoType or *caching.FieldMap
+}
+
+// packOp places the opcode in the top 8 bits of the packed word.
+func packOp(op _Op) uint64 {
+    return uint64(op) << 56
+}
+
+// newInsOp builds an instruction with no operands.
+func newInsOp(op _Op) _Instr {
+    return _Instr{u: packOp(op)}
+}
+
+// newInsVi builds an instruction with an integer operand (low 48 bits).
+func newInsVi(op _Op, vi int) _Instr {
+    return _Instr{u: packOp(op) | rt.PackInt(vi)}
+}
+
+// newInsVb builds an instruction with a byte operand (bits 48-55).
+func newInsVb(op _Op, vb byte) _Instr {
+    return _Instr{u: packOp(op) | (uint64(vb) << 48)}
+}
+
+// newInsVs builds an instruction holding a jump table: the slice length is
+// packed into u, the backing array pointer into p.
+func newInsVs(op _Op, vs []int) _Instr {
+    return _Instr {
+        u: packOp(op) | rt.PackInt(len(vs)),
+        p: (*rt.GoSlice)(unsafe.Pointer(&vs)).Ptr,
+    }
+}
+
+// newInsVt builds an instruction carrying a *rt.GoType operand.
+func newInsVt(op _Op, vt reflect.Type) _Instr {
+    return _Instr {
+        u: packOp(op),
+        p: unsafe.Pointer(rt.UnpackType(vt)),
+    }
+}
+
+// newInsVf builds an instruction carrying a *caching.FieldMap operand.
+func newInsVf(op _Op, vf *caching.FieldMap) _Instr {
+    return _Instr {
+        u: packOp(op),
+        p: unsafe.Pointer(vf),
+    }
+}
+
+// op extracts the opcode from the top 8 bits of u.
+func (self _Instr) op() _Op {
+    return _Op(self.u >> 56)
+}
+
+// vi extracts the 48-bit integer operand.
+func (self _Instr) vi() int {
+    return rt.UnpackInt(self.u)
+}
+
+// vb extracts the byte operand (bits 48-55).
+func (self _Instr) vb() byte {
+    return byte(self.u >> 48)
+}
+
+// vs reconstructs the jump-table slice from p (data) and vi (len == cap).
+func (self _Instr) vs() (v []int) {
+    (*rt.GoSlice)(unsafe.Pointer(&v)).Ptr = self.p
+    (*rt.GoSlice)(unsafe.Pointer(&v)).Cap = self.vi()
+    (*rt.GoSlice)(unsafe.Pointer(&v)).Len = self.vi()
+    return
+}
+
+// vf returns the field-map operand.
+func (self _Instr) vf() *caching.FieldMap {
+    return (*caching.FieldMap)(self.p)
+}
+
+// vk returns the kind of the type operand.
+func (self _Instr) vk() reflect.Kind {
+    return (*rt.GoType)(self.p).Kind()
+}
+
+// vt returns the type operand as a reflect.Type.
+func (self _Instr) vt() reflect.Type {
+    return (*rt.GoType)(self.p).Pack()
+}
+
+// i64 returns the integer operand widened to int64.
+func (self _Instr) i64() int64 {
+    return int64(self.vi())
+}
+
+// vlen returns the size in bytes of the type operand.
+func (self _Instr) vlen() int {
+    return int((*rt.GoType)(self.p).Size)
+}
+
+// isBranch reports whether the instruction's operand is a program-counter
+// target (used by the disassembler to collect label positions).
+func (self _Instr) isBranch() bool {
+    switch self.op() {
+        case _OP_goto          : fallthrough
+        case _OP_switch        : fallthrough
+        case _OP_is_null       : fallthrough
+        case _OP_is_null_quote : fallthrough
+        case _OP_check_char    : return true
+        default                : return false
+    }
+}
+
+// disassemble renders one instruction as human-readable text, formatting the
+// operand according to its kind (type, label, index, jump table, field map,
+// or character).
+func (self _Instr) disassemble() string {
+    switch self.op() {
+        case _OP_dyn              : fallthrough
+        case _OP_deref            : fallthrough
+        case _OP_map_key_i8       : fallthrough
+        case _OP_map_key_i16      : fallthrough
+        case _OP_map_key_i32      : fallthrough
+        case _OP_map_key_i64      : fallthrough
+        case _OP_map_key_u8       : fallthrough
+        case _OP_map_key_u16      : fallthrough
+        case _OP_map_key_u32      : fallthrough
+        case _OP_map_key_u64      : fallthrough
+        case _OP_map_key_f32      : fallthrough
+        case _OP_map_key_f64      : fallthrough
+        case _OP_map_key_str      : fallthrough
+        case _OP_map_key_utext    : fallthrough
+        case _OP_map_key_utext_p  : fallthrough
+        case _OP_slice_init       : fallthrough
+        case _OP_slice_append     : fallthrough
+        case _OP_unmarshal        : fallthrough
+        case _OP_unmarshal_p      : fallthrough
+        case _OP_unmarshal_text   : fallthrough
+        case _OP_unmarshal_text_p : fallthrough
+        case _OP_recurse          : return fmt.Sprintf("%-18s%s", self.op(), self.vt())
+        case _OP_goto             : fallthrough
+        case _OP_is_null_quote    : fallthrough
+        case _OP_is_null          : return fmt.Sprintf("%-18sL_%d", self.op(), self.vi())
+        case _OP_index            : fallthrough
+        case _OP_array_clear      : fallthrough
+        case _OP_array_clear_p    : return fmt.Sprintf("%-18s%d", self.op(), self.vi())
+        case _OP_switch           : return fmt.Sprintf("%-18s%s", self.op(), self.formatSwitchLabels())
+        case _OP_struct_field     : return fmt.Sprintf("%-18s%s", self.op(), self.formatStructFields())
+        case _OP_match_char       : return fmt.Sprintf("%-18s%s", self.op(), strconv.QuoteRune(rune(self.vb())))
+        case _OP_check_char       : return fmt.Sprintf("%-18sL_%d, %s", self.op(), self.vi(), strconv.QuoteRune(rune(self.vb())))
+        default                   : return self.op().String()
+    }
+}
+
+// formatSwitchLabels renders a switch instruction's jump table as
+// "0=L_x, 1=L_y, ...".
+func (self _Instr) formatSwitchLabels() string {
+    var i int
+    var v int
+    var m []string
+
+    /* format each label */
+    for i, v = range self.vs() {
+        m = append(m, fmt.Sprintf("%d=L_%d", i, v))
+    }
+
+    /* join them with "," */
+    return strings.Join(m, ", ")
+}
+
+// formatStructFields renders the occupied entries of a struct_field
+// instruction's field map as "name=id, ...", sorted by field name for
+// deterministic output.
+func (self _Instr) formatStructFields() string {
+    var i uint64
+    var r []string
+    var m []struct{i int; n string}
+
+    /* extract all the fields (Hash == 0 marks an empty slot) */
+    for i = 0; i < self.vf().N; i++ {
+        if v := self.vf().At(i); v.Hash != 0 {
+            m = append(m, struct{i int; n string}{i: v.ID, n: v.Name})
+        }
+    }
+
+    /* sort by field name */
+    sort.Slice(m, func(i, j int) bool {
+        return m[i].n < m[j].n
+    })
+
+    /* format each field */
+    for _, v := range m {
+        r = append(r, fmt.Sprintf("%s=%d", v.n, v.i))
+    }
+
+    /* join them with "," */
+    return strings.Join(r, ", ")
+}
+
+// _Program is the compiled instruction sequence executed (via the assembler)
+// to decode one Go type.
+type (
+    _Program []_Instr
+)
+
+// pc returns the index of the next instruction to be appended.
+func (self _Program) pc() int {
+    return len(self)
+}
+
+// tag panics when type nesting exceeds the decoder's stack capacity.
+func (self _Program) tag(n int) {
+    if n >= _MaxStack {
+        panic("type nesting too deep")
+    }
+}
+
+// pin back-patches the branch target of instruction i to the current pc,
+// preserving the op and vb bits and rewriting only the 48-bit operand.
+func (self _Program) pin(i int) {
+    v := &self[i]
+    v.u &= 0xffff000000000000
+    v.u |= rt.PackInt(self.pc())
+}
+
+// rel pins every instruction index in v to the current pc.
+func (self _Program) rel(v []int) {
+    for _, i := range v {
+        self.pin(i)
+    }
+}
+
+// The append helpers below add one instruction per operand shape:
+// add (no operand), int (integer), chr (byte), tab (jump table),
+// rtt (reflect.Type), fmv (*caching.FieldMap).
+
+func (self *_Program) add(op _Op) {
+    *self = append(*self, newInsOp(op))
+}
+
+func (self *_Program) int(op _Op, vi int) {
+    *self = append(*self, newInsVi(op, vi))
+}
+
+func (self *_Program) chr(op _Op, vb byte) {
+    *self = append(*self, newInsVb(op, vb))
+}
+
+func (self *_Program) tab(op _Op, vs []int) {
+    *self = append(*self, newInsVs(op, vs))
+}
+
+func (self *_Program) rtt(op _Op, vt reflect.Type) {
+    *self = append(*self, newInsVt(op, vt))
+}
+
+func (self *_Program) fmv(op _Op, vf *caching.FieldMap) {
+    *self = append(*self, newInsVf(op, vf))
+}
+
+// disassemble renders the whole program as text: a first pass collects every
+// branch target so labels ("L_n:") can be printed before their instruction,
+// then each instruction is disassembled in order, ending with "end".
+func (self _Program) disassemble() string {
+    nb  := len(self)
+    tab := make([]bool, nb + 1)
+    ret := make([]string, 0, nb + 1)
+
+    /* prescan to get all the labels */
+    for _, ins := range self {
+        if ins.isBranch() {
+            if ins.op() != _OP_switch {
+                tab[ins.vi()] = true
+            } else {
+                for _, v := range ins.vs() {
+                    tab[v] = true
+                }
+            }
+        }
+    }
+
+    /* disassemble each instruction */
+    for i, ins := range self {
+        if !tab[i] {
+            ret = append(ret, "\t" + ins.disassemble())
+        } else {
+            ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
+        }
+    }
+
+    /* add the last label, if needed */
+    if tab[nb] {
+        ret = append(ret, fmt.Sprintf("L_%d:", nb))
+    }
+
+    /* add an "end" indicator, and join all the strings */
+    return strings.Join(append(ret, "\tend"), "\n")
+}
+
+// _Compiler translates a reflect.Type into a _Program. tab tracks the types
+// currently on the compilation path (to break recursion with _OP_recurse);
+// rec tracks recursive types.
+type _Compiler struct {
+    opts option.CompileOptions
+    tab  map[reflect.Type]bool
+    rec  map[reflect.Type]bool
+}
+
+// newCompiler returns a compiler with default options and empty type tables.
+func newCompiler() *_Compiler {
+    return &_Compiler {
+        opts: option.DefaultCompileOptions(),
+        tab: map[reflect.Type]bool{},
+        rec: map[reflect.Type]bool{},
+    }
+}
+
+// apply overrides the compile options and returns the receiver for chaining.
+func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
+    self.opts = opts
+    return self
+}
+
+// rescue converts a panicking error into the named return of the caller;
+// non-error panic values are re-raised unchanged.
+func (self *_Compiler) rescue(ep *error) {
+    if val := recover(); val != nil {
+        if err, ok := val.(error); ok {
+            *ep = err
+        } else {
+            panic(val)
+        }
+    }
+}
+
+// compile is the entry point: it compiles vt into a program, reporting
+// compilation panics (e.g. unsupported types) as an error.
+func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) {
+    defer self.rescue(&err)
+    self.compileOne(&ret, 0, vt)
+    return
+}
+
+// checkMarshaler emits unmarshal-by-interface instructions when vt (or *vt)
+// implements json.Unmarshaler or encoding.TextUnmarshaler, returning true if
+// it handled the type. Pointer-receiver implementations take precedence.
+// NOTE(review): the first branch emits no _OP_lspace while the other three
+// do — presumably the unmarshal_p path skips whitespace itself; confirm
+// against the assembler before relying on it.
+func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type) bool {
+    pt := reflect.PtrTo(vt)
+
+    /* check for `json.Unmarshaler` with pointer receiver */
+    if pt.Implements(jsonUnmarshalerType) {
+        p.rtt(_OP_unmarshal_p, pt)
+        return true
+    }
+
+    /* check for `json.Unmarshaler` */
+    if vt.Implements(jsonUnmarshalerType) {
+        p.add(_OP_lspace)
+        self.compileUnmarshalJson(p, vt)
+        return true
+    }
+
+    /* check for `encoding.TextUnmarshaler` with pointer receiver */
+    if pt.Implements(encodingTextUnmarshalerType) {
+        p.add(_OP_lspace)
+        self.compileUnmarshalTextPtr(p, pt)
+        return true
+    }
+
+    /* check for `encoding.TextUnmarshaler` */
+    if vt.Implements(encodingTextUnmarshalerType) {
+        p.add(_OP_lspace)
+        self.compileUnmarshalText(p, vt)
+        return true
+    }
+    return false
+}
+
+// compileOne compiles a single type at stack depth sp. A type already on the
+// compilation path (self.tab) is emitted as _OP_recurse instead of being
+// inlined, which both breaks infinite recursion and bounds program size;
+// custom unmarshalers short-circuit ordinary compilation.
+func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
+    /* check for recursive nesting */
+    ok := self.tab[vt]
+    if ok {
+        p.rtt(_OP_recurse, vt)
+        return
+    }
+
+    if self.checkMarshaler(p, vt) {
+        return
+    }
+
+    /* enter the recursion: mark vt while compiling its interior */
+    p.add(_OP_lspace)
+    self.tab[vt] = true
+    self.compileOps(p, sp, vt)
+    delete(self.tab, vt)
+}
+
+// compileOps dispatches on the type's kind to the matching compile routine;
+// unsupported kinds (chan, func, complex, ...) panic with an
+// *json.UnmarshalTypeError, which compile() converts into an error return.
+func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
+    switch vt.Kind() {
+        case reflect.Bool      : self.compilePrimitive (vt, p, _OP_bool)
+        case reflect.Int       : self.compilePrimitive (vt, p, _OP_int())
+        case reflect.Int8      : self.compilePrimitive (vt, p, _OP_i8)
+        case reflect.Int16     : self.compilePrimitive (vt, p, _OP_i16)
+        case reflect.Int32     : self.compilePrimitive (vt, p, _OP_i32)
+        case reflect.Int64     : self.compilePrimitive (vt, p, _OP_i64)
+        case reflect.Uint      : self.compilePrimitive (vt, p, _OP_uint())
+        case reflect.Uint8     : self.compilePrimitive (vt, p, _OP_u8)
+        case reflect.Uint16    : self.compilePrimitive (vt, p, _OP_u16)
+        case reflect.Uint32    : self.compilePrimitive (vt, p, _OP_u32)
+        case reflect.Uint64    : self.compilePrimitive (vt, p, _OP_u64)
+        case reflect.Uintptr   : self.compilePrimitive (vt, p, _OP_uintptr())
+        case reflect.Float32   : self.compilePrimitive (vt, p, _OP_f32)
+        case reflect.Float64   : self.compilePrimitive (vt, p, _OP_f64)
+        case reflect.String    : self.compileString    (p, vt)
+        case reflect.Array     : self.compileArray     (p, sp, vt)
+        case reflect.Interface : self.compileInterface (p, vt)
+        case reflect.Map       : self.compileMap       (p, sp, vt)
+        case reflect.Ptr       : self.compilePtr       (p, sp, vt)
+        case reflect.Slice     : self.compileSlice     (p, sp, vt)
+        case reflect.Struct    : self.compileStruct    (p, sp, vt)
+        default                : panic                 (&json.UnmarshalTypeError{Type: vt})
+    }
+}
+
+// compileMap selects the key-decoding opcode: keys implementing
+// encoding.TextUnmarshaler (pointer receiver preferred) use the utext ops,
+// all other keys dispatch on their primitive kind via compileMapUt.
+func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
+    if reflect.PtrTo(vt.Key()).Implements(encodingTextUnmarshalerType) {
+        self.compileMapOp(p, sp, vt, _OP_map_key_utext_p)
+    } else if vt.Key().Implements(encodingTextUnmarshalerType) {
+        self.compileMapOp(p, sp, vt, _OP_map_key_utext)
+    } else {
+        self.compileMapUt(p, sp, vt)
+    }
+}
+
+// compileMapUt picks the key opcode for primitive map keys; key kinds that
+// JSON cannot represent as object keys panic with json.UnmarshalTypeError.
+func (self *_Compiler) compileMapUt(p *_Program, sp int, vt reflect.Type) {
+    switch vt.Key().Kind() {
+        case reflect.Int     : self.compileMapOp(p, sp, vt, _OP_map_key_int())
+        case reflect.Int8    : self.compileMapOp(p, sp, vt, _OP_map_key_i8)
+        case reflect.Int16   : self.compileMapOp(p, sp, vt, _OP_map_key_i16)
+        case reflect.Int32   : self.compileMapOp(p, sp, vt, _OP_map_key_i32)
+        case reflect.Int64   : self.compileMapOp(p, sp, vt, _OP_map_key_i64)
+        case reflect.Uint    : self.compileMapOp(p, sp, vt, _OP_map_key_uint())
+        case reflect.Uint8   : self.compileMapOp(p, sp, vt, _OP_map_key_u8)
+        case reflect.Uint16  : self.compileMapOp(p, sp, vt, _OP_map_key_u16)
+        case reflect.Uint32  : self.compileMapOp(p, sp, vt, _OP_map_key_u32)
+        case reflect.Uint64  : self.compileMapOp(p, sp, vt, _OP_map_key_u64)
+        case reflect.Uintptr : self.compileMapOp(p, sp, vt, _OP_map_key_uintptr())
+        case reflect.Float32 : self.compileMapOp(p, sp, vt, _OP_map_key_f32)
+        case reflect.Float64 : self.compileMapOp(p, sp, vt, _OP_map_key_f64)
+        case reflect.String  : self.compileMapOp(p, sp, vt, _OP_map_key_str)
+        default              : panic(&json.UnmarshalTypeError{Type: vt})
+    }
+}
+
+// compileMapOp emits the generic map-decoding loop: null check, '{' match
+// (with skip-on-mismatch), the first key/':'/value triple, then a loop over
+// the remaining pairs. Jump labels:
+//   i     -> input is "null": store nil via _OP_nil_1
+//   skip  -> leading char is not '{': mismatch recorded, value skipped
+//   j, k1 -> '}' seen: exit the pair loop
+//   k0    -> loop head for the 2nd..nth pair
+//   skip2/skip3 -> resume points after a mismatched key/value was skipped
+func (self *_Compiler) compileMapOp(p *_Program, sp int, vt reflect.Type, op _Op) {
+    i := p.pc()
+    p.add(_OP_is_null)
+    p.tag(sp + 1)
+    skip := self.checkIfSkip(p, vt, '{')
+    p.add(_OP_save)
+    p.add(_OP_map_init)
+    p.add(_OP_save)
+    p.add(_OP_lspace)
+    j := p.pc()
+    p.chr(_OP_check_char, '}')
+    p.chr(_OP_match_char, '"')
+    skip2 := p.pc()
+    p.rtt(op, vt)
+
+    /* match the value separator */
+    p.add(_OP_lspace)
+    p.chr(_OP_match_char, ':')
+    self.compileOne(p, sp + 2, vt.Elem())
+    p.pin(skip2)
+    p.add(_OP_load)
+    k0 := p.pc()
+    p.add(_OP_lspace)
+    k1 := p.pc()
+    p.chr(_OP_check_char, '}')
+    p.chr(_OP_match_char, ',')
+    p.add(_OP_lspace)
+    p.chr(_OP_match_char, '"')
+    skip3 := p.pc()
+    p.rtt(op, vt)
+
+    /* match the value separator */
+    p.add(_OP_lspace)
+    p.chr(_OP_match_char, ':')
+    self.compileOne(p, sp + 2, vt.Elem())
+    p.pin(skip3)
+    p.add(_OP_load)
+    p.int(_OP_goto, k0)
+    p.pin(j)
+    p.pin(k1)
+    p.add(_OP_drop_2)
+    x := p.pc()
+    p.add(_OP_goto)
+    p.pin(i)
+    p.add(_OP_nil_1)
+    p.pin(skip)
+    p.pin(x)
+}
+
+// compilePtr compiles a pointer type: each pointer level is dereferenced with
+// _OP_deref, stopping early when a level carries a custom unmarshaler; a
+// leading "null" stores nil via _OP_nil_1. The final element type is then
+// compiled inline, or as _OP_recurse if its compilation is already underway.
+func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
+    i := p.pc()
+    p.add(_OP_is_null)
+
+    /* dereference all the way down */
+    for et.Kind() == reflect.Ptr {
+        if self.checkMarshaler(p, et) {
+            return
+        }
+        et = et.Elem()
+        p.rtt(_OP_deref, et)
+    }
+
+    /* check for recursive nesting */
+    ok := self.tab[et]
+    if ok {
+        p.rtt(_OP_recurse, et)
+    } else {
+        /* enter the recursion */
+        p.add(_OP_lspace)
+        self.tab[et] = true
+
+        /* do not inline the pointer type:
+        * recursing the defined pointer type's elem would cause issue379.
+        */
+        self.compileOps(p, sp, et)
+    }
+    // NOTE(review): this delete also runs on the _OP_recurse branch above,
+    // clearing an in-progress mark set by an enclosing compileOne — confirm
+    // this is intended.
+    delete(self.tab, et)
+
+    j := p.pc()
+    p.add(_OP_goto)
+
+    // set val pointer as nil
+    p.pin(i)
+    p.add(_OP_nil_1)
+
+    // nothing to do
+    p.pin(j)
+}
+
+// compileArray decodes a fixed-length array: up to vt.Len() elements are
+// decoded in place, surplus input elements are discarded by _OP_array_skip,
+// and any remainder of the destination is zeroed with _OP_array_clear (or
+// the _p variant when the element type contains pointers, per
+// rt.UnpackType(vt.Elem()).PtrData).
+func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type) {
+    x := p.pc()
+    p.add(_OP_is_null)
+    p.tag(sp)
+    skip := self.checkIfSkip(p, vt, '[')
+    
+    p.add(_OP_save)
+    p.add(_OP_lspace)
+    v := []int{p.pc()}
+    p.chr(_OP_check_char, ']')
+
+    /* decode every item; v collects every ']'-check to patch at the end */
+    for i := 1; i <= vt.Len(); i++ {
+        self.compileOne(p, sp + 1, vt.Elem())
+        p.add(_OP_load)
+        p.int(_OP_index, i * int(vt.Elem().Size()))
+        p.add(_OP_lspace)
+        v = append(v, p.pc())
+        p.chr(_OP_check_char, ']')
+        p.chr(_OP_match_char, ',')
+    }
+
+    /* drop rest of the array */
+    p.add(_OP_array_skip)
+    w := p.pc()
+    p.add(_OP_goto)
+    p.rel(v)
+
+    /* check for pointer data */
+    if rt.UnpackType(vt.Elem()).PtrData == 0 {
+        p.int(_OP_array_clear, int(vt.Size()))
+    } else {
+        p.int(_OP_array_clear_p, int(vt.Size()))
+    }
+
+    /* restore the stack */
+    p.pin(w)
+    p.add(_OP_drop)
+
+    p.pin(skip)
+    p.pin(x)
+}
+
+// compileSlice routes byte-kinded element slices (the comparison is on Kind,
+// so named byte types qualify too) to the base64/binary path; every other
+// element type takes the generic list path.
+func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
+    if vt.Elem().Kind() == byteType.Kind() {
+        self.compileSliceBin(p, sp, vt)
+    } else {
+        self.compileSliceList(p, sp, vt)
+    }
+}
+
+// compileSliceBin decodes a byte slice that may arrive either as a base64
+// string (_OP_bin) or as a regular JSON array (the compileSliceBody path);
+// "null" stores a nil slice via _OP_nil_3.
+func (self *_Compiler) compileSliceBin(p *_Program, sp int, vt reflect.Type) {
+    i := p.pc()
+    p.add(_OP_is_null)
+    j := p.pc()
+    p.chr(_OP_check_char, '[')
+    skip := self.checkIfSkip(p, vt, '"')
+    k := p.pc()
+    p.chr(_OP_check_char, '"')
+    p.add(_OP_bin)
+    x := p.pc()
+    p.add(_OP_goto)
+    p.pin(j)
+    self.compileSliceBody(p, sp, vt.Elem())
+    y := p.pc()
+    p.add(_OP_goto)
+    p.pin(i)
+    p.pin(k)
+    p.add(_OP_nil_3)
+    p.pin(x)
+    p.pin(skip)
+    p.pin(y)
+}
+
+// compileSliceList decodes a generic slice from a JSON array; "null" stores
+// a nil slice via _OP_nil_3, a non-'[' leading char is skipped as a mismatch.
+func (self *_Compiler) compileSliceList(p *_Program, sp int, vt reflect.Type) {
+    i := p.pc()
+    p.add(_OP_is_null)
+    p.tag(sp)
+    skip := self.checkIfSkip(p, vt, '[')
+    self.compileSliceBody(p, sp, vt.Elem())
+    x := p.pc()
+    p.add(_OP_goto)
+    p.pin(i)
+    p.add(_OP_nil_3)
+    p.pin(x)
+    p.pin(skip)
+}
+
+// compileSliceBody emits the element loop shared by both slice paths: check
+// for an empty array (_OP_check_empty), initialize the slice, then
+// append-and-decode elements separated by ',' until the closing ']'.
+func (self *_Compiler) compileSliceBody(p *_Program, sp int, et reflect.Type) {
+    p.add(_OP_lspace)
+    j := p.pc()
+    p.chr(_OP_check_empty, ']')
+    p.rtt(_OP_slice_init, et)
+    p.add(_OP_save)
+    p.rtt(_OP_slice_append, et)
+    self.compileOne(p, sp + 1, et)
+    p.add(_OP_load)
+    k0 := p.pc()
+    p.add(_OP_lspace)
+    k1 := p.pc()
+    p.chr(_OP_check_char, ']')
+    p.chr(_OP_match_char, ',')
+    p.rtt(_OP_slice_append, et)
+    self.compileOne(p, sp + 1, et)
+    p.add(_OP_load)
+    p.int(_OP_goto, k0)
+    p.pin(k1)
+    p.add(_OP_drop)
+    p.pin(j)
+}
+
+// compileString decodes a string value; json.Number gets the numeric opcode
+// (_OP_num) instead of the quoted-string path.
+func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
+    if vt == jsonNumberType {
+        self.compilePrimitive(vt, p, _OP_num)
+    } else {
+        self.compileStringBody(vt, p)
+    }
+}
+
+// compileStringBody emits the quoted-string decode: null check, '"' match
+// (skip on mismatch), then _OP_str.
+func (self *_Compiler) compileStringBody(vt reflect.Type, p *_Program) {
+    i := p.pc()
+    p.add(_OP_is_null)
+    skip := self.checkIfSkip(p, vt, '"')
+    p.add(_OP_str)
+    p.pin(i)
+    p.pin(skip)
+}
+
+// compileStruct inlines the struct decoder unless the inline budget is
+// exhausted (depth >= MaxInlineDepth, program already >= _MAX_ILBUF, or a
+// nested struct with >= _MAX_FIELDS fields); over-budget types are emitted
+// as _OP_recurse and, when recursive pretouch is enabled, recorded in
+// self.rec so they can be compiled separately.
+func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
+    if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
+        p.rtt(_OP_recurse, vt)
+        if self.opts.RecursiveDepth > 0 {
+            self.rec[vt] = true
+        }
+    } else {
+        self.compileStructBody(p, sp, vt)
+    }
+}
+
+// compileStructBody emits the inline struct decoder: a field-name lookup
+// (_OP_struct_field against the caching.FieldMap fm) followed by a computed
+// jump (_OP_switch over sw) into per-field decode stubs; unknown fields go
+// through _OP_object_next. sw[i] is back-patched with the program counter of
+// field i's stub inside the loop below.
+func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
+    fv := resolver.ResolveStruct(vt)
+    fm, sw := caching.CreateFieldMap(len(fv)), make([]int, len(fv))
+
+    /* start of object */
+    p.tag(sp)
+    n := p.pc()
+    p.add(_OP_is_null)
+
+    skip := self.checkIfSkip(p, vt, '{')
+    
+    p.add(_OP_save)
+    p.add(_OP_lspace)
+    x := p.pc()
+    p.chr(_OP_check_char, '}')
+    p.chr(_OP_match_char, '"')
+    p.fmv(_OP_struct_field, fm)
+    p.add(_OP_lspace)
+    p.chr(_OP_match_char, ':')
+    p.tab(_OP_switch, sw)
+    p.add(_OP_object_next)
+    y0 := p.pc()
+    p.add(_OP_lspace)
+    y1 := p.pc()
+    p.chr(_OP_check_char, '}')
+    p.chr(_OP_match_char, ',')
+
+    /* special case of an empty struct */
+    if len(fv) == 0 {
+        p.add(_OP_object_skip)
+        goto end_of_object
+    }
+
+    /* match the remaining fields */
+    p.add(_OP_lspace)
+    p.chr(_OP_match_char, '"')
+    p.fmv(_OP_struct_field, fm)
+    p.add(_OP_lspace)
+    p.chr(_OP_match_char, ':')
+    p.tab(_OP_switch, sw)
+    p.add(_OP_object_next)
+    p.int(_OP_goto, y0)
+
+    /* process each field: record its stub pc and register its name */
+    for i, f := range fv {
+        sw[i] = p.pc()
+        fm.Set(f.Name, i)
+
+        /* index to the field, dereferencing embedded pointers on the way */
+        for _, o := range f.Path {
+            if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
+                p.rtt(_OP_deref, o.Type)
+            }
+        }
+
+        /* check for the "stringize" (`,string` tag) option */
+        if (f.Opts & resolver.F_stringize) == 0 {
+            self.compileOne(p, sp + 1, f.Type)
+        } else {
+            self.compileStructFieldStr(p, sp + 1, f.Type)
+        }
+
+        /* load the state, and try next field */
+        p.add(_OP_load)
+        p.int(_OP_goto, y0)
+    }
+
+end_of_object:
+    p.pin(x)
+    p.pin(y1)
+    p.add(_OP_drop)
+    p.pin(n)
+    p.pin(skip)
+}
+
+// compileStructFieldStr handles fields tagged with the ",string" option: the
+// value arrives wrapped in quotes and is re-parsed as the underlying
+// primitive. Kinds that cannot be stringized fall back to the regular path.
+// Both an outer "null" (label n0) and a quoted "null" (label n1) are
+// tolerated: for pointer fields they store nil (_OP_nil_1); for non-pointer
+// fields the value is left untouched.
+func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
+    n1 := -1
+    ft := vt
+    sv := false
+
+    /* dereference the pointer if needed */
+    if ft.Kind() == reflect.Ptr {
+        ft = ft.Elem()
+    }
+
+    /* check if it can be stringized */
+    switch ft.Kind() {
+        case reflect.Bool    : sv = true
+        case reflect.Int     : sv = true
+        case reflect.Int8    : sv = true
+        case reflect.Int16   : sv = true
+        case reflect.Int32   : sv = true
+        case reflect.Int64   : sv = true
+        case reflect.Uint    : sv = true
+        case reflect.Uint8   : sv = true
+        case reflect.Uint16  : sv = true
+        case reflect.Uint32  : sv = true
+        case reflect.Uint64  : sv = true
+        case reflect.Uintptr : sv = true
+        case reflect.Float32 : sv = true
+        case reflect.Float64 : sv = true
+        case reflect.String  : sv = true
+    }
+
+    /* if it's not, ignore the "string" and follow the regular path */
+    if !sv {
+        self.compileOne(p, sp, vt)
+        return
+    }
+
+    /* remove the leading space, and match the leading quote */
+    vk := vt.Kind()
+    p.add(_OP_lspace)
+    n0 := p.pc()
+    p.add(_OP_is_null)
+    
+    skip := self.checkIfSkip(p, stringType, '"')
+
+    /* also check for inner "null" */
+    n1 = p.pc()
+    p.add(_OP_is_null_quote)
+
+    /* dereference the pointer only when it is not null */
+    if vk == reflect.Ptr {
+        vt = vt.Elem()
+        p.rtt(_OP_deref, vt)
+    }
+
+    /* n2: a second quote right away means an empty string -> mismatch path */
+    n2 := p.pc()
+    p.chr(_OP_check_char_0, '"')
+
+    /* string opcode selector */
+    _OP_string := func() _Op {
+        if ft == jsonNumberType {
+            return _OP_num
+        } else {
+            return _OP_unquote
+        }
+    }
+
+    /* compile for each type */
+    switch vt.Kind() {
+        case reflect.Bool    : p.add(_OP_bool)
+        case reflect.Int     : p.add(_OP_int())
+        case reflect.Int8    : p.add(_OP_i8)
+        case reflect.Int16   : p.add(_OP_i16)
+        case reflect.Int32   : p.add(_OP_i32)
+        case reflect.Int64   : p.add(_OP_i64)
+        case reflect.Uint    : p.add(_OP_uint())
+        case reflect.Uint8   : p.add(_OP_u8)
+        case reflect.Uint16  : p.add(_OP_u16)
+        case reflect.Uint32  : p.add(_OP_u32)
+        case reflect.Uint64  : p.add(_OP_u64)
+        case reflect.Uintptr : p.add(_OP_uintptr())
+        case reflect.Float32 : p.add(_OP_f32)
+        case reflect.Float64 : p.add(_OP_f64)
+        case reflect.String  : p.add(_OP_string())
+        default              : panic("not reachable")
+    }
+
+    /* the closing quote is not needed when parsing a pure string */
+    if vt == jsonNumberType || vt.Kind() != reflect.String {
+        p.chr(_OP_match_char, '"')
+    }
+
+    /* pin the `is_null_quote` jump location */
+    if n1 != -1 && vk != reflect.Ptr {
+        p.pin(n1)
+    }
+
+    /* "null" but not a pointer, act as if the field is not present */
+    if vk != reflect.Ptr {
+        pc2 := p.pc()
+        p.add(_OP_goto)
+        p.pin(n2)
+        p.rtt(_OP_dismatch_err, vt)
+        p.int(_OP_add, 1)
+        p.pin(pc2)
+        p.pin(n0)
+        return
+    }
+
+    /* the "null" case of the pointer */
+    pc := p.pc()
+    p.add(_OP_goto)
+    p.pin(n0) // `is_null` jump location
+    p.pin(n1) // `is_null_quote` jump location
+    p.add(_OP_nil_1)
+    pc2 := p.pc()
+    p.add(_OP_goto)
+    p.pin(n2)
+    p.rtt(_OP_dismatch_err, vt)
+    p.int(_OP_add, 1)
+    p.pin(pc)
+    p.pin(pc2)
+    p.pin(skip)
+}
+
+// compileInterface decodes into an interface value: empty interfaces take
+// _OP_any (generic value decoding), non-empty ones _OP_dyn (dynamic dispatch
+// on the stored concrete type); "null" stores nil via _OP_nil_2.
+func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
+    i := p.pc()
+    p.add(_OP_is_null)
+
+    /* check for empty interface */
+    if vt.NumMethod() == 0 {
+        p.add(_OP_any)
+    } else {
+        p.rtt(_OP_dyn, vt)
+    }
+
+    /* finish the OpCode */
+    j := p.pc()
+    p.add(_OP_goto)
+    p.pin(i)
+    p.add(_OP_nil_2)
+    p.pin(j)
+}
+
+// compilePrimitive emits a null check followed by the single primitive
+// opcode op; on "null" the target is simply left untouched.
+func (self *_Compiler) compilePrimitive(vt reflect.Type, p *_Program, op _Op) {
+    i := p.pc()
+    p.add(_OP_is_null)
+    // skip := self.checkPrimitive(p, vt)
+    p.add(op)
+    p.pin(i)
+    // p.pin(skip)
+}
+
+// compileUnmarshalEnd pins the null-check jump i emitted before a custom
+// unmarshaler call: for pointer types "null" assigns nil (_OP_nil_1) and the
+// unmarshaler is skipped; for non-pointer types the null branch simply jumps
+// past the call, leaving the value untouched.
+func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int) {
+    j := p.pc()
+    k := vt.Kind()
+
+    /* not a pointer */
+    if k != reflect.Ptr {
+        p.pin(i)
+        return
+    }
+
+    /* it seems that in Go JSON library, "null" takes priority over any kind of unmarshaler */
+    p.add(_OP_goto)
+    p.pin(i)
+    p.add(_OP_nil_1)
+    p.pin(j)
+}
+
+// compileUnmarshalJson emits a json.Unmarshaler call; interface types use
+// _OP_dyn instead of _OP_unmarshal so dispatch happens on the dynamic type.
+func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
+    i := p.pc()
+    v := _OP_unmarshal
+    p.add(_OP_is_null)
+
+    /* check for dynamic interface */
+    if vt.Kind() == reflect.Interface {
+        v = _OP_dyn
+    }
+
+    /* call the unmarshaler */
+    p.rtt(v, vt)
+    self.compileUnmarshalEnd(p, vt, i)
+}
+
+// compileUnmarshalText emits an encoding.TextUnmarshaler call; non-interface
+// values must begin with a '"' (the text arrives as a JSON string), while
+// interface types dispatch dynamically via _OP_dyn.
+func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
+    i := p.pc()
+    v := _OP_unmarshal_text
+    p.add(_OP_is_null)
+
+    /* check for dynamic interface */
+    if vt.Kind() == reflect.Interface {
+        v = _OP_dyn
+    } else {
+        p.chr(_OP_match_char, '"')
+    }
+
+    /* call the unmarshaler */
+    p.rtt(v, vt)
+    self.compileUnmarshalEnd(p, vt, i)
+}
+
+// compileUnmarshalTextPtr emits a pointer-receiver TextUnmarshaler call;
+// "null" bypasses the call entirely (the pin lands after it).
+func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
+    i := p.pc()
+    p.add(_OP_is_null)
+    p.chr(_OP_match_char, '"')
+    p.rtt(_OP_unmarshal_text_p, vt)
+    p.pin(i)
+}
+
+// checkIfSkip matches the expected leading character c. On mismatch it
+// records a type-mismatch error (_OP_dismatch_err) and skips the whole value
+// (_OP_go_skip); on match the cursor advances by one. It returns the pc of
+// the skip instruction so the caller can pin it past the decoded value.
+func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
+    j := p.pc()
+    p.chr(_OP_check_char_0, c)
+    p.rtt(_OP_dismatch_err, vt)
+    s := p.pc()
+    p.add(_OP_go_skip)
+    p.pin(j)
+    p.int(_OP_add, 1)
+    return s
+}

+ 70 - 0
vendor/github.com/bytedance/sonic/internal/decoder/debug.go

@@ -0,0 +1,70 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package decoder
+
+import (
+	`os`
+	`runtime`
+	`runtime/debug`
+	`strings`
+
+	`github.com/bytedance/sonic/internal/jit`
+)
+
+
+var (
+    // debugSyncGC: set SONIC_SYNC_GC to force a GC between generated
+    // instructions (see debug_instr below).
+    debugSyncGC  = os.Getenv("SONIC_SYNC_GC") != ""
+    // debugAsyncGC: SONIC_NO_ASYNC_GC disables it; consumed elsewhere,
+    // not in this file.
+    debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
+)
+
+var (
+    // sentinel used as the "next" instruction of the program's last one
+    _Instr_End _Instr = newInsOp(_OP_nil_1)
+
+    // JIT-callable wrappers around the debug helpers
+    _F_gc       = jit.Func(runtime.GC)
+    _F_force_gc = jit.Func(debug.FreeOSMemory)
+    _F_println  = jit.Func(println_wrapper)
+    _F_print    = jit.Func(print)
+)
+
+// println_wrapper traces one instruction: its index, its opcode, and the
+// opcode of the following instruction (both resolved through _OpNames).
+func println_wrapper(i int, op1 int, op2 int){
+    println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
+}
+
+// print prints a bare integer; note this shadows the predeclared print
+// within the package.
+func print(i int){
+    println(i)
+}
+
+// force_gc emits calls to runtime.GC and debug.FreeOSMemory into the
+// generated code (debug builds only).
+func (self *_Assembler) force_gc() {
+    self.call_go(_F_gc)
+    self.call_go(_F_force_gc)
+}
+
+// debug_instr, active only when SONIC_SYNC_GC is set, emits a trace print of
+// the current and next instruction followed by a forced GC — except when the
+// next op's name contains "save", in which case the GC is skipped.
+func (self *_Assembler) debug_instr(i int, v *_Instr) {
+    if debugSyncGC {
+        if (i+1 == len(self.p)) {
+            self.print_gc(i, v, &_Instr_End) 
+        } else {
+            next := &(self.p[i+1])
+            self.print_gc(i, v, next)
+            name := _OpNames[next.op()]
+            if strings.Contains(name, "save") {
+                return
+            }
+        }
+        self.force_gc()
+    }
+}

+ 255 - 0
vendor/github.com/bytedance/sonic/internal/decoder/decoder.go

@@ -0,0 +1,255 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package decoder
+
+import (
+    `unsafe`
+    `encoding/json`
+    `reflect`
+    `runtime`
+
+    `github.com/bytedance/sonic/internal/native`
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/bytedance/sonic/option`
+    `github.com/bytedance/sonic/utf8`
+)
+
+// Bit positions of the per-Decoder feature flags stored in Decoder.f.
+const (
+    _F_use_int64       = 0
+    _F_disable_urc     = 2
+    _F_disable_unknown = 3
+    _F_copy_string     = 4
+
+    /* these bits alias flag constants shared with the native decoder */
+    _F_use_number      = types.B_USE_NUMBER
+    _F_validate_string = types.B_VALIDATE_STRING
+    _F_allow_control   = types.B_ALLOW_CONTROL
+)
+
+// Options is a bit set of decoding options; each bit mirrors one _F_* flag.
+type Options uint64
+
+const (
+    OptionUseInt64         Options = 1 << _F_use_int64
+    OptionUseNumber        Options = 1 << _F_use_number
+    OptionUseUnicodeErrors Options = 1 << _F_disable_urc
+    OptionDisableUnknown   Options = 1 << _F_disable_unknown
+    OptionCopyString       Options = 1 << _F_copy_string
+    OptionValidateString   Options = 1 << _F_validate_string
+)
+
+// SetOptions replaces the decoder's whole flag set. OptionUseInt64 and
+// OptionUseNumber are mutually exclusive; combining them panics.
+func (self *Decoder) SetOptions(opts Options) {
+    if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) {
+        panic("can't set OptionUseInt64 and OptionUseNumber both!")
+    }
+    self.f = uint64(opts)
+}
+
+
+// Decoder is the decoder context object
+type Decoder struct {
+    i int    // read cursor into s
+    f uint64 // feature flags (_F_* bits)
+    s string // source JSON text
+}
+
+// NewDecoder creates a new decoder instance reading from s.
+func NewDecoder(s string) *Decoder {
+    return &Decoder{s: s}
+}
+
+// Pos returns the current decoding position (byte offset into the source).
+func (self *Decoder) Pos() int {
+    return self.i
+}
+
+// Reset rebinds the decoder to a new source string and rewinds the cursor;
+// the flag set is not cleared (see the commented-out line).
+func (self *Decoder) Reset(s string) {
+    self.s = s
+    self.i = 0
+    // self.f = 0
+}
+
+// CheckTrailings verifies that only JSON whitespace (per the
+// types.SPACE_MASK bit mask) remains after the decoded value; any other
+// byte yields a SyntaxError with code ERR_INVALID_CHAR at that position.
+func (self *Decoder) CheckTrailings() error {
+    pos := self.i
+    buf := self.s
+    /* skip all the trailing spaces */
+    if pos != len(buf) {
+        for pos < len(buf) && (types.SPACE_MASK & (1 << buf[pos])) != 0 {
+            pos++
+        }
+    }
+
+    /* then it must be at EOF */
+    if pos == len(buf) {
+        return nil
+    }
+
+    /* junk after JSON value */
+    return SyntaxError {
+        Src  : buf,
+        Pos  : pos,
+        Code : types.ERR_INVALID_CHAR,
+    }
+}
+
+
+// Decode parses the JSON-encoded data from current position and stores the result
+// in the value pointed to by val. val must be a non-nil pointer, mirroring
+// encoding/json's InvalidUnmarshalError contract.
+func (self *Decoder) Decode(val interface{}) error {
+    /* validate json if needed: malformed UTF-8 is replaced with U+FFFD */
+    if (self.f & (1 << _F_validate_string)) != 0  && !utf8.ValidateString(self.s){
+        dbuf := utf8.CorrectWith(nil, rt.Str2Mem(self.s), "\ufffd")
+        self.s = rt.Mem2Str(dbuf)
+    }
+
+    vv := rt.UnpackEface(val)
+    vp := vv.Value
+
+    /* check for nil type */
+    if vv.Type == nil {
+        return &json.InvalidUnmarshalError{}
+    }
+
+    /* must be a non-nil pointer */
+    if vp == nil || vv.Type.Kind() != reflect.Ptr {
+        return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
+    }
+
+    etp := rt.PtrElem(vv.Type)
+
+    /* check the defined pointer type for issue 379: decode through an extra
+     * indirection so the named pointer type itself gets compiled */
+    if vv.Type.IsNamed() {
+        newp := vp
+        etp  = vv.Type
+        vp   = unsafe.Pointer(&newp)
+    }
+
+    /* create a new stack, and call the decoder */
+    sb := newStack()
+    nb, err := decodeTypedPointer(self.s, self.i, etp, vp, sb, self.f)
+    /* return the stack back */
+    self.i = nb
+    freeStack(sb)
+
+    /* avoid GC ahead: keep the unpacked eface alive through the decode */
+    runtime.KeepAlive(vv)
+    return err
+}
+
+// UseInt64 indicates the Decoder to unmarshal an integer into an interface{} as an
+// int64 instead of as a float64.
+func (self *Decoder) UseInt64() {
+    self.f  |= 1 << _F_use_int64
+    self.f &^= 1 << _F_use_number   // the two number modes are mutually exclusive
+}
+
+// UseNumber indicates the Decoder to unmarshal a number into an interface{} as a
+// json.Number instead of as a float64.
+func (self *Decoder) UseNumber() {
+    self.f &^= 1 << _F_use_int64    // the two number modes are mutually exclusive
+    self.f  |= 1 << _F_use_number
+}
+
+// UseUnicodeErrors indicates the Decoder to return an error when encountering
+// invalid UTF-8 escape sequences.
+func (self *Decoder) UseUnicodeErrors() {
+    self.f |= 1 << _F_disable_urc
+}
+
+// DisallowUnknownFields indicates the Decoder to return an error when the destination
+// is a struct and the input contains object keys which do not match any
+// non-ignored, exported fields in the destination.
+func (self *Decoder) DisallowUnknownFields() {
+    self.f |= 1 << _F_disable_unknown
+}
+
+// CopyString indicates the Decoder to decode string values by copying instead of
+// referring (i.e. decoded strings do not alias the input buffer).
+func (self *Decoder) CopyString() {
+    self.f |= 1 << _F_copy_string
+}
+
+// ValidateString causes the Decoder to validate string values when decoding string values
+// in JSON. Validation means returning an error when unescaped control chars (0x00-0x1f)
+// or invalid UTF-8 chars appear in the string value of JSON.
+func (self *Decoder) ValidateString() {
+    self.f |= 1 << _F_validate_string
+}
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+// a compile option to set the depth of recursive compile for the nested struct type.
+// Nested struct types deferred by the compiler are processed breadth-first
+// by pretouchRec, bounded by CompileOptions.RecursiveDepth.
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
+    cfg := option.DefaultCompileOptions()
+    for _, opt := range opts {
+        opt(&cfg)
+    }
+    return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
+}
+
+// pretouchType compiles and caches the decode program for _vt, unless it is
+// already cached (then it returns (nil, nil)). On a fresh compile it returns
+// compiler.rec — the struct types deferred via _OP_recurse — so the caller
+// can pretouch them in the next round.
+func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
+    /* compile function */
+    compiler := newCompiler().apply(opts)
+    decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
+        if pp, err := compiler.compile(_vt); err != nil {
+            return nil, err
+        } else {
+            as := newAssembler(pp)
+            as.name = _vt.String()
+            return as.Load(), nil
+        }
+    }
+
+    /* find or compile */
+    vt := rt.UnpackType(_vt)
+    if val := programCache.Get(vt); val != nil {
+        return nil, nil
+    } else if _, err := programCache.Compute(vt, decoder); err == nil {
+        return compiler.rec, nil
+    } else {
+        return nil, err
+    }
+}
+
+// pretouchRec compiles each type in vtm and recurses on the newly discovered
+// deferred types, decrementing RecursiveDepth each round; it stops when the
+// depth is exhausted (negative) or no new types remain.
+func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
+    if opts.RecursiveDepth < 0 || len(vtm) == 0 {
+        return nil
+    }
+    next := make(map[reflect.Type]bool)
+    for vt := range(vtm) {
+        sub, err := pretouchType(vt, opts)
+        if err != nil {
+            return err
+        }
+        for svt := range(sub) {
+            next[svt] = true
+        }
+    }
+    opts.RecursiveDepth -= 1
+    return pretouchRec(next, opts)
+}
+
+// Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid.
+// Otherwise, returns negative error code using start and invalid character position using end.
+// It delegates to the native SkipOne with a pooled parsing state machine.
+func Skip(data []byte) (start int, end int) {
+    s := rt.Mem2Str(data)
+    p := 0
+    m := types.NewStateMachine()
+    ret := native.SkipOne(&s, &p, m, uint64(0))
+    types.FreeStateMachine(m) 
+    return ret, p
+}

+ 191 - 0
vendor/github.com/bytedance/sonic/internal/decoder/errors.go

@@ -0,0 +1,191 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package decoder
+
+import (
+    `encoding/json`
+    `errors`
+    `fmt`
+    `reflect`
+    `strconv`
+    `strings`
+
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+)
+
+// SyntaxError describes a JSON syntax error at a byte offset of the source.
+type SyntaxError struct {
+    Pos  int                // byte offset of the error within Src
+    Src  string             // the full input being decoded
+    Code types.ParsingError // machine-readable error code
+    Msg  string             // optional text overriding Code (see Message)
+}
+
+// Error implements the error interface; the multi-line description is
+// %q-quoted into a single escaped line.
+func (self SyntaxError) Error() string {
+    return fmt.Sprintf("%q", self.Description())
+}
+
+// Description returns the human-readable, unquoted error report.
+func (self SyntaxError) Description() string {
+    return "Syntax error " + self.description()
+}
+
+// description renders the position, message, a source snippet around Pos
+// (bounds from calcBounds), and a dotted caret line pointing at the error.
+func (self SyntaxError) description() string {
+    /* check for empty source */
+    if self.Src == "" {
+        return fmt.Sprintf("no sources available: %#v", self)
+    }
+
+    p, x, q, y := calcBounds(len(self.Src), self.Pos)
+
+    /* compose the error description */
+    return fmt.Sprintf(
+        "at index %d: %s\n\n\t%s\n\t%s^%s\n",
+        self.Pos,
+        self.Message(),
+        self.Src[p:q],
+        strings.Repeat(".", x),
+        strings.Repeat(".", y),
+    )
+}
+
+// calcBounds computes a context window of at most 16 bytes on each side of
+// pos, clamped to [0, size). It returns the snippet bounds (lbound, rbound)
+// and the number of filler dots to print before (lwidth) and after (rwidth)
+// the caret. An out-of-range pos yields the whole source with no padding.
+func calcBounds(size int, pos int) (lbound int, lwidth int, rbound int, rwidth int) {
+    if pos >= size || pos < 0 {
+        return 0, 0, size, 0
+    }
+
+    i := 16
+    lbound = pos - i
+    rbound = pos + i
+
+    /* prevent slicing before the beginning */
+    if lbound < 0 {
+        lbound, rbound, i = 0, rbound - lbound, i + lbound
+    }
+
+    /* prevent slicing beyond the end */
+    if n := size; rbound > n {
+        n = rbound - n
+        rbound = size
+
+        /* move the left bound if possible */
+        if lbound > n {
+            i += n
+            lbound -= n
+        }
+    }
+
+    /* left and right length */
+    lwidth = clamp_zero(i)
+    rwidth = clamp_zero(rbound - lbound - i - 1)
+
+    return
+}
+
+// Message returns Msg when set, otherwise the standard text for Code.
+func (self SyntaxError) Message() string {
+    if self.Msg == "" {
+        return self.Code.Message()
+    }
+    return self.Msg
+}
+
+// clamp_zero clamps negative values to zero.
+func clamp_zero(v int) int {
+    if v < 0 {
+        return 0
+    } else {
+        return v
+    }
+}
+
+/** JIT Error Helpers **/
+
+// stackOverflow is a shared error value; presumably reported when JSON value
+// nesting exceeds the decoder's stack depth — its consumer is not in this file.
+var stackOverflow = &json.UnsupportedValueError {
+    Str   : "Value nesting too deep",
+    Value : reflect.ValueOf("..."),
+}
+
+// error_wrap builds a SyntaxError value; the composite-literal allocation is
+// delegated to the noinline helper below, presumably to keep this wrapper
+// itself inlineable at JIT call sites.
+func error_wrap(src string, pos int, code types.ParsingError) error {
+    return *error_wrap_heap(src, pos, code)
+}
+
+// error_wrap_heap allocates the SyntaxError; kept out-of-line (go:noinline)
+// so the allocation does not get inlined into callers.
+//go:noinline
+func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError {
+    return &SyntaxError {
+        Pos  : pos,
+        Src  : src,
+        Code : code,
+    }
+}
+
+// error_type wraps an unsupported destination type into the standard
+// json.UnmarshalTypeError.
+func error_type(vt *rt.GoType) error {
+    return &json.UnmarshalTypeError{Type: vt.Pack()}
+}
+
+// MismatchTypeError reports a JSON value whose type does not match the
+// destination Go type at a given position.
+type MismatchTypeError struct {
+    Pos  int          // byte offset of the mismatched value within Src
+    Src  string       // the full input being decoded
+    Type reflect.Type // the expected destination type
+}
+
+// swithchJSONType (sic — name carries an upstream typo) names the JSON value
+// type starting at src[pos] from its first byte. Note that 'n' ("null") and
+// any unexpected byte fall through all cases and yield the empty string.
+func swithchJSONType (src string, pos int) string {
+    var val string
+    switch src[pos] {
+        case 'f': fallthrough
+        case 't': val = "bool"
+        case '"': val = "string"
+        case '{': val = "object"
+        case '[': val = "array"
+        case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': val = "number"        
+    }
+    return val
+}
+
+// Error renders the mismatch with a %q-quoted syntax context; compare
+// Description, which leaves the context unquoted.
+func (self MismatchTypeError) Error() string {
+    se := SyntaxError {
+        Pos  : self.Pos,
+        Src  : self.Src,
+        Code : types.ERR_MISMATCH,
+    }
+    return fmt.Sprintf("Mismatch type %s with value %s %q", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
+}
+
+// Description is the unquoted, multi-line variant of Error.
+func (self MismatchTypeError) Description() string {
+    se := SyntaxError {
+        Pos  : self.Pos,
+        Src  : self.Src,
+        Code : types.ERR_MISMATCH,
+    }
+    return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
+}
+
+// error_mismatch constructs a MismatchTypeError for the JIT decoder.
+func error_mismatch(src string, pos int, vt *rt.GoType) error {
+    return &MismatchTypeError {
+        Pos  : pos,
+        Src  : src,
+        Type : vt.Pack(),
+    }
+}
+
+// error_field reports an unknown object key (used with DisallowUnknownFields).
+func error_field(name string) error {
+    return errors.New("json: unknown field " + strconv.Quote(name))
+}
+
+// error_value wraps a value/type mismatch into json.UnmarshalTypeError.
+func error_value(value string, vtype reflect.Type) error {
+    return &json.UnmarshalTypeError {
+        Type  : vtype,
+        Value : value,
+    }
+}

+ 729 - 0
vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go

@@ -0,0 +1,729 @@
+// +build go1.17,!go1.23
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package decoder
+
+import (
+    `encoding/json`
+    `fmt`
+    `reflect`
+
+    `github.com/bytedance/sonic/internal/jit`
+    `github.com/bytedance/sonic/internal/native`
+    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/twitchyliquid64/golang-asm/obj`
+)
+
/** Crucial Registers:
 *
 *      ST(R13) and 0(SP) : ro, decoder stack
 *      DF(AX)  : ro, decoder flags
 *      EP(BX)  : wo, error pointer
 *      IP(R10) : ro, input pointer
 *      IL(R12) : ro, input length
 *      IC(R11) : rw, input cursor
 *      VP(R15) : ro, value pointer (to an interface{})
 */
+
// Stack-frame budget of the generated decode_value routine (all in bytes).
const (
    _VD_args   = 8      // 8 bytes  for passing arguments to this function
    _VD_fargs  = 64     // 64 bytes for passing arguments to other Go functions
    _VD_saves  = 48     // 48 bytes for saving the registers before CALL instructions
    _VD_locals = 96     // 96 bytes for local variables
)
+
// Total frame size: the argument, save and local areas, plus space for the
// saved parent frame pointer.
const (
    _VD_offs = _VD_fargs + _VD_saves + _VD_locals
    _VD_size = _VD_offs + 8     // 8 bytes for the parent frame pointer
)
+
// Frame slots in the local area: _VAR_df holds the decoder flags, and
// _VAR_ss aliases the start of the native scanner state (its first field,
// Vt, declared below).
var (
    _VAR_ss = _VAR_ss_Vt
    _VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
)
+
// Frame slots for the fields of the native scanner state passed to the
// `value` parser. Per their use in compile(): Vt is the parsed value type,
// Iv the integer value / string offset, Ep the error/escape position, and
// Db/Dc the digit buffer pointer and capacity. NOTE(review): Dv is presumed
// to be the double value (see "_decode_V_DOUBLE") — confirm against the
// native state layout.
var (
    _VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
    _VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
    _VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
    _VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
    _VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
    _VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
)
+
// Scratch slot used to preserve R9 across the C `unquote` call (see the
// "_unquote" section of compile()).
var (
    _VAR_R9 = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
)
// _ValueDecoder assembles the JIT'd generic-value decoding routine
// ("decode_value"); all emission logic lives in compile().
type _ValueDecoder struct {
    jit.BaseAssembler
}
+
// Additional frame slots at the top of the local area. NOTE(review): the
// names (LR/p/n/d) suggest a C-call shim's saved link register and
// pointer/length/data arguments, but they are not referenced in the visible
// part of this file — confirm usage before changing the layout.
var (
    _VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
    _VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
    _VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
    _VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 88)
)
+
// build assembles the generic value decoder and returns the entry address of
// the generated "decode_value" function.
func (self *_ValueDecoder) build() uintptr {
    self.Init(self.compile)
    return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
}
+
+/** Function Calling Helpers **/
+
+func (self *_ValueDecoder) save(r ...obj.Addr) {
+    for i, v := range r {
+        if i > _VD_saves / 8 - 1 {
+            panic("too many registers to save")
+        } else {
+            self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
+        }
+    }
+}
+
+func (self *_ValueDecoder) load(r ...obj.Addr) {
+    for i, v := range r {
+        if i > _VD_saves / 8 - 1 {
+            panic("too many registers to load")
+        } else {
+            self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
+        }
+    }
+}
+
// call emits an indirect call to fn, using R9 as the scratch register for
// the target address.
func (self *_ValueDecoder) call(fn obj.Addr) {
    self.Emit("MOVQ", fn, _R9)  // MOVQ ${fn}, R9
    self.Rjmp("CALL", _R9)      // CALL R9
}
+
// call_go invokes a Go function, saving and restoring the Go-ABI register
// set around the call since the callee may clobber it.
func (self *_ValueDecoder) call_go(fn obj.Addr) {
    self.save(_REG_go...)   // SAVE $REG_go
    self.call(fn)           // CALL ${fn}
    self.load(_REG_go...)   // LOAD $REG_go
}
+
// callc invokes a (non-Go) function while preserving only the input pointer
// (IP) across the call.
func (self *_ValueDecoder) callc(fn obj.Addr) {
    self.save(_IP)  // SAVE IP across the call
    self.call(fn)
    self.load(_IP)  // LOAD IP back
}
+
// call_c invokes a C function via callc, swapping IC with BX before and
// after the call. NOTE(review): presumably BX is an argument/clobber
// register in the target C calling convention while IC must survive —
// confirm against sonic's native ABI before changing.
func (self *_ValueDecoder) call_c(fn obj.Addr) {
    self.Emit("XCHGQ", _IC, _BX)
    self.callc(fn)
    self.Emit("XCHGQ", _IC, _BX)
}
+
+/** Decoder Assembler **/
+
// Parser state-machine states; one state per nesting level is kept in the
// ST.Vt stack by compile().
const (
    _S_val = iota + 1       // expecting a value
    _S_arr                  // inside an array, an element has been parsed
    _S_arr_0                // inside an array, before the first element
    _S_obj                  // inside an object, a key/value pair has been parsed
    _S_obj_0                // inside an object, before the first key
    _S_obj_delim            // after an object key, expecting ':'
    _S_obj_sep              // after ',' in an object, expecting the next key
)
+
// Bitmask tests over the current state (used with BTQ in compile()).
const (
    _S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)  // states where a string is an object key
    _S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)      // states where '}' is legal
    _S_vmask = (1 << _S_val) | (1 << _S_arr_0)          // states where a plain value is legal
)
+
// Initial length and capacity of newly created []interface{} values.
const (
    _A_init_len = 1
    _A_init_cap = 16
)
+
// Byte offsets of the FSM fields relative to ST: the stack pointer (Sp),
// the state array (Vt), and the value-pointer array (Vp).
const (
    _ST_Sp = 0
    _ST_Vt = _PtrBytes
    _ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
)
+
// Immediates for the pre-boxed true/false values and the entry point of the
// native `value` parser.
var (
    _V_true  = jit.Imm(int64(pbool(true)))
    _V_false = jit.Imm(int64(pbool(false)))
    _F_value = jit.Imm(int64(native.S_value))
)
+
// Immediates for the maximum value-type tag and the error codes raised by
// the generated code.
var (
    _V_max     = jit.Imm(int64(types.V_MAX))
    _E_eof     = jit.Imm(int64(types.ERR_EOF))
    _E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
    _E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
)
+
// Go helper functions called from the generated code (interface boxing and
// invalid-vtype reporting).
var (
    _F_convTslice    = jit.Func(convTslice)
    _F_convTstring   = jit.Func(convTstring)
    _F_invalid_vtype = jit.Func(invalid_vtype)
)
+
// Cached runtime type descriptors for every concrete type a generic decode
// can produce.
var (
    _T_map     = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
    _T_bool    = jit.Type(reflect.TypeOf(false))
    _T_int64   = jit.Type(reflect.TypeOf(int64(0)))
    _T_eface   = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
    _T_slice   = jit.Type(reflect.TypeOf(([]interface{})(nil)))
    _T_string  = jit.Type(reflect.TypeOf(""))
    _T_number  = jit.Type(reflect.TypeOf(json.Number("")))
    _T_float64 = jit.Type(reflect.TypeOf(float64(0)))
)
+
// _R_tab maps structural JSON characters to the assembler labels handling
// them; it presumably seeds the "_decode_tab" jump table, whose zero entries
// fall through to the native parser (see "_decode_fast" in compile()).
var _R_tab = map[int]string {
    '[': "_decode_V_ARRAY",
    '{': "_decode_V_OBJECT",
    ':': "_decode_V_KEY_SEP",
    ',': "_decode_V_ELEM_SEP",
    ']': "_decode_V_ARRAY_END",
    '}': "_decode_V_OBJECT_END",
}
+
+func (self *_ValueDecoder) compile() {
+    self.Emit("SUBQ", jit.Imm(_VD_size), _SP)       // SUBQ $_VD_size, SP
+    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs))  // MOVQ BP, _VD_offs(SP)
+    self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP)  // LEAQ _VD_offs(SP), BP
+
+    /* initialize the state machine */
+    self.Emit("XORL", _CX, _CX)                                 // XORL CX, CX
+    self.Emit("MOVQ", _DF, _VAR_df)                             // MOVQ DF, df
+    /* initialize digital buffer first */
+    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc)       // MOVQ $_MaxDigitNums, ss.Dcap
+    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)           // LEAQ _DbufOffset(ST), AX
+    self.Emit("MOVQ", _AX, _VAR_ss_Db)                          // MOVQ AX, ss.Dbuf
+    /* add ST offset */
+    self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST)                 // ADDQ _FsmOffset, _ST
+    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp))                // MOVQ CX, ST.Sp
+    self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false)                // MOVQ VP, ST.Vp[0]
+    self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt))    // MOVQ _S_val, ST.Vt[0]
+    self.Sjmp("JMP" , "_next")                                  // JMP  _next
+
+    /* set the value from previous round */
+    self.Link("_set_value")                                 // _set_value:
+    self.Emit("MOVL" , jit.Imm(_S_vmask), _DX)              // MOVL  _S_vmask, DX
+    self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX)           // MOVQ  ST.Sp, CX
+    self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX)   // MOVQ  ST.Vt[CX], AX
+    self.Emit("BTQ"  , _AX, _DX)                            // BTQ   AX, DX
+    self.Sjmp("JNC"  , "_vtype_error")                      // JNC   _vtype_error
+    self.Emit("XORL" , _SI, _SI)                            // XORL  SI, SI
+    self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp))    // SUBQ  $1, ST.Sp
+    self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI)   // XCHGQ ST.Vp[CX], SI
+    self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0))                // MOVQ  R8, (SI)
+    self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false)           // MOVQ  R9, 8(SI)
+
+    /* check for value stack */
+    self.Link("_next")                              // _next:
+    self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX)   // MOVQ  ST.Sp, AX
+    self.Emit("TESTQ", _AX, _AX)                    // TESTQ AX, AX
+    self.Sjmp("JS"   , "_return")                   // JS    _return
+
+    /* fast path: test up to 4 characters manually */
+    self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
+    self.Sjmp("JAE"    , "_decode_V_EOF")               // JAE     _decode_V_EOF
+    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
+    self.Emit("MOVQ"   , jit.Imm(_BM_space), _DX)       // MOVQ    _BM_space, DX
+    self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
+    self.Sjmp("JA"     , "_decode_fast")                // JA      _decode_fast
+    self.Emit("BTQ"    , _AX, _DX)                      // BTQ     _AX, _DX
+    self.Sjmp("JNC"    , "_decode_fast")                // JNC     _decode_fast
+    self.Emit("ADDQ"   , jit.Imm(1), _IC)               // ADDQ    $1, IC
+
+    /* at least 1 to 3 spaces */
+    for i := 0; i < 3; i++ {
+        self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
+        self.Sjmp("JAE"    , "_decode_V_EOF")               // JAE     _decode_V_EOF
+        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
+        self.Emit("CMPQ"   , _AX, jit.Imm(' '))             // CMPQ    AX, $' '
+        self.Sjmp("JA"     , "_decode_fast")                // JA      _decode_fast
+        self.Emit("BTQ"    , _AX, _DX)                      // BTQ     _AX, _DX
+        self.Sjmp("JNC"    , "_decode_fast")                // JNC     _decode_fast
+        self.Emit("ADDQ"   , jit.Imm(1), _IC)               // ADDQ    $1, IC
+    }
+
+    /* at least 4 spaces */
+    self.Emit("CMPQ"   , _IC, _IL)                      // CMPQ    IC, IL
+    self.Sjmp("JAE"    , "_decode_V_EOF")               // JAE     _decode_V_EOF
+    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)  // MOVBQZX (IP)(IC), AX
+
+    /* fast path: use lookup table to select decoder */
+    self.Link("_decode_fast")                           // _decode_fast:
+    self.Byte(0x48, 0x8d, 0x3d)                         // LEAQ    ?(PC), DI
+    self.Sref("_decode_tab", 4)                         // ....    &_decode_tab
+    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)  // MOVLQSX (DI)(AX*4), AX
+    self.Emit("TESTQ"  , _AX, _AX)                      // TESTQ   AX, AX
+    self.Sjmp("JZ"     , "_decode_native")              // JZ      _decode_native
+    self.Emit("ADDQ"   , jit.Imm(1), _IC)               // ADDQ    $1, IC
+    self.Emit("ADDQ"   , _DI, _AX)                      // ADDQ    DI, AX
+    self.Rjmp("JMP"    , _AX)                           // JMP     AX
+
+    /* decode with native decoder */
+    self.Link("_decode_native")         // _decode_native:
+    self.Emit("MOVQ", _IP, _DI)         // MOVQ IP, DI
+    self.Emit("MOVQ", _IL, _SI)         // MOVQ IL, SI
+    self.Emit("MOVQ", _IC, _DX)         // MOVQ IC, DX
+    self.Emit("LEAQ", _VAR_ss, _CX)     // LEAQ ss, CX
+    self.Emit("MOVQ", _VAR_df, _R8)     // MOVQ $df, R8
+    self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8)  // ANDQ $1<<_F_allow_control, R8
+    self.callc(_F_value)                // CALL value
+    self.Emit("MOVQ", _AX, _IC)         // MOVQ AX, IC
+
+    /* check for errors */
+    self.Emit("MOVQ" , _VAR_ss_Vt, _AX)     // MOVQ  ss.Vt, AX
+    self.Emit("TESTQ", _AX, _AX)            // TESTQ AX, AX
+    self.Sjmp("JS"   , "_parsing_error")       
+    self.Sjmp("JZ"   , "_invalid_vtype")    // JZ    _invalid_vtype
+    self.Emit("CMPQ" , _AX, _V_max)         // CMPQ  AX, _V_max
+    self.Sjmp("JA"   , "_invalid_vtype")    // JA    _invalid_vtype
+
+    /* jump table selector */
+    self.Byte(0x48, 0x8d, 0x3d)                             // LEAQ    ?(PC), DI
+    self.Sref("_switch_table", 4)                           // ....    &_switch_table
+    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX)     // MOVLQSX -4(DI)(AX*4), AX
+    self.Emit("ADDQ"   , _DI, _AX)                          // ADDQ    DI, AX
+    self.Rjmp("JMP"    , _AX)                               // JMP     AX
+
+    /** V_EOF **/
+    self.Link("_decode_V_EOF")          // _decode_V_EOF:
+    self.Emit("MOVL", _E_eof, _EP)      // MOVL _E_eof, EP
+    self.Sjmp("JMP" , "_error")         // JMP  _error
+
+    /** V_NULL **/
+    self.Link("_decode_V_NULL")                 // _decode_V_NULL:
+    self.Emit("XORL", _R8, _R8)                 // XORL R8, R8
+    self.Emit("XORL", _R9, _R9)                 // XORL R9, R9
+    self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI)    // LEAQ -4(IC), DI
+    self.Sjmp("JMP" , "_set_value")             // JMP  _set_value
+
+    /** V_TRUE **/
+    self.Link("_decode_V_TRUE")                 // _decode_V_TRUE:
+    self.Emit("MOVQ", _T_bool, _R8)             // MOVQ _T_bool, R8
+    // TODO: maybe modified by users?
+    self.Emit("MOVQ", _V_true, _R9)             // MOVQ _V_true, R9 
+    self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI)    // LEAQ -4(IC), DI
+    self.Sjmp("JMP" , "_set_value")             // JMP  _set_value
+
+    /** V_FALSE **/
+    self.Link("_decode_V_FALSE")                // _decode_V_FALSE:
+    self.Emit("MOVQ", _T_bool, _R8)             // MOVQ _T_bool, R8
+    self.Emit("MOVQ", _V_false, _R9)            // MOVQ _V_false, R9
+    self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI)    // LEAQ -5(IC), DI
+    self.Sjmp("JMP" , "_set_value")             // JMP  _set_value
+
+    /** V_ARRAY **/
+    self.Link("_decode_V_ARRAY")                            // _decode_V_ARRAY
+    self.Emit("MOVL", jit.Imm(_S_vmask), _DX)               // MOVL _S_vmask, DX
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)            // MOVQ ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX)    // MOVQ ST.Vt[CX], AX
+    self.Emit("BTQ" , _AX, _DX)                             // BTQ  AX, DX
+    self.Sjmp("JNC" , "_invalid_char")                      // JNC  _invalid_char
+
+    /* create a new array */
+    self.Emit("MOVQ", _T_eface, _AX)                            // MOVQ    _T_eface, AX
+    self.Emit("MOVQ", jit.Imm(_A_init_len), _BX)                // MOVQ    _A_init_len, BX
+    self.Emit("MOVQ", jit.Imm(_A_init_cap), _CX)                // MOVQ    _A_init_cap, CX
+    self.call_go(_F_makeslice)                                  // CALL_GO runtime.makeslice
+
+    /* pack into an interface */
+    self.Emit("MOVQ", jit.Imm(_A_init_len), _BX)                // MOVQ    _A_init_len, BX
+    self.Emit("MOVQ", jit.Imm(_A_init_cap), _CX)                // MOVQ    _A_init_cap, CX
+    self.call_go(_F_convTslice)                                 // CALL_GO runtime.convTslice
+    self.Emit("MOVQ", _AX, _R8)                                 // MOVQ    AX, R8
+
+    /* replace current state with an array */
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)                        // MOVQ ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI)                // MOVQ ST.Vp[CX], SI
+    self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt))    // MOVQ _S_arr, ST.Vt[CX]
+    self.Emit("MOVQ", _T_slice, _AX)                                    // MOVQ _T_slice, AX
+    self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0))                             // MOVQ AX, (SI)
+    self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false)                  // MOVQ R8, 8(SI)
+
+    /* add a new slot for the first element */
+    self.Emit("ADDQ", jit.Imm(1), _CX)                                  // ADDQ $1, CX
+    self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE))                  // CMPQ CX, ${types.MAX_RECURSE}
+    self.Sjmp("JAE"  , "_stack_overflow")                                // JA   _stack_overflow
+    self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX)                             // MOVQ (R8), AX
+    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp))                        // MOVQ CX, ST.Sp
+    self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false)             // MOVQ AX, ST.Vp[CX]
+    self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt))  // MOVQ _S_arr_0, ST.Vt[CX]
+    self.Sjmp("JMP" , "_next")                                          // JMP  _next
+
+    /** V_OBJECT **/
+    self.Link("_decode_V_OBJECT")                                       // _decode_V_OBJECT:
+    self.Emit("MOVL", jit.Imm(_S_vmask), _DX)                           // MOVL    _S_vmask, DX
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)                        // MOVQ    ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX)                // MOVQ    ST.Vt[CX], AX
+    self.Emit("BTQ" , _AX, _DX)                                         // BTQ     AX, DX
+    self.Sjmp("JNC" , "_invalid_char")                                  // JNC     _invalid_char
+    self.call_go(_F_makemap_small)                                      // CALL_GO runtime.makemap_small
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)                        // MOVQ    ST.Sp, CX
+    self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt))    // MOVQ    _S_obj_0, ST.Vt[CX]
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI)                // MOVQ    ST.Vp[CX], SI
+    self.Emit("MOVQ", _T_map, _DX)                                      // MOVQ    _T_map, DX
+    self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0))                             // MOVQ    DX, (SI)
+    self.WritePtrAX(4, jit.Ptr(_SI, 8), false)                          // MOVQ    AX, 8(SI)
+    self.Sjmp("JMP" , "_next")                                          // JMP     _next
+
+    /** V_STRING **/
+    self.Link("_decode_V_STRING")       // _decode_V_STRING:
+    self.Emit("MOVQ", _VAR_ss_Iv, _CX)  // MOVQ ss.Iv, CX
+    self.Emit("MOVQ", _IC, _AX)         // MOVQ IC, AX
+    self.Emit("SUBQ", _CX, _AX)         // SUBQ CX, AX
+
+    /* check for escapes */
+    self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1))          // CMPQ ss.Ep, $-1
+    self.Sjmp("JNE" , "_unquote")                       // JNE  _unquote
+    self.Emit("SUBQ", jit.Imm(1), _AX)                  // SUBQ $1, AX
+    self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8)     // LEAQ (IP)(CX), R8
+    self.Byte(0x48, 0x8d, 0x3d)                         // LEAQ (PC), DI
+    self.Sref("_copy_string_end", 4)
+    self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
+    self.Sjmp("JC", "copy_string")
+    self.Link("_copy_string_end")                                 
+    self.Emit("XORL", _DX, _DX)   
+
+    /* strings with no escape sequences */
+    self.Link("_noescape")                                  // _noescape:
+    self.Emit("MOVL", jit.Imm(_S_omask_key), _DI)               // MOVL _S_omask, DI
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)            // MOVQ ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI)    // MOVQ ST.Vt[CX], SI
+    self.Emit("BTQ" , _SI, _DI)                             // BTQ  SI, DI
+    self.Sjmp("JC"  , "_object_key")                        // JC   _object_key
+
+    /* check for pre-packed strings, avoid 1 allocation */
+    self.Emit("TESTQ", _DX, _DX)                // TESTQ   DX, DX
+    self.Sjmp("JNZ"  , "_packed_str")           // JNZ     _packed_str
+    self.Emit("MOVQ" , _AX, _BX)                // MOVQ    AX, BX
+    self.Emit("MOVQ" , _R8, _AX)                // MOVQ    R8, AX
+    self.call_go(_F_convTstring)                // CALL_GO runtime.convTstring
+    self.Emit("MOVQ" , _AX, _R9)                // MOVQ    AX, R9
+
+    /* packed string already in R9 */
+    self.Link("_packed_str")            // _packed_str:
+    self.Emit("MOVQ", _T_string, _R8)   // MOVQ _T_string, R8
+    self.Emit("MOVQ", _VAR_ss_Iv, _DI)  // MOVQ ss.Iv, DI
+    self.Emit("SUBQ", jit.Imm(1), _DI)  // SUBQ $1, DI
+    self.Sjmp("JMP" , "_set_value")     // JMP  _set_value
+
+    /* the string is an object key, get the map */
+    self.Link("_object_key")
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)            // MOVQ ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI)    // MOVQ ST.Vp[CX], SI
+    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI)                 // MOVQ 8(SI), SI
+
+    /* add a new delimiter */
+    self.Emit("ADDQ", jit.Imm(1), _CX)                                      // ADDQ $1, CX
+    self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE))                      // CMPQ CX, ${types.MAX_RECURSE}
+    self.Sjmp("JAE"  , "_stack_overflow")                                    // JA   _stack_overflow
+    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp))                            // MOVQ CX, ST.Sp
+    self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt))  // MOVQ _S_obj_delim, ST.Vt[CX]
+
+    /* add a new slot int the map */
+    self.Emit("MOVQ", _AX, _DI)                         // MOVQ    AX, DI
+    self.Emit("MOVQ", _T_map, _AX)                      // MOVQ    _T_map, AX
+    self.Emit("MOVQ", _SI, _BX)                         // MOVQ    SI, BX
+    self.Emit("MOVQ", _R8, _CX)                         // MOVQ    R9, CX
+    self.call_go(_F_mapassign_faststr)                  // CALL_GO runtime.mapassign_faststr
+
+    /* add to the pointer stack */
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)                 // MOVQ ST.Sp, CX
+    self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false)    // MOVQ AX, ST.Vp[CX]
+    self.Sjmp("JMP" , "_next")                                   // JMP  _next
+
+    /* allocate memory to store the string header and unquoted result */
+    self.Link("_unquote")                               // _unquote:
+    self.Emit("ADDQ", jit.Imm(15), _AX)                 // ADDQ    $15, AX
+    self.Emit("MOVQ", _T_byte, _BX)                     // MOVQ    _T_byte, BX
+    self.Emit("MOVB", jit.Imm(0), _CX)                  // MOVB    $0, CX
+    self.call_go(_F_mallocgc)                           // CALL_GO runtime.mallocgc
+    self.Emit("MOVQ", _AX, _R9)                         // MOVQ    AX, R9
+
+    /* prepare the unquoting parameters */
+    self.Emit("MOVQ" , _VAR_ss_Iv, _CX)                         // MOVQ  ss.Iv, CX
+    self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI)            // LEAQ  (IP)(CX), DI
+    self.Emit("NEGQ" , _CX)                                     // NEGQ  CX
+    self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI)           // LEAQ  -1(IC)(CX), SI
+    self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX)                   // LEAQ  16(R8), DX
+    self.Emit("LEAQ" , _VAR_ss_Ep, _CX)                         // LEAQ  ss.Ep, CX
+    self.Emit("XORL" , _R8, _R8)                                // XORL  R8, R8
+    self.Emit("BTQ"  , jit.Imm(_F_disable_urc), _VAR_df)        // BTQ   ${_F_disable_urc}, fv
+    self.Emit("SETCC", _R8)                                     // SETCC R8
+    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8)   // SHLQ  ${types.B_UNICODE_REPLACE}, R8
+
+    /* unquote the string, with R9 been preserved */
+    self.Emit("MOVQ", _R9, _VAR_R9)             // SAVE R9
+    self.call_c(_F_unquote)                     // CALL unquote
+    self.Emit("MOVQ", _VAR_R9, _R9)             // LOAD R9
+
+    /* check for errors */
+    self.Emit("TESTQ", _AX, _AX)                // TESTQ AX, AX
+    self.Sjmp("JS"   , "_unquote_error")        // JS    _unquote_error
+    self.Emit("MOVL" , jit.Imm(1), _DX)         // MOVL  $1, DX
+    self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8)   // ADDQ  $16, R8
+    self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0))    // MOVQ  R8, (R9)
+    self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8))    // MOVQ  AX, 8(R9)
+    self.Sjmp("JMP"  , "_noescape")             // JMP   _noescape
+
+    /** V_DOUBLE **/
+    self.Link("_decode_V_DOUBLE")                           // _decode_V_DOUBLE:
+    self.Emit("BTQ"  , jit.Imm(_F_use_number), _VAR_df)     // BTQ     _F_use_number, df
+    self.Sjmp("JC"   , "_use_number")                       // JC      _use_number
+    self.Emit("MOVSD", _VAR_ss_Dv, _X0)                     // MOVSD   ss.Dv, X0
+    self.Sjmp("JMP"  , "_use_float64")                      // JMP     _use_float64
+
+    /** V_INTEGER **/
+    self.Link("_decode_V_INTEGER")                          // _decode_V_INTEGER:
+    self.Emit("BTQ"     , jit.Imm(_F_use_number), _VAR_df)  // BTQ      _F_use_number, df
+    self.Sjmp("JC"      , "_use_number")                    // JC       _use_number
+    self.Emit("BTQ"     , jit.Imm(_F_use_int64), _VAR_df)   // BTQ      _F_use_int64, df
+    self.Sjmp("JC"      , "_use_int64")                     // JC       _use_int64
+    //TODO: use ss.Dv directly
+    self.Emit("MOVSD", _VAR_ss_Dv, _X0)                  // MOVSD   ss.Dv, X0
+
+    /* represent numbers as `float64` */
+    self.Link("_use_float64")                   // _use_float64:
+    self.Emit("MOVQ" , _X0, _AX)                // MOVQ   X0, AX
+    self.call_go(_F_convT64)                    // CALL_GO runtime.convT64
+    self.Emit("MOVQ" , _T_float64, _R8)         // MOVQ    _T_float64, R8
+    self.Emit("MOVQ" , _AX, _R9)                // MOVQ    AX, R9
+    self.Emit("MOVQ" , _VAR_ss_Ep, _DI)         // MOVQ    ss.Ep, DI
+    self.Sjmp("JMP"  , "_set_value")            // JMP     _set_value
+
+    /* represent numbers as `json.Number` */
+    self.Link("_use_number")                            // _use_number
+    self.Emit("MOVQ", _VAR_ss_Ep, _AX)                  // MOVQ    ss.Ep, AX
+    self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI)     // LEAQ    (IP)(AX), SI
+    self.Emit("MOVQ", _IC, _CX)                         // MOVQ    IC, CX
+    self.Emit("SUBQ", _AX, _CX)                         // SUBQ    AX, CX
+    self.Emit("MOVQ", _SI, _AX)                         // MOVQ    SI, AX
+    self.Emit("MOVQ", _CX, _BX)                         // MOVQ    CX, BX
+    self.call_go(_F_convTstring)                        // CALL_GO runtime.convTstring
+    self.Emit("MOVQ", _T_number, _R8)                   // MOVQ    _T_number, R8
+    self.Emit("MOVQ", _AX, _R9)                         // MOVQ    AX, R9
+    self.Emit("MOVQ", _VAR_ss_Ep, _DI)                  // MOVQ    ss.Ep, DI
+    self.Sjmp("JMP" , "_set_value")                     // JMP     _set_value
+
+    /* represent numbers as `int64` */
+    self.Link("_use_int64")                     // _use_int64:
+    self.Emit("MOVQ", _VAR_ss_Iv, _AX)          // MOVQ    ss.Iv, AX
+    self.call_go(_F_convT64)                    // CALL_GO runtime.convT64
+    self.Emit("MOVQ", _T_int64, _R8)            // MOVQ    _T_int64, R8
+    self.Emit("MOVQ", _AX, _R9)                 // MOVQ    AX, R9
+    self.Emit("MOVQ", _VAR_ss_Ep, _DI)          // MOVQ    ss.Ep, DI
+    self.Sjmp("JMP" , "_set_value")             // JMP     _set_value
+
+    /** V_KEY_SEP **/
+    self.Link("_decode_V_KEY_SEP")                                          // _decode_V_KEY_SEP:
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)                            // MOVQ ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX)                    // MOVQ ST.Vt[CX], AX
+    self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim))                           // CMPQ AX, _S_obj_delim
+    self.Sjmp("JNE" , "_invalid_char")                                      // JNE  _invalid_char
+    self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt))        // MOVQ _S_val, ST.Vt[CX]
+    self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8))    // MOVQ _S_obj, ST.Vt[CX - 1]
+    self.Sjmp("JMP" , "_next")                                              // JMP  _next
+
+    /** V_ELEM_SEP **/
+    self.Link("_decode_V_ELEM_SEP")                          // _decode_V_ELEM_SEP:
+    self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX)            // MOVQ     ST.Sp, CX
+    self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX)    // MOVQ     ST.Vt[CX], AX
+    self.Emit("CMPQ" , _AX, jit.Imm(_S_arr))      
+    self.Sjmp("JE"   , "_array_sep")                         // JZ       _next
+    self.Emit("CMPQ" , _AX, jit.Imm(_S_obj))                 // CMPQ     _AX, _S_arr
+    self.Sjmp("JNE"  , "_invalid_char")                      // JNE      _invalid_char
+    self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
+    self.Sjmp("JMP"  , "_next")                              // JMP      _next
+
+    /* arrays */
+    self.Link("_array_sep")
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI)    // MOVQ ST.Vp[CX], SI
+    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI)                 // MOVQ 8(SI), SI
+    self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX)                 // MOVQ 8(SI), DX
+    self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16))                // CMPQ DX, 16(SI)
+    self.Sjmp("JAE" , "_array_more")                        // JAE  _array_more
+
+    /* add a slot for the new element */
+    self.Link("_array_append")                                          // _array_append:
+    self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8))                      // ADDQ $1, 8(SI)
+    self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI)                             // MOVQ (SI), SI
+    self.Emit("ADDQ", jit.Imm(1), _CX)                                  // ADDQ $1, CX
+    self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE))                  // CMPQ CX, ${types.MAX_RECURSE}
+    self.Sjmp("JAE"  , "_stack_overflow")                                // JA   _stack_overflow
+    self.Emit("SHLQ", jit.Imm(1), _DX)                                  // SHLQ $1, DX
+    self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI)                     // LEAQ (SI)(DX*8), SI
+    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp))                        // MOVQ CX, ST.Sp
+    self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false)           // MOVQ SI, ST.Vp[CX]
+    self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt))    // MOVQ _S_val, ST.Vt[CX}
+    self.Sjmp("JMP" , "_next")                                          // JMP  _next
+
+    /** V_ARRAY_END **/
+    self.Link("_decode_V_ARRAY_END")                        // _decode_V_ARRAY_END:
+    self.Emit("XORL", _DX, _DX)                             // XORL DX, DX
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)            // MOVQ ST.Sp, CX
+    /* NOTE(review): tail of the FSM-based generic value decoder; the enclosing
+     * method and the earlier state handlers begin above this chunk. This first
+     * run of instructions is presumably the array branch of the element-
+     * separator handler — confirm against the full function. */
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX)    // MOVQ ST.Vt[CX], AX
+    self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0))               // CMPQ AX, _S_arr_0
+    self.Sjmp("JE"  , "_first_item")                        // JE   _first_item
+    self.Emit("CMPQ", _AX, jit.Imm(_S_arr))                 // CMPQ AX, _S_arr
+    self.Sjmp("JNE" , "_invalid_char")                      // JNE  _invalid_char
+    self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp))     // SUBQ $1, ST.Sp
+    self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp))    // MOVQ DX, ST.Vp[CX]
+    self.Sjmp("JMP" , "_next")                              // JMP  _next
+
+    /* first element of an array */
+    self.Link("_first_item")                                    // _first_item:
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)                // MOVQ ST.Sp, CX
+    self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp))         // SUBQ $2, ST.Sp
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI)    // MOVQ ST.Vp[CX - 1], SI
+    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI)                     // MOVQ 8(SI), SI
+    self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8))    // MOVQ DX, ST.Vp[CX - 1]
+    self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp))        // MOVQ DX, ST.Vp[CX]
+    self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8))                     // MOVQ DX, 8(SI)
+    self.Sjmp("JMP" , "_next")                                  // JMP  _next
+
+    /** V_OBJECT_END **/
+    self.Link("_decode_V_OBJECT_END")                       // _decode_V_OBJECT_END:
+    self.Emit("MOVL", jit.Imm(_S_omask_end), _DI)           // MOVL _S_omask_end, DI
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)            // MOVQ ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX)    // MOVQ ST.Vt[CX], AX
+    self.Emit("BTQ" , _AX, _DI)                             // BTQ  AX, DI
+    self.Sjmp("JNC" , "_invalid_char")                      // JNC  _invalid_char (state bit not set in the end-mask)
+    self.Emit("XORL", _AX, _AX)                             // XORL AX, AX
+    self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp))     // SUBQ $1, ST.Sp
+    self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp))    // MOVQ AX, ST.Vp[CX]
+    self.Sjmp("JMP" , "_next")                              // JMP  _next
+
+    /* return from decoder */
+    self.Link("_return")                            // _return:
+    self.Emit("XORL", _EP, _EP)                     // XORL EP, EP
+    self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp))    // MOVQ EP, ST.Vp[0]
+    self.Link("_epilogue")                          // _epilogue:
+    self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST)     // SUBQ _FsmOffset, _ST
+    self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP)  // MOVQ _VD_offs(SP), BP
+    self.Emit("ADDQ", jit.Imm(_VD_size), _SP)       // ADDQ $_VD_size, SP
+    self.Emit("RET")                                // RET
+
+    /* array expand */
+    self.Link("_array_more")                    // _array_more:
+    self.Emit("MOVQ" , _T_eface, _AX)           // MOVQ    _T_eface, AX
+    self.Emit("MOVQ" , jit.Ptr(_SI, 0), _BX)    // MOVQ   (SI), BX
+    self.Emit("MOVQ" , jit.Ptr(_SI, 8), _CX)    // MOVQ   8(SI), CX
+    self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DI)   // MOVQ    16(SI), DI
+    self.Emit("MOVQ" , _DI, _SI)                // MOVQ    DI, SI
+    self.Emit("SHLQ" , jit.Imm(1), _SI)         // SHLQ    $1, SI (request double the current capacity)
+    self.call_go(_F_growslice)                  // CALL_GO runtime.growslice
+    self.Emit("MOVQ" , _AX, _DI)                // MOVQ   AX, DI
+    self.Emit("MOVQ" , _BX, _DX)                // MOVQ   BX, DX
+    self.Emit("MOVQ" , _CX, _AX)                // MOVQ   CX, AX
+
+    /* update the slice */
+    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX)            // MOVQ ST.Sp, CX
+    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI)    // MOVQ ST.Vp[CX], SI
+    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI)                 // MOVQ 8(SI), SI
+    self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8))                 // MOVQ DX, 8(SI)
+    self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16))                // MOVQ AX, 16(SI)
+    self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false)                 // MOVQ R10, (SI)
+    self.Sjmp("JMP" , "_array_append")                      // JMP  _array_append
+
+    /* copy string */
+    self.Link("copy_string")  // pointer: R8, length: AX, return addr: DI
+    self.Emit("MOVQ", _R8, _VAR_cs_p)                       // save the source pointer
+    self.Emit("MOVQ", _AX, _VAR_cs_n)                       // save the length
+    self.Emit("MOVQ", _DI, _VAR_cs_LR)                      // save the return address
+    self.Emit("MOVQ", _AX, _BX)                             // len argument
+    self.Emit("MOVQ", _AX, _CX)                             // cap argument
+    self.Emit("MOVQ", _T_byte, _AX)                         // element type: byte
+    self.call_go(_F_makeslice)                              // CALL_GO makeslice
+    self.Emit("MOVQ", _AX, _VAR_cs_d)                       // save the destination pointer
+    self.Emit("MOVQ", _VAR_cs_p, _BX)                       // memmove source
+    self.Emit("MOVQ", _VAR_cs_n, _CX)                       // memmove size
+    self.call_go(_F_memmove)                                // CALL_GO memmove (AX still holds the destination)
+    self.Emit("MOVQ", _VAR_cs_d, _R8)                       // R8 = copied data
+    self.Emit("MOVQ", _VAR_cs_n, _AX)                       // AX = length
+    self.Emit("MOVQ", _VAR_cs_LR, _DI)                      // restore the return address
+    self.Rjmp("JMP", _DI)                                   // jump back to the caller
+
+    /* error handlers */
+    self.Link("_stack_overflow")                // _stack_overflow:
+    self.Emit("MOVL" , _E_recurse, _EP)         // MOVL  _E_recurse, EP
+    self.Sjmp("JMP"  , "_error")                // JMP   _error
+    self.Link("_vtype_error")                   // _vtype_error:
+    self.Emit("MOVQ" , _DI, _IC)                // MOVQ  DI, IC
+    self.Emit("MOVL" , _E_invalid, _EP)         // MOVL  _E_invalid, EP
+    self.Sjmp("JMP"  , "_error")                // JMP   _error
+    self.Link("_invalid_char")                  // _invalid_char:
+    self.Emit("SUBQ" , jit.Imm(1), _IC)         // SUBQ  $1, IC
+    self.Emit("MOVL" , _E_invalid, _EP)         // MOVL  _E_invalid, EP
+    self.Sjmp("JMP"  , "_error")                // JMP   _error
+    self.Link("_unquote_error")                 // _unquote_error:
+    self.Emit("MOVQ" , _VAR_ss_Iv, _IC)         // MOVQ  ss.Iv, IC
+    self.Emit("SUBQ" , jit.Imm(1), _IC)         // SUBQ  $1, IC
+    self.Link("_parsing_error")                 // _parsing_error:
+    self.Emit("NEGQ" , _AX)                     // NEGQ  AX
+    self.Emit("MOVQ" , _AX, _EP)                // MOVQ  AX, EP
+    self.Link("_error")                         // _error:
+    self.Emit("PXOR" , _X0, _X0)                // PXOR  X0, X0
+    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))    // MOVOU X0, (VP)
+    self.Sjmp("JMP"  , "_epilogue")             // JMP   _epilogue
+
+    /* invalid value type, never returns */
+    self.Link("_invalid_vtype")                 // _invalid_vtype:
+    self.call_go(_F_invalid_vtype)              // CALL invalid_vtype
+    self.Emit("UD2")                            // UD2
+
+    /* switch jump table */
+    self.Link("_switch_table")              // _switch_table:
+    self.Sref("_decode_V_EOF", 0)           // SREF &_decode_V_EOF, $0
+    self.Sref("_decode_V_NULL", -4)         // SREF &_decode_V_NULL, $-4
+    self.Sref("_decode_V_TRUE", -8)         // SREF &_decode_V_TRUE, $-8
+    self.Sref("_decode_V_FALSE", -12)       // SREF &_decode_V_FALSE, $-12
+    self.Sref("_decode_V_ARRAY", -16)       // SREF &_decode_V_ARRAY, $-16
+    self.Sref("_decode_V_OBJECT", -20)      // SREF &_decode_V_OBJECT, $-20
+    self.Sref("_decode_V_STRING", -24)      // SREF &_decode_V_STRING, $-24
+    self.Sref("_decode_V_DOUBLE", -28)      // SREF &_decode_V_DOUBLE, $-28
+    self.Sref("_decode_V_INTEGER", -32)     // SREF &_decode_V_INTEGER, $-32
+    self.Sref("_decode_V_KEY_SEP", -36)     // SREF &_decode_V_KEY_SEP, $-36
+    self.Sref("_decode_V_ELEM_SEP", -40)    // SREF &_decode_V_ELEM_SEP, $-40
+    self.Sref("_decode_V_ARRAY_END", -44)   // SREF &_decode_V_ARRAY_END, $-44
+    self.Sref("_decode_V_OBJECT_END", -48)  // SREF &_decode_V_OBJECT_END, $-48
+
+    /* fast character lookup table */
+    self.Link("_decode_tab")        // _decode_tab:
+    self.Sref("_decode_V_EOF", 0)   // SREF &_decode_V_EOF, $0
+
+    /* generate rest of the tabs */
+    for i := 1; i < 256; i++ {
+        if to, ok := _R_tab[i]; ok {
+            self.Sref(to, -int64(i) * 4)
+        } else {
+            self.Byte(0x00, 0x00, 0x00, 0x00)
+        }
+    }
+}
+
+/** Generic Decoder **/
+
+var (
+    // _subr_decode_value is the generic value-decoder subroutine, assembled
+    // exactly once at package initialization by the builder above.
+    _subr_decode_value = new(_ValueDecoder).build()
+)
+
+// invalid_vtype aborts the program when the generated decoder encounters a
+// value type it has no handler for; it never returns.
+//go:nosplit
+func invalid_vtype(vt types.ValueType) {
+    msg := fmt.Sprintf("invalid value type: %d", vt)
+    throw(msg)
+}

Някои файлове не бяха показани, защото твърде много файлове са променени