package suggestions

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"time"

	"git.linuxforward.com/byom/byom-trends/common"
	"git.linuxforward.com/byom/byom-trends/config"
	"git.linuxforward.com/byom/byom-trends/logger"
)

type Analyzer struct {
	httpClient *http.Client
	config     *config.LiteLLMConfig
	logger     *logger.Entry
}

type llmRequest struct {
	Model       string    `json:"model"`
	Messages    []message `json:"messages"`
	Temperature float32   `json:"temperature"`
}

type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type llmResponse struct {
	Choices []struct {
		Message struct {
			Content string `json:"content"`
		} `json:"message"`
	} `json:"choices"`
}

func NewAnalyzer(cfg *config.LiteLLMConfig) (*Analyzer, error) {
	if cfg.ProxyURL == "" {
		return nil, fmt.Errorf("LiteLLM proxy URL not configured")
	}

	return &Analyzer{
		httpClient: &http.Client{
			Timeout: time.Duration(cfg.TimeoutSecs) * time.Second,
		},
		config: cfg,
		logger: logger.NewLogger("suggestions-analyzer"),
	}, nil
}

func (a *Analyzer) AnalyzeTrendsForProducts(trends []common.ContentTrend) ([]common.ProductSuggestion, error) {
	a.logger.Info("Analyzing trends for product suggestions")

	// Build prompt for LLM
	prompt := a.buildTrendAnalysisPrompt(trends)

	// Query LLM
	suggestions, err := a.queryLLM(prompt)
	if err != nil {
		return nil, fmt.Errorf("query LLM: %w", err)
	}

	// Parse response
	return a.parseLLMResponse(suggestions)
}

func (a *Analyzer) buildTrendAnalysisPrompt(trends []common.ContentTrend) string {
	var trendDescription string
	for _, trend := range trends {
		trendDescription += fmt.Sprintf("\nPlatform: %s\nType: %s\n", trend.Platform, trend.TrendType)
		for _, item := range trend.Items {
			trendDescription += fmt.Sprintf("- %s (frequency: %d, growth rate: %.2f%%)\n",
				item.Value, item.Frequency, item.GrowthRate)
		}
	}

	return fmt.Sprintf(`As a market analysis expert, analyze the following social trends:
%s

Suggest 5 product or service ideas that could capitalize on these trends.
For each suggestion, provide:
1. Product/service name
2. Short description
3. Target audience
4. Relevance score (1-10)
5. Success factors

Expected JSON format:
{
	"suggestions": [
		{
			"name": "string",
			"description": "string",
			"target_audience": "string",
			"relevance_score": number,
			"success_factors": ["string"]
		}
	]
}`, trendDescription)
}

func (a *Analyzer) queryLLM(prompt string) (string, error) {
	a.logger.WithField("prompt", prompt).Debug("Querying LiteLLM")

	request := llmRequest{
		Model: a.config.Model,
		Messages: []message{
			{
				Role:    "system",
				Content: "You are a market analysis expert specialized in identifying business opportunities based on social trends.",
			},
			{
				Role:    "user",
				Content: prompt,
			},
		},
		Temperature: 0.7,
	}

	reqBody, err := json.Marshal(request)
	if err != nil {
		return "", fmt.Errorf("marshal request: %w", err)
	}

	var resp *http.Response
	var lastErr error

	// Retry logic: the request is rebuilt on every attempt, because an
	// *http.Request body can only be read once and re-sending a consumed
	// request would post an empty body.
	for attempt := 0; attempt <= a.config.MaxRetries; attempt++ {
		req, err := http.NewRequest("POST", a.config.ProxyURL+"/chat/completions", bytes.NewReader(reqBody))
		if err != nil {
			return "", fmt.Errorf("create request: %w", err)
		}

		// Add headers
		req.Header.Set("Content-Type", "application/json")
		if a.config.APIKey != "" {
			req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", a.config.APIKey))
		}
		for key, value := range a.config.Headers {
			req.Header.Set(key, value)
		}

		resp, err = a.httpClient.Do(req)
		if err == nil && resp.StatusCode == http.StatusOK {
			break
		}
		if err != nil {
			lastErr = err
		} else {
			// Keep a meaningful error instead of overwriting lastErr with nil
			lastErr = fmt.Errorf("unexpected status: %s", resp.Status)
		}
		if resp != nil {
			resp.Body.Close()
		}

		// Exponential backoff: 1s, 2s, 4s, ...
		if attempt < a.config.MaxRetries {
			time.Sleep(time.Duration(1<<attempt) * time.Second)
		}
	}

	if resp == nil || resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("LLM request failed after %d attempts: %w", a.config.MaxRetries+1, lastErr)
	}
	defer resp.Body.Close()

	var llmResp llmResponse
	if err := json.NewDecoder(resp.Body).Decode(&llmResp); err != nil {
		return "", fmt.Errorf("decode response: %w", err)
	}
	if len(llmResp.Choices) == 0 {
		return "", fmt.Errorf("no choices in LLM response")
	}

	return llmResp.Choices[0].Message.Content, nil
}
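
// parseLLMResponse is called by AnalyzeTrendsForProducts but its body is not
// part of this section. What follows is a minimal sketch, assuming the model
// answers with the JSON object requested in the prompt, possibly surrounded
// by prose or a markdown code fence, and that common.ProductSuggestion
// carries json tags matching the prompt's "Expected JSON format"
// (name, description, target_audience, relevance_score, success_factors).
// The wrapper struct and extraction logic here are illustrative, not the
// repository's actual implementation.
func (a *Analyzer) parseLLMResponse(response string) ([]common.ProductSuggestion, error) {
	// Isolate the outermost JSON object in case the model wrapped its
	// answer in explanatory text or a ```json fence.
	start := strings.Index(response, "{")
	end := strings.LastIndex(response, "}")
	if start == -1 || end == -1 || end < start {
		return nil, fmt.Errorf("no JSON object found in LLM response")
	}

	var parsed struct {
		Suggestions []common.ProductSuggestion `json:"suggestions"`
	}
	if err := json.Unmarshal([]byte(response[start:end+1]), &parsed); err != nil {
		return nil, fmt.Errorf("unmarshal suggestions: %w", err)
	}
	if len(parsed.Suggestions) == 0 {
		return nil, fmt.Errorf("LLM returned no suggestions")
	}

	return parsed.Suggestions, nil
}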