analyzer.go 4.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202
  1. package suggestions
  2. import (
  3. "bytes"
  4. "encoding/json"
  5. "fmt"
  6. "net/http"
  7. "strings"
  8. "time"
  9. "git.linuxforward.com/byom/byom-trends/common"
  10. "git.linuxforward.com/byom/byom-trends/config"
  11. "git.linuxforward.com/byom/byom-trends/logger"
  12. )
// Analyzer turns social content trends into product suggestions by
// querying an LLM through a LiteLLM proxy.
type Analyzer struct {
	httpClient *http.Client          // shared client; timeout set from config.TimeoutSecs
	config     *config.LiteLLMConfig // proxy URL, model, API key, headers, retry settings
	logger     *logger.Entry
}
// llmRequest is the OpenAI-compatible chat-completion request body
// posted to the LiteLLM proxy's /chat/completions endpoint.
type llmRequest struct {
	Model       string    `json:"model"`
	Messages    []message `json:"messages"`
	Temperature float32   `json:"temperature"`
}
// message is a single chat message (e.g. role "system" or "user")
// within an llmRequest.
type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}
// llmResponse mirrors only the part of the chat-completion response this
// package reads: the message content of each returned choice.
type llmResponse struct {
	Choices []struct {
		Message struct {
			Content string `json:"content"`
		} `json:"message"`
	} `json:"choices"`
}
  34. func NewAnalyzer(cfg *config.LiteLLMConfig) (*Analyzer, error) {
  35. if cfg.ProxyURL == "" {
  36. return nil, fmt.Errorf("LiteLLM proxy URL not configured")
  37. }
  38. return &Analyzer{
  39. httpClient: &http.Client{
  40. Timeout: time.Duration(cfg.TimeoutSecs) * time.Second,
  41. },
  42. config: cfg,
  43. logger: logger.NewLogger("suggestions-analyzer"),
  44. }, nil
  45. }
  46. func (a *Analyzer) AnalyzeTrendsForProducts(trends []common.ContentTrend) ([]common.ProductSuggestion, error) {
  47. a.logger.Info("Analyzing trends for product suggestions")
  48. // Build prompt for LLM
  49. prompt := a.buildTrendAnalysisPrompt(trends)
  50. // Query LLM
  51. suggestions, err := a.queryLLM(prompt)
  52. if err != nil {
  53. return nil, fmt.Errorf("query LLM: %w", err)
  54. }
  55. // Parse response
  56. return a.parseLLMResponse(suggestions)
  57. }
  58. func (a *Analyzer) buildTrendAnalysisPrompt(trends []common.ContentTrend) string {
  59. var trendDescription string
  60. for _, trend := range trends {
  61. trendDescription += fmt.Sprintf("\nPlatform: %s\nType: %s\n",
  62. trend.Platform, trend.TrendType)
  63. for _, item := range trend.Items {
  64. trendDescription += fmt.Sprintf("- %s (frequency: %d, growth rate: %.2f%%)\n",
  65. item.Value, item.Frequency, item.GrowthRate)
  66. }
  67. }
  68. return fmt.Sprintf(`As a market analysis expert, analyze the following social trends:
  69. %s
  70. Suggest 5 product or service ideas that could capitalize on these trends.
  71. For each suggestion, provide:
  72. 1. Product/service name
  73. 2. Short description
  74. 3. Target audience
  75. 4. Relevance score (1-10)
  76. 5. Success factors
  77. Expected JSON format:
  78. {
  79. "suggestions": [
  80. {
  81. "name": "string",
  82. "description": "string",
  83. "target_audience": "string",
  84. "relevance_score": number,
  85. "success_factors": ["string"]
  86. }
  87. ]
  88. }`, trendDescription)
  89. }
  90. func (a *Analyzer) queryLLM(prompt string) (string, error) {
  91. a.logger.WithField("prompt", prompt).Debug("Querying LiteLLM")
  92. request := llmRequest{
  93. Model: a.config.Model,
  94. Messages: []message{
  95. {
  96. Role: "system",
  97. Content: "You are a market analysis expert specialized in identifying business opportunities based on social trends.",
  98. },
  99. {
  100. Role: "user",
  101. Content: prompt,
  102. },
  103. },
  104. Temperature: 0.7,
  105. }
  106. reqBody, err := json.Marshal(request)
  107. if err != nil {
  108. return "", fmt.Errorf("marshal request: %w", err)
  109. }
  110. req, err := http.NewRequest("POST", a.config.ProxyURL+"/chat/completions", bytes.NewBuffer(reqBody))
  111. if err != nil {
  112. return "", fmt.Errorf("create request: %w", err)
  113. }
  114. // Add headers
  115. req.Header.Set("Content-Type", "application/json")
  116. if a.config.APIKey != "" {
  117. req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", a.config.APIKey))
  118. }
  119. for key, value := range a.config.Headers {
  120. req.Header.Set(key, value)
  121. }
  122. var resp *http.Response
  123. var lastErr error
  124. // Implement retry logic
  125. for attempt := 0; attempt <= a.config.MaxRetries; attempt++ {
  126. resp, err = a.httpClient.Do(req)
  127. if err == nil && resp.StatusCode == http.StatusOK {
  128. break
  129. }
  130. lastErr = err
  131. if resp != nil {
  132. resp.Body.Close()
  133. }
  134. // Exponential backoff
  135. if attempt < a.config.MaxRetries {
  136. time.Sleep(time.Duration(1<<attempt) * time.Second)
  137. }
  138. }
  139. if lastErr != nil {
  140. return "", fmt.Errorf("failed after %d retries: %w", a.config.MaxRetries, lastErr)
  141. }
  142. defer resp.Body.Close()
  143. var llmResp llmResponse
  144. if err := json.NewDecoder(resp.Body).Decode(&llmResp); err != nil {
  145. return "", fmt.Errorf("decode response: %w", err)
  146. }
  147. if len(llmResp.Choices) == 0 {
  148. return "", fmt.Errorf("no response content received")
  149. }
  150. content := llmResp.Choices[0].Message.Content
  151. // Extract JSON from content
  152. start := strings.Index(content, "{")
  153. end := strings.LastIndex(content, "}")
  154. if start == -1 || end == -1 {
  155. return "", fmt.Errorf("no valid JSON found in response")
  156. }
  157. return content[start : end+1], nil
  158. }
  159. func (a *Analyzer) parseLLMResponse(response string) ([]common.ProductSuggestion, error) {
  160. var parsed struct {
  161. Suggestions []common.ProductSuggestion `json:"suggestions"`
  162. }
  163. if err := json.Unmarshal([]byte(response), &parsed); err != nil {
  164. return nil, fmt.Errorf("parse LLM response: %w", err)
  165. }
  166. return parsed.Suggestions, nil
  167. }