feat: 添加 AI 记账功能,包括 API 处理器和核心服务逻辑。

This commit is contained in:
2026-01-28 22:13:19 +08:00
parent cce8fad008
commit cebce4f758
2 changed files with 147 additions and 1 deletions

View File

@@ -494,6 +494,103 @@ func (s *LLMService) parseIntentSimple(text string) (*AITransactionParams, strin
return params, message, nil
}
// GenerateReport sends the given prompt to the configured OpenAI-compatible
// chat-completions endpoint and returns the text content of the first choice.
//
// It returns an error when the API credentials/base URL are not configured,
// when the request cannot be built or sent, when the server answers with a
// non-200 status, or when the response contains no choices. The ctx controls
// cancellation and deadline of the HTTP call.
func (s *LLMService) GenerateReport(ctx context.Context, prompt string) (string, error) {
	if s.config.OpenAIAPIKey == "" || s.config.OpenAIBaseURL == "" {
		return "", errors.New("OpenAI API not configured")
	}

	messages := []ChatMessage{
		{
			Role:    "user",
			Content: prompt,
		},
	}

	reqBody := ChatCompletionRequest{
		Model:       s.config.ChatModel,
		Messages:    messages,
		Temperature: 0.7, // Higher temperature for creative insights
	}

	jsonBody, err := json.Marshal(reqBody)
	if err != nil {
		return "", fmt.Errorf("failed to marshal request: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost, s.config.OpenAIBaseURL+"/chat/completions", bytes.NewReader(jsonBody))
	if err != nil {
		return "", fmt.Errorf("failed to create request: %w", err)
	}
	req.Header.Set("Authorization", "Bearer "+s.config.OpenAIAPIKey)
	req.Header.Set("Content-Type", "application/json")

	resp, err := s.httpClient.Do(req)
	if err != nil {
		return "", fmt.Errorf("generate report request failed: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Cap the error body read so a misbehaving server cannot make us
		// buffer an unbounded payload just to build an error message.
		body, _ := io.ReadAll(io.LimitReader(resp.Body, 64<<10))
		return "", fmt.Errorf("generate report failed with status %d: %s", resp.StatusCode, string(body))
	}

	var chatResp ChatCompletionResponse
	if err := json.NewDecoder(resp.Body).Decode(&chatResp); err != nil {
		return "", fmt.Errorf("failed to decode response: %w", err)
	}
	if len(chatResp.Choices) == 0 {
		return "", errors.New("no response from AI")
	}
	return chatResp.Choices[0].Message.Content, nil
}
// GenerateDailyInsight builds a daily-insight prompt from the supplied
// context data, asks the LLM for a short JSON report, and returns the
// model's answer with any surrounding markdown code fence stripped.
//
// NOTE(review): userID is part of the method contract but is not consulted
// here; presumably the caller has already scoped data to that user — confirm.
func (s *AIBookkeepingService) GenerateDailyInsight(ctx context.Context, userID uint, data map[string]interface{}) (string, error) {
	// Embed the context data into the prompt as pretty-printed JSON.
	contextJSON, err := json.MarshalIndent(data, "", " ")
	if err != nil {
		return "", fmt.Errorf("failed to marshal context data: %w", err)
	}

	// The prompt asks for raw JSON output; markdown fences are still
	// stripped defensively below in case the model ignores that instruction.
	prompt := fmt.Sprintf(`[SYSTEM: You are a personal financial analyst. Your task is to provide a brief, warm, and actionable daily financial insight based on the provided data.]
DATA:
%s
TASK:
Output a JSON object with exactly two fields: "spending" and "budget".
1. "spending": A comment on today's spending (max 40 chars). Warm tone. Mention weekday if relevant. Praise streaks.
2. "budget": Actionable advice on budget status (max 40 chars).
OUTPUT FORMAT (JSON ONLY, NO MARKDOWN):
{"spending": "...", "budget": "..."}`, string(contextJSON))

	raw, err := s.llmService.GenerateReport(ctx, prompt)
	if err != nil {
		return "", err
	}

	// Strip a leading ``` fence line and the trailing ``` marker, if present.
	cleaned := strings.TrimSpace(raw)
	if strings.HasPrefix(cleaned, "```") {
		if nl := strings.IndexByte(cleaned, '\n'); nl >= 0 {
			cleaned = cleaned[nl+1:]
		}
		if fence := strings.LastIndex(cleaned, "```"); fence >= 0 {
			cleaned = cleaned[:fence]
		}
	}
	return strings.TrimSpace(cleaned), nil
}
// MapAccountName maps natural language account name to account ID
func (s *LLMService) MapAccountName(ctx context.Context, name string, userID uint) (*uint, string, error) {
if name == "" {