feat: cache AI results on server + show success banner

This commit is contained in:
vas3k
2025-05-20 22:32:38 +02:00
parent c352f5eadd
commit f5c5bf75f6
11 changed files with 142 additions and 84 deletions

View File

@@ -4,6 +4,7 @@ import { ActionState } from "@/lib/actions"
import config from "@/lib/config"
import OpenAI from "openai"
import { AnalyzeAttachment } from "./attachments"
import { updateFile } from "@/models/files"
export type AnalysisResult = {
output: Record<string, string>
@@ -14,7 +15,9 @@ export async function analyzeTransaction(
prompt: string,
schema: Record<string, unknown>,
attachments: AnalyzeAttachment[],
apiKey: string
apiKey: string,
fileId: string,
userId: string
): Promise<ActionState<AnalysisResult>> {
const openai = new OpenAI({
apiKey,
@@ -54,6 +57,9 @@ export async function analyzeTransaction(
console.log("ChatGPT tokens used:", response.usage)
const result = JSON.parse(response.output_text)
await updateFile(fileId, userId, { cachedParseResult: result })
return { success: true, data: { output: result, tokensUsed: response.usage?.total_tokens || 0 } }
} catch (error) {
console.error("AI Analysis error:", error)