feat: storage and token limiting

Vasily Zubarev
2025-04-21 13:50:45 +02:00
parent 62bad46e58
commit 73e83221b8
25 changed files with 232 additions and 65 deletions

@@ -4,12 +4,17 @@ import { ActionState } from "@/lib/actions"
 import OpenAI from "openai"
 import { AnalyzeAttachment } from "./attachments"
+export type AnalysisResult = {
+  output: Record<string, string>
+  tokensUsed: number
+}
 export async function analyzeTransaction(
   prompt: string,
   schema: Record<string, unknown>,
   attachments: AnalyzeAttachment[],
   apiKey: string
-): Promise<ActionState<Record<string, string>>> {
+): Promise<ActionState<AnalysisResult>> {
   const openai = new OpenAI({
     apiKey,
   })
@@ -19,7 +24,7 @@ export async function analyzeTransaction(
   try {
     const response = await openai.responses.create({
-      model: "gpt-4o-mini-2024-07-18",
+      model: "gpt-4o-mini",
       input: [
         {
           role: "user",
@@ -48,7 +53,7 @@ export async function analyzeTransaction(
     console.log("ChatGPT tokens used:", response.usage)
     const result = JSON.parse(response.output_text)
-    return { success: true, data: result }
+    return { success: true, data: { output: result, tokensUsed: response.usage?.total_tokens || 0 } }
   } catch (error) {
     console.error("AI Analysis error:", error)
     return {
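
Below is a minimal caller-side sketch of how the new tokensUsed value could feed the token limiting this commit is about. Everything here is illustrative rather than part of the diff: the "./analyze" import path, the analyzeWithLimit wrapper, the MONTHLY_TOKEN_LIMIT constant, the in-memory usage map, and the assumption that ActionState exposes success and data roughly as the diff suggests.

// Caller-side sketch (illustrative, not part of this commit): gate calls on a
// monthly token budget using the tokensUsed value analyzeTransaction now returns.
import { analyzeTransaction } from "./analyze" // assumed module path
import type { AnalyzeAttachment } from "./attachments"

// Assumed budget; the real limit and persistent storage live elsewhere in the app.
const MONTHLY_TOKEN_LIMIT = 100_000

// In-memory stand-in for whatever storage backs usage tracking.
const usageByUser = new Map<string, number>()

export async function analyzeWithLimit(
  userId: string,
  prompt: string,
  schema: Record<string, unknown>,
  attachments: AnalyzeAttachment[],
  apiKey: string
) {
  const usedSoFar = usageByUser.get(userId) ?? 0
  if (usedSoFar >= MONTHLY_TOKEN_LIMIT) {
    return { success: false, error: "Monthly token limit reached" }
  }

  const result = await analyzeTransaction(prompt, schema, attachments, apiKey)
  if (result.success && result.data) {
    // Record consumed tokens so subsequent calls see the updated total.
    usageByUser.set(userId, usedSoFar + result.data.tokensUsed)
  }
  return result
}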