Mirror of https://github.com/marcogll/TaxHacker_s23.git (synced 2026-01-13 13:25:18 +00:00)
BREAKING: postgres + saas

ai/analyze.ts (Normal file, 57 lines added)
@@ -0,0 +1,57 @@
"use server"

import OpenAI from "openai"
import { AnalyzeAttachment } from "./attachments"

export async function analyzeTransaction(
  prompt: string,
  schema: Record<string, unknown>,
  attachments: AnalyzeAttachment[],
  apiKey: string
): Promise<{ success: boolean; data?: Record<string, any>; error?: string }> {
  const openai = new OpenAI({
    apiKey,
  })
  console.log("RUNNING AI ANALYSIS")
  console.log("PROMPT:", prompt)
  console.log("SCHEMA:", schema)

  try {
    const response = await openai.responses.create({
      model: "gpt-4o-mini-2024-07-18",
      input: [
        {
          role: "user",
          content: prompt,
        },
        {
          role: "user",
          content: attachments.map((attachment) => ({
            type: "input_image",
            detail: "auto",
            image_url: `data:${attachment.contentType};base64,${attachment.base64}`,
          })),
        },
      ],
      text: {
        format: {
          type: "json_schema",
          name: "transaction",
          schema: schema,
          strict: true,
        },
      },
    })

    console.log("ChatGPT response:", response.output_text)

    const result = JSON.parse(response.output_text)
    return { success: true, data: result }
  } catch (error) {
    console.error("AI Analysis error:", error)
    return {
      success: false,
      error: error instanceof Error ? error.message : "Failed to analyze invoice",
    }
  }
}
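
Not part of the commit: a minimal sketch of how this server action might be invoked, assuming the prompt, schema and attachments have already been produced elsewhere (in this commit that would be buildLLMPrompt, fieldsToJsonSchema and loadAttachmentsForAI). The import alias, the example field names and the environment variable used for the API key are illustrative assumptions.

import { analyzeTransaction } from "@/ai/analyze" // path alias assumed
import { AnalyzeAttachment } from "@/ai/attachments"

// Illustrative inputs; real ones come from the prompt/schema/attachment helpers.
const prompt = "Extract the merchant name and total amount from the attached invoice."
const schema = {
  type: "object",
  properties: {
    name: { type: "string", description: "merchant name" },
    total: { type: "string", description: "total amount" },
  },
  required: ["name", "total"],
  additionalProperties: false,
}
const attachments: AnalyzeAttachment[] = [
  { filename: "invoice.pdf", contentType: "image/webp", base64: "<base64 of a preview page>" },
]

// Where the key comes from is an assumption; the action only requires a string.
const result = await analyzeTransaction(prompt, schema, attachments, process.env.OPENAI_API_KEY!)
if (result.success) {
  console.log("Extracted fields:", result.data)
} else {
  console.error("Analysis failed:", result.error)
}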

ai/attachments.ts (Normal file, 35 lines added)
@@ -0,0 +1,35 @@
import { fileExists, fullPathForFile } from "@/lib/files"
import { generateFilePreviews } from "@/lib/previews/generate"
import { File, User } from "@prisma/client"
import fs from "fs/promises"

const MAX_PAGES_TO_ANALYZE = 4

export type AnalyzeAttachment = {
  filename: string
  contentType: string
  base64: string
}

export const loadAttachmentsForAI = async (user: User, file: File): Promise<AnalyzeAttachment[]> => {
  const fullFilePath = await fullPathForFile(user, file)
  const isFileExists = await fileExists(fullFilePath)
  if (!isFileExists) {
    throw new Error("File not found on disk")
  }

  const { contentType, previews } = await generateFilePreviews(user, fullFilePath, file.mimetype)

  return Promise.all(
    previews.slice(0, MAX_PAGES_TO_ANALYZE).map(async (preview) => ({
      filename: file.filename,
      contentType: contentType,
      base64: await loadFileAsBase64(preview),
    }))
  )
}

export const loadFileAsBase64 = async (filePath: string): Promise<string> => {
  const buffer = await fs.readFile(filePath)
  return Buffer.from(buffer).toString("base64")
}
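
Not part of the commit: a sketch of how the loader might be wrapped by calling code, assuming real Prisma User and File rows are fetched elsewhere; only the call to loadAttachmentsForAI and the shape of AnalyzeAttachment come from the file above, and the helper name is hypothetical.

import { File, User } from "@prisma/client"
import { loadAttachmentsForAI } from "@/ai/attachments" // path alias assumed

// Hypothetical helper: summarizes what the loader returns for a stored file.
export async function describeAttachments(user: User, file: File) {
  // At most MAX_PAGES_TO_ANALYZE (4) preview pages come back, each with the
  // original filename, the preview content type and a base64-encoded body.
  const attachments = await loadAttachmentsForAI(user, file)
  return attachments.map((a) => ({
    label: `${a.filename} (${a.contentType})`,
    bytes: Buffer.from(a.base64, "base64").length,
  }))
}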

ai/prompt.ts (Normal file, 39 lines added)
@@ -0,0 +1,39 @@
import { Category, Field, Project } from "@prisma/client"

export function buildLLMPrompt(
  promptTemplate: string,
  fields: Field[],
  categories: Category[] = [],
  projects: Project[] = []
) {
  let prompt = promptTemplate

  prompt = prompt.replace(
    "{fields}",
    fields
      .filter((field) => field.llm_prompt)
      .map((field) => `- ${field.code}: ${field.llm_prompt}`)
      .join("\n")
  )

  prompt = prompt.replace(
    "{categories}",
    categories
      .filter((category) => category.llm_prompt)
      .map((category) => `- ${category.code}: for ${category.llm_prompt}`)
      .join("\n")
  )

  prompt = prompt.replace(
    "{projects}",
    projects
      .filter((project) => project.llm_prompt)
      .map((project) => `- ${project.code}: for ${project.llm_prompt}`)
      .join("\n")
  )

  prompt = prompt.replace("{categories.code}", categories.map((category) => `${category.code}`).join(", "))
  prompt = prompt.replace("{projects.code}", projects.map((project) => `${project.code}`).join(", "))

  return prompt
}
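
Not part of the commit: a sketch of a template and its expansion. Only the placeholder names ({fields}, {categories}, {projects}, {categories.code}, {projects.code}) come from the code above; the template text and the partial rows are made up, and the casts stand in for full Prisma records.

import { Category, Field, Project } from "@prisma/client"
import { buildLLMPrompt } from "@/ai/prompt" // path alias assumed

// Illustrative template; the real one presumably lives in app settings.
const template = [
  "Extract the following fields from the attached document:",
  "{fields}",
  "Choose one category code from: {categories.code}",
  "{categories}",
  "Choose one project code from: {projects.code}",
  "{projects}",
].join("\n")

// Partial rows cast through unknown because only code/llm_prompt are used here.
const fields = [
  { code: "total", llm_prompt: "the total amount on the invoice" },
  { code: "issued_at", llm_prompt: "the issue date in ISO format" },
] as unknown as Field[]
const categories = [{ code: "office", llm_prompt: "office supplies and equipment" }] as unknown as Category[]
const projects = [{ code: "acme", llm_prompt: "work billed to the ACME client" }] as unknown as Project[]

console.log(buildLLMPrompt(template, fields, categories, projects))
// Each {placeholder} becomes either a "- code: description" list or a
// comma-separated list of codes, exactly as the replace() calls above do.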

ai/schema.ts (Normal file, 16 lines added)
@@ -0,0 +1,16 @@
import { Field } from "@prisma/client"

export const fieldsToJsonSchema = (fields: Field[]) => {
  const fieldsWithPrompt = fields.filter((field) => field.llm_prompt)
  const schema = {
    type: "object",
    properties: fieldsWithPrompt.reduce((acc, field) => {
      acc[field.code] = { type: field.type, description: field.llm_prompt || "" }
      return acc
    }, {} as Record<string, { type: string; description: string }>),
    required: fieldsWithPrompt.map((field) => field.code),
    additionalProperties: false,
  }

  return schema
}
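
Not part of the commit: for a few made-up fields the helper would produce the schema shown in the trailing comment, which is the shape analyzeTransaction passes to the model under text.format. Field rows are cast through unknown because only code, type and llm_prompt matter to the reducer.

import { Field } from "@prisma/client"
import { fieldsToJsonSchema } from "@/ai/schema" // path alias assumed

const fields = [
  { code: "name", type: "string", llm_prompt: "merchant name" },
  { code: "total", type: "number", llm_prompt: "total amount" },
  { code: "note", type: "string", llm_prompt: null }, // skipped: no llm_prompt
] as unknown as Field[]

console.log(JSON.stringify(fieldsToJsonSchema(fields), null, 2))
// {
//   "type": "object",
//   "properties": {
//     "name": { "type": "string", "description": "merchant name" },
//     "total": { "type": "number", "description": "total amount" }
//   },
//   "required": ["name", "total"],
//   "additionalProperties": false
// }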