fix: better transactions export UX

Vasily Zubarev
2025-05-19 11:01:50 +02:00
parent ee368180f6
commit b7475ca57d
3 changed files with 130 additions and 74 deletions


@@ -10,6 +10,10 @@ import fs from "fs/promises"
 import JSZip from "jszip"
 import { NextResponse } from "next/server"
 import path from "path"
+import { Readable } from "stream"
+
+const TRANSACTIONS_CHUNK_SIZE = 300
+const FILES_CHUNK_SIZE = 50
 
 export async function GET(request: Request) {
   const url = new URL(request.url)
@@ -21,50 +25,45 @@ export async function GET(request: Request) {
   const { transactions } = await getTransactions(user.id, filters)
   const existingFields = await getFields(user.id)
 
   // Generate CSV file with all transactions
   try {
     const fieldKeys = fields.filter((field) => existingFields.some((f) => f.code === field))
 
-    let csvContent = ""
+    // Create a transform stream for CSV generation
     const csvStream = format({ headers: fieldKeys, writeBOM: true, writeHeaders: false })
-    csvStream.on("data", (chunk) => {
-      csvContent += chunk
-    })
 
     // Custom CSV headers
     const headers = fieldKeys.map((field) => existingFields.find((f) => f.code === field)?.name ?? "UNKNOWN")
     csvStream.write(headers)
 
-    // CSV rows
-    for (const transaction of transactions) {
-      const row: Record<string, unknown> = {}
-      for (const field of existingFields) {
-        let value
-        if (field.isExtra) {
-          value = transaction.extra?.[field.code as keyof typeof transaction.extra] ?? ""
-        } else {
-          value = transaction[field.code as keyof typeof transaction] ?? ""
-        }
+    // Process transactions in chunks to avoid memory issues
+    for (let i = 0; i < transactions.length; i += TRANSACTIONS_CHUNK_SIZE) {
+      const chunk = transactions.slice(i, i + TRANSACTIONS_CHUNK_SIZE)
 
-        // Check if the field has a special export rules
-        const exportFieldSettings = EXPORT_AND_IMPORT_FIELD_MAP[field.code]
-        if (exportFieldSettings && exportFieldSettings.export) {
-          row[field.code] = await exportFieldSettings.export(user.id, value)
-        } else {
-          row[field.code] = value
+      for (const transaction of chunk) {
+        const row: Record<string, unknown> = {}
+        for (const field of existingFields) {
+          let value
+          if (field.isExtra) {
+            value = transaction.extra?.[field.code as keyof typeof transaction.extra] ?? ""
+          } else {
+            value = transaction[field.code as keyof typeof transaction] ?? ""
+          }
+
+          const exportFieldSettings = EXPORT_AND_IMPORT_FIELD_MAP[field.code]
+          if (exportFieldSettings && exportFieldSettings.export) {
+            row[field.code] = await exportFieldSettings.export(user.id, value)
+          } else {
+            row[field.code] = value
+          }
         }
+        csvStream.write(row)
       }
-      csvStream.write(row)
     }
     csvStream.end()
 
-    // Wait for CSV generation to complete
-    await new Promise((resolve) => csvStream.on("end", resolve))
-
     if (!includeAttachments) {
-      return new NextResponse(csvContent, {
+      const stream = Readable.from(csvStream)
+      return new NextResponse(stream as any, {
         headers: {
           "Content-Type": "text/csv",
           "Content-Disposition": `attachment; filename="transactions.csv"`,
@@ -72,46 +71,64 @@ export async function GET(request: Request) {
       })
     }
 
-    // If includeAttachments is true, create a ZIP file with the CSV and attachments
+    // For ZIP files, we'll use a more memory-efficient approach
     const zip = new JSZip()
 
     // Add CSV to zip
+    const csvContent = await new Promise<string>((resolve) => {
+      let content = ""
+      csvStream.on("data", (chunk) => {
+        content += chunk
+      })
+      csvStream.on("end", () => resolve(content))
+    })
     zip.file("transactions.csv", csvContent)
 
+    // Process files in chunks
     const filesFolder = zip.folder("files")
     if (!filesFolder) {
-      console.error("Failed to create zip folder")
-      return new NextResponse("Internal Server Error", { status: 500 })
+      throw new Error("Failed to create zip folder")
     }
 
-    for (const transaction of transactions) {
-      const transactionFiles = await getFilesByTransactionId(transaction.id, user.id)
+    for (let i = 0; i < transactions.length; i += FILES_CHUNK_SIZE) {
+      const chunk = transactions.slice(i, i + FILES_CHUNK_SIZE)
 
-      const transactionFolder = filesFolder.folder(
-        path.join(
-          transaction.issuedAt ? formatDate(transaction.issuedAt, "yyyy/MM") : "",
-          transactionFiles.length > 1 ? transaction.name || transaction.id : ""
-        )
-      )
-      if (!transactionFolder) {
-        console.error(`Failed to create transaction folder for ${transaction.name}`)
-        continue
-      }
+      for (const transaction of chunk) {
+        const transactionFiles = await getFilesByTransactionId(transaction.id, user.id)
 
-      for (const file of transactionFiles) {
-        const fullFilePath = fullPathForFile(user, file)
-        if (await fileExists(fullFilePath)) {
-          const fileData = await fs.readFile(fullFilePath)
-          const fileExtension = path.extname(fullFilePath)
-          transactionFolder.file(
-            `${formatDate(transaction.issuedAt || new Date(), "yyyy-MM-dd")} - ${
-              transaction.name || transaction.id
-            }${fileExtension}`,
-            fileData
-          )
-        }
-      }
+        const transactionFolder = filesFolder.folder(
+          path.join(
+            transaction.issuedAt ? formatDate(transaction.issuedAt, "yyyy/MM") : "",
+            transactionFiles.length > 1 ? transaction.name || transaction.id : ""
+          )
+        )
+        if (!transactionFolder) continue
+
+        for (const file of transactionFiles) {
+          const fullFilePath = fullPathForFile(user, file)
+          if (await fileExists(fullFilePath)) {
+            const fileData = await fs.readFile(fullFilePath)
+            const fileExtension = path.extname(fullFilePath)
+            transactionFolder.file(
+              `${formatDate(transaction.issuedAt || new Date(), "yyyy-MM-dd")} - ${
+                transaction.name || transaction.id
+              }${fileExtension}`,
+              fileData
+            )
+          }
+        }
+      }
     }
 
-    const zipContent = await zip.generateAsync({ type: "uint8array" })
+    // Generate zip with progress tracking
+    const zipContent = await zip.generateAsync({
+      type: "uint8array",
+      compression: "DEFLATE",
+      compressionOptions: {
+        level: 6,
+      },
+    })
 
     return new NextResponse(zipContent, {
       headers: {