diff --git a/.gitignore b/.gitignore index 49b002d..7e38136 100644 --- a/.gitignore +++ b/.gitignore @@ -45,7 +45,8 @@ yarn-error.log* *.tsbuildinfo next-env.d.ts -# sqlite +# databases +pgdata *.db *.sqlite *.sqlite3 diff --git a/app/export/transactions/route.ts b/app/export/transactions/route.ts index c16fc0a..9908ce8 100644 --- a/app/export/transactions/route.ts +++ b/app/export/transactions/route.ts @@ -1,4 +1,4 @@ -import { ExportFields, ExportFilters, exportImportFieldsMapping } from "@/models/export_and_import" +import { EXPORT_AND_IMPORT_FIELD_MAP, ExportFields, ExportFilters } from "@/models/export_and_import" import { getFields } from "@/models/fields" import { getFilesByTransactionId } from "@/models/files" import { getTransactions } from "@/models/transactions" @@ -38,7 +38,7 @@ export async function GET(request: Request) { const row: Record = {} for (const key of fieldKeys) { const value = transaction[key as keyof typeof transaction] ?? "" - const exportFieldSettings = exportImportFieldsMapping[key] + const exportFieldSettings = EXPORT_AND_IMPORT_FIELD_MAP[key] if (exportFieldSettings && exportFieldSettings.export) { row[key] = await exportFieldSettings.export(value) } else { diff --git a/app/import/csv/actions.tsx b/app/import/csv/actions.tsx index bc59f3b..b9c30ed 100644 --- a/app/import/csv/actions.tsx +++ b/app/import/csv/actions.tsx @@ -1,6 +1,6 @@ "use server" -import { exportImportFieldsMapping } from "@/models/export_and_import" +import { EXPORT_AND_IMPORT_FIELD_MAP } from "@/models/export_and_import" import { createTransaction } from "@/models/transactions" import { parse } from "@fast-csv/parse" import { revalidatePath } from "next/cache" @@ -44,7 +44,7 @@ export async function saveTransactionsAction(prevState: any, formData: FormData) for (const row of rows) { const transactionData: Record = {} for (const [fieldCode, value] of Object.entries(row)) { - const fieldDef = exportImportFieldsMapping[fieldCode] + const fieldDef = 
EXPORT_AND_IMPORT_FIELD_MAP[fieldCode] if (fieldDef?.import) { transactionData[fieldCode] = await fieldDef.import(value as string) } else { diff --git a/app/settings/backups/actions.ts b/app/settings/backups/actions.ts index 8f0f350..2b0212e 100644 --- a/app/settings/backups/actions.ts +++ b/app/settings/backups/actions.ts @@ -1,21 +1,214 @@ "use server" -import { DATABASE_FILE } from "@/lib/db" +import { prisma } from "@/lib/db" +import { FILE_UPLOAD_PATH } from "@/lib/files" +import { MODEL_BACKUP } from "@/models/backups" import fs from "fs" +import { mkdir } from "fs/promises" +import JSZip from "jszip" +import path from "path" + +const SUPPORTED_BACKUP_VERSIONS = ["1.0"] export async function restoreBackupAction(prevState: any, formData: FormData) { const file = formData.get("file") as File + const removeExistingData = formData.get("removeExistingData") === "true" + if (!file) { return { success: false, error: "No file provided" } } + // Restore tables try { const fileBuffer = await file.arrayBuffer() const fileData = Buffer.from(fileBuffer) - fs.writeFileSync(DATABASE_FILE, fileData) + const zip = await JSZip.loadAsync(fileData) + + // Check backup version + const metadataFile = zip.file("data/metadata.json") + if (metadataFile) { + const metadataContent = await metadataFile.async("string") + try { + const metadata = JSON.parse(metadataContent) + if (!metadata.version || !SUPPORTED_BACKUP_VERSIONS.includes(metadata.version)) { + return { + success: false, + error: `Incompatible backup version: ${ + metadata.version || "unknown" + }. 
Supported versions: ${SUPPORTED_BACKUP_VERSIONS.join(", ")}`, + } + } + console.log(`Restoring backup version ${metadata.version} created at ${metadata.timestamp}`) + } catch (error) { + console.warn("Could not parse backup metadata:", error) + } + } else { + console.warn("No metadata found in backup, assuming legacy format") + } + + if (removeExistingData) { + await clearAllTables() + } + + for (const { filename, model, idField } of MODEL_BACKUP) { + try { + const jsonFile = zip.file(`data/${filename}`) + if (jsonFile) { + const jsonContent = await jsonFile.async("string") + const restoredCount = await restoreModelFromJSON(model, jsonContent, idField) + console.log(`Restored ${restoredCount} records from ${filename}`) + } + } catch (error) { + console.error(`Error restoring model from ${filename}:`, error) + } + } + + // Restore files + try { + const filesToRestore = Object.keys(zip.files).filter( + (filename) => filename.startsWith("data/uploads/") && !filename.endsWith("/") + ) + + if (filesToRestore.length > 0) { + await mkdir(FILE_UPLOAD_PATH, { recursive: true }) + + // Extract and save each file + let restoredFilesCount = 0 + for (const zipFilePath of filesToRestore) { + const file = zip.file(zipFilePath) + if (file) { + const relativeFilePath = zipFilePath.replace("data/uploads/", "") + const fileContent = await file.async("nodebuffer") + + const filePath = path.join(FILE_UPLOAD_PATH, relativeFilePath) + const fileName = path.basename(filePath) + const fileId = path.basename(fileName, path.extname(fileName)) + const fileDir = path.dirname(filePath) + await mkdir(fileDir, { recursive: true }) + + // Write the file + fs.writeFileSync(filePath, fileContent) + restoredFilesCount++ + + // Update the file record + await prisma.file.upsert({ + where: { id: fileId }, + update: { + path: filePath, + }, + create: { + id: fileId, + path: filePath, + filename: fileName, + mimetype: "application/octet-stream", + }, + }) + } + } + } + } catch (error) { +
console.error("Error restoring uploaded files:", error) + return { + success: false, + error: `Error restoring uploaded files: ${error instanceof Error ? error.message : String(error)}`, + } + } + + return { success: true, message: `Restore completed successfully` } } catch (error) { - return { success: false, error: "Failed to restore backup" } + console.error("Error restoring from backup:", error) + return { + success: false, + error: `Error restoring from backup: ${error instanceof Error ? error.message : String(error)}`, + } + } +} + +async function clearAllTables() { + // Delete in reverse order to handle foreign key constraints + for (const { model } of [...MODEL_BACKUP].reverse()) { + try { + await model.deleteMany({}) + } catch (error) { + console.error(`Error clearing table:`, error) + } + } +} + +async function restoreModelFromJSON(model: any, jsonContent: string, idField: string): Promise<number> { + if (!jsonContent) return 0 + + try { + const records = JSON.parse(jsonContent) + + if (!records || records.length === 0) { + return 0 + } + + let insertedCount = 0 + for (const rawRecord of records) { + const record = processRowData(rawRecord) + + try { + // Skip records that don't have the required ID field + if (record[idField] === undefined) { + console.warn(`Skipping record missing required ID field '${idField}'`) + continue + } + + await model.upsert({ + where: { [idField]: record[idField] }, + update: record, + create: record, + }) + insertedCount++ + } catch (error) { + console.error(`Error upserting record:`, error) + } + } + + return insertedCount + } catch (error) { + console.error(`Error parsing JSON content:`, error) + return 0 + } +} + +function processRowData(row: Record<string, any>): Record<string, any> { + const processedRow: Record<string, any> = {} + + for (const [key, value] of Object.entries(row)) { + if (value === "" || value === "null" || value === undefined) { + processedRow[key] = null + continue + } + + // Try to parse JSON for object fields + if (typeof value === "string" && 
(value.startsWith("{") || value.startsWith("["))) { + try { + processedRow[key] = JSON.parse(value) + continue + } catch (e) { + // Not valid JSON, continue with normal processing + } + } + + // Handle dates (checking for ISO date format) + if (typeof value === "string" && /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{3})?Z?$/.test(value)) { + processedRow[key] = new Date(value) + continue + } + + // Handle numbers + if (typeof value === "string" && !isNaN(Number(value)) && key !== "id" && !key.endsWith("Code")) { + // Convert numbers but preserving string IDs + processedRow[key] = Number(value) + continue + } + + // Default: keep as is + processedRow[key] = value } - return { success: true } + return processedRow } diff --git a/app/settings/backups/data/route.ts b/app/settings/backups/data/route.ts index 61a5c8b..d2bf74b 100644 --- a/app/settings/backups/data/route.ts +++ b/app/settings/backups/data/route.ts @@ -1,10 +1,13 @@ -import { DATABASE_FILE } from "@/lib/db" import { FILE_UPLOAD_PATH } from "@/lib/files" +import { MODEL_BACKUP } from "@/models/backups" import fs, { readdirSync } from "fs" import JSZip from "jszip" import { NextResponse } from "next/server" import path from "path" +const MAX_FILE_SIZE = 64 * 1024 * 1024 // 64MB +const BACKUP_VERSION = "1.0" + export async function GET(request: Request) { try { const zip = new JSZip() @@ -14,8 +17,29 @@ export async function GET(request: Request) { return new NextResponse("Internal Server Error", { status: 500 }) } - const databaseFile = fs.readFileSync(DATABASE_FILE) - rootFolder.file("database.sqlite", databaseFile) + // Add metadata with version information + rootFolder.file( + "metadata.json", + JSON.stringify( + { + version: BACKUP_VERSION, + timestamp: new Date().toISOString(), + models: MODEL_BACKUP.map((m) => m.filename), + }, + null, + 2 + ) + ) + + // Backup models + for (const { filename, model } of MODEL_BACKUP) { + try { + const jsonContent = await tableToJSON(model) + rootFolder.file(filename, 
jsonContent) + } catch (error) { + console.error(`Error exporting table ${filename}:`, error) + } + } const uploadsFolder = rootFolder.folder("uploads") if (!uploadsFolder) { @@ -25,7 +49,23 @@ export async function GET(request: Request) { const uploadedFiles = getAllFilePaths(FILE_UPLOAD_PATH) uploadedFiles.forEach((file) => { - uploadsFolder.file(file.replace(FILE_UPLOAD_PATH, ""), fs.readFileSync(file)) + try { + // Check file size before reading + const stats = fs.statSync(file) + if (stats.size > MAX_FILE_SIZE) { + console.warn( + `Skipping large file ${file} (${Math.round(stats.size / 1024 / 1024)}MB > ${ + MAX_FILE_SIZE / 1024 / 1024 + }MB limit)` + ) + return + } + + const fileContent = fs.readFileSync(file) + uploadsFolder.file(file.replace(FILE_UPLOAD_PATH, ""), fileContent) + } catch (error) { + console.error(`Error reading file ${file}:`, error) + } }) const archive = await zip.generateAsync({ type: "blob" }) @@ -60,3 +100,13 @@ function getAllFilePaths(dirPath: string): string[] { readDirectory(dirPath) return filePaths } + +async function tableToJSON(model: any): Promise<string> { + const data = await model.findMany() + + if (!data || data.length === 0) { + return "[]" + } + + return JSON.stringify(data, null, 2) +} diff --git a/app/settings/backups/page.tsx b/app/settings/backups/page.tsx index 85955fc..536c654 100644 --- a/app/settings/backups/page.tsx +++ b/app/settings/backups/page.tsx @@ -3,7 +3,7 @@ import { FormError } from "@/components/forms/error" import { Button } from "@/components/ui/button" import { Card } from "@/components/ui/card" -import { Download } from "lucide-react" +import { Download, Loader2 } from "lucide-react" import Link from "next/link" import { useActionState } from "react" import { restoreBackupAction } from "./actions" @@ -18,38 +18,48 @@ export default function BackupSettingsPage() {
- The archive consists of all uploaded files and the SQLite database. You can view the contents of the database - using any SQLite viewer. + Inside the archive you will find all the uploaded files, as well as JSON files for transactions, categories, + projects, fields, currencies, and settings. You can view, edit or migrate your data to another service.
- -

How to restore from a backup

-
- This feature doesn't work automatically yet. Use your docker deployment with backup archive to manually put - database.sqlite and uploaded files into the paths specified in DATABASE_URL and UPLOAD_PATH -
- {/*
- - -
*/} + +

Restore from a backup

+

+ ⚠️ This action will delete all existing data from your current database and remove all uploaded files. Be + careful and make a backup first! +

+
+
+ + + +
+
{restoreState?.error && {restoreState.error}}
+ + {restoreState?.success && ( + +

Backup restored successfully

+

You can now continue using the app.

+
+ )} ) } diff --git a/app/settings/backups/restore/route.ts b/app/settings/backups/restore/route.ts deleted file mode 100644 index eb2f88c..0000000 --- a/app/settings/backups/restore/route.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { NextResponse } from "next/server" - -export async function POST(request: Request) { - try { - const formData = await request.formData() - const file = formData.get("file") as File - - if (!file) { - return new NextResponse("No file provided", { status: 400 }) - } - - const fileBuffer = await file.arrayBuffer() - const fileData = Buffer.from(fileBuffer) - - // TODO: Implement restore - // fs.writeFileSync(DATABASE_FILE, fileData) - - return new NextResponse("File restored", { status: 200 }) - } catch (error) { - console.error("Error restoring from backup:", error) - return new NextResponse("Internal Server Error", { status: 500 }) - } -} diff --git a/models/backups.ts b/models/backups.ts new file mode 100644 index 0000000..1aca776 --- /dev/null +++ b/models/backups.ts @@ -0,0 +1,46 @@ +import { prisma } from "@/lib/db" + +type ModelEntry = { + filename: string + model: any + idField: string +} + +// Ordering is important here +export const MODEL_BACKUP: ModelEntry[] = [ + { + filename: "settings.json", + model: prisma.setting, + idField: "code", + }, + { + filename: "currencies.json", + model: prisma.currency, + idField: "code", + }, + { + filename: "categories.json", + model: prisma.category, + idField: "code", + }, + { + filename: "projects.json", + model: prisma.project, + idField: "code", + }, + { + filename: "fields.json", + model: prisma.field, + idField: "code", + }, + { + filename: "files.json", + model: prisma.file, + idField: "id", + }, + { + filename: "transactions.json", + model: prisma.transaction, + idField: "id", + }, +] diff --git a/models/export_and_import.ts b/models/export_and_import.ts index 5a95b22..41705a8 100644 --- a/models/export_and_import.ts +++ b/models/export_and_import.ts @@ -16,7 +16,7 @@ export type 
ExportImportFieldSettings = { import?: (value: any) => Promise<any> } -export const exportImportFieldsMapping: Record<string, ExportImportFieldSettings> = { +export const EXPORT_AND_IMPORT_FIELD_MAP: Record<string, ExportImportFieldSettings> = { name: { code: "name", type: "string", diff --git a/next.config.ts b/next.config.ts index ac87268..ac85713 100644 --- a/next.config.ts +++ b/next.config.ts @@ -6,7 +6,7 @@ const nextConfig: NextConfig = { }, experimental: { serverActions: { - bodySizeLimit: "100mb", + bodySizeLimit: "64mb", }, }, } diff --git a/package.json b/package.json index 4109cf5..483f103 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "taxhacker", - "version": "0.2.1", + "version": "0.3.0", "private": true, "type": "module", "scripts": {