diff --git a/packages/server/src/sdk/app/backups/backup.ts b/packages/server/src/sdk/app/backups/backup.ts index fb4bcc6022..cb758536bd 100644 --- a/packages/server/src/sdk/app/backups/backup.ts +++ b/packages/server/src/sdk/app/backups/backup.ts @@ -5,37 +5,41 @@ import { Job } from "bull" import fs from "fs" import env from "../../../environment" -export async function init() { - await backups.addAppBackupProcessor(async (job: Job) => { - const appId = job.data.appId, - trigger = job.data.trigger, - name = job.data.name - const tenantId = tenancy.getTenantIDFromAppID(appId) - await tenancy.doInTenant(tenantId, async () => { - const createdAt = new Date().toISOString() - const tarPath = await exportApp(appId, { tar: true }) - let filename = `${appId}/backup-${createdAt}.tar.gz` - // add the tenant to the bucket path if backing up within a multi-tenant environment - if (env.MULTI_TENANCY) { - filename = `${tenantId}/${filename}` - } - const bucket = objectStore.ObjectStoreBuckets.BACKUPS - const metadata = { - appId, - createdAt, - trigger, - name, - } - await objectStore.upload({ - path: tarPath, - type: "application/gzip", - bucket, - filename, - metadata, - }) - await backups.storeAppBackupMetadata(filename, metadata) - // clear up the tarball after uploading it - fs.rmSync(tarPath) +async function importProcessor(job: Job) {} + +async function exportProcessor(job: Job) { + const appId = job.data.appId, + trigger = job.data.trigger, + name = job.data.name + const tenantId = tenancy.getTenantIDFromAppID(appId) + await tenancy.doInTenant(tenantId, async () => { + const createdAt = new Date().toISOString() + const tarPath = await exportApp(appId, { tar: true }) + let filename = `${appId}/backup-${createdAt}.tar.gz` + // add the tenant to the bucket path if backing up within a multi-tenant environment + if (env.MULTI_TENANCY) { + filename = `${tenantId}/${filename}` + } + const bucket = objectStore.ObjectStoreBuckets.BACKUPS + const metadata = { + appId, + 
createdAt, + trigger, + name, + } + await objectStore.upload({ + path: tarPath, + type: "application/gzip", + bucket, + filename, + metadata, }) + await backups.storeAppBackupMetadata(filename, metadata) + // clear up the tarball after uploading it + fs.rmSync(tarPath) }) } + +export async function init() { + await backups.addAppBackupProcessors(importProcessor, exportProcessor) +} diff --git a/packages/server/src/sdk/app/backups/imports.ts b/packages/server/src/sdk/app/backups/imports.ts index d09c4b3f02..60ce63d51e 100644 --- a/packages/server/src/sdk/app/backups/imports.ts +++ b/packages/server/src/sdk/app/backups/imports.ts @@ -1,5 +1,5 @@ import { db as dbCore } from "@budibase/backend-core" -import { APP_PREFIX, TABLE_ROW_PREFIX } from "../../../db/utils" +import { TABLE_ROW_PREFIX } from "../../../db/utils" import { budibaseTempDir } from "../../../utilities/budibaseDir" import { DB_EXPORT_FILE, diff --git a/packages/types/src/documents/app/backup.ts b/packages/types/src/documents/app/backup.ts index 5fb602860e..28e927c772 100644 --- a/packages/types/src/documents/app/backup.ts +++ b/packages/types/src/documents/app/backup.ts @@ -6,6 +6,11 @@ export enum AppBackupTrigger { SCHEDULED = "scheduled", } +export enum AppBackupEventType { + EXPORT = "export", + IMPORT = "import", +} + export interface AppBackup extends Document { trigger: AppBackupTrigger name: string @@ -31,12 +36,22 @@ export type AppBackupFetchOpts = { } export interface AppBackupQueueData { - trigger: AppBackupTrigger - createdBy?: string - name?: string + eventType: AppBackupEventType appId: string + export?: { + trigger: AppBackupTrigger + name?: string + createdBy?: string + } + import?: { + backupId: string + } } -export interface AppBackupMetadata extends AppBackupQueueData { +export interface AppBackupMetadata { + appId: string + trigger: AppBackupTrigger + name?: string + createdBy?: string createdAt: string }