Reformatting types to allow queue to be used for import and export.

mike12345567 2022-10-17 19:42:36 +01:00
parent 38df528e2d
commit 07d4d9ee4d
3 changed files with 55 additions and 36 deletions


@ -5,37 +5,41 @@ import { Job } from "bull"
import fs from "fs"
import env from "../../../environment"
export async function init() {
await backups.addAppBackupProcessor(async (job: Job) => {
const appId = job.data.appId,
trigger = job.data.trigger,
name = job.data.name
const tenantId = tenancy.getTenantIDFromAppID(appId)
await tenancy.doInTenant(tenantId, async () => {
const createdAt = new Date().toISOString()
const tarPath = await exportApp(appId, { tar: true })
let filename = `${appId}/backup-${createdAt}.tar.gz`
// add the tenant to the bucket path if backing up within a multi-tenant environment
if (env.MULTI_TENANCY) {
filename = `${tenantId}/${filename}`
}
const bucket = objectStore.ObjectStoreBuckets.BACKUPS
const metadata = {
appId,
createdAt,
trigger,
name,
}
await objectStore.upload({
path: tarPath,
type: "application/gzip",
bucket,
filename,
metadata,
})
await backups.storeAppBackupMetadata(filename, metadata)
// clear up the tarball after uploading it
fs.rmSync(tarPath)
async function importProcessor(job: Job) {}
async function exportProcessor(job: Job) {
const appId = job.data.appId,
trigger = job.data.trigger,
name = job.data.name
const tenantId = tenancy.getTenantIDFromAppID(appId)
await tenancy.doInTenant(tenantId, async () => {
const createdAt = new Date().toISOString()
const tarPath = await exportApp(appId, { tar: true })
let filename = `${appId}/backup-${createdAt}.tar.gz`
// add the tenant to the bucket path if backing up within a multi-tenant environment
if (env.MULTI_TENANCY) {
filename = `${tenantId}/${filename}`
}
const bucket = objectStore.ObjectStoreBuckets.BACKUPS
const metadata = {
appId,
createdAt,
trigger,
name,
}
await objectStore.upload({
path: tarPath,
type: "application/gzip",
bucket,
filename,
metadata,
})
await backups.storeAppBackupMetadata(filename, metadata)
// clear up the tarball after uploading it
fs.rmSync(tarPath)
})
}
export async function init() {
await backups.addAppBackupProcessors(importProcessor, exportProcessor)
}
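
This diff swaps addAppBackupProcessor (a single callback) for addAppBackupProcessors (import plus export), but the commit does not show that function's body. A minimal sketch of how it might route each queued job to the matching processor via the new eventType field, assuming the AppBackupEventType export from @budibase/types introduced below; backupJobRouter is a hypothetical name, not part of this commit:

import { Job } from "bull"
import { AppBackupEventType } from "@budibase/types"

type Processor = (job: Job) => Promise<void>

// hypothetical: build a single Bull processor that dispatches on the
// eventType carried in the job's AppBackupQueueData payload
function backupJobRouter(
  importProcessor: Processor,
  exportProcessor: Processor
) {
  return async (job: Job) => {
    switch (job.data.eventType) {
      case AppBackupEventType.IMPORT:
        return importProcessor(job)
      case AppBackupEventType.EXPORT:
        return exportProcessor(job)
      default:
        throw new Error(`unknown backup event type: ${job.data.eventType}`)
    }
  }
}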


@@ -1,5 +1,5 @@
 import { db as dbCore } from "@budibase/backend-core"
-import { APP_PREFIX, TABLE_ROW_PREFIX } from "../../../db/utils"
+import { TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
 import {
   DB_EXPORT_FILE,

@@ -6,6 +6,11 @@ export enum AppBackupTrigger {
   SCHEDULED = "scheduled",
 }
 
+export enum AppBackupEventType {
+  EXPORT = "export",
+  IMPORT = "import",
+}
+
 export interface AppBackup extends Document {
   trigger: AppBackupTrigger
   name: string
@@ -31,12 +36,22 @@ export type AppBackupFetchOpts = {
 }
 
 export interface AppBackupQueueData {
-  trigger: AppBackupTrigger
-  createdBy?: string
-  name?: string
+  eventType: AppBackupEventType
   appId: string
+  export?: {
+    trigger: AppBackupTrigger
+    name?: string
+    createdBy?: string
+  }
+  import?: {
+    backupId: string
+  }
 }
 
-export interface AppBackupMetadata extends AppBackupQueueData {
+export interface AppBackupMetadata {
+  appId: string
+  trigger: AppBackupTrigger
+  name?: string
+  createdBy?: string
   createdAt: string
 }
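
With the reshaped AppBackupQueueData, export-only fields (trigger, name, createdBy) move under an optional export key, while imports carry just a backupId, and the shared eventType discriminates the two. A sketch of what the two job payloads might look like, assuming the types are exported from @budibase/types; the IDs and names are illustrative only:

import {
  AppBackupQueueData,
  AppBackupEventType,
  AppBackupTrigger,
} from "@budibase/types"

// export job: export-specific fields live under the `export` key
const exportJob: AppBackupQueueData = {
  eventType: AppBackupEventType.EXPORT,
  appId: "app_123", // illustrative app ID
  export: {
    trigger: AppBackupTrigger.SCHEDULED,
    name: "nightly backup", // illustrative name
  },
}

// import (restore) job: only identifies the stored backup to restore
const importJob: AppBackupQueueData = {
  eventType: AppBackupEventType.IMPORT,
  appId: "app_123",
  import: {
    backupId: "backup_456", // illustrative backup ID
  },
}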