1
0
Fork 0
mirror of synced 2024-06-23 08:30:31 +12:00

Export to tarball through tmp.

This commit is contained in:
mike12345567 2022-10-11 18:21:58 +01:00
parent bb65cf8ef9
commit a41b362040
3 changed files with 136 additions and 103 deletions

View file

@ -1,24 +1,9 @@
const env = require("../../environment")
const { getAllApps, getGlobalDBName } = require("@budibase/backend-core/db")
const { sendTempFile, readFileSync } = require("../../utilities/fileSystem")
const { stringToReadStream } = require("../../utilities")
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const { create } = require("./application")
const { getDocParams, DocumentType, isDevAppID } = require("../../db/utils")
const { streamFile } = require("../../utilities/fileSystem")
const { DocumentType, isDevAppID } = require("../../db/utils")
const sdk = require("../../sdk")
/**
 * Creates a new application from an exported template string by
 * re-using the application creation controller.
 * @param {string} appName the name the new app should be given.
 * @param {string} appImport the exported app contents (template string).
 * @returns {Promise<*>} the result of the application create controller.
 */
async function createApp(appName, appImport) {
  // mimic the minimal Koa context shape the create controller expects
  return create({
    request: {
      body: {
        templateString: appImport,
        name: appName,
      },
    },
  })
}
exports.exportApps = async ctx => {
if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
@ -27,29 +12,18 @@ exports.exportApps = async ctx => {
const globalDBString = await sdk.apps.exports.exportDB(getGlobalDBName(), {
filter: doc => !doc._id.startsWith(DocumentType.USER),
})
let allDBs = {
global: globalDBString,
}
for (let app of apps) {
const appId = app.appId || app._id
// only export the dev apps as they will be the latest, the user can republish the apps
// in their self hosted environment
if (isDevAppID(appId)) {
allDBs[app.name] = await sdk.apps.exports.exportApp(appId)
}
}
const filename = `cloud-export-${new Date().getTime()}.txt`
ctx.attachment(filename)
ctx.body = sendTempFile(JSON.stringify(allDBs))
}
async function getAllDocType(db, docType) {
const response = await db.allDocs(
getDocParams(docType, null, {
include_docs: true,
})
// only export the dev apps as they will be the latest, the user can republish the apps
// in their self-hosted environment
let appIds = apps
.map(app => app.appId || app._id)
.filter(appId => isDevAppID(appId))
const tmpPath = await sdk.apps.exports.exportMultipleApps(
appIds,
globalDBString
)
return response.rows.map(row => row.doc)
const filename = `cloud-export-${new Date().getTime()}.tar.gz`
ctx.attachment(filename)
ctx.body = streamFile(tmpPath)
}
async function hasBeenImported() {
@ -77,30 +51,51 @@ exports.importApps = async ctx => {
"Import file is required and environment must be fresh to import apps."
)
}
const importFile = ctx.request.files.importFile
const importString = readFileSync(importFile.path)
const dbs = JSON.parse(importString)
const globalDbImport = dbs.global
// remove from the list of apps
delete dbs.global
const globalDb = getGlobalDB()
// load the global db first
await globalDb.load(stringToReadStream(globalDbImport))
for (let [appName, appImport] of Object.entries(dbs)) {
await createApp(appName, appImport)
}
// if there are any users make sure to remove them
let users = await getAllDocType(globalDb, DocumentType.USER)
let userDeletionPromises = []
for (let user of users) {
userDeletionPromises.push(globalDb.remove(user._id, user._rev))
}
if (userDeletionPromises.length > 0) {
await Promise.all(userDeletionPromises)
}
await globalDb.bulkDocs(users)
// TODO: IMPLEMENT TARBALL EXTRACTION, APP IMPORT, ATTACHMENT IMPORT AND GLOBAL DB IMPORT
// async function getAllDocType(db, docType) {
// const response = await db.allDocs(
// getDocParams(docType, null, {
// include_docs: true,
// })
// )
// return response.rows.map(row => row.doc)
// }
// async function createApp(appName, appImport) {
// const ctx = {
// request: {
// body: {
// templateString: appImport,
// name: appName,
// },
// },
// }
// return create(ctx)
// }
// const importFile = ctx.request.files.importFile
// const importString = readFileSync(importFile.path)
// const dbs = JSON.parse(importString)
// const globalDbImport = dbs.global
// // remove from the list of apps
// delete dbs.global
// const globalDb = getGlobalDB()
// // load the global db first
// await globalDb.load(stringToReadStream(globalDbImport))
// for (let [appName, appImport] of Object.entries(dbs)) {
// await createApp(appName, appImport)
// }
//
// // if there are any users make sure to remove them
// let users = await getAllDocType(globalDb, DocumentType.USER)
// let userDeletionPromises = []
// for (let user of users) {
// userDeletionPromises.push(globalDb.remove(user._id, user._rev))
// }
// if (userDeletionPromises.length > 0) {
// await Promise.all(userDeletionPromises)
// }
//
// await globalDb.bulkDocs(users)
ctx.body = {
message: "Apps successfully imported.",
}

View file

@ -1,9 +1,7 @@
import { db as dbCore } from "@budibase/backend-core"
import { budibaseTempDir } from "../../utilities/budibaseDir"
import {
streamUpload,
retrieveDirectory,
} from "../../utilities/fileSystem/utilities"
import { retrieveDirectory } from "../../utilities/fileSystem/utilities"
import { streamFile } from "../../utilities/fileSystem"
import { ObjectStoreBuckets, ATTACHMENT_PATH } from "../../constants"
import {
LINK_USER_METADATA_PREFIX,
@ -11,10 +9,35 @@ import {
USER_METDATA_PREFIX,
} from "../../db/utils"
import fs from "fs"
import env from "../../environment"
import { join } from "path"
const uuid = require("uuid/v4")
const tar = require("tar")
const MemoryStream = require("memorystream")
const DB_EXPORT_FILE = "db.txt"
const GLOBAL_DB_EXPORT_FILE = "global.txt"
// Options controlling how a DB/app export is produced - all fields optional.
type ExportOpts = {
  filter?: any // doc filter function passed through to the DB dump (doc => boolean)
  exportPath?: string // when set, the DB dump is written to this file path
  tar?: boolean // when set, the export is bundled into a .tar.gz archive
  excludeRows?: boolean // when set, row data is filtered out of the export (via defineFilter)
}
/**
 * Creates a gzipped tarball from a set of files/directories within a directory.
 * @param cwd the directory which contains the entries to be archived.
 * @param files the file/directory names (relative to cwd) to include in the tarball.
 * @param exportName optional base name for the tarball - defaults to "export".
 * @returns the full path to the created .tar.gz file.
 */
function tarFiles(cwd: string, files: string[], exportName?: string) {
  exportName = exportName ? `${exportName}.tar.gz` : "export.tar.gz"
  // node-tar resolves the "file" option against process.cwd(), NOT the "cwd"
  // option (cwd only affects where entries are read from) - the full path must
  // be supplied here, otherwise the returned path points at a file that was
  // never written
  const exportPath = join(cwd, exportName)
  tar.create(
    {
      sync: true,
      gzip: true,
      file: exportPath,
      recursive: true,
      cwd,
    },
    files
  )
  return exportPath
}
/**
* Exports a DB to either file or a variable (memory).
* @param {string} dbName the DB which is to be exported.
@ -22,36 +45,13 @@ const MemoryStream = require("memorystream")
* a filter function or the name of the export.
* @return {*} either a readable stream or a string
*/
export async function exportDB(
dbName: string,
opts: { stream?: boolean; filter?: any; exportName?: string } = {}
) {
// streaming a DB dump is a bit more complicated, can't close DB
if (opts?.stream) {
const db = dbCore.dangerousGetDB(dbName)
const memStream = new MemoryStream()
memStream.on("end", async () => {
await dbCore.closeDB(db)
})
db.dump(memStream, { filter: opts?.filter })
return memStream
}
export async function exportDB(dbName: string, opts: ExportOpts = {}) {
return dbCore.doWithDB(dbName, async (db: any) => {
// Write the dump to file if required
if (opts?.exportName) {
const path = join(budibaseTempDir(), opts?.exportName)
if (opts?.exportPath) {
const path = opts?.exportPath
const writeStream = fs.createWriteStream(path)
await db.dump(writeStream, { filter: opts?.filter })
// Upload the dump to the object store if self-hosted
if (env.SELF_HOSTED) {
await streamUpload(
ObjectStoreBuckets.BACKUPS,
join(dbName, opts?.exportName),
fs.createReadStream(path)
)
}
return fs.createReadStream(path)
} else {
// Stringify the dump in memory if required
@ -79,24 +79,57 @@ function defineFilter(excludeRows?: boolean) {
* Local utility to back up the database state for an app, excluding global user
* data or user relationships.
* @param {string} appId The app to back up
* @param {object} config Config to send to export DB
* @param {boolean} excludeRows Flag to state whether the export should include data.
* @param {object} config Config to send to export DB/attachment export
* @returns {*} either a string or a stream of the backup
*/
export async function exportApp(
appId: string,
config?: any,
excludeRows?: boolean
) {
const attachmentsPath = `${dbCore.getProdAppID(appId)}/${ATTACHMENT_PATH}`
export async function exportApp(appId: string, config?: ExportOpts) {
const prodAppId = dbCore.getProdAppID(appId)
const attachmentsPath = `${prodAppId}/${ATTACHMENT_PATH}`
// export attachments to tmp
const tmpPath = await retrieveDirectory(
ObjectStoreBuckets.APPS,
attachmentsPath
)
// move out of app directory, simplify structure
fs.renameSync(join(tmpPath, attachmentsPath), join(tmpPath, ATTACHMENT_PATH))
// remove the old app directory created by object export
fs.rmdirSync(join(tmpPath, prodAppId))
// enforce an export of app DB to the tmp path
const dbPath = join(tmpPath, DB_EXPORT_FILE)
await exportDB(appId, {
...config,
filter: defineFilter(excludeRows),
filter: defineFilter(config?.excludeRows),
exportPath: dbPath,
})
// if tar requested, return where the tarball is
if (config?.tar) {
// now the tmpPath contains both the DB export and attachments, tar this
return tarFiles(tmpPath, [ATTACHMENT_PATH, DB_EXPORT_FILE])
}
// tar not requested, return the directory where the export is located
else {
return tmpPath
}
}
/**
 * Exports multiple apps into a single gzipped tarball, optionally including a
 * dump of the global DB alongside the per-app exports.
 * @param appIds the IDs of the apps to export.
 * @param globalDbContents optional string dump of the global DB to bundle in.
 * @returns the path to the tarball containing all of the exports.
 */
export async function exportMultipleApps(
  appIds: string[],
  globalDbContents?: string
) {
  const tmpPath = join(budibaseTempDir(), uuid())
  // the target directory must exist before app exports can be moved into it,
  // otherwise the renames below fail with ENOENT
  fs.mkdirSync(tmpPath, { recursive: true })
  const exportAndMove = async (appId: string) => {
    const path = await exportApp(appId)
    await fs.promises.rename(path, join(tmpPath, appId))
  }
  // export all of the apps in parallel
  await Promise.all(appIds.map(appId => exportAndMove(appId)))
  // only add the global DB file to the archive when it was actually written,
  // otherwise tar errors on the missing entry
  const tarNames = [...appIds]
  if (globalDbContents) {
    fs.writeFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), globalDbContents)
    tarNames.push(GLOBAL_DB_EXPORT_FILE)
  }
  return tarFiles(tmpPath, tarNames)
}
/**
@ -106,5 +139,6 @@ export async function exportApp(
* @returns {*} a readable stream of the backup which is written in real time
*/
export async function streamExportApp(appId: string, excludeRows: boolean) {
return await exportApp(appId, { stream: true }, excludeRows)
const tmpPath = await exportApp(appId, { excludeRows, tar: true })
return streamFile(tmpPath)
}

View file

@ -112,6 +112,10 @@ exports.apiFileReturn = contents => {
return fs.createReadStream(path)
}
exports.streamFile = path => {
return fs.createReadStream(path)
}
/**
* Writes the provided contents to a temporary file, which can be used briefly.
* @param {string} fileContents contents which will be written to a temp file.