1
0
Fork 0
mirror of synced 2024-06-01 18:20:18 +12:00

Full import implementation — needs further testing. Untars the file containing all apps, then adds each of them individually.

This commit is contained in:
mike12345567 2022-10-12 19:15:28 +01:00
parent 92484fa240
commit ae0bfebae7
4 changed files with 112 additions and 67 deletions

View file

@ -21,7 +21,6 @@
import { API } from "api"
import { onMount } from "svelte"
import { apps, auth, admin, templates, licensing } from "stores/portal"
import download from "downloadjs"
import { goto } from "@roxi/routify"
import AppRow from "components/start/AppRow.svelte"
import { AppStatus } from "constants"
@ -140,7 +139,7 @@
const initiateAppsExport = () => {
try {
download(`/api/cloud/export`)
window.location = `/api/cloud/export`
notifications.success("Apps exported successfully")
} catch (err) {
notifications.error(`Error exporting apps: ${err}`)

View file

@ -1,9 +1,45 @@
const env = require("../../environment")
const { getAllApps, getGlobalDBName } = require("@budibase/backend-core/db")
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const { streamFile } = require("../../utilities/fileSystem")
const { DocumentType, isDevAppID } = require("../../db/utils")
const { stringToReadStream } = require("../../utilities")
const {
getDocParams,
DocumentType,
isDevAppID,
APP_PREFIX,
} = require("../../db/utils")
const { create } = require("./application")
const { join } = require("path")
const fs = require("fs")
const sdk = require("../../sdk")
async function createApp(appName, appDirectory) {
const ctx = {
request: {
body: {
useTemplate: true,
name: appName,
},
files: {
templateFile: {
path: appDirectory,
},
},
},
}
return create(ctx)
}
async function getAllDocType(db, docType) {
const response = await db.allDocs(
getDocParams(docType, null, {
include_docs: true,
})
)
return response.rows.map(row => row.doc)
}
exports.exportApps = async ctx => {
if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
@ -14,10 +50,13 @@ exports.exportApps = async ctx => {
})
// only export the dev apps as they will be the latest, the user can republish the apps
// in their self-hosted environment
let appIds = apps
.map(app => app.appId || app._id)
.filter(appId => isDevAppID(appId))
const tmpPath = await sdk.backups.exportMultipleApps(appIds, globalDBString)
let appMetadata = apps
.filter(app => isDevAppID(app.appId || app._id))
.map(app => ({ appId: app.appId || app._id, name: app.name }))
const tmpPath = await sdk.backups.exportMultipleApps(
appMetadata,
globalDBString
)
const filename = `cloud-export-${new Date().getTime()}.tar.gz`
ctx.attachment(filename)
ctx.body = streamFile(tmpPath)
@ -48,51 +87,37 @@ exports.importApps = async ctx => {
"Import file is required and environment must be fresh to import apps."
)
}
if (ctx.request.files.importFile.type !== "application/gzip") {
ctx.throw(400, "Import file must be a gzipped tarball.")
}
// TODO: IMPLEMENT TARBALL EXTRACTION, APP IMPORT, ATTACHMENT IMPORT AND GLOBAL DB IMPORT
// async function getAllDocType(db, docType) {
// const response = await db.allDocs(
// getDocParams(docType, null, {
// include_docs: true,
// })
// )
// return response.rows.map(row => row.doc)
// }
// async function createApp(appName, appImport) {
// const ctx = {
// request: {
// body: {
// templateString: appImport,
// name: appName,
// },
// },
// }
// return create(ctx)
// }
// const importFile = ctx.request.files.importFile
// const importString = readFileSync(importFile.path)
// const dbs = JSON.parse(importString)
// const globalDbImport = dbs.global
// // remove from the list of apps
// delete dbs.global
// const globalDb = getGlobalDB()
// // load the global db first
// await globalDb.load(stringToReadStream(globalDbImport))
// for (let [appName, appImport] of Object.entries(dbs)) {
// await createApp(appName, appImport)
// }
//
// // if there are any users make sure to remove them
// let users = await getAllDocType(globalDb, DocumentType.USER)
// let userDeletionPromises = []
// for (let user of users) {
// userDeletionPromises.push(globalDb.remove(user._id, user._rev))
// }
// if (userDeletionPromises.length > 0) {
// await Promise.all(userDeletionPromises)
// }
//
// await globalDb.bulkDocs(users)
// initially get all the app databases out of the tarball
const tmpPath = sdk.backups.untarFile(ctx.request.file.importFile)
const globalDbImport = sdk.backups.getGlobalDBFile(tmpPath)
const appNames = fs
.readdirSync(tmpPath)
.filter(dir => dir.startsWith(APP_PREFIX))
const globalDb = getGlobalDB()
// load the global db first
await globalDb.load(stringToReadStream(globalDbImport))
const appCreationPromises = []
for (let appName of appNames) {
appCreationPromises.push(createApp(appName, join(tmpPath, appName)))
}
await Promise.all(appCreationPromises)
// if there are any users make sure to remove them
let users = await getAllDocType(globalDb, DocumentType.USER)
let userDeletionPromises = []
for (let user of users) {
userDeletionPromises.push(globalDb.remove(user._id, user._rev))
}
if (userDeletionPromises.length > 0) {
await Promise.all(userDeletionPromises)
}
await globalDb.bulkDocs(users)
ctx.body = {
message: "Apps successfully imported.",
}

View file

@ -127,28 +127,33 @@ export async function exportApp(appId: string, config?: ExportOpts) {
/**
* Export all apps + global DB (if supplied) to a single tarball, this includes
* the attachments for each app as well.
* @param {string[]} appIds The IDs of the apps to be exported.
* @param {object[]} appMetadata The IDs and names of apps to export.
* @param {string} globalDbContents The contents of the global DB to export as well.
* @return {string} The path to the tarball.
*/
export async function exportMultipleApps(
appIds: string[],
appMetadata: { appId: string; name: string }[],
globalDbContents?: string
) {
const tmpPath = join(budibaseTempDir(), uuid())
fs.mkdirSync(tmpPath)
let exportPromises: Promise<void>[] = []
const exportAndMove = async (appId: string) => {
// export each app to a directory, then move it into the complete export
const exportAndMove = async (appId: string, appName: string) => {
const path = await exportApp(appId)
await fs.promises.rename(path, join(tmpPath, appId))
}
for (let appId of appIds) {
exportPromises.push(exportAndMove(appId))
for (let metadata of appMetadata) {
exportPromises.push(exportAndMove(metadata.appId, metadata.name))
}
// wait for all exports to finish
await Promise.all(exportPromises)
// add the global DB contents
if (globalDbContents) {
fs.writeFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), globalDbContents)
}
const tarPath = tarFilesToTmp(tmpPath, [...appIds, GLOBAL_DB_EXPORT_FILE])
const appNames = appMetadata.map(metadata => metadata.name)
const tarPath = tarFilesToTmp(tmpPath, [...appNames, GLOBAL_DB_EXPORT_FILE])
// clear up the tmp path now tarball generated
fs.rmSync(tmpPath, { recursive: true, force: true })
return tarPath

View file

@ -1,7 +1,11 @@
import { db as dbCore } from "@budibase/backend-core"
import { TABLE_ROW_PREFIX } from "../../../db/utils"
import { budibaseTempDir } from "../../../utilities/budibaseDir"
import { DB_EXPORT_FILE, ATTACHMENT_DIR } from "./constants"
import {
DB_EXPORT_FILE,
ATTACHMENT_DIR,
GLOBAL_DB_EXPORT_FILE,
} from "./constants"
import { uploadDirectory } from "../../../utilities/fileSystem/utilities"
import { ObjectStoreBuckets, FieldTypes } from "../../../constants"
import { join } from "path"
@ -91,6 +95,22 @@ async function getTemplateStream(template: TemplateType) {
}
}
export function untarFile(file: { path: string }) {
const tmpPath = join(budibaseTempDir(), uuid())
fs.mkdirSync(tmpPath)
// extract the tarball
tar.extract({
sync: true,
cwd: tmpPath,
file: file.path,
})
return tmpPath
}
export function getGlobalDBFile(tmpPath: string) {
return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
}
export async function importApp(
appId: string,
db: PouchDB.Database,
@ -98,15 +118,11 @@ export async function importApp(
) {
let prodAppId = dbCore.getProdAppID(appId)
let dbStream: any
if (template.file && template.file.type === "application/gzip") {
const tmpPath = join(budibaseTempDir(), uuid())
fs.mkdirSync(tmpPath)
// extract the tarball
tar.extract({
sync: true,
cwd: tmpPath,
file: template.file.path,
})
const isTar = template.file && template.file.type === "application/gzip"
const isDirectory =
template.file && fs.lstatSync(template.file.path).isDirectory()
if (template.file && (isTar || isDirectory)) {
const tmpPath = isTar ? untarFile(template.file) : template.file.path
const attachmentPath = join(tmpPath, ATTACHMENT_DIR)
// have to handle object import
if (fs.existsSync(attachmentPath)) {