diff --git a/packages/server/scripts/exportAppTemplate.js b/packages/server/scripts/exportAppTemplate.js
index e896917d5b..c6d738225c 100755
--- a/packages/server/scripts/exportAppTemplate.js
+++ b/packages/server/scripts/exportAppTemplate.js
@@ -1,6 +1,9 @@
 #!/usr/bin/env node
-const { exportTemplateFromApp } = require("../src/utilities/templates")
 const yargs = require("yargs")
+const fs = require("fs-extra")
+const { join } = require("path")
+const CouchDB = require("../src/db")
+const { budibaseAppsDir } = require("../src/utilities/budibaseDir")
 
 // Script to export a chosen budibase app into a package
 // Usage: ./scripts/exportAppTemplate.js export --name=Funky --appId=appId
@@ -22,18 +25,22 @@ yargs
       },
     },
     async args => {
+      const name = args.name,
+        appId = args.appId
       console.log("Exporting app..")
-      if (args.name == null || args.appId == null) {
+      if (name == null || appId == null) {
         console.error(
           "Unable to export without a name and app ID being specified, check help for more info."
         )
         return
       }
-      const exportPath = await exportTemplateFromApp({
-        templateName: args.name,
-        appId: args.appId,
-      })
-      console.log(`Template ${args.name} exported to ${exportPath}`)
+      const exportPath = join(budibaseAppsDir(), "templates", "app", name, "db")
+      fs.ensureDirSync(exportPath)
+      const writeStream = fs.createWriteStream(join(exportPath, "dump.txt"))
+      // perform couch dump
+      const instanceDb = new CouchDB(appId)
+      await instanceDb.dump(writeStream, {})
+      console.log(`Template ${name} exported to ${exportPath}`)
     }
   )
   .help()
diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js
index 89e233ac5c..e2bb15dbd3 100644
--- a/packages/server/src/api/controllers/application.js
+++ b/packages/server/src/api/controllers/application.js
@@ -1,15 +1,14 @@
 const CouchDB = require("../../db")
-const compileStaticAssets = require("../../utilities/builder/compileStaticAssets")
 const env = require("../../environment")
-const { existsSync } = require("fs-extra")
-const { budibaseAppsDir } = require("../../utilities/budibaseDir")
 const setBuilderToken = require("../../utilities/builder/setBuilderToken")
-const fs = require("fs-extra")
-const { join, resolve } = require("../../utilities/centralPath")
 const packageJson = require("../../../package.json")
 const { createLinkView } = require("../../db/linkedRows")
 const { createRoutingView } = require("../../utilities/routing")
-const { getTemplateStream } = require("../../utilities/fileSystem")
+const {
+  getTemplateStream,
+  createApp,
+  deleteApp,
+} = require("../../utilities/fileSystem")
 const {
   generateAppID,
   getLayoutParams,
@@ -20,9 +19,6 @@ const {
   BUILTIN_ROLE_IDS,
   AccessController,
 } = require("../../utilities/security/roles")
-const {
-  downloadExtractComponentLibraries,
-} = require("../../utilities/createAppPackage")
 const { BASE_LAYOUTS } = require("../../constants/layouts")
 const {
   createHomeScreen,
@@ -181,10 +177,10 @@ exports.create = async function(ctx) {
   const instanceDb = new CouchDB(appId)
   await instanceDb.put(newApplication)
 
-  const newAppFolder = await createEmptyAppPackage(ctx, newApplication)
+  await createEmptyAppPackage(ctx, newApplication)
   /* istanbul ignore next */
   if (env.NODE_ENV !== "jest") {
-    await downloadExtractComponentLibraries(newAppFolder)
+    await createApp(appId)
   }
 
   await setBuilderToken(ctx, appId, version)
@@ -214,10 +210,7 @@ exports.delete = async function(ctx) {
   const app = await db.get(ctx.params.appId)
   const result = await db.destroy()
 
-  // remove top level directory
-  await fs.rmdir(join(budibaseAppsDir(), ctx.params.appId), {
-    recursive: true,
-  })
+  await deleteApp(ctx.params.appId)
 
   ctx.status = 200
   ctx.message = `Application ${app.name} deleted successfully.`
@@ -225,17 +218,8 @@ exports.delete = async function(ctx) {
 }
 
 const createEmptyAppPackage = async (ctx, app) => {
-  const appsFolder = budibaseAppsDir()
-  const newAppFolder = resolve(appsFolder, app._id)
-
   const db = new CouchDB(app._id)
 
-  if (existsSync(newAppFolder)) {
-    ctx.throw(400, "App folder already exists for this application")
-  }
-
-  fs.mkdirpSync(newAppFolder)
-
   let screensAndLayouts = []
   for (let layout of BASE_LAYOUTS) {
     const cloned = cloneDeep(layout)
@@ -251,6 +235,4 @@ const createEmptyAppPackage = async (ctx, app) => {
   screensAndLayouts.push(loginScreen)
 
   await db.bulkDocs(screensAndLayouts)
-  await compileStaticAssets(app._id)
-  return newAppFolder
 }
diff --git a/packages/server/src/api/controllers/backup.js b/packages/server/src/api/controllers/backup.js
index a83f96165b..02be10bbec 100644
--- a/packages/server/src/api/controllers/backup.js
+++ b/packages/server/src/api/controllers/backup.js
@@ -1,28 +1,10 @@
-const { performDump } = require("../../utilities/templates")
-const path = require("path")
-const os = require("os")
-const fs = require("fs-extra")
+const { performBackup } = require("../../utilities/fileSystem")
 
 exports.exportAppDump = async function(ctx) {
   const { appId } = ctx.query
   const appname = decodeURI(ctx.query.appname)
-
-  const backupsDir = path.join(os.homedir(), ".budibase", "backups")
-  fs.ensureDirSync(backupsDir)
   const backupIdentifier = `${appname}Backup${new Date().getTime()}.txt`
-  await performDump({
-    dir: backupsDir,
-    appId,
-    name: backupIdentifier,
-  })
-
-  ctx.status = 200
 
-  const backupFile = path.join(backupsDir, backupIdentifier)
   ctx.attachment(backupIdentifier)
-  ctx.body = fs.createReadStream(backupFile)
+  ctx.body = await performBackup(appId, backupIdentifier)
 }
diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.js
index 24cee67146..466e680a7d 100644
--- a/packages/server/src/api/controllers/static/index.js
+++ b/packages/server/src/api/controllers/static/index.js
@@ -3,7 +3,6 @@ require("svelte/register")
 const send = require("koa-send")
 const { resolve, join } = require("../../../utilities/centralPath")
 const fetch = require("node-fetch")
-const fs = require("fs-extra")
 const uuid = require("uuid")
 const AWS = require("aws-sdk")
 const { prepareUpload } = require("../deploy/utils")
@@ -15,7 +14,7 @@ const {
 const { getDeployedApps } = require("../../../utilities/builder/hosting")
 const CouchDB = require("../../../db")
 const setBuilderToken = require("../../../utilities/builder/setBuilderToken")
-const fileProcessor = require("../../../utilities/fileProcessor")
+const { loadHandlebarsFile } = require("../../../utilities/fileSystem")
 const env = require("../../../environment")
 const { OBJ_STORE_DIRECTORY } = require("../../../constants")
 
@@ -57,88 +56,24 @@ exports.uploadFile = async function(ctx) {
       ? Array.from(ctx.request.files.file)
       : [ctx.request.files.file]
 
-  const attachmentsPath = resolve(
-    budibaseAppsDir(),
-    ctx.user.appId,
-    "attachments"
-  )
-
-  if (env.CLOUD) {
-    // remote upload
-    const s3 = new AWS.S3({
-      params: {
-        Bucket: "prod-budi-app-assets",
-      },
-    })
-
-    const uploads = files.map(file => {
-      const fileExtension = [...file.name.split(".")].pop()
-      const processedFileName = `${uuid.v4()}.${fileExtension}`
-
-      return prepareUpload({
-        file,
-        s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
-        s3,
-      })
-    })
-
-    ctx.body = await Promise.all(uploads)
-    return
-  }
-
-  ctx.body = await processLocalFileUploads({
-    files,
-    outputPath: attachmentsPath,
-    appId: ctx.user.appId,
+  const s3 = new AWS.S3({
+    params: {
+      Bucket: "prod-budi-app-assets",
+    },
   })
-}
 
-async function processLocalFileUploads({ files, outputPath, appId }) {
-  // create attachments dir if it doesnt exist
-  !fs.existsSync(outputPath) && fs.mkdirSync(outputPath, { recursive: true })
-
-  const filesToProcess = files.map(file => {
+  const uploads = files.map(file => {
     const fileExtension = [...file.name.split(".")].pop()
-    // filenames converted to UUIDs so they are unique
     const processedFileName = `${uuid.v4()}.${fileExtension}`
 
-    return {
-      name: file.name,
-      path: file.path,
-      size: file.size,
-      type: file.type,
-      processedFileName,
-      extension: fileExtension,
-      outputPath: join(outputPath, processedFileName),
-      url: join("/attachments", processedFileName),
-    }
+    return prepareUpload({
+      file,
+      s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
+      s3,
+    })
   })
 
-  const fileProcessOperations = filesToProcess.map(fileProcessor.process)
-
-  const processedFiles = await Promise.all(fileProcessOperations)
-
-  let pendingFileUploads
-  // local document used to track which files need to be uploaded
-  // db.get throws an error if the document doesn't exist
-  // need to use a promise to default
-  const db = new CouchDB(appId)
-  await db
-    .get("_local/fileuploads")
-    .then(data => {
-      pendingFileUploads = data
-    })
-    .catch(() => {
-      pendingFileUploads = { _id: "_local/fileuploads", uploads: [] }
-    })
-
-  pendingFileUploads.uploads = [
-    ...processedFiles,
-    ...pendingFileUploads.uploads,
-  ]
-  await db.put(pendingFileUploads)
-
-  return processedFiles
+  ctx.body = await Promise.all(uploads)
 }
 
 exports.serveApp = async function(ctx) {
@@ -157,7 +92,7 @@ exports.serveApp = async function(ctx) {
     objectStoreUrl: objectStoreUrl(),
   })
 
-  const appHbs = fs.readFileSync(`${__dirname}/templates/app.hbs`, "utf8")
+  const appHbs = loadHandlebarsFile(`${__dirname}/templates/app.hbs`)
   ctx.body = await processString(appHbs, {
     head,
     body: html,
diff --git a/packages/server/src/api/controllers/templates.js b/packages/server/src/api/controllers/templates.js
index c3cfa28706..4d55bc5957 100644
--- a/packages/server/src/api/controllers/templates.js
+++ b/packages/server/src/api/controllers/templates.js
@@ -1,10 +1,5 @@
 const fetch = require("node-fetch")
-const {
-  downloadTemplate,
-  exportTemplateFromApp,
-  getLocalTemplates,
-} = require("../../utilities/templates")
-const env = require("../../environment")
+const { downloadTemplate } = require("../../utilities/fileSystem")
 
 // development flag, can be used to test against templates exported locally
 const DEFAULT_TEMPLATES_BUCKET =
@@ -12,16 +7,11 @@
 exports.fetch = async function(ctx) {
   const { type = "app" } = ctx.query
-
-  if (env.LOCAL_TEMPLATES) {
-    ctx.body = Object.values(getLocalTemplates()[type])
-  } else {
-    const response = await fetch(
-      `https://${DEFAULT_TEMPLATES_BUCKET}/manifest.json`
-    )
-    const json = await response.json()
-    ctx.body = Object.values(json.templates[type])
-  }
+  const response = await fetch(
+    `https://${DEFAULT_TEMPLATES_BUCKET}/manifest.json`
+  )
+  const json = await response.json()
+  ctx.body = Object.values(json.templates[type])
 }
 
 // can't currently test this, have to ignore from coverage
@@ -29,26 +19,9 @@ exports.fetch = async function(ctx) {
 exports.downloadTemplate = async function(ctx) {
   const { type, name } = ctx.params
 
-  if (!env.LOCAL_TEMPLATES) {
-    await downloadTemplate(type, name)
-  }
+  await downloadTemplate(type, name)
 
   ctx.body = {
     message: `template ${type}:${name} downloaded successfully.`,
   }
 }
-
-exports.exportTemplateFromApp = async function(ctx) {
-  const { appId } = ctx.user
-  const { templateName } = ctx.request.body
-
-  await exportTemplateFromApp({
-    appId,
-    templateName,
-  })
-
-  ctx.status = 200
-  ctx.body = {
-    message: `Created template: ${templateName}`,
-  }
-}
diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js
index f482f3f2a6..0f6f008a1b 100644
--- a/packages/server/src/api/controllers/view/index.js
+++ b/packages/server/src/api/controllers/view/index.js
@@ -1,8 +1,6 @@
 const CouchDB = require("../../../db")
 const viewTemplate = require("./viewBuilder")
-const fs = require("fs")
-const { join } = require("../../../utilities/centralPath")
-const os = require("os")
+const { apiFileReturn } = require("../../../utilities/fileSystem")
 const exporters = require("./exporters")
 const { fetchView } = require("../row")
 const { ViewNames } = require("../../../db/utils")
@@ -120,12 +118,10 @@ const controller = {
     // Export part
     let headers = Object.keys(schema)
     const exporter = exporters[format]
-    const exportedFile = exporter(headers, ctx.body)
     const filename = `${viewName}.${format}`
-    fs.writeFileSync(join(os.tmpdir(), filename), exportedFile)
-
+    // send down the file
     ctx.attachment(filename)
-    ctx.body = fs.createReadStream(join(os.tmpdir(), filename))
+    ctx.body = apiFileReturn(exporter(headers, ctx.body))
   },
 }
diff --git a/packages/server/src/api/routes/templates.js b/packages/server/src/api/routes/templates.js
index 05882a22ea..6a427e8383 100644
--- a/packages/server/src/api/routes/templates.js
+++ b/packages/server/src/api/routes/templates.js
@@ -12,6 +12,5 @@ router
     authorized(BUILDER),
     controller.downloadTemplate
   )
-  .post("/api/templates", authorized(BUILDER), controller.exportTemplateFromApp)
 
 module.exports = router
diff --git a/packages/server/src/api/routes/tests/templates.spec.js b/packages/server/src/api/routes/tests/templates.spec.js
index f0d26bc7db..1b98d9f7a9 100644
--- a/packages/server/src/api/routes/tests/templates.spec.js
+++ b/packages/server/src/api/routes/tests/templates.spec.js
@@ -24,26 +24,4 @@ describe("/templates", () => {
       expect(Array.isArray(res.body)).toEqual(true)
     })
   })
-
-  describe("export", () => {
-    it("should be able to export the basic app", async () => {
-      const res = await request
-        .post(`/api/templates`)
-        .send({
-          templateName: "test",
-        })
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      expect(res.body.message).toEqual("Created template: test")
-      const dir = join(
-        budibaseAppsDir(),
-        "templates",
-        "app",
-        "test",
-        "db"
-      )
-      expect(fs.existsSync(dir)).toEqual(true)
-    })
-  })
 })
\ No newline at end of file
diff --git a/packages/server/src/constants/index.js b/packages/server/src/constants/index.js
index 46fb5cb649..f4e8c1cf20 100644
--- a/packages/server/src/constants/index.js
+++ b/packages/server/src/constants/index.js
@@ -89,3 +89,9 @@ exports.BaseQueryVerbs = {
   UPDATE: "update",
   DELETE: "delete",
 }
+
+exports.ObjectStoreBuckets = {
+  BACKUPS: "backups",
+  APPS: "apps",
+  TEMPLATES: "templates",
+}
diff --git a/packages/server/src/utilities/builder/compileStaticAssets.js b/packages/server/src/utilities/builder/compileStaticAssets.js
deleted file mode 100644
index 0389c920ee..0000000000
--- a/packages/server/src/utilities/builder/compileStaticAssets.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const { ensureDir, constants, copyFile } = require("fs-extra")
-const { join } = require("../centralPath")
-const { budibaseAppsDir } = require("../budibaseDir")
-
-/**
- * Compile all the non-db static web assets that are required for the running of
- * a budibase application. This includes the JSON structure of the DOM and
- * the client library, a script responsible for reading the JSON structure
- * and rendering the application.
- * @param {string} appId id of the application we want to compile static assets for
- */
-module.exports = async appId => {
-  const publicPath = join(budibaseAppsDir(), appId, "public")
-  await ensureDir(publicPath)
-  await copyClientLib(publicPath)
-}
-
-/**
- * Copy the budibase client library and sourcemap from NPM to /public/.
- * The client library is then served as a static asset when the budibase application
- * is running in preview or prod
- * @param {String} publicPath - path to write the client library to
- */
-const copyClientLib = async publicPath => {
-  const sourcepath = require.resolve("@budibase/client")
-  const destPath = join(publicPath, "budibase-client.js")
-
-  await copyFile(sourcepath, destPath, constants.COPYFILE_FICLONE)
-
-  await copyFile(
-    sourcepath + ".map",
-    destPath + ".map",
-    constants.COPYFILE_FICLONE
-  )
-}
diff --git a/packages/server/src/utilities/createAppPackage.js b/packages/server/src/utilities/createAppPackage.js
deleted file mode 100644
index 9500554227..0000000000
--- a/packages/server/src/utilities/createAppPackage.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const stream = require("stream")
-const fetch = require("node-fetch")
-const tar = require("tar-fs")
-const zlib = require("zlib")
-const { promisify } = require("util")
-const packageJson = require("../../package.json")
-
-const streamPipeline = promisify(stream.pipeline)
-
-// can't really test this due to the downloading nature of it, wouldn't be a great test case
-/* istanbul ignore next */
-exports.downloadExtractComponentLibraries = async appFolder => {
-  const LIBRARIES = ["standard-components"]
-
-  // Need to download tarballs directly from NPM as our users may not have node on their machine
-  for (let lib of LIBRARIES) {
-    // download tarball
-    const registryUrl = `https://registry.npmjs.org/@budibase/${lib}/-/${lib}-${packageJson.version}.tgz`
-    const response = await fetch(registryUrl)
-    if (!response.ok)
-      throw new Error(`unexpected response ${response.statusText}`)
-
-    await streamPipeline(
-      response.body,
-      zlib.Unzip(),
-      tar.extract(`${appFolder}/node_modules/@budibase/${lib}`)
-    )
-  }
-}
diff --git a/packages/server/src/utilities/fileSystem.js b/packages/server/src/utilities/fileSystem.js
deleted file mode 100644
index b55c4110fd..0000000000
--- a/packages/server/src/utilities/fileSystem.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const { budibaseTempDir } = require("./budibaseDir")
-const { isDev } = require("./index")
-const fs = require("fs")
-const { join } = require("path")
-const { downloadTemplate } = require("./templates")
-
-/**
- * The single stack system (Cloud and Builder) should not make use of the file system where possible,
- * this file handles all of the file access for the system with the intention of limiting it all to one
- * place. Keeping all of this logic in one place means that when we need to do file system access (like
- * downloading a package or opening a temporary file) in can be done in way that we can confirm it shouldn't
- * be done through an object store instead.
- */
-
-/**
- * Checks if the system is currently in development mode and if it is makes sure
- * everything required to function is ready.
- */
-exports.checkDevelopmentEnvironment = () => {
-  if (isDev() && !fs.existsSync(budibaseTempDir())) {
-    console.error(
-      "Please run a build before attempting to run server independently to fill 'tmp' directory."
-    )
-    process.exit(-1)
-  }
-}
-
-/**
- * This function manages temporary template files which are stored by Koa.
- * @param {Object} template The template object retrieved from the Koa context object.
- * @returns {Object} Returns an fs read stream which can be loaded into the database.
- */
-exports.getTemplateStream = async template => {
-  if (template.file) {
-    return fs.createReadStream(template.file.path)
-  } else {
-    const templatePath = await downloadTemplate(...template.key.split("/"))
-    return fs.createReadStream(join(templatePath, "db", "dump.txt"))
-  }
-}
diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js
new file mode 100644
index 0000000000..88346da110
--- /dev/null
+++ b/packages/server/src/utilities/fileSystem/index.js
@@ -0,0 +1,98 @@
+const { budibaseTempDir } = require("../budibaseDir")
+const { isDev } = require("../index")
+const fs = require("fs")
+const { join } = require("path")
+const uuid = require("uuid/v4")
+const CouchDB = require("../../db")
+const { ObjectStoreBuckets } = require("../../constants")
+const { streamUpload, deleteFolder, downloadTarball } = require("./utilities")
+const { downloadLibraries, newAppPublicPath } = require("./newApp")
+
+/**
+ * The single stack system (Cloud and Builder) should not make use of the file system where possible,
+ * this file handles all of the file access for the system with the intention of limiting it all to one
+ * place. Keeping all of this logic in one place means that when we need to do file system access (like
+ * downloading a package or opening a temporary file) it can be done in a way that we can confirm it shouldn't
+ * be done through an object store instead.
+ */
+
+/**
+ * Checks if the system is currently in development mode and if it is makes sure
+ * everything required to function is ready.
+ */
+exports.checkDevelopmentEnvironment = () => {
+  if (isDev() && !fs.existsSync(budibaseTempDir())) {
+    console.error(
+      "Please run a build before attempting to run server independently to fill 'tmp' directory."
+    )
+    process.exit(-1)
+  }
+}
+
+/**
+ * This function manages temporary template files which are stored by Koa.
+ * @param {Object} template The template object retrieved from the Koa context object.
+ * @returns {Object} Returns an fs read stream which can be loaded into the database.
+ */
+exports.getTemplateStream = async template => {
+  if (template.file) {
+    return fs.createReadStream(template.file.path)
+  } else {
+    const tmpPath = await exports.downloadTemplate(...template.key.split("/"))
+    return fs.createReadStream(join(tmpPath, "db", "dump.txt"))
+  }
+}
+
+/**
+ * Used to retrieve a handlebars file from the system which will be used as a template.
+ * This is allowable as the template handlebars files should be static and identical across
+ * the cluster.
+ * @param {string} path The path to the handlebars file which is to be loaded.
+ * @returns {string} The loaded handlebars file as a string - loaded as utf8.
+ */
+exports.loadHandlebarsFile = path => {
+  return fs.readFileSync(path, "utf8")
+}
+
+/**
+ * When returning a file from the API we need to write the file to the system temporarily so we
+ * can create a read stream to send.
+ * @param {string} contents the contents of the file which is to be returned from the API.
+ * @return {Object} the read stream which can be put into the koa context body.
+ */
+exports.apiFileReturn = contents => {
+  const path = join(budibaseTempDir(), uuid())
+  fs.writeFileSync(path, contents)
+  return fs.createReadStream(path)
+}
+
+exports.performBackup = async (appId, backupName) => {
+  const path = join(budibaseTempDir(), backupName)
+  const writeStream = fs.createWriteStream(path)
+  // perform couch dump
+  const instanceDb = new CouchDB(appId)
+  await instanceDb.dump(writeStream, {})
+  // write the file to the object store
+  await streamUpload(
+    ObjectStoreBuckets.BACKUPS,
+    join(appId, backupName),
+    fs.createReadStream(path)
+  )
+  return fs.createReadStream(path)
+}
+
+exports.createApp = async appId => {
+  await downloadLibraries(appId)
+  await newAppPublicPath(appId)
+}
+
+exports.deleteApp = async appId => {
+  await deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
+}
+
+exports.downloadTemplate = async (type, name) => {
+  const DEFAULT_TEMPLATES_BUCKET =
+    "prod-budi-templates.s3-eu-west-1.amazonaws.com"
+  const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
+  return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type)
+}
diff --git a/packages/server/src/utilities/fileSystem/newApp.js b/packages/server/src/utilities/fileSystem/newApp.js
new file mode 100644
index 0000000000..ba3d13afed
--- /dev/null
+++ b/packages/server/src/utilities/fileSystem/newApp.js
@@ -0,0 +1,34 @@
+const packageJson = require("../../../package.json")
+const { join } = require("path")
+const { ObjectStoreBuckets } = require("../../constants")
+const { streamUpload, downloadTarball } = require("./utilities")
+const fs = require("fs")
+
+const BUCKET_NAME = ObjectStoreBuckets.APPS
+
+// can't really test this due to the downloading nature of it, wouldn't be a great test case
+/* istanbul ignore next */
+exports.downloadLibraries = async appId => {
+  const LIBRARIES = ["standard-components"]
+
+  // Need to download tarballs directly from NPM as our users may not have node on their machine
+  for (let lib of LIBRARIES) {
+    // download tarball
+    const registryUrl = `https://registry.npmjs.org/@budibase/${lib}/-/${lib}-${packageJson.version}.tgz`
+    const path = join(appId, "node_modules", "@budibase", lib)
+    await downloadTarball(registryUrl, BUCKET_NAME, path)
+  }
+}
+
+exports.newAppPublicPath = async appId => {
+  const path = join(appId, "public")
+  const sourcepath = require.resolve("@budibase/client")
+  const destPath = join(path, "budibase-client.js")
+
+  await streamUpload(BUCKET_NAME, destPath, fs.createReadStream(sourcepath))
+  await streamUpload(
+    BUCKET_NAME,
+    destPath + ".map",
+    fs.createReadStream(sourcepath + ".map")
+  )
+}
diff --git a/packages/server/src/utilities/fileSystem/utilities.js b/packages/server/src/utilities/fileSystem/utilities.js
new file mode 100644
index 0000000000..01d956dac6
--- /dev/null
+++ b/packages/server/src/utilities/fileSystem/utilities.js
@@ -0,0 +1,116 @@
+const sanitize = require("sanitize-s3-objectkey")
+const AWS = require("aws-sdk")
+const stream = require("stream")
+const fetch = require("node-fetch")
+const tar = require("tar-fs")
+const zlib = require("zlib")
+const { promisify } = require("util")
+const { join } = require("path")
+const fs = require("fs")
+const { budibaseTempDir } = require("../budibaseDir")
+
+const streamPipeline = promisify(stream.pipeline)
+
+/**
+ * Gets a connection to the object store using the S3 SDK.
+ * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
+ * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
+ * @constructor
+ */
+exports.ObjectStore = bucket => {
+  return new AWS.S3({
+    params: {
+      Bucket: bucket,
+    },
+  })
+}
+
+exports.makeSureBucketExists = async (client, bucketName) => {
+  try {
+    await client
+      .headBucket({
+        Bucket: bucketName,
+      })
+      .promise()
+  } catch (err) {
+    // bucket doesn't exist create it
+    if (err.statusCode === 404) {
+      await client
+        .createBucket({
+          Bucket: bucketName,
+        })
+        .promise()
+    } else {
+      throw err
+    }
+  }
+}
+
+exports.streamUpload = async (bucket, filename, stream) => {
+  const objectStore = exports.ObjectStore(bucket)
+  await exports.makeSureBucketExists(objectStore, bucket)
+
+  const params = {
+    Bucket: bucket,
+    Key: sanitize(filename).replace(/\\/g, "/"),
+    Body: stream,
+  }
+  return objectStore.upload(params).promise()
+}
+
+exports.deleteFolder = async (bucket, folder) => {
+  const client = exports.ObjectStore(bucket)
+  const listParams = {
+    Bucket: bucket,
+    Prefix: folder,
+  }
+
+  const data = await client.listObjects(listParams).promise()
+  if (data.Contents.length > 0) {
+    const deleteParams = {
+      Bucket: bucket,
+      Delete: {
+        Objects: [],
+      },
+    }
+
+    data.Contents.forEach(content => {
+      deleteParams.Delete.Objects.push({ Key: content.Key })
+    })
+
+    const deleted = await client.deleteObjects(deleteParams).promise()
+    // can only empty 1000 items at once
+    if (deleted.Deleted.length === 1000) {
+      return exports.deleteFolder(bucket, folder)
+    }
+  }
+}
+
+exports.uploadDirectory = async (bucket, localPath, bucketPath) => {
+  let uploads = []
+  const files = fs.readdirSync(localPath, { withFileTypes: true })
+  for (let file of files) {
+    const path = join(bucketPath, file.name)
+    const local = join(localPath, file.name)
+    if (file.isDirectory()) {
+      uploads.push(exports.uploadDirectory(bucket, local, path))
+    } else {
+      uploads.push(
+        exports.streamUpload(bucket, path, fs.createReadStream(local))
+      )
+    }
+  }
+  await Promise.all(uploads)
+}
+
+exports.downloadTarball = async (url, bucket, path) => {
+  const response = await fetch(url)
+  if (!response.ok) {
+    throw new Error(`unexpected response ${response.statusText}`)
+  }
+
+  const tmpPath = join(budibaseTempDir(), path)
+  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
+  await exports.uploadDirectory(bucket, tmpPath, path)
+  // return the temporary path in case there is a use for it
+  return tmpPath
+}
diff --git a/packages/server/src/utilities/templates.js b/packages/server/src/utilities/templates.js
index c3d89477df..ccad4a9d73 100644
--- a/packages/server/src/utilities/templates.js
+++ b/packages/server/src/utilities/templates.js
@@ -9,73 +9,9 @@ const { promisify } = require("util")
 const streamPipeline = promisify(stream.pipeline)
 const { budibaseAppsDir } = require("./budibaseDir")
 const env = require("../environment")
-const CouchDB = require("../db")
+const { downloadTemplate } = require("./fileSystem")
 
-const DEFAULT_TEMPLATES_BUCKET =
-  "prod-budi-templates.s3-eu-west-1.amazonaws.com"
-
-exports.getLocalTemplates = function() {
-  const templatesDir = join(os.homedir(), ".budibase", "templates", "app")
-  const templateObj = { app: {} }
-  fs.ensureDirSync(templatesDir)
-  const templateNames = fs.readdirSync(templatesDir)
-  for (let name of templateNames) {
-    templateObj.app[name] = {
-      name,
-      category: "local",
-      description: "local template",
-      type: "app",
-      key: `app/${name}`,
-    }
-  }
-  return templateObj
-}
 // can't really test this, downloading is just not something we should do in a behavioural test
 /* istanbul ignore next */
-exports.downloadTemplate = async function(type, name) {
-  const dirName = join(budibaseAppsDir(), "templates", type, name)
-  if (env.LOCAL_TEMPLATES) {
-    return dirName
-  }
-  const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
-  const response = await fetch(templateUrl)
-
-  if (!response.ok) {
-    throw new Error(
-      `Error downloading template ${type}:${name}: ${response.statusText}`
-    )
-  }
-
-  // stream the response, unzip and extract
-  await streamPipeline(
-    response.body,
-    zlib.Unzip(),
-    tar.extract(join(budibaseAppsDir(), "templates", type))
-  )
-
-  return dirName
-}
-
-async function performDump({ dir, appId, name = "dump.txt" }) {
-  const writeStream = fs.createWriteStream(join(dir, name))
-  // perform couch dump
-  const instanceDb = new CouchDB(appId)
-  await instanceDb.dump(writeStream, {})
-}
-
-exports.performDump = performDump
-
-exports.exportTemplateFromApp = async function({ templateName, appId }) {
-  // Copy frontend files
-  const templatesDir = join(
-    budibaseAppsDir(),
-    "templates",
-    "app",
-    templateName,
-    "db"
-  )
-  fs.ensureDirSync(templatesDir)
-  await performDump({ dir: templatesDir, appId })
-  return templatesDir
-}
+exports.downloadTemplate = downloadTemplate
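
For reference, a minimal sketch (not part of the patch) of how the new fileSystem module is intended to be consumed by a controller, based only on the exports added in this diff. The require paths assume a caller under src/api/controllers, the object key and local file path passed to streamUpload are illustrative only, and S3/MinIO credentials are assumed to already be configured for aws-sdk.

// Hypothetical usage sketch of the helpers introduced above.
const fs = require("fs")
const {
  createApp,
  deleteApp,
  performBackup,
  apiFileReturn,
} = require("../../utilities/fileSystem")
const { streamUpload } = require("../../utilities/fileSystem/utilities")
const { ObjectStoreBuckets } = require("../../constants")

async function exerciseFileSystem(appId) {
  // upload the client library and public assets for a new app into the apps bucket
  await createApp(appId)

  // dump the app's CouchDB database, mirror it into the backups bucket and get
  // back a read stream that can be set directly as a Koa response body
  const backup = await performBackup(appId, `${appId}Backup${Date.now()}.txt`)

  // push an arbitrary local file into the apps bucket (key and path are examples)
  await streamUpload(
    ObjectStoreBuckets.APPS,
    `${appId}/attachments/example.txt`,
    fs.createReadStream("/tmp/example.txt")
  )

  // return generated content from an API endpoint via a temporary file on disk
  const csv = apiFileReturn("col1,col2\n1,2")

  // remove everything stored for the app again
  await deleteApp(appId)
  return { backup, csv }
}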