From 2f5ecf6e5fb77d79c9a51618a6ecb88396299d92 Mon Sep 17 00:00:00 2001
From: mike12345567
Date: Tue, 11 May 2021 17:53:54 +0100
Subject: [PATCH] Removing more useless deployment code.

---
 .../src/api/controllers/deploy/index.js      |  11 +-
 .../src/api/controllers/deploy/selfDeploy.js |  19 ---
 .../src/api/controllers/deploy/utils.js      | 136 ------------------
 .../src/api/controllers/static/index.js      |  22 ++-
 4 files changed, 22 insertions(+), 166 deletions(-)
 delete mode 100644 packages/server/src/api/controllers/deploy/selfDeploy.js
 delete mode 100644 packages/server/src/api/controllers/deploy/utils.js

diff --git a/packages/server/src/api/controllers/deploy/index.js b/packages/server/src/api/controllers/deploy/index.js
index 7d5dae3263..38c2df527a 100644
--- a/packages/server/src/api/controllers/deploy/index.js
+++ b/packages/server/src/api/controllers/deploy/index.js
@@ -1,6 +1,5 @@
 const PouchDB = require("../../../db")
 const Deployment = require("./Deployment")
-const deploymentService = require("./selfDeploy")
 // the max time we can wait for an invalidation to complete before considering it failed
 const MAX_PENDING_TIME_MS = 30 * 60000
 const DeploymentStatus = {
@@ -56,16 +55,8 @@ async function storeLocalDeploymentHistory(deployment) {
 }
 
 async function deployApp(deployment) {
-  const appId = deployment.getAppId()
   try {
-    console.log(`Uploading assets for appID ${appId}..`)
-
-    await deploymentService.deploy(deployment)
-
-    // replicate the DB to the main couchDB cluster
-    console.log("Replicating local PouchDB to CouchDB..")
-    await deploymentService.replicateDb(deployment)
-
+    // TODO: DB replication was here but wasn't accurate to new system
     deployment.setStatus(DeploymentStatus.SUCCESS)
     await storeLocalDeploymentHistory(deployment)
   } catch (err) {
diff --git a/packages/server/src/api/controllers/deploy/selfDeploy.js b/packages/server/src/api/controllers/deploy/selfDeploy.js
deleted file mode 100644
index 214680e5ec..0000000000
--- a/packages/server/src/api/controllers/deploy/selfDeploy.js
+++ /dev/null
@@ -1,19 +0,0 @@
-const { deployToObjectStore, performReplication } = require("./utils")
-
-exports.deploy = async function (deployment) {
-  const appId = deployment.getAppId()
-  const verification = deployment.getVerification()
-  // no metadata, aws has account ID in metadata
-  const metadata = {}
-  await deployToObjectStore(appId, verification.bucket, metadata)
-}
-
-exports.replicateDb = async function (deployment) {
-  const appId = deployment.getAppId()
-  const verification = deployment.getVerification()
-  return performReplication(
-    appId,
-    verification.couchDbSession,
-    await getCouchUrl()
-  )
-}
diff --git a/packages/server/src/api/controllers/deploy/utils.js b/packages/server/src/api/controllers/deploy/utils.js
deleted file mode 100644
index 59f94905fa..0000000000
--- a/packages/server/src/api/controllers/deploy/utils.js
+++ /dev/null
@@ -1,136 +0,0 @@
-const { join } = require("../../../utilities/centralPath")
-const fs = require("fs")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
-const fetch = require("node-fetch")
-const PouchDB = require("../../../db")
-const CouchDB = require("pouchdb")
-const { upload } = require("../../../utilities/fileSystem")
-const { attachmentsRelativeURL } = require("../../../utilities")
-
-// TODO: everything in this file is to be removed
-
-function walkDir(dirPath, callback) {
-  for (let filename of fs.readdirSync(dirPath)) {
-    const filePath = `${dirPath}/${filename}`
-    const stat = fs.lstatSync(filePath)
-
-    if (stat.isFile()) {
-      callback(filePath)
-    } else {
-      walkDir(filePath, callback)
-    }
-  }
-}
-
-exports.fetchCredentials = async function (url, body) {
-  const response = await fetch(url, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: { "Content-Type": "application/json" },
-  })
-
-  const json = await response.json()
-  if (json.errors) {
-    throw new Error(json.errors)
-  }
-
-  if (response.status !== 200) {
-    throw new Error(
-      `Error fetching temporary credentials: ${JSON.stringify(json)}`
-    )
-  }
-
-  return json
-}
-
-exports.prepareUpload = async function ({ s3Key, bucket, metadata, file }) {
-  const response = await upload({
-    bucket,
-    metadata,
-    filename: s3Key,
-    path: file.path,
-    type: file.type,
-  })
-
-  // don't store a URL, work this out on the way out as the URL could change
-  return {
-    size: file.size,
-    name: file.name,
-    url: attachmentsRelativeURL(response.Key),
-    extension: [...file.name.split(".")].pop(),
-    key: response.Key,
-  }
-}
-
-exports.deployToObjectStore = async function (appId, bucket, metadata) {
-  const appAssetsPath = join(budibaseAppsDir(), appId, "public")
-
-  let uploads = []
-
-  // Upload HTML, CSS and JS for each page of the web app
-  walkDir(appAssetsPath, function (filePath) {
-    const filePathParts = filePath.split("/")
-    const appAssetUpload = exports.prepareUpload({
-      bucket,
-      file: {
-        path: filePath,
-        name: filePathParts.pop(),
-      },
-      s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
-      metadata,
-    })
-    uploads.push(appAssetUpload)
-  })
-
-  // Upload file attachments
-  const db = new PouchDB(appId)
-  let fileUploads
-  try {
-    fileUploads = await db.get("_local/fileuploads")
-  } catch (err) {
-    fileUploads = { _id: "_local/fileuploads", uploads: [] }
-  }
-
-  for (let file of fileUploads.uploads) {
-    if (file.uploaded) continue
-
-    const attachmentUpload = exports.prepareUpload({
-      file,
-      s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
-      bucket,
-      metadata,
-    })
-
-    uploads.push(attachmentUpload)
-
-    // mark file as uploaded
-    file.uploaded = true
-  }
-
-  db.put(fileUploads)
-
-  try {
-    return await Promise.all(uploads)
-  } catch (err) {
-    console.error("Error uploading budibase app assets to s3", err)
-    throw err
-  }
-}
-
-exports.performReplication = (appId, session, dbUrl) => {
-  return new Promise((resolve, reject) => {
-    const local = new PouchDB(appId)
-
-    const remote = new CouchDB(`${dbUrl}/${appId}`, {
-      fetch: function (url, opts) {
-        opts.headers.set("Cookie", `${session};`)
-        return CouchDB.fetch(url, opts)
-      },
-    })
-
-    const replication = local.sync(remote)
-
-    replication.on("complete", () => resolve())
-    replication.on("error", err => reject(err))
-  })
-}
diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.js
index 8120986a51..ba922cb9ad 100644
--- a/packages/server/src/api/controllers/static/index.js
+++ b/packages/server/src/api/controllers/static/index.js
@@ -5,7 +5,6 @@ const { resolve, join } = require("../../../utilities/centralPath")
 const fetch = require("node-fetch")
 const uuid = require("uuid")
 const { ObjectStoreBuckets } = require("../../../constants")
-const { prepareUpload } = require("../deploy/utils")
 const { processString } = require("@budibase/string-templates")
 const { budibaseTempDir } = require("../../../utilities/budibaseDir")
 const { getDeployedApps } = require("../../../utilities/workerRequests")
@@ -17,6 +16,27 @@
 } = require("../../../utilities/fileSystem")
 const env = require("../../../environment")
 const { objectStoreUrl, clientLibraryPath } = require("../../../utilities")
+const { upload } = require("../../../utilities/fileSystem")
+const { attachmentsRelativeURL } = require("../../../utilities")
+
+async function prepareUpload({ s3Key, bucket, metadata, file }) {
+  const response = await upload({
+    bucket,
+    metadata,
+    filename: s3Key,
+    path: file.path,
+    type: file.type,
+  })
+
+  // don't store a URL, work this out on the way out as the URL could change
+  return {
+    size: file.size,
+    name: file.name,
+    url: attachmentsRelativeURL(response.Key),
+    extension: [...file.name.split(".")].pop(),
+    key: response.Key,
+  }
+}
 
 async function checkForSelfHostedURL(ctx) {
   // the "appId" component of the URL may actually be a specific self hosted URL