From a9d927b713bf829be25b40f1bb1d7eaaa64a5c6c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 30 Jun 2022 20:26:49 +0100 Subject: [PATCH] Adding MinIO handling, backing up to disk by bucket name. --- hosting/docker-compose.dev.yaml | 3 +- hosting/docker-compose.yaml | 2 +- .../backend-core/src/objectStore/index.ts | 7 +- packages/cli/.gitignore | 1 + packages/cli/src/backups/index.js | 118 ++++-------------- packages/cli/src/backups/objectStore.js | 63 ++++++++++ packages/cli/src/backups/utils.js | 92 ++++++++++++++ 7 files changed, 188 insertions(+), 98 deletions(-) create mode 100644 packages/cli/src/backups/objectStore.js create mode 100644 packages/cli/src/backups/utils.js diff --git a/hosting/docker-compose.dev.yaml b/hosting/docker-compose.dev.yaml index be0bc74a26..7322b0e8a9 100644 --- a/hosting/docker-compose.dev.yaml +++ b/hosting/docker-compose.dev.yaml @@ -11,10 +11,11 @@ services: - minio_data:/data ports: - "${MINIO_PORT}:9000" + - "9001:9001" environment: MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY} MINIO_SECRET_KEY: ${MINIO_SECRET_KEY} - command: server /data + command: server /data --console-address ":9001" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] interval: 30s diff --git a/hosting/docker-compose.yaml b/hosting/docker-compose.yaml index f9d9eaf1c5..cdbe2cb66c 100644 --- a/hosting/docker-compose.yaml +++ b/hosting/docker-compose.yaml @@ -61,7 +61,7 @@ services: MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY} MINIO_SECRET_KEY: ${MINIO_SECRET_KEY} MINIO_BROWSER: "off" - command: server /data + command: server /data --console-address ":9001" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] interval: 30s diff --git a/packages/backend-core/src/objectStore/index.ts b/packages/backend-core/src/objectStore/index.ts index a7e0b0c134..503ab9bca0 100644 --- a/packages/backend-core/src/objectStore/index.ts +++ b/packages/backend-core/src/objectStore/index.ts @@ -75,9 +75,11 @@ export const ObjectStore = (bucket: any) => { s3ForcePathStyle: true, signatureVersion: "v4", apiVersion: "2006-03-01", - params: { + } + if (bucket) { + config.params = { Bucket: sanitizeBucket(bucket), - }, + } } if (env.MINIO_URL) { config.endpoint = env.MINIO_URL @@ -292,6 +294,7 @@ export const uploadDirectory = async ( } } await Promise.all(uploads) + return files } exports.downloadTarballDirect = async (url: string, path: string) => { diff --git a/packages/cli/.gitignore b/packages/cli/.gitignore index 988d7b9a6b..efef4f97c8 100644 --- a/packages/cli/.gitignore +++ b/packages/cli/.gitignore @@ -4,3 +4,4 @@ nginx.conf build/ docker-error.log envoy.yaml +*.tar.gz diff --git a/packages/cli/src/backups/index.js b/packages/cli/src/backups/index.js index 0e6e18bae1..85840af828 100644 --- a/packages/cli/src/backups/index.js +++ b/packages/cli/src/backups/index.js @@ -1,98 +1,19 @@ const Command = require("../structures/Command") const { CommandWords } = require("../constants") -const dotenv = require("dotenv") const fs = require("fs") const { join } = require("path") -const { string } = require("../questions") -const { env } = require("@budibase/backend-core") -const { getPouch, getAllDbs } = require("@budibase/backend-core/db") +const { getAllDbs } = require("@budibase/backend-core/db") const tar = require("tar") const { progressBar } = require("../utils") - -const DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/" -const DEFAULT_MINIO = "http://localhost:10000/" -const TEMP_DIR = ".temp" - -const REQUIRED = [ - { value: "MAIN_PORT", 
default: "10000" }, - { value: "COUCH_DB_URL", default: DEFAULT_COUCH }, - { value: "MINIO_URL", default: DEFAULT_MINIO }, - { value: "MINIO_ACCESS_KEY" }, - { value: "MINIO_SECRET_KEY" }, -] - -function checkURLs(config) { - const mainPort = config["MAIN_PORT"], - username = config["COUCH_DB_USER"], - password = config["COUCH_DB_PASSWORD"] - if (!config["COUCH_DB_URL"] && mainPort && username && password) { - config[ - "COUCH_DB_URL" - ] = `http://${username}:${password}@localhost:${mainPort}/db/` - } - if (!config["MINIO_URL"]) { - config["MINIO_URL"] = DEFAULT_MINIO - } - return config -} - -async function askQuestions() { - console.log( - "*** NOTE: use a .env file to load these parameters repeatedly ***" - ) - let config = {} - for (let property of REQUIRED) { - config[property.value] = await string(property.value, property.default) - } - return config -} - -function loadEnvironment(path) { - if (!fs.existsSync(path)) { - throw "Unable to file specified .env file" - } - const env = fs.readFileSync(path, "utf8") - const config = checkURLs(dotenv.parse(env)) - for (let required of REQUIRED) { - if (!config[required.value]) { - throw `Cannot find "${required.value}" property in .env file` - } - } - return config -} - -// true is the default value passed by commander -async function getConfig(envFile = true) { - let config - if (envFile !== true) { - config = loadEnvironment(envFile) - } else { - config = askQuestions() - } - for (let required of REQUIRED) { - env._set(required.value, config[required.value]) - } - return config -} - -function replication(from, to) { - return new Promise((resolve, reject) => { - from.replicate - .to(to) - .on("complete", () => { - resolve() - }) - .on("error", err => { - reject(err) - }) - }) -} - -function getPouches() { - const Remote = getPouch({ replication: true }) - const Local = getPouch({ onDisk: true, directory: TEMP_DIR }) - return { Remote, Local } -} +const { + TEMP_DIR, + COUCH_DIR, + MINIO_DIR, + getConfig, + replication, + getPouches, +} = require("./utils") +const { exportObjects, importObjects } = require("./objectStore") async function exportBackup(opts) { const envFile = opts.env || undefined @@ -107,18 +28,21 @@ async function exportBackup(opts) { if (fs.existsSync(TEMP_DIR)) { fs.rmSync(TEMP_DIR, { recursive: true }) } - const couchDir = join(TEMP_DIR, "couchdb") + const couchDir = join(TEMP_DIR, COUCH_DIR) fs.mkdirSync(TEMP_DIR) fs.mkdirSync(couchDir) + console.log("CouchDB Export") const bar = progressBar(dbList.length) let count = 0 for (let db of dbList) { bar.update(++count) const remote = new Remote(db) - const local = new Local(join(TEMP_DIR, "couchdb", db)) + const local = new Local(join(TEMP_DIR, COUCH_DIR, db)) await replication(remote, local) } bar.stop() + console.log("S3 Export") + await exportObjects() tar.create( { sync: true, @@ -126,7 +50,7 @@ async function exportBackup(opts) { file: filename, cwd: join(TEMP_DIR), }, - ["couchdb"] + [COUCH_DIR, MINIO_DIR] ) fs.rmSync(TEMP_DIR, { recursive: true }) console.log(`Generated export file - ${filename}`) @@ -140,6 +64,9 @@ async function importBackup(opts) { console.error("Cannot import without specifying a valid file to import") process.exit(-1) } + if (fs.existsSync(TEMP_DIR)) { + fs.rmSync(TEMP_DIR, { recursive: true }) + } fs.mkdirSync(TEMP_DIR) tar.extract({ sync: true, @@ -147,16 +74,19 @@ async function importBackup(opts) { file: filename, }) const { Remote, Local } = getPouches() - const dbList = fs.readdirSync(join(TEMP_DIR, "couchdb")) + const dbList = 
fs.readdirSync(join(TEMP_DIR, COUCH_DIR))
+  console.log("CouchDB Import")
   const bar = progressBar(dbList.length)
   let count = 0
   for (let db of dbList) {
     bar.update(++count)
     const remote = new Remote(db)
-    const local = new Local(join(TEMP_DIR, "couchdb", db))
+    const local = new Local(join(TEMP_DIR, COUCH_DIR, db))
     await replication(local, remote)
   }
   bar.stop()
+  console.log("MinIO Import")
+  await importObjects()
   console.log("Import complete")
   fs.rmSync(TEMP_DIR, { recursive: true })
 }
diff --git a/packages/cli/src/backups/objectStore.js b/packages/cli/src/backups/objectStore.js
new file mode 100644
index 0000000000..b0bf99891d
--- /dev/null
+++ b/packages/cli/src/backups/objectStore.js
@@ -0,0 +1,63 @@
+const {
+  ObjectStoreBuckets,
+  ObjectStore,
+  retrieve,
+  uploadDirectory,
+  makeSureBucketExists,
+} = require("@budibase/backend-core/objectStore")
+const fs = require("fs")
+const { join } = require("path")
+const { TEMP_DIR, MINIO_DIR } = require("./utils")
+const { progressBar } = require("../utils")
+
+const bucketList = Object.values(ObjectStoreBuckets)
+
+exports.exportObjects = async () => {
+  const path = join(TEMP_DIR, MINIO_DIR)
+  fs.mkdirSync(path)
+  let fullList = []
+  for (let bucket of bucketList) {
+    const client = ObjectStore(bucket)
+    try {
+      await client.headBucket().promise()
+    } catch (err) {
+      continue
+    }
+    const list = await client.listObjectsV2().promise()
+    fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
+  }
+  const bar = progressBar(fullList.length)
+  let count = 0
+  for (let object of fullList) {
+    const filename = object.Key
+    const data = await retrieve(object.bucket, filename)
+    const possiblePath = filename.split("/")
+    // always create the bucket directory, plus any nested object "folders",
+    // so that objects at the top level of a bucket can be written out too
+    const dirs = possiblePath.slice(0, possiblePath.length - 1)
+    fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
+    fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
+    bar.update(++count)
+  }
+  bar.stop()
+}
+
+exports.importObjects = async () => {
+  const path = join(TEMP_DIR, MINIO_DIR)
+  const buckets = fs.readdirSync(path)
+  let total = 0
+  buckets.forEach(bucket => {
+    const files = fs.readdirSync(join(path, bucket))
+    total += files.length
+  })
+  const bar = progressBar(total)
+  let count = 0
+  for (let bucket of buckets) {
+    const client = ObjectStore(bucket)
+    await makeSureBucketExists(client, bucket)
+    const files = await uploadDirectory(bucket, join(path, bucket), "/")
+    count += files.length
+    bar.update(count)
+  }
+  bar.stop()
+}
diff --git a/packages/cli/src/backups/utils.js b/packages/cli/src/backups/utils.js
new file mode 100644
index 0000000000..5ceba577df
--- /dev/null
+++ b/packages/cli/src/backups/utils.js
@@ -0,0 +1,92 @@
+const dotenv = require("dotenv")
+const fs = require("fs")
+const { string } = require("../questions")
+const { env } = require("@budibase/backend-core")
+const { getPouch } = require("@budibase/backend-core/db")
+
+exports.DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/"
+exports.DEFAULT_MINIO = "http://localhost:10000/"
+exports.TEMP_DIR = ".temp"
+exports.COUCH_DIR = "couchdb"
+exports.MINIO_DIR = "minio"
+
+const REQUIRED = [
+  { value: "MAIN_PORT", default: "10000" },
+  { value: "COUCH_DB_URL", default: exports.DEFAULT_COUCH },
+  { value: "MINIO_URL", default: exports.DEFAULT_MINIO },
+  { value: "MINIO_ACCESS_KEY" },
+  { value: "MINIO_SECRET_KEY" },
+]
+
+exports.checkURLs = config => {
+  const mainPort = config["MAIN_PORT"],
+    username = config["COUCH_DB_USER"],
+    password = 
config["COUCH_DB_PASSWORD"] + if (!config["COUCH_DB_URL"] && mainPort && username && password) { + config[ + "COUCH_DB_URL" + ] = `http://${username}:${password}@localhost:${mainPort}/db/` + } + if (!config["MINIO_URL"]) { + config["MINIO_URL"] = exports.DEFAULT_MINIO + } + return config +} + +exports.askQuestions = async () => { + console.log( + "*** NOTE: use a .env file to load these parameters repeatedly ***" + ) + let config = {} + for (let property of REQUIRED) { + config[property.value] = await string(property.value, property.default) + } + return config +} + +exports.loadEnvironment = path => { + if (!fs.existsSync(path)) { + throw "Unable to file specified .env file" + } + const env = fs.readFileSync(path, "utf8") + const config = exports.checkURLs(dotenv.parse(env)) + for (let required of REQUIRED) { + if (!config[required.value]) { + throw `Cannot find "${required.value}" property in .env file` + } + } + return config +} + +// true is the default value passed by commander +exports.getConfig = async (envFile = true) => { + let config + if (envFile !== true) { + config = exports.loadEnvironment(envFile) + } else { + config = await exports.askQuestions() + } + for (let required of REQUIRED) { + env._set(required.value, config[required.value]) + } + return config +} + +exports.replication = (from, to) => { + return new Promise((resolve, reject) => { + from.replicate + .to(to) + .on("complete", () => { + resolve() + }) + .on("error", err => { + reject(err) + }) + }) +} + +exports.getPouches = () => { + const Remote = getPouch({ replication: true }) + const Local = getPouch({ onDisk: true, directory: exports.TEMP_DIR }) + return { Remote, Local } +}