1
0
Fork 0
mirror of synced 2024-06-26 10:00:41 +12:00

Adding MinIO handling, backing up to disk by bucket name.

This commit is contained in:
mike12345567 2022-06-30 20:26:49 +01:00
parent c633c643e2
commit a9d927b713
7 changed files with 188 additions and 98 deletions

View file

@@ -11,10 +11,11 @@ services:
- minio_data:/data
ports:
- "${MINIO_PORT}:9000"
- "9001:9001"
environment:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
command: server /data
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s

View file

@@ -61,7 +61,7 @@ services:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
MINIO_BROWSER: "off"
command: server /data
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s

View file

@@ -75,9 +75,11 @@ export const ObjectStore = (bucket: any) => {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
params: {
}
if (bucket) {
config.params = {
Bucket: sanitizeBucket(bucket),
},
}
}
if (env.MINIO_URL) {
config.endpoint = env.MINIO_URL
@@ -292,6 +294,7 @@ export const uploadDirectory = async (
}
}
await Promise.all(uploads)
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {

View file

@@ -4,3 +4,4 @@ nginx.conf
build/
docker-error.log
envoy.yaml
*.tar.gz

View file

@@ -1,98 +1,19 @@
const Command = require("../structures/Command")
const { CommandWords } = require("../constants")
const dotenv = require("dotenv")
const fs = require("fs")
const { join } = require("path")
const { string } = require("../questions")
const { env } = require("@budibase/backend-core")
const { getPouch, getAllDbs } = require("@budibase/backend-core/db")
const { getAllDbs } = require("@budibase/backend-core/db")
const tar = require("tar")
const { progressBar } = require("../utils")
const DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/"
const DEFAULT_MINIO = "http://localhost:10000/"
const TEMP_DIR = ".temp"
const REQUIRED = [
{ value: "MAIN_PORT", default: "10000" },
{ value: "COUCH_DB_URL", default: DEFAULT_COUCH },
{ value: "MINIO_URL", default: DEFAULT_MINIO },
{ value: "MINIO_ACCESS_KEY" },
{ value: "MINIO_SECRET_KEY" },
]
// (removed side of this diff - logic moved to utils.js `checkURLs`)
// Fill in COUCH_DB_URL / MINIO_URL when they can be derived from the
// other supplied properties; mutates and returns `config`.
function checkURLs(config) {
const mainPort = config["MAIN_PORT"],
username = config["COUCH_DB_USER"],
password = config["COUCH_DB_PASSWORD"]
// build the CouchDB URL from main port + credentials when not set explicitly
if (!config["COUCH_DB_URL"] && mainPort && username && password) {
config[
"COUCH_DB_URL"
] = `http://${username}:${password}@localhost:${mainPort}/db/`
}
// fall back to the default MinIO URL when none was provided
if (!config["MINIO_URL"]) {
config["MINIO_URL"] = DEFAULT_MINIO
}
return config
}
// (removed side of this diff - logic moved to utils.js `askQuestions`)
// Interactively prompt for every REQUIRED property, offering the
// registered default as the suggested answer; returns the config map.
async function askQuestions() {
console.log(
"*** NOTE: use a .env file to load these parameters repeatedly ***"
)
let config = {}
for (let property of REQUIRED) {
config[property.value] = await string(property.value, property.default)
}
return config
}
// (removed side of this diff - logic moved to utils.js `loadEnvironment`)
// Read and validate a .env file at `path`; throws (a plain string) when
// the file is missing or any REQUIRED property is absent.
function loadEnvironment(path) {
if (!fs.existsSync(path)) {
// NOTE(review): typo - message presumably meant "Unable to find ..."
throw "Unable to file specified .env file"
}
const env = fs.readFileSync(path, "utf8")
const config = checkURLs(dotenv.parse(env))
for (let required of REQUIRED) {
if (!config[required.value]) {
throw `Cannot find "${required.value}" property in .env file`
}
}
return config
}
// (removed side of this diff - logic moved to utils.js `getConfig`)
// true is the default value passed by commander
// Resolve configuration from a .env file (when --env was given) or from
// interactive prompts, then push each property into backend-core's env.
async function getConfig(envFile = true) {
let config
if (envFile !== true) {
config = loadEnvironment(envFile)
} else {
// NOTE(review): askQuestions() is async but not awaited here, so
// `config` would be a Promise - the replacement in utils.js (which
// awaits it) fixes this.
config = askQuestions()
}
for (let required of REQUIRED) {
env._set(required.value, config[required.value])
}
return config
}
// (removed side of this diff - logic moved to utils.js `replication`)
// Promisify a one-shot PouchDB replication: resolves on the "complete"
// event, rejects with the error on "error".
function replication(from, to) {
return new Promise((resolve, reject) => {
from.replicate
.to(to)
.on("complete", () => {
resolve()
})
.on("error", err => {
reject(err)
})
})
}
// (removed side of this diff - logic moved to utils.js `getPouches`)
// Build the two PouchDB constructors used by backup/restore: Remote
// replicates against CouchDB, Local persists to disk under TEMP_DIR.
function getPouches() {
const Remote = getPouch({ replication: true })
const Local = getPouch({ onDisk: true, directory: TEMP_DIR })
return { Remote, Local }
}
const {
TEMP_DIR,
COUCH_DIR,
MINIO_DIR,
getConfig,
replication,
getPouches,
} = require("./utils")
const { exportObjects, importObjects } = require("./objectStore")
async function exportBackup(opts) {
const envFile = opts.env || undefined
@@ -107,18 +28,21 @@ async function exportBackup(opts) {
if (fs.existsSync(TEMP_DIR)) {
fs.rmSync(TEMP_DIR, { recursive: true })
}
const couchDir = join(TEMP_DIR, "couchdb")
const couchDir = join(TEMP_DIR, COUCH_DIR)
fs.mkdirSync(TEMP_DIR)
fs.mkdirSync(couchDir)
console.log("CouchDB Export")
const bar = progressBar(dbList.length)
let count = 0
for (let db of dbList) {
bar.update(++count)
const remote = new Remote(db)
const local = new Local(join(TEMP_DIR, "couchdb", db))
const local = new Local(join(TEMP_DIR, COUCH_DIR, db))
await replication(remote, local)
}
bar.stop()
console.log("S3 Export")
await exportObjects()
tar.create(
{
sync: true,
@@ -126,7 +50,7 @@ async function exportBackup(opts) {
file: filename,
cwd: join(TEMP_DIR),
},
["couchdb"]
[COUCH_DIR, MINIO_DIR]
)
fs.rmSync(TEMP_DIR, { recursive: true })
console.log(`Generated export file - ${filename}`)
@@ -140,6 +64,9 @@ async function importBackup(opts) {
console.error("Cannot import without specifying a valid file to import")
process.exit(-1)
}
if (fs.existsSync(TEMP_DIR)) {
fs.rmSync(TEMP_DIR, { recursive: true })
}
fs.mkdirSync(TEMP_DIR)
tar.extract({
sync: true,
@@ -147,16 +74,19 @@ async function importBackup(opts) {
file: filename,
})
const { Remote, Local } = getPouches()
const dbList = fs.readdirSync(join(TEMP_DIR, "couchdb"))
const dbList = fs.readdirSync(join(TEMP_DIR, COUCH_DIR))
console.log("CouchDB Import")
const bar = progressBar(dbList.length)
let count = 0
for (let db of dbList) {
bar.update(++count)
const remote = new Remote(db)
const local = new Local(join(TEMP_DIR, "couchdb", db))
const local = new Local(join(TEMP_DIR, COUCH_DIR, db))
await replication(local, remote)
}
bar.stop()
console.log("MinIO Import")
await importObjects()
console.log("Import complete")
fs.rmSync(TEMP_DIR, { recursive: true })
}

View file

@@ -0,0 +1,63 @@
const {
ObjectStoreBuckets,
ObjectStore,
retrieve,
uploadDirectory,
makeSureBucketExists,
} = require("@budibase/backend-core/objectStore")
const fs = require("fs")
const { join } = require("path")
const { TEMP_DIR, MINIO_DIR } = require("./utils")
const { progressBar } = require("../utils")
const bucketList = Object.values(ObjectStoreBuckets)
exports.exportObjects = async () => {
const path = join(TEMP_DIR, MINIO_DIR)
fs.mkdirSync(path)
let fullList = []
for (let bucket of bucketList) {
const client = ObjectStore(bucket)
try {
await client.headBucket().promise()
} catch (err) {
continue
}
const list = await client.listObjectsV2().promise()
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
}
const bar = progressBar(fullList.length)
let count = 0
for (let object of fullList) {
const filename = object.Key
const data = await retrieve(object.bucket, filename)
const possiblePath = filename.split("/")
if (possiblePath.length > 1) {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
}
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
bar.update(++count)
}
bar.stop()
}
exports.importObjects = async () => {
const path = join(TEMP_DIR, MINIO_DIR)
const buckets = fs.readdirSync(path)
let total = 0
buckets.forEach(bucket => {
const files = fs.readdirSync(join(path, bucket))
total += files.length
})
const bar = progressBar(total)
let count = 0
for (let bucket of buckets) {
const client = ObjectStore(bucket)
await makeSureBucketExists(client, bucket)
const files = await uploadDirectory(bucket, join(path, bucket), "/")
count += files.length
bar.update(count)
}
bar.stop()
}

View file

@@ -0,0 +1,92 @@
const dotenv = require("dotenv")
const fs = require("fs")
const { string } = require("../questions")
const { env } = require("@budibase/backend-core")
const { getPouch } = require("@budibase/backend-core/db")
// Defaults for a standard single-host install - port 10000 is the main
// Budibase proxy port, with CouchDB served under /db/.
exports.DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/"
exports.DEFAULT_MINIO = "http://localhost:10000/"
// Working layout for backup tarballs: CouchDB and MinIO data are staged
// in sub-directories of the temp dir before being tarred up.
exports.TEMP_DIR = ".temp"
exports.COUCH_DIR = "couchdb"
exports.MINIO_DIR = "minio"
// Properties that must be resolved (via .env or prompts) before a
// backup/restore can run; `default` pre-fills the interactive prompt.
const REQUIRED = [
{ value: "MAIN_PORT", default: "10000" },
{ value: "COUCH_DB_URL", default: exports.DEFAULT_COUCH },
{ value: "MINIO_URL", default: exports.DEFAULT_MINIO },
{ value: "MINIO_ACCESS_KEY" },
{ value: "MINIO_SECRET_KEY" },
]
exports.checkURLs = config => {
const mainPort = config["MAIN_PORT"],
username = config["COUCH_DB_USER"],
password = config["COUCH_DB_PASSWORD"]
if (!config["COUCH_DB_URL"] && mainPort && username && password) {
config[
"COUCH_DB_URL"
] = `http://${username}:${password}@localhost:${mainPort}/db/`
}
if (!config["MINIO_URL"]) {
config["MINIO_URL"] = exports.DEFAULT_MINIO
}
return config
}
exports.askQuestions = async () => {
console.log(
"*** NOTE: use a .env file to load these parameters repeatedly ***"
)
let config = {}
for (let property of REQUIRED) {
config[property.value] = await string(property.value, property.default)
}
return config
}
exports.loadEnvironment = path => {
if (!fs.existsSync(path)) {
throw "Unable to file specified .env file"
}
const env = fs.readFileSync(path, "utf8")
const config = exports.checkURLs(dotenv.parse(env))
for (let required of REQUIRED) {
if (!config[required.value]) {
throw `Cannot find "${required.value}" property in .env file`
}
}
return config
}
// true is the default value passed by commander
exports.getConfig = async (envFile = true) => {
let config
if (envFile !== true) {
config = exports.loadEnvironment(envFile)
} else {
config = await exports.askQuestions()
}
for (let required of REQUIRED) {
env._set(required.value, config[required.value])
}
return config
}
exports.replication = (from, to) => {
return new Promise((resolve, reject) => {
from.replicate
.to(to)
.on("complete", () => {
resolve()
})
.on("error", err => {
reject(err)
})
})
}
exports.getPouches = () => {
const Remote = getPouch({ replication: true })
const Local = getPouch({ onDisk: true, directory: exports.TEMP_DIR })
return { Remote, Local }
}