
Some changes after testing; needed to update object store client creation.

mike12345567 2021-03-24 12:54:59 +00:00
parent 9708957646
commit 9c0a4ab4fb
4 changed files with 30 additions and 22 deletions

View file

@@ -15,6 +15,8 @@ services:
       COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
       WORKER_URL: http://worker-service:4003
       MINIO_URL: http://minio-service:9000
+      MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
+      MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
       HOSTING_KEY: ${HOSTING_KEY}
       BUDIBASE_ENVIRONMENT: ${BUDIBASE_ENVIRONMENT}
       PORT: 4002

View file

@@ -68,8 +68,6 @@ for (let route of mainRoutes) {
 router.use(staticRoutes.routes())
 router.use(staticRoutes.allowedMethods())
-if (!env.SELF_HOSTED && !env.CLOUD) {
-  router.redirect("/", "/_builder")
-}
+router.redirect("/", "/_builder")
 module.exports = router

View file

@@ -25,6 +25,8 @@ module.exports = {
   SELF_HOSTED: process.env.SELF_HOSTED,
   AWS_REGION: process.env.AWS_REGION,
   ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
+  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
+  MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
   // environment
   NODE_ENV: process.env.NODE_ENV,
   JEST_WORKER_ID: process.env.JEST_WORKER_ID,

View file

@@ -6,7 +6,6 @@ const tar = require("tar-fs")
 const zlib = require("zlib")
 const { promisify } = require("util")
 const { join } = require("path")
-const { streamUpload } = require("./utilities")
 const fs = require("fs")
 const { budibaseTempDir } = require("../budibaseDir")
 const env = require("../../environment")
@@ -50,8 +49,12 @@ const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS]
  * @constructor
  */
 exports.ObjectStore = bucket => {
+  AWS.config.update({
+    accessKeyId: env.MINIO_ACCESS_KEY,
+    secretAccessKey: env.MINIO_SECRET_KEY,
+  })
   const config = {
-    s3ForcePathStyle: true, // needed with minio?
+    s3ForcePathStyle: true,
     signatureVersion: "v4",
     params: {
       Bucket: bucket,
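
Put together, the updated client creation amounts to roughly the sketch below. Only the lines visible in the hunk above come from the commit; the MINIO_URL endpoint wiring and the final new AWS.S3(config) return are assumptions about the part of the function the diff truncates.

const AWS = require("aws-sdk")
const env = require("../../environment")

// Sketch of the updated factory: credentials are applied globally from the new
// MinIO environment variables, then a per-bucket S3 client is configured with
// path-style addressing and v4 signatures.
exports.ObjectStore = bucket => {
  AWS.config.update({
    accessKeyId: env.MINIO_ACCESS_KEY,
    secretAccessKey: env.MINIO_SECRET_KEY,
  })
  const config = {
    s3ForcePathStyle: true,
    signatureVersion: "v4",
    params: {
      Bucket: bucket,
    },
  }
  // Assumed, not shown in the hunk: point the client at the MinIO service when
  // MINIO_URL is set, otherwise fall back to the default AWS endpoint.
  if (env.MINIO_URL) {
    config.endpoint = env.MINIO_URL
  }
  return new AWS.S3(config)
}
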
@@ -161,24 +164,25 @@ exports.deleteFolder = async (bucket, folder) => {
     Prefix: folder,
   }
-  const data = await client.listObjects(listParams).promise()
-  if (data.Contents.length > 0) {
-    const deleteParams = {
-      Bucket: bucket,
-      Delete: {
-        Objects: [],
-      },
-    }
+  let response = await client.listObjects(listParams).promise()
+  if (response.Contents.length === 0) {
+    return
+  }
+  const deleteParams = {
+    Bucket: bucket,
+    Delete: {
+      Objects: [],
+    },
+  }
-    data.Contents.forEach(content => {
-      deleteParams.Delete.Objects.push({ Key: content.Key })
-    })
+  response.Contents.forEach(content => {
+    deleteParams.Delete.Objects.push({ Key: content.Key })
+  })
-    const data = await client.deleteObjects(deleteParams).promise()
-    // can only empty 1000 items at once
-    if (data.Contents.length === 1000) {
-      return exports.deleteFolder(bucket, folder)
-    }
+  response = await client.deleteObjects(deleteParams).promise()
+  // can only empty 1000 items at once
+  if (response.Deleted.length === 1000) {
+    return exports.deleteFolder(bucket, folder)
+  }
 }
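
Assembled from the added lines, the reworked deleteFolder reads roughly as follows. The function signature, client construction and the start of listParams are assumptions based on the hunk header and surrounding context; the early return, the reused response variable and the Deleted.length check are the lines introduced here.

exports.deleteFolder = async (bucket, folder) => {
  // Assumed setup: the hunk above only shows the tail of listParams.
  const client = exports.ObjectStore(bucket)
  const listParams = {
    Bucket: bucket,
    Prefix: folder,
  }

  let response = await client.listObjects(listParams).promise()
  if (response.Contents.length === 0) {
    return
  }

  const deleteParams = {
    Bucket: bucket,
    Delete: {
      Objects: [],
    },
  }
  response.Contents.forEach(content => {
    deleteParams.Delete.Objects.push({ Key: content.Key })
  })

  response = await client.deleteObjects(deleteParams).promise()
  // listObjects and deleteObjects handle at most 1000 keys per call, so recurse
  // until the prefix has been emptied.
  if (response.Deleted.length === 1000) {
    return exports.deleteFolder(bucket, folder)
  }
}

Switching the final check from data.Contents to response.Deleted also fixes a real bug: the old code shadowed data with the deleteObjects response and then read Contents, a property deleteObjects does not return, so the 1000-item recursion check would throw rather than recurse.
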
@@ -191,7 +195,9 @@ exports.uploadDirectory = async (bucket, localPath, bucketPath) => {
     if (file.isDirectory()) {
       uploads.push(exports.uploadDirectory(bucket, local, path))
     } else {
-      uploads.push(streamUpload(bucket, path, fs.createReadStream(local)))
+      uploads.push(
+        exports.streamUpload(bucket, path, fs.createReadStream(local))
+      )
     }
   }
   await Promise.all(uploads)
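
The loop now goes through exports.streamUpload instead of the import removed at the top of the file. The helper itself is not part of this diff; a hypothetical sketch matching the (bucket, key, stream) call site above might look like this:

// Hypothetical helper, named and shaped to match the call above; the real
// implementation lives elsewhere in this module.
exports.streamUpload = async (bucket, filename, stream) => {
  const client = exports.ObjectStore(bucket)
  const params = {
    Bucket: bucket,
    Key: filename,
    Body: stream,
  }
  // s3.upload accepts a readable stream as Body and handles multipart uploads.
  return client.upload(params).promise()
}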