
Initial version of memory leak protection, making sure that PouchDB databases are closed correctly after use. This uses a combination of closures wrapping DB access (replacing getDB, leaving only a dangerousGetDB function for use in very specific scenarios) and closing the DB as part of CLS-hooked functions finishing. The global DB init has also been moved to the tenancy middleware, since it is used everywhere in the worker/app services - this means that not all getGlobalDB calls require an async closure around them.

mike12345567 2022-04-19 19:42:52 +01:00
parent 41144db055
commit 192fb1307e
33 changed files with 778 additions and 543 deletions
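The core pattern applied throughout this diff is replacing direct getDB(...) handles with a closure helper so the PouchDB object is always closed when the work completes. A minimal before/after sketch, assuming the doWithDB/dangerousGetDB exports introduced in this commit; the fetchAppMetadata name and the "app_metadata" document ID are illustrative only:

const { doWithDB, dangerousGetDB } = require("@budibase/backend-core/db")

// before: the caller owns the PouchDB handle and must remember to close it,
// which rarely happened - leaking sockets and listeners over time
async function fetchAppMetadataOld(appId) {
  const db = dangerousGetDB(appId, { skip_setup: true })
  return db.get("app_metadata")
}

// after: the closure owns the handle only for the duration of the callback,
// and doWithDB closes it once the awaited callback has resolved
async function fetchAppMetadata(appId) {
  return doWithDB(appId, db => db.get("app_metadata"), { skip_setup: true })
}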

View file

@@ -1,5 +1,5 @@
 const redis = require("../redis/authRedis")
-const { getDB } = require("../db")
+const { doWithDB } = require("../db")
 const { DocumentTypes } = require("../db/constants")
 const AppState = {
@@ -11,8 +11,13 @@ const EXPIRY_SECONDS = 3600
  * The default populate app metadata function
  */
 const populateFromDB = async appId => {
-  const db = getDB(appId, { skip_setup: true })
-  return db.get(DocumentTypes.APP_METADATA)
+  return doWithDB(
+    appId,
+    db => {
+      return db.get(DocumentTypes.APP_METADATA)
+    },
+    { skip_setup: true }
+  )
 }
 const isInvalid = metadata => {

View file

@@ -1,5 +1,5 @@
 const redis = require("../redis/authRedis")
-const { getTenantId, lookupTenantId, getGlobalDB } = require("../tenancy")
+const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
 const env = require("../environment")
 const accounts = require("../cloud/accounts")
@@ -9,9 +9,8 @@ const EXPIRY_SECONDS = 3600
  * The default populate user function
  */
 const populateFromDB = async (userId, tenantId) => {
-  const user = await getGlobalDB(tenantId).get(userId)
+  const user = await doWithGlobalDB(tenantId, db => db.get(userId))
   user.budibaseAccess = true
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
     const account = await accounts.getAccount(user.email)
     if (account) {

View file

@@ -4,7 +4,11 @@ const { newid } = require("../hashing")
 const REQUEST_ID_KEY = "requestId"
 class FunctionContext {
-  static getMiddleware(updateCtxFn = null, contextName = "session") {
+  static getMiddleware(
+    updateCtxFn = null,
+    destroyFn = null,
+    contextName = "session"
+  ) {
     const namespace = this.createNamespace(contextName)
     return async function (ctx, next) {
@@ -18,7 +22,14 @@ class FunctionContext
         if (updateCtxFn) {
           updateCtxFn(ctx)
         }
-        next().then(resolve).catch(reject)
+        next()
+          .then(resolve)
+          .catch(reject)
+          .finally(() => {
+            if (destroyFn) {
+              return destroyFn(ctx)
+            }
+          })
       })
     )
   }

View file

@@ -1,6 +1,6 @@
 const { getGlobalUserParams, getAllApps } = require("../db/utils")
-const { getDB } = require("../db")
-const { getGlobalDB } = require("../tenancy")
+const { doWithDB } = require("../db")
+const { doWithGlobalDB } = require("../tenancy")
 const { StaticDatabases } = require("../db/constants")
 const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
@@ -8,11 +8,12 @@ const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
 const removeTenantFromInfoDB = async tenantId => {
   try {
-    const infoDb = getDB(PLATFORM_INFO_DB)
+    await doWithDB(PLATFORM_INFO_DB, async infoDb => {
      let tenants = await infoDb.get(TENANT_DOC)
      tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
      await infoDb.put(tenants)
+    })
   } catch (err) {
     console.error(`Error removing tenant ${tenantId} from info db`, err)
     throw err
@@ -20,36 +21,8 @@ const removeTenantFromInfoDB = async tenantId => {
 }
 exports.removeUserFromInfoDB = async dbUser => {
-  const infoDb = getDB(PLATFORM_INFO_DB)
+  await doWithDB(PLATFORM_INFO_DB, async infoDb => {
    const keys = [dbUser._id, dbUser.email]
    const userDocs = await infoDb.allDocs({
      keys,
      include_docs: true,
    })
    const toDelete = userDocs.rows.map(row => {
      return {
        ...row.doc,
        _deleted: true,
      }
    })
    await infoDb.bulkDocs(toDelete)
+  })
 }
@@ -61,26 +34,60 @@
 const removeUsersFromInfoDB = async tenantId => {
+  return doWithGlobalDB(tenantId, async db => {
    try {
-      const globalDb = getGlobalDB(tenantId)
-      const infoDb = getDB(PLATFORM_INFO_DB)
-      const allUsers = await globalDb.allDocs(
+      const allUsers = await db.allDocs(
        getGlobalUserParams(null, {
          include_docs: true,
        })
      )
+      await doWithDB(PLATFORM_INFO_DB, async infoDb => {
        const allEmails = allUsers.rows.map(row => row.doc.email)
        // get the id docs
        let keys = allUsers.rows.map(row => row.id)
        // and the email docs
        keys = keys.concat(allEmails)
        // retrieve the docs and delete them
        const userDocs = await infoDb.allDocs({
          keys,
          include_docs: true,
        })
        const toDelete = userDocs.rows.map(row => {
          return {
            ...row.doc,
            _deleted: true,
          }
        })
        await infoDb.bulkDocs(toDelete)
+      })
    } catch (err) {
      console.error(`Error removing tenant ${tenantId} users from info db`, err)
      throw err
    }
+  })
 }
 const removeGlobalDB = async tenantId => {
+  return doWithGlobalDB(tenantId, async db => {
    try {
-      const globalDb = getGlobalDB(tenantId)
-      await globalDb.destroy()
+      await db.destroy()
    } catch (err) {
      console.error(`Error removing tenant ${tenantId} users from info db`, err)
      throw err
    }
+  })
 }
 const removeTenantApps = async tenantId => {
   try {
     const apps = await getAllApps({ all: true })
-    const destroyPromises = apps.map(app => getDB(app.appId).destroy())
+    const destroyPromises = apps.map(app =>
+      doWithDB(app.appId, db => db.destroy())
+    )
     await Promise.allSettled(destroyPromises)
   } catch (err) {
     console.error(`Error removing tenant ${tenantId} apps`, err)

View file

@@ -1,9 +1,11 @@
 const env = require("../environment")
 const { Headers } = require("../../constants")
 const { SEPARATOR, DocumentTypes } = require("../db/constants")
+const { DEFAULT_TENANT_ID } = require("../constants")
 const cls = require("./FunctionContext")
-const { getDB } = require("../db")
+const { dangerousGetDB } = require("../db")
 const { getProdAppID, getDevelopmentAppID } = require("../db/conversions")
+const { baseGlobalDBName } = require("../tenancy/utils")
 const { isEqual } = require("lodash")
 // some test cases call functions directly, need to
@@ -12,6 +14,7 @@ let TEST_APP_ID = null
 const ContextKeys = {
   TENANT_ID: "tenantId",
+  GLOBAL_DB: "globalDb",
   APP_ID: "appId",
   // whatever the request app DB was
   CURRENT_DB: "currentDb",
@@ -22,7 +25,28 @@ const ContextKeys = {
   DB_OPTS: "dbOpts",
 }
-exports.DEFAULT_TENANT_ID = "default"
+exports.DEFAULT_TENANT_ID = DEFAULT_TENANT_ID
+// this function makes sure the PouchDB objects are closed and
+// fully deleted when finished - this protects against memory leaks
+async function closeAppDBs() {
+  const dbKeys = [
+    ContextKeys.CURRENT_DB,
+    ContextKeys.PROD_DB,
+    ContextKeys.DEV_DB,
+  ]
+  for (let dbKey of dbKeys) {
+    const db = cls.getFromContext(dbKey)
+    if (!db) {
+      continue
+    }
+    try {
+      await db.close()
+    } catch (err) {
+      // ignore error, its already closed likely
+    }
+  }
+}
 exports.isDefaultTenant = () => {
   return exports.getTenantId() === exports.DEFAULT_TENANT_ID
@@ -34,13 +58,29 @@ exports.isMultiTenant = () => {
 // used for automations, API endpoints should always be in context already
 exports.doInTenant = (tenantId, task) => {
-  return cls.run(() => {
-    // set the tenant id
-    cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
-    // invoke the task
-    return task()
-  })
+  // the internal function is so that we can re-use an existing
+  // context - don't want to close DB on a parent context
+  async function internal(opts = { existing: false }) {
+    // set the tenant id
+    if (!opts.existing) {
+      cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
+      exports.setGlobalDB(tenantId)
+    }
+    // invoke the task
+    const response = await task()
+    if (!opts.existing) {
+      await exports.getGlobalDB().close()
+    }
+    return response
+  }
+  if (cls.getFromContext(ContextKeys.TENANT_ID) === tenantId) {
+    return internal({ existing: true })
+  } else {
+    return cls.run(async () => {
+      return internal()
+    })
+  }
 }
 /**
@@ -64,24 +104,38 @@ exports.getTenantIDFromAppID = appId => {
 }
 const setAppTenantId = appId => {
-  const appTenantId = this.getTenantIDFromAppID(appId) || this.DEFAULT_TENANT_ID
-  this.updateTenantId(appTenantId)
+  const appTenantId =
+    exports.getTenantIDFromAppID(appId) || exports.DEFAULT_TENANT_ID
+  exports.updateTenantId(appTenantId)
 }
 exports.doInAppContext = (appId, task) => {
   if (!appId) {
     throw new Error("appId is required")
   }
-  return cls.run(() => {
-    // set the app tenant id
-    setAppTenantId(appId)
-    // set the app ID
-    cls.setOnContext(ContextKeys.APP_ID, appId)
-    // invoke the task
-    return task()
-  })
+  // the internal function is so that we can re-use an existing
+  // context - don't want to close DB on a parent context
+  async function internal(opts = { existing: false }) {
+    // set the app tenant id
+    if (!opts.existing) {
+      setAppTenantId(appId)
+    }
+    // set the app ID
+    cls.setOnContext(ContextKeys.APP_ID, appId)
+    // invoke the task
+    const response = await task()
+    if (!opts.existing) {
+      await closeAppDBs()
+    }
+    return response
+  }
+  if (appId === cls.getFromContext(ContextKeys.APP_ID)) {
+    return internal({ existing: true })
+  } else {
+    return cls.run(async () => {
+      return internal()
+    })
+  }
 }
 exports.updateTenantId = tenantId => {
@@ -90,11 +144,13 @@ exports.updateTenantId = tenantId => {
 exports.updateAppId = appId => {
   try {
+    const promise = closeAppDBs()
     cls.setOnContext(ContextKeys.APP_ID, appId)
     cls.setOnContext(ContextKeys.PROD_DB, null)
     cls.setOnContext(ContextKeys.DEV_DB, null)
     cls.setOnContext(ContextKeys.CURRENT_DB, null)
     cls.setOnContext(ContextKeys.DB_OPTS, null)
+    return promise
   } catch (err) {
     if (env.isTest()) {
       TEST_APP_ID = appId
@@ -111,8 +167,8 @@ exports.setTenantId = (
   let tenantId
   // exit early if not multi-tenant
   if (!exports.isMultiTenant()) {
-    cls.setOnContext(ContextKeys.TENANT_ID, this.DEFAULT_TENANT_ID)
-    return
+    cls.setOnContext(ContextKeys.TENANT_ID, exports.DEFAULT_TENANT_ID)
+    return exports.DEFAULT_TENANT_ID
   }
   const allowQs = opts && opts.allowQs
@@ -140,6 +196,22 @@ exports.setTenantId = (
   if (tenantId) {
     cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
   }
+  return tenantId
+}
+exports.setGlobalDB = tenantId => {
+  const dbName = baseGlobalDBName(tenantId)
+  const db = dangerousGetDB(dbName)
+  cls.setOnContext(ContextKeys.GLOBAL_DB, db)
+  return db
+}
+exports.getGlobalDB = () => {
+  const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
+  if (!db) {
+    throw new Error("Global DB not found")
+  }
+  return db
 }
 exports.isTenantIdSet = () => {
@@ -187,7 +259,7 @@ function getContextDB(key, opts) {
       toUseAppId = getDevelopmentAppID(appId)
       break
   }
-  db = getDB(toUseAppId, opts)
+  db = dangerousGetDB(toUseAppId, opts)
   try {
     cls.setOnContext(key, db)
     if (opts) {
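A sketch of how the reworked doInTenant is expected to behave from a caller's point of view; the "acme" tenant ID and document are made up, and this assumes the usual re-export of the context functions through @budibase/backend-core/tenancy:

const { doInTenant, getGlobalDB } = require("@budibase/backend-core/tenancy")

async function touchTenantDoc() {
  return doInTenant("acme", async () => {
    // opened by setGlobalDB() when the outermost doInTenant set up the context
    const db = getGlobalDB()
    await db.put({ _id: "example_doc" })
    // a nested doInTenant("acme", ...) here runs with existing: true, so it
    // does not close the global DB out from under the parent context
  })
  // the outermost call closes the global DB once the task resolves
}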

View file

@@ -1,4 +1,4 @@
-const { getDB } = require(".")
+const { dangerousGetDB } = require(".")
 class Replication {
   /**
@@ -7,8 +7,8 @@ class Replication {
    * @param {String} target - the DB you want to replicate to, or rollback from
    */
   constructor({ source, target }) {
-    this.source = getDB(source)
-    this.target = getDB(target)
+    this.source = dangerousGetDB(source)
+    this.target = dangerousGetDB(target)
   }
   promisify(operation, opts = {}) {
@@ -51,7 +51,7 @@ class Replication {
   async rollback() {
     await this.target.destroy()
     // Recreate the DB again
-    this.target = getDB(this.target.name)
+    this.target = dangerousGetDB(this.target.name)
     await this.replicate()
   }

View file

@@ -22,7 +22,10 @@ exports.init = opts => {
   initialised = true
 }
-exports.getDB = (dbName, opts) => {
+// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
+// this function is prone to leaks, should only be used
+// in situations that using the function doWithDB does not work
+exports.dangerousGetDB = (dbName, opts) => {
   checkInitialised()
   const db = new PouchDB(dbName, opts)
   const dbPut = db.put
@@ -30,6 +33,22 @@ exports.getDB = (dbName, opts) => {
   return db
 }
+// we have to use a callback for this so that we can close
+// the DB when we're done, without this manual requests would
+// need to close the database when done with it to avoid memory leaks
+exports.doWithDB = async (dbName, cb, opts) => {
+  const db = exports.dangerousGetDB(dbName, opts)
+  // need this to be async so that we can correctly close DB after all
+  // async operations have been completed
+  const resp = await cb(db)
+  try {
+    await db.close()
+  } catch (err) {
+    // ignore error - it may have not opened database/is closed already
+  }
+  return resp
+}
 exports.allDbs = () => {
   checkInitialised()
   return PouchDB.allDbs()
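A short usage sketch of the helper defined above (the "my-db" name and countDocs wrapper are illustrative): the callback's return value is passed straight through, and the handle is closed before doWithDB resolves, so the db object must not be allowed to escape the closure.

const { doWithDB } = require("@budibase/backend-core/db")

async function countDocs() {
  return doWithDB("my-db", async db => {
    const info = await db.info()
    return info.doc_count
  })
  // by the time the promise resolves the underlying PouchDB handle is closed
}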

View file

@@ -11,7 +11,7 @@ const {
 } = require("./constants")
 const { getTenantId, getGlobalDBName } = require("../tenancy")
 const fetch = require("node-fetch")
-const { getDB, allDbs } = require("./index")
+const { doWithDB, allDbs } = require("./index")
 const { getCouchUrl } = require("./pouch")
 const { getAppMetadata } = require("../cache/appMetadata")
 const { checkSlashesInUrl } = require("../helpers")
@@ -280,17 +280,22 @@ exports.getDevAppIDs = async () => {
 exports.dbExists = async dbName => {
   let exists = false
-  try {
-    const db = getDB(dbName, { skip_setup: true })
-    // check if database exists
-    const info = await db.info()
-    if (info && !info.error) {
-      exists = true
-    }
-  } catch (err) {
-    exists = false
-  }
-  return exists
+  return doWithDB(
+    dbName,
+    async db => {
+      try {
+        // check if database exists
+        const info = await db.info()
+        if (info && !info.error) {
+          exists = true
+        }
+      } catch (err) {
+        exists = false
+      }
+      return exists
+    },
+    { skip_setup: true }
+  )
 }
/** /**

View file

@@ -1,7 +1,7 @@
 const google = require("../google")
 const { Cookies, Configs } = require("../../../constants")
 const { clearCookie, getCookie } = require("../../../utils")
-const { getDB } = require("../../../db")
+const { doWithDB } = require("../../../db")
 const { getScopedConfig } = require("../../../db/utils")
 const environment = require("../../../environment")
 const { getGlobalDB } = require("../../../tenancy")
@@ -13,12 +13,12 @@ async function fetchGoogleCreds() {
     type: Configs.GOOGLE,
   })
   // or fall back to env variables
-  const config = googleConfig || {
-    clientID: environment.GOOGLE_CLIENT_ID,
-    clientSecret: environment.GOOGLE_CLIENT_SECRET,
-  }
-  return config
+  return (
+    googleConfig || {
+      clientID: environment.GOOGLE_CLIENT_ID,
+      clientSecret: environment.GOOGLE_CLIENT_SECRET,
+    }
+  )
 }
 async function preAuth(passport, ctx, next) {
@@ -59,16 +59,17 @@ async function postAuth(passport, ctx, next) {
     { successRedirect: "/", failureRedirect: "/error" },
     async (err, tokens) => {
       // update the DB for the datasource with all the user info
-      const db = getDB(authStateCookie.appId)
+      await doWithDB(authStateCookie.appId, async db => {
        const datasource = await db.get(authStateCookie.datasourceId)
        if (!datasource.config) {
          datasource.config = {}
        }
        datasource.config.auth = { type: "google", ...tokens }
        await db.put(datasource)
        ctx.redirect(
          `/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}`
        )
+      })
     }
   )(ctx, next)
 }

View file

@@ -2,7 +2,7 @@
 require("../../../tests/utilities/dbConfig")
-const database = require("../../../db")
+const { dangerousGetDB } = require("../../../db")
 const { authenticateThirdParty } = require("../third-party-common")
 const { data } = require("./utilities/mock-data")
@@ -29,7 +29,7 @@ describe("third party common", () => {
   let thirdPartyUser
   beforeEach(() => {
-    db = database.getDB(StaticDatabases.GLOBAL.name)
+    db = dangerousGetDB(StaticDatabases.GLOBAL.name)
     thirdPartyUser = data.buildThirdPartyUser()
   })

View file

@@ -1,4 +1,4 @@
-const { setTenantId } = require("../tenancy")
+const { setTenantId, setGlobalDB, getGlobalDB } = require("../tenancy")
 const ContextFactory = require("../context/FunctionContext")
 const { buildMatcherRegex, matches } = require("./matchers")
@@ -10,10 +10,16 @@ module.exports = (
   const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
   const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
-  return ContextFactory.getMiddleware(ctx => {
+  const updateCtxFn = ctx => {
     const allowNoTenant =
       opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
     const allowQs = !!matches(ctx, allowQsOptions)
-    setTenantId(ctx, { allowQs, allowNoTenant })
-  })
+    const tenantId = setTenantId(ctx, { allowQs, allowNoTenant })
+    setGlobalDB(tenantId)
+  }
+  const destroyFn = async () => {
+    await getGlobalDB().close()
+  }
+  return ContextFactory.getMiddleware(updateCtxFn, destroyFn)
 }
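Schematically, the factory above wires the two hooks into the CLS middleware from FunctionContext; names are taken from the hunks in this commit, and the matcher options are simplified here:

const ContextFactory = require("../context/FunctionContext")
const { setTenantId, setGlobalDB, getGlobalDB } = require("../tenancy")

// updateCtxFn opens the tenant's global DB at the start of the request,
// destroyFn closes it in the .finally() that wraps next()
const middleware = ContextFactory.getMiddleware(
  ctx => setGlobalDB(setTenantId(ctx, { allowQs: false, allowNoTenant: false })),
  async () => getGlobalDB().close()
)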

View file

@@ -1,5 +1,5 @@
 const { DEFAULT_TENANT_ID } = require("../constants")
-const { getDB } = require("../db")
+const { doWithDB } = require("../db")
 const { DocumentTypes } = require("../db/constants")
 const { getAllApps } = require("../db/utils")
 const environment = require("../environment")
@@ -47,45 +47,46 @@ const runMigration = async (migration, options = {}) => {
   // run the migration against each db
   for (const dbName of dbNames) {
-    const db = getDB(dbName)
+    await doWithDB(dbName, async db => {
      try {
        const doc = await exports.getMigrationsDoc(db)
        // exit if the migration has been performed already
        if (doc[migrationName]) {
          if (
            options.force &&
            options.force[migrationType] &&
            options.force[migrationType].includes(migrationName)
          ) {
            console.log(
              `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Forcing`
            )
          } else {
            // the migration has already been performed
-            continue
+            return
          }
        }
        console.log(
          `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Running`
        )
        // run the migration with tenant context
        await migration.fn(db)
        console.log(
          `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Complete`
        )
        // mark as complete
        doc[migrationName] = Date.now()
        await db.put(doc)
      } catch (err) {
        console.error(
          `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Error: `,
          err
        )
        throw err
      }
+    })
   }
 }

View file

@@ -1,7 +1,7 @@
 require("../../tests/utilities/dbConfig")
 const { runMigrations, getMigrationsDoc } = require("../index")
-const { getDB } = require("../../db")
+const { dangerousGetDB } = require("../../db")
 const {
   StaticDatabases,
 } = require("../../db/utils")
@@ -20,7 +20,7 @@ describe("migrations", () => {
   }]
   beforeEach(() => {
-    db = getDB(StaticDatabases.GLOBAL.name)
+    db = dangerousGetDB(StaticDatabases.GLOBAL.name)
   })
   afterEach(async () => {

View file

@@ -7,7 +7,7 @@ const {
   SEPARATOR,
 } = require("../db/utils")
 const { getAppDB } = require("../context")
-const { getDB } = require("../db")
+const { doWithDB } = require("../db")
 const BUILTIN_IDS = {
   ADMIN: "ADMIN",
@@ -199,43 +199,49 @@ exports.checkForRoleResourceArray = (rolePerms, resourceId) => {
  * @return {Promise<object[]>} An array of the role objects that were found.
  */
 exports.getAllRoles = async appId => {
-  const db = appId ? getDB(appId) : getAppDB()
-  const body = await db.allDocs(
-    getRoleParams(null, {
-      include_docs: true,
-    })
-  )
-  let roles = body.rows.map(row => row.doc)
-  const builtinRoles = exports.getBuiltinRoles()
+  if (appId) {
+    return doWithDB(appId, internal)
+  } else {
+    return internal(getAppDB())
+  }
+  async function internal(db) {
+    const body = await db.allDocs(
+      getRoleParams(null, {
+        include_docs: true,
+      })
+    )
+    let roles = body.rows.map(row => row.doc)
+    const builtinRoles = exports.getBuiltinRoles()
    // need to combine builtin with any DB record of them (for sake of permissions)
    for (let builtinRoleId of EXTERNAL_BUILTIN_ROLE_IDS) {
      const builtinRole = builtinRoles[builtinRoleId]
      const dbBuiltin = roles.filter(
        dbRole => exports.getExternalRoleID(dbRole._id) === builtinRoleId
      )[0]
      if (dbBuiltin == null) {
        roles.push(builtinRole || builtinRoles.BASIC)
      } else {
        // remove role and all back after combining with the builtin
        roles = roles.filter(role => role._id !== dbBuiltin._id)
        dbBuiltin._id = exports.getExternalRoleID(dbBuiltin._id)
        roles.push(Object.assign(builtinRole, dbBuiltin))
      }
    }
    // check permissions
    for (let role of roles) {
      if (!role.permissions) {
        continue
      }
      for (let resourceId of Object.keys(role.permissions)) {
        role.permissions = exports.checkForRoleResourceArray(
          role.permissions,
          resourceId
        )
      }
    }
    return roles
+  }
 }
/** /**

View file

@@ -1,5 +1,6 @@
-const { getDB } = require("../db")
-const { SEPARATOR, StaticDatabases } = require("../db/constants")
+const { doWithDB } = require("../db")
+const { StaticDatabases } = require("../db/constants")
+const { baseGlobalDBName } = require("./utils")
 const {
   getTenantId,
   DEFAULT_TENANT_ID,
@@ -23,59 +24,61 @@ exports.addTenantToUrl = url => {
 }
 exports.doesTenantExist = async tenantId => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
    let tenants
    try {
      tenants = await db.get(TENANT_DOC)
    } catch (err) {
      // if theres an error the doc doesn't exist, no tenants exist
      return false
    }
    return (
      tenants &&
      Array.isArray(tenants.tenantIds) &&
      tenants.tenantIds.indexOf(tenantId) !== -1
    )
+  })
 }
 exports.tryAddTenant = async (tenantId, userId, email) => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
    const getDoc = async id => {
      if (!id) {
        return null
      }
      try {
        return await db.get(id)
      } catch (err) {
        return { _id: id }
      }
    }
    let [tenants, userIdDoc, emailDoc] = await Promise.all([
      getDoc(TENANT_DOC),
      getDoc(userId),
      getDoc(email),
    ])
    if (!Array.isArray(tenants.tenantIds)) {
      tenants = {
        _id: TENANT_DOC,
        tenantIds: [],
      }
    }
    let promises = []
    if (userIdDoc) {
      userIdDoc.tenantId = tenantId
      promises.push(db.put(userIdDoc))
    }
    if (emailDoc) {
      emailDoc.tenantId = tenantId
      emailDoc.userId = userId
      promises.push(db.put(emailDoc))
    }
    if (tenants.tenantIds.indexOf(tenantId) === -1) {
      tenants.tenantIds.push(tenantId)
      promises.push(db.put(tenants))
    }
    await Promise.all(promises)
+  })
 }
 exports.getGlobalDBName = (tenantId = null) => {
@@ -84,43 +87,37 @@ exports.getGlobalDBName = (tenantId = null) => {
   if (!tenantId) {
     tenantId = getTenantId()
   }
-  let dbName
-  if (tenantId === DEFAULT_TENANT_ID) {
-    dbName = StaticDatabases.GLOBAL.name
-  } else {
-    dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
-  }
-  return dbName
+  return baseGlobalDBName(tenantId)
 }
-exports.getGlobalDB = (tenantId = null) => {
-  const dbName = exports.getGlobalDBName(tenantId)
-  return getDB(dbName)
+exports.doWithGlobalDB = (tenantId, cb) => {
+  return doWithDB(exports.getGlobalDBName(tenantId), cb)
 }
 exports.lookupTenantId = async userId => {
-  const db = getDB(StaticDatabases.PLATFORM_INFO.name)
+  return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
    let tenantId = env.MULTI_TENANCY ? DEFAULT_TENANT_ID : null
    try {
      const doc = await db.get(userId)
      if (doc && doc.tenantId) {
        tenantId = doc.tenantId
      }
    } catch (err) {
      // just return the default
    }
    return tenantId
+  })
 }
 // lookup, could be email or userId, either will return a doc
 exports.getTenantUser = async identifier => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
    try {
      return await db.get(identifier)
    } catch (err) {
      return null
    }
+  })
 }
 exports.isUserInAppTenant = (appId, user = null) => {
@@ -135,13 +132,14 @@ exports.isUserInAppTenant = (appId, user = null) => {
 }
 exports.getTenantIds = async () => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
    let tenants
    try {
      tenants = await db.get(TENANT_DOC)
    } catch (err) {
      // if theres an error the doc doesn't exist, no tenants exist
      return []
    }
    return (tenants && tenants.tenantIds) || []
+  })
 }

View file

@@ -0,0 +1,12 @@
+const { DEFAULT_TENANT_ID } = require("../constants")
+const { StaticDatabases, SEPARATOR } = require("../db/constants")
+exports.baseGlobalDBName = tenantId => {
+  let dbName
+  if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
+    dbName = StaticDatabases.GLOBAL.name
+  } else {
+    dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
+  }
+  return dbName
+}

View file

@@ -10,7 +10,7 @@ const { options } = require("./middleware/passport/jwt")
 const { queryGlobalView } = require("./db/views")
 const { Headers, UserStatus, Cookies, MAX_VALID_DATE } = require("./constants")
 const {
-  getGlobalDB,
+  doWithGlobalDB,
   updateTenantId,
   getTenantUser,
   tryAddTenant,
@@ -188,82 +188,83 @@ exports.saveUser = async (
   // need to set the context for this request, as specified
   updateTenantId(tenantId)
   // specify the tenancy incase we're making a new admin user (public)
-  const db = getGlobalDB(tenantId)
+  return doWithGlobalDB(tenantId, async db => {
    let { email, password, _id } = user
    // make sure another user isn't using the same email
    let dbUser
    if (email) {
      // check budibase users inside the tenant
      dbUser = await exports.getGlobalUserByEmail(email)
      if (dbUser != null && (dbUser._id !== _id || Array.isArray(dbUser))) {
        throw `Email address ${email} already in use.`
      }
      // check budibase users in other tenants
      if (env.MULTI_TENANCY) {
        const tenantUser = await getTenantUser(email)
        if (tenantUser != null && tenantUser.tenantId !== tenantId) {
          throw `Email address ${email} already in use.`
        }
      }
      // check root account users in account portal
      if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
        const account = await accounts.getAccount(email)
        if (account && account.verified && account.tenantId !== tenantId) {
          throw `Email address ${email} already in use.`
        }
      }
    } else {
      dbUser = await db.get(_id)
    }
    // get the password, make sure one is defined
    let hashedPassword
    if (password) {
      hashedPassword = hashPassword ? await hash(password) : password
    } else if (dbUser) {
      hashedPassword = dbUser.password
    } else if (requirePassword) {
      throw "Password must be specified."
    }
    _id = _id || generateGlobalUserID()
    user = {
      createdAt: Date.now(),
      ...dbUser,
      ...user,
      _id,
      password: hashedPassword,
      tenantId,
    }
    // make sure the roles object is always present
    if (!user.roles) {
      user.roles = {}
    }
    // add the active status to a user if its not provided
    if (user.status == null) {
      user.status = UserStatus.ACTIVE
    }
    try {
      const response = await db.put({
        password: hashedPassword,
        ...user,
      })
      await tryAddTenant(tenantId, _id, email)
      await userCache.invalidateUser(response.id)
      return {
        _id: response.id,
        _rev: response.rev,
        email,
      }
    } catch (err) {
      if (err.status === 409) {
        throw "User exists already"
      } else {
        throw err
      }
    }
+  })
 }
/** /**

View file

@@ -258,6 +258,13 @@
   dependencies:
     "@babel/helper-plugin-utils" "^7.14.5"
+"@babel/runtime@^7.15.4":
+  version "7.17.9"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72"
+  integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==
+  dependencies:
+    regenerator-runtime "^0.13.4"
 "@babel/template@^7.16.0", "@babel/template@^7.3.3":
   version "7.16.0"
   resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6"
@@ -857,6 +864,21 @@ aws4@^1.8.0:
   resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
   integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==
+axios-retry@^3.1.9:
+  version "3.2.4"
+  resolved "https://registry.yarnpkg.com/axios-retry/-/axios-retry-3.2.4.tgz#f447a53c3456f5bfeca18f20c3a3272207d082ae"
+  integrity sha512-Co3UXiv4npi6lM963mfnuH90/YFLKWWDmoBYfxkHT5xtkSSWNqK9zdG3fw5/CP/dsoKB5aMMJCsgab+tp1OxLQ==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    is-retry-allowed "^2.2.0"
+axios@0.24.0:
+  version "0.24.0"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
+  integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
+  dependencies:
+    follow-redirects "^1.14.4"
 babel-jest@^26.6.3:
   version "26.6.3"
   resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056"
@@ -1139,6 +1161,11 @@ char-regex@^1.0.2:
   resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
   integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
+charenc@0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
+  integrity sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=
 chownr@^1.1.1:
   version "1.1.4"
   resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
@@ -1273,6 +1300,11 @@ component-emitter@^1.2.1:
   resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
   integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
+component-type@^1.2.1:
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/component-type/-/component-type-1.2.1.tgz#8a47901700238e4fc32269771230226f24b415a9"
+  integrity sha1-ikeQFwAjjk/DIml3EjAibyS0Fak=
 concat-map@0.0.1:
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
@@ -1315,6 +1347,11 @@ cross-spawn@^7.0.0:
     shebang-command "^2.0.0"
     which "^2.0.1"
+crypt@0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
+  integrity sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=
 cryptiles@2.x.x:
   version "2.0.5"
   resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
@@ -1802,6 +1839,11 @@ find-up@^4.0.0, find-up@^4.1.0:
     locate-path "^5.0.0"
     path-exists "^4.0.0"
+follow-redirects@^1.14.4:
+  version "1.14.9"
+  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7"
+  integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==
 for-in@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
@@ -2226,7 +2268,7 @@ is-arrayish@^0.2.1:
   resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
   integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
-is-buffer@^1.1.5:
+is-buffer@^1.1.5, is-buffer@~1.1.6:
   version "1.1.6"
   resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
   integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
@@ -2328,6 +2370,11 @@ is-potential-custom-element-name@^1.0.1:
   resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5"
   integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==
+is-retry-allowed@^2.2.0:
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz#88f34cbd236e043e71b6932d09b0c65fb7b4d71d"
+  integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==
 is-stream@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
@@ -2824,6 +2871,11 @@ jodid25519@^1.0.0:
   dependencies:
     jsbn "~0.1.0"
+join-component@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/join-component/-/join-component-1.1.0.tgz#b8417b750661a392bee2c2537c68b2a9d4977cd5"
+  integrity sha1-uEF7dQZho5K+4sJTfGiyqdSXfNU=
 js-tokens@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
@@ -3257,6 +3309,15 @@ map-visit@^1.0.0:
   dependencies:
     object-visit "^1.0.0"
+md5@^2.3.0:
+  version "2.3.0"
+  resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
+  integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==
+  dependencies:
+    charenc "0.0.2"
+    crypt "0.0.2"
+    is-buffer "~1.1.6"
 memdown@1.4.1:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/memdown/-/memdown-1.4.1.tgz#b4e4e192174664ffbae41361aa500f3119efe215"
@@ -3377,6 +3438,11 @@ ms@2.1.2, ms@^2.1.1:
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
+ms@^2.1.3:
+  version "2.1.3"
+  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
+  integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
 nanomatch@^1.2.9:
   version "1.2.13"
   resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
@@ -4195,6 +4261,11 @@ redis-parser@^3.0.0:
   dependencies:
     redis-errors "^1.0.0"
+regenerator-runtime@^0.13.4:
+  version "0.13.9"
+  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
+  integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==
 regex-not@^1.0.0, regex-not@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
@@ -4208,6 +4279,11 @@ remove-trailing-separator@^1.0.1:
   resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
   integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=
+remove-trailing-slash@^0.1.1:
+  version "0.1.1"
+  resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d"
+  integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA==
 repeat-element@^1.1.2:
   version "1.1.4"
   resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9"
View file

@@ -3,7 +3,7 @@ const yargs = require("yargs")
 const fs = require("fs")
 const { join } = require("path")
 require("../src/db").init()
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 // load environment
 const env = require("../src/environment")
 const {
@@ -48,13 +48,14 @@ yargs
       const writeStream = fs.createWriteStream(join(exportPath, "dump.text"))
       // perform couch dump
-      const instanceDb = getDB(appId)
-      await instanceDb.dump(writeStream, {
+      await doWithDB(appId, async db => {
+        return db.dump(writeStream, {
          filter: doc =>
            !(
              doc._id.includes(USER_METDATA_PREFIX) ||
              doc.includes(LINK_USER_METADATA_PREFIX)
            ),
+        })
       })
       console.log(`Template ${name} exported to ${exportPath}`)
     }

View file

@@ -7,7 +7,7 @@
 require("../src/db").init()
 const { DocumentTypes } = require("../src/db/utils")
-const { getAllDbs, getDB } = require("@budibase/backend-core/db")
+const { getAllDbs, dangerousGetDB } = require("@budibase/backend-core/db")
 const appName = process.argv[2].toLowerCase()
 const remoteUrl = process.argv[3]
@@ -18,7 +18,7 @@ const run = async () => {
   const appDbNames = dbs.filter(dbName => dbName.startsWith("inst_app"))
   let apps = []
   for (let dbName of appDbNames) {
-    const db = getDB(dbName)
+    const db = dangerousGetDB(dbName)
     apps.push(db.get(DocumentTypes.APP_METADATA))
   }
   apps = await Promise.all(apps)
@@ -33,8 +33,8 @@ const run = async () => {
     return
   }
-  const instanceDb = getDB(app.appId)
-  const remoteDb = getDB(`${remoteUrl}/${appName}`)
+  const instanceDb = dangerousGetDB(app.appId)
+  const remoteDb = dangerousGetDB(`${remoteUrl}/${appName}`)
   instanceDb.replicate
     .to(remoteDb)
View file

@@ -1,4 +1,5 @@
-const { checkBuilderEndpoint, getDB } = require("./utilities/TestFunctions")
+const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
+const { getAppDB } = require("@budibase/backend-core/context")
 const setup = require("./utilities")
 const { basicTable } = setup.structures
@@ -122,7 +123,7 @@ describe("/tables", () => {
   describe("indexing", () => {
     it("should be able to create a table with indexes", async () => {
-      const db = getDB(config)
+      const db = getAppDB(config)
       const indexCount = (await db.getIndexes()).total_rows
       const table = basicTable()
       table.indexes = ["name"]

View file

@@ -3,7 +3,7 @@ const appController = require("../../../controllers/application")
 const { AppStatus } = require("../../../../db/utils")
 const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
 const { TENANT_ID } = require("../../../../tests/utilities/structures")
-const { getAppDB, doInAppContext } = require("@budibase/backend-core/context")
+const { doInAppContext } = require("@budibase/backend-core/context")
 const env = require("../../../../environment")
 function Request(appId, params) {
@@ -106,10 +106,6 @@ exports.checkPermissionsEndpoint = async ({
     .expect(403)
 }
-exports.getDB = () => {
-  return getAppDB()
-}
 exports.testAutomation = async (config, automation) => {
   return runRequest(automation.appId, async () => {
     return await config.request

View file

@@ -7,7 +7,7 @@ const { updateEntityMetadata } = require("../utilities")
 const { MetadataTypes, WebhookType } = require("../constants")
 const { getProdAppID } = require("@budibase/backend-core/db")
 const { cloneDeep } = require("lodash/fp")
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 const { getAppDB, getAppId } = require("@budibase/backend-core/context")
 const WH_STEP_ID = definitions.WEBHOOK.stepId
@@ -101,10 +101,11 @@ exports.enableCronTrigger = async (appId, automation) => {
     // can't use getAppDB here as this is likely to be called from dev app,
     // but this call could be for dev app or prod app, need to just use what
     // was passed in
-    const db = getDB(appId)
+    await doWithDB(appId, async db => {
      const response = await db.put(automation)
      automation._id = response.id
      automation._rev = response.rev
+    })
   }
   return automation
 }

View file

@@ -41,5 +41,6 @@ exports.runView = async (view, calculation, group, data) => {
     }
   }
   await db.destroy()
+  await db.close()
   return response
 }

View file

@@ -2,7 +2,7 @@ const TestConfig = require("../../tests/utilities/TestConfiguration")
 const { basicTable } = require("../../tests/utilities/structures")
 const linkUtils = require("../linkedRows/linkUtils")
 const { getAppDB } = require("@budibase/backend-core/context")
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 describe("test link functionality", () => {
   const config = new TestConfig(false)
@@ -48,12 +48,13 @@ describe("test link functionality", () => {
   describe("getLinkDocuments", () => {
     it("should create the link view when it doesn't exist", async () => {
       // create the DB and a very basic app design DB
-      const db = getDB("test")
+      const output = await doWithDB("test", async db => {
        await db.put({ _id: "_design/database", views: {} })
-      const output = await linkUtils.getLinkDocuments({
+        return await linkUtils.getLinkDocuments({
          tableId: "test",
          rowId: "test",
          includeDocs: false,
+        })
       })
       expect(Array.isArray(output)).toBe(true)
     })

View file

@@ -53,51 +53,55 @@ module CouchDBModule {
   class CouchDBIntegration implements IntegrationBase {
     private config: CouchDBConfig
-    private client: any
+    private readonly client: any
 
     constructor(config: CouchDBConfig) {
       this.config = config
       this.client = new PouchDB(`${config.url}/${config.database}`)
     }
 
-    async create(query: { json: object }) {
+    async query(
+      command: string,
+      errorMsg: string,
+      query: { json?: object; id?: string }
+    ) {
       try {
-        return this.client.post(query.json)
+        const response = await this.client[command](query.id || query.json)
+        await this.client.close()
+        return response
       } catch (err) {
-        console.error("Error writing to couchDB", err)
+        console.error(errorMsg, err)
         throw err
       }
     }
 
+    async create(query: { json: object }) {
+      return this.query("post", "Error writing to couchDB", query)
+    }
+
     async read(query: { json: object }) {
-      try {
-        const result = await this.client.allDocs({
-          include_docs: true,
-          ...query.json,
-        })
-        return result.rows.map((row: { doc: object }) => row.doc)
-      } catch (err) {
-        console.error("Error querying couchDB", err)
-        throw err
-      }
+      const result = await this.query("allDocs", "Error querying couchDB", {
+        json: {
+          include_docs: true,
+          ...query.json,
+        },
+      })
+      return result.rows.map((row: { doc: object }) => row.doc)
     }
 
     async update(query: { json: object }) {
-      try {
-        return this.client.put(query.json)
-      } catch (err) {
-        console.error("Error updating couchDB document", err)
-        throw err
-      }
+      return this.query("put", "Error updating couchDB document", query)
     }
 
     async delete(query: { id: string }) {
-      try {
-        return await this.client.remove(query.id)
-      } catch (err) {
-        console.error("Error deleting couchDB document", err)
-        throw err
-      }
+      const doc = await this.query(
+        "get",
+        "Cannot find doc to be deleted",
+        query
+      )
+      return this.query("remove", "Error deleting couchDB document", {
+        json: doc,
+      })
     }
   }
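In the integration above, every CRUD method now delegates to the shared `query(command, errorMsg, query)` helper, which runs the named PouchDB command and closes the client once it is done. A rough usage sketch, assuming the class is constructed directly as shown; the URL and database name are placeholders, not real settings:

// Sketch: one call through the refactored integration.
const couch = new CouchDBIntegration({
  url: "http://localhost:5984",
  database: "example",
})

async function listDocs() {
  // read() now routes through query("allDocs", "Error querying couchDB", ...),
  // so error logging and client cleanup happen in a single place
  const docs = await couch.read({ json: { limit: 5 } })
  return docs
}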

View file

@@ -5,7 +5,7 @@ const {
   checkDebounce,
   setDebounce,
 } = require("../utilities/redis")
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 const { DocumentTypes } = require("../db/utils")
 const { PermissionTypes } = require("@budibase/backend-core/permissions")
 const { app: appCache } = require("@budibase/backend-core/cache")
@@ -48,14 +48,15 @@ async function updateAppUpdatedAt(ctx) {
   if (ctx.method === "GET" || (await checkDebounce(appId))) {
     return
   }
-  const db = getDB(appId)
-  const metadata = await db.get(DocumentTypes.APP_METADATA)
-  metadata.updatedAt = new Date().toISOString()
-  const response = await db.put(metadata)
-  metadata._rev = response.rev
-  await appCache.invalidateAppMetadata(appId, metadata)
-  // set a new debounce record with a short TTL
-  await setDebounce(appId, DEBOUNCE_TIME_SEC)
+  await doWithDB(appId, async db => {
+    const metadata = await db.get(DocumentTypes.APP_METADATA)
+    metadata.updatedAt = new Date().toISOString()
+    const response = await db.put(metadata)
+    metadata._rev = response.rev
+    await appCache.invalidateAppMetadata(appId, metadata)
+    // set a new debounce record with a short TTL
+    await setDebounce(appId, DEBOUNCE_TIME_SEC)
+  })
 }
 
 module.exports = async (ctx, permType) => {

View file

@@ -1,4 +1,4 @@
-const { DocumentTypes, getDB } = require("@budibase/backend-core/db")
+const { DocumentTypes, doWithDB } = require("@budibase/backend-core/db")
 const TestConfig = require("../../../tests/utilities/TestConfiguration")
 const migration = require("../appUrls")
 
@@ -14,14 +14,13 @@ describe("run", () => {
   it("runs successfully", async () => {
     const app = await config.createApp("testApp")
-    const appDb = getDB(app.appId)
-    let metadata = await appDb.get(DocumentTypes.APP_METADATA)
-    delete metadata.url
-    await appDb.put(metadata)
-
-    await migration.run(appDb)
-
-    metadata = await appDb.get(DocumentTypes.APP_METADATA)
+    const metadata = await doWithDB(app.appId, async db => {
+      const metadataDoc = await db.get(DocumentTypes.APP_METADATA)
+      delete metadataDoc.url
+      await db.put(metadataDoc)
+      await migration.run(db)
+      return await db.get(DocumentTypes.APP_METADATA)
+    })
     expect(metadata.url).toEqual("/testapp")
   })
 })

View file

@@ -18,7 +18,7 @@ const supertest = require("supertest")
 const { cleanup } = require("../../utilities/fileSystem")
 const { Cookies, Headers } = require("@budibase/backend-core/constants")
 const { jwt } = require("@budibase/backend-core/auth")
-const { getGlobalDB } = require("@budibase/backend-core/tenancy")
+const { doWithGlobalDB } = require("@budibase/backend-core/tenancy")
 const { createASession } = require("@budibase/backend-core/sessions")
 const { user: userCache } = require("@budibase/backend-core/cache")
 const newid = require("../../db/newid")
@@ -84,17 +84,18 @@ class TestConfiguration {
   }
 
   async generateApiKey(userId = GLOBAL_USER_ID) {
-    const db = getGlobalDB(TENANT_ID)
-    const id = generateDevInfoID(userId)
-    let devInfo
-    try {
-      devInfo = await db.get(id)
-    } catch (err) {
-      devInfo = { _id: id, userId }
-    }
-    devInfo.apiKey = encrypt(`${TENANT_ID}${SEPARATOR}${newid()}`)
-    await db.put(devInfo)
-    return devInfo.apiKey
+    return doWithGlobalDB(TENANT_ID, async db => {
+      const id = generateDevInfoID(userId)
+      let devInfo
+      try {
+        devInfo = await db.get(id)
+      } catch (err) {
+        devInfo = { _id: id, userId }
+      }
+      devInfo.apiKey = encrypt(`${TENANT_ID}${SEPARATOR}${newid()}`)
+      await db.put(devInfo)
+      return devInfo.apiKey
+    })
   }
 
   async globalUser({
@@ -103,34 +104,35 @@ class TestConfiguration {
     email = EMAIL,
     roles,
   } = {}) {
-    const db = getGlobalDB(TENANT_ID)
-    let existing
-    try {
-      existing = await db.get(id)
-    } catch (err) {
-      existing = { email }
-    }
-    const user = {
-      _id: id,
-      ...existing,
-      roles: roles || {},
-      tenantId: TENANT_ID,
-    }
-    await createASession(id, {
-      sessionId: "sessionid",
-      tenantId: TENANT_ID,
-      csrfToken: CSRF_TOKEN,
-    })
-    if (builder) {
-      user.builder = { global: true }
-    } else {
-      user.builder = { global: false }
-    }
-    const resp = await db.put(user)
-    return {
-      _rev: resp._rev,
-      ...user,
-    }
+    return doWithGlobalDB(TENANT_ID, async db => {
+      let existing
+      try {
+        existing = await db.get(id)
+      } catch (err) {
+        existing = { email }
+      }
+      const user = {
+        _id: id,
+        ...existing,
+        roles: roles || {},
+        tenantId: TENANT_ID,
+      }
+      await createASession(id, {
+        sessionId: "sessionid",
+        tenantId: TENANT_ID,
+        csrfToken: CSRF_TOKEN,
+      })
+      if (builder) {
+        user.builder = { global: true }
+      } else {
+        user.builder = { global: false }
+      }
+      const resp = await db.put(user)
+      return {
+        _rev: resp._rev,
+        ...user,
+      }
+    })
   }
 
   // use a new id as the name to avoid name collisions
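The test configuration now wraps all global-database work in `doWithGlobalDB(tenantId, callback)`, which mirrors `doWithDB` but resolves the tenant's global database. A minimal usage sketch, assuming only the call shape visible above; the tenant ID and document ID are placeholders:

const { doWithGlobalDB } = require("@budibase/backend-core/tenancy")

// Sketch: read-modify-write against a tenant's global DB without holding on
// to the underlying PouchDB handle.
async function touchDoc(tenantId, docId) {
  return doWithGlobalDB(tenantId, async db => {
    const doc = await db.get(docId)
    doc.updatedAt = new Date().toISOString()
    await db.put(doc)
    // the callback's return value becomes doWithGlobalDB's resolved value
    return doc
  })
}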

View file

@@ -2,7 +2,7 @@ const { budibaseTempDir } = require("../budibaseDir")
 const fs = require("fs")
 const { join } = require("path")
 const uuid = require("uuid/v4")
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 const { ObjectStoreBuckets } = require("../../constants")
 const {
   upload,
@@ -151,41 +151,41 @@ exports.streamBackup = async appId => {
  * @return {*} either a readable stream or a string
  */
 exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
-  const instanceDb = getDB(dbName)
-
-  // Stream the dump if required
-  if (stream) {
-    const memStream = new MemoryStream()
-    instanceDb.dump(memStream, { filter })
-    return memStream
-  }
-
-  // Write the dump to file if required
-  if (exportName) {
-    const path = join(budibaseTempDir(), exportName)
-    const writeStream = fs.createWriteStream(path)
-    await instanceDb.dump(writeStream, { filter })
-
-    // Upload the dump to the object store if self hosted
-    if (env.SELF_HOSTED) {
-      await streamUpload(
-        ObjectStoreBuckets.BACKUPS,
-        join(dbName, exportName),
-        fs.createReadStream(path)
-      )
-    }
-
-    return fs.createReadStream(path)
-  }
-
-  // Stringify the dump in memory if required
-  const memStream = new MemoryStream()
-  let appString = ""
-  memStream.on("data", chunk => {
-    appString += chunk.toString()
-  })
-  await instanceDb.dump(memStream, { filter })
-  return appString
+  return doWithDB(dbName, async db => {
+    // Stream the dump if required
+    if (stream) {
+      const memStream = new MemoryStream()
+      db.dump(memStream, { filter })
+      return memStream
+    }
+
+    // Write the dump to file if required
+    if (exportName) {
+      const path = join(budibaseTempDir(), exportName)
+      const writeStream = fs.createWriteStream(path)
+      await db.dump(writeStream, { filter })
+
+      // Upload the dump to the object store if self hosted
+      if (env.SELF_HOSTED) {
+        await streamUpload(
+          ObjectStoreBuckets.BACKUPS,
+          join(dbName, exportName),
+          fs.createReadStream(path)
+        )
+      }
+
+      return fs.createReadStream(path)
+    }
+
+    // Stringify the dump in memory if required
+    const memStream = new MemoryStream()
+    let appString = ""
+    memStream.on("data", chunk => {
+      appString += chunk.toString()
+    })
+    await db.dump(memStream, { filter })
+    return appString
+  })
 }
 
 /**
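The rewritten `exportDB` keeps its three output modes, now all inside the `doWithDB` closure: a `MemoryStream` when `stream` is set, a file read stream when `exportName` is set, otherwise the dump buffered into a string. A brief usage sketch; the require path, app ID and filter function are illustrative placeholders:

// Sketch of the three call shapes of exportDB; values are placeholders.
const { exportDB } = require("./utilities/fileSystem")

async function demoExports() {
  // 1. stream: true -> a MemoryStream of the dump, suitable for piping
  const memStream = await exportDB("app_dev_example", { stream: true })

  // 2. exportName -> dump written to a temp file (and to the object store
  //    when self hosted), returned as a readable file stream
  const fileStream = await exportDB("app_dev_example", {
    exportName: "backup.txt",
  })

  // 3. neither -> the whole dump returned as a string, optionally filtered
  const appString = await exportDB("app_dev_example", {
    filter: doc => !doc._id.startsWith("_design"),
  })

  return { memStream, fileStream, appString }
}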

View file

@@ -2,7 +2,7 @@ const { getRowParams, USER_METDATA_PREFIX } = require("../../db/utils")
 const {
   isDevAppID,
   getDevelopmentAppID,
-  getDB,
+  doWithDB,
 } = require("@budibase/backend-core/db")
 
 const ROW_EXCLUSIONS = [USER_METDATA_PREFIX]
@@ -27,22 +27,23 @@ const getAppPairs = appIds => {
 const getAppRows = async appId => {
   // need to specify the app ID, as this is used for different apps in one call
-  const appDb = getDB(appId)
-  const response = await appDb.allDocs(
-    getRowParams(null, null, {
-      include_docs: false,
-    })
-  )
-  return response.rows
-    .map(r => r.id)
-    .filter(id => {
-      for (let exclusion of ROW_EXCLUSIONS) {
-        if (id.startsWith(exclusion)) {
-          return false
-        }
-      }
-      return true
-    })
+  return doWithDB(appId, async db => {
+    const response = await db.allDocs(
+      getRowParams(null, null, {
+        include_docs: false,
+      })
+    )
+    return response.rows
+      .map(r => r.id)
+      .filter(id => {
+        for (let exclusion of ROW_EXCLUSIONS) {
+          if (id.startsWith(exclusion)) {
+            return false
+          }
+        }
+        return true
+      })
+  })
 }
 
 /**

View file

@@ -15,6 +15,7 @@ const { invalidateSessions } = require("@budibase/backend-core/sessions")
 const accounts = require("@budibase/backend-core/accounts")
 const {
   getGlobalDB,
+  doWithGlobalDB,
   getTenantId,
   getTenantUser,
   doesTenantExist,
@@ -51,26 +52,27 @@ exports.adminUser = async ctx => {
     ctx.throw(403, "Organisation already exists.")
   }
 
-  const db = getGlobalDB(tenantId)
-  const response = await db.allDocs(
-    getGlobalUserParams(null, {
-      include_docs: true,
-    })
-  )
-
-  // write usage quotas for cloud
-  if (!env.SELF_HOSTED) {
-    // could be a scenario where it exists, make sure its clean
-    try {
-      const usageQuota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota)
-      if (usageQuota) {
-        await db.remove(usageQuota._id, usageQuota._rev)
-      }
-    } catch (err) {
-      // don't worry about errors
-    }
-    await db.put(generateNewUsageQuotaDoc())
-  }
+  const response = await doWithGlobalDB(tenantId, async db => {
+    const response = await db.allDocs(
+      getGlobalUserParams(null, {
+        include_docs: true,
+      })
+    )
+    // write usage quotas for cloud
+    if (!env.SELF_HOSTED) {
+      // could be a scenario where it exists, make sure its clean
+      try {
+        const usageQuota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota)
+        if (usageQuota) {
+          await db.remove(usageQuota._id, usageQuota._rev)
+        }
+      } catch (err) {
+        // don't worry about errors
+      }
+      await db.put(generateNewUsageQuotaDoc())
+    }
+    return response
+  })
 
   if (response.rows.some(row => row.doc.admin)) {
     ctx.throw(

View file

@@ -1,36 +1,42 @@
-const { StaticDatabases, getDB } = require("@budibase/backend-core/db")
+const { StaticDatabases, doWithDB } = require("@budibase/backend-core/db")
 const { getTenantId } = require("@budibase/backend-core/tenancy")
 const { deleteTenant } = require("@budibase/backend-core/deprovision")
 
 exports.exists = async ctx => {
   const tenantId = ctx.request.params
-  const db = getDB(StaticDatabases.PLATFORM_INFO.name)
-  let exists = false
-  try {
-    const tenantsDoc = await db.get(StaticDatabases.PLATFORM_INFO.docs.tenants)
-    if (tenantsDoc) {
-      exists = tenantsDoc.tenantIds.indexOf(tenantId) !== -1
-    }
-  } catch (err) {
-    // if error it doesn't exist
-  }
   ctx.body = {
-    exists,
+    exists: await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+      let exists = false
+      try {
+        const tenantsDoc = await db.get(
+          StaticDatabases.PLATFORM_INFO.docs.tenants
+        )
+        if (tenantsDoc) {
+          exists = tenantsDoc.tenantIds.indexOf(tenantId) !== -1
+        }
+      } catch (err) {
+        // if error it doesn't exist
+      }
+      return exists
+    }),
   }
 }
 
 exports.fetch = async ctx => {
-  const db = getDB(StaticDatabases.PLATFORM_INFO.name)
-  let tenants = []
-  try {
-    const tenantsDoc = await db.get(StaticDatabases.PLATFORM_INFO.docs.tenants)
-    if (tenantsDoc) {
-      tenants = tenantsDoc.tenantIds
-    }
-  } catch (err) {
-    // if error it doesn't exist
-  }
-  ctx.body = tenants
+  ctx.body = await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+    let tenants = []
+    try {
+      const tenantsDoc = await db.get(
+        StaticDatabases.PLATFORM_INFO.docs.tenants
+      )
+      if (tenantsDoc) {
+        tenants = tenantsDoc.tenantIds
+      }
+    } catch (err) {
+      // if error it doesn't exist
+    }
+    return tenants
+  })
 }
 
 exports.delete = async ctx => {