
merge with master

Martin McKeaveney 2022-04-26 10:21:45 +01:00
commit 9ca4dacbaa
93 changed files with 5027 additions and 1286 deletions

View file

@@ -47,6 +47,8 @@ ingress:
   className: ""
   annotations:
     kubernetes.io/ingress.class: nginx
+    nginx.ingress.kubernetes.io/client-max-body-size: 150M
+    nginx.ingress.kubernetes.io/proxy-body-size: 50m
   hosts:
     - host: # change if using custom domain
       paths:

View file

@ -1,5 +1,5 @@
{ {
"version": "1.0.105-alpha.41", "version": "1.0.122",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*" "packages/*"

View file

@@ -74,6 +74,7 @@
     "mode:account": "yarn mode:cloud && yarn env:account:enable",
     "security:audit": "node scripts/audit.js",
     "postinstall": "husky install",
-    "install:pro": "bash scripts/pro/install.sh"
+    "install:pro": "bash scripts/pro/install.sh",
+    "dep:clean": "yarn clean && yarn bootstrap"
   }
 }

View file

@@ -3,4 +3,5 @@ module.exports = {
   ...require("./src/db/constants"),
   ...require("./src/db"),
   ...require("./src/db/views"),
+  ...require("./src/db/pouch"),
 }

View file

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/backend-core",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "description": "Budibase backend core libraries used in server and worker",
   "main": "src/index.js",
   "author": "Budibase",
@@ -24,6 +24,10 @@
     "passport-google-oauth": "^2.0.0",
     "passport-jwt": "^4.0.0",
     "passport-local": "^1.0.0",
+    "posthog-node": "^1.3.0",
+    "pouchdb": "7.3.0",
+    "pouchdb-find": "^7.2.2",
+    "pouchdb-replication-stream": "^1.2.9",
     "sanitize-s3-objectkey": "^0.0.1",
     "tar-fs": "^2.1.1",
     "uuid": "^8.3.2",
@@ -37,7 +41,6 @@
   "devDependencies": {
     "ioredis-mock": "^5.5.5",
     "jest": "^26.6.3",
-    "pouchdb": "^7.2.1",
     "pouchdb-adapter-memory": "^7.2.2",
     "pouchdb-all-dbs": "^1.0.2"
   },

View file

@@ -1,5 +1,5 @@
 const redis = require("../redis/authRedis")
-const { getCouch } = require("../db")
+const { doWithDB } = require("../db")
 const { DocumentTypes } = require("../db/constants")

 const AppState = {
@@ -10,12 +10,14 @@ const EXPIRY_SECONDS = 3600
 /**
  * The default populate app metadata function
  */
-const populateFromDB = async (appId, CouchDB = null) => {
-  if (!CouchDB) {
-    CouchDB = getCouch()
-  }
-  const db = new CouchDB(appId, { skip_setup: true })
-  return db.get(DocumentTypes.APP_METADATA)
+const populateFromDB = async appId => {
+  return doWithDB(
+    appId,
+    db => {
+      return db.get(DocumentTypes.APP_METADATA)
+    },
+    { skip_setup: true }
+  )
 }

 const isInvalid = metadata => {
@@ -27,17 +29,16 @@ const isInvalid = metadata => {
  * Use redis cache to first read the app metadata.
  * If not present fallback to loading the app metadata directly and re-caching.
  * @param {string} appId the id of the app to get metadata from.
- * @param {object} CouchDB the database being passed
  * @returns {object} the app metadata.
  */
-exports.getAppMetadata = async (appId, CouchDB = null) => {
+exports.getAppMetadata = async appId => {
   const client = await redis.getAppClient()
   // try cache
   let metadata = await client.get(appId)
   if (!metadata) {
     let expiry = EXPIRY_SECONDS
     try {
-      metadata = await populateFromDB(appId, CouchDB)
+      metadata = await populateFromDB(appId)
     } catch (err) {
       // app DB left around, but no metadata, it is invalid
       if (err && err.status === 404) {

View file

@@ -1,5 +1,5 @@
 const redis = require("../redis/authRedis")
-const { getTenantId, lookupTenantId, getGlobalDB } = require("../tenancy")
+const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
 const env = require("../environment")
 const accounts = require("../cloud/accounts")
@@ -9,9 +9,8 @@ const EXPIRY_SECONDS = 3600
 /**
  * The default populate user function
  */
 const populateFromDB = async (userId, tenantId) => {
-  const user = await getGlobalDB(tenantId).get(userId)
+  const user = await doWithGlobalDB(tenantId, db => db.get(userId))
   user.budibaseAccess = true
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
     const account = await accounts.getAccount(user.email)
     if (account) {

View file

@@ -29,9 +29,7 @@ class API {
       credentials: "include",
     }

-    const resp = await fetch(`${this.host}${url}`, requestOptions)
-    return resp
+    return await fetch(`${this.host}${url}`, requestOptions)
   }

   post = this.apiCall("POST")

View file

@@ -4,7 +4,11 @@ const { newid } = require("../hashing")
 const REQUEST_ID_KEY = "requestId"

 class FunctionContext {
-  static getMiddleware(updateCtxFn = null, contextName = "session") {
+  static getMiddleware(
+    updateCtxFn = null,
+    destroyFn = null,
+    contextName = "session"
+  ) {
     const namespace = this.createNamespace(contextName)
     return async function (ctx, next) {
@@ -18,7 +22,14 @@ class FunctionContext {
           if (updateCtxFn) {
             updateCtxFn(ctx)
           }
-          next().then(resolve).catch(reject)
+          next()
+            .then(resolve)
+            .catch(reject)
+            .finally(() => {
+              if (destroyFn) {
+                return destroyFn(ctx)
+              }
+            })
         })
       )
     }
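The new destroyFn parameter gives callers a hook that runs once the request promise settles, whether it resolved or rejected. A minimal sketch of wiring it up (the middleware below is illustrative, not part of this commit):

const FunctionContext = require("./FunctionContext")

const updateCtxFn = ctx => {
  // prepare per-request state before the downstream middleware runs
}
const destroyFn = async ctx => {
  // runs in .finally(), even on error - a good place to release
  // per-request resources such as open PouchDB handles
}

module.exports = FunctionContext.getMiddleware(updateCtxFn, destroyFn)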

View file

@@ -1,6 +1,6 @@
 const { getGlobalUserParams, getAllApps } = require("../db/utils")
-const { getDB } = require("../db")
-const { getGlobalDB } = require("../tenancy")
+const { doWithDB } = require("../db")
+const { doWithGlobalDB } = require("../tenancy")
 const { StaticDatabases } = require("../db/constants")

 const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
@@ -8,11 +8,12 @@ const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name

 const removeTenantFromInfoDB = async tenantId => {
   try {
-    const infoDb = getDB(PLATFORM_INFO_DB)
-    let tenants = await infoDb.get(TENANT_DOC)
-    tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
-    await infoDb.put(tenants)
+    await doWithDB(PLATFORM_INFO_DB, async infoDb => {
+      let tenants = await infoDb.get(TENANT_DOC)
+      tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
+      await infoDb.put(tenants)
+    })
   } catch (err) {
     console.error(`Error removing tenant ${tenantId} from info db`, err)
     throw err
@@ -20,7 +21,7 @@ const removeTenantFromInfoDB = async tenantId => {
 }

 exports.removeUserFromInfoDB = async dbUser => {
-  const infoDb = getDB(PLATFORM_INFO_DB)
+  await doWithDB(PLATFORM_INFO_DB, async infoDb => {
     const keys = [dbUser._id, dbUser.email]
     const userDocs = await infoDb.allDocs({
       keys,
@@ -33,17 +34,18 @@ exports.removeUserFromInfoDB = async dbUser => {
       }
     })
     await infoDb.bulkDocs(toDelete)
+  })
 }

 const removeUsersFromInfoDB = async tenantId => {
+  return doWithGlobalDB(tenantId, async db => {
     try {
-      const globalDb = getGlobalDB(tenantId)
-      const infoDb = getDB(PLATFORM_INFO_DB)
-      const allUsers = await globalDb.allDocs(
+      const allUsers = await db.allDocs(
         getGlobalUserParams(null, {
           include_docs: true,
         })
       )
+      await doWithDB(PLATFORM_INFO_DB, async infoDb => {
@@ -61,26 +63,31 @@ const removeUsersFromInfoDB = async tenantId => {
         }
       })
       await infoDb.bulkDocs(toDelete)
+      })
     } catch (err) {
       console.error(`Error removing tenant ${tenantId} users from info db`, err)
       throw err
     }
+  })
 }

 const removeGlobalDB = async tenantId => {
+  return doWithGlobalDB(tenantId, async db => {
     try {
-      const globalDb = getGlobalDB(tenantId)
-      await globalDb.destroy()
+      await db.destroy()
     } catch (err) {
       console.error(`Error removing tenant ${tenantId} users from info db`, err)
       throw err
     }
+  })
 }

 const removeTenantApps = async tenantId => {
   try {
     const apps = await getAllApps({ all: true })
-    const destroyPromises = apps.map(app => getDB(app.appId).destroy())
+    const destroyPromises = apps.map(app =>
+      doWithDB(app.appId, db => db.destroy())
+    )
     await Promise.allSettled(destroyPromises)
   } catch (err) {
     console.error(`Error removing tenant ${tenantId} apps`, err)

View file

@@ -1,9 +1,11 @@
 const env = require("../environment")
 const { Headers } = require("../../constants")
 const { SEPARATOR, DocumentTypes } = require("../db/constants")
+const { DEFAULT_TENANT_ID } = require("../constants")
 const cls = require("./FunctionContext")
-const { getCouch } = require("../db")
+const { dangerousGetDB, closeDB } = require("../db")
 const { getProdAppID, getDevelopmentAppID } = require("../db/conversions")
+const { baseGlobalDBName } = require("../tenancy/utils")
 const { isEqual } = require("lodash")

 // some test cases call functions directly, need to
@@ -12,6 +14,7 @@ let TEST_APP_ID = null

 const ContextKeys = {
   TENANT_ID: "tenantId",
+  GLOBAL_DB: "globalDb",
   APP_ID: "appId",
   // whatever the request app DB was
   CURRENT_DB: "currentDb",
@@ -20,9 +23,37 @@ const ContextKeys = {
   // get the dev app DB from the request
   DEV_DB: "devDb",
   DB_OPTS: "dbOpts",
+  // check if something else is using the context, don't close DB
+  IN_USE: "inUse",
 }

-exports.DEFAULT_TENANT_ID = "default"
+exports.DEFAULT_TENANT_ID = DEFAULT_TENANT_ID
+
+// this function makes sure the PouchDB objects are closed and
+// fully deleted when finished - this protects against memory leaks
+async function closeAppDBs() {
+  const dbKeys = [
+    ContextKeys.CURRENT_DB,
+    ContextKeys.PROD_DB,
+    ContextKeys.DEV_DB,
+  ]
+  for (let dbKey of dbKeys) {
+    const db = cls.getFromContext(dbKey)
+    if (!db) {
+      continue
+    }
+    await closeDB(db)
+    // clear the DB from context, incase someone tries to use it again
+    cls.setOnContext(dbKey, null)
+  }
+  // clear the app ID now that the databases are closed
+  if (cls.getFromContext(ContextKeys.APP_ID)) {
+    cls.setOnContext(ContextKeys.APP_ID, null)
+  }
+  if (cls.getFromContext(ContextKeys.DB_OPTS)) {
+    cls.setOnContext(ContextKeys.DB_OPTS, null)
+  }
+}

 exports.isDefaultTenant = () => {
   return exports.getTenantId() === exports.DEFAULT_TENANT_ID
@@ -34,14 +65,41 @@ exports.isMultiTenant = () => {
 // used for automations, API endpoints should always be in context already
 exports.doInTenant = (tenantId, task) => {
-  return cls.run(() => {
+  // the internal function is so that we can re-use an existing
+  // context - don't want to close DB on a parent context
+  async function internal(opts = { existing: false }) {
     // set the tenant id
-    cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
+    if (!opts.existing) {
+      cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
+      exports.setGlobalDB(tenantId)
+    }

+    try {
       // invoke the task
-    return task()
-  })
+      return await task()
+    } finally {
+      const using = cls.getFromContext(ContextKeys.IN_USE)
+      if (!using || using <= 1) {
+        await closeDB(exports.getGlobalDB())
+        // clear from context now that database is closed/task is finished
+        cls.setOnContext(ContextKeys.TENANT_ID, null)
+        cls.setOnContext(ContextKeys.GLOBAL_DB, null)
+      } else {
+        cls.setOnContext(using - 1)
+      }
+    }
+  }
+
+  const using = cls.getFromContext(ContextKeys.IN_USE)
+  if (using && cls.getFromContext(ContextKeys.TENANT_ID) === tenantId) {
+    cls.setOnContext(ContextKeys.IN_USE, using + 1)
+    return internal({ existing: true })
+  } else {
+    return cls.run(async () => {
+      cls.setOnContext(ContextKeys.IN_USE, 1)
+      return internal()
+    })
+  }
 }

 /**
  * Given an app ID this will attempt to retrieve the tenant ID from it.
@@ -64,37 +122,58 @@ exports.getTenantIDFromAppID = appId => {
 }

 const setAppTenantId = appId => {
-  const appTenantId = this.getTenantIDFromAppID(appId) || this.DEFAULT_TENANT_ID
-  this.updateTenantId(appTenantId)
+  const appTenantId =
+    exports.getTenantIDFromAppID(appId) || exports.DEFAULT_TENANT_ID
+  exports.updateTenantId(appTenantId)
 }

 exports.doInAppContext = (appId, task) => {
   if (!appId) {
     throw new Error("appId is required")
   }
-  return cls.run(() => {
-    // set the app tenant id
-    setAppTenantId(appId)
+
+  // the internal function is so that we can re-use an existing
+  // context - don't want to close DB on a parent context
+  async function internal(opts = { existing: false }) {
+    // set the app tenant id
+    if (!opts.existing) {
+      setAppTenantId(appId)
+    }
     // set the app ID
     cls.setOnContext(ContextKeys.APP_ID, appId)
+    try {
       // invoke the task
-    return task()
-  })
+      return await task()
+    } finally {
+      const using = cls.getFromContext(ContextKeys.IN_USE)
+      if (!using || using <= 1) {
+        await closeAppDBs()
+      } else {
+        cls.setOnContext(using - 1)
+      }
+    }
+  }
+
+  const using = cls.getFromContext(ContextKeys.IN_USE)
+  if (using && cls.getFromContext(ContextKeys.APP_ID) === appId) {
+    cls.setOnContext(ContextKeys.IN_USE, using + 1)
    return internal({ existing: true })
+  } else {
+    return cls.run(async () => {
+      cls.setOnContext(ContextKeys.IN_USE, 1)
+      return internal()
+    })
+  }
 }

 exports.updateTenantId = tenantId => {
   cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
 }

-exports.updateAppId = appId => {
+exports.updateAppId = async appId => {
   try {
+    // have to close first, before removing the databases from context
+    await closeAppDBs()
     cls.setOnContext(ContextKeys.APP_ID, appId)
-    cls.setOnContext(ContextKeys.PROD_DB, null)
-    cls.setOnContext(ContextKeys.DEV_DB, null)
-    cls.setOnContext(ContextKeys.CURRENT_DB, null)
-    cls.setOnContext(ContextKeys.DB_OPTS, null)
   } catch (err) {
     if (env.isTest()) {
       TEST_APP_ID = appId
@@ -111,8 +190,8 @@ exports.setTenantId = (
   let tenantId
   // exit early if not multi-tenant
   if (!exports.isMultiTenant()) {
-    cls.setOnContext(ContextKeys.TENANT_ID, this.DEFAULT_TENANT_ID)
-    return
+    cls.setOnContext(ContextKeys.TENANT_ID, exports.DEFAULT_TENANT_ID)
+    return exports.DEFAULT_TENANT_ID
   }

   const allowQs = opts && opts.allowQs
@@ -140,6 +219,22 @@ exports.setTenantId = (
   if (tenantId) {
     cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
   }
+  return tenantId
+}
+
+exports.setGlobalDB = tenantId => {
+  const dbName = baseGlobalDBName(tenantId)
+  const db = dangerousGetDB(dbName)
+  cls.setOnContext(ContextKeys.GLOBAL_DB, db)
+  return db
+}
+
+exports.getGlobalDB = () => {
+  const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
+  if (!db) {
+    throw new Error("Global DB not found")
+  }
+  return db
 }

 exports.isTenantIdSet = () => {
@@ -167,7 +262,7 @@ exports.getAppId = () => {
   }
 }

-function getDB(key, opts) {
+function getContextDB(key, opts) {
   const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
   let storedOpts = cls.getFromContext(dbOptsKey)
   let db = cls.getFromContext(key)
@@ -175,7 +270,6 @@ function getContextDB(key, opts) {
     return db
   }
   const appId = exports.getAppId()
-  const CouchDB = getCouch()
   let toUseAppId
   switch (key) {
     case ContextKeys.CURRENT_DB:
@@ -188,7 +282,7 @@ function getContextDB(key, opts) {
       toUseAppId = getDevelopmentAppID(appId)
       break
   }
-  db = new CouchDB(toUseAppId, opts)
+  db = dangerousGetDB(toUseAppId, opts)
   try {
     cls.setOnContext(key, db)
     if (opts) {
@@ -207,7 +301,7 @@ function getContextDB(key, opts) {
  * contained, dev or prod.
  */
 exports.getAppDB = opts => {
-  return getDB(ContextKeys.CURRENT_DB, opts)
+  return getContextDB(ContextKeys.CURRENT_DB, opts)
 }

 /**
@@ -215,7 +309,7 @@ exports.getAppDB = opts => {
  * contained a development app ID, this will open the prod one.
  */
 exports.getProdAppDB = opts => {
-  return getDB(ContextKeys.PROD_DB, opts)
+  return getContextDB(ContextKeys.PROD_DB, opts)
 }

 /**
@@ -223,5 +317,5 @@ exports.getProdAppDB = opts => {
  * contained a prod app ID, this will open the dev one.
  */
 exports.getDevAppDB = opts => {
-  return getDB(ContextKeys.DEV_DB, opts)
+  return getContextDB(ContextKeys.DEV_DB, opts)
 }

View file

@@ -1,4 +1,4 @@
-const { getDB } = require(".")
+const { dangerousGetDB, closeDB } = require(".")

 class Replication {
   /**
@@ -7,8 +7,12 @@ class Replication {
    * @param {String} target - the DB you want to replicate to, or rollback from
    */
   constructor({ source, target }) {
-    this.source = getDB(source)
-    this.target = getDB(target)
+    this.source = dangerousGetDB(source)
+    this.target = dangerousGetDB(target)
+  }
+
+  close() {
+    return Promise.all([closeDB(this.source), closeDB(this.target)])
   }

   promisify(operation, opts = {}) {
@@ -51,7 +55,7 @@ class Replication {
   async rollback() {
     await this.target.destroy()
     // Recreate the DB again
-    this.target = getDB(this.target.name)
+    this.target = dangerousGetDB(this.target.name)
     await this.replicate()
   }
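Because Replication now opens its handles through dangerousGetDB, callers become responsible for releasing them via the new close() method. A hedged usage sketch (the import path and app ID variables are assumptions, not from this commit):

const Replication = require("@budibase/backend-core/src/db/Replication")

const replication = new Replication({ source: devAppId, target: prodAppId })
try {
  await replication.replicate()
} finally {
  // release both PouchDB handles added in this commit
  await replication.close()
}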

View file

@@ -1,13 +1,67 @@
-let Pouch
+const pouch = require("./pouch")
+const env = require("../environment")

-module.exports.setDB = pouch => {
-  Pouch = pouch
+let PouchDB
+let initialised = false
+
+const put =
+  dbPut =>
+  async (doc, options = {}) => {
+    const response = await dbPut(doc, options)
+    // TODO: add created / updated
+    return response
+  }
+
+const checkInitialised = () => {
+  if (!initialised) {
+    throw new Error("init has not been called")
+  }
 }

-module.exports.getDB = dbName => {
-  return new Pouch(dbName)
+exports.init = opts => {
+  PouchDB = pouch.getPouch(opts)
+  initialised = true
 }

-module.exports.getCouch = () => {
-  return Pouch
+// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
+// this function is prone to leaks, should only be used
+// in situations that using the function doWithDB does not work
+exports.dangerousGetDB = (dbName, opts) => {
+  checkInitialised()
+  const db = new PouchDB(dbName, opts)
+  const dbPut = db.put
+  db.put = put(dbPut)
+  return db
+}
+
+// use this function if you have called dangerousGetDB - close
+// the databases you've opened once finished
+exports.closeDB = async db => {
+  if (!db || env.isTest()) {
+    return
+  }
+  try {
+    return db.close()
+  } catch (err) {
+    // ignore error, already closed
+  }
+}
+
+// we have to use a callback for this so that we can close
+// the DB when we're done, without this manual requests would
+// need to close the database when done with it to avoid memory leaks
+exports.doWithDB = async (dbName, cb, opts) => {
+  const db = exports.dangerousGetDB(dbName, opts)
+  // need this to be async so that we can correctly close DB after all
+  // async operations have been completed
+  try {
+    return await cb(db)
+  } finally {
+    await exports.closeDB(db)
+  }
+}
+
+exports.allDbs = () => {
+  checkInitialised()
+  return PouchDB.allDbs()
 }
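The contract here is that doWithDB is the default, self-closing form, while dangerousGetDB/closeDB is the manual escape hatch. A small sketch of both, assuming the @budibase/backend-core/db entry point and an illustrative database name:

const { doWithDB, dangerousGetDB, closeDB } = require("@budibase/backend-core/db")

// preferred: the callback form closes the handle for you, even on error
const metadata = await doWithDB(
  "some_app_db", // hypothetical name
  db => db.get("app_metadata"),
  { skip_setup: true }
)

// escape hatch: a manual handle the caller must close
const db = dangerousGetDB("some_app_db")
try {
  await db.put({ _id: "some_doc" })
} finally {
  await closeDB(db)
}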

View file

@@ -0,0 +1,93 @@
+const PouchDB = require("pouchdb")
+const env = require("../environment")
+
+exports.getCouchUrl = () => {
+  if (!env.COUCH_DB_URL) return
+
+  // username and password already exist in URL
+  if (env.COUCH_DB_URL.includes("@")) {
+    return env.COUCH_DB_URL
+  }
+
+  const [protocol, ...rest] = env.COUCH_DB_URL.split("://")
+
+  if (!env.COUCH_DB_USERNAME || !env.COUCH_DB_PASSWORD) {
+    throw new Error(
+      "CouchDB configuration invalid. You must provide a fully qualified CouchDB url, or the COUCH_DB_USER and COUCH_DB_PASSWORD environment variables."
+    )
+  }
+
+  return `${protocol}://${env.COUCH_DB_USERNAME}:${env.COUCH_DB_PASSWORD}@${rest}`
+}
+
+exports.splitCouchUrl = url => {
+  const [protocol, rest] = url.split("://")
+  const [auth, host] = rest.split("@")
+  const [username, password] = auth.split(":")
+  return {
+    url: `${protocol}://${host}`,
+    auth: {
+      username,
+      password,
+    },
+  }
+}
+
+/**
+ * Return a constructor for PouchDB.
+ * This should be rarely used outside of the main application config.
+ * Exposed for exceptional cases such as in-memory views.
+ */
+exports.getPouch = (opts = {}) => {
+  let auth = {
+    username: env.COUCH_DB_USERNAME,
+    password: env.COUCH_DB_PASSWORD,
+  }
+  let url = exports.getCouchUrl() || "http://localhost:4005"
+
+  // need to update security settings
+  if (!auth.username || !auth.password || url.includes("@")) {
+    const split = exports.splitCouchUrl(url)
+    url = split.url
+    auth = split.auth
+  }
+
+  const authCookie = Buffer.from(`${auth.username}:${auth.password}`).toString(
+    "base64"
+  )
+
+  let POUCH_DB_DEFAULTS = {
+    prefix: url,
+    fetch: (url, opts) => {
+      // use a specific authorization cookie - be very explicit about how we authenticate
+      opts.headers.set("Authorization", `Basic ${authCookie}`)
+      return PouchDB.fetch(url, opts)
+    },
+  }
+
+  if (opts.inMemory) {
+    const inMemory = require("pouchdb-adapter-memory")
+    PouchDB.plugin(inMemory)
+    POUCH_DB_DEFAULTS = {
+      prefix: undefined,
+      adapter: "memory",
+    }
+  }
+
+  if (opts.replication) {
+    const replicationStream = require("pouchdb-replication-stream")
+    PouchDB.plugin(replicationStream.plugin)
+    PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
+  }
+
+  if (opts.find) {
+    const find = require("pouchdb-find")
+    PouchDB.plugin(find)
+  }
+
+  const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
+
+  if (opts.allDbs) {
+    const allDbs = require("pouchdb-all-dbs")
+    allDbs(Pouch)
+  }
+
+  return Pouch
+}
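getCouchUrl injects COUCH_DB_USERNAME/COUCH_DB_PASSWORD into a bare URL, and splitCouchUrl reverses that so getPouch can turn the credentials into a Basic Authorization header. Illustrative values, assuming the module resolves from the path below:

const { getCouchUrl, splitCouchUrl } = require("@budibase/backend-core/src/db/pouch")

// with COUCH_DB_URL=http://localhost:5984, COUCH_DB_USERNAME=admin,
// COUCH_DB_PASSWORD=secret (all hypothetical values):
getCouchUrl()
// -> "http://admin:secret@localhost:5984"

splitCouchUrl("http://admin:secret@localhost:5984")
// -> { url: "http://localhost:5984",
//      auth: { username: "admin", password: "secret" } }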

View file

@@ -11,7 +11,8 @@ const {
 } = require("./constants")
 const { getTenantId, getGlobalDBName } = require("../tenancy")
 const fetch = require("node-fetch")
-const { getCouch } = require("./index")
+const { doWithDB, allDbs } = require("./index")
+const { getCouchUrl } = require("./pouch")
 const { getAppMetadata } = require("../cache/appMetadata")
 const { checkSlashesInUrl } = require("../helpers")
 const {
@@ -150,25 +151,6 @@ exports.getRoleParams = (roleId = null, otherProps = {}) => {
   return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
 }

-exports.getCouchUrl = () => {
-  if (!env.COUCH_DB_URL) return
-
-  // username and password already exist in URL
-  if (env.COUCH_DB_URL.includes("@")) {
-    return env.COUCH_DB_URL
-  }
-
-  const [protocol, ...rest] = env.COUCH_DB_URL.split("://")
-
-  if (!env.COUCH_DB_USERNAME || !env.COUCH_DB_PASSWORD) {
-    throw new Error(
-      "CouchDB configuration invalid. You must provide a fully qualified CouchDB url, or the COUCH_DB_USER and COUCH_DB_PASSWORD environment variables."
-    )
-  }
-
-  return `${protocol}://${env.COUCH_DB_USERNAME}:${env.COUCH_DB_PASSWORD}@${rest}`
-}
-
 exports.getStartEndKeyURL = (base, baseKey, tenantId = null) => {
   const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
   return `${base}?startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
@@ -184,7 +166,7 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
   const efficient = opts && opts.efficient
   // specifically for testing we use the pouch package for this
   if (env.isTest()) {
-    return getCouch().allDbs()
+    return allDbs()
   }
   let dbs = []
   async function addDbs(url) {
@@ -196,7 +178,7 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
       throw "Cannot connect to CouchDB instance"
     }
   }
-  let couchUrl = `${exports.getCouchUrl()}/_all_dbs`
+  let couchUrl = `${getCouchUrl()}/_all_dbs`
   let tenantId = getTenantId()
   if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {
     // just get all DBs when:
@@ -227,7 +209,6 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
  * @return {Promise<object[]>} returns the app information document stored in each app database.
  */
 exports.getAllApps = async ({ dev, all, idsOnly, efficient } = {}) => {
-  const CouchDB = getCouch()
   let tenantId = getTenantId()
   if (!env.MULTI_TENANCY && !tenantId) {
     tenantId = DEFAULT_TENANT_ID
@@ -255,7 +236,7 @@ exports.getAllApps = async ({ dev, all, idsOnly, efficient } = {}) => {
   }
   const appPromises = appDbNames.map(app =>
     // skip setup otherwise databases could be re-created
-    getAppMetadata(app, CouchDB)
+    getAppMetadata(app)
   )
   if (appPromises.length === 0) {
     return []
@@ -299,10 +280,11 @@ exports.getDevAppIDs = async () => {
 }

 exports.dbExists = async dbName => {
-  const CouchDB = getCouch()
   let exists = false
+  return doWithDB(
+    dbName,
+    async db => {
       try {
-        const db = CouchDB(dbName, { skip_setup: true })
         // check if database exists
         const info = await db.info()
         if (info && !info.error) {
@@ -312,6 +294,9 @@ exports.dbExists = async dbName => {
         exists = false
       }
       return exists
+    },
+    { skip_setup: true }
+  )
 }

 /**
@@ -436,3 +421,4 @@ exports.generateConfigID = generateConfigID
 exports.getConfigParams = getConfigParams
 exports.getScopedFullConfig = getScopedFullConfig
 exports.generateDevInfoID = generateDevInfoID
+exports.getPlatformUrl = getPlatformUrl

View file

@@ -1,8 +1,8 @@
-const { setDB } = require("./db")
+const db = require("./db")

 module.exports = {
-  init(pouch) {
-    setDB(pouch)
+  init(opts = {}) {
+    db.init(opts.db)
   },
   // some default exports from the library, however these ideally shouldn't
   // be used, instead the syntax require("@budibase/backend-core/db") should be used

View file

@@ -1,8 +1,8 @@
 const google = require("../google")
 const { Cookies, Configs } = require("../../../constants")
 const { clearCookie, getCookie } = require("../../../utils")
-const { getDB } = require("../../../db")
-const { getScopedConfig } = require("../../../db/utils")
+const { getScopedConfig, getPlatformUrl } = require("../../../db/utils")
+const { doWithDB } = require("../../../db")
 const environment = require("../../../environment")
 const { getGlobalDB } = require("../../../tenancy")
@@ -13,18 +13,28 @@ async function fetchGoogleCreds() {
     type: Configs.GOOGLE,
   })
   // or fall back to env variables
-  const config = googleConfig || {
-    clientID: environment.GOOGLE_CLIENT_ID,
-    clientSecret: environment.GOOGLE_CLIENT_SECRET,
-  }
-
-  return config
+  return (
+    googleConfig || {
+      clientID: environment.GOOGLE_CLIENT_ID,
+      clientSecret: environment.GOOGLE_CLIENT_SECRET,
+    }
+  )
+}
+
+async function platformUrl() {
+  const db = getGlobalDB()
+  const publicConfig = await getScopedConfig(db, {
+    type: Configs.SETTINGS,
+  })
+  return getPlatformUrl(publicConfig)
 }

 async function preAuth(passport, ctx, next) {
   // get the relevant config
   const googleConfig = await fetchGoogleCreds()
+  const platUrl = await platformUrl()

-  let callbackUrl = `${environment.PLATFORM_URL}/api/global/auth/datasource/google/callback`
+  let callbackUrl = `${platUrl}/api/global/auth/datasource/google/callback`
   const strategy = await google.strategyFactory(googleConfig, callbackUrl)

   if (!ctx.query.appId || !ctx.query.datasourceId) {
@@ -41,14 +51,15 @@ async function preAuth(passport, ctx, next) {
 async function postAuth(passport, ctx, next) {
   // get the relevant config
   const config = await fetchGoogleCreds()
+  const platUrl = await platformUrl()

-  let callbackUrl = `${environment.PLATFORM_URL}/api/global/auth/datasource/google/callback`
+  let callbackUrl = `${platUrl}/api/global/auth/datasource/google/callback`
   const strategy = await google.strategyFactory(
     config,
     callbackUrl,
     (accessToken, refreshToken, profile, done) => {
       clearCookie(ctx, Cookies.DatasourceAuth)
-      done(null, { accessToken, refreshToken })
+      done(null, { refreshToken })
     }
   )
@@ -59,7 +70,7 @@ async function postAuth(passport, ctx, next) {
     { successRedirect: "/", failureRedirect: "/error" },
     async (err, tokens) => {
       // update the DB for the datasource with all the user info
-      const db = getDB(authStateCookie.appId)
+      await doWithDB(authStateCookie.appId, async db => {
         const datasource = await db.get(authStateCookie.datasourceId)
         if (!datasource.config) {
           datasource.config = {}
@@ -69,6 +80,7 @@ async function postAuth(passport, ctx, next) {
         ctx.redirect(
           `/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}`
         )
+      })
     }
   )(ctx, next)
 }

View file

@@ -2,17 +2,13 @@
 require("../../../tests/utilities/dbConfig")

-const database = require("../../../db")
 const { authenticateThirdParty } = require("../third-party-common")
 const { data } = require("./utilities/mock-data")
+const { DEFAULT_TENANT_ID } = require("../../../constants")

-const {
-  StaticDatabases,
-  generateGlobalUserID
-} = require("../../../db/utils")
+const { generateGlobalUserID } = require("../../../db/utils")
 const { newid } = require("../../../hashing")
+const { doWithGlobalDB, doInTenant } = require("../../../tenancy")

-let db

 const done = jest.fn()
@@ -21,7 +17,15 @@ const getErrorMessage = () => {
 }

 const saveUser = async (user) => {
+  return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
     return await db.put(user)
+  })
+}
+
+function authenticate(user, requireLocal, saveFn) {
+  return doInTenant(DEFAULT_TENANT_ID, () => {
+    return authenticateThirdParty(user, requireLocal, done, saveFn)
+  })
 }

 describe("third party common", () => {
@@ -29,35 +33,36 @@ describe("third party common", () => {
   let thirdPartyUser

   beforeEach(() => {
-    db = database.getDB(StaticDatabases.GLOBAL.name)
     thirdPartyUser = data.buildThirdPartyUser()
   })

   afterEach(async () => {
+    return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
       jest.clearAllMocks()
       await db.destroy()
+    })
   })

   describe("validation", () => {
     const testValidation = async (message) => {
-      await authenticateThirdParty(thirdPartyUser, false, done, saveUser)
+      await authenticate(thirdPartyUser, false, saveUser)
       expect(done.mock.calls.length).toBe(1)
       expect(getErrorMessage()).toContain(message)
     }

     it("provider fails", async () => {
       delete thirdPartyUser.provider
-      testValidation("third party user provider required")
+      await testValidation("third party user provider required")
     })

     it("user id fails", async () => {
       delete thirdPartyUser.userId
-      testValidation("third party user id required")
+      await testValidation("third party user id required")
     })

     it("email fails", async () => {
       delete thirdPartyUser.email
-      testValidation("third party user email required")
+      await testValidation("third party user email required")
     })
   })
@@ -81,7 +86,7 @@ describe("third party common", () => {
   describe("when the user doesn't exist", () => {
     describe("when a local account is required", () => {
       it("returns an error message", async () => {
-        await authenticateThirdParty(thirdPartyUser, true, done, saveUser)
+        await authenticate(thirdPartyUser, true, saveUser)
         expect(done.mock.calls.length).toBe(1)
         expect(getErrorMessage()).toContain("Email does not yet exist. You must set up your local budibase account first.")
       })
@@ -89,7 +94,7 @@ describe("third party common", () => {
     describe("when a local account isn't required", () => {
       it("creates and authenticates the user", async () => {
-        await authenticateThirdParty(thirdPartyUser, false, done, saveUser)
+        await authenticate(thirdPartyUser, false, saveUser)
         const user = expectUserIsAuthenticated()
         expectUserIsSynced(user, thirdPartyUser)
         expect(user.roles).toStrictEqual({})
@@ -103,12 +108,15 @@ describe("third party common", () => {
     let email

     const createUser = async () => {
+      return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
         dbUser = {
           _id: id,
           email: email,
         }
         const response = await db.put(dbUser)
         dbUser._rev = response.rev
+        return dbUser
+      })
     }

     const expectUserIsUpdated = (user) => {
@@ -126,7 +134,7 @@ describe("third party common", () => {
       })

       it("syncs and authenticates the user", async () => {
-        await authenticateThirdParty(thirdPartyUser, true, done, saveUser)
+        await authenticate(thirdPartyUser, true, saveUser)

         const user = expectUserIsAuthenticated()
         expectUserIsSynced(user, thirdPartyUser)
@@ -142,7 +150,7 @@ describe("third party common", () => {
       })

       it("syncs and authenticates the user", async () => {
-        await authenticateThirdParty(thirdPartyUser, true, done, saveUser)
+        await authenticate(thirdPartyUser, true, saveUser)

         const user = expectUserIsAuthenticated()
         expectUserIsSynced(user, thirdPartyUser)
@@ -160,7 +168,7 @@ describe("third party common", () => {
       })

       it("syncs and authenticates the user", async () => {
-        await authenticateThirdParty(thirdPartyUser, true, done, saveUser)
+        await authenticate(thirdPartyUser, true, saveUser)

         const user = expectUserIsAuthenticated()
         expectUserIsSynced(user, thirdPartyUser)

View file

@@ -1,4 +1,5 @@
-const { setTenantId } = require("../tenancy")
+const { setTenantId, setGlobalDB, getGlobalDB } = require("../tenancy")
+const { closeDB } = require("../db")
 const ContextFactory = require("../context/FunctionContext")
 const { buildMatcherRegex, matches } = require("./matchers")
@@ -10,10 +11,17 @@ module.exports = (
   const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
   const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)

-  return ContextFactory.getMiddleware(ctx => {
+  const updateCtxFn = ctx => {
     const allowNoTenant =
       opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
     const allowQs = !!matches(ctx, allowQsOptions)
-    setTenantId(ctx, { allowQs, allowNoTenant })
-  })
+    const tenantId = setTenantId(ctx, { allowQs, allowNoTenant })
+    setGlobalDB(tenantId)
+  }
+  const destroyFn = async () => {
+    const db = getGlobalDB()
+    await closeDB(db)
+  }
+
+  return ContextFactory.getMiddleware(updateCtxFn, destroyFn)
 }

View file

@@ -1,4 +1,5 @@
 const { DEFAULT_TENANT_ID } = require("../constants")
+const { doWithDB } = require("../db")
 const { DocumentTypes } = require("../db/constants")
 const { getAllApps } = require("../db/utils")
 const environment = require("../environment")
@@ -26,7 +27,7 @@ exports.getMigrationsDoc = async db => {
   }
 }

-const runMigration = async (CouchDB, migration, options = {}) => {
+const runMigration = async (migration, options = {}) => {
   const tenantId = getTenantId()
   const migrationType = migration.type
   const migrationName = migration.name
@@ -46,7 +47,7 @@ const runMigration = async (migration, options = {}) => {
   // run the migration against each db
   for (const dbName of dbNames) {
-    const db = new CouchDB(dbName)
+    await doWithDB(dbName, async db => {
       try {
         const doc = await exports.getMigrationsDoc(db)
@@ -62,7 +63,7 @@ const runMigration = async (migration, options = {}) => {
           )
         } else {
           // the migration has already been performed
-          continue
+          return
         }
       }
@@ -85,10 +86,11 @@ const runMigration = async (migration, options = {}) => {
         )
         throw err
       }
+    })
   }
 }

-exports.runMigrations = async (CouchDB, migrations, options = {}) => {
+exports.runMigrations = async (migrations, options = {}) => {
   console.log("Running migrations")
   let tenantIds
   if (environment.MULTI_TENANCY) {
@@ -108,9 +110,7 @@ exports.runMigrations = async (migrations, options = {}) => {
     // for all migrations
     for (const migration of migrations) {
       // run the migration
-      await doInTenant(tenantId, () =>
-        runMigration(CouchDB, migration, options)
-      )
+      await doInTenant(tenantId, () => runMigration(migration, options))
     }
   }
   console.log("Migrations complete")

View file

@@ -1,7 +1,7 @@
 require("../../tests/utilities/dbConfig")

 const { runMigrations, getMigrationsDoc } = require("../index")
-const CouchDB = require("../../db").getCouch()
+const { dangerousGetDB } = require("../../db")
 const {
   StaticDatabases,
 } = require("../../db/utils")
@@ -20,7 +20,7 @@ describe("migrations", () => {
   }]

   beforeEach(() => {
-    db = new CouchDB(StaticDatabases.GLOBAL.name)
+    db = dangerousGetDB(StaticDatabases.GLOBAL.name)
   })

   afterEach(async () => {
@@ -29,7 +29,7 @@ describe("migrations", () => {
   })

   const migrate = () => {
-    return runMigrations(CouchDB, MIGRATIONS)
+    return runMigrations(MIGRATIONS)
   }

   it("should run a new migration", async () => {
it("should run a new migration", async () => { it("should run a new migration", async () => {

View file

@@ -7,7 +7,7 @@ const {
   SEPARATOR,
 } = require("../db/utils")
 const { getAppDB } = require("../context")
-const { getDB } = require("../db")
+const { doWithDB } = require("../db")

 const BUILTIN_IDS = {
   ADMIN: "ADMIN",
@@ -199,7 +199,12 @@ exports.checkForRoleResourceArray = (rolePerms, resourceId) => {
  * @return {Promise<object[]>} An array of the role objects that were found.
  */
 exports.getAllRoles = async appId => {
-  const db = appId ? getDB(appId) : getAppDB()
+  if (appId) {
+    return doWithDB(appId, internal)
+  } else {
+    return internal(getAppDB())
+  }
+  async function internal(db) {
     const body = await db.allDocs(
       getRoleParams(null, {
         include_docs: true,
@@ -237,6 +242,7 @@ exports.getAllRoles = async appId => {
     }
     return roles
   }
+}

 /**
  * This retrieves the required role for a resource
/** /**
* This retrieves the required role for a resource * This retrieves the required role for a resource

View file

@@ -1,5 +1,6 @@
-const { getDB } = require("../db")
-const { SEPARATOR, StaticDatabases } = require("../db/constants")
+const { doWithDB } = require("../db")
+const { StaticDatabases } = require("../db/constants")
+const { baseGlobalDBName } = require("./utils")
 const {
   getTenantId,
   DEFAULT_TENANT_ID,
@@ -23,7 +24,7 @@ exports.addTenantToUrl = url => {
 }

 exports.doesTenantExist = async tenantId => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
     let tenants
     try {
       tenants = await db.get(TENANT_DOC)
@@ -36,10 +37,11 @@ exports.doesTenantExist = async tenantId => {
       Array.isArray(tenants.tenantIds) &&
       tenants.tenantIds.indexOf(tenantId) !== -1
     )
+  })
 }

 exports.tryAddTenant = async (tenantId, userId, email) => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
     const getDoc = async id => {
       if (!id) {
         return null
@@ -76,6 +78,7 @@ exports.tryAddTenant = async (tenantId, userId, email) => {
       promises.push(db.put(tenants))
     }
     await Promise.all(promises)
+  })
 }

 exports.getGlobalDBName = (tenantId = null) => {
@@ -84,23 +87,15 @@ exports.getGlobalDBName = (tenantId = null) => {
   if (!tenantId) {
     tenantId = getTenantId()
   }
-
-  let dbName
-  if (tenantId === DEFAULT_TENANT_ID) {
-    dbName = StaticDatabases.GLOBAL.name
-  } else {
-    dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
-  }
-  return dbName
+  return baseGlobalDBName(tenantId)
 }

-exports.getGlobalDB = (tenantId = null) => {
-  const dbName = exports.getGlobalDBName(tenantId)
-  return getDB(dbName)
+exports.doWithGlobalDB = (tenantId, cb) => {
+  return doWithDB(exports.getGlobalDBName(tenantId), cb)
 }

 exports.lookupTenantId = async userId => {
-  const db = getDB(StaticDatabases.PLATFORM_INFO.name)
+  return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
     let tenantId = env.MULTI_TENANCY ? DEFAULT_TENANT_ID : null
     try {
       const doc = await db.get(userId)
@@ -111,16 +106,18 @@ exports.lookupTenantId = async userId => {
       // just return the default
     }
     return tenantId
+  })
 }

 // lookup, could be email or userId, either will return a doc
 exports.getTenantUser = async identifier => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
     try {
       return await db.get(identifier)
     } catch (err) {
       return null
     }
+  })
 }

 exports.isUserInAppTenant = (appId, user = null) => {
@@ -135,7 +132,7 @@ exports.isUserInAppTenant = (appId, user = null) => {
 }

 exports.getTenantIds = async () => {
-  const db = getDB(PLATFORM_INFO_DB)
+  return doWithDB(PLATFORM_INFO_DB, async db => {
     let tenants
     try {
       tenants = await db.get(TENANT_DOC)
@@ -144,4 +141,5 @@ exports.getTenantIds = async () => {
       return []
     }
     return (tenants && tenants.tenantIds) || []
+  })
 }

View file

@@ -0,0 +1,12 @@
+const { DEFAULT_TENANT_ID } = require("../constants")
+const { StaticDatabases, SEPARATOR } = require("../db/constants")
+
+exports.baseGlobalDBName = tenantId => {
+  let dbName
+  if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
+    dbName = StaticDatabases.GLOBAL.name
+  } else {
+    dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
+  }
+  return dbName
+}
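baseGlobalDBName is now the single naming rule that both tenancy and context share for the global database. A quick sketch of the mapping, with illustrative constant values (StaticDatabases.GLOBAL.name and SEPARATOR come from db/constants; the concrete strings shown are assumptions):

const { baseGlobalDBName } = require("@budibase/backend-core/src/tenancy/utils")

baseGlobalDBName(undefined)  // -> StaticDatabases.GLOBAL.name, e.g. "global-db"
baseGlobalDBName("default")  // -> the same default name (DEFAULT_TENANT_ID)
baseGlobalDBName("acme")     // -> `acme${SEPARATOR}` + the default name, e.g. "acme_global-db"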

View file

@@ -1,17 +0,0 @@
-const PouchDB = require("pouchdb")
-const env = require("../../environment")
-
-let POUCH_DB_DEFAULTS
-
-// should always be test but good to do the sanity check
-if (env.isTest()) {
-  PouchDB.plugin(require("pouchdb-adapter-memory"))
-  POUCH_DB_DEFAULTS = {
-    prefix: undefined,
-    adapter: "memory",
-  }
-}
-
-const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
-
-module.exports = Pouch

View file

@@ -1,3 +1,5 @@
-const packageConfiguration = require("../../index")
-const CouchDB = require("./db")
-packageConfiguration.init(CouchDB)
+const core = require("../../index")
+const dbConfig = {
+  inMemory: true,
+}
+core.init({ db: dbConfig })

View file

@@ -10,7 +10,7 @@ const { options } = require("./middleware/passport/jwt")
 const { queryGlobalView } = require("./db/views")
 const { Headers, UserStatus, Cookies, MAX_VALID_DATE } = require("./constants")
 const {
-  getGlobalDB,
+  doWithGlobalDB,
   updateTenantId,
   getTenantUser,
   tryAddTenant,
@@ -209,7 +209,7 @@ exports.saveUser = async (
   // need to set the context for this request, as specified
   updateTenantId(tenantId)
   // specify the tenancy incase we're making a new admin user (public)
-  const db = getGlobalDB(tenantId)
+  return doWithGlobalDB(tenantId, async db => {
     let { email, password, _id } = user
     // make sure another user isn't using the same email
     let dbUser
@@ -285,6 +285,7 @@ exports.saveUser = async (
       throw err
     }
   }
+  })
 }

 /**
/** /**

View file

@@ -258,6 +258,13 @@
   dependencies:
     "@babel/helper-plugin-utils" "^7.14.5"

+"@babel/runtime@^7.15.4":
+  version "7.17.9"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72"
+  integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==
+  dependencies:
+    regenerator-runtime "^0.13.4"
+
 "@babel/template@^7.16.0", "@babel/template@^7.3.3":
   version "7.16.0"
   resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6"
@@ -857,6 +864,21 @@ aws4@^1.8.0:
   resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
   integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==

+axios-retry@^3.1.9:
+  version "3.2.4"
+  resolved "https://registry.yarnpkg.com/axios-retry/-/axios-retry-3.2.4.tgz#f447a53c3456f5bfeca18f20c3a3272207d082ae"
+  integrity sha512-Co3UXiv4npi6lM963mfnuH90/YFLKWWDmoBYfxkHT5xtkSSWNqK9zdG3fw5/CP/dsoKB5aMMJCsgab+tp1OxLQ==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    is-retry-allowed "^2.2.0"
+
+axios@0.24.0:
+  version "0.24.0"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
+  integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
+  dependencies:
+    follow-redirects "^1.14.4"
+
 babel-jest@^26.6.3:
   version "26.6.3"
   resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056"
@@ -1048,7 +1070,7 @@ buffer-from@1.1.1:
   resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
   integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==

-buffer-from@^1.0.0:
+buffer-from@1.1.2, buffer-from@^1.0.0:
   version "1.1.2"
   resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
   integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
@@ -1139,6 +1161,11 @@ char-regex@^1.0.2:
   resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
   integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==

+charenc@0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
+  integrity sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=
+
 chownr@^1.1.1:
   version "1.1.4"
   resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
@@ -1273,6 +1300,11 @@ component-emitter@^1.2.1:
   resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
   integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==

+component-type@^1.2.1:
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/component-type/-/component-type-1.2.1.tgz#8a47901700238e4fc32269771230226f24b415a9"
+  integrity sha1-ikeQFwAjjk/DIml3EjAibyS0Fak=
+
 concat-map@0.0.1:
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
@@ -1315,6 +1347,11 @@ cross-spawn@^7.0.0:
     shebang-command "^2.0.0"
     which "^2.0.1"

+crypt@0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
+  integrity sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=
+
 cryptiles@2.x.x:
   version "2.0.5"
   resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
@@ -1777,6 +1814,13 @@ fetch-cookie@0.10.1:
   dependencies:
     tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"

+fetch-cookie@0.11.0:
+  version "0.11.0"
+  resolved "https://registry.yarnpkg.com/fetch-cookie/-/fetch-cookie-0.11.0.tgz#e046d2abadd0ded5804ce7e2cae06d4331c15407"
+  integrity sha512-BQm7iZLFhMWFy5CZ/162sAGjBfdNWb7a8LEqqnzsHFhxT/X/SVj/z2t2nu3aJvjlbQkrAlTUApplPRjWyH4mhA==
+  dependencies:
+    tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"
+
 fill-range@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
@@ -1802,6 +1846,11 @@ find-up@^4.0.0, find-up@^4.1.0:
     locate-path "^5.0.0"
     path-exists "^4.0.0"

+follow-redirects@^1.14.4:
+  version "1.14.9"
+  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7"
+  integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==
+
 for-in@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
@@ -2175,7 +2224,7 @@ inflight@^1.0.4:
     once "^1.3.0"
     wrappy "1"

-inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1:
+inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
   integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@@ -2226,7 +2275,7 @@ is-arrayish@^0.2.1:
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
is-buffer@^1.1.5: is-buffer@^1.1.5, is-buffer@~1.1.6:
version "1.1.6" version "1.1.6"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
@ -2328,6 +2377,11 @@ is-potential-custom-element-name@^1.0.1:
resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5"
integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==
is-retry-allowed@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz#88f34cbd236e043e71b6932d09b0c65fb7b4d71d"
integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==
is-stream@^1.1.0: is-stream@^1.1.0:
version "1.1.0" version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
@ -2360,7 +2414,7 @@ isarray@0.0.1:
resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=
isarray@1.0.0, isarray@^1.0.0: isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
@ -2824,6 +2878,11 @@ jodid25519@^1.0.0:
dependencies: dependencies:
jsbn "~0.1.0" jsbn "~0.1.0"
join-component@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/join-component/-/join-component-1.1.0.tgz#b8417b750661a392bee2c2537c68b2a9d4977cd5"
integrity sha1-uEF7dQZho5K+4sJTfGiyqdSXfNU=
js-tokens@^4.0.0: js-tokens@^4.0.0:
version "4.0.0" version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
@ -2902,7 +2961,7 @@ json-stable-stringify@^1.0.1:
dependencies: dependencies:
jsonify "~0.0.0" jsonify "~0.0.0"
json-stringify-safe@~5.0.1: json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1:
version "5.0.1" version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=
@ -3204,6 +3263,11 @@ lodash.once@^4.0.0:
resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=
lodash.pick@^4.0.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3"
integrity sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM=
lodash@^4.14.0: lodash@^4.14.0:
version "4.17.4" version "4.17.4"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae"
@ -3252,6 +3316,15 @@ map-visit@^1.0.0:
dependencies: dependencies:
object-visit "^1.0.0" object-visit "^1.0.0"
md5@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==
dependencies:
charenc "0.0.2"
crypt "0.0.2"
is-buffer "~1.1.6"
memdown@1.4.1: memdown@1.4.1:
version "1.4.1" version "1.4.1"
resolved "https://registry.yarnpkg.com/memdown/-/memdown-1.4.1.tgz#b4e4e192174664ffbae41361aa500f3119efe215" resolved "https://registry.yarnpkg.com/memdown/-/memdown-1.4.1.tgz#b4e4e192174664ffbae41361aa500f3119efe215"
@ -3372,6 +3445,11 @@ ms@2.1.2, ms@^2.1.1:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
ms@^2.1.3:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
nanomatch@^1.2.9: nanomatch@^1.2.9:
version "1.2.13" version "1.2.13"
resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
@ -3399,6 +3477,16 @@ natural-compare@^1.4.0:
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
ndjson@^1.4.3:
version "1.5.0"
resolved "https://registry.yarnpkg.com/ndjson/-/ndjson-1.5.0.tgz#ae603b36b134bcec347b452422b0bf98d5832ec8"
integrity sha1-rmA7NrE0vOw0e0UkIrC/mNWDLsg=
dependencies:
json-stringify-safe "^5.0.1"
minimist "^1.2.0"
split2 "^2.1.0"
through2 "^2.0.3"
nice-try@^1.0.4: nice-try@^1.0.4:
version "1.0.5" version "1.0.5"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
@ -3409,7 +3497,7 @@ node-fetch@2.6.0:
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA== integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
node-fetch@^2.6.1: node-fetch@2.6.7, node-fetch@^2.6.1:
version "2.6.7" version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
@ -3769,6 +3857,42 @@ posix-character-classes@^0.1.0:
resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=
posthog-node@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-1.3.0.tgz#804ed2f213a2f05253f798bf9569d55a9cad94f7"
integrity sha512-2+VhqiY/rKIqKIXyvemBFHbeijHE25sP7eKltnqcFqAssUE6+sX6vusN9A4luzToOqHQkUZexiCKxvuGagh7JA==
dependencies:
axios "0.24.0"
axios-retry "^3.1.9"
component-type "^1.2.1"
join-component "^1.1.0"
md5 "^2.3.0"
ms "^2.1.3"
remove-trailing-slash "^0.1.1"
uuid "^8.3.2"
pouch-stream@^0.4.0:
version "0.4.1"
resolved "https://registry.yarnpkg.com/pouch-stream/-/pouch-stream-0.4.1.tgz#0c6d8475c9307677627991a2f079b301c3b89bdd"
integrity sha1-DG2EdckwdndieZGi8HmzAcO4m90=
dependencies:
inherits "^2.0.1"
readable-stream "^1.0.27-1"
pouchdb-abstract-mapreduce@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-abstract-mapreduce/-/pouchdb-abstract-mapreduce-7.2.2.tgz#dd1b10a83f8d24361dce9aaaab054614b39f766f"
integrity sha512-7HWN/2yV2JkwMnGnlp84lGvFtnm0Q55NiBUdbBcaT810+clCGKvhssBCrXnmwShD1SXTwT83aszsgiSfW+SnBA==
dependencies:
pouchdb-binary-utils "7.2.2"
pouchdb-collate "7.2.2"
pouchdb-collections "7.2.2"
pouchdb-errors "7.2.2"
pouchdb-fetch "7.2.2"
pouchdb-mapreduce-utils "7.2.2"
pouchdb-md5 "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-adapter-leveldb-core@7.2.2: pouchdb-adapter-leveldb-core@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-adapter-leveldb-core/-/pouchdb-adapter-leveldb-core-7.2.2.tgz#e0aa6a476e2607d7ae89f4a803c9fba6e6d05a8a" resolved "https://registry.yarnpkg.com/pouchdb-adapter-leveldb-core/-/pouchdb-adapter-leveldb-core-7.2.2.tgz#e0aa6a476e2607d7ae89f4a803c9fba6e6d05a8a"
@ -3828,6 +3952,11 @@ pouchdb-binary-utils@7.2.2:
dependencies: dependencies:
buffer-from "1.1.1" buffer-from "1.1.1"
pouchdb-collate@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-collate/-/pouchdb-collate-7.2.2.tgz#fc261f5ef837c437e3445fb0abc3f125d982c37c"
integrity sha512-/SMY9GGasslknivWlCVwXMRMnQ8myKHs4WryQ5535nq1Wj/ehpqWloMwxEQGvZE1Sda3LOm7/5HwLTcB8Our+w==
pouchdb-collections@7.2.2: pouchdb-collections@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-collections/-/pouchdb-collections-7.2.2.tgz#aeed77f33322429e3f59d59ea233b48ff0e68572" resolved "https://registry.yarnpkg.com/pouchdb-collections/-/pouchdb-collections-7.2.2.tgz#aeed77f33322429e3f59d59ea233b48ff0e68572"
@ -3840,6 +3969,28 @@ pouchdb-errors@7.2.2:
dependencies: dependencies:
inherits "2.0.4" inherits "2.0.4"
pouchdb-fetch@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-fetch/-/pouchdb-fetch-7.2.2.tgz#492791236d60c899d7e9973f9aca0d7b9cc02230"
integrity sha512-lUHmaG6U3zjdMkh8Vob9GvEiRGwJfXKE02aZfjiVQgew+9SLkuOxNw3y2q4d1B6mBd273y1k2Lm0IAziRNxQnA==
dependencies:
abort-controller "3.0.0"
fetch-cookie "0.10.1"
node-fetch "2.6.0"
pouchdb-find@^7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-find/-/pouchdb-find-7.2.2.tgz#1227afdd761812d508fe0794b3e904518a721089"
integrity sha512-BmFeFVQ0kHmDehvJxNZl9OmIztCjPlZlVSdpijuFbk/Fi1EFPU1BAv3kLC+6DhZuOqU/BCoaUBY9sn66pPY2ag==
dependencies:
pouchdb-abstract-mapreduce "7.2.2"
pouchdb-collate "7.2.2"
pouchdb-errors "7.2.2"
pouchdb-fetch "7.2.2"
pouchdb-md5 "7.2.2"
pouchdb-selector-core "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-json@7.2.2: pouchdb-json@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-json/-/pouchdb-json-7.2.2.tgz#b939be24b91a7322e9a24b8880a6e21514ec5e1f" resolved "https://registry.yarnpkg.com/pouchdb-json/-/pouchdb-json-7.2.2.tgz#b939be24b91a7322e9a24b8880a6e21514ec5e1f"
@ -3847,6 +3998,16 @@ pouchdb-json@7.2.2:
dependencies: dependencies:
vuvuzela "1.0.3" vuvuzela "1.0.3"
pouchdb-mapreduce-utils@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-mapreduce-utils/-/pouchdb-mapreduce-utils-7.2.2.tgz#13a46a3cc2a3f3b8e24861da26966904f2963146"
integrity sha512-rAllb73hIkU8rU2LJNbzlcj91KuulpwQu804/F6xF3fhZKC/4JQMClahk+N/+VATkpmLxp1zWmvmgdlwVU4HtQ==
dependencies:
argsarray "0.0.1"
inherits "2.0.4"
pouchdb-collections "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-md5@7.2.2: pouchdb-md5@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-md5/-/pouchdb-md5-7.2.2.tgz#415401acc5a844112d765bd1fb4e5d9f38fb0838" resolved "https://registry.yarnpkg.com/pouchdb-md5/-/pouchdb-md5-7.2.2.tgz#415401acc5a844112d765bd1fb4e5d9f38fb0838"
@ -3860,13 +4021,34 @@ pouchdb-merge@7.2.2:
resolved "https://registry.yarnpkg.com/pouchdb-merge/-/pouchdb-merge-7.2.2.tgz#940d85a2b532d6a93a6cab4b250f5648511bcc16" resolved "https://registry.yarnpkg.com/pouchdb-merge/-/pouchdb-merge-7.2.2.tgz#940d85a2b532d6a93a6cab4b250f5648511bcc16"
integrity sha512-6yzKJfjIchBaS7Tusuk8280WJdESzFfQ0sb4jeMUNnrqs4Cx3b0DIEOYTRRD9EJDM+je7D3AZZ4AT0tFw8gb4A== integrity sha512-6yzKJfjIchBaS7Tusuk8280WJdESzFfQ0sb4jeMUNnrqs4Cx3b0DIEOYTRRD9EJDM+je7D3AZZ4AT0tFw8gb4A==
pouchdb-promise@6.4.3: pouchdb-promise@6.4.3, pouchdb-promise@^6.0.4:
version "6.4.3" version "6.4.3"
resolved "https://registry.yarnpkg.com/pouchdb-promise/-/pouchdb-promise-6.4.3.tgz#74516f4acf74957b54debd0fb2c0e5b5a68ca7b3" resolved "https://registry.yarnpkg.com/pouchdb-promise/-/pouchdb-promise-6.4.3.tgz#74516f4acf74957b54debd0fb2c0e5b5a68ca7b3"
integrity sha512-ruJaSFXwzsxRHQfwNHjQfsj58LBOY1RzGzde4PM5CWINZwFjCQAhZwfMrch2o/0oZT6d+Xtt0HTWhq35p3b0qw== integrity sha512-ruJaSFXwzsxRHQfwNHjQfsj58LBOY1RzGzde4PM5CWINZwFjCQAhZwfMrch2o/0oZT6d+Xtt0HTWhq35p3b0qw==
dependencies: dependencies:
lie "3.1.1" lie "3.1.1"
pouchdb-replication-stream@^1.2.9:
version "1.2.9"
resolved "https://registry.yarnpkg.com/pouchdb-replication-stream/-/pouchdb-replication-stream-1.2.9.tgz#aa4fa5d8f52df4825392f18e07c7e11acffc650a"
integrity sha1-qk+l2PUt9IJTkvGOB8fhGs/8ZQo=
dependencies:
argsarray "0.0.1"
inherits "^2.0.3"
lodash.pick "^4.0.0"
ndjson "^1.4.3"
pouch-stream "^0.4.0"
pouchdb-promise "^6.0.4"
through2 "^2.0.0"
pouchdb-selector-core@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-selector-core/-/pouchdb-selector-core-7.2.2.tgz#264d7436a8c8ac3801f39960e79875ef7f3879a0"
integrity sha512-XYKCNv9oiNmSXV5+CgR9pkEkTFqxQGWplnVhO3W9P154H08lU0ZoNH02+uf+NjZ2kjse7Q1fxV4r401LEcGMMg==
dependencies:
pouchdb-collate "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-utils@7.2.2: pouchdb-utils@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-utils/-/pouchdb-utils-7.2.2.tgz#c17c4788f1d052b0daf4ef8797bbc4aaa3945aa4" resolved "https://registry.yarnpkg.com/pouchdb-utils/-/pouchdb-utils-7.2.2.tgz#c17c4788f1d052b0daf4ef8797bbc4aaa3945aa4"
@ -3881,17 +4063,17 @@ pouchdb-utils@7.2.2:
pouchdb-md5 "7.2.2" pouchdb-md5 "7.2.2"
uuid "8.1.0" uuid "8.1.0"
pouchdb@^7.2.1: pouchdb@7.3.0:
version "7.2.2" version "7.3.0"
resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-7.2.2.tgz#fcae82862db527e4cf7576ed8549d1384961f364" resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-7.3.0.tgz#440fbef12dfd8f9002320802528665e883a3b7f8"
integrity sha512-5gf5nw5XH/2H/DJj8b0YkvG9fhA/4Jt6kL0Y8QjtztVjb1y4J19Rg4rG+fUbXu96gsUrlyIvZ3XfM0b4mogGmw== integrity sha512-OwsIQGXsfx3TrU1pLruj6PGSwFH+h5k4hGNxFkZ76Um7/ZI8F5TzUHFrpldVVIhfXYi2vP31q0q7ot1FSLFYOw==
dependencies: dependencies:
abort-controller "3.0.0" abort-controller "3.0.0"
argsarray "0.0.1" argsarray "0.0.1"
buffer-from "1.1.1" buffer-from "1.1.2"
clone-buffer "1.0.0" clone-buffer "1.0.0"
double-ended-queue "2.1.0-0" double-ended-queue "2.1.0-0"
fetch-cookie "0.10.1" fetch-cookie "0.11.0"
immediate "3.3.0" immediate "3.3.0"
inherits "2.0.4" inherits "2.0.4"
level "6.0.1" level "6.0.1"
@ -3900,11 +4082,11 @@ pouchdb@^7.2.1:
leveldown "5.6.0" leveldown "5.6.0"
levelup "4.4.0" levelup "4.4.0"
ltgt "2.2.1" ltgt "2.2.1"
node-fetch "2.6.0" node-fetch "2.6.7"
readable-stream "1.1.14" readable-stream "1.1.14"
spark-md5 "3.0.1" spark-md5 "3.0.2"
through2 "3.0.2" through2 "3.0.2"
uuid "8.1.0" uuid "8.3.2"
vuvuzela "1.0.3" vuvuzela "1.0.3"
prelude-ls@~1.1.2: prelude-ls@~1.1.2:
@ -3927,6 +4109,11 @@ private@^0.1.6, private@~0.1.5:
resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff"
integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==
process-nextick-args@~2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
prompts@^2.0.1: prompts@^2.0.1:
version "2.4.2" version "2.4.2"
resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069"
@ -4012,7 +4199,7 @@ read-pkg@^5.2.0:
parse-json "^5.0.0" parse-json "^5.0.0"
type-fest "^0.6.0" type-fest "^0.6.0"
readable-stream@1.1.14: readable-stream@1.1.14, readable-stream@^1.0.27-1:
version "1.1.14" version "1.1.14"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9"
integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk= integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk=
@ -4036,6 +4223,19 @@ readable-stream@~0.0.2:
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-0.0.4.tgz#f32d76e3fb863344a548d79923007173665b3b8d" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-0.0.4.tgz#f32d76e3fb863344a548d79923007173665b3b8d"
integrity sha1-8y124/uGM0SlSNeZIwBxc2ZbO40= integrity sha1-8y124/uGM0SlSNeZIwBxc2ZbO40=
readable-stream@~2.3.6:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.3"
isarray "~1.0.0"
process-nextick-args "~2.0.0"
safe-buffer "~5.1.1"
string_decoder "~1.1.1"
util-deprecate "~1.0.1"
readline-sync@^1.4.9: readline-sync@^1.4.9:
version "1.4.10" version "1.4.10"
resolved "https://registry.yarnpkg.com/readline-sync/-/readline-sync-1.4.10.tgz#41df7fbb4b6312d673011594145705bf56d8873b" resolved "https://registry.yarnpkg.com/readline-sync/-/readline-sync-1.4.10.tgz#41df7fbb4b6312d673011594145705bf56d8873b"
@ -4068,6 +4268,11 @@ redis-parser@^3.0.0:
dependencies: dependencies:
redis-errors "^1.0.0" redis-errors "^1.0.0"
regenerator-runtime@^0.13.4:
version "0.13.9"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==
regex-not@^1.0.0, regex-not@^1.0.2: regex-not@^1.0.0, regex-not@^1.0.2:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
@ -4081,6 +4286,11 @@ remove-trailing-separator@^1.0.1:
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=
remove-trailing-slash@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d"
integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA==
repeat-element@^1.1.2: repeat-element@^1.1.2:
version "1.1.4" version "1.1.4"
resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9"
@ -4202,7 +4412,7 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
safe-buffer@~5.1.1: safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.2" version "5.1.2"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
@ -4425,6 +4635,11 @@ spark-md5@3.0.1:
resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.1.tgz#83a0e255734f2ab4e5c466e5a2cfc9ba2aa2124d" resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.1.tgz#83a0e255734f2ab4e5c466e5a2cfc9ba2aa2124d"
integrity sha512-0tF3AGSD1ppQeuffsLDIOWlKUd3lS92tFxcsrh5Pe3ZphhnoK+oXIBTzOAThZCiuINZLvpiLH/1VS1/ANEJVig== integrity sha512-0tF3AGSD1ppQeuffsLDIOWlKUd3lS92tFxcsrh5Pe3ZphhnoK+oXIBTzOAThZCiuINZLvpiLH/1VS1/ANEJVig==
spark-md5@3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.2.tgz#7952c4a30784347abcee73268e473b9c0167e3fc"
integrity sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==
spdx-correct@^3.0.0: spdx-correct@^3.0.0:
version "3.1.1" version "3.1.1"
resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9"
@ -4458,6 +4673,13 @@ split-string@^3.0.1, split-string@^3.0.2:
dependencies: dependencies:
extend-shallow "^3.0.0" extend-shallow "^3.0.0"
split2@^2.1.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/split2/-/split2-2.2.0.tgz#186b2575bcf83e85b7d18465756238ee4ee42493"
integrity sha512-RAb22TG39LhI31MbreBgIuKiIKhVsawfTgEGqKHTK87aG+ul/PB8Sqoi3I7kVdRWiCfrKxK3uo4/YUkpNvhPbw==
dependencies:
through2 "^2.0.2"
sprintf-js@^1.1.1: sprintf-js@^1.1.1:
version "1.1.2" version "1.1.2"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673"
@ -4548,6 +4770,13 @@ string_decoder@~0.10.x:
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=
string_decoder@~1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
dependencies:
safe-buffer "~5.1.0"
stringstream@~0.0.4: stringstream@~0.0.4:
version "0.0.6" version "0.0.6"
resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.6.tgz#7880225b0d4ad10e30927d167a1d6f2fd3b33a72" resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.6.tgz#7880225b0d4ad10e30927d167a1d6f2fd3b33a72"
@ -4663,6 +4892,14 @@ through2@3.0.2:
inherits "^2.0.4" inherits "^2.0.4"
readable-stream "2 || 3" readable-stream "2 || 3"
through2@^2.0.0, through2@^2.0.2, through2@^2.0.3:
version "2.0.5"
resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd"
integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==
dependencies:
readable-stream "~2.3.6"
xtend "~4.0.1"
through@~2.3.4: through@~2.3.4:
version "2.3.8" version "2.3.8"
resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
@ -4857,7 +5094,7 @@ use@^3.1.0:
resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==
util-deprecate@^1.0.1: util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
@ -4877,6 +5114,11 @@ uuid@8.1.0:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.1.0.tgz#6f1536eb43249f473abc6bd58ff983da1ca30d8d" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.1.0.tgz#6f1536eb43249f473abc6bd58ff983da1ca30d8d"
integrity sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg== integrity sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg==
uuid@8.3.2, uuid@^8.3.0, uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
uuid@^3.0.0: uuid@^3.0.0:
version "3.0.1" version "3.0.1"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1"
@ -4887,11 +5129,6 @@ uuid@^3.3.2:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
uuid@^8.3.0, uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
v8-to-istanbul@^7.0.0: v8-to-istanbul@^7.0.0:
version "7.1.2" version "7.1.2"
resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz#30898d1a7fa0c84d225a2c1434fb958f290883c1" resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz#30898d1a7fa0c84d225a2c1434fb958f290883c1"
@ -5084,7 +5321,7 @@ xmlchars@^2.2.0:
resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"
integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==
xtend@^4.0.2, xtend@~4.0.0: xtend@^4.0.2, xtend@~4.0.0, xtend@~4.0.1:
version "4.0.2" version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==

View file

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -38,7 +38,7 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "^1.2.1",
-    "@budibase/string-templates": "^1.0.105-alpha.41",
+    "@budibase/string-templates": "^1.0.122",
     "@spectrum-css/actionbutton": "^1.0.1",
     "@spectrum-css/actiongroup": "^1.0.1",
     "@spectrum-css/avatar": "^3.0.2",

View file

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {
@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.105-alpha.41",
-    "@budibase/client": "^1.0.105-alpha.41",
-    "@budibase/frontend-core": "^1.0.105-alpha.41",
-    "@budibase/string-templates": "^1.0.105-alpha.41",
+    "@budibase/bbui": "^1.0.122",
+    "@budibase/client": "^1.0.122",
+    "@budibase/frontend-core": "^1.0.122",
+    "@budibase/string-templates": "^1.0.122",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",

View file

@@ -60,6 +60,7 @@ export function getBindings({
     )
     const label = path == null ? column : `${path}.0.${column}`
+    const binding = path == null ? `[${column}]` : `${path}.0.[${column}]`
     // only supply a description for relationship paths
     const description =
       path == null
@@ -73,8 +74,8 @@
       description,
       // don't include path, it messes things up, relationship path
       // will be replaced by the main array binding
-      readableBinding: column,
-      runtimeBinding: `[${column}]`,
+      readableBinding: label,
+      runtimeBinding: binding,
     })
   }
   return bindings

View file

@@ -15,7 +15,6 @@
   import ArrayRenderer from "components/common/renderers/ArrayRenderer.svelte"
   import ConfirmDialog from "components/common/ConfirmDialog.svelte"
   import { goto } from "@roxi/routify"
-  import GoogleButton from "../_components/GoogleButton.svelte"

   export let datasource
   export let save
@@ -161,11 +160,6 @@
           Fetch tables
         </Button>
         <Button cta icon="Add" on:click={createNewTable}>New table</Button>
-        {#if integration.auth}
-          {#if integration.auth.type === "google"}
-            <GoogleButton {datasource} />
-          {/if}
-        {/if}
       </div>
     </div>
     <Body>

View file

@@ -136,7 +136,7 @@
       notifications.success("Request sent successfully")
     }
   } catch (error) {
-    notifications.error("Error running query")
+    notifications.error(`Query Error: ${error.message}`)
   }
 }

View file

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {

View file

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.105-alpha.41",
-    "@budibase/frontend-core": "^1.0.105-alpha.41",
-    "@budibase/string-templates": "^1.0.105-alpha.41",
+    "@budibase/bbui": "^1.0.122",
+    "@budibase/frontend-core": "^1.0.122",
+    "@budibase/string-templates": "^1.0.122",
     "@spectrum-css/button": "^3.0.3",
     "@spectrum-css/card": "^3.0.3",
     "@spectrum-css/divider": "^1.0.3",

View file

@@ -34,6 +34,7 @@ export const API = createAPIClient({
   // Or we could check error.status and redirect to login on a 403 etc.
   onError: error => {
     const { status, method, url, message, handled } = error || {}
+    const ignoreErrorUrls = ["bbtel", "/api/global/self"]

     // Log any errors that we haven't manually handled
     if (!handled) {
@@ -45,7 +46,14 @@ export const API = createAPIClient({
     if (message) {
       // Don't notify if the URL contains the word analytics as it may be
       // blocked by browser extensions
-      if (!url?.includes("analytics")) {
+      let ignore = false
+      for (let ignoreUrl of ignoreErrorUrls) {
+        if (url?.includes(ignoreUrl)) {
+          ignore = true
+          break
+        }
+      }
+      if (!ignore) {
         notificationStore.actions.error(message)
       }
     }
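
Note: the loop above replaces the single hard-coded "analytics" check with a configurable ignore list, so telemetry ("bbtel") and self-lookup failures no longer surface as user-facing notifications. For illustration only, an equivalent and more compact form of the same check:

// sketch: the same ignore-list test expressed with Array.prototype.some
const ignore = ignoreErrorUrls.some(ignoreUrl => url?.includes(ignoreUrl))
if (!ignore) {
  notificationStore.actions.error(message)
}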

View file

@@ -36,8 +36,13 @@
   div {
     font-style: italic;
   }
+  @media (hover: hover) {
     .hoverable:hover {
       color: var(--spectrum-alias-icon-color-selected-hover) !important;
       cursor: pointer;
     }
+  }
+  .hoverable:active {
+    color: var(--spectrum-alias-icon-color-selected-hover) !important;
+  }
 </style>

File diff suppressed because it is too large

View file

@@ -1,12 +1,12 @@
 {
   "name": "@budibase/frontend-core",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "description": "Budibase frontend core libraries used in builder and client",
   "author": "Budibase",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "dependencies": {
-    "@budibase/bbui": "^1.0.105-alpha.41",
+    "@budibase/bbui": "^1.0.122",
     "lodash": "^4.17.21",
     "svelte": "^3.46.2"
   }

View file

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "description": "Budibase Web Server",
   "main": "src/index.ts",
   "repository": {
@@ -68,10 +68,9 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "^10.0.3",
-    "@budibase/backend-core": "^1.0.105-alpha.41",
-    "@budibase/client": "^1.0.105-alpha.41",
-    "@budibase/pro": "1.0.105-alpha.41",
-    "@budibase/string-templates": "^1.0.105-alpha.41",
+    "@budibase/backend-core": "^1.0.122",
+    "@budibase/client": "^1.0.122",
+    "@budibase/string-templates": "^1.0.122",
     "@bull-board/api": "^3.7.0",
     "@bull-board/koa": "^3.7.0",
     "@elastic/elasticsearch": "7.10.0",
@@ -121,7 +120,7 @@
     "pg": "8.5.1",
     "pino-pretty": "4.0.0",
     "posthog-node": "^1.1.4",
-    "pouchdb": "7.2.1",
+    "pouchdb": "7.3.0",
     "pouchdb-adapter-memory": "^7.2.1",
     "pouchdb-all-dbs": "1.0.2",
     "pouchdb-find": "^7.2.2",

View file

@@ -2,7 +2,8 @@
 const yargs = require("yargs")
 const fs = require("fs")
 const { join } = require("path")
-const CouchDB = require("../src/db")
+require("../src/db").init()
+const { doWithDB } = require("@budibase/backend-core/db")
 // load environment
 const env = require("../src/environment")
 const {
@@ -47,14 +48,15 @@ yargs
       const writeStream = fs.createWriteStream(join(exportPath, "dump.text"))
       // perform couch dump
-      const instanceDb = new CouchDB(appId)
-      await instanceDb.dump(writeStream, {
+      await doWithDB(appId, async db => {
+        return db.dump(writeStream, {
           filter: doc =>
             !(
               doc._id.includes(USER_METDATA_PREFIX) ||
               doc.includes(LINK_USER_METADATA_PREFIX)
             ),
         })
+      })
       console.log(`Template ${name} exported to ${exportPath}`)
     }
   )
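
Note: this script moves from constructing a `CouchDB` instance directly to the `doWithDB` helper from `@budibase/backend-core/db`, which scopes the database handle to a callback. A minimal sketch of the pattern, assuming only what is visible in this commit — `doWithDB(dbName, callback)` returning the callback's result, with the helper responsible for the handle's lifecycle:

// sketch only - not the actual Budibase implementation
const { doWithDB } = require("@budibase/backend-core/db")

async function getDocCount(appId) {
  return doWithDB(appId, async db => {
    const info = await db.info() // standard PouchDB/CouchDB info call
    return info.doc_count
  })
}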

View file

@@ -26,6 +26,7 @@ CREATE TABLE Products (
   updated time
 );
 INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
+INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
 INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');

View file

@@ -1,50 +0,0 @@
-/**
- * Script to replicate your PouchDb (in your home directory) to a remote CouchDB
- * USAGE...
- * node scripts/replicateApp <app_name> <remote_url>
- * e.g. node scripts/replicateApp Mike http://admin:password@127.0.0.1:5984
- */
-
-const CouchDB = require("../src/db")
-const { DocumentTypes } = require("../src/db/utils")
-const { getAllDbs } = require("@budibase/backend-core/db")
-
-const appName = process.argv[2].toLowerCase()
-const remoteUrl = process.argv[3]
-
-console.log(`Replicating from ${appName} to ${remoteUrl}/${appName}`)
-
-const run = async () => {
-  const dbs = await getAllDbs()
-  const appDbNames = dbs.filter(dbName => dbName.startsWith("inst_app"))
-  let apps = []
-  for (let dbName of appDbNames) {
-    const db = new CouchDB(dbName)
-    apps.push(db.get(DocumentTypes.APP_METADATA))
-  }
-  apps = await Promise.all(apps)
-  const app = apps.find(
-    a => a.name === appName || a.name.toLowerCase() === appName
-  )
-
-  if (!app) {
-    console.log(
-      `Could not find app... apps: ${apps.map(app => app.name).join(", ")}`
-    )
-    return
-  }
-
-  const instanceDb = new CouchDB(app.appId)
-  const remoteDb = new CouchDB(`${remoteUrl}/${appName}`)
-
-  instanceDb.replicate
-    .to(remoteDb)
-    .on("complete", function () {
-      console.log("SUCCESS!")
-    })
-    .on("error", function (err) {
-      console.log(`FAILED: ${err}`)
-    })
-}
-
-run()

View file

@@ -131,7 +131,7 @@ async function createInstance(template: any) {
   const tenantId = isMultiTenant() ? getTenantId() : null
   const baseAppId = generateAppID(tenantId)
   const appId = generateDevAppID(baseAppId)
-  updateAppId(appId)
+  await updateAppId(appId)

   const db = getAppDB()
   await db.put({
@@ -463,6 +463,8 @@ export const sync = async (ctx: any, next: any) => {
     })
   } catch (err) {
     error = err
+  } finally {
+    await replication.close()
   }

   // sync the users
View file

@@ -105,7 +105,6 @@ async function deployApp(deployment: any) {
     const replication = new Replication(config)
     console.log("Replication object created")
     await replication.replicate()
-
     console.log("replication complete.. replacing app meta doc")
     const db = getProdAppDB()
@@ -126,6 +125,8 @@
       ...err,
       message: `Deployment Failed: ${err.message}`,
     }
+  } finally {
+    await replication.close()
   }
 }
View file

@@ -83,7 +83,9 @@ exports.revert = async ctx => {
   try {
     const db = getProdAppDB({ skip_setup: true })
     const info = await db.info()
-    if (info.error) throw info.error
+    if (info.error) {
+      throw info.error
+    }
     const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
     if (
       !deploymentDoc.history ||
@@ -95,12 +97,11 @@ exports.revert = async ctx => {
     return ctx.throw(400, "App has not yet been deployed")
   }

-  try {
   const replication = new Replication({
     source: productionAppId,
     target: appId,
   })

+  try {
     await replication.rollback()
     // update appID in reverted app to be dev version again
     const db = getAppDB()
@@ -114,6 +115,8 @@
     }
   } catch (err) {
     ctx.throw(400, `Unable to revert. ${err}`)
+  } finally {
+    await replication.close()
   }
 }
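
Note: this hunk, together with the sync and deploy changes above, gives `Replication` one consistent lifecycle: construct it before the `try`, do the work inside, and close it in a `finally` so the handle is released whether the operation succeeds or throws. A condensed sketch, assuming only the methods shown in these diffs (`replicate`, `rollback`, `close`); `source` and `target` are placeholders:

// sketch of the shared lifecycle used by sync, deploy and revert
const replication = new Replication({ source, target })
try {
  await replication.rollback()
} finally {
  // runs on success and on error alike, so connections are never leaked
  await replication.close()
}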

View file

@@ -1,12 +1,4 @@
-const bulkDocs = jest.fn()
-const db = jest.fn(() => {
-  return {
-    bulkDocs
-  }
-})
-jest.mock("../../../../../db", () => db)
-
-require("@budibase/backend-core").init(require("../../../../../db"))
+const TestConfig = require("../../../../../tests/utilities/TestConfiguration")

 const { RestImporter } = require("../index")
@@ -48,6 +40,12 @@ const datasets = {
 }

 describe("Rest Importer", () => {
+  const config = new TestConfig(false)
+
+  beforeEach(async () => {
+    await config.init()
+  })
+
   let restImporter

   const init = async (data) => {
@@ -105,11 +103,9 @@ describe("Rest Importer", () => {
   const testImportQueries = async (key, data, assertions) => {
     await init(data)
-    bulkDocs.mockReturnValue([])
     const importResult = await restImporter.importQueries("datasourceId")
     expect(importResult.errorQueries.length).toBe(0)
     expect(importResult.queries.length).toBe(assertions[key].count)
-    expect(bulkDocs).toHaveBeenCalledTimes(1)
     jest.clearAllMocks()
   }
View file

@@ -323,6 +323,28 @@ module External {
       return { row: newRow, manyRelationships }
     }

+    squashRelationshipColumns(
+      table: Table,
+      row: Row,
+      relationships: RelationshipsJson[]
+    ): Row {
+      for (let relationship of relationships) {
+        const linkedTable = this.tables[relationship.tableName]
+        if (!linkedTable || !row[relationship.column]) {
+          continue
+        }
+        const display = linkedTable.primaryDisplay
+        for (let key of Object.keys(row[relationship.column])) {
+          const related: Row = row[relationship.column][key]
+          row[relationship.column][key] = {
+            primaryDisplay: display ? related[display] : undefined,
+            _id: related._id,
+          }
+        }
+      }
+      return row
+    }
+
     /**
      * This iterates through the returned rows and works out what elements of the rows
      * actually match up to another row (based on primary keys) - this is pretty specific
@@ -354,12 +376,6 @@ module External {
         if (!linked._id) {
           continue
         }
-        // if not returning full docs then get the minimal links out
-        const display = linkedTable.primaryDisplay
-        linked = {
-          primaryDisplay: display ? linked[display] : undefined,
-          _id: linked._id,
-        }
         columns[relationship.column] = linked
       }
       for (let [column, related] of Object.entries(columns)) {
@@ -417,7 +433,9 @@ module External {
         relationships
       )
     }
-    return processFormulas(table, Object.values(finalRows))
+    return processFormulas(table, Object.values(finalRows)).map((row: Row) =>
+      this.squashRelationshipColumns(table, row, relationships)
+    )
   }

   /**
View file

@@ -6,6 +6,7 @@ const {
   DocumentTypes,
   InternalTables,
 } = require("../../../db/utils")
+const { dangerousGetDB } = require("@budibase/backend-core/db")
 const userController = require("../user")
 const {
   inputProcessing,
@@ -250,7 +251,7 @@ exports.fetch = async ctx => {
 }

 exports.find = async ctx => {
-  const db = getAppDB()
+  const db = dangerousGetDB(ctx.appId)
   const table = await db.get(ctx.params.tableId)
   let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId)
   row = await outputProcessing(table, row)

View file

@@ -54,7 +54,7 @@ exports.destroy = async ctx => {
 }

 exports.buildSchema = async ctx => {
-  updateAppId(ctx.params.instance)
+  await updateAppId(ctx.params.instance)
   const db = getAppDB()
   const webhook = await db.get(ctx.params.id)
   webhook.bodySchema = toJsonSchema(ctx.request.body)
@@ -80,7 +80,7 @@ exports.buildSchema = async ctx => {
 exports.trigger = async ctx => {
   const prodAppId = getProdAppID(ctx.params.instance)
-  updateAppId(prodAppId)
+  await updateAppId(prodAppId)
   try {
     const db = getAppDB()
     const webhook = await db.get(ctx.params.id)
View file

@@ -2,6 +2,7 @@ const { outputProcessing } = require("../../../utilities/rowProcessor")
 const setup = require("./utilities")
 const { basicRow } = setup.structures
 const { doInAppContext } = require("@budibase/backend-core/context")
+const { doInTenant } = require("@budibase/backend-core/tenancy")

 // mock the fetch for the search system
 jest.mock("node-fetch")
@@ -340,6 +341,7 @@ describe("/rows", () => {
   describe("fetchEnrichedRows", () => {
     it("should allow enriching some linked rows", async () => {
+      const { table, firstRow, secondRow } = await doInTenant(setup.structures.TENANT_ID, async () => {
        const table = await config.createLinkedTable()
        const firstRow = await config.createRow({
          name: "Test Contact",
@@ -352,6 +354,8 @@
          link: [{_id: firstRow._id}],
          tableId: table._id,
        })
+        return { table, firstRow, secondRow }
+      })

       // test basic enrichment
       const resBasic = await request
View file

@@ -1,4 +1,5 @@
-const { checkBuilderEndpoint, getDB } = require("./utilities/TestFunctions")
+const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
+const { getAppDB } = require("@budibase/backend-core/context")
 const setup = require("./utilities")
 const { basicTable } = setup.structures
@@ -122,7 +123,7 @@ describe("/tables", () => {
   describe("indexing", () => {
     it("should be able to create a table with indexes", async () => {
-      const db = getDB(config)
+      const db = getAppDB(config)
       const indexCount = (await db.getIndexes()).total_rows
       const table = basicTable()
       table.indexes = ["name"]
View file

@@ -1,8 +1,8 @@
 // need to load environment first
 import { ExtendableContext } from "koa"
 import * as env from "./environment"
-const CouchDB = require("./db")
-require("@budibase/backend-core").init(CouchDB)
+import db from "./db"
+db.init()
 const Koa = require("koa")
 const destroyable = require("server-destroy")
 const koaBody = require("koa-body")

View file

@@ -1,6 +1,7 @@
 const { execSync } = require("child_process")
 const { processStringSync } = require("@budibase/string-templates")
 const automationUtils = require("../automationUtils")
+const environment = require("../../environment")

 exports.definition = {
   name: "Bash Scripting",
@@ -51,7 +52,9 @@ exports.run = async function ({ inputs, context }) {
   let stdout,
     success = true
   try {
-    stdout = execSync(command, { timeout: 500 }).toString()
+    stdout = execSync(command, {
+      timeout: environment.QUERY_THREAD_TIMEOUT || 500,
+    }).toString()
   } catch (err) {
     stdout = err.message
     success = false
View file

@@ -26,7 +26,7 @@ describe("test the delete row action", () => {
     expect(res.row._id).toEqual(row._id)
     let error
     try {
-      await config.getRow(table._id, res.id)
+      await config.getRow(table._id, res.row._id)
     } catch (err) {
       error = err
     }

View file

@@ -1,12 +1,11 @@
 import { Thread, ThreadType } from "../threads"
 import { definitions } from "./triggerInfo"
 import * as webhooks from "../api/controllers/webhook"
-import CouchDB from "../db"
 import { queue } from "./bullboard"
 import newid from "../db/newid"
 import { updateEntityMetadata } from "../utilities"
 import { MetadataTypes, WebhookType } from "../constants"
-import { getProdAppID } from "@budibase/backend-core/db"
+import { getProdAppID, doWithDB } from "@budibase/backend-core/db"
 import { cloneDeep } from "lodash/fp"
 import { getAppDB, getAppId } from "@budibase/backend-core/context"
 import { tenancy } from "@budibase/backend-core"
@@ -113,10 +112,11 @@ export async function enableCronTrigger(appId: any, automation: any) {
     // can't use getAppDB here as this is likely to be called from dev app,
     // but this call could be for dev app or prod app, need to just use what
     // was passed in
-    const db = new CouchDB(appId)
-    const response = await db.put(automation)
-    automation._id = response.id
-    automation._rev = response.rev
+    await doWithDB(appId, async (db: any) => {
+      const response = await db.put(automation)
+      automation._id = response.id
+      automation._rev = response.rev
+    })
   }
   return automation
 }
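Editor's note: the recurring pattern across this commit is that callers no longer construct `new CouchDB(appId)` themselves; they hand a callback to `doWithDB`, which owns the handle's lifecycle. A minimal sketch of the calling convention, assuming only what the hunks show (the helper's exact options are not part of this diff, and touchMetadata is a hypothetical example):

const { doWithDB } = require("@budibase/backend-core/db")

// the callback receives a managed PouchDB instance and its return value
// becomes doWithDB's return value, so the handle never escapes the callback
async function touchMetadata(appId, docId) {
  return doWithDB(appId, async db => {
    const doc = await db.get(docId)
    doc.updatedAt = new Date().toISOString()
    await db.put(doc)
    return doc
  })
}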

View file

@@ -1,31 +0,0 @@
const PouchDB = require("pouchdb")
const { getCouchUrl } = require("@budibase/backend-core/db")
const replicationStream = require("pouchdb-replication-stream")
const allDbs = require("pouchdb-all-dbs")
const find = require("pouchdb-find")
const env = require("../environment")
const COUCH_DB_URL = getCouchUrl() || "http://localhost:4005"
PouchDB.plugin(replicationStream.plugin)
PouchDB.plugin(find)
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
let POUCH_DB_DEFAULTS = {
prefix: COUCH_DB_URL,
}
if (env.isTest()) {
PouchDB.plugin(require("pouchdb-adapter-memory"))
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "memory",
}
}
const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
// have to still have pouch alldbs for testing
allDbs(Pouch)
module.exports = Pouch

View file

@@ -1,18 +1,16 @@
-const PouchDB = require("pouchdb")
-const memory = require("pouchdb-adapter-memory")
 const newid = require("./newid")

-PouchDB.plugin(memory)
-const Pouch = PouchDB.defaults({
-  prefix: undefined,
-  adapter: "memory",
-})
+// bypass the main application db config
+// use in memory pouchdb directly
+const { getPouch, closeDB } = require("@budibase/backend-core/db")
+const Pouch = getPouch({ inMemory: true })

 exports.runView = async (view, calculation, group, data) => {
   // use a different ID each time for the DB, make sure they
   // are always unique for each query, don't want overlap
   // which could cause 409s
   const db = new Pouch(newid())
+  try {
     // write all the docs to the in memory Pouch (remove revs)
     await db.bulkDocs(
       data.map(row => ({
@@ -43,6 +41,9 @@ exports.runView = async (view, calculation, group, data) => {
       row._rev = found._rev
     }
   }
-  await db.destroy()
   return response
+  } finally {
+    await db.destroy()
+    await closeDB(db)
+  }
 }
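Editor's note: the shape of this change is the classic try/finally cleanup — the throwaway in-memory database must be destroyed even when the view code throws. `closeDB` is assumed (per the import above) to release backend-core's cached handle as well:

async function runView(data) {
  const db = new Pouch(newid())
  try {
    // ... write docs, run the view, collect the response ...
  } finally {
    // runs on success and on error alike, so temp DBs never leak
    await db.destroy()
    await closeDB(db)
  }
}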

View file

@@ -1,3 +1,16 @@
-const client = require("./client")
+const core = require("@budibase/backend-core")
+const env = require("../environment")

-module.exports = client
+exports.init = () => {
+  const dbConfig = {
+    replication: true,
+    find: true,
+  }
+  if (env.isTest()) {
+    dbConfig.inMemory = true
+    dbConfig.allDbs = true
+  }
+  core.init({ db: dbConfig })
+}
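Editor's note: the server no longer exports a PouchDB constructor; it exposes an `init()` that forwards a db config to backend-core, which now owns the Pouch setup. A hedged sketch of the boot order this implies (config keys beyond those in the hunk are assumptions):

// must run before any code asks backend-core for a database handle
require("./db").init()
// afterwards, modules use the shared helpers instead of a Pouch class
const { doWithDB } = require("@budibase/backend-core/db")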

View file

@@ -1,8 +1,8 @@
 const TestConfig = require("../../tests/utilities/TestConfiguration")
 const { basicTable } = require("../../tests/utilities/structures")
 const linkUtils = require("../linkedRows/linkUtils")
-const CouchDB = require("../index")
 const { getAppDB } = require("@budibase/backend-core/context")
+const { doWithDB } = require("@budibase/backend-core/db")

 describe("test link functionality", () => {
   const config = new TestConfig(false)
@@ -48,13 +48,14 @@ describe("test link functionality", () => {
   describe("getLinkDocuments", () => {
     it("should create the link view when it doesn't exist", async () => {
       // create the DB and a very basic app design DB
-      const db = new CouchDB("test")
+      const output = await doWithDB("test", async db => {
         await db.put({ _id: "_design/database", views: {} })
-      const output = await linkUtils.getLinkDocuments({
-        tableId: "test",
-        rowId: "test",
-        includeDocs: false,
-      })
+        return await linkUtils.getLinkDocuments({
+          tableId: "test",
+          rowId: "test",
+          includeDocs: false,
+        })
+      })
       expect(Array.isArray(output)).toBe(true)
     })
   })

View file

@@ -24,6 +24,12 @@ if (!LOADED && isDev() && !isTest()) {
   LOADED = true
 }

+function parseIntSafe(number) {
+  if (number) {
+    return parseInt(number)
+  }
+}
+
 let inThread = false

 module.exports = {
@@ -61,6 +67,7 @@ module.exports = {
   SENDGRID_API_KEY: process.env.SENDGRID_API_KEY,
   DYNAMO_ENDPOINT: process.env.DYNAMO_ENDPOINT,
   POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
+  QUERY_THREAD_TIMEOUT: parseIntSafe(process.env.QUERY_THREAD_TIMEOUT),
   // old - to remove
   CLIENT_ID: process.env.CLIENT_ID,
   BUDIBASE_DIR: process.env.BUDIBASE_DIR,
@@ -70,7 +77,6 @@ module.exports = {
   DEPLOYMENT_CREDENTIALS_URL: process.env.DEPLOYMENT_CREDENTIALS_URL,
   ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
   DISABLE_THREADING: process.env.DISABLE_THREADING,
-  QUERY_THREAD_TIMEOUT: process.env.QUERY_THREAD_TIMEOUT,
   SQL_MAX_ROWS: process.env.SQL_MAX_ROWS,
   _set(key, value) {
     process.env[key] = value
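Editor's note: `parseIntSafe` exists because env vars are strings (or undefined) while the thread timeout is used in numeric comparisons downstream. Its behaviour, copied from the hunk above:

function parseIntSafe(number) {
  if (number) {
    return parseInt(number)
  }
}

parseIntSafe("10000")   // 10000 (a number, usable in timeout maths)
parseIntSafe(undefined) // undefined, so callers' `|| 500` fallback applies
parseIntSafe("")        // undefined as well - empty strings are falsy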

View file

@@ -53,51 +53,55 @@ module CouchDBModule {
   class CouchDBIntegration implements IntegrationBase {
     private config: CouchDBConfig
-    private client: any
+    private readonly client: any

     constructor(config: CouchDBConfig) {
       this.config = config
       this.client = new PouchDB(`${config.url}/${config.database}`)
     }

-    async create(query: { json: object }) {
+    async query(
+      command: string,
+      errorMsg: string,
+      query: { json?: object; id?: string }
+    ) {
       try {
-        return this.client.post(query.json)
+        const response = await this.client[command](query.id || query.json)
+        await this.client.close()
+        return response
       } catch (err) {
-        console.error("Error writing to couchDB", err)
+        console.error(errorMsg, err)
         throw err
       }
     }

+    async create(query: { json: object }) {
+      return this.query("post", "Error writing to couchDB", query)
+    }
+
     async read(query: { json: object }) {
-      try {
-        const result = await this.client.allDocs({
+      const result = await this.query("allDocs", "Error querying couchDB", {
+        json: {
           include_docs: true,
           ...query.json,
-        })
+        },
+      })
       return result.rows.map((row: { doc: object }) => row.doc)
-      } catch (err) {
-        console.error("Error querying couchDB", err)
-        throw err
-      }
     }

     async update(query: { json: object }) {
-      try {
-        return this.client.put(query.json)
-      } catch (err) {
-        console.error("Error updating couchDB document", err)
-        throw err
-      }
+      return this.query("put", "Error updating couchDB document", query)
     }

     async delete(query: { id: string }) {
-      try {
-        return await this.client.remove(query.id)
-      } catch (err) {
-        console.error("Error deleting couchDB document", err)
-        throw err
-      }
+      const doc = await this.query(
+        "get",
+        "Cannot find doc to be deleted",
+        query
+      )
+      return this.query("remove", "Error deleting couchDB document", {
+        json: doc,
+      })
     }
   }
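Editor's note: the reworked `delete` does a `get` before `remove` because PouchDB's `remove` needs the full document (with `_id` and `_rev`); routing both calls through the generic `query` helper keeps the error handling in one place. A hedged sketch of the equivalent direct calls:

async function deleteDoc(client, query) {
  const doc = await client.get(query.id) // a 404 here surfaces as "Cannot find doc to be deleted"
  return client.remove(doc)              // remove() needs _id and _rev, hence the get first
}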

View file

@@ -131,11 +131,12 @@ module DynamoModule {
     constructor(config: DynamoDBConfig) {
       this.config = config
-      if (!this.config.endpoint) {
+      if (this.config.endpoint && !this.config.endpoint.includes("localhost")) {
         this.connect()
       }
       let options = {
         correctClockSkew: true,
+        region: this.config.region || AWS_REGION,
         endpoint: config.endpoint ? config.endpoint : undefined,
       }
       this.client = new AWS.DynamoDB.DocumentClient(options)

View file

@@ -16,6 +16,7 @@ module GoogleSheetsModule {
   const { getGlobalDB } = require("@budibase/backend-core/tenancy")
   const { getScopedConfig } = require("@budibase/backend-core/db")
   const { Configs } = require("@budibase/backend-core/constants")
+  const fetch = require("node-fetch")

   interface GoogleSheetsConfig {
     spreadsheetId: string
@@ -28,6 +29,16 @@ module GoogleSheetsModule {
     refreshToken: string
   }

+  interface AuthTokenRequest {
+    client_id: string
+    client_secret: string
+    refresh_token: string
+  }
+
+  interface AuthTokenResponse {
+    access_token: string
+  }
+
   const SCHEMA: Integration = {
     plus: true,
     auth: {
@@ -40,6 +51,7 @@ module GoogleSheetsModule {
     friendlyName: "Google Sheets",
     datasource: {
       spreadsheetId: {
+        display: "Google Sheet URL",
         type: DatasourceFieldTypes.STRING,
         required: true,
       },
@@ -135,6 +147,30 @@ module GoogleSheetsModule {
       return parts.length > 5 ? parts[5] : spreadsheetId
     }

+    async fetchAccessToken(
+      payload: AuthTokenRequest
+    ): Promise<AuthTokenResponse> {
+      const response = await fetch(
+        "https://www.googleapis.com/oauth2/v4/token",
+        {
+          method: "POST",
+          body: JSON.stringify({
+            ...payload,
+            grant_type: "refresh_token",
+          }),
+          headers: {
+            "Content-Type": "application/json",
+          },
+        }
+      )
+      if (response.status !== 200) {
+        throw new Error("Error authenticating with google sheets.")
+      }
+      return response.json()
+    }
+
     async connect() {
       try {
         // Initialise oAuth client
@@ -154,14 +190,18 @@ module GoogleSheetsModule {
         clientId: googleConfig.clientID,
         clientSecret: googleConfig.clientSecret,
       })
-      oauthClient.on("tokens", tokens => {
+      const tokenResponse = await this.fetchAccessToken({
+        client_id: googleConfig.clientID,
+        client_secret: googleConfig.clientSecret,
+        refresh_token: this.config.auth.refreshToken,
+      })
       oauthClient.setCredentials({
-        refresh_token: googleConfig.refreshToken,
-        access_token: tokens.access_token,
+        refresh_token: this.config.auth.refreshToken,
+        access_token: tokenResponse.access_token,
       })
-      })
-      oauthClient.credentials.access_token = this.config.auth.accessToken
-      oauthClient.credentials.refresh_token = this.config.auth.refreshToken
       this.client.useOAuth2Client(oauthClient)
       await this.client.loadInfo()
     } catch (err) {

View file

@@ -15,6 +15,7 @@ import {
 } from "./utils"
 import { DatasourcePlus } from "./base/datasourcePlus"
 import dayjs from "dayjs"
+const { NUMBER_REGEX } = require("../utilities")

 module MySQLModule {
   const mysql = require("mysql2/promise")
@@ -26,7 +27,8 @@ module MySQLModule {
     user: string
     password: string
     database: string
-    ssl?: object
+    ssl?: { [key: string]: any }
+    rejectUnauthorized: boolean
   }

   const SCHEMA: Integration = {
@@ -64,6 +66,11 @@ module MySQLModule {
         type: DatasourceFieldTypes.OBJECT,
         required: false,
       },
+      rejectUnauthorized: {
+        type: DatasourceFieldTypes.BOOLEAN,
+        default: true,
+        required: false,
+      },
     },
     query: {
       create: {
@@ -87,7 +94,7 @@ module MySQLModule {
       if (typeof binding !== "string") {
        continue
       }
-      const matches = binding.match(/^\d*$/g)
+      const matches = binding.match(NUMBER_REGEX)
       // check if number first
       if (matches && matches[0] !== "" && !isNaN(Number(matches[0]))) {
         bindings[i] = parseFloat(binding)
@@ -113,6 +120,16 @@ module MySQLModule {
     if (config.ssl && Object.keys(config.ssl).length === 0) {
       delete config.ssl
     }
+    // make sure this defaults to true
+    if (
+      config.rejectUnauthorized != null &&
+      !config.rejectUnauthorized &&
+      config.ssl
+    ) {
+      config.ssl.rejectUnauthorized = config.rejectUnauthorized
+    }
+    // @ts-ignore
+    delete config.rejectUnauthorized
     this.config = config
   }
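Editor's note: the normalisation above folds the new top-level `rejectUnauthorized` checkbox into the `ssl` object (where TLS options actually live for the driver) and strips the top-level key so it never reaches the connection. Illustrative input/output, values made up:

// user-supplied datasource config
const input = { ssl: { ca: "..." }, rejectUnauthorized: false }
// after the constructor's normalisation:
//   { ssl: { ca: "...", rejectUnauthorized: false } }
// when the checkbox is left at its default (true), ssl passes through
// untouched and certificate verification stays on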

View file

@@ -0,0 +1,125 @@
import { findHBSBlocks, processStringSync } from "@budibase/string-templates"
import { Integration } from "../../definitions/datasource"
import { DatasourcePlus } from "../base/datasourcePlus"
const CONST_CHAR_REGEX = new RegExp("'[^']*'", "g")
export function enrichQueryFields(
fields: { [key: string]: any },
parameters = {}
) {
const enrichedQuery: { [key: string]: any } = Array.isArray(fields) ? [] : {}
// enrich the fields with dynamic parameters
for (let key of Object.keys(fields)) {
if (fields[key] == null) {
continue
}
if (typeof fields[key] === "object") {
// enrich nested fields object
enrichedQuery[key] = enrichQueryFields(fields[key], parameters)
} else if (typeof fields[key] === "string") {
// enrich string value as normal
enrichedQuery[key] = processStringSync(fields[key], parameters, {
noEscaping: true,
noHelpers: true,
escapeNewlines: true,
})
} else {
enrichedQuery[key] = fields[key]
}
}
if (
enrichedQuery.json ||
enrichedQuery.customData ||
enrichedQuery.requestBody
) {
try {
enrichedQuery.json = JSON.parse(
enrichedQuery.json ||
enrichedQuery.customData ||
enrichedQuery.requestBody
)
} catch (err) {
// no json found, ignore
}
delete enrichedQuery.customData
}
return enrichedQuery
}
export function interpolateSQL(
fields: { [key: string]: any },
parameters: { [key: string]: any },
integration: DatasourcePlus
) {
let sql = fields.sql
if (!sql || typeof sql !== "string") {
return fields
}
const bindings = findHBSBlocks(sql)
let variables = [],
arrays = []
for (let binding of bindings) {
// look for array/list operations in the SQL statement, which will need to be handled later
const listRegexMatch = sql.match(
new RegExp(`(in|IN|In|iN)( )+[(]?${binding}[)]?`)
)
// check if the variable was used as part of a string concat e.g. 'Hello {{binding}}'
// start by finding all the instances of const character strings
const charConstMatch = sql.match(CONST_CHAR_REGEX) || []
// now look within them to see if a binding is used
const charConstBindingMatch = charConstMatch.find((string: any) =>
string.match(new RegExp(`'[^']*${binding}[^']*'`))
)
if (charConstBindingMatch) {
let [part1, part2] = charConstBindingMatch.split(binding)
part1 = `'${part1.substring(1)}'`
part2 = `'${part2.substring(0, part2.length - 1)}'`
sql = sql.replace(
charConstBindingMatch,
integration.getStringConcat([
part1,
integration.getBindingIdentifier(),
part2,
])
)
}
// generate SQL parameterised array
else if (listRegexMatch) {
arrays.push(binding)
// determine the length of the array
const value = enrichQueryFields([binding], parameters)[0]
.split(",")
.map((val: string) => val.trim())
// build a string like ($1, $2, $3)
let replacement = `${Array.apply(null, Array(value.length))
.map(() => integration.getBindingIdentifier())
.join(",")}`
// check if parentheses are needed
if (!listRegexMatch[0].includes(`(${binding})`)) {
replacement = `(${replacement})`
}
sql = sql.replace(binding, replacement)
} else {
sql = sql.replace(binding, integration.getBindingIdentifier())
}
variables.push(binding)
}
// replicate the knex structure
fields.sql = sql
fields.bindings = enrichQueryFields(variables, parameters)
// check for arrays in the data
let updated: string[] = []
for (let i = 0; i < variables.length; i++) {
if (arrays.includes(variables[i])) {
updated = updated.concat(
fields.bindings[i].split(",").map((val: string) => val.trim())
)
} else {
updated.push(fields.bindings[i])
}
}
fields.bindings = updated
return fields
}
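Editor's note: a hedged walk-through of what the extracted `interpolateSQL` produces. The import path mirrors the new file above; the integration instance and its `?`-style `getBindingIdentifier()` (MySQL-like) are assumptions for illustration:

const { interpolateSQL } = require("../integrations/queries/sql")

function demo(integration) {
  const fields = {
    sql: "SELECT * FROM users WHERE id IN {{ ids }} AND name = {{ name }}",
  }
  interpolateSQL(fields, { ids: "1, 2, 3", name: "bob" }, integration)
  // fields.sql      -> "SELECT * FROM users WHERE id IN (?,?,?) AND name = ?"
  // fields.bindings -> ["1", "2", "3", "bob"]
  return fields
}

The list binding expands to one placeholder per comma-separated value, and the bindings array is flattened to match, which is what the final loop over `variables` is doing.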

View file

@@ -7,7 +7,9 @@ class TestConfiguration {
     this.integration = new AirtableIntegration.integration(config)
     this.client = {
       create: jest.fn(),
-      select: jest.fn(),
+      select: jest.fn(() => ({
+        firstPage: jest.fn(() => []),
+      })),
       update: jest.fn(),
       destroy: jest.fn(),
     }

View file

@@ -2,8 +2,10 @@ jest.mock("pouchdb", () => function CouchDBMock() {
   this.post = jest.fn()
   this.allDocs = jest.fn(() => ({ rows: [] }))
   this.put = jest.fn()
+  this.get = jest.fn()
   this.remove = jest.fn()
   this.plugin = jest.fn()
+  this.close = jest.fn()
 })

 const CouchDBIntegration = require("../couchdb")
@@ -62,6 +64,7 @@ describe("CouchDB Integration", () => {
   it("calls the delete method with the correct params", async () => {
     const id = "1234"
     const response = await config.integration.delete({ id })
-    expect(config.integration.client.remove).toHaveBeenCalledWith(id)
+    expect(config.integration.client.get).toHaveBeenCalledWith(id)
+    expect(config.integration.client.remove).toHaveBeenCalled()
   })
 })

View file

@@ -5,7 +5,7 @@ const {
   checkDebounce,
   setDebounce,
 } = require("../utilities/redis")
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 const { DocumentTypes } = require("../db/utils")
 const { PermissionTypes } = require("@budibase/backend-core/permissions")
 const { app: appCache } = require("@budibase/backend-core/cache")
@@ -48,7 +48,7 @@ async function updateAppUpdatedAt(ctx) {
   if (ctx.method === "GET" || (await checkDebounce(appId))) {
     return
   }
-  const db = getDB(appId)
+  await doWithDB(appId, async db => {
     const metadata = await db.get(DocumentTypes.APP_METADATA)
     metadata.updatedAt = new Date().toISOString()
     const response = await db.put(metadata)
@@ -56,6 +56,7 @@ async function updateAppUpdatedAt(ctx) {
     await appCache.invalidateAppMetadata(appId, metadata)
     // set a new debounce record with a short TTL
     await setDebounce(appId, DEBOUNCE_TIME_SEC)
+  })
 }

 module.exports = async (ctx, permType) => {

View file

@@ -10,7 +10,6 @@ jest.mock("../../environment", () => ({
 const authorizedMiddleware = require("../authorized")
 const env = require("../../environment")
 const { PermissionTypes, PermissionLevels } = require("@budibase/backend-core/permissions")
-require("@budibase/backend-core").init(require("../../db"))
 const { doInAppContext } = require("@budibase/backend-core/context")

 const APP_ID = ""

View file

@@ -0,0 +1,120 @@
jest.mock("../../db")
jest.mock("../../utilities/usageQuota")
jest.mock("@budibase/backend-core/tenancy", () => ({
getTenantId: () => "testing123"
}))
const usageQuotaMiddleware = require("../usageQuota")
const usageQuota = require("../../utilities/usageQuota")
jest.mock("@budibase/backend-core/db")
const { dangerousGetDB } = require("@budibase/backend-core/db")
const env = require("../../environment")
class TestConfiguration {
constructor() {
this.throw = jest.fn()
this.next = jest.fn()
this.middleware = usageQuotaMiddleware
this.ctx = {
throw: this.throw,
next: this.next,
appId: "test",
request: {
body: {}
},
req: {
method: "POST",
url: "/applications"
}
}
usageQuota.useQuotas = () => true
}
executeMiddleware() {
return this.middleware(this.ctx, this.next)
}
setProd(bool) {
if (bool) {
env.isDev = () => false
env.isProd = () => true
this.ctx.user = { tenantId: "test" }
} else {
env.isDev = () => true
env.isProd = () => false
}
}
setMethod(method) {
this.ctx.req.method = method
}
setUrl(url) {
this.ctx.req.url = url
}
setBody(body) {
this.ctx.request.body = body
}
setFiles(files) {
this.ctx.request.files = { file: files }
}
}
describe("usageQuota middleware", () => {
let config
beforeEach(() => {
config = new TestConfiguration()
})
it("skips the middleware if there is no usage property or method", async () => {
await config.executeMiddleware()
expect(config.next).toHaveBeenCalled()
})
it("passes through to next middleware if document already exists", async () => {
config.setProd(true)
config.setBody({
_id: "test",
_rev: "test",
})
dangerousGetDB.mockImplementationOnce(() => ({
get: async () => true
}))
await config.executeMiddleware()
expect(config.next).toHaveBeenCalled()
})
it("throws if request has _id, but the document no longer exists", async () => {
config.setBody({
_id: "123",
_rev: "test",
})
config.setProd(true)
dangerousGetDB.mockImplementationOnce(() => ({
get: async () => {
throw new Error()
}
}))
await config.executeMiddleware()
expect(config.throw).toHaveBeenCalledWith(404, `${config.ctx.request.body._id} does not exist`)
})
it("calculates and persists the correct usage quota for the relevant action", async () => {
config.setUrl("/rows")
await config.executeMiddleware()
expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
expect(config.next).toHaveBeenCalled()
})
})

View file

@@ -1,12 +1,10 @@
-const { DocumentTypes } = require("@budibase/backend-core/db")
-const env = require("../../../environment")
+const { DocumentTypes, doWithDB } = require("@budibase/backend-core/db")
 const TestConfig = require("../../../tests/utilities/TestConfiguration")
 const migration = require("../appUrls")

 describe("run", () => {
   let config = new TestConfig(false)
-  const CouchDB = config.getCouch()

   beforeEach(async () => {
     await config.init()
@@ -16,14 +14,13 @@ describe("run", () => {
   it("runs successfully", async () => {
     const app = await config.createApp("testApp")
-    const appDb = new CouchDB(app.appId)
-    let metadata = await appDb.get(DocumentTypes.APP_METADATA)
-    delete metadata.url
-    await appDb.put(metadata)
-
-    await migration.run(appDb)
-
-    metadata = await appDb.get(DocumentTypes.APP_METADATA)
+    const metadata = await doWithDB(app.appId, async db => {
+      const metadataDoc = await db.get(DocumentTypes.APP_METADATA)
+      delete metadataDoc.url
+      await db.put(metadataDoc)
+      await migration.run(db)
+      return await db.get(DocumentTypes.APP_METADATA)
+    })
     expect(metadata.url).toEqual("/testapp")
   })
 })

View file

@@ -1,5 +1,6 @@
 const TestConfig = require("../../../tests/utilities/TestConfiguration")
-const { getGlobalDB } = require("@budibase/backend-core/tenancy")
+const { TENANT_ID } = require("../../../tests/utilities/structures")
+const { getGlobalDB, doInTenant } = require("@budibase/backend-core/tenancy")

 // mock email view creation
 const coreDb = require("@budibase/backend-core/db")
@@ -9,6 +10,7 @@ coreDb.createUserEmailView = createUserEmailView
 const migration = require("../userEmailViewCasing")

 describe("run", () => {
+  doInTenant(TENANT_ID, () => {
     let config = new TestConfig(false)
     const globalDb = getGlobalDB()
@@ -23,3 +25,4 @@ describe("run", () => {
       expect(createUserEmailView).toHaveBeenCalledTimes(1)
     })
   })
+})

View file

@@ -0,0 +1,41 @@
const { doInTenant, getGlobalDB } = require("@budibase/backend-core/tenancy")
const TestConfig = require("../../../../tests/utilities/TestConfiguration")
const { TENANT_ID } = require("../../../../tests/utilities/structures")
const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota")
const syncApps = require("../syncApps")
const env = require("../../../../environment")
describe("syncApps", () => {
let config = new TestConfig(false)
beforeEach(async () => {
await config.init()
env._set("USE_QUOTAS", 1)
})
afterAll(config.end)
it("runs successfully", async () => {
await doInTenant(TENANT_ID, async () => {
const db = getGlobalDB()
// create the usage quota doc and mock usages
await getUsageQuotaDoc(db)
await update(Properties.APPS, 3)
let usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.apps).toEqual(3)
// create an extra app to test the migration
await config.createApp("quota-test")
// migrate
await syncApps.run()
// assert the migration worked
usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.apps).toEqual(2)
})
})
})

View file

@@ -0,0 +1,45 @@
const { doInTenant, getGlobalDB } = require("@budibase/backend-core/tenancy")
const TestConfig = require("../../../../tests/utilities/TestConfiguration")
const { TENANT_ID } = require("../../../../tests/utilities/structures")
const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota")
const syncRows = require("../syncRows")
const env = require("../../../../environment")
describe("syncRows", () => {
let config = new TestConfig(false)
beforeEach(async () => {
await config.init()
env._set("USE_QUOTAS", 1)
})
afterAll(config.end)
it("runs successfully", async () => {
await doInTenant(TENANT_ID, async () => {
const db = getGlobalDB()
await getUsageQuotaDoc(db)
await update(Properties.ROW, 300)
let usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.rows).toEqual(300)
// app 1
await config.createTable()
await config.createRow()
// app 2
await config.createApp("second-app")
await config.createTable()
await config.createRow()
await config.createRow()
// migrate
await syncRows.run()
// assert the migration worked
usageDoc = await getUsageQuotaDoc(db)
expect(usageDoc.usageQuota.rows).toEqual(3)
})
})
})

View file

@@ -1,4 +1,3 @@
-import CouchDB from "../db"
 const {
   MIGRATION_TYPES,
   runMigrations,
@@ -64,5 +63,5 @@ export const MIGRATIONS: Migration[] = [
 ]

 export const migrate = async (options?: MigrationOptions) => {
-  await runMigrations(CouchDB, MIGRATIONS, options)
+  await runMigrations(MIGRATIONS, options)
 }

View file

@@ -5,3 +5,7 @@ declare module "@budibase/backend-core/context"
 declare module "@budibase/backend-core/cache"
 declare module "@budibase/backend-core/permissions"
 declare module "@budibase/backend-core/roles"
+declare module "@budibase/backend-core/constants"
+declare module "@budibase/backend-core/auth"
+declare module "@budibase/backend-core/sessions"
+declare module "@budibase/backend-core/encryption"

View file

@@ -1,6 +1,4 @@
-const core = require("@budibase/backend-core")
-const CouchDB = require("../../db")
-core.init(CouchDB)
+require("../../db").init()
 const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
 const env = require("../../environment")
 const {
@@ -20,7 +18,7 @@ const supertest = require("supertest")
 const { cleanup } = require("../../utilities/fileSystem")
 const { Cookies, Headers } = require("@budibase/backend-core/constants")
 const { jwt } = require("@budibase/backend-core/auth")
-const { getGlobalDB } = require("@budibase/backend-core/tenancy")
+const { doInTenant, doWithGlobalDB } = require("@budibase/backend-core/tenancy")
 const { createASession } = require("@budibase/backend-core/sessions")
 const { user: userCache } = require("@budibase/backend-core/cache")
 const newid = require("../../db/newid")
@@ -57,8 +55,20 @@ class TestConfiguration {
     return this.appId
   }

-  getCouch() {
-    return CouchDB
+  async doInContext(appId, task) {
+    if (!appId) {
+      appId = this.appId
+    }
+    return doInTenant(TENANT_ID, () => {
+      // check if already in a context
+      if (context.getAppId() == null && appId !== null) {
+        return context.doInAppContext(appId, async () => {
+          return task()
+        })
+      } else {
+        return task()
+      }
+    })
   }

   async _req(config, params, controlFunc) {
@@ -72,25 +82,17 @@ class TestConfiguration {
     request.request = {
       body: config,
     }
-    async function run() {
+    return this.doInContext(this.appId, async () => {
       if (params) {
         request.params = params
       }
       await controlFunc(request)
       return request.body
-    }
-    // check if already in a context
-    if (context.getAppId() == null && this.appId !== null) {
-      return context.doInAppContext(this.appId, async () => {
-        return run()
-      })
-    } else {
-      return run()
-    }
+    })
   }

   async generateApiKey(userId = GLOBAL_USER_ID) {
-    const db = getGlobalDB(TENANT_ID)
+    return doWithGlobalDB(TENANT_ID, async db => {
       const id = generateDevInfoID(userId)
       let devInfo
       try {
@@ -101,6 +103,7 @@ class TestConfiguration {
       devInfo.apiKey = encrypt(`${TENANT_ID}${SEPARATOR}${newid()}`)
       await db.put(devInfo)
       return devInfo.apiKey
+    })
   }

   async globalUser({
@@ -109,7 +112,7 @@ class TestConfiguration {
     email = EMAIL,
     roles,
   } = {}) {
-    const db = getGlobalDB(TENANT_ID)
+    return doWithGlobalDB(TENANT_ID, async db => {
       let existing
       try {
         existing = await db.get(id)
@@ -137,6 +140,7 @@ class TestConfiguration {
         _rev: resp._rev,
         ...user,
       }
+    })
   }

   // use a new id as the name to avoid name collisions
@@ -205,9 +209,12 @@ class TestConfiguration {
   async createApp(appName) {
     // create dev app
+    // clear any old app
+    this.appId = null
+    await context.updateAppId(null)
     this.app = await this._req({ name: appName }, null, controllers.app.create)
     this.appId = this.app.appId
-    context.updateAppId(this.appId)
+    await context.updateAppId(this.appId)

     // create production app
     this.prodApp = await this.deploy()

View file

@@ -26,6 +26,7 @@ export class Thread {
   count: any
   disableThreading: any
   workers: any
+  timeoutMs: any

   constructor(type: any, opts: any = { timeoutMs: null, count: 1 }) {
     this.type = type
@@ -41,6 +42,7 @@ export class Thread {
       maxConcurrentWorkers: this.count,
     }
     if (opts.timeoutMs) {
+      this.timeoutMs = opts.timeoutMs
       workerOpts.maxCallTime = opts.timeoutMs
     }
     this.workers = workerFarm(workerOpts, typeToFile(type))
@@ -57,7 +59,13 @@ export class Thread {
       fncToCall = this.workers
     }
     fncToCall(data, (err: any, response: any) => {
-      if (err) {
+      if (err && err.type === "TimeoutError") {
+        reject(
+          new Error(
+            `Query response time exceeded ${this.timeoutMs}ms timeout.`
+          )
+        )
+      } else if (err) {
        reject(err)
       } else {
         resolve(response)
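Editor's note: context for the new branch — when a worker-farm call overruns `maxCallTime`, the farm ends it and invokes the callback with an error whose `type` is `"TimeoutError"`, which is what gets translated into the friendlier message above. A minimal sketch (the worker module path is illustrative):

const workerFarm = require("worker-farm")

const workers = workerFarm(
  { maxCallTime: 10000, maxConcurrentWorkers: 1 },
  require.resolve("./query") // illustrative worker module
)
workers({ some: "data" }, (err, response) => {
  if (err && err.type === "TimeoutError") {
    // the call exceeded maxCallTime - surface a readable error instead
  }
})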

View file

@@ -2,12 +2,13 @@ const threadUtils = require("./utils")
 threadUtils.threadSetup()
 const ScriptRunner = require("../utilities/scriptRunner")
 const { integrations } = require("../integrations")
-const {
-  processStringSync,
-  findHBSBlocks,
-} = require("@budibase/string-templates")
+const { processStringSync } = require("@budibase/string-templates")
 const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
 const { isSQL } = require("../integrations/utils")
+const {
+  enrichQueryFields,
+  interpolateSQL,
+} = require("../integrations/queries/sql")

 class QueryRunner {
   constructor(input, flags = { noRecursiveQuery: false }) {
@@ -27,69 +28,6 @@ class QueryRunner {
     this.hasRerun = false
   }

-  interpolateSQL(fields, parameters, integration) {
-    let sql = fields.sql
-    if (!sql) {
-      return fields
-    }
-    const bindings = findHBSBlocks(sql)
-    let variables = [],
-      arrays = []
-    for (let binding of bindings) {
-      // look for array/list operations in the SQL statement, which will need handled later
-      const listRegex = new RegExp(`(in|IN|In|iN)( )+${binding}`)
-      const listRegexMatch = sql.match(listRegex)
-      // check if the variable was used as part of a string concat e.g. 'Hello {{binding}}'
-      const charConstRegex = new RegExp(`'[^']*${binding}[^']*'`)
-      const charConstMatch = sql.match(charConstRegex)
-      if (charConstMatch) {
-        let [part1, part2] = charConstMatch[0].split(binding)
-        part1 = `'${part1.substring(1)}'`
-        part2 = `'${part2.substring(0, part2.length - 1)}'`
-        sql = sql.replace(
-          charConstMatch[0],
-          integration.getStringConcat([
-            part1,
-            integration.getBindingIdentifier(),
-            part2,
-          ])
-        )
-      }
-      // generate SQL parameterised array
-      else if (listRegexMatch) {
-        arrays.push(binding)
-        // determine the length of the array
-        const value = this.enrichQueryFields([binding], parameters)[0].split(
-          ","
-        )
-        // build a string like ($1, $2, $3)
-        sql = sql.replace(
-          binding,
-          `(${Array.apply(null, Array(value.length))
-            .map(() => integration.getBindingIdentifier())
-            .join(",")})`
-        )
-      } else {
-        sql = sql.replace(binding, integration.getBindingIdentifier())
-      }
-      variables.push(binding)
-    }
-    // replicate the knex structure
-    fields.sql = sql
-    fields.bindings = this.enrichQueryFields(variables, parameters)
-    // check for arrays in the data
-    let updated = []
-    for (let i = 0; i < variables.length; i++) {
-      if (arrays.includes(variables[i])) {
-        updated = updated.concat(fields.bindings[i].split(","))
-      } else {
-        updated.push(fields.bindings[i])
-      }
-    }
-    fields.bindings = updated
-    return fields
-  }
-
   async execute() {
     let { datasource, fields, queryVerb, transformer } = this
     const Integration = integrations[datasource.source]
@@ -103,9 +41,9 @@ class QueryRunner {
     let query
     // handle SQL injections by interpolating the variables
     if (isSQL(datasource)) {
-      query = this.interpolateSQL(fields, parameters, integration)
+      query = interpolateSQL(fields, parameters, integration)
     } else {
-      query = this.enrichQueryFields(fields, parameters)
+      query = enrichQueryFields(fields, parameters)
     }

     // Add pagination values for REST queries
@@ -250,47 +188,6 @@ class QueryRunner {
     }
     return parameters
   }

-  enrichQueryFields(fields, parameters = {}) {
-    const enrichedQuery = Array.isArray(fields) ? [] : {}
-    // enrich the fields with dynamic parameters
-    for (let key of Object.keys(fields)) {
-      if (fields[key] == null) {
-        continue
-      }
-      if (typeof fields[key] === "object") {
-        // enrich nested fields object
-        enrichedQuery[key] = this.enrichQueryFields(fields[key], parameters)
-      } else if (typeof fields[key] === "string") {
-        // enrich string value as normal
-        enrichedQuery[key] = processStringSync(fields[key], parameters, {
-          noEscaping: true,
-          noHelpers: true,
-          escapeNewlines: true,
-        })
-      } else {
-        enrichedQuery[key] = fields[key]
-      }
-    }
-    if (
-      enrichedQuery.json ||
-      enrichedQuery.customData ||
-      enrichedQuery.requestBody
-    ) {
-      try {
-        enrichedQuery.json = JSON.parse(
-          enrichedQuery.json ||
-            enrichedQuery.customData ||
-            enrichedQuery.requestBody
-        )
-      } catch (err) {
-        // no json found, ignore
-      }
-      delete enrichedQuery.customData
-    }
-    return enrichedQuery
-  }
 }

 module.exports = (input, callback) => {

View file

@@ -1,6 +1,5 @@
 const env = require("../environment")
-const CouchDB = require("../db")
-const { init } = require("@budibase/backend-core")
+const db = require("../db")
 const redis = require("@budibase/backend-core/redis")
 const { SEPARATOR } = require("@budibase/backend-core/db")
@@ -25,7 +24,7 @@ exports.threadSetup = () => {
   }
   // when thread starts, make sure it is recorded
   env.setInThread()
-  init(CouchDB)
+  db.init()
 }

 function makeVariableKey(queryId, variable) {

View file

@@ -2,7 +2,7 @@ const { budibaseTempDir } = require("../budibaseDir")
 const fs = require("fs")
 const { join } = require("path")
 const uuid = require("uuid/v4")
-const CouchDB = require("../../db")
+const { doWithDB } = require("@budibase/backend-core/db")
 const { ObjectStoreBuckets } = require("../../constants")
 const {
   upload,
@@ -151,12 +151,11 @@ exports.streamBackup = async appId => {
 * @return {*} either a readable stream or a string
 */
 exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
-  const instanceDb = new CouchDB(dbName)
+  return doWithDB(dbName, async db => {
     // Stream the dump if required
     if (stream) {
       const memStream = new MemoryStream()
-      instanceDb.dump(memStream, { filter })
+      db.dump(memStream, { filter })
       return memStream
     }
@@ -164,7 +163,7 @@ exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
     if (exportName) {
       const path = join(budibaseTempDir(), exportName)
       const writeStream = fs.createWriteStream(path)
-      await instanceDb.dump(writeStream, { filter })
+      await db.dump(writeStream, { filter })

     // Upload the dump to the object store if self hosted
     if (env.SELF_HOSTED) {
@@ -184,8 +183,9 @@ exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
     memStream.on("data", chunk => {
       appString += chunk.toString()
     })
-    await instanceDb.dump(memStream, { filter })
+    await db.dump(memStream, { filter })
     return appString
+  })
 }

 /**

View file

@@ -11,6 +11,8 @@ exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))

 exports.isDev = env.isDev

+exports.NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g
+
 exports.removeFromArray = (array, element) => {
   const index = array.indexOf(element)
   if (index !== -1) {
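Editor's note: for reference, what the shared regex accepts — optional sign, optional integer part before the decimal point, fully anchored so trailing characters fail:

const NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g

"-12.5".match(NUMBER_REGEX) // ["-12.5"]
".5".match(NUMBER_REGEX)    // [".5"]
"12abc".match(NUMBER_REGEX) // null - the anchors reject partial matches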

View file

@@ -1,6 +1,9 @@
 const { getRowParams, USER_METDATA_PREFIX } = require("../../db/utils")
-const CouchDB = require("../../db")
-const { isDevAppID, getDevelopmentAppID } = require("@budibase/backend-core/db")
+const {
+  isDevAppID,
+  getDevelopmentAppID,
+  doWithDB,
+} = require("@budibase/backend-core/db")

 const ROW_EXCLUSIONS = [USER_METDATA_PREFIX]
@@ -24,8 +27,8 @@ const getAppPairs = appIds => {
 const getAppRows = async appId => {
   // need to specify the app ID, as this is used for different apps in one call
-  const appDb = new CouchDB(appId)
-  const response = await appDb.allDocs(
+  return doWithDB(appId, async db => {
+    const response = await db.allDocs(
       getRowParams(null, null, {
         include_docs: false,
       })
@@ -40,6 +43,7 @@ const getAppRows = async appId => {
     }
     return true
   })
+  })
 }

 /**
@@ -58,7 +62,7 @@ exports.getUniqueRows = async appIds => {
       continue
     }
     try {
-      appRows.push(await getAppRows(appId))
+      appRows = appRows.concat(await getAppRows(appId))
     } catch (e) {
       console.error(e)
       // don't error out if we can't count the app rows, just continue
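Editor's note: the `push` to `concat` change is a real fix, not a style tweak — `getAppRows` resolves to an array of row IDs per app, so `push` nested one array per app inside `appRows`. In miniature:

let nested = []
nested.push(["row1", "row2"])        // [["row1", "row2"]] - an array of arrays
let flat = []
flat = flat.concat(["row1", "row2"]) // ["row1", "row2"] - the flat shape the row counting expects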

File diff suppressed because it is too large

View file

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/string-templates",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "description": "Handlebars wrapper for Budibase templating.",
   "main": "src/index.cjs",
   "module": "dist/bundle.mjs",

View file

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/worker",
   "email": "hi@budibase.com",
-  "version": "1.0.105-alpha.41",
+  "version": "1.0.122",
   "description": "Budibase background service",
   "main": "src/index.ts",
   "repository": {
@@ -31,9 +31,9 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "dependencies": {
-    "@budibase/backend-core": "^1.0.105-alpha.41",
-    "@budibase/pro": "1.0.105-alpha.41",
-    "@budibase/string-templates": "^1.0.105-alpha.41",
+    "@budibase/backend-core": "^1.0.122",
+    "@budibase/string-templates": "^1.0.122",
+    "@budibase/pro": "^1.0.122",
     "@koa/router": "^8.0.0",
     "@sentry/node": "6.17.7",
     "@techpass/passport-openidconnect": "^0.3.0",
@@ -58,7 +58,7 @@
     "passport-jwt": "^4.0.0",
     "passport-local": "^1.0.0",
     "pino-pretty": "^4.0.0",
-    "pouchdb": "^7.2.1",
+    "pouchdb": "7.3.0",
     "pouchdb-all-dbs": "^1.0.2",
     "server-destroy": "^1.0.1"
   },
View file

@@ -11,6 +11,7 @@ const { invalidateSessions } = require("@budibase/backend-core/sessions")
 const accounts = require("@budibase/backend-core/accounts")
 const {
   getGlobalDB,
+  doWithGlobalDB,
   getTenantId,
   getTenantUser,
   doesTenantExist,
@@ -49,13 +50,12 @@ export const adminUser = async (ctx: any) => {
     ctx.throw(403, "Organisation already exists.")
   }

-  const db = getGlobalDB(tenantId)
-  const response = await db.allDocs(
+  const response = await doWithGlobalDB(tenantId, async db => {
+    const response = await db.allDocs(
       getGlobalUserParams(null, {
         include_docs: true,
       })
     )
-
     // write usage quotas for cloud
     if (!env.SELF_HOSTED) {
       // could be a scenario where it exists, make sure its clean
@@ -67,8 +67,10 @@ export const adminUser = async (ctx: any) => {
       } catch (err) {
         // don't worry about errors
       }
-      await db.put(quotas.generateNewQuotaUsage())
+      await db.put(quotas.generateNewUsageQuotaDoc())
     }
+    return response
+  })

   if (response.rows.some((row: any) => row.doc.admin)) {
     ctx.throw(

View file

@@ -1,37 +1,42 @@
-const CouchDB = require("../../../db")
-const { StaticDatabases } = require("@budibase/backend-core/db")
+const { StaticDatabases, doWithDB } = require("@budibase/backend-core/db")
 const { getTenantId } = require("@budibase/backend-core/tenancy")
 const { deleteTenant } = require("@budibase/backend-core/deprovision")

 exports.exists = async ctx => {
   const tenantId = ctx.request.params
-  const db = new CouchDB(StaticDatabases.PLATFORM_INFO.name)
-  let exists = false
-  try {
-    const tenantsDoc = await db.get(StaticDatabases.PLATFORM_INFO.docs.tenants)
-    if (tenantsDoc) {
-      exists = tenantsDoc.tenantIds.indexOf(tenantId) !== -1
-    }
-  } catch (err) {
-    // if error it doesn't exist
-  }
   ctx.body = {
-    exists,
+    exists: await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+      let exists = false
+      try {
+        const tenantsDoc = await db.get(
+          StaticDatabases.PLATFORM_INFO.docs.tenants
+        )
+        if (tenantsDoc) {
+          exists = tenantsDoc.tenantIds.indexOf(tenantId) !== -1
+        }
+      } catch (err) {
+        // if error it doesn't exist
+      }
+      return exists
+    }),
   }
 }

 exports.fetch = async ctx => {
-  const db = new CouchDB(StaticDatabases.PLATFORM_INFO.name)
-  let tenants = []
-  try {
-    const tenantsDoc = await db.get(StaticDatabases.PLATFORM_INFO.docs.tenants)
-    if (tenantsDoc) {
-      tenants = tenantsDoc.tenantIds
-    }
-  } catch (err) {
-    // if error it doesn't exist
-  }
-  ctx.body = tenants
+  ctx.body = await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+    let tenants = []
+    try {
+      const tenantsDoc = await db.get(
+        StaticDatabases.PLATFORM_INFO.docs.tenants
+      )
+      if (tenantsDoc) {
+        tenants = tenantsDoc.tenantIds
+      }
+    } catch (err) {
+      // if error it doesn't exist
+    }
+    return tenants
+  })
 }

 exports.delete = async ctx => {

View file

@@ -1,3 +1,4 @@
+require("../../../../db").init()
 const env = require("../../../../environment")
 const controllers = require("./controllers")
 const supertest = require("supertest")
@@ -8,15 +9,12 @@ const { getGlobalUserByEmail } = require("@budibase/backend-core/utils")
 const { createASession } = require("@budibase/backend-core/sessions")
 const { newid } = require("@budibase/backend-core/src/hashing")
 const { TENANT_ID, CSRF_TOKEN } = require("./structures")
-const core = require("@budibase/backend-core")
-const CouchDB = require("../../../../db")
 const { doInTenant } = require("@budibase/backend-core/tenancy")
-core.init(CouchDB)

 class TestConfiguration {
   constructor(openServer = true) {
     if (openServer) {
-      env.PORT = 4003
+      env.PORT = 4012
       this.server = require("../../../../index")
       // we need the request for logging in, involves cookies, hard to fake
       this.request = supertest(this.server)

View file

@@ -1,26 +1,11 @@
-const PouchDB = require("pouchdb")
-const allDbs = require("pouchdb-all-dbs")
+const core = require("@budibase/backend-core")
 const env = require("../environment")
-const { getCouchUrl } = require("@budibase/backend-core/db")
-
-// level option is purely for testing (development)
-const COUCH_DB_URL = getCouchUrl() || "http://localhost:4005"
-
-let POUCH_DB_DEFAULTS = {
-  prefix: COUCH_DB_URL,
-}

+exports.init = () => {
+  const dbConfig = {}
   if (env.isTest()) {
-    PouchDB.plugin(require("pouchdb-adapter-memory"))
-    POUCH_DB_DEFAULTS = {
-      prefix: undefined,
-      adapter: "memory",
-    }
+    dbConfig.inMemory = true
+    dbConfig.allDbs = true
   }
+  core.init({ db: dbConfig })
 }
-
-const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
-
-// have to still have pouch alldbs for testing
-allDbs(Pouch)
-
-module.exports = Pouch

View file

@@ -5,8 +5,8 @@ import Application from "koa"
 import { bootstrap } from "global-agent"
 const env = require("./environment")
-const CouchDB = require("./db")
-require("@budibase/backend-core").init(CouchDB)
+import db from "./db"
+db.init()
 const Koa = require("koa")
 const destroyable = require("server-destroy")
 const koaBody = require("koa-body")

File diff suppressed because it is too large