1
0
Fork 0
mirror of synced 2024-08-13 17:11:49 +12:00

Merge pull request #8829 from Budibase/refactor/backend-core-worker

Complete backend Typescript conversion
This commit is contained in:
Michael Drury 2022-11-29 11:08:46 +00:00 committed by GitHub
commit 1dcb6a6814
277 changed files with 4599 additions and 4328 deletions

View file

@ -1 +0,0 @@
module.exports = require("./src/cloud/accounts")

View file

@ -1 +0,0 @@
module.exports = require("./src/auth")

View file

@ -1,9 +0,0 @@
const generic = require("./src/cache/generic")
module.exports = {
user: require("./src/cache/user"),
app: require("./src/cache/appMetadata"),
writethrough: require("./src/cache/writethrough"),
...generic,
cache: generic,
}

View file

@ -1 +0,0 @@
module.exports = require("./src/constants")

View file

@ -1,24 +0,0 @@
const {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
doInContext,
} = require("./src/context")
const identity = require("./src/context/identity")
module.exports = {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
identity,
doInContext,
}

View file

@ -1 +0,0 @@
module.exports = require("./src/db")

View file

@ -1 +0,0 @@
module.exports = require("./src/context/deprovision")

View file

@ -1 +0,0 @@
module.exports = require("./src/security/encryption")

View file

@ -1 +0,0 @@
module.exports = require("./src/logging")

View file

@ -1 +0,0 @@
module.exports = require("./src/middleware")

View file

@ -1 +0,0 @@
module.exports = require("./src/migrations")

View file

@ -1,4 +0,0 @@
module.exports = {
...require("./src/objectStore"),
...require("./src/objectStore/utils"),
}

View file

@ -1 +0,0 @@
module.exports = require("./src/security/permissions")

View file

@ -1,3 +0,0 @@
module.exports = {
...require("./src/plugin"),
}

View file

@ -1,5 +0,0 @@
module.exports = {
Client: require("./src/redis"),
utils: require("./src/redis/utils"),
clients: require("./src/redis/init"),
}

View file

@ -1 +0,0 @@
module.exports = require("./src/security/roles")

View file

@ -1 +0,0 @@
module.exports = require("./src/security/sessions")

View file

@ -1,16 +1,14 @@
const passport = require("koa-passport")
const _passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
import { getGlobalDB } from "./tenancy"
import { getGlobalDB } from "../tenancy"
const refresh = require("passport-oauth2-refresh")
import { Config } from "./constants"
import { getScopedConfig } from "./db/utils"
import { Config } from "../constants"
import { getScopedConfig } from "../db"
import {
jwt,
jwt as jwtPassport,
local,
authenticated,
google,
oidc,
auditLog,
tenancy,
authError,
@ -21,22 +19,41 @@ import {
builderOnly,
builderOrAdmin,
joiValidator,
} from "./middleware"
import { invalidateUser } from "./cache/user"
oidc,
google,
} from "../middleware"
import { invalidateUser } from "../cache/user"
import { User } from "@budibase/types"
import { logAlert } from "./logging"
import { logAlert } from "../logging"
export {
auditLog,
authError,
internalApi,
ssoCallbackUrl,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
google,
oidc,
} from "../middleware"
export const buildAuthMiddleware = authenticated
export const buildTenancyMiddleware = tenancy
export const buildCsrfMiddleware = csrf
export const passport = _passport
export const jwt = require("jsonwebtoken")
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
if (jwt.options.secretOrKey) {
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
_passport.use(new LocalStrategy(local.options, local.authenticate))
if (jwtPassport.options.secretOrKey) {
_passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
} else {
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
}
passport.serializeUser((user: User, done: any) => done(null, user))
_passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user: User, done: any) => {
_passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
@ -115,7 +132,7 @@ async function refreshGoogleAccessToken(
})
}
async function refreshOAuthToken(
export async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
@ -152,7 +169,7 @@ async function refreshOAuthToken(
return refreshResponse
}
async function updateUserOAuth(userId: string, oAuthConfig: any) {
export async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -179,23 +196,3 @@ async function updateUserOAuth(userId: string, oAuthConfig: any) {
console.error("Could not update OAuth details for current user", e)
}
}
export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,
internalApi,
refreshOAuthToken,
updateUserOAuth,
ssoCallbackUrl,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
}

View file

@ -0,0 +1 @@
export * from "./auth"

View file

@ -1,6 +1,6 @@
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentType } = require("../db/constants")
import { getAppClient } from "../redis/init"
import { doWithDB, DocumentType } from "../db"
import { Database } from "@budibase/types"
const AppState = {
INVALID: "invalid",
@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600
/**
* The default populate app metadata function
*/
const populateFromDB = async appId => {
async function populateFromDB(appId: string) {
return doWithDB(
appId,
db => {
(db: Database) => {
return db.get(DocumentType.APP_METADATA)
},
{ skip_setup: true }
)
}
const isInvalid = metadata => {
function isInvalid(metadata?: { state: string }) {
return !metadata || metadata.state === AppState.INVALID
}
@ -31,15 +31,15 @@ const isInvalid = metadata => {
* @param {string} appId the id of the app to get metadata from.
* @returns {object} the app metadata.
*/
exports.getAppMetadata = async appId => {
const client = await redis.getAppClient()
export async function getAppMetadata(appId: string) {
const client = await getAppClient()
// try cache
let metadata = await client.get(appId)
if (!metadata) {
let expiry = EXPIRY_SECONDS
let expiry: number | undefined = EXPIRY_SECONDS
try {
metadata = await populateFromDB(appId)
} catch (err) {
} catch (err: any) {
// app DB left around, but no metadata, it is invalid
if (err && err.status === 404) {
metadata = { state: AppState.INVALID }
@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => {
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
* @return {Promise<void>} will respond with success when cache is updated.
*/
exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
if (!appId) {
throw "Cannot invalidate if no app ID provided."
}
const client = await redis.getAppClient()
const client = await getAppClient()
await client.delete(appId)
if (newMetadata) {
await client.store(appId, newMetadata, EXPIRY_SECONDS)

View file

@ -1,6 +1,6 @@
import { getTenantId } from "../../context"
import redis from "../../redis/init"
import RedisWrapper from "../../redis"
import * as redis from "../../redis/init"
import { Client } from "../../redis"
function generateTenantKey(key: string) {
const tenantId = getTenantId()
@ -8,9 +8,9 @@ function generateTenantKey(key: string) {
}
export = class BaseCache {
client: RedisWrapper | undefined
client: Client | undefined
constructor(client: RedisWrapper | undefined = undefined) {
constructor(client: Client | undefined = undefined) {
this.client = client
}

View file

@ -1,30 +0,0 @@
const BaseCache = require("./base")
const GENERIC = new BaseCache()
exports.CacheKeys = {
CHECKLIST: "checklist",
INSTALLATION: "installation",
ANALYTICS_ENABLED: "analyticsEnabled",
UNIQUE_TENANT_ID: "uniqueTenantId",
EVENTS: "events",
BACKFILL_METADATA: "backfillMetadata",
EVENTS_RATE_LIMIT: "eventsRateLimit",
}
exports.TTL = {
ONE_MINUTE: 600,
ONE_HOUR: 3600,
ONE_DAY: 86400,
}
function performExport(funcName) {
return (...args) => GENERIC[funcName](...args)
}
exports.keys = performExport("keys")
exports.get = performExport("get")
exports.store = performExport("store")
exports.delete = performExport("delete")
exports.withCache = performExport("withCache")
exports.bustCache = performExport("bustCache")

View file

@ -0,0 +1,30 @@
const BaseCache = require("./base")
const GENERIC = new BaseCache()
export enum CacheKey {
CHECKLIST = "checklist",
INSTALLATION = "installation",
ANALYTICS_ENABLED = "analyticsEnabled",
UNIQUE_TENANT_ID = "uniqueTenantId",
EVENTS = "events",
BACKFILL_METADATA = "backfillMetadata",
EVENTS_RATE_LIMIT = "eventsRateLimit",
}
export enum TTL {
ONE_MINUTE = 600,
ONE_HOUR = 3600,
ONE_DAY = 86400,
}
function performExport(funcName: string) {
return (...args: any) => GENERIC[funcName](...args)
}
export const keys = performExport("keys")
export const get = performExport("get")
export const store = performExport("store")
export const destroy = performExport("delete")
export const withCache = performExport("withCache")
export const bustCache = performExport("bustCache")

View file

@ -0,0 +1,5 @@
export * as generic from "./generic"
export * as user from "./user"
export * as app from "./appMetadata"
export * as writethrough from "./writethrough"
export * from "./generic"

View file

@ -1,15 +1,16 @@
const redis = require("../redis/init")
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
const env = require("../environment")
const accounts = require("../cloud/accounts")
import * as redis from "../redis/init"
import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
import env from "../environment"
import * as accounts from "../cloud/accounts"
import { Database } from "@budibase/types"
const EXPIRY_SECONDS = 3600
/**
* The default populate user function
*/
const populateFromDB = async (userId, tenantId) => {
const user = await doWithGlobalDB(tenantId, db => db.get(userId))
async function populateFromDB(userId: string, tenantId: string) {
const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
user.budibaseAccess = true
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
const account = await accounts.getAccount(user.email)
@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => {
* @param {*} populateUser function to provide the user for re-caching. default to couch db
* @returns
*/
exports.getUser = async (userId, tenantId = null, populateUser = null) => {
export async function getUser(
userId: string,
tenantId?: string,
populateUser?: any
) {
if (!populateUser) {
populateUser = populateFromDB
}
@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
let user = await client.get(userId)
if (!user) {
user = await populateUser(userId, tenantId)
client.store(userId, user, EXPIRY_SECONDS)
await client.store(userId, user, EXPIRY_SECONDS)
}
if (user && !user.tenantId && tenantId) {
// make sure the tenant ID is always correct/set
@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
return user
}
exports.invalidateUser = async userId => {
export async function invalidateUser(userId: string) {
const client = await redis.getUserClient()
await client.delete(userId)
}

View file

@ -1,42 +0,0 @@
const fetch = require("node-fetch")
class API {
constructor(host) {
this.host = host
}
apiCall =
method =>
async (url = "", options = {}) => {
if (!options.headers) {
options.headers = {}
}
if (!options.headers["Content-Type"]) {
options.headers = {
"Content-Type": "application/json",
Accept: "application/json",
...options.headers,
}
}
let json = options.headers["Content-Type"] === "application/json"
const requestOptions = {
method: method,
body: json ? JSON.stringify(options.body) : options.body,
headers: options.headers,
// TODO: See if this is necessary
credentials: "include",
}
return await fetch(`${this.host}${url}`, requestOptions)
}
post = this.apiCall("POST")
get = this.apiCall("GET")
patch = this.apiCall("PATCH")
del = this.apiCall("DELETE")
put = this.apiCall("PUT")
}
module.exports = API

View file

@ -0,0 +1,55 @@
import fetch from "node-fetch"
export = class API {
host: string
constructor(host: string) {
this.host = host
}
async apiCall(method: string, url: string, options?: any) {
if (!options.headers) {
options.headers = {}
}
if (!options.headers["Content-Type"]) {
options.headers = {
"Content-Type": "application/json",
Accept: "application/json",
...options.headers,
}
}
let json = options.headers["Content-Type"] === "application/json"
const requestOptions = {
method: method,
body: json ? JSON.stringify(options.body) : options.body,
headers: options.headers,
// TODO: See if this is necessary
credentials: "include",
}
return await fetch(`${this.host}${url}`, requestOptions)
}
async post(url: string, options?: any) {
return this.apiCall("POST", url, options)
}
async get(url: string, options?: any) {
return this.apiCall("GET", url, options)
}
async patch(url: string, options?: any) {
return this.apiCall("PATCH", url, options)
}
async del(url: string, options?: any) {
return this.apiCall("DELETE", url, options)
}
async put(url: string, options?: any) {
return this.apiCall("PUT", url, options)
}
}

View file

@ -1,44 +0,0 @@
exports.UserStatus = {
ACTIVE: "active",
INACTIVE: "inactive",
}
exports.Cookie = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}
exports.Header = {
API_KEY: "x-budibase-api-key",
LICENSE_KEY: "x-budibase-license-key",
API_VER: "x-budibase-api-version",
APP_ID: "x-budibase-app-id",
TYPE: "x-budibase-type",
PREVIEW_ROLE: "x-budibase-role",
TENANT_ID: "x-budibase-tenant-id",
TOKEN: "x-budibase-token",
CSRF_TOKEN: "x-csrf-token",
}
exports.GlobalRoles = {
OWNER: "owner",
ADMIN: "admin",
BUILDER: "builder",
WORKSPACE_MANAGER: "workspace_manager",
}
exports.Config = {
SETTINGS: "settings",
ACCOUNT: "account",
SMTP: "smtp",
GOOGLE: "google",
OIDC: "oidc",
OIDC_LOGOS: "logos_oidc",
}
exports.MAX_VALID_DATE = new Date(2147483647000)
exports.DEFAULT_TENANT_ID = "default"

View file

@ -0,0 +1,2 @@
export * from "./db"
export * from "./misc"

View file

@ -1,18 +1,17 @@
import { AsyncLocalStorage } from "async_hooks"
import { ContextMap } from "./constants"
export default class Context {
static storage = new AsyncLocalStorage<ContextMap>()
static storage = new AsyncLocalStorage<Record<string, any>>()
static run(context: ContextMap, func: any) {
static run(context: Record<string, any>, func: any) {
return Context.storage.run(context, () => func())
}
static get(): ContextMap {
return Context.storage.getStore() as ContextMap
static get(): Record<string, any> {
return Context.storage.getStore() as Record<string, any>
}
static set(context: ContextMap) {
static set(context: Record<string, any>) {
Context.storage.enterWith(context)
}
}

View file

@ -1,7 +0,0 @@
import { IdentityContext } from "@budibase/types"
export type ContextMap = {
tenantId?: string
appId?: string
identity?: IdentityContext
}

View file

@ -2,23 +2,22 @@ import {
IdentityContext,
IdentityType,
User,
UserContext,
isCloudAccount,
Account,
AccountUserContext,
} from "@budibase/types"
import * as context from "."
export const getIdentity = (): IdentityContext | undefined => {
export function getIdentity(): IdentityContext | undefined {
return context.getIdentity()
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
export function doInIdentityContext(identity: IdentityContext, task: any) {
return context.doInIdentityContext(identity, task)
}
export const doInUserContext = (user: User, task: any) => {
const userContext: UserContext = {
export function doInUserContext(user: User, task: any) {
const userContext: any = {
...user,
_id: user._id as string,
type: IdentityType.USER,
@ -26,7 +25,7 @@ export const doInUserContext = (user: User, task: any) => {
return doInIdentityContext(userContext, task)
}
export const doInAccountContext = (account: Account, task: any) => {
export function doInAccountContext(account: Account, task: any) {
const _id = getAccountUserId(account)
const tenantId = account.tenantId
const accountContext: AccountUserContext = {
@ -38,12 +37,12 @@ export const doInAccountContext = (account: Account, task: any) => {
return doInIdentityContext(accountContext, task)
}
export const getAccountUserId = (account: Account) => {
export function getAccountUserId(account: Account) {
let userId: string
if (isCloudAccount(account)) {
userId = account.budibaseUserId
} else {
// use account id as user id for self hosting
// use account id as user id for self-hosting
userId = account.accountId
}
return userId

View file

@ -1,223 +1,3 @@
import env from "../environment"
import {
SEPARATOR,
DocumentType,
getDevelopmentAppID,
getProdAppID,
baseGlobalDBName,
getDB,
} from "../db"
import Context from "./Context"
import { IdentityContext, Database } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextMap } from "./constants"
export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
// some test cases call functions directly, need to
// store an app ID to pretend there is a context
let TEST_APP_ID: string | null = null
export function isMultiTenant() {
return env.MULTI_TENANCY
}
export function isTenantIdSet() {
const context = Context.get()
return !!context?.tenantId
}
export function isTenancyEnabled() {
return env.MULTI_TENANCY
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
*/
export function getTenantIDFromAppID(appId: string) {
if (!appId) {
return undefined
}
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return undefined
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
function updateContext(updates: ContextMap) {
let context: ContextMap
try {
context = Context.get()
} catch (err) {
// no context, start empty
context = {}
}
context = {
...context,
...updates,
}
return context
}
async function newContext(updates: ContextMap, task: any) {
// see if there already is a context setup
let context: ContextMap = updateContext(updates)
return Context.run(context, task)
}
export async function doInContext(appId: string, task: any): Promise<any> {
const tenantId = getTenantIDFromAppID(appId)
return newContext(
{
tenantId,
appId,
},
task
)
}
export async function doInTenant(
tenantId: string | null,
task: any
): Promise<any> {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
tenantId = tenantId || DEFAULT_TENANT_ID
}
const updates = tenantId ? { tenantId } : {}
return newContext(updates, task)
}
export async function doInAppContext(appId: string, task: any): Promise<any> {
if (!appId) {
throw new Error("appId is required")
}
const tenantId = getTenantIDFromAppID(appId)
const updates: ContextMap = { appId }
if (tenantId) {
updates.tenantId = tenantId
}
return newContext(updates, task)
}
export async function doInIdentityContext(
identity: IdentityContext,
task: any
): Promise<any> {
if (!identity) {
throw new Error("identity is required")
}
const context: ContextMap = {
identity,
}
if (identity.tenantId) {
context.tenantId = identity.tenantId
}
return newContext(context, task)
}
export function getIdentity(): IdentityContext | undefined {
try {
const context = Context.get()
return context?.identity
} catch (e) {
// do nothing - identity is not in context
}
}
export function getTenantId(): string {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const context = Context.get()
const tenantId = context?.tenantId
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
export function getAppId(): string | undefined {
const context = Context.get()
const foundId = context?.appId
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
export function updateTenantId(tenantId?: string) {
let context: ContextMap = updateContext({
tenantId,
})
Context.set(context)
}
export function updateAppId(appId: string) {
let context: ContextMap = updateContext({
appId,
})
try {
Context.set(context)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
} else {
throw err
}
}
}
export function getGlobalDB(): Database {
const context = Context.get()
if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
throw new Error("Global DB not found")
}
return getDB(baseGlobalDBName(context?.tenantId))
}
/**
* Gets the app database based on whatever the request
* contained, dev or prod.
*/
export function getAppDB(opts?: any): Database {
const appId = getAppId()
return getDB(appId, opts)
}
/**
* This specifically gets the prod app ID, if the request
* contained a development app ID, this will get the prod one.
*/
export function getProdAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve prod DB - no app ID.")
}
return getDB(getProdAppID(appId), opts)
}
/**
* This specifically gets the dev app ID, if the request
* contained a prod app ID, this will get the dev one.
*/
export function getDevAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve dev DB - no app ID.")
}
return getDB(getDevelopmentAppID(appId), opts)
}
export { DEFAULT_TENANT_ID } from "../constants"
export * as identity from "./identity"
export * from "./mainContext"

View file

@ -0,0 +1,245 @@
// some test cases call functions directly, need to
// store an app ID to pretend there is a context
import env from "../environment"
import Context from "./Context"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
import { getDB } from "../db/db"
import {
DocumentType,
SEPARATOR,
StaticDatabases,
DEFAULT_TENANT_ID,
} from "../constants"
import { Database, IdentityContext } from "@budibase/types"
export type ContextMap = {
tenantId?: string
appId?: string
identity?: IdentityContext
}
let TEST_APP_ID: string | null = null
export function getGlobalDBName(tenantId?: string) {
// tenant ID can be set externally, for example user API where
// new tenants are being created, this may be the case
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export function baseGlobalDBName(tenantId: string | undefined | null) {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}
export function isMultiTenant() {
return env.MULTI_TENANCY
}
export function isTenantIdSet() {
const context = Context.get()
return !!context?.tenantId
}
export function isTenancyEnabled() {
return env.MULTI_TENANCY
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
*/
export function getTenantIDFromAppID(appId: string) {
if (!appId) {
return undefined
}
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return undefined
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
function updateContext(updates: ContextMap) {
let context: ContextMap
try {
context = Context.get()
} catch (err) {
// no context, start empty
context = {}
}
context = {
...context,
...updates,
}
return context
}
async function newContext(updates: ContextMap, task: any) {
// see if there already is a context setup
let context: ContextMap = updateContext(updates)
return Context.run(context, task)
}
export async function doInContext(appId: string, task: any): Promise<any> {
const tenantId = getTenantIDFromAppID(appId)
return newContext(
{
tenantId,
appId,
},
task
)
}
export async function doInTenant(
tenantId: string | null,
task: any
): Promise<any> {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
tenantId = tenantId || DEFAULT_TENANT_ID
}
const updates = tenantId ? { tenantId } : {}
return newContext(updates, task)
}
export async function doInAppContext(appId: string, task: any): Promise<any> {
if (!appId) {
throw new Error("appId is required")
}
const tenantId = getTenantIDFromAppID(appId)
const updates: ContextMap = { appId }
if (tenantId) {
updates.tenantId = tenantId
}
return newContext(updates, task)
}
export async function doInIdentityContext(
identity: IdentityContext,
task: any
): Promise<any> {
if (!identity) {
throw new Error("identity is required")
}
const context: ContextMap = {
identity,
}
if (identity.tenantId) {
context.tenantId = identity.tenantId
}
return newContext(context, task)
}
export function getIdentity(): IdentityContext | undefined {
try {
const context = Context.get()
return context?.identity
} catch (e) {
// do nothing - identity is not in context
}
}
export function getTenantId(): string {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const context = Context.get()
const tenantId = context?.tenantId
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
export function getAppId(): string | undefined {
const context = Context.get()
const foundId = context?.appId
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
export function updateTenantId(tenantId?: string) {
let context: ContextMap = updateContext({
tenantId,
})
Context.set(context)
}
export function updateAppId(appId: string) {
let context: ContextMap = updateContext({
appId,
})
try {
Context.set(context)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
} else {
throw err
}
}
}
export function getGlobalDB(): Database {
const context = Context.get()
if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
throw new Error("Global DB not found")
}
return getDB(baseGlobalDBName(context?.tenantId))
}
/**
* Gets the app database based on whatever the request
* contained, dev or prod.
*/
export function getAppDB(opts?: any): Database {
const appId = getAppId()
return getDB(appId, opts)
}
/**
* This specifically gets the prod app ID, if the request
* contained a development app ID, this will get the prod one.
*/
export function getProdAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve prod DB - no app ID.")
}
return getDB(getProdAppID(appId), opts)
}
/**
* This specifically gets the dev app ID, if the request
* contained a prod app ID, this will get the dev one.
*/
export function getDevAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve dev DB - no app ID.")
}
return getDB(getDevelopmentAppID(appId), opts)
}

View file

@ -1,5 +1,5 @@
import { getPouchDB, closePouchDB } from "./couch/pouchDB"
import { DocumentType } from "./constants"
import { getPouchDB, closePouchDB } from "./couch"
import { DocumentType } from "../constants"
class Replication {
source: any

View file

@ -1,4 +1,4 @@
import { APP_DEV_PREFIX, APP_PREFIX } from "./constants"
import { APP_DEV_PREFIX, APP_PREFIX } from "../constants"
import { App } from "@budibase/types"
const NO_APP_ERROR = "No app provided"

View file

@ -2,6 +2,8 @@ export * from "./couch"
export * from "./db"
export * from "./utils"
export * from "./views"
export * from "./constants"
export * from "./conversions"
export * from "./tenancy"
export { default as Replication } from "./Replication"
// exports to support old export structure
export * from "../constants/db"
export { getGlobalDBName, baseGlobalDBName } from "../context"

View file

@ -1,22 +0,0 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { StaticDatabases, SEPARATOR } from "./constants"
import { getTenantId } from "../context"
export const getGlobalDBName = (tenantId?: string) => {
// tenant ID can be set externally, for example user API where
// new tenants are being created, this may be the case
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export const baseGlobalDBName = (tenantId: string | undefined | null) => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}

View file

@ -1,10 +1,12 @@
require("../../../tests")
const {
generateAppID,
getDevelopmentAppID,
getProdAppID,
isDevAppID,
isProdAppID,
} = require("../conversions")
const {
generateAppID,
getPlatformUrl,
getScopedConfig
} = require("../utils")

View file

@ -1,26 +1,20 @@
import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Config } from "../constants"
import { newid } from "../newid"
import env from "../environment"
import {
DEFAULT_TENANT_ID,
SEPARATOR,
DocumentType,
UNICODE_MAX,
ViewName,
InternalTable,
} from "./constants"
import { getTenantId, getGlobalDB } from "../context"
import { getGlobalDBName } from "./tenancy"
APP_PREFIX,
} from "../constants"
import { getTenantId, getGlobalDB, getGlobalDBName } from "../context"
import { doWithDB, allDbs, directCouchAllDbs } from "./db"
import { getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import { APP_PREFIX } from "./constants"
import * as events from "../events"
import { App, Database } from "@budibase/types"
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"
export * from "./tenancy"
import { App, Database, ConfigType } from "@budibase/types"
/**
* Generates a new app ID.
@ -494,7 +488,7 @@ export const getScopedFullConfig = async function (
)[0]
// custom logic for settings doc
if (type === Config.SETTINGS) {
if (type === ConfigType.SETTINGS) {
if (scopedConfig && scopedConfig.doc) {
// overrides affected by environment variables
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
@ -533,7 +527,7 @@ export const getPlatformUrl = async (opts = { tenantAware: true }) => {
// get the doc directly instead of with getScopedConfig to prevent loop
let settings
try {
settings = await db.get(generateConfigID({ type: Config.SETTINGS }))
settings = await db.get(generateConfigID({ type: ConfigType.SETTINGS }))
} catch (e: any) {
if (e.status !== 404) {
throw e

View file

@ -1,6 +1,11 @@
import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
import {
DocumentType,
ViewName,
DeprecatedViews,
SEPARATOR,
StaticDatabases,
} from "../constants"
import { getGlobalDB } from "../context"
import { StaticDatabases } from "./constants"
import { doWithDB } from "./"
import { Database, DatabaseQueryOpts } from "@budibase/types"

View file

@ -25,7 +25,7 @@ const DefaultBucketName = {
PLUGINS: "plugins",
}
const env = {
const environment = {
isTest,
isDev,
JS_BCRYPT: process.env.JS_BCRYPT,
@ -75,17 +75,18 @@ const env = {
process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
_set(key: any, value: any) {
process.env[key] = value
module.exports[key] = value
// @ts-ignore
environment[key] = value
},
}
// clean up any environment variable edge cases
for (let [key, value] of Object.entries(env)) {
for (let [key, value] of Object.entries(environment)) {
// handle the edge case of "0" to disable an environment variable
if (value === "0") {
// @ts-ignore
env[key] = 0
environment[key] = 0
}
}
export = env
export = environment

View file

@ -1,8 +1,8 @@
import env from "../environment"
import tenancy from "../tenancy"
import * as tenancy from "../tenancy"
import * as dbUtils from "../db/utils"
import { Config } from "../constants"
import { withCache, TTL, CacheKeys } from "../cache/generic"
import { withCache, TTL, CacheKey } from "../cache"
export const enabled = async () => {
// cloud - always use the environment variable
@ -13,7 +13,7 @@ export const enabled = async () => {
// self host - prefer the settings doc
// use cache as events have high throughput
const enabledInDB = await withCache(
CacheKeys.ANALYTICS_ENABLED,
CacheKey.ANALYTICS_ENABLED,
TTL.ONE_DAY,
async () => {
const settings = await getSettingsDoc()

View file

@ -21,7 +21,7 @@ import {
AppCreatedEvent,
} from "@budibase/types"
import * as context from "../context"
import { CacheKeys } from "../cache/generic"
import { CacheKey } from "../cache/generic"
import * as cache from "../cache/generic"
// LIFECYCLE
@ -48,18 +48,18 @@ export const end = async () => {
// CRUD
const getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {
return cache.get(CacheKeys.BACKFILL_METADATA)
return cache.get(CacheKey.BACKFILL_METADATA)
}
const saveBackfillMetadata = async (
backfill: BackfillMetadata
): Promise<void> => {
// no TTL - deleted by backfill
return cache.store(CacheKeys.BACKFILL_METADATA, backfill)
return cache.store(CacheKey.BACKFILL_METADATA, backfill)
}
const deleteBackfillMetadata = async (): Promise<void> => {
await cache.delete(CacheKeys.BACKFILL_METADATA)
await cache.destroy(CacheKey.BACKFILL_METADATA)
}
const clearEvents = async () => {
@ -70,7 +70,7 @@ const clearEvents = async () => {
for (const key of keys) {
// delete each key
// don't use tenancy, already in the key
await cache.delete(key, { useTenancy: false })
await cache.destroy(key, { useTenancy: false })
}
}
@ -167,7 +167,7 @@ const getEventKey = (event?: Event, properties?: any) => {
const tenantId = context.getTenantId()
if (event) {
eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}`
eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}`
// use some properties to make the key more unique
const custom = CUSTOM_PROPERTY_SUFFIX[event]
@ -176,7 +176,7 @@ const getEventKey = (event?: Event, properties?: any) => {
eventKey = `${eventKey}:${suffix}`
}
} else {
eventKey = `${CacheKeys.EVENTS}:${tenantId}:*`
eventKey = `${CacheKey.EVENTS}:${tenantId}:*`
}
return eventKey

View file

@ -20,9 +20,9 @@ import {
import { processors } from "./processors"
import * as dbUtils from "../db/utils"
import { Config } from "../constants"
import * as hashing from "../hashing"
import { newid } from "../utils"
import * as installation from "../installation"
import { withCache, TTL, CacheKeys } from "../cache/generic"
import { withCache, TTL, CacheKey } from "../cache/generic"
const pkg = require("../../package.json")
@ -270,7 +270,7 @@ const getEventTenantId = async (tenantId: string): Promise<string> => {
const getUniqueTenantId = async (tenantId: string): Promise<string> => {
// make sure this tenantId always matches the tenantId in context
return context.doInTenant(tenantId, () => {
return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
const db = context.getGlobalDB()
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
type: Config.SETTINGS,
@ -280,7 +280,7 @@ const getUniqueTenantId = async (tenantId: string): Promise<string> => {
if (config.config.uniqueTenantId) {
return config.config.uniqueTenantId
} else {
uniqueTenantId = `${hashing.newid()}_${tenantId}`
uniqueTenantId = `${newid()}_${tenantId}`
config.config.uniqueTenantId = uniqueTenantId
await db.put(config)
return uniqueTenantId

View file

@ -1,5 +1,5 @@
import { Event } from "@budibase/types"
import { CacheKeys, TTL } from "../../../cache/generic"
import { CacheKey, TTL } from "../../../cache/generic"
import * as cache from "../../../cache/generic"
import * as context from "../../../context"
@ -74,7 +74,7 @@ export const limited = async (event: Event): Promise<boolean> => {
}
const eventKey = (event: RateLimitedEvent) => {
let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}`
let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}`
if (isPerApp(event)) {
key = key + ":" + context.getAppId()
}

View file

@ -3,7 +3,7 @@ import PosthogProcessor from "../PosthogProcessor"
import { Event, IdentityType, Hosting } from "@budibase/types"
const tk = require("timekeeper")
import * as cache from "../../../../cache/generic"
import { CacheKeys } from "../../../../cache/generic"
import { CacheKey } from "../../../../cache/generic"
import * as context from "../../../../context"
const newIdentity = () => {
@ -19,7 +19,7 @@ describe("PosthogProcessor", () => {
beforeEach(async () => {
jest.clearAllMocks()
await cache.bustCache(
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
)
})
@ -89,7 +89,7 @@ describe("PosthogProcessor", () => {
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
await cache.bustCache(
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
)
tk.freeze(new Date(2022, 0, 1, 14, 0))

View file

@ -72,7 +72,7 @@ export async function stepCreated(
automationId: automation._id as string,
triggerId: automation.definition?.trigger?.id,
triggerType: automation.definition?.trigger?.stepId,
stepId: step.id,
stepId: step.id!,
stepType: step.stepId,
}
await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
@ -87,7 +87,7 @@ export async function stepDeleted(
automationId: automation._id as string,
triggerId: automation.definition?.trigger?.id,
triggerType: automation.definition?.trigger?.stepId,
stepId: step.id,
stepId: step.id!,
stepType: step.stepId,
}
await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)

View file

@ -1,17 +1,17 @@
const env = require("../environment")
const tenancy = require("../tenancy")
import env from "../environment"
import * as tenancy from "../tenancy"
/**
* Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.
* The env var is formatted as:
* tenant1:feature1:feature2,tenant2:feature1
*/
const getFeatureFlags = () => {
function getFeatureFlags() {
if (!env.TENANT_FEATURE_FLAGS) {
return
}
const tenantFeatureFlags = {}
const tenantFeatureFlags: Record<string, string[]> = {}
env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
const [tenantId, ...features] = tenantToFeatures.split(":")
@ -29,13 +29,13 @@ const getFeatureFlags = () => {
const TENANT_FEATURE_FLAGS = getFeatureFlags()
exports.isEnabled = featureFlag => {
export function isEnabled(featureFlag: string) {
const tenantId = tenancy.getTenantId()
const flags = exports.getTenantFeatureFlags(tenantId)
const flags = getTenantFeatureFlags(tenantId)
return flags.includes(featureFlag)
}
exports.getTenantFeatureFlags = tenantId => {
export function getTenantFeatureFlags(tenantId: string) {
const flags = []
if (TENANT_FEATURE_FLAGS) {
@ -53,8 +53,8 @@ exports.getTenantFeatureFlags = tenantId => {
return flags
}
exports.TenantFeatureFlag = {
LICENSING: "LICENSING",
GOOGLE_SHEETS: "GOOGLE_SHEETS",
USER_GROUPS: "USER_GROUPS",
export enum TenantFeatureFlag {
LICENSING = "LICENSING",
GOOGLE_SHEETS = "GOOGLE_SHEETS",
USER_GROUPS = "USER_GROUPS",
}

View file

@ -4,6 +4,6 @@
* @param {string} url The URL to test and remove any extra double slashes.
* @return {string} The updated url.
*/
exports.checkSlashesInUrl = url => {
export function checkSlashesInUrl(url: string) {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
}

View file

@ -8,27 +8,24 @@ import * as permissions from "./security/permissions"
import * as accounts from "./cloud/accounts"
import * as installation from "./installation"
import env from "./environment"
import tenancy from "./tenancy"
import featureFlags from "./featureFlags"
import * as tenancy from "./tenancy"
import * as featureFlags from "./featureFlags"
import * as sessions from "./security/sessions"
import * as deprovisioning from "./context/deprovision"
import auth from "./auth"
import * as auth from "./auth"
import * as constants from "./constants"
import * as dbConstants from "./db/constants"
import * as logging from "./logging"
import pino from "./pino"
import * as pino from "./pino"
import * as middleware from "./middleware"
import plugins from "./plugin"
import encryption from "./security/encryption"
import * as plugins from "./plugin"
import * as encryption from "./security/encryption"
import * as queue from "./queue"
import * as db from "./db"
// mimic the outer package exports
import * as objectStore from "./pkg/objectStore"
import * as utils from "./pkg/utils"
import redis from "./pkg/redis"
import cache from "./pkg/cache"
import context from "./pkg/context"
import * as context from "./context"
import * as cache from "./cache"
import * as objectStore from "./objectStore"
import * as redis from "./redis"
import * as utils from "./utils"
const init = (opts: any = {}) => {
db.init(opts.db)
@ -37,7 +34,7 @@ const init = (opts: any = {}) => {
const core = {
init,
db,
...dbConstants,
...constants,
redis,
locks: redis.redlock,
objectStore,
@ -46,7 +43,6 @@ const core = {
cache,
auth,
constants,
...constants,
migrations,
env,
accounts,

View file

@ -1,16 +1,16 @@
import * as hashing from "./hashing"
import { newid } from "./utils"
import * as events from "./events"
import { StaticDatabases } from "./db/constants"
import { StaticDatabases } from "./db"
import { doWithDB } from "./db"
import { Installation, IdentityType } from "@budibase/types"
import * as context from "./context"
import semver from "semver"
import { bustCache, withCache, TTL, CacheKeys } from "./cache/generic"
import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
const pkg = require("../package.json")
export const getInstall = async (): Promise<Installation> => {
return withCache(CacheKeys.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
useTenancy: false,
})
}
@ -28,7 +28,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
if (e.status === 404) {
install = {
_id: StaticDatabases.PLATFORM_INFO.docs.install,
installId: hashing.newid(),
installId: newid(),
version: pkg.version,
}
const resp = await platformDb.put(install)
@ -50,7 +50,7 @@ const updateVersion = async (version: string): Promise<boolean> => {
const install = await getInstall()
install.version = version
await platformDb.put(install)
await bustCache(CacheKeys.INSTALLATION)
await bustCache(CacheKey.INSTALLATION)
}
)
} catch (e: any) {

View file

@ -1,4 +1,6 @@
module.exports = async (ctx, next) => {
import { BBContext } from "@budibase/types"
export = async (ctx: BBContext, next: any) => {
if (
!ctx.internal &&
(!ctx.user || !ctx.user.admin || !ctx.user.admin.global)

View file

@ -1,4 +0,0 @@
module.exports = async (ctx, next) => {
// Placeholder for audit log middleware
return next()
}

View file

@ -0,0 +1,6 @@
import { BBContext } from "@budibase/types"
export = async (ctx: BBContext | any, next: any) => {
// Placeholder for audit log middleware
return next()
}

View file

@ -6,10 +6,13 @@ import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
import { getGlobalDB, doInTenant } from "../tenancy"
import { decrypt } from "../security/encryption"
const identity = require("../context/identity")
const env = require("../environment")
import * as identity from "../context/identity"
import env from "../environment"
import { BBContext, EndpointMatcher } from "@budibase/types"
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
? parseInt(env.SESSION_UPDATE_PERIOD)
: 60 * 1000
interface FinaliseOpts {
authenticated?: boolean
@ -40,13 +43,13 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
return doInTenant(tenantId, async () => {
const db = getGlobalDB()
// api key is encrypted in the database
const userId = await queryGlobalView(
const userId = (await queryGlobalView(
ViewName.BY_API_KEY,
{
key: apiKey,
},
db
)
)) as string
if (userId) {
return {
valid: true,
@ -63,14 +66,14 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
* The tenancy modules should not be used here and it should be assumed that the tenancy context
* has not yet been populated.
*/
export = (
noAuthPatterns = [],
opts: { publicAllowed: boolean; populateUser?: Function } = {
export = function (
noAuthPatterns: EndpointMatcher[] = [],
opts: { publicAllowed?: boolean; populateUser?: Function } = {
publicAllowed: false,
}
) => {
) {
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
return async (ctx: any, next: any) => {
return async (ctx: BBContext | any, next: any) => {
let publicEndpoint = false
const version = ctx.request.headers[Header.API_VER]
// the path is not authenticated

View file

@ -1,4 +1,6 @@
module.exports = async (ctx, next) => {
import { BBContext } from "@budibase/types"
export = async (ctx: BBContext, next: any) => {
if (
!ctx.internal &&
(!ctx.user || !ctx.user.builder || !ctx.user.builder.global)

View file

@ -1,4 +1,6 @@
module.exports = async (ctx, next) => {
import { BBContext } from "@budibase/types"
export = async (ctx: BBContext, next: any) => {
if (
!ctx.internal &&
(!ctx.user || !ctx.user.builder || !ctx.user.builder.global) &&

View file

@ -1,5 +1,6 @@
const { Header } = require("../constants")
const { buildMatcherRegex, matches } = require("./matchers")
import { Header } from "../constants"
import { buildMatcherRegex, matches } from "./matchers"
import { BBContext, EndpointMatcher } from "@budibase/types"
/**
* GET, HEAD and OPTIONS methods are considered safe operations
@ -31,9 +32,11 @@ const INCLUDED_CONTENT_TYPES = [
* https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern
*
*/
module.exports = (opts = { noCsrfPatterns: [] }) => {
export = function (
opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }
) {
const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
return async (ctx, next) => {
return async (ctx: BBContext | any, next: any) => {
// don't apply for excluded paths
const found = matches(ctx, noCsrfOptions)
if (found) {
@ -62,7 +65,7 @@ module.exports = (opts = { noCsrfPatterns: [] }) => {
// apply csrf when there is a token in the session (new logins)
// in future there should be a hard requirement that the token is present
const userToken = ctx.user.csrfToken
const userToken = ctx.user?.csrfToken
if (!userToken) {
return next()
}

View file

@ -1,18 +1,18 @@
const jwt = require("./passport/jwt")
const local = require("./passport/local")
const google = require("./passport/google")
const oidc = require("./passport/oidc")
const { authError, ssoCallbackUrl } = require("./passport/utils")
const authenticated = require("./authenticated")
const auditLog = require("./auditLog")
const tenancy = require("./tenancy")
const internalApi = require("./internalApi")
const datasourceGoogle = require("./passport/datasource/google")
const csrf = require("./csrf")
const adminOnly = require("./adminOnly")
const builderOrAdmin = require("./builderOrAdmin")
const builderOnly = require("./builderOnly")
const joiValidator = require("./joi-validator")
import * as jwt from "./passport/jwt"
import * as local from "./passport/local"
import * as google from "./passport/google"
import * as oidc from "./passport/oidc"
import { authError, ssoCallbackUrl } from "./passport/utils"
import authenticated from "./authenticated"
import auditLog from "./auditLog"
import tenancy from "./tenancy"
import internalApi from "./internalApi"
import * as datasourceGoogle from "./passport/datasource/google"
import csrf from "./csrf"
import adminOnly from "./adminOnly"
import builderOrAdmin from "./builderOrAdmin"
import builderOnly from "./builderOnly"
import * as joiValidator from "./joi-validator"
const pkg = {
google,

View file

@ -1,10 +1,11 @@
const env = require("../environment")
const { Header } = require("../constants")
import env from "../environment"
import { Header } from "../constants"
import { BBContext } from "@budibase/types"
/**
* API Key only endpoint.
*/
module.exports = async (ctx, next) => {
export = async (ctx: BBContext, next: any) => {
const apiKey = ctx.request.headers[Header.API_KEY]
if (apiKey !== env.INTERNAL_API_KEY) {
ctx.throw(403, "Unauthorized")

View file

@ -1,21 +1,27 @@
const Joi = require("joi")
import Joi, { ObjectSchema } from "joi"
import { BBContext } from "@budibase/types"
function validate(schema, property) {
function validate(
schema: Joi.ObjectSchema | Joi.ArraySchema,
property: string
) {
// Return a Koa middleware function
return (ctx, next) => {
return (ctx: BBContext, next: any) => {
if (!schema) {
return next()
}
let params = null
// @ts-ignore
let reqProp = ctx.request?.[property]
if (ctx[property] != null) {
params = ctx[property]
} else if (ctx.request[property] != null) {
params = ctx.request[property]
} else if (reqProp != null) {
params = reqProp
}
// not all schemas have the append property e.g. array schemas
if (schema.append) {
schema = schema.append({
if ((schema as Joi.ObjectSchema).append) {
schema = (schema as Joi.ObjectSchema).append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
@ -30,10 +36,10 @@ function validate(schema, property) {
}
}
module.exports.body = schema => {
export function body(schema: Joi.ObjectSchema | Joi.ArraySchema) {
return validate(schema, "body")
}
module.exports.params = schema => {
export function params(schema: Joi.ObjectSchema | Joi.ArraySchema) {
return validate(schema, "params")
}

View file

@ -1,11 +1,15 @@
const google = require("../google")
import * as google from "../google"
import { Cookie, Config } from "../../../constants"
import { clearCookie, getCookie } from "../../../utils"
import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db"
import environment from "../../../environment"
import { getGlobalDB } from "../../../tenancy"
import { BBContext, Database, SSOProfile } from "@budibase/types"
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
const { Cookie, Config } = require("../../../constants")
const { clearCookie, getCookie } = require("../../../utils")
const { getScopedConfig, getPlatformUrl } = require("../../../db/utils")
const { doWithDB } = require("../../../db")
const environment = require("../../../environment")
const { getGlobalDB } = require("../../../tenancy")
type Passport = {
authenticate: any
}
async function fetchGoogleCreds() {
// try and get the config from the tenant
@ -22,7 +26,11 @@ async function fetchGoogleCreds() {
)
}
async function preAuth(passport, ctx, next) {
export async function preAuth(
passport: Passport,
ctx: BBContext,
next: Function
) {
// get the relevant config
const googleConfig = await fetchGoogleCreds()
const platformUrl = await getPlatformUrl({ tenantAware: false })
@ -41,7 +49,11 @@ async function preAuth(passport, ctx, next) {
})(ctx, next)
}
async function postAuth(passport, ctx, next) {
export async function postAuth(
passport: Passport,
ctx: BBContext,
next: Function
) {
// get the relevant config
const config = await fetchGoogleCreds()
const platformUrl = await getPlatformUrl({ tenantAware: false })
@ -56,15 +68,20 @@ async function postAuth(passport, ctx, next) {
clientSecret: config.clientSecret,
callbackURL: callbackUrl,
},
(accessToken, refreshToken, profile, done) => {
(
accessToken: string,
refreshToken: string,
profile: SSOProfile,
done: Function
) => {
clearCookie(ctx, Cookie.DatasourceAuth)
done(null, { accessToken, refreshToken })
}
),
{ successRedirect: "/", failureRedirect: "/error" },
async (err, tokens) => {
async (err: any, tokens: string[]) => {
// update the DB for the datasource with all the user info
await doWithDB(authStateCookie.appId, async db => {
await doWithDB(authStateCookie.appId, async (db: Database) => {
const datasource = await db.get(authStateCookie.datasourceId)
if (!datasource.config) {
datasource.config = {}
@ -78,6 +95,3 @@ async function postAuth(passport, ctx, next) {
}
)(ctx, next)
}
exports.preAuth = preAuth
exports.postAuth = postAuth

View file

@ -1,10 +1,15 @@
import { ssoCallbackUrl } from "./utils"
import { authenticateThirdParty } from "./third-party-common"
import { ConfigType, GoogleConfig, Database, SSOProfile } from "@budibase/types"
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
const { ssoCallbackUrl } = require("./utils")
const { authenticateThirdParty } = require("./third-party-common")
const { Config } = require("../../../constants")
const buildVerifyFn = saveUserFn => {
return (accessToken, refreshToken, profile, done) => {
export function buildVerifyFn(saveUserFn?: Function) {
return (
accessToken: string,
refreshToken: string,
profile: SSOProfile,
done: Function
) => {
const thirdPartyUser = {
provider: profile.provider, // should always be 'google'
providerType: "google",
@ -31,7 +36,11 @@ const buildVerifyFn = saveUserFn => {
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
* @returns Dynamically configured Passport Google Strategy
*/
exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
export async function strategyFactory(
config: GoogleConfig["config"],
callbackUrl: string,
saveUserFn?: Function
) {
try {
const { clientID, clientSecret } = config
@ -50,18 +59,15 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
},
verify
)
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error(
`Error constructing google authentication strategy: ${err}`,
err
)
throw new Error(`Error constructing google authentication strategy: ${err}`)
}
}
exports.getCallbackUrl = async function (db, config) {
return ssoCallbackUrl(db, config, Config.GOOGLE)
export async function getCallbackUrl(
db: Database,
config: { callbackURL?: string }
) {
return ssoCallbackUrl(db, config, ConfigType.GOOGLE)
}
// expose for testing
exports.buildVerifyFn = buildVerifyFn

View file

@ -1,18 +0,0 @@
const { Cookie } = require("../../constants")
const env = require("../../environment")
const { authError } = require("./utils")
exports.options = {
secretOrKey: env.JWT_SECRET,
jwtFromRequest: function (ctx) {
return ctx.cookies.get(Cookie.Auth)
},
}
exports.authenticate = async function (jwt, done) {
try {
return done(null, jwt)
} catch (err) {
return authError(done, "JWT invalid", err)
}
}

View file

@ -0,0 +1,19 @@
import { Cookie } from "../../constants"
import env from "../../environment"
import { authError } from "./utils"
import { BBContext } from "@budibase/types"
export const options = {
secretOrKey: env.JWT_SECRET,
jwtFromRequest: function (ctx: BBContext) {
return ctx.cookies.get(Cookie.Auth)
},
}
export async function authenticate(jwt: Function, done: Function) {
try {
return done(null, jwt)
} catch (err) {
return authError(done, "JWT invalid", err)
}
}

View file

@ -1,18 +1,18 @@
import { UserStatus } from "../../constants"
import { compare, newid } from "../../utils"
import env from "../../environment"
import * as users from "../../users"
import { authError } from "./utils"
import { createASession } from "../../security/sessions"
import { getTenantId } from "../../tenancy"
import { BBContext } from "@budibase/types"
const jwt = require("jsonwebtoken")
const { UserStatus } = require("../../constants")
const { compare } = require("../../hashing")
const env = require("../../environment")
const users = require("../../users")
const { authError } = require("./utils")
const { newid } = require("../../hashing")
const { createASession } = require("../../security/sessions")
const { getTenantId } = require("../../tenancy")
const INVALID_ERR = "Invalid credentials"
const SSO_NO_PASSWORD = "SSO user does not have a password set"
const EXPIRED = "This account has expired. Please reset your password"
exports.options = {
export const options = {
passReqToCallback: true,
}
@ -24,7 +24,12 @@ exports.options = {
* @param {*} done callback from passport to return user information and errors
* @returns The authenticated user, or errors if they occur
*/
exports.authenticate = async function (ctx, email, password, done) {
export async function authenticate(
ctx: BBContext,
email: string,
password: string,
done: Function
) {
if (!email) return authError(done, "Email Required")
if (!password) return authError(done, "Password Required")
@ -56,9 +61,9 @@ exports.authenticate = async function (ctx, email, password, done) {
const sessionId = newid()
const tenantId = getTenantId()
await createASession(dbUser._id, { sessionId, tenantId })
await createASession(dbUser._id!, { sessionId, tenantId })
dbUser.token = jwt.sign(
const token = jwt.sign(
{
userId: dbUser._id,
sessionId,
@ -69,7 +74,10 @@ exports.authenticate = async function (ctx, email, password, done) {
// Remove users password in payload
delete dbUser.password
return done(null, dbUser)
return done(null, {
...dbUser,
token,
})
} else {
return authError(done, INVALID_ERR)
}

View file

@ -1,10 +1,23 @@
const fetch = require("node-fetch")
import fetch from "node-fetch"
import { authenticateThirdParty } from "./third-party-common"
import { ssoCallbackUrl } from "./utils"
import {
Config,
ConfigType,
OIDCInnerCfg,
Database,
SSOProfile,
ThirdPartyUser,
OIDCConfiguration,
} from "@budibase/types"
const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
const { authenticateThirdParty } = require("./third-party-common")
const { ssoCallbackUrl } = require("./utils")
const { Config } = require("../../../constants")
const buildVerifyFn = saveUserFn => {
type JwtClaims = {
preferred_username: string
email: string
}
export function buildVerifyFn(saveUserFn?: Function) {
/**
* @param {*} issuer The identity provider base URL
* @param {*} sub The user ID
@ -17,17 +30,17 @@ const buildVerifyFn = saveUserFn => {
* @param {*} done The passport callback: err, user, info
*/
return async (
issuer,
sub,
profile,
jwtClaims,
accessToken,
refreshToken,
idToken,
params,
done
issuer: string,
sub: string,
profile: SSOProfile,
jwtClaims: JwtClaims,
accessToken: string,
refreshToken: string,
idToken: string,
params: any,
done: Function
) => {
const thirdPartyUser = {
const thirdPartyUser: ThirdPartyUser = {
// store the issuer info to enable sync in future
provider: issuer,
providerType: "oidc",
@ -53,7 +66,7 @@ const buildVerifyFn = saveUserFn => {
* @param {*} profile The structured profile created by passport using the user info endpoint
* @param {*} jwtClaims The claims returned in the id token
*/
function getEmail(profile, jwtClaims) {
function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
// profile not guaranteed to contain email e.g. github connected azure ad account
if (profile._json.email) {
return profile._json.email
@ -77,7 +90,7 @@ function getEmail(profile, jwtClaims) {
)
}
function validEmail(value) {
function validEmail(value: string) {
return (
value &&
!!value.match(
@ -91,19 +104,25 @@ function validEmail(value) {
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
* @returns Dynamically configured Passport OIDC Strategy
*/
exports.strategyFactory = async function (config, saveUserFn) {
export async function strategyFactory(
config: OIDCConfiguration,
saveUserFn?: Function
) {
try {
const verify = buildVerifyFn(saveUserFn)
const strategy = new OIDCStrategy(config, verify)
strategy.name = "oidc"
return strategy
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error("Error constructing OIDC authentication strategy", err)
throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
}
}
exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
export async function fetchStrategyConfig(
enrichedConfig: OIDCInnerCfg,
callbackUrl?: string
): Promise<OIDCConfiguration> {
try {
const { clientID, clientSecret, configUrl } = enrichedConfig
@ -135,13 +154,15 @@ exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
}
} catch (err) {
console.error(err)
throw new Error("Error constructing OIDC authentication configuration", err)
throw new Error(
`Error constructing OIDC authentication configuration - ${err}`
)
}
}
exports.getCallbackUrl = async function (db, config) {
return ssoCallbackUrl(db, config, Config.OIDC)
export async function getCallbackUrl(
db: Database,
config: { callbackURL?: string }
) {
return ssoCallbackUrl(db, config, ConfigType.OIDC)
}
// expose for testing
exports.buildVerifyFn = buildVerifyFn

View file

@ -4,7 +4,7 @@ const { data } = require("./utilities/mock-data")
const { DEFAULT_TENANT_ID } = require("../../../constants")
const { generateGlobalUserID } = require("../../../db/utils")
const { newid } = require("../../../hashing")
const { newid } = require("../../../utils")
const { doWithGlobalDB, doInTenant } = require("../../../tenancy")
const done = jest.fn()

View file

@ -1,21 +1,22 @@
const env = require("../../environment")
import env from "../../environment"
import { generateGlobalUserID } from "../../db"
import { authError } from "./utils"
import { newid } from "../../utils"
import { createASession } from "../../security/sessions"
import * as users from "../../users"
import { getGlobalDB, getTenantId } from "../../tenancy"
import fetch from "node-fetch"
import { ThirdPartyUser } from "@budibase/types"
const jwt = require("jsonwebtoken")
const { generateGlobalUserID } = require("../../db/utils")
const { authError } = require("./utils")
const { newid } = require("../../hashing")
const { createASession } = require("../../security/sessions")
const users = require("../../users")
const { getGlobalDB, getTenantId } = require("../../tenancy")
const fetch = require("node-fetch")
/**
* Common authentication logic for third parties. e.g. OAuth, OIDC.
*/
exports.authenticateThirdParty = async function (
thirdPartyUser,
requireLocalAccount = true,
done,
saveUserFn
export async function authenticateThirdParty(
thirdPartyUser: ThirdPartyUser,
requireLocalAccount: boolean = true,
done: Function,
saveUserFn?: Function
) {
if (!saveUserFn) {
throw new Error("Save user function must be provided")
@ -39,7 +40,7 @@ exports.authenticateThirdParty = async function (
// try to load by id
try {
dbUser = await db.get(userId)
} catch (err) {
} catch (err: any) {
// abort when not 404 error
if (!err.status || err.status !== 404) {
return authError(
@ -81,7 +82,7 @@ exports.authenticateThirdParty = async function (
// create or sync the user
try {
await saveUserFn(dbUser, false, false)
} catch (err) {
} catch (err: any) {
return authError(done, err)
}
@ -104,13 +105,16 @@ exports.authenticateThirdParty = async function (
return done(null, dbUser)
}
async function syncProfilePicture(user, thirdPartyUser) {
const pictureUrl = thirdPartyUser.profile._json.picture
async function syncProfilePicture(
user: ThirdPartyUser,
thirdPartyUser: ThirdPartyUser
) {
const pictureUrl = thirdPartyUser.profile?._json.picture
if (pictureUrl) {
const response = await fetch(pictureUrl)
if (response.status === 200) {
const type = response.headers.get("content-type")
const type = response.headers.get("content-type") as string
if (type.startsWith("image/")) {
user.pictureUrl = pictureUrl
}
@ -123,7 +127,7 @@ async function syncProfilePicture(user, thirdPartyUser) {
/**
* @returns a user that has been sync'd with third party information
*/
async function syncUser(user, thirdPartyUser) {
async function syncUser(user: ThirdPartyUser, thirdPartyUser: ThirdPartyUser) {
// provider
user.provider = thirdPartyUser.provider
user.providerType = thirdPartyUser.providerType

View file

@ -1,6 +1,6 @@
const { isMultiTenant, getTenantId } = require("../../tenancy")
const { getScopedConfig } = require("../../db/utils")
const { Config } = require("../../constants")
import { isMultiTenant, getTenantId } from "../../tenancy"
import { getScopedConfig } from "../../db"
import { ConfigType, Database, Config } from "@budibase/types"
/**
* Utility to handle authentication errors.
@ -10,7 +10,7 @@ const { Config } = require("../../constants")
* @param {*} err (Optional) error that will be logged
*/
exports.authError = function (done, message, err = null) {
export function authError(done: Function, message: string, err?: any) {
return done(
err,
null, // never return a user
@ -18,13 +18,17 @@ exports.authError = function (done, message, err = null) {
)
}
exports.ssoCallbackUrl = async (db, config, type) => {
export async function ssoCallbackUrl(
db: Database,
config?: { callbackURL?: string },
type?: ConfigType
) {
// incase there is a callback URL from before
if (config && config.callbackURL) {
return config.callbackURL
}
const publicConfig = await getScopedConfig(db, {
type: Config.SETTINGS,
type: ConfigType.SETTINGS,
})
let callbackUrl = `/api/global/auth`

View file

@ -8,15 +8,15 @@ import {
TenantResolutionStrategy,
} from "@budibase/types"
const tenancy = (
export = function (
allowQueryStringPatterns: EndpointMatcher[],
noTenancyPatterns: EndpointMatcher[],
opts = { noTenancyRequired: false }
) => {
opts: { noTenancyRequired?: boolean } = { noTenancyRequired: false }
) {
const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
return async function (ctx: BBContext, next: any) {
return async function (ctx: BBContext | any, next: any) {
const allowNoTenant =
opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
const tenantOpts: GetTenantIdOptions = {
@ -33,5 +33,3 @@ const tenancy = (
return doInTenant(tenantId, next)
}
}
export = tenancy

View file

@ -3,7 +3,7 @@ const { runMigrations, getMigrationsDoc } = require("../index")
const { getDB } = require("../../db")
const {
StaticDatabases,
} = require("../../db/utils")
} = require("../../constants")
let db

View file

@ -0,0 +1,5 @@
import { randomUUID } from "crypto"

/**
 * Generates a new unique identifier: a v4 UUID with the dashes stripped,
 * i.e. 32 lowercase hex characters. Uses the built-in crypto module
 * instead of the third-party "uuid" package (same output format).
 */
export function newid() {
  return randomUUID().replace(/-/g, "")
}

View file

@ -1,426 +1,2 @@
const sanitize = require("sanitize-s3-objectkey")
import AWS from "aws-sdk"
import stream from "stream"
import fetch from "node-fetch"
import tar from "tar-fs"
const zlib = require("zlib")
import { promisify } from "util"
import { join } from "path"
import fs from "fs"
import env from "../environment"
import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db/utils"
const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
const STATE = {
bucketCreationPromises: {},
}
type ListParams = {
ContinuationToken?: string
}
type UploadParams = {
bucket: string
filename: string
path: string
type?: string
// can be undefined, we will remove it
metadata?: {
[key: string]: string | undefined
}
}
const CONTENT_TYPE_MAP: any = {
txt: "text/plain",
html: "text/html",
css: "text/css",
js: "application/javascript",
json: "application/json",
gz: "application/gzip",
}
const STRING_CONTENT_TYPES = [
CONTENT_TYPE_MAP.html,
CONTENT_TYPE_MAP.css,
CONTENT_TYPE_MAP.js,
CONTENT_TYPE_MAP.json,
]
// does normal sanitization and then swaps dev apps to apps
export function sanitizeKey(input: string) {
return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
}
// simply handles the dev app to app conversion
export function sanitizeBucket(input: string) {
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
}
function publicPolicy(bucketName: string) {
return {
Version: "2012-10-17",
Statement: [
{
Effect: "Allow",
Principal: {
AWS: ["*"],
},
Action: "s3:GetObject",
Resource: [`arn:aws:s3:::${bucketName}/*`],
},
],
}
}
const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]
/**
* Gets a connection to the object store using the S3 SDK.
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor
*/
export const ObjectStore = (bucket: string) => {
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
Bucket: sanitizeBucket(bucket),
}
}
if (env.MINIO_URL) {
config.endpoint = env.MINIO_URL
}
return new AWS.S3(config)
}
/**
* Given an object store and a bucket name this will make sure the bucket exists,
* if it does not exist then it will create it.
*/
export const makeSureBucketExists = async (client: any, bucketName: string) => {
bucketName = sanitizeBucket(bucketName)
try {
await client
.headBucket({
Bucket: bucketName,
})
.promise()
} catch (err: any) {
const promises: any = STATE.bucketCreationPromises
const doesntExist = err.statusCode === 404,
noAccess = err.statusCode === 403
if (promises[bucketName]) {
await promises[bucketName]
} else if (doesntExist || noAccess) {
if (doesntExist) {
// bucket doesn't exist create it
promises[bucketName] = client
.createBucket({
Bucket: bucketName,
})
.promise()
await promises[bucketName]
delete promises[bucketName]
}
// public buckets are quite hidden in the system, make sure
// no bucket is set accidentally
if (PUBLIC_BUCKETS.includes(bucketName)) {
await client
.putBucketPolicy({
Bucket: bucketName,
Policy: JSON.stringify(publicPolicy(bucketName)),
})
.promise()
}
} else {
throw new Error("Unable to write to object store bucket.")
}
}
}
/**
* Uploads the contents of a file given the required parameters, useful when
* temp files in use (for example file uploaded as an attachment).
*/
export const upload = async ({
bucket: bucketName,
filename,
path,
type,
metadata,
}: UploadParams) => {
const extension = filename.split(".").pop()
const fileBytes = fs.readFileSync(path)
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
let contentType = type
if (!contentType) {
contentType = extension
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
}
const config: any = {
// windows file paths need to be converted to forward slashes for s3
Key: sanitizeKey(filename),
Body: fileBytes,
ContentType: contentType,
}
if (metadata && typeof metadata === "object") {
// remove any nullish keys from the metadata object, as these may be considered invalid
for (let key of Object.keys(metadata)) {
if (!metadata[key] || typeof metadata[key] !== "string") {
delete metadata[key]
}
}
config.Metadata = metadata
}
return objectStore.upload(config).promise()
}
/**
* Similar to the upload function but can be used to send a file stream
* through to the object store.
*/
export const streamUpload = async (
bucketName: string,
filename: string,
stream: any,
extra = {}
) => {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
// Set content type for certain known extensions
if (filename?.endsWith(".js")) {
extra = {
...extra,
ContentType: "application/javascript",
}
} else if (filename?.endsWith(".svg")) {
extra = {
...extra,
ContentType: "image",
}
}
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
Body: stream,
...extra,
}
return objectStore.upload(params).promise()
}
/**
* retrieves the contents of a file from the object store, if it is a known content type it
* will be converted, otherwise it will be returned as a buffer stream.
*/
export const retrieve = async (bucketName: string, filepath: string) => {
const objectStore = ObjectStore(bucketName)
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filepath),
}
const response: any = await objectStore.getObject(params).promise()
// currently these are all strings
if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
return response.Body.toString("utf8")
} else {
return response.Body
}
}
/**
 * Lists every object under the given path in a bucket, following S3
 * pagination until the listing is complete.
 */
export const listAllObjects = async (bucketName: string, path: string) => {
  const objectStore = ObjectStore(bucketName)
  const list = (params: ListParams = {}) => {
    return objectStore
      .listObjectsV2({
        ...params,
        Bucket: sanitizeBucket(bucketName),
        Prefix: sanitizeKey(path),
      })
      .promise()
  }
  let isTruncated = false,
    token,
    objects: AWS.S3.Types.Object[] = []
  do {
    let params: ListParams = {}
    if (token) {
      params.ContinuationToken = token
    }
    const response = await list(params)
    if (response.Contents) {
      objects = objects.concat(response.Contents)
    }
    isTruncated = !!response.IsTruncated
    // advance the pagination cursor - previously the token was never
    // updated, so truncated (>1000 key) listings refetched the first
    // page forever
    token = response.NextContinuationToken
  } while (isTruncated)
  return objects
}
/**
* Same as retrieval function but puts to a temporary file.
*/
export const retrieveToTmp = async (bucketName: string, filepath: string) => {
bucketName = sanitizeBucket(bucketName)
filepath = sanitizeKey(filepath)
const data = await retrieve(bucketName, filepath)
const outputPath = join(budibaseTempDir(), v4())
fs.writeFileSync(outputPath, data)
return outputPath
}
export const retrieveDirectory = async (bucketName: string, path: string) => {
let writePath = join(budibaseTempDir(), v4())
fs.mkdirSync(writePath)
const objects = await listAllObjects(bucketName, path)
let fullObjects = await Promise.all(
objects.map(obj => retrieve(bucketName, obj.Key!))
)
let count = 0
for (let obj of objects) {
const filename = obj.Key!
const data = fullObjects[count++]
const possiblePath = filename.split("/")
if (possiblePath.length > 1) {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
}
fs.writeFileSync(join(writePath, ...possiblePath), data)
}
return writePath
}
/**
* Delete a single file.
*/
export const deleteFile = async (bucketName: string, filepath: string) => {
  const objectStore = ObjectStore(bucketName)
  await makeSureBucketExists(objectStore, bucketName)
  const params = {
    Bucket: bucketName,
    Key: filepath,
  }
  // .promise() is required to actually dispatch the request - without it
  // an unsent AWS.Request object is returned and nothing gets deleted
  // (matches deleteFiles/deleteObjects below)
  return objectStore.deleteObject(params).promise()
}
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
Delete: {
Objects: filepaths.map((path: any) => ({ Key: path })),
},
}
return objectStore.deleteObjects(params).promise()
}
/**
* Delete a path, including everything within.
*/
export const deleteFolder = async (
bucketName: string,
folder: string
): Promise<any> => {
bucketName = sanitizeBucket(bucketName)
folder = sanitizeKey(folder)
const client = ObjectStore(bucketName)
const listParams = {
Bucket: bucketName,
Prefix: folder,
}
let response: any = await client.listObjects(listParams).promise()
if (response.Contents.length === 0) {
return
}
const deleteParams: any = {
Bucket: bucketName,
Delete: {
Objects: [],
},
}
response.Contents.forEach((content: any) => {
deleteParams.Delete.Objects.push({ Key: content.Key })
})
response = await client.deleteObjects(deleteParams).promise()
// can only empty 1000 items at once
if (response.Deleted.length === 1000) {
return deleteFolder(bucketName, folder)
}
}
export const uploadDirectory = async (
bucketName: string,
localPath: string,
bucketPath: string
) => {
bucketName = sanitizeBucket(bucketName)
let uploads = []
const files = fs.readdirSync(localPath, { withFileTypes: true })
for (let file of files) {
const path = sanitizeKey(join(bucketPath, file.name))
const local = join(localPath, file.name)
if (file.isDirectory()) {
uploads.push(uploadDirectory(bucketName, local, path))
} else {
uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
}
}
await Promise.all(uploads)
return files
}
export const downloadTarballDirect = async (
url: string,
path: string,
headers = {}
) => {
path = sanitizeKey(path)
const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
}
export const downloadTarball = async (
url: string,
bucketName: string,
path: string
) => {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)
const response = await fetch(url)
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
const tmpPath = join(budibaseTempDir(), path)
await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
if (!env.isTest() && env.SELF_HOSTED) {
await uploadDirectory(bucketName, tmpPath, path)
}
// return the temporary path incase there is a use for it
return tmpPath
}
export * from "./objectStore"
export * from "./utils"

View file

@ -0,0 +1,426 @@
const sanitize = require("sanitize-s3-objectkey")
import AWS from "aws-sdk"
import stream from "stream"
import fetch from "node-fetch"
import tar from "tar-fs"
const zlib = require("zlib")
import { promisify } from "util"
import { join } from "path"
import fs from "fs"
import env from "../environment"
import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
const STATE = {
bucketCreationPromises: {},
}
type ListParams = {
ContinuationToken?: string
}
type UploadParams = {
bucket: string
filename: string
path: string
type?: string
// can be undefined, we will remove it
metadata?: {
[key: string]: string | undefined
}
}
const CONTENT_TYPE_MAP: any = {
txt: "text/plain",
html: "text/html",
css: "text/css",
js: "application/javascript",
json: "application/json",
gz: "application/gzip",
}
const STRING_CONTENT_TYPES = [
CONTENT_TYPE_MAP.html,
CONTENT_TYPE_MAP.css,
CONTENT_TYPE_MAP.js,
CONTENT_TYPE_MAP.json,
]
// does normal sanitization and then swaps dev apps to apps
export function sanitizeKey(input: string) {
  // run the bucket-name conversion first, then the generic s3 object-key
  // sanitisation, and finally normalise windows path separators to the
  // forward slashes s3 expects
  const cleaned = sanitize(sanitizeBucket(input))
  return cleaned.replace(/\\/g, "/")
}
// simply handles the dev app to app conversion
export function sanitizeBucket(input: string) {
  // swap every occurrence of the dev app prefix for the prod app prefix
  const devPrefix = new RegExp(APP_DEV_PREFIX, "g")
  return input.replace(devPrefix, APP_PREFIX)
}
function publicPolicy(bucketName: string) {
  // AWS policy document granting anonymous read access to every
  // object inside the named bucket
  const readStatement = {
    Effect: "Allow",
    Principal: {
      AWS: ["*"],
    },
    Action: "s3:GetObject",
    Resource: [`arn:aws:s3:::${bucketName}/*`],
  }
  return {
    Version: "2012-10-17",
    Statement: [readStatement],
  }
}
const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]
/**
* Gets a connection to the object store using the S3 SDK.
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor
*/
export const ObjectStore = (bucket: string) => {
  // base minio/s3 connection settings, drawn from the environment
  const config: any = {
    s3ForcePathStyle: true,
    signatureVersion: "v4",
    apiVersion: "2006-03-01",
    accessKeyId: env.MINIO_ACCESS_KEY,
    secretAccessKey: env.MINIO_SECRET_KEY,
    region: env.AWS_REGION,
  }
  // scope the client to a default bucket when one was supplied
  if (bucket) {
    config.params = { Bucket: sanitizeBucket(bucket) }
  }
  // self hosted / minio deployments provide an explicit endpoint
  if (env.MINIO_URL) {
    config.endpoint = env.MINIO_URL
  }
  return new AWS.S3(config)
}
/**
* Given an object store and a bucket name this will make sure the bucket exists,
* if it does not exist then it will create it.
*/
export const makeSureBucketExists = async (client: any, bucketName: string) => {
  bucketName = sanitizeBucket(bucketName)
  try {
    // a successful headBucket means the bucket already exists - nothing to do
    await client
      .headBucket({
        Bucket: bucketName,
      })
      .promise()
  } catch (err: any) {
    // shared map of in-flight creation promises so concurrent callers
    // don't try to create the same bucket twice
    const promises: any = STATE.bucketCreationPromises
    const doesntExist = err.statusCode === 404,
      noAccess = err.statusCode === 403
    if (promises[bucketName]) {
      // another caller is already creating this bucket - wait for it
      await promises[bucketName]
    } else if (doesntExist || noAccess) {
      if (doesntExist) {
        // bucket doesn't exist create it
        promises[bucketName] = client
          .createBucket({
            Bucket: bucketName,
          })
          .promise()
        await promises[bucketName]
        delete promises[bucketName]
      }
      // public buckets are quite hidden in the system, make sure
      // no bucket is set accidentally
      if (PUBLIC_BUCKETS.includes(bucketName)) {
        await client
          .putBucketPolicy({
            Bucket: bucketName,
            Policy: JSON.stringify(publicPolicy(bucketName)),
          })
          .promise()
      }
    } else {
      // any other failure (network, credential misconfiguration etc.)
      throw new Error("Unable to write to object store bucket.")
    }
  }
}
/**
* Uploads the contents of a file given the required parameters, useful when
* temp files in use (for example file uploaded as an attachment).
*/
// Uploads the contents of a local file to the object store.
// The content type comes from `type` when given, otherwise it is guessed
// from the file extension (unknown extensions end up with an undefined
// ContentType); nullish/non-string metadata values are stripped first.
export const upload = async ({
  bucket: bucketName,
  filename,
  path,
  type,
  metadata,
}: UploadParams) => {
  const extension = filename.split(".").pop()
  // read the whole file into memory - intended for attachment-sized files
  const fileBytes = fs.readFileSync(path)
  const objectStore = ObjectStore(bucketName)
  await makeSureBucketExists(objectStore, bucketName)
  // fall back to an extension-derived content type, or plain text
  let contentType = type
  if (!contentType) {
    contentType = extension
      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
      : CONTENT_TYPE_MAP.txt
  }
  const config: any = {
    // windows file paths need to be converted to forward slashes for s3
    Key: sanitizeKey(filename),
    Body: fileBytes,
    ContentType: contentType,
  }
  if (metadata && typeof metadata === "object") {
    // remove any nullish keys from the metadata object, as these may be considered invalid
    for (let key of Object.keys(metadata)) {
      if (!metadata[key] || typeof metadata[key] !== "string") {
        delete metadata[key]
      }
    }
    config.Metadata = metadata
  }
  return objectStore.upload(config).promise()
}
/**
* Similar to the upload function but can be used to send a file stream
* through to the object store.
*/
export const streamUpload = async (
bucketName: string,
filename: string,
stream: any,
extra = {}
) => {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
// Set content type for certain known extensions
if (filename?.endsWith(".js")) {
extra = {
...extra,
ContentType: "application/javascript",
}
} else if (filename?.endsWith(".svg")) {
extra = {
...extra,
ContentType: "image",
}
}
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
Body: stream,
...extra,
}
return objectStore.upload(params).promise()
}
/**
* retrieves the contents of a file from the object store, if it is a known content type it
* will be converted, otherwise it will be returned as a buffer stream.
*/
export const retrieve = async (bucketName: string, filepath: string) => {
  const objectStore = ObjectStore(bucketName)
  const response: any = await objectStore
    .getObject({
      Bucket: sanitizeBucket(bucketName),
      Key: sanitizeKey(filepath),
    })
    .promise()
  // text-based content types are decoded to a utf8 string, anything
  // else is handed back as the raw body
  return STRING_CONTENT_TYPES.includes(response.ContentType)
    ? response.Body.toString("utf8")
    : response.Body
}
/**
 * Lists every object under the given path in a bucket, following S3
 * pagination until the listing is complete.
 */
export const listAllObjects = async (bucketName: string, path: string) => {
  const objectStore = ObjectStore(bucketName)
  const list = (params: ListParams = {}) => {
    return objectStore
      .listObjectsV2({
        ...params,
        Bucket: sanitizeBucket(bucketName),
        Prefix: sanitizeKey(path),
      })
      .promise()
  }
  let isTruncated = false,
    token,
    objects: AWS.S3.Types.Object[] = []
  do {
    let params: ListParams = {}
    if (token) {
      params.ContinuationToken = token
    }
    const response = await list(params)
    if (response.Contents) {
      objects = objects.concat(response.Contents)
    }
    isTruncated = !!response.IsTruncated
    // advance the pagination cursor - previously the token was never
    // updated, so truncated (>1000 key) listings refetched the first
    // page forever
    token = response.NextContinuationToken
  } while (isTruncated)
  return objects
}
/**
* Same as retrieval function but puts to a temporary file.
*/
export const retrieveToTmp = async (bucketName: string, filepath: string) => {
  const bucket = sanitizeBucket(bucketName)
  const key = sanitizeKey(filepath)
  // pull the object down and persist it under a random temp filename
  const outputPath = join(budibaseTempDir(), v4())
  fs.writeFileSync(outputPath, await retrieve(bucket, key))
  return outputPath
}
// Downloads every object under the given path into a freshly created
// temp directory, recreating the key hierarchy as sub-directories, and
// returns that directory's path.
export const retrieveDirectory = async (bucketName: string, path: string) => {
  let writePath = join(budibaseTempDir(), v4())
  fs.mkdirSync(writePath)
  const objects = await listAllObjects(bucketName, path)
  // fetch all object bodies in parallel - order matches `objects`
  let fullObjects = await Promise.all(
    objects.map(obj => retrieve(bucketName, obj.Key!))
  )
  let count = 0
  for (let obj of objects) {
    const filename = obj.Key!
    const data = fullObjects[count++]
    const possiblePath = filename.split("/")
    // make sure any intermediate directories exist before writing the file
    if (possiblePath.length > 1) {
      const dirs = possiblePath.slice(0, possiblePath.length - 1)
      fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
    }
    fs.writeFileSync(join(writePath, ...possiblePath), data)
  }
  return writePath
}
/**
* Delete a single file.
*/
export const deleteFile = async (bucketName: string, filepath: string) => {
  const objectStore = ObjectStore(bucketName)
  await makeSureBucketExists(objectStore, bucketName)
  const params = {
    Bucket: bucketName,
    Key: filepath,
  }
  // .promise() is required to actually dispatch the request - without it
  // an unsent AWS.Request object is returned and nothing gets deleted
  // (matches deleteFiles/deleteObjects below)
  return objectStore.deleteObject(params).promise()
}
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
  const objectStore = ObjectStore(bucketName)
  await makeSureBucketExists(objectStore, bucketName)
  // bulk delete - a single request covering the whole set of keys
  return objectStore
    .deleteObjects({
      Bucket: bucketName,
      Delete: {
        Objects: filepaths.map((path: any) => ({ Key: path })),
      },
    })
    .promise()
}
/**
* Delete a path, including everything within.
*/
export const deleteFolder = async (
  bucketName: string,
  folder: string
): Promise<any> => {
  bucketName = sanitizeBucket(bucketName)
  folder = sanitizeKey(folder)
  const client = ObjectStore(bucketName)
  const listParams = {
    Bucket: bucketName,
    Prefix: folder,
  }
  // list the keys under the prefix - listObjects returns at most
  // 1000 keys per call
  let response: any = await client.listObjects(listParams).promise()
  if (response.Contents.length === 0) {
    return
  }
  const deleteParams: any = {
    Bucket: bucketName,
    Delete: {
      Objects: [],
    },
  }
  // build a bulk delete request from everything that was listed
  response.Contents.forEach((content: any) => {
    deleteParams.Delete.Objects.push({ Key: content.Key })
  })
  response = await client.deleteObjects(deleteParams).promise()
  // can only empty 1000 items at once
  if (response.Deleted.length === 1000) {
    return deleteFolder(bucketName, folder)
  }
}
export const uploadDirectory = async (
  bucketName: string,
  localPath: string,
  bucketPath: string
) => {
  bucketName = sanitizeBucket(bucketName)
  const entries = fs.readdirSync(localPath, { withFileTypes: true })
  // recurse into sub-directories, stream-upload plain files - all
  // uploads run concurrently
  const uploads = entries.map(entry => {
    const destination = sanitizeKey(join(bucketPath, entry.name))
    const source = join(localPath, entry.name)
    return entry.isDirectory()
      ? uploadDirectory(bucketName, source, destination)
      : streamUpload(bucketName, destination, fs.createReadStream(source))
  })
  await Promise.all(uploads)
  return entries
}
export const downloadTarballDirect = async (
  url: string,
  path: string,
  headers = {}
) => {
  const destination = sanitizeKey(path)
  const response = await fetch(url, { headers })
  if (!response.ok) {
    throw new Error(`unexpected response ${response.statusText}`)
  }
  // gunzip the response body on the fly and unpack it straight to disk
  await streamPipeline(response.body, zlib.Unzip(), tar.extract(destination))
}
// Downloads a gzipped tarball from a URL, extracts it to the budibase
// temp directory and (when self hosted) mirrors the extracted files
// into the object store. Returns the local extraction path.
export const downloadTarball = async (
  url: string,
  bucketName: string,
  path: string
) => {
  bucketName = sanitizeBucket(bucketName)
  path = sanitizeKey(path)
  const response = await fetch(url)
  if (!response.ok) {
    throw new Error(`unexpected response ${response.statusText}`)
  }
  // unpack to a temp directory first
  const tmpPath = join(budibaseTempDir(), path)
  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
  // only upload to the object store when self hosted and not under test
  if (!env.isTest() && env.SELF_HOSTED) {
    await uploadDirectory(bucketName, tmpPath, path)
  }
  // return the temporary path incase there is a use for it
  return tmpPath
}

View file

@ -1,14 +1,15 @@
const { join } = require("path")
const { tmpdir } = require("os")
const fs = require("fs")
const env = require("../environment")
import { join } from "path"
import { tmpdir } from "os"
import fs from "fs"
import env from "../environment"
/****************************************************
* NOTE: When adding a new bucket - name *
* sure that S3 usages (like budibase-infra) *
* have been updated to have a unique bucket name. *
****************************************************/
exports.ObjectStoreBuckets = {
// can't be an enum - only numbers can be used for computed types
export const ObjectStoreBuckets = {
BACKUPS: env.BACKUPS_BUCKET_NAME,
APPS: env.APPS_BUCKET_NAME,
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
@ -22,6 +23,6 @@ if (!fs.existsSync(bbTmp)) {
fs.mkdirSync(bbTmp)
}
exports.budibaseTempDir = function () {
// Returns the shared budibase temp directory (created at module load
// if it did not already exist) used for scratch files and extractions.
export function budibaseTempDir() {
  return bbTmp
}

View file

@ -1,11 +0,0 @@
const env = require("./environment")
exports.pinoSettings = () => ({
prettyPrint: {
levelFirst: true,
},
level: env.LOG_LEVEL || "error",
autoLogging: {
ignore: req => req.url.includes("/health"),
},
})

View file

@ -0,0 +1,13 @@
import env from "./environment"
export function pinoSettings() {
  // default to only logging errors unless configured otherwise
  const logLevel = env.LOG_LEVEL || "error"
  return {
    // print the log level before the message
    prettyPrint: {
      levelFirst: true,
    },
    level: logLevel,
    autoLogging: {
      // health check probes are noisy - don't auto-log them
      ignore: (req: { url: string }) => req.url.includes("/health"),
    },
  }
}

View file

@ -1,13 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
import * as generic from "../cache/generic"
import * as user from "../cache/user"
import * as app from "../cache/appMetadata"
import * as writethrough from "../cache/writethrough"
export = {
app,
user,
writethrough,
...generic,
}

View file

@ -1,26 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
import {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
doInContext,
} from "../context"
import * as identity from "../context/identity"
export = {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
doInContext,
identity,
}

View file

@ -1,4 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
export * from "../objectStore"
export * from "../objectStore/utils"

View file

@ -1,13 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
import Client from "../redis"
import utils from "../redis/utils"
import clients from "../redis/init"
import * as redlock from "../redis/redlock"
export = {
Client,
utils,
clients,
redlock,
}

View file

@ -1,4 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
export * from "../utils"
export * from "../hashing"

View file

@ -1,7 +1 @@
import * as utils from "./utils"
const pkg = {
...utils,
}
export = pkg
export * from "./utils"

View file

@ -1,9 +1,5 @@
const {
DatasourceFieldType,
QueryType,
PluginType,
} = require("@budibase/types")
const joi = require("joi")
import { DatasourceFieldType, QueryType, PluginType } from "@budibase/types"
import joi from "joi"
const DATASOURCE_TYPES = [
"Relational",
@ -14,14 +10,14 @@ const DATASOURCE_TYPES = [
"API",
]
function runJoi(validator, schema) {
function runJoi(validator: joi.Schema, schema: any) {
  // validate and surface any problem as an exception
  const result = validator.validate(schema)
  if (result.error) {
    throw result.error
  }
}
function validateComponent(schema) {
function validateComponent(schema: any) {
const validator = joi.object({
type: joi.string().allow("component").required(),
metadata: joi.object().unknown(true).required(),
@ -37,7 +33,7 @@ function validateComponent(schema) {
runJoi(validator, schema)
}
function validateDatasource(schema) {
function validateDatasource(schema: any) {
const fieldValidator = joi.object({
type: joi
.string()
@ -86,7 +82,7 @@ function validateDatasource(schema) {
runJoi(validator, schema)
}
exports.validate = schema => {
export function validate(schema: any) {
switch (schema?.type) {
case PluginType.COMPONENT:
validateComponent(schema)

View file

@ -1,5 +1,5 @@
import events from "events"
import { timeout } from "../../utils"
import { timeout } from "../utils"
/**
* Bull works with a Job wrapper around all messages that contains a lot more information about

View file

@ -39,7 +39,7 @@ export function createQueue<T>(
return queue
}
exports.shutdown = async () => {
export async function shutdown() {
if (QUEUES.length) {
clearInterval(cleanupInterval)
for (let queue of QUEUES) {

View file

@ -1,278 +1,6 @@
import RedisWrapper from "../redis"
const env = require("../environment")
// ioredis mock is all in memory
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
const {
addDbPrefix,
removeDbPrefix,
getRedisOptions,
SEPARATOR,
SelectableDatabases,
} = require("./utils")
const RETRY_PERIOD_MS = 2000
const STARTUP_TIMEOUT_MS = 5000
const CLUSTERED = false
const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT
// for testing just generate the client once
let CLOSED = false
let CLIENTS: { [key: number]: any } = {}
// if in test always connected
let CONNECTED = env.isTest()
function pickClient(selectDb: number): any {
return CLIENTS[selectDb]
}
/**
 * Shared failure handler for a redis client - tears down the client for
 * the given select DB, clears the startup timer and schedules a retry.
 */
function connectionError(
  selectDb: number,
  timeout: NodeJS.Timeout,
  err: Error | string
) {
  // manually shut down, ignore errors
  if (CLOSED) {
    return
  }
  pickClient(selectDb).disconnect()
  CLOSED = true
  // always clear this on error
  clearTimeout(timeout)
  CONNECTED = false
  console.error("Redis connection failed - " + err)
  setTimeout(() => {
    // retry the select DB that actually failed - previously this called
    // init() with no argument, re-initialising only the default DB
    init(selectDb)
  }, RETRY_PERIOD_MS)
}
/**
* Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
* will return the ioredis client which will be ready to use.
*/
function init(selectDb = DEFAULT_SELECT_DB) {
  let timeout: NodeJS.Timeout
  CLOSED = false
  let client = pickClient(selectDb)
  // already connected, ignore
  if (client && CONNECTED) {
    return
  }
  // testing uses a single in memory client
  if (env.isTest()) {
    CLIENTS[selectDb] = new Redis(getRedisOptions())
  }
  // start the timer - only allowed 5 seconds to connect
  timeout = setTimeout(() => {
    if (!CONNECTED) {
      connectionError(
        selectDb,
        timeout,
        "Did not successfully connect in timeout"
      )
    }
  }, STARTUP_TIMEOUT_MS)
  // disconnect any lingering client
  if (client) {
    client.disconnect()
  }
  // build the right flavour of client: clustered, protocol-url based
  // (redis:// style) or plain options based
  const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)
  if (CLUSTERED) {
    client = new Redis.Cluster([{ host, port }], opts)
  } else if (redisProtocolUrl) {
    client = new Redis(redisProtocolUrl)
  } else {
    client = new Redis(opts)
  }
  // attach handlers - failures route through connectionError which
  // retries, a successful connect cancels the startup timer
  client.on("end", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("error", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("connect", () => {
    clearTimeout(timeout)
    CONNECTED = true
  })
  CLIENTS[selectDb] = client
}
/**
 * Resolves once the redis client for the given select DB is connected,
 * initialising it first if it does not exist yet.
 */
function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
  return new Promise(resolve => {
    if (pickClient(selectDb) == null) {
      // initialise the requested select DB - previously init() was called
      // with no argument, so only the default DB ever got set up here
      init(selectDb)
    } else if (CONNECTED) {
      resolve("")
      return
    }
    // check if the connection is ready
    const interval = setInterval(() => {
      if (CONNECTED) {
        clearInterval(interval)
        resolve("")
      }
    }, 500)
  })
}
/**
* Utility function, takes a redis stream and converts it to a promisified response -
* this can only be done with redis streams because they will have an end.
* @param stream A redis stream, specifically as this type of stream will have an end.
* @param client The client to use for further lookups.
* @return {Promise<object>} The final output of the stream
*/
function promisifyStream(stream: any, client: RedisWrapper) {
  return new Promise((resolve, reject) => {
    // collect into a Set - scan streams can emit duplicate keys
    const outputKeys = new Set()
    stream.on("data", (keys: string[]) => {
      keys.forEach(key => {
        outputKeys.add(key)
      })
    })
    stream.on("error", (err: Error) => {
      reject(err)
    })
    stream.on("end", async () => {
      const keysArray: string[] = Array.from(outputKeys) as string[]
      try {
        // fetch every value in parallel once the key scan completes
        let getPromises = []
        for (let key of keysArray) {
          getPromises.push(client.get(key))
        }
        const jsonArray = await Promise.all(getPromises)
        resolve(
          keysArray.map(key => ({
            key: removeDbPrefix(key),
            // NOTE(review): client.get already JSON.parses object values,
            // so this second parse appears to assume get returned a raw
            // string - confirm stored values are strings here
            value: JSON.parse(jsonArray.shift()),
          }))
        )
      } catch (err) {
        reject(err)
      }
    })
  })
}
/**
 * Wrapper around the shared ioredis connection for one redis SELECT index.
 * Every key is namespaced with a logical database name prefix
 * (e.g. "users-<key>") and values are JSON (de)serialised where possible.
 */
export = class RedisWrapper {
  // logical database name - used as the prefix on every key
  _db: string
  // redis SELECT index this wrapper is bound to
  _select: number
  /**
   * @param db The logical database name used to prefix keys.
   * @param selectDb Optional redis SELECT index (defaults to the default DB).
   */
  constructor(db: string, selectDb: number | null = null) {
    this._db = db
    this._select = selectDb || DEFAULT_SELECT_DB
  }
  /** Returns the shared underlying ioredis client for this SELECT index. */
  getClient() {
    return pickClient(this._select)
  }
  /** (Re)initialises the shared client and waits until it is connected. */
  async init() {
    CLOSED = false
    init(this._select)
    await waitForConnection(this._select)
    return this
  }
  /**
   * Disconnects, flagging the shutdown as deliberate so the error handler
   * does not schedule a reconnect.
   */
  async finish() {
    CLOSED = true
    this.getClient().disconnect()
  }
  /**
   * SCANs for all keys under this database's prefix (optionally narrowed by
   * a key fragment) and resolves each key's value.
   * @param key Optional key prefix to narrow the scan.
   * @return a list of { key, value } objects, keys returned unprefixed.
   */
  async scan(key = ""): Promise<any> {
    const db = this._db
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      // cluster mode - scans have to run against a master node
      let node = this.getClient().nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
    }
    return promisifyStream(stream, this.getClient())
  }
  /** Raw KEYS lookup, with the db prefix applied to the pattern. */
  async keys(pattern: string) {
    const db = this._db
    return this.getClient().keys(addDbPrefix(db, pattern))
  }
  /**
   * Fetches a single value - JSON is parsed, anything unparseable is
   * returned as-is.
   */
  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    // NOTE(review): GET returns a string, so response.key should always be
    // undefined here - confirm whether this branch is vestigial
    if (response != null && response.key) {
      response.key = key
    }
    // if its not an object just return the response
    try {
      return JSON.parse(response)
    } catch (err) {
      return response
    }
  }
  /**
   * MGETs a batch of keys in one round trip.
   * @return map of original (unprefixed) key -> parsed value; keys with no
   *   stored value are omitted from the result.
   */
  async bulkGet(keys: string[]) {
    const db = this._db
    if (keys.length === 0) {
      return {}
    }
    const prefixedKeys = keys.map(key => addDbPrefix(db, key))
    let response = await this.getClient().mget(prefixedKeys)
    if (Array.isArray(response)) {
      let final: any = {}
      let count = 0
      for (let result of response) {
        if (result) {
          let parsed
          try {
            parsed = JSON.parse(result)
          } catch (err) {
            // value wasn't JSON - keep the raw string
            parsed = result
          }
          final[keys[count]] = parsed
        }
        count++
      }
      return final
    } else {
      throw new Error(`Invalid response: ${response}`)
    }
  }
  /**
   * Stores a value (objects are JSON stringified first).
   * @param expirySeconds Optional TTL in seconds; null means no expiry.
   */
  async store(key: string, value: any, expirySeconds: number | null = null) {
    const db = this._db
    if (typeof value === "object") {
      value = JSON.stringify(value)
    }
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().set(prefixedKey, value)
    if (expirySeconds) {
      await this.getClient().expire(prefixedKey, expirySeconds)
    }
  }
  /** Remaining TTL (seconds) of a key, as reported by redis TTL. */
  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }
  /**
   * Updates the expiry of an existing key.
   * NOTE(review): a null expirySeconds is forwarded to EXPIRE unchanged -
   * confirm callers never pass null here.
   */
  async setExpiry(key: string, expirySeconds: number | null) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)
  }
  /** Deletes a single key from this database's namespace. */
  async delete(key: string) {
    const db = this._db
    await this.getClient().del(addDbPrefix(db, key))
  }
  /** Deletes every key in this database's namespace (scan then delete). */
  async clear() {
    let items = await this.scan()
    await Promise.all(items.map((obj: any) => this.delete(obj.key)))
  }
}
// Re-export the redis submodules under the same names the outer package
// entrypoint exposes, for usage in index.ts - the old outer shim files
// can't be used as they now reference dist directly
export { default as Client } from "./redis"
export * as utils from "./utils"
export * as clients from "./init"
export * as redlock from "./redlock"

View file

@ -1,69 +0,0 @@
// Singleton redis clients, one per logical keyspace - created lazily the
// first time any getter below is used.
const Client = require("./index")
const utils = require("./utils")
let userClient,
  sessionClient,
  appClient,
  cacheClient,
  writethroughClient,
  lockClient

// connect every client in one go - note this creates ALL clients even if
// only one getter was called
async function init() {
  userClient = await new Client(utils.Databases.USER_CACHE).init()
  sessionClient = await new Client(utils.Databases.SESSIONS).init()
  appClient = await new Client(utils.Databases.APP_METADATA).init()
  cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
  lockClient = await new Client(utils.Databases.LOCKS).init()
  // write-through lives in its own selectable redis DB so its keys can be
  // scanned without walking the default keyspace
  writethroughClient = await new Client(
    utils.Databases.WRITE_THROUGH,
    utils.SelectableDatabases.WRITE_THROUGH
  ).init()
}

// close all open connections on shutdown
// NOTE(review): node 'exit' listeners must be synchronous - an async
// listener only runs until its first await, so at most the first client is
// actually finished here; confirm and consider a synchronous handler
process.on("exit", async () => {
  if (userClient) await userClient.finish()
  if (sessionClient) await sessionClient.finish()
  if (appClient) await appClient.finish()
  if (cacheClient) await cacheClient.finish()
  if (writethroughClient) await writethroughClient.finish()
  if (lockClient) await lockClient.finish()
})

// lazy getters - each triggers a full init() of every client if its own
// client is missing
module.exports = {
  getUserClient: async () => {
    if (!userClient) {
      await init()
    }
    return userClient
  },
  getSessionClient: async () => {
    if (!sessionClient) {
      await init()
    }
    return sessionClient
  },
  getAppClient: async () => {
    if (!appClient) {
      await init()
    }
    return appClient
  },
  getCacheClient: async () => {
    if (!cacheClient) {
      await init()
    }
    return cacheClient
  },
  getWritethroughClient: async () => {
    if (!writethroughClient) {
      await init()
    }
    return writethroughClient
  },
  getLockClient: async () => {
    if (!lockClient) {
      await init()
    }
    return lockClient
  },
}

View file

@ -0,0 +1,72 @@
import Client from "./redis"
import * as utils from "./utils"
// Shared singleton clients, one per logical keyspace - populated lazily by
// init() the first time any getter below is called
let userClient: Client,
  sessionClient: Client,
  appClient: Client,
  cacheClient: Client,
  writethroughClient: Client,
  lockClient: Client
async function init() {
userClient = await new Client(utils.Databases.USER_CACHE).init()
sessionClient = await new Client(utils.Databases.SESSIONS).init()
appClient = await new Client(utils.Databases.APP_METADATA).init()
cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
lockClient = await new Client(utils.Databases.LOCKS).init()
writethroughClient = await new Client(
utils.Databases.WRITE_THROUGH,
utils.SelectableDatabase.WRITE_THROUGH
).init()
}
// Close every client on shutdown. Node 'exit' listeners must be fully
// synchronous - the previous async listener only ran up to its first await,
// leaving all but the first client unfinished. finish() performs its work
// synchronously (flags CLOSED + disconnects), so fire-and-forget is safe.
process.on("exit", () => {
  if (userClient) void userClient.finish()
  if (sessionClient) void sessionClient.finish()
  if (appClient) void appClient.finish()
  if (cacheClient) void cacheClient.finish()
  if (writethroughClient) void writethroughClient.finish()
  if (lockClient) void lockClient.finish()
})
/** Lazily fetch the user-cache client, connecting everything on first use. */
export async function getUserClient() {
  if (userClient) {
    return userClient
  }
  await init()
  return userClient
}
/** Lazily fetch the session client, connecting everything on first use. */
export async function getSessionClient() {
  if (sessionClient) {
    return sessionClient
  }
  await init()
  return sessionClient
}
/** Lazily fetch the app-metadata client, connecting everything on first use. */
export async function getAppClient() {
  if (appClient) {
    return appClient
  }
  await init()
  return appClient
}
/** Lazily fetch the generic-cache client, connecting everything on first use. */
export async function getCacheClient() {
  if (cacheClient) {
    return cacheClient
  }
  await init()
  return cacheClient
}
/** Lazily fetch the write-through client, connecting everything on first use. */
export async function getWritethroughClient() {
  if (writethroughClient) {
    return writethroughClient
  }
  await init()
  return writethroughClient
}
/** Lazily fetch the locks client, connecting everything on first use. */
export async function getLockClient() {
  if (lockClient) {
    return lockClient
  }
  await init()
  return lockClient
}

View file

@ -0,0 +1,279 @@
import env from "../environment"
// ioredis mock is all in memory
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
import {
addDbPrefix,
removeDbPrefix,
getRedisOptions,
SEPARATOR,
SelectableDatabase,
} from "./utils"
// how long to wait before retrying a failed connection
const RETRY_PERIOD_MS = 2000
// maximum time allowed for the initial connection to establish
const STARTUP_TIMEOUT_MS = 5000
// cluster mode is currently hard-disabled
const CLUSTERED = false
const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
// whether a shutdown was requested deliberately (suppresses reconnects)
let CLOSED = false
// one shared ioredis client per redis SELECT index - for testing just
// generate the client once
let CLIENTS: { [key: number]: any } = {}
// if in test always connected
let CONNECTED = env.isTest()
// look up the shared client for a SELECT index (may be undefined)
function pickClient(selectDb: number): any {
  return CLIENTS[selectDb]
}
/**
 * Shared failure handler for a client connection - tears the client down and
 * schedules a reconnect attempt, unless the shutdown was requested manually.
 * @param selectDb The redis SELECT index the failing client belongs to.
 * @param timeout The startup timeout to clear so it can't double-fire.
 * @param err The underlying error (or a descriptive message).
 */
function connectionError(
  selectDb: number,
  timeout: NodeJS.Timeout,
  err: Error | string
) {
  // manually shut down, ignore errors
  if (CLOSED) {
    return
  }
  // the client may not have been stored yet if startup itself failed
  pickClient(selectDb)?.disconnect()
  CLOSED = true
  // always clear this on error
  clearTimeout(timeout)
  CONNECTED = false
  console.error("Redis connection failed - " + err)
  setTimeout(() => {
    // reconnect the SELECT index that failed - previously this called
    // init() with no argument, which only ever retried the default DB
    init(selectDb)
  }, RETRY_PERIOD_MS)
}
/**
 * Creates (or recreates) the shared ioredis client for the given SELECT
 * index and stores it in CLIENTS. Readiness is signalled via the CONNECTED
 * flag; connection failures are routed through connectionError(), which
 * schedules a retry. Nothing is returned - callers should use
 * waitForConnection() to know when the client is usable.
 */
function init(selectDb = DEFAULT_SELECT_DB) {
  let timeout: NodeJS.Timeout
  CLOSED = false
  let client = pickClient(selectDb)
  // already connected, ignore
  if (client && CONNECTED) {
    return
  }
  // testing uses a single in memory client
  // NOTE(review): this assignment falls through and is overwritten by the
  // construction below (both resolve to ioredis-mock under test) - confirm
  // whether an early return was intended here
  if (env.isTest()) {
    CLIENTS[selectDb] = new Redis(getRedisOptions())
  }
  // start the timer - only allowed 5 seconds to connect
  timeout = setTimeout(() => {
    if (!CONNECTED) {
      connectionError(
        selectDb,
        timeout,
        "Did not successfully connect in timeout"
      )
    }
  }, STARTUP_TIMEOUT_MS)
  // disconnect any lingering client
  if (client) {
    client.disconnect()
  }
  const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)
  if (CLUSTERED) {
    client = new Redis.Cluster([{ host, port }], opts)
  } else if (redisProtocolUrl) {
    client = new Redis(redisProtocolUrl)
  } else {
    client = new Redis(opts)
  }
  // attach handlers
  client.on("end", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("error", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("connect", () => {
    // connected successfully - cancel the startup timeout and mark ready
    clearTimeout(timeout)
    CONNECTED = true
  })
  CLIENTS[selectDb] = client
}
/**
 * Waits for the client backing the given SELECT index to report connected,
 * initialising it first if it doesn't exist yet.
 * @param selectDb The redis SELECT index to wait on.
 * @return resolves (with an empty string) once the connection is ready.
 */
function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
  return new Promise(resolve => {
    if (pickClient(selectDb) == null) {
      // initialise the requested SELECT index - previously this called
      // init() with no argument, which only ever set up the default DB
      init(selectDb)
    } else if (CONNECTED) {
      resolve("")
      return
    }
    // poll every 500ms until the connection flag flips
    const interval = setInterval(() => {
      if (CONNECTED) {
        clearInterval(interval)
        resolve("")
      }
    }, 500)
  })
}
/**
 * Utility function, takes a redis stream and converts it to a promisified
 * response - this can only be done with redis streams because they will have
 * an end. Keys seen on the stream are de-duplicated, then each key's value is
 * fetched and returned alongside the key (with the database prefix stripped).
 * @param stream A redis stream, specifically as this type of stream will have an end.
 * @param client The client to use for further lookups.
 * @return {Promise<object>} The final output of the stream - a list of
 *   { key, value } objects; rejects on any stream or lookup error.
 */
function promisifyStream(stream: any, client: RedisWrapper) {
  return new Promise((resolve, reject) => {
    // scan streams can emit the same key more than once - dedupe with a Set
    const outputKeys = new Set()
    stream.on("data", (keys: string[]) => {
      keys.forEach(key => {
        outputKeys.add(key)
      })
    })
    stream.on("error", (err: Error) => {
      reject(err)
    })
    stream.on("end", async () => {
      const keysArray: string[] = Array.from(outputKeys) as string[]
      try {
        // fetch every key's value in parallel
        let getPromises = []
        for (let key of keysArray) {
          getPromises.push(client.get(key))
        }
        const jsonArray = await Promise.all(getPromises)
        // NOTE(review): client.get() already JSON.parses stored objects, so
        // JSON.parse() here may double-parse and throw for object values -
        // confirm values reaching this path are raw strings.
        resolve(
          keysArray.map(key => ({
            key: removeDbPrefix(key),
            value: JSON.parse(jsonArray.shift()),
          }))
        )
      } catch (err) {
        reject(err)
      }
    })
  })
}
/**
 * Wrapper around the shared ioredis connection for one redis SELECT index.
 * Every key is namespaced with a logical database name prefix
 * (e.g. "users-<key>") and values are JSON (de)serialised where possible.
 */
class RedisWrapper {
  // logical database name - used as the prefix on every key
  _db: string
  // redis SELECT index this wrapper is bound to
  _select: number
  /**
   * @param db The logical database name used to prefix keys.
   * @param selectDb Optional redis SELECT index (defaults to the default DB).
   */
  constructor(db: string, selectDb: number | null = null) {
    this._db = db
    this._select = selectDb || DEFAULT_SELECT_DB
  }
  /** Returns the shared underlying ioredis client for this SELECT index. */
  getClient() {
    return pickClient(this._select)
  }
  /** (Re)initialises the shared client and waits until it is connected. */
  async init() {
    CLOSED = false
    init(this._select)
    await waitForConnection(this._select)
    return this
  }
  /**
   * Disconnects, flagging the shutdown as deliberate so the error handler
   * does not schedule a reconnect.
   */
  async finish() {
    CLOSED = true
    this.getClient().disconnect()
  }
  /**
   * SCANs for all keys under this database's prefix (optionally narrowed by
   * a key fragment) and resolves each key's value.
   * @param key Optional key prefix to narrow the scan.
   * @return a list of { key, value } objects, keys returned unprefixed.
   */
  async scan(key = ""): Promise<any> {
    const db = this._db
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      // cluster mode - scans have to run against a master node
      let node = this.getClient().nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
    }
    return promisifyStream(stream, this.getClient())
  }
  /** Raw KEYS lookup, with the db prefix applied to the pattern. */
  async keys(pattern: string) {
    const db = this._db
    return this.getClient().keys(addDbPrefix(db, pattern))
  }
  /**
   * Fetches a single value - JSON is parsed, anything unparseable is
   * returned as-is.
   */
  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    // NOTE(review): GET returns a string, so response.key should always be
    // undefined here - confirm whether this branch is vestigial
    if (response != null && response.key) {
      response.key = key
    }
    // if its not an object just return the response
    try {
      return JSON.parse(response)
    } catch (err) {
      return response
    }
  }
  /**
   * MGETs a batch of keys in one round trip.
   * @return map of original (unprefixed) key -> parsed value; keys with no
   *   stored value are omitted from the result.
   */
  async bulkGet(keys: string[]) {
    const db = this._db
    if (keys.length === 0) {
      return {}
    }
    const prefixedKeys = keys.map(key => addDbPrefix(db, key))
    let response = await this.getClient().mget(prefixedKeys)
    if (Array.isArray(response)) {
      let final: any = {}
      let count = 0
      for (let result of response) {
        if (result) {
          let parsed
          try {
            parsed = JSON.parse(result)
          } catch (err) {
            // value wasn't JSON - keep the raw string
            parsed = result
          }
          final[keys[count]] = parsed
        }
        count++
      }
      return final
    } else {
      throw new Error(`Invalid response: ${response}`)
    }
  }
  /**
   * Stores a value (objects are JSON stringified first).
   * @param expirySeconds Optional TTL in seconds; null means no expiry.
   */
  async store(key: string, value: any, expirySeconds: number | null = null) {
    const db = this._db
    if (typeof value === "object") {
      value = JSON.stringify(value)
    }
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().set(prefixedKey, value)
    if (expirySeconds) {
      await this.getClient().expire(prefixedKey, expirySeconds)
    }
  }
  /** Remaining TTL (seconds) of a key, as reported by redis TTL. */
  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }
  /**
   * Updates the expiry of an existing key.
   * NOTE(review): a null expirySeconds is forwarded to EXPIRE unchanged -
   * confirm callers never pass null here.
   */
  async setExpiry(key: string, expirySeconds: number | null) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)
  }
  /** Deletes a single key from this database's namespace. */
  async delete(key: string) {
    const db = this._db
    await this.getClient().del(addDbPrefix(db, key))
  }
  /** Deletes every key in this database's namespace (scan then delete). */
  async clear() {
    let items = await this.scan()
    await Promise.all(items.map((obj: any) => this.delete(obj.key)))
  }
}
export = RedisWrapper

View file

@ -1,10 +1,10 @@
const env = require("../environment")
import env from "../environment"
const SLOT_REFRESH_MS = 2000
const CONNECT_TIMEOUT_MS = 10000
const SEPARATOR = "-"
const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL
const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
export const SEPARATOR = "-"
/**
* These Redis databases help us to segment up a Redis keyspace by prepending the
@ -12,23 +12,23 @@ const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
* can be split up a bit; allowing us to use scans on small databases to find some particular
* keys within.
* If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out
* of the default keyspace and use a separate one - the SelectableDatabases can be used for this.
* of the default keyspace and use a separate one - the SelectableDatabase can be used for this.
*/
exports.Databases = {
PW_RESETS: "pwReset",
VERIFICATIONS: "verification",
INVITATIONS: "invitation",
DEV_LOCKS: "devLocks",
DEBOUNCE: "debounce",
SESSIONS: "session",
USER_CACHE: "users",
FLAGS: "flags",
APP_METADATA: "appMetadata",
QUERY_VARS: "queryVars",
LICENSES: "license",
GENERIC_CACHE: "data_cache",
WRITE_THROUGH: "writeThrough",
LOCKS: "locks",
// Key-prefix names used to segment the shared redis keyspace - each enum
// value is prepended (with the separator) to every key the matching client
// writes
export enum Databases {
  PW_RESETS = "pwReset",
  VERIFICATIONS = "verification",
  INVITATIONS = "invitation",
  DEV_LOCKS = "devLocks",
  DEBOUNCE = "debounce",
  SESSIONS = "session",
  USER_CACHE = "users",
  FLAGS = "flags",
  APP_METADATA = "appMetadata",
  QUERY_VARS = "queryVars",
  LICENSES = "license",
  GENERIC_CACHE = "data_cache",
  WRITE_THROUGH = "writeThrough",
  LOCKS = "locks",
}
/**
@ -40,30 +40,28 @@ exports.Databases = {
* but if you need to walk through all values in a database periodically then a separate selectable
* keyspace should be used.
*/
exports.SelectableDatabases = {
DEFAULT: 0,
WRITE_THROUGH: 1,
UNUSED_1: 2,
UNUSED_2: 3,
UNUSED_3: 4,
UNUSED_4: 5,
UNUSED_5: 6,
UNUSED_6: 7,
UNUSED_7: 8,
UNUSED_8: 9,
UNUSED_9: 10,
UNUSED_10: 11,
UNUSED_11: 12,
UNUSED_12: 13,
UNUSED_13: 14,
UNUSED_14: 15,
// Redis SELECT indexes (0-15 as enumerated here) - WRITE_THROUGH gets its
// own index so its key volume can be scanned without walking the default
// keyspace; the remaining indexes are reserved for future use
export enum SelectableDatabase {
  DEFAULT = 0,
  WRITE_THROUGH = 1,
  UNUSED_1 = 2,
  UNUSED_2 = 3,
  UNUSED_3 = 4,
  UNUSED_4 = 5,
  UNUSED_5 = 6,
  UNUSED_6 = 7,
  UNUSED_7 = 8,
  UNUSED_8 = 9,
  UNUSED_9 = 10,
  UNUSED_10 = 11,
  UNUSED_11 = 12,
  UNUSED_12 = 13,
  UNUSED_13 = 14,
  UNUSED_14 = 15,
}
exports.SEPARATOR = SEPARATOR
exports.getRedisOptions = (clustered = false) => {
export function getRedisOptions(clustered = false) {
let password = REDIS_PASSWORD
let url = REDIS_URL.split("//")
let url: string[] | string = REDIS_URL.split("//")
// get rid of the protocol
url = url.length > 1 ? url[1] : url[0]
// check for a password etc
@ -84,7 +82,7 @@ exports.getRedisOptions = (clustered = false) => {
redisProtocolUrl = REDIS_URL
}
const opts = {
const opts: any = {
connectTimeout: CONNECT_TIMEOUT_MS,
}
if (clustered) {
@ -92,7 +90,7 @@ exports.getRedisOptions = (clustered = false) => {
opts.redisOptions.tls = {}
opts.redisOptions.password = password
opts.slotsRefreshTimeout = SLOT_REFRESH_MS
opts.dnsLookup = (address, callback) => callback(null, address)
opts.dnsLookup = (address: string, callback: any) => callback(null, address)
} else {
opts.host = host
opts.port = port
@ -101,14 +99,14 @@ exports.getRedisOptions = (clustered = false) => {
return { opts, host, port, redisProtocolUrl }
}
exports.addDbPrefix = (db, key) => {
export function addDbPrefix(db: string, key: string) {
if (key.includes(db)) {
return key
}
return `${db}${SEPARATOR}${key}`
}
exports.removeDbPrefix = key => {
export function removeDbPrefix(key: string) {
let parts = key.split(SEPARATOR)
if (parts.length >= 2) {
parts.shift()

View file

@ -1 +0,0 @@
// no-op stub - always resolves to undefined
exports.lookupApiKey = async () => {}

View file

@ -1,5 +1,5 @@
const crypto = require("crypto")
const env = require("../environment")
import crypto from "crypto"
import env from "../environment"
const ALGO = "aes-256-ctr"
const SECRET = env.JWT_SECRET
@ -8,13 +8,13 @@ const ITERATIONS = 10000
const RANDOM_BYTES = 16
const STRETCH_LENGTH = 32
function stretchString(string, salt) {
/**
 * Derives a fixed-length key from the given secret via PBKDF2
 * (sha512, ITERATIONS rounds, STRETCH_LENGTH output bytes).
 */
function stretchString(secret: string, salt: Buffer) {
  return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
}
exports.encrypt = input => {
export function encrypt(input: string) {
const salt = crypto.randomBytes(RANDOM_BYTES)
const stretched = stretchString(SECRET, salt)
const stretched = stretchString(SECRET!, salt)
const cipher = crypto.createCipheriv(ALGO, stretched, salt)
const base = cipher.update(input)
const final = cipher.final()
@ -22,10 +22,10 @@ exports.encrypt = input => {
return `${salt.toString("hex")}${SEPARATOR}${encrypted}`
}
exports.decrypt = input => {
export function decrypt(input: string) {
const [salt, encrypted] = input.split(SEPARATOR)
const saltBuffer = Buffer.from(salt, "hex")
const stretched = stretchString(SECRET, saltBuffer)
const stretched = stretchString(SECRET!, saltBuffer)
const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)
const base = decipher.update(Buffer.from(encrypted, "hex"))
const final = decipher.final()

View file

@ -1,9 +1,2 @@
import * as context from "../context"
import * as tenancy from "./tenancy"
// flatten context and tenancy helpers into a single exported object
const pkg = {
  ...context,
  ...tenancy,
}
export = pkg
// flatten the context helpers and tenancy helpers into a single module
export * from "../context"
export * from "./tenancy"

View file

@ -1,10 +1,4 @@
import {
doWithDB,
queryPlatformView,
StaticDatabases,
getGlobalDBName,
ViewName,
} from "../db"
import { doWithDB, queryPlatformView, getGlobalDBName } from "../db"
import {
DEFAULT_TENANT_ID,
getTenantId,
@ -18,7 +12,7 @@ import {
TenantResolutionStrategy,
GetTenantIdOptions,
} from "@budibase/types"
import { Header } from "../constants"
import { Header, StaticDatabases, ViewName } from "../constants"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name

View file

@ -1,7 +1,8 @@
const { structures } = require("../../tests")
const utils = require("../utils")
const events = require("../events")
const { doInTenant, DEFAULT_TENANT_ID }= require("../context")
const { DEFAULT_TENANT_ID } = require("../constants")
const { doInTenant } = require("../context")
describe("utils", () => {
describe("platformLogout", () => {

Some files were not shown because too many files have changed in this diff Show more