diff --git a/packages/backend-core/accounts.js b/packages/backend-core/accounts.js
deleted file mode 100644
index 47ad03456a..0000000000
--- a/packages/backend-core/accounts.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/cloud/accounts")
diff --git a/packages/backend-core/auth.js b/packages/backend-core/auth.js
deleted file mode 100644
index bbfe3d41dd..0000000000
--- a/packages/backend-core/auth.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/auth")
diff --git a/packages/backend-core/cache.js b/packages/backend-core/cache.js
deleted file mode 100644
index c8bd3c9b6f..0000000000
--- a/packages/backend-core/cache.js
+++ /dev/null
@@ -1,9 +0,0 @@
-const generic = require("./src/cache/generic")
-
-module.exports = {
-  user: require("./src/cache/user"),
-  app: require("./src/cache/appMetadata"),
-  writethrough: require("./src/cache/writethrough"),
-  ...generic,
-  cache: generic,
-}
diff --git a/packages/backend-core/constants.js b/packages/backend-core/constants.js
deleted file mode 100644
index 4abb7703db..0000000000
--- a/packages/backend-core/constants.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/constants")
diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js
deleted file mode 100644
index c6fa87a337..0000000000
--- a/packages/backend-core/context.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  doInContext,
-} = require("./src/context")
-
-const identity = require("./src/context/identity")
-
-module.exports = {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  identity,
-  doInContext,
-}
diff --git a/packages/backend-core/db.js b/packages/backend-core/db.js
deleted file mode 100644
index f7004972d5..0000000000
--- a/packages/backend-core/db.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/db")
diff --git a/packages/backend-core/deprovision.js b/packages/backend-core/deprovision.js
deleted file mode 100644
index 672da214ff..0000000000
--- a/packages/backend-core/deprovision.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/context/deprovision")
diff --git a/packages/backend-core/encryption.js b/packages/backend-core/encryption.js
deleted file mode 100644
index 4ccb6e3a99..0000000000
--- a/packages/backend-core/encryption.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/security/encryption")
diff --git a/packages/backend-core/logging.js b/packages/backend-core/logging.js
deleted file mode 100644
index da40fe3100..0000000000
--- a/packages/backend-core/logging.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/logging")
diff --git a/packages/backend-core/middleware.js b/packages/backend-core/middleware.js
deleted file mode 100644
index 30fec96239..0000000000
--- a/packages/backend-core/middleware.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/middleware")
diff --git a/packages/backend-core/migrations.js b/packages/backend-core/migrations.js
deleted file mode 100644
index 2de19ebf65..0000000000
--- a/packages/backend-core/migrations.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./src/migrations")
diff --git a/packages/backend-core/objectStore.js b/packages/backend-core/objectStore.js
deleted file mode 100644
index 3ee433f224..0000000000
--- a/packages/backend-core/objectStore.js
+++ /dev/null
@@ -1,4 +0,0 @@
-module.exports = {
-  ...require("./src/objectStore"),
...require("./src/objectStore/utils"), -} diff --git a/packages/backend-core/permissions.js b/packages/backend-core/permissions.js deleted file mode 100644 index 42f37c9c7e..0000000000 --- a/packages/backend-core/permissions.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/security/permissions") diff --git a/packages/backend-core/plugins.js b/packages/backend-core/plugins.js deleted file mode 100644 index 018e214dcb..0000000000 --- a/packages/backend-core/plugins.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - ...require("./src/plugin"), -} diff --git a/packages/backend-core/redis.js b/packages/backend-core/redis.js deleted file mode 100644 index 1f7a48540a..0000000000 --- a/packages/backend-core/redis.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - Client: require("./src/redis"), - utils: require("./src/redis/utils"), - clients: require("./src/redis/init"), -} diff --git a/packages/backend-core/roles.js b/packages/backend-core/roles.js deleted file mode 100644 index 158bcdb6b8..0000000000 --- a/packages/backend-core/roles.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/security/roles") diff --git a/packages/backend-core/sessions.js b/packages/backend-core/sessions.js deleted file mode 100644 index c07efa2380..0000000000 --- a/packages/backend-core/sessions.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/security/sessions") diff --git a/packages/backend-core/src/auth.ts b/packages/backend-core/src/auth/auth.ts similarity index 79% rename from packages/backend-core/src/auth.ts rename to packages/backend-core/src/auth/auth.ts index 5e1959e0c8..75e425bd0f 100644 --- a/packages/backend-core/src/auth.ts +++ b/packages/backend-core/src/auth/auth.ts @@ -1,16 +1,14 @@ -const passport = require("koa-passport") +const _passport = require("koa-passport") const LocalStrategy = require("passport-local").Strategy const JwtStrategy = require("passport-jwt").Strategy -import { getGlobalDB } from "./tenancy" +import { getGlobalDB } from "../tenancy" const refresh = require("passport-oauth2-refresh") -import { Config } from "./constants" -import { getScopedConfig } from "./db/utils" +import { Config } from "../constants" +import { getScopedConfig } from "../db" import { - jwt, + jwt as jwtPassport, local, authenticated, - google, - oidc, auditLog, tenancy, authError, @@ -21,22 +19,41 @@ import { builderOnly, builderOrAdmin, joiValidator, -} from "./middleware" -import { invalidateUser } from "./cache/user" + oidc, + google, +} from "../middleware" +import { invalidateUser } from "../cache/user" import { User } from "@budibase/types" -import { logAlert } from "./logging" +import { logAlert } from "../logging" +export { + auditLog, + authError, + internalApi, + ssoCallbackUrl, + adminOnly, + builderOnly, + builderOrAdmin, + joiValidator, + google, + oidc, +} from "../middleware" +export const buildAuthMiddleware = authenticated +export const buildTenancyMiddleware = tenancy +export const buildCsrfMiddleware = csrf +export const passport = _passport +export const jwt = require("jsonwebtoken") // Strategies -passport.use(new LocalStrategy(local.options, local.authenticate)) -if (jwt.options.secretOrKey) { - passport.use(new JwtStrategy(jwt.options, jwt.authenticate)) +_passport.use(new LocalStrategy(local.options, local.authenticate)) +if (jwtPassport.options.secretOrKey) { + _passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate)) } else { logAlert("No JWT Secret supplied, cannot configure JWT strategy") } 
-passport.serializeUser((user: User, done: any) => done(null, user))
+_passport.serializeUser((user: User, done: any) => done(null, user))
 
-passport.deserializeUser(async (user: User, done: any) => {
+_passport.deserializeUser(async (user: User, done: any) => {
   const db = getGlobalDB()
 
   try {
@@ -115,7 +132,7 @@ async function refreshGoogleAccessToken(
   })
 }
 
-async function refreshOAuthToken(
+export async function refreshOAuthToken(
   refreshToken: string,
   configType: string,
   configId: string
@@ -152,7 +169,7 @@ async function refreshOAuthToken(
   return refreshResponse
 }
 
-async function updateUserOAuth(userId: string, oAuthConfig: any) {
+export async function updateUserOAuth(userId: string, oAuthConfig: any) {
   const details = {
     accessToken: oAuthConfig.accessToken,
     refreshToken: oAuthConfig.refreshToken,
@@ -179,23 +196,3 @@
     console.error("Could not update OAuth details for current user", e)
   }
 }
-
-export = {
-  buildAuthMiddleware: authenticated,
-  passport,
-  google,
-  oidc,
-  jwt: require("jsonwebtoken"),
-  buildTenancyMiddleware: tenancy,
-  auditLog,
-  authError,
-  buildCsrfMiddleware: csrf,
-  internalApi,
-  refreshOAuthToken,
-  updateUserOAuth,
-  ssoCallbackUrl,
-  adminOnly,
-  builderOnly,
-  builderOrAdmin,
-  joiValidator,
-}
diff --git a/packages/backend-core/src/auth/index.ts b/packages/backend-core/src/auth/index.ts
new file mode 100644
index 0000000000..306751af96
--- /dev/null
+++ b/packages/backend-core/src/auth/index.ts
@@ -0,0 +1 @@
+export * from "./auth"
diff --git a/packages/backend-core/src/cache/appMetadata.js b/packages/backend-core/src/cache/appMetadata.ts
similarity index 79%
rename from packages/backend-core/src/cache/appMetadata.js
rename to packages/backend-core/src/cache/appMetadata.ts
index a7ff0d2fc1..d24c4a3140 100644
--- a/packages/backend-core/src/cache/appMetadata.js
+++ b/packages/backend-core/src/cache/appMetadata.ts
@@ -1,6 +1,6 @@
-const redis = require("../redis/init")
-const { doWithDB } = require("../db")
-const { DocumentType } = require("../db/constants")
+import { getAppClient } from "../redis/init"
+import { doWithDB, DocumentType } from "../db"
+import { Database } from "@budibase/types"
 
 const AppState = {
   INVALID: "invalid",
 }
 const EXPIRY_SECONDS = 3600
 
 /**
  * The default populate app metadata function
  */
-const populateFromDB = async appId => {
+async function populateFromDB(appId: string) {
   return doWithDB(
     appId,
-    db => {
+    (db: Database) => {
       return db.get(DocumentType.APP_METADATA)
     },
     { skip_setup: true }
   )
 }
 
-const isInvalid = metadata => {
+function isInvalid(metadata?: { state: string }) {
   return !metadata || metadata.state === AppState.INVALID
 }
 
 /**
  * Get the requested app metadata by id.
  * Use redis cache to first read the app metadata.
  * If not present, fall back to loading the app metadata directly and re-caching.
  * @param {string} appId the id of the app to get metadata from.
  * @returns {object} the app metadata.
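 * Usage sketch (app ID illustrative): `await getAppMetadata("app_dev_a1b2c3")`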
 */
-exports.getAppMetadata = async appId => {
-  const client = await redis.getAppClient()
+export async function getAppMetadata(appId: string) {
+  const client = await getAppClient()
   // try cache
   let metadata = await client.get(appId)
   if (!metadata) {
-    let expiry = EXPIRY_SECONDS
+    let expiry: number | undefined = EXPIRY_SECONDS
     try {
       metadata = await populateFromDB(appId)
-    } catch (err) {
+    } catch (err: any) {
       // app DB left around, but no metadata, it is invalid
       if (err && err.status === 404) {
         metadata = { state: AppState.INVALID }
@@ -74,11 +74,11 @@
 * @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
 * @return {Promise} will respond with success when cache is updated.
 */
-exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
+export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
   if (!appId) {
     throw "Cannot invalidate if no app ID provided."
   }
-  const client = await redis.getAppClient()
+  const client = await getAppClient()
   await client.delete(appId)
   if (newMetadata) {
     await client.store(appId, newMetadata, EXPIRY_SECONDS)
diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts
index f3216531f4..ab620a900e 100644
--- a/packages/backend-core/src/cache/base/index.ts
+++ b/packages/backend-core/src/cache/base/index.ts
@@ -1,6 +1,6 @@
 import { getTenantId } from "../../context"
-import redis from "../../redis/init"
-import RedisWrapper from "../../redis"
+import * as redis from "../../redis/init"
+import { Client } from "../../redis"
 
 function generateTenantKey(key: string) {
   const tenantId = getTenantId()
@@ -8,9 +8,9 @@
 }
 
 export = class BaseCache {
-  client: RedisWrapper | undefined
+  client: Client | undefined
 
-  constructor(client: RedisWrapper | undefined = undefined) {
+  constructor(client: Client | undefined = undefined) {
     this.client = client
   }
diff --git a/packages/backend-core/src/cache/generic.js b/packages/backend-core/src/cache/generic.js
deleted file mode 100644
index 26ef0c6bb0..0000000000
--- a/packages/backend-core/src/cache/generic.js
+++ /dev/null
@@ -1,30 +0,0 @@
-const BaseCache = require("./base")
-
-const GENERIC = new BaseCache()
-
-exports.CacheKeys = {
-  CHECKLIST: "checklist",
-  INSTALLATION: "installation",
-  ANALYTICS_ENABLED: "analyticsEnabled",
-  UNIQUE_TENANT_ID: "uniqueTenantId",
-  EVENTS: "events",
-  BACKFILL_METADATA: "backfillMetadata",
-  EVENTS_RATE_LIMIT: "eventsRateLimit",
-}
-
-exports.TTL = {
-  ONE_MINUTE: 600,
-  ONE_HOUR: 3600,
-  ONE_DAY: 86400,
-}
-
-function performExport(funcName) {
-  return (...args) => GENERIC[funcName](...args)
-}
-
-exports.keys = performExport("keys")
-exports.get = performExport("get")
-exports.store = performExport("store")
-exports.delete = performExport("delete")
-exports.withCache = performExport("withCache")
-exports.bustCache = performExport("bustCache")
diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts
new file mode 100644
index 0000000000..d8a54e4a3f
--- /dev/null
+++ b/packages/backend-core/src/cache/generic.ts
@@ -0,0 +1,30 @@
+const BaseCache = require("./base")
+
+const GENERIC = new BaseCache()
+
+export enum CacheKey {
+  CHECKLIST = "checklist",
+  INSTALLATION = "installation",
+  ANALYTICS_ENABLED = "analyticsEnabled",
+  UNIQUE_TENANT_ID = "uniqueTenantId",
+  EVENTS = "events",
+  BACKFILL_METADATA = "backfillMetadata",
EVENTS_RATE_LIMIT = "eventsRateLimit", +} + +export enum TTL { + ONE_MINUTE = 600, + ONE_HOUR = 3600, + ONE_DAY = 86400, +} + +function performExport(funcName: string) { + return (...args: any) => GENERIC[funcName](...args) +} + +export const keys = performExport("keys") +export const get = performExport("get") +export const store = performExport("store") +export const destroy = performExport("delete") +export const withCache = performExport("withCache") +export const bustCache = performExport("bustCache") diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts new file mode 100644 index 0000000000..58928c271a --- /dev/null +++ b/packages/backend-core/src/cache/index.ts @@ -0,0 +1,5 @@ +export * as generic from "./generic" +export * as user from "./user" +export * as app from "./appMetadata" +export * as writethrough from "./writethrough" +export * from "./generic" diff --git a/packages/backend-core/src/cache/user.js b/packages/backend-core/src/cache/user.ts similarity index 68% rename from packages/backend-core/src/cache/user.js rename to packages/backend-core/src/cache/user.ts index 130da1915e..a128465cd6 100644 --- a/packages/backend-core/src/cache/user.js +++ b/packages/backend-core/src/cache/user.ts @@ -1,15 +1,16 @@ -const redis = require("../redis/init") -const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy") -const env = require("../environment") -const accounts = require("../cloud/accounts") +import * as redis from "../redis/init" +import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy" +import env from "../environment" +import * as accounts from "../cloud/accounts" +import { Database } from "@budibase/types" const EXPIRY_SECONDS = 3600 /** * The default populate user function */ -const populateFromDB = async (userId, tenantId) => { - const user = await doWithGlobalDB(tenantId, db => db.get(userId)) +async function populateFromDB(userId: string, tenantId: string) { + const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId)) user.budibaseAccess = true if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { const account = await accounts.getAccount(user.email) @@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => { * @param {*} populateUser function to provide the user for re-caching. 
 *   defaults to a CouchDB lookup
 * @returns
 */
-exports.getUser = async (userId, tenantId = null, populateUser = null) => {
+export async function getUser(
+  userId: string,
+  tenantId?: string,
+  populateUser?: any
+) {
   if (!populateUser) {
     populateUser = populateFromDB
   }
@@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
   let user = await client.get(userId)
   if (!user) {
     user = await populateUser(userId, tenantId)
-    client.store(userId, user, EXPIRY_SECONDS)
+    await client.store(userId, user, EXPIRY_SECONDS)
   }
   if (user && !user.tenantId && tenantId) {
     // make sure the tenant ID is always correct/set
@@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
   return user
 }
 
-exports.invalidateUser = async userId => {
+export async function invalidateUser(userId: string) {
   const client = await redis.getUserClient()
   await client.delete(userId)
 }
diff --git a/packages/backend-core/src/cloud/api.js b/packages/backend-core/src/cloud/api.js
deleted file mode 100644
index d4d4b6c8bb..0000000000
--- a/packages/backend-core/src/cloud/api.js
+++ /dev/null
@@ -1,42 +0,0 @@
-const fetch = require("node-fetch")
-class API {
-  constructor(host) {
-    this.host = host
-  }
-
-  apiCall =
-    method =>
-    async (url = "", options = {}) => {
-      if (!options.headers) {
-        options.headers = {}
-      }
-
-      if (!options.headers["Content-Type"]) {
-        options.headers = {
-          "Content-Type": "application/json",
-          Accept: "application/json",
-          ...options.headers,
-        }
-      }
-
-      let json = options.headers["Content-Type"] === "application/json"
-
-      const requestOptions = {
-        method: method,
-        body: json ? JSON.stringify(options.body) : options.body,
-        headers: options.headers,
-        // TODO: See if this is necessary
-        credentials: "include",
-      }
-
-      return await fetch(`${this.host}${url}`, requestOptions)
-    }
-
-  post = this.apiCall("POST")
-  get = this.apiCall("GET")
-  patch = this.apiCall("PATCH")
-  del = this.apiCall("DELETE")
-  put = this.apiCall("PUT")
-}
-
-module.exports = API
diff --git a/packages/backend-core/src/cloud/api.ts b/packages/backend-core/src/cloud/api.ts
new file mode 100644
index 0000000000..287c447271
--- /dev/null
+++ b/packages/backend-core/src/cloud/api.ts
@@ -0,0 +1,55 @@
+import fetch from "node-fetch"
+
+export = class API {
+  host: string
+
+  constructor(host: string) {
+    this.host = host
+  }
+
+  async apiCall(method: string, url: string, options: any = {}) {
+    if (!options.headers) {
+      options.headers = {}
+    }
+
+    if (!options.headers["Content-Type"]) {
+      options.headers = {
+        "Content-Type": "application/json",
+        Accept: "application/json",
+        ...options.headers,
+      }
+    }
+
+    let json = options.headers["Content-Type"] === "application/json"
+
+    const requestOptions = {
+      method: method,
+      body: json ? JSON.stringify(options.body) : options.body,
+      headers: options.headers,
+      // TODO: See if this is necessary
+      credentials: "include",
+    }
+
+    return await fetch(`${this.host}${url}`, requestOptions)
+  }
+
+  async post(url: string, options?: any) {
+    return this.apiCall("POST", url, options)
+  }
+
+  async get(url: string, options?: any) {
+    return this.apiCall("GET", url, options)
+  }
+
+  async patch(url: string, options?: any) {
+    return this.apiCall("PATCH", url, options)
+  }
+
+  async del(url: string, options?: any) {
+    return this.apiCall("DELETE", url, options)
+  }
+
+  async put(url: string, options?: any) {
+    return this.apiCall("PUT", url, options)
+  }
+}
diff --git a/packages/backend-core/src/constants.js b/packages/backend-core/src/constants.js
deleted file mode 100644
index 7fda17f6f2..0000000000
--- a/packages/backend-core/src/constants.js
+++ /dev/null
@@ -1,44 +0,0 @@
-exports.UserStatus = {
-  ACTIVE: "active",
-  INACTIVE: "inactive",
-}
-
-exports.Cookie = {
-  CurrentApp: "budibase:currentapp",
-  Auth: "budibase:auth",
-  Init: "budibase:init",
-  ACCOUNT_RETURN_URL: "budibase:account:returnurl",
-  DatasourceAuth: "budibase:datasourceauth",
-  OIDC_CONFIG: "budibase:oidc:config",
-}
-
-exports.Header = {
-  API_KEY: "x-budibase-api-key",
-  LICENSE_KEY: "x-budibase-license-key",
-  API_VER: "x-budibase-api-version",
-  APP_ID: "x-budibase-app-id",
-  TYPE: "x-budibase-type",
-  PREVIEW_ROLE: "x-budibase-role",
-  TENANT_ID: "x-budibase-tenant-id",
-  TOKEN: "x-budibase-token",
-  CSRF_TOKEN: "x-csrf-token",
-}
-
-exports.GlobalRoles = {
-  OWNER: "owner",
-  ADMIN: "admin",
-  BUILDER: "builder",
-  WORKSPACE_MANAGER: "workspace_manager",
-}
-
-exports.Config = {
-  SETTINGS: "settings",
-  ACCOUNT: "account",
-  SMTP: "smtp",
-  GOOGLE: "google",
-  OIDC: "oidc",
-  OIDC_LOGOS: "logos_oidc",
-}
-
-exports.MAX_VALID_DATE = new Date(2147483647000)
-exports.DEFAULT_TENANT_ID = "default"
diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/constants/db.ts
similarity index 100%
rename from packages/backend-core/src/db/constants.ts
rename to packages/backend-core/src/constants/db.ts
diff --git a/packages/backend-core/src/constants/index.ts b/packages/backend-core/src/constants/index.ts
new file mode 100644
index 0000000000..62d5e08e63
--- /dev/null
+++ b/packages/backend-core/src/constants/index.ts
@@ -0,0 +1,2 @@
+export * from "./db"
+export * from "./misc"
diff --git a/packages/backend-core/src/constants.ts b/packages/backend-core/src/constants/misc.ts
similarity index 100%
rename from packages/backend-core/src/constants.ts
rename to packages/backend-core/src/constants/misc.ts
diff --git a/packages/backend-core/src/context/Context.ts b/packages/backend-core/src/context/Context.ts
index 6ffb57e44e..f0ccdb97a8 100644
--- a/packages/backend-core/src/context/Context.ts
+++ b/packages/backend-core/src/context/Context.ts
@@ -1,18 +1,17 @@
 import { AsyncLocalStorage } from "async_hooks"
-import { ContextMap } from "./constants"
 
 export default class Context {
-  static storage = new AsyncLocalStorage()
+  static storage = new AsyncLocalStorage<Record<string, any>>()
 
-  static run(context: ContextMap, func: any) {
+  static run(context: Record<string, any>, func: any) {
     return Context.storage.run(context, () => func())
   }
 
-  static get(): ContextMap {
-    return Context.storage.getStore() as ContextMap
+  static get(): Record<string, any> {
+    return Context.storage.getStore() as Record<string, any>
   }
 
-  static set(context: ContextMap) {
+  static set(context: Record<string, any>) {
     Context.storage.enterWith(context)
   }
 }
diff --git a/packages/backend-core/src/context/constants.ts b/packages/backend-core/src/context/constants.ts
deleted file mode 100644
index 64fdb45dec..0000000000
--- a/packages/backend-core/src/context/constants.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import { IdentityContext } from "@budibase/types"
-
-export type ContextMap = {
-  tenantId?: string
-  appId?: string
-  identity?: IdentityContext
-}
diff --git a/packages/backend-core/src/context/identity.ts b/packages/backend-core/src/context/identity.ts
index 37e1ecf40a..648dd1b5fd 100644
--- a/packages/backend-core/src/context/identity.ts
+++ b/packages/backend-core/src/context/identity.ts
@@ -2,23 +2,22 @@ import {
   IdentityContext,
   IdentityType,
   User,
-  UserContext,
   isCloudAccount,
   Account,
   AccountUserContext,
 } from "@budibase/types"
 import * as context from "."
 
-export const getIdentity = (): IdentityContext | undefined => {
+export function getIdentity(): IdentityContext | undefined {
   return context.getIdentity()
 }
 
-export const doInIdentityContext = (identity: IdentityContext, task: any) => {
+export function doInIdentityContext(identity: IdentityContext, task: any) {
   return context.doInIdentityContext(identity, task)
 }
 
-export const doInUserContext = (user: User, task: any) => {
-  const userContext: UserContext = {
+export function doInUserContext(user: User, task: any) {
+  const userContext: any = {
     ...user,
     _id: user._id as string,
     type: IdentityType.USER,
@@ -26,7 +25,7 @@
   return doInIdentityContext(userContext, task)
 }
 
-export const doInAccountContext = (account: Account, task: any) => {
+export function doInAccountContext(account: Account, task: any) {
   const _id = getAccountUserId(account)
   const tenantId = account.tenantId
   const accountContext: AccountUserContext = {
@@ -38,12 +37,12 @@
   return doInIdentityContext(accountContext, task)
 }
 
-export const getAccountUserId = (account: Account) => {
+export function getAccountUserId(account: Account) {
   let userId: string
   if (isCloudAccount(account)) {
     userId = account.budibaseUserId
   } else {
-    // use account id as user id for self hosting
+    // use account id as user id for self-hosting
     userId = account.accountId
   }
   return userId
diff --git a/packages/backend-core/src/context/index.ts b/packages/backend-core/src/context/index.ts
index ce37d4f0b4..9c70363170 100644
--- a/packages/backend-core/src/context/index.ts
+++ b/packages/backend-core/src/context/index.ts
@@ -1,223 +1,3 @@
-import env from "../environment"
-import {
-  SEPARATOR,
-  DocumentType,
-  getDevelopmentAppID,
-  getProdAppID,
-  baseGlobalDBName,
-  getDB,
-} from "../db"
-import Context from "./Context"
-import { IdentityContext, Database } from "@budibase/types"
-import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
-import { ContextMap } from "./constants"
-export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
-
-// some test cases call functions directly, need to
-// store an app ID to pretend there is a context
-let TEST_APP_ID: string | null = null
-
-export function isMultiTenant() {
-  return env.MULTI_TENANCY
-}
-
-export function isTenantIdSet() {
-  const context = Context.get()
-  return !!context?.tenantId
-}
-
-export function isTenancyEnabled() {
-  return env.MULTI_TENANCY
-}
-
-/**
- * Given an app ID this will attempt to retrieve the tenant ID from it.
- * @return {null|string} The tenant ID found within the app ID.
- */
-export function getTenantIDFromAppID(appId: string) {
-  if (!appId) {
-    return undefined
-  }
-  if (!isMultiTenant()) {
-    return DEFAULT_TENANT_ID
-  }
-  const split = appId.split(SEPARATOR)
-  const hasDev = split[1] === DocumentType.DEV
-  if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
-    return undefined
-  }
-  if (hasDev) {
-    return split[2]
-  } else {
-    return split[1]
-  }
-}
-
-function updateContext(updates: ContextMap) {
-  let context: ContextMap
-  try {
-    context = Context.get()
-  } catch (err) {
-    // no context, start empty
-    context = {}
-  }
-  context = {
-    ...context,
-    ...updates,
-  }
-  return context
-}
-
-async function newContext(updates: ContextMap, task: any) {
-  // see if there already is a context setup
-  let context: ContextMap = updateContext(updates)
-  return Context.run(context, task)
-}
-
-export async function doInContext(appId: string, task: any): Promise<any> {
-  const tenantId = getTenantIDFromAppID(appId)
-  return newContext(
-    {
-      tenantId,
-      appId,
-    },
-    task
-  )
-}
-
-export async function doInTenant(
-  tenantId: string | null,
-  task: any
-): Promise<any> {
-  // make sure default always selected in single tenancy
-  if (!env.MULTI_TENANCY) {
-    tenantId = tenantId || DEFAULT_TENANT_ID
-  }
-
-  const updates = tenantId ? { tenantId } : {}
-  return newContext(updates, task)
-}
-
-export async function doInAppContext(appId: string, task: any): Promise<any> {
-  if (!appId) {
-    throw new Error("appId is required")
-  }
-
-  const tenantId = getTenantIDFromAppID(appId)
-  const updates: ContextMap = { appId }
-  if (tenantId) {
-    updates.tenantId = tenantId
-  }
-  return newContext(updates, task)
-}
-
-export async function doInIdentityContext(
-  identity: IdentityContext,
-  task: any
-): Promise<any> {
-  if (!identity) {
-    throw new Error("identity is required")
-  }
-
-  const context: ContextMap = {
-    identity,
-  }
-  if (identity.tenantId) {
-    context.tenantId = identity.tenantId
-  }
-  return newContext(context, task)
-}
-
-export function getIdentity(): IdentityContext | undefined {
-  try {
-    const context = Context.get()
-    return context?.identity
-  } catch (e) {
-    // do nothing - identity is not in context
-  }
-}
-
-export function getTenantId(): string {
-  if (!isMultiTenant()) {
-    return DEFAULT_TENANT_ID
-  }
-  const context = Context.get()
-  const tenantId = context?.tenantId
-  if (!tenantId) {
-    throw new Error("Tenant id not found")
-  }
-  return tenantId
-}
-
-export function getAppId(): string | undefined {
-  const context = Context.get()
-  const foundId = context?.appId
-  if (!foundId && env.isTest() && TEST_APP_ID) {
-    return TEST_APP_ID
-  } else {
-    return foundId
-  }
-}
-
-export function updateTenantId(tenantId?: string) {
-  let context: ContextMap = updateContext({
-    tenantId,
-  })
-  Context.set(context)
-}
-
-export function updateAppId(appId: string) {
-  let context: ContextMap = updateContext({
-    appId,
-  })
-  try {
-    Context.set(context)
-  } catch (err) {
-    if (env.isTest()) {
-      TEST_APP_ID = appId
-    } else {
-      throw err
-    }
-  }
-}
-
-export function getGlobalDB(): Database {
-  const context = Context.get()
-  if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
-    throw new Error("Global DB not found")
-  }
-  return getDB(baseGlobalDBName(context?.tenantId))
-}
-
-/**
- * Gets the app database based on whatever the request
- * contained, dev or prod.
- */
-export function getAppDB(opts?: any): Database {
-  const appId = getAppId()
-  return getDB(appId, opts)
-}
-
-/**
- * This specifically gets the prod app ID, if the request
- * contained a development app ID, this will get the prod one.
- */
-export function getProdAppDB(opts?: any): Database {
-  const appId = getAppId()
-  if (!appId) {
-    throw new Error("Unable to retrieve prod DB - no app ID.")
-  }
-  return getDB(getProdAppID(appId), opts)
-}
-
-/**
- * This specifically gets the dev app ID, if the request
- * contained a prod app ID, this will get the dev one.
- */
-export function getDevAppDB(opts?: any): Database {
-  const appId = getAppId()
-  if (!appId) {
-    throw new Error("Unable to retrieve dev DB - no app ID.")
-  }
-  return getDB(getDevelopmentAppID(appId), opts)
-}
+export { DEFAULT_TENANT_ID } from "../constants"
+export * as identity from "./identity"
+export * from "./mainContext"
diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts
new file mode 100644
index 0000000000..d743d2f49b
--- /dev/null
+++ b/packages/backend-core/src/context/mainContext.ts
@@ -0,0 +1,245 @@
+// some test cases call functions directly, need to
+// store an app ID to pretend there is a context
+import env from "../environment"
+import Context from "./Context"
+import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
+import { getDB } from "../db/db"
+import {
+  DocumentType,
+  SEPARATOR,
+  StaticDatabases,
+  DEFAULT_TENANT_ID,
+} from "../constants"
+import { Database, IdentityContext } from "@budibase/types"
+
+export type ContextMap = {
+  tenantId?: string
+  appId?: string
+  identity?: IdentityContext
+}
+
+let TEST_APP_ID: string | null = null
+
+export function getGlobalDBName(tenantId?: string) {
+  // tenant ID can be set externally, for example user API where
+  // new tenants are being created, this may be the case
+  if (!tenantId) {
+    tenantId = getTenantId()
+  }
+  return baseGlobalDBName(tenantId)
+}
+
+export function baseGlobalDBName(tenantId: string | undefined | null) {
+  let dbName
+  if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
+    dbName = StaticDatabases.GLOBAL.name
+  } else {
+    dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
+  }
+  return dbName
+}
+
+export function isMultiTenant() {
+  return env.MULTI_TENANCY
+}
+
+export function isTenantIdSet() {
+  const context = Context.get()
+  return !!context?.tenantId
+}
+
+export function isTenancyEnabled() {
+  return env.MULTI_TENANCY
+}
+
+/**
+ * Given an app ID this will attempt to retrieve the tenant ID from it.
+ * @return {null|string} The tenant ID found within the app ID.
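+ * Sketch (IDs illustrative): with multi-tenancy enabled, "app_dev_acme_a1b2"
+ * and "app_acme_a1b2" both yield "acme", while "app_a1b2" yields undefined.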
+ */
+export function getTenantIDFromAppID(appId: string) {
+  if (!appId) {
+    return undefined
+  }
+  if (!isMultiTenant()) {
+    return DEFAULT_TENANT_ID
+  }
+  const split = appId.split(SEPARATOR)
+  const hasDev = split[1] === DocumentType.DEV
+  if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
+    return undefined
+  }
+  if (hasDev) {
+    return split[2]
+  } else {
+    return split[1]
+  }
+}
+
+function updateContext(updates: ContextMap) {
+  let context: ContextMap
+  try {
+    context = Context.get()
+  } catch (err) {
+    // no context, start empty
+    context = {}
+  }
+  context = {
+    ...context,
+    ...updates,
+  }
+  return context
+}
+
+async function newContext(updates: ContextMap, task: any) {
+  // see if there already is a context setup
+  let context: ContextMap = updateContext(updates)
+  return Context.run(context, task)
+}
+
+export async function doInContext(appId: string, task: any): Promise<any> {
+  const tenantId = getTenantIDFromAppID(appId)
+  return newContext(
+    {
+      tenantId,
+      appId,
+    },
+    task
+  )
+}
+
+export async function doInTenant(
+  tenantId: string | null,
+  task: any
+): Promise<any> {
+  // make sure default always selected in single tenancy
+  if (!env.MULTI_TENANCY) {
+    tenantId = tenantId || DEFAULT_TENANT_ID
+  }
+
+  const updates = tenantId ? { tenantId } : {}
+  return newContext(updates, task)
+}
+
+export async function doInAppContext(appId: string, task: any): Promise<any> {
+  if (!appId) {
+    throw new Error("appId is required")
+  }
+
+  const tenantId = getTenantIDFromAppID(appId)
+  const updates: ContextMap = { appId }
+  if (tenantId) {
+    updates.tenantId = tenantId
+  }
+  return newContext(updates, task)
+}
+
+export async function doInIdentityContext(
+  identity: IdentityContext,
+  task: any
+): Promise<any> {
+  if (!identity) {
+    throw new Error("identity is required")
+  }
+
+  const context: ContextMap = {
+    identity,
+  }
+  if (identity.tenantId) {
+    context.tenantId = identity.tenantId
+  }
+  return newContext(context, task)
+}
+
+export function getIdentity(): IdentityContext | undefined {
+  try {
+    const context = Context.get()
+    return context?.identity
+  } catch (e) {
+    // do nothing - identity is not in context
+  }
+}
+
+export function getTenantId(): string {
+  if (!isMultiTenant()) {
+    return DEFAULT_TENANT_ID
+  }
+  const context = Context.get()
+  const tenantId = context?.tenantId
+  if (!tenantId) {
+    throw new Error("Tenant id not found")
+  }
+  return tenantId
+}
+
+export function getAppId(): string | undefined {
+  const context = Context.get()
+  const foundId = context?.appId
+  if (!foundId && env.isTest() && TEST_APP_ID) {
+    return TEST_APP_ID
+  } else {
+    return foundId
+  }
+}
+
+export function updateTenantId(tenantId?: string) {
+  let context: ContextMap = updateContext({
+    tenantId,
+  })
+  Context.set(context)
+}
+
+export function updateAppId(appId: string) {
+  let context: ContextMap = updateContext({
+    appId,
+  })
+  try {
+    Context.set(context)
+  } catch (err) {
+    if (env.isTest()) {
+      TEST_APP_ID = appId
+    } else {
+      throw err
+    }
+  }
+}
+
+export function getGlobalDB(): Database {
+  const context = Context.get()
+  if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
+    throw new Error("Global DB not found")
+  }
+  return getDB(baseGlobalDBName(context?.tenantId))
+}
+
+/**
+ * Gets the app database based on whatever the request
+ * contained, dev or prod.
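+ * Sketch (app ID illustrative): await doInAppContext("app_dev_a1b2", () => getAppDB())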
+ */
+export function getAppDB(opts?: any): Database {
+  const appId = getAppId()
+  return getDB(appId, opts)
+}
+
+/**
+ * This specifically gets the prod app ID, if the request
+ * contained a development app ID, this will get the prod one.
+ */
+export function getProdAppDB(opts?: any): Database {
+  const appId = getAppId()
+  if (!appId) {
+    throw new Error("Unable to retrieve prod DB - no app ID.")
+  }
+  return getDB(getProdAppID(appId), opts)
+}
+
+/**
+ * This specifically gets the dev app ID, if the request
+ * contained a prod app ID, this will get the dev one.
+ */
+export function getDevAppDB(opts?: any): Database {
+  const appId = getAppId()
+  if (!appId) {
+    throw new Error("Unable to retrieve dev DB - no app ID.")
+  }
+  return getDB(getDevelopmentAppID(appId), opts)
+}
diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts
index 12f6001a70..eb9d613a58 100644
--- a/packages/backend-core/src/db/Replication.ts
+++ b/packages/backend-core/src/db/Replication.ts
@@ -1,5 +1,5 @@
-import { getPouchDB, closePouchDB } from "./couch/pouchDB"
-import { DocumentType } from "./constants"
+import { getPouchDB, closePouchDB } from "./couch"
+import { DocumentType } from "../constants"
 
 class Replication {
   source: any
diff --git a/packages/backend-core/src/db/conversions.ts b/packages/backend-core/src/db/conversions.ts
index 48eaf31844..381c5cb90f 100644
--- a/packages/backend-core/src/db/conversions.ts
+++ b/packages/backend-core/src/db/conversions.ts
@@ -1,4 +1,4 @@
-import { APP_DEV_PREFIX, APP_PREFIX } from "./constants"
+import { APP_DEV_PREFIX, APP_PREFIX } from "../constants"
 import { App } from "@budibase/types"
 
 const NO_APP_ERROR = "No app provided"
diff --git a/packages/backend-core/src/db/index.ts b/packages/backend-core/src/db/index.ts
index 7269aa8f92..0d9f75fa18 100644
--- a/packages/backend-core/src/db/index.ts
+++ b/packages/backend-core/src/db/index.ts
@@ -2,6 +2,8 @@ export * from "./couch"
 export * from "./db"
 export * from "./utils"
 export * from "./views"
-export * from "./constants"
 export * from "./conversions"
-export * from "./tenancy"
+export { default as Replication } from "./Replication"
+// exports to support old export structure
+export * from "../constants/db"
+export { getGlobalDBName, baseGlobalDBName } from "../context"
diff --git a/packages/backend-core/src/db/tenancy.ts b/packages/backend-core/src/db/tenancy.ts
deleted file mode 100644
index d920f7cd41..0000000000
--- a/packages/backend-core/src/db/tenancy.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-import { DEFAULT_TENANT_ID } from "../constants"
-import { StaticDatabases, SEPARATOR } from "./constants"
-import { getTenantId } from "../context"
-
-export const getGlobalDBName = (tenantId?: string) => {
-  // tenant ID can be set externally, for example user API where
-  // new tenants are being created, this may be the case
-  if (!tenantId) {
-    tenantId = getTenantId()
-  }
-  return baseGlobalDBName(tenantId)
-}
-
-export const baseGlobalDBName = (tenantId: string | undefined | null) => {
-  let dbName
-  if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
-    dbName = StaticDatabases.GLOBAL.name
-  } else {
-    dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
-  }
-  return dbName
-}
diff --git a/packages/backend-core/src/db/tests/utils.spec.js b/packages/backend-core/src/db/tests/utils.spec.js
index 0d16e2dec2..f95889c1cc 100644
--- a/packages/backend-core/src/db/tests/utils.spec.js
+++ b/packages/backend-core/src/db/tests/utils.spec.js
@@ -1,10 +1,12 @@
require("../../../tests") const { - generateAppID, getDevelopmentAppID, getProdAppID, isDevAppID, isProdAppID, +} = require("../conversions") +const { + generateAppID, getPlatformUrl, getScopedConfig } = require("../utils") diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts index 04feafa008..590c3eeef8 100644 --- a/packages/backend-core/src/db/utils.ts +++ b/packages/backend-core/src/db/utils.ts @@ -1,26 +1,20 @@ -import { newid } from "../hashing" -import { DEFAULT_TENANT_ID, Config } from "../constants" +import { newid } from "../newid" import env from "../environment" import { + DEFAULT_TENANT_ID, SEPARATOR, DocumentType, UNICODE_MAX, ViewName, InternalTable, -} from "./constants" -import { getTenantId, getGlobalDB } from "../context" -import { getGlobalDBName } from "./tenancy" + APP_PREFIX, +} from "../constants" +import { getTenantId, getGlobalDB, getGlobalDBName } from "../context" import { doWithDB, allDbs, directCouchAllDbs } from "./db" import { getAppMetadata } from "../cache/appMetadata" import { isDevApp, isDevAppID, getProdAppID } from "./conversions" -import { APP_PREFIX } from "./constants" import * as events from "../events" -import { App, Database } from "@budibase/types" - -export * from "./constants" -export * from "./conversions" -export { default as Replication } from "./Replication" -export * from "./tenancy" +import { App, Database, ConfigType } from "@budibase/types" /** * Generates a new app ID. @@ -494,7 +488,7 @@ export const getScopedFullConfig = async function ( )[0] // custom logic for settings doc - if (type === Config.SETTINGS) { + if (type === ConfigType.SETTINGS) { if (scopedConfig && scopedConfig.doc) { // overrides affected by environment variables scopedConfig.doc.config.platformUrl = await getPlatformUrl({ @@ -533,7 +527,7 @@ export const getPlatformUrl = async (opts = { tenantAware: true }) => { // get the doc directly instead of with getScopedConfig to prevent loop let settings try { - settings = await db.get(generateConfigID({ type: Config.SETTINGS })) + settings = await db.get(generateConfigID({ type: ConfigType.SETTINGS })) } catch (e: any) { if (e.status !== 404) { throw e diff --git a/packages/backend-core/src/db/views.ts b/packages/backend-core/src/db/views.ts index c563d55be3..4a87be0a68 100644 --- a/packages/backend-core/src/db/views.ts +++ b/packages/backend-core/src/db/views.ts @@ -1,6 +1,11 @@ -import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils" +import { + DocumentType, + ViewName, + DeprecatedViews, + SEPARATOR, + StaticDatabases, +} from "../constants" import { getGlobalDB } from "../context" -import { StaticDatabases } from "./constants" import { doWithDB } from "./" import { Database, DatabaseQueryOpts } from "@budibase/types" diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 2443287d5a..51ab101b3c 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -25,7 +25,7 @@ const DefaultBucketName = { PLUGINS: "plugins", } -const env = { +const environment = { isTest, isDev, JS_BCRYPT: process.env.JS_BCRYPT, @@ -75,17 +75,18 @@ const env = { process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose", _set(key: any, value: any) { process.env[key] = value - module.exports[key] = value + // @ts-ignore + environment[key] = value }, } // clean up any environment variable edge cases -for (let [key, value] of Object.entries(env)) { +for (let [key, value] of Object.entries(environment)) { 
// handle the edge case of "0" to disable an environment variable if (value === "0") { // @ts-ignore - env[key] = 0 + environment[key] = 0 } } -export = env +export = environment diff --git a/packages/backend-core/src/events/analytics.ts b/packages/backend-core/src/events/analytics.ts index 228805ef82..f621a9c98b 100644 --- a/packages/backend-core/src/events/analytics.ts +++ b/packages/backend-core/src/events/analytics.ts @@ -1,8 +1,8 @@ import env from "../environment" -import tenancy from "../tenancy" +import * as tenancy from "../tenancy" import * as dbUtils from "../db/utils" import { Config } from "../constants" -import { withCache, TTL, CacheKeys } from "../cache/generic" +import { withCache, TTL, CacheKey } from "../cache" export const enabled = async () => { // cloud - always use the environment variable @@ -13,7 +13,7 @@ export const enabled = async () => { // self host - prefer the settings doc // use cache as events have high throughput const enabledInDB = await withCache( - CacheKeys.ANALYTICS_ENABLED, + CacheKey.ANALYTICS_ENABLED, TTL.ONE_DAY, async () => { const settings = await getSettingsDoc() diff --git a/packages/backend-core/src/events/backfill.ts b/packages/backend-core/src/events/backfill.ts index e4577c5ab4..c8025a8e4e 100644 --- a/packages/backend-core/src/events/backfill.ts +++ b/packages/backend-core/src/events/backfill.ts @@ -21,7 +21,7 @@ import { AppCreatedEvent, } from "@budibase/types" import * as context from "../context" -import { CacheKeys } from "../cache/generic" +import { CacheKey } from "../cache/generic" import * as cache from "../cache/generic" // LIFECYCLE @@ -48,18 +48,18 @@ export const end = async () => { // CRUD const getBackfillMetadata = async (): Promise => { - return cache.get(CacheKeys.BACKFILL_METADATA) + return cache.get(CacheKey.BACKFILL_METADATA) } const saveBackfillMetadata = async ( backfill: BackfillMetadata ): Promise => { // no TTL - deleted by backfill - return cache.store(CacheKeys.BACKFILL_METADATA, backfill) + return cache.store(CacheKey.BACKFILL_METADATA, backfill) } const deleteBackfillMetadata = async (): Promise => { - await cache.delete(CacheKeys.BACKFILL_METADATA) + await cache.destroy(CacheKey.BACKFILL_METADATA) } const clearEvents = async () => { @@ -70,7 +70,7 @@ const clearEvents = async () => { for (const key of keys) { // delete each key // don't use tenancy, already in the key - await cache.delete(key, { useTenancy: false }) + await cache.destroy(key, { useTenancy: false }) } } @@ -167,7 +167,7 @@ const getEventKey = (event?: Event, properties?: any) => { const tenantId = context.getTenantId() if (event) { - eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}` + eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}` // use some properties to make the key more unique const custom = CUSTOM_PROPERTY_SUFFIX[event] @@ -176,7 +176,7 @@ const getEventKey = (event?: Event, properties?: any) => { eventKey = `${eventKey}:${suffix}` } } else { - eventKey = `${CacheKeys.EVENTS}:${tenantId}:*` + eventKey = `${CacheKey.EVENTS}:${tenantId}:*` } return eventKey diff --git a/packages/backend-core/src/events/identification.ts b/packages/backend-core/src/events/identification.ts index 0b4b043837..b93bd44968 100644 --- a/packages/backend-core/src/events/identification.ts +++ b/packages/backend-core/src/events/identification.ts @@ -20,9 +20,9 @@ import { import { processors } from "./processors" import * as dbUtils from "../db/utils" import { Config } from "../constants" -import * as hashing from "../hashing" +import { newid } from 
"../utils" import * as installation from "../installation" -import { withCache, TTL, CacheKeys } from "../cache/generic" +import { withCache, TTL, CacheKey } from "../cache/generic" const pkg = require("../../package.json") @@ -270,7 +270,7 @@ const getEventTenantId = async (tenantId: string): Promise => { const getUniqueTenantId = async (tenantId: string): Promise => { // make sure this tenantId always matches the tenantId in context return context.doInTenant(tenantId, () => { - return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => { + return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => { const db = context.getGlobalDB() const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, { type: Config.SETTINGS, @@ -280,7 +280,7 @@ const getUniqueTenantId = async (tenantId: string): Promise => { if (config.config.uniqueTenantId) { return config.config.uniqueTenantId } else { - uniqueTenantId = `${hashing.newid()}_${tenantId}` + uniqueTenantId = `${newid()}_${tenantId}` config.config.uniqueTenantId = uniqueTenantId await db.put(config) return uniqueTenantId diff --git a/packages/backend-core/src/events/processors/posthog/rateLimiting.ts b/packages/backend-core/src/events/processors/posthog/rateLimiting.ts index 9c7b7876d6..89da10defa 100644 --- a/packages/backend-core/src/events/processors/posthog/rateLimiting.ts +++ b/packages/backend-core/src/events/processors/posthog/rateLimiting.ts @@ -1,5 +1,5 @@ import { Event } from "@budibase/types" -import { CacheKeys, TTL } from "../../../cache/generic" +import { CacheKey, TTL } from "../../../cache/generic" import * as cache from "../../../cache/generic" import * as context from "../../../context" @@ -74,7 +74,7 @@ export const limited = async (event: Event): Promise => { } const eventKey = (event: RateLimitedEvent) => { - let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}` + let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}` if (isPerApp(event)) { key = key + ":" + context.getAppId() } diff --git a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts index c9c4ceffe3..349a0427ac 100644 --- a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts +++ b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts @@ -3,7 +3,7 @@ import PosthogProcessor from "../PosthogProcessor" import { Event, IdentityType, Hosting } from "@budibase/types" const tk = require("timekeeper") import * as cache from "../../../../cache/generic" -import { CacheKeys } from "../../../../cache/generic" +import { CacheKey } from "../../../../cache/generic" import * as context from "../../../../context" const newIdentity = () => { @@ -19,7 +19,7 @@ describe("PosthogProcessor", () => { beforeEach(async () => { jest.clearAllMocks() await cache.bustCache( - `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` + `${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` ) }) @@ -89,7 +89,7 @@ describe("PosthogProcessor", () => { await processor.processEvent(Event.SERVED_BUILDER, identity, properties) await cache.bustCache( - `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` + `${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` ) tk.freeze(new Date(2022, 0, 1, 14, 0)) diff --git a/packages/backend-core/src/events/publishers/automation.ts b/packages/backend-core/src/events/publishers/automation.ts index 95f9cb8db6..8b2574b739 100644 --- 
--- a/packages/backend-core/src/events/publishers/automation.ts
+++ b/packages/backend-core/src/events/publishers/automation.ts
@@ -72,7 +72,7 @@ export async function stepCreated(
     automationId: automation._id as string,
     triggerId: automation.definition?.trigger?.id,
     triggerType: automation.definition?.trigger?.stepId,
-    stepId: step.id,
+    stepId: step.id!,
     stepType: step.stepId,
   }
   await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
@@ -87,7 +87,7 @@ export async function stepDeleted(
     automationId: automation._id as string,
     triggerId: automation.definition?.trigger?.id,
     triggerType: automation.definition?.trigger?.stepId,
-    stepId: step.id,
+    stepId: step.id!,
     stepType: step.stepId,
   }
   await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)
diff --git a/packages/backend-core/src/featureFlags/index.js b/packages/backend-core/src/featureFlags/index.ts
similarity index 70%
rename from packages/backend-core/src/featureFlags/index.js
rename to packages/backend-core/src/featureFlags/index.ts
index 8a8162d0ba..71e226c976 100644
--- a/packages/backend-core/src/featureFlags/index.js
+++ b/packages/backend-core/src/featureFlags/index.ts
@@ -1,17 +1,17 @@
-const env = require("../environment")
-const tenancy = require("../tenancy")
+import env from "../environment"
+import * as tenancy from "../tenancy"
 
 /**
  * Read the TENANT_FEATURE_FLAGS env var and return an array of feature flags for each tenant.
 * The env var is formatted as:
 * tenant1:feature1:feature2,tenant2:feature1
 */
-const getFeatureFlags = () => {
+function getFeatureFlags() {
   if (!env.TENANT_FEATURE_FLAGS) {
     return
   }
 
-  const tenantFeatureFlags = {}
+  const tenantFeatureFlags: Record<string, string[]> = {}
 
   env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
     const [tenantId, ...features] = tenantToFeatures.split(":")
@@ -29,13 +29,13 @@ const getFeatureFlags = () => {
 
 const TENANT_FEATURE_FLAGS = getFeatureFlags()
 
-exports.isEnabled = featureFlag => {
+export function isEnabled(featureFlag: string) {
   const tenantId = tenancy.getTenantId()
-  const flags = exports.getTenantFeatureFlags(tenantId)
+  const flags = getTenantFeatureFlags(tenantId)
   return flags.includes(featureFlag)
 }
 
-exports.getTenantFeatureFlags = tenantId => {
+export function getTenantFeatureFlags(tenantId: string) {
   const flags = []
 
   if (TENANT_FEATURE_FLAGS) {
@@ -53,8 +53,8 @@
   return flags
 }
 
-exports.TenantFeatureFlag = {
-  LICENSING: "LICENSING",
-  GOOGLE_SHEETS: "GOOGLE_SHEETS",
-  USER_GROUPS: "USER_GROUPS",
+export enum TenantFeatureFlag {
+  LICENSING = "LICENSING",
+  GOOGLE_SHEETS = "GOOGLE_SHEETS",
+  USER_GROUPS = "USER_GROUPS",
 }
diff --git a/packages/worker/src/utilities/index.js b/packages/backend-core/src/helpers.ts
similarity index 85%
rename from packages/worker/src/utilities/index.js
rename to packages/backend-core/src/helpers.ts
index b402a82cf3..e1e065bd4e 100644
--- a/packages/worker/src/utilities/index.js
+++ b/packages/backend-core/src/helpers.ts
@@ -4,6 +4,6 @@
 * @param {string} url The URL to test and remove any extra double slashes.
 * @return {string} The updated url.
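 * e.g. checkSlashesInUrl("http://localhost:10000//api//global/users") returns
 * "http://localhost:10000/api/global/users"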
 */
-exports.checkSlashesInUrl = url => {
+export function checkSlashesInUrl(url: string) {
   return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
 }
diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts
index c68c8f0927..a4d4ad0a80 100644
--- a/packages/backend-core/src/index.ts
+++ b/packages/backend-core/src/index.ts
@@ -8,27 +8,24 @@ import * as permissions from "./security/permissions"
 import * as accounts from "./cloud/accounts"
 import * as installation from "./installation"
 import env from "./environment"
-import tenancy from "./tenancy"
-import featureFlags from "./featureFlags"
+import * as tenancy from "./tenancy"
+import * as featureFlags from "./featureFlags"
 import * as sessions from "./security/sessions"
 import * as deprovisioning from "./context/deprovision"
-import auth from "./auth"
+import * as auth from "./auth"
 import * as constants from "./constants"
-import * as dbConstants from "./db/constants"
 import * as logging from "./logging"
-import pino from "./pino"
+import * as pino from "./pino"
 import * as middleware from "./middleware"
-import plugins from "./plugin"
-import encryption from "./security/encryption"
+import * as plugins from "./plugin"
+import * as encryption from "./security/encryption"
 import * as queue from "./queue"
 import * as db from "./db"
-
-// mimic the outer package exports
-import * as objectStore from "./pkg/objectStore"
-import * as utils from "./pkg/utils"
-import redis from "./pkg/redis"
-import cache from "./pkg/cache"
-import context from "./pkg/context"
+import * as context from "./context"
+import * as cache from "./cache"
+import * as objectStore from "./objectStore"
+import * as redis from "./redis"
+import * as utils from "./utils"
 
 const init = (opts: any = {}) => {
   db.init(opts.db)
@@ -37,7 +34,7 @@ const init = (opts: any = {}) => {
 const core = {
   init,
   db,
-  ...dbConstants,
+  ...constants,
   redis,
   locks: redis.redlock,
   objectStore,
@@ -46,7 +43,6 @@ const core = {
   cache,
   auth,
   constants,
-  ...constants,
   migrations,
   env,
   accounts,
diff --git a/packages/backend-core/src/installation.ts b/packages/backend-core/src/installation.ts
index da9b6c5b76..4e78a508a5 100644
--- a/packages/backend-core/src/installation.ts
+++ b/packages/backend-core/src/installation.ts
@@ -1,16 +1,16 @@
-import * as hashing from "./hashing"
+import { newid } from "./utils"
 import * as events from "./events"
-import { StaticDatabases } from "./db/constants"
+import { StaticDatabases } from "./db"
 import { doWithDB } from "./db"
 import { Installation, IdentityType } from "@budibase/types"
 import * as context from "./context"
 import semver from "semver"
-import { bustCache, withCache, TTL, CacheKeys } from "./cache/generic"
+import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
 
 const pkg = require("../package.json")
 
 export const getInstall = async (): Promise<Installation> => {
-  return withCache(CacheKeys.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
+  return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
     useTenancy: false,
   })
 }
@@ -28,7 +28,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
     if (e.status === 404) {
       install = {
         _id: StaticDatabases.PLATFORM_INFO.docs.install,
-        installId: hashing.newid(),
+        installId: newid(),
         version: pkg.version,
       }
       const resp = await platformDb.put(install)
@@ -50,7 +50,7 @@ const updateVersion = async (version: string): Promise<boolean> => {
       const install = await getInstall()
       install.version = version
       await platformDb.put(install)
-      await bustCache(CacheKeys.INSTALLATION)
+      await bustCache(CacheKey.INSTALLATION)
     }
   )
 } catch (e: any) {
diff --git a/packages/backend-core/src/middleware/adminOnly.js b/packages/backend-core/src/middleware/adminOnly.ts
similarity index 63%
rename from packages/backend-core/src/middleware/adminOnly.js
rename to packages/backend-core/src/middleware/adminOnly.ts
index 4bfdf83848..30fdf2907b 100644
--- a/packages/backend-core/src/middleware/adminOnly.js
+++ b/packages/backend-core/src/middleware/adminOnly.ts
@@ -1,4 +1,6 @@
-module.exports = async (ctx, next) => {
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
   if (
     !ctx.internal &&
     (!ctx.user || !ctx.user.admin || !ctx.user.admin.global)
diff --git a/packages/backend-core/src/middleware/auditLog.js b/packages/backend-core/src/middleware/auditLog.js
deleted file mode 100644
index c9063ae2e0..0000000000
--- a/packages/backend-core/src/middleware/auditLog.js
+++ /dev/null
@@ -1,4 +0,0 @@
-module.exports = async (ctx, next) => {
-  // Placeholder for audit log middleware
-  return next()
-}
diff --git a/packages/backend-core/src/middleware/auditLog.ts b/packages/backend-core/src/middleware/auditLog.ts
new file mode 100644
index 0000000000..a2c30ade8a
--- /dev/null
+++ b/packages/backend-core/src/middleware/auditLog.ts
@@ -0,0 +1,6 @@
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext | any, next: any) => {
+  // Placeholder for audit log middleware
+  return next()
+}
diff --git a/packages/backend-core/src/middleware/authenticated.ts b/packages/backend-core/src/middleware/authenticated.ts
index 8a1e52f414..fcf07c50a5 100644
--- a/packages/backend-core/src/middleware/authenticated.ts
+++ b/packages/backend-core/src/middleware/authenticated.ts
@@ -6,10 +6,13 @@
 import { buildMatcherRegex, matches } from "./matchers"
 import { SEPARATOR, queryGlobalView, ViewName } from "../db"
 import { getGlobalDB, doInTenant } from "../tenancy"
 import { decrypt } from "../security/encryption"
-const identity = require("../context/identity")
-const env = require("../environment")
+import * as identity from "../context/identity"
+import env from "../environment"
+import { BBContext, EndpointMatcher } from "@budibase/types"
 
-const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
+const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
+  ? parseInt(env.SESSION_UPDATE_PERIOD)
+  : 60 * 1000
 
 interface FinaliseOpts {
   authenticated?: boolean
@@ -40,13 +43,13 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
   return doInTenant(tenantId, async () => {
     const db = getGlobalDB()
     // api key is encrypted in the database
-    const userId = await queryGlobalView(
+    const userId = (await queryGlobalView(
       ViewName.BY_API_KEY,
       {
         key: apiKey,
       },
       db
-    )
+    )) as string
     if (userId) {
       return {
         valid: true,
@@ -63,14 +66,14 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
 * The tenancy modules should not be used here and it should be assumed that the tenancy context
 * has not yet been populated.
 */
-export = (
-  noAuthPatterns = [],
-  opts: { publicAllowed: boolean; populateUser?: Function } = {
+export = function (
+  noAuthPatterns: EndpointMatcher[] = [],
+  opts: { publicAllowed?: boolean; populateUser?: Function } = {
     publicAllowed: false,
   }
-) => {
+) {
buildMatcherRegex(noAuthPatterns) : [] - return async (ctx: any, next: any) => { + return async (ctx: BBContext | any, next: any) => { let publicEndpoint = false const version = ctx.request.headers[Header.API_VER] // the path is not authenticated diff --git a/packages/backend-core/src/middleware/builderOnly.js b/packages/backend-core/src/middleware/builderOnly.ts similarity index 64% rename from packages/backend-core/src/middleware/builderOnly.js rename to packages/backend-core/src/middleware/builderOnly.ts index 2128626db4..e13882d7f6 100644 --- a/packages/backend-core/src/middleware/builderOnly.js +++ b/packages/backend-core/src/middleware/builderOnly.ts @@ -1,4 +1,6 @@ -module.exports = async (ctx, next) => { +import { BBContext } from "@budibase/types" + +export = async (ctx: BBContext, next: any) => { if ( !ctx.internal && (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) diff --git a/packages/backend-core/src/middleware/builderOrAdmin.js b/packages/backend-core/src/middleware/builderOrAdmin.ts similarity index 71% rename from packages/backend-core/src/middleware/builderOrAdmin.js rename to packages/backend-core/src/middleware/builderOrAdmin.ts index 6440766298..26664695f8 100644 --- a/packages/backend-core/src/middleware/builderOrAdmin.js +++ b/packages/backend-core/src/middleware/builderOrAdmin.ts @@ -1,4 +1,6 @@ -module.exports = async (ctx, next) => { +import { BBContext } from "@budibase/types" + +export = async (ctx: BBContext, next: any) => { if ( !ctx.internal && (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) && diff --git a/packages/backend-core/src/middleware/csrf.js b/packages/backend-core/src/middleware/csrf.ts similarity index 86% rename from packages/backend-core/src/middleware/csrf.js rename to packages/backend-core/src/middleware/csrf.ts index 1557740cd6..654ba47e07 100644 --- a/packages/backend-core/src/middleware/csrf.js +++ b/packages/backend-core/src/middleware/csrf.ts @@ -1,5 +1,6 @@ -const { Header } = require("../constants") -const { buildMatcherRegex, matches } = require("./matchers") +import { Header } from "../constants" +import { buildMatcherRegex, matches } from "./matchers" +import { BBContext, EndpointMatcher } from "@budibase/types" /** * GET, HEAD and OPTIONS methods are considered safe operations @@ -31,9 +32,11 @@ const INCLUDED_CONTENT_TYPES = [ * https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern * */ -module.exports = (opts = { noCsrfPatterns: [] }) => { +export = function ( + opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] } +) { const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns) - return async (ctx, next) => { + return async (ctx: BBContext | any, next: any) => { // don't apply for excluded paths const found = matches(ctx, noCsrfOptions) if (found) { @@ -62,7 +65,7 @@ module.exports = (opts = { noCsrfPatterns: [] }) => { // apply csrf when there is a token in the session (new logins) // in future there should be a hard requirement that the token is present - const userToken = ctx.user.csrfToken + const userToken = ctx.user?.csrfToken if (!userToken) { return next() } diff --git a/packages/backend-core/src/middleware/index.ts b/packages/backend-core/src/middleware/index.ts index 998c231b3d..2b332f5c49 100644 --- a/packages/backend-core/src/middleware/index.ts +++ b/packages/backend-core/src/middleware/index.ts @@ -1,18 +1,18 @@ -const jwt = require("./passport/jwt") -const local = require("./passport/local") -const 
google = require("./passport/google") -const oidc = require("./passport/oidc") -const { authError, ssoCallbackUrl } = require("./passport/utils") -const authenticated = require("./authenticated") -const auditLog = require("./auditLog") -const tenancy = require("./tenancy") -const internalApi = require("./internalApi") -const datasourceGoogle = require("./passport/datasource/google") -const csrf = require("./csrf") -const adminOnly = require("./adminOnly") -const builderOrAdmin = require("./builderOrAdmin") -const builderOnly = require("./builderOnly") -const joiValidator = require("./joi-validator") +import * as jwt from "./passport/jwt" +import * as local from "./passport/local" +import * as google from "./passport/google" +import * as oidc from "./passport/oidc" +import { authError, ssoCallbackUrl } from "./passport/utils" +import authenticated from "./authenticated" +import auditLog from "./auditLog" +import tenancy from "./tenancy" +import internalApi from "./internalApi" +import * as datasourceGoogle from "./passport/datasource/google" +import csrf from "./csrf" +import adminOnly from "./adminOnly" +import builderOrAdmin from "./builderOrAdmin" +import builderOnly from "./builderOnly" +import * as joiValidator from "./joi-validator" const pkg = { google, diff --git a/packages/backend-core/src/middleware/internalApi.js b/packages/backend-core/src/middleware/internalApi.ts similarity index 53% rename from packages/backend-core/src/middleware/internalApi.js rename to packages/backend-core/src/middleware/internalApi.ts index 05833842ce..f4f08ec2dd 100644 --- a/packages/backend-core/src/middleware/internalApi.js +++ b/packages/backend-core/src/middleware/internalApi.ts @@ -1,10 +1,11 @@ -const env = require("../environment") -const { Header } = require("../constants") +import env from "../environment" +import { Header } from "../constants" +import { BBContext } from "@budibase/types" /** * API Key only endpoint. */ -module.exports = async (ctx, next) => { +export = async (ctx: BBContext, next: any) => { const apiKey = ctx.request.headers[Header.API_KEY] if (apiKey !== env.INTERNAL_API_KEY) { ctx.throw(403, "Unauthorized") diff --git a/packages/backend-core/src/middleware/joi-validator.js b/packages/backend-core/src/middleware/joi-validator.ts similarity index 50% rename from packages/backend-core/src/middleware/joi-validator.js rename to packages/backend-core/src/middleware/joi-validator.ts index 6812dbdd54..fcc8316886 100644 --- a/packages/backend-core/src/middleware/joi-validator.js +++ b/packages/backend-core/src/middleware/joi-validator.ts @@ -1,21 +1,27 @@ -const Joi = require("joi") +import Joi, { ObjectSchema } from "joi" +import { BBContext } from "@budibase/types" -function validate(schema, property) { +function validate( + schema: Joi.ObjectSchema | Joi.ArraySchema, + property: string +) { // Return a Koa middleware function - return (ctx, next) => { + return (ctx: BBContext, next: any) => { if (!schema) { return next() } let params = null + // @ts-ignore + let reqProp = ctx.request?.[property] if (ctx[property] != null) { params = ctx[property] - } else if (ctx.request[property] != null) { - params = ctx.request[property] + } else if (reqProp != null) { + params = reqProp } // not all schemas have the append property e.g. 
array schemas - if (schema.append) { - schema = schema.append({ + if ((schema as Joi.ObjectSchema).append) { + schema = (schema as Joi.ObjectSchema).append({ createdAt: Joi.any().optional(), updatedAt: Joi.any().optional(), }) @@ -30,10 +36,10 @@ function validate(schema, property) { } } -module.exports.body = schema => { +export function body(schema: Joi.ObjectSchema | Joi.ArraySchema) { return validate(schema, "body") } -module.exports.params = schema => { +export function params(schema: Joi.ObjectSchema | Joi.ArraySchema) { return validate(schema, "params") } diff --git a/packages/backend-core/src/middleware/passport/datasource/google.js b/packages/backend-core/src/middleware/passport/datasource/google.ts similarity index 71% rename from packages/backend-core/src/middleware/passport/datasource/google.js rename to packages/backend-core/src/middleware/passport/datasource/google.ts index 7cfd7f55f6..65620d7aa3 100644 --- a/packages/backend-core/src/middleware/passport/datasource/google.js +++ b/packages/backend-core/src/middleware/passport/datasource/google.ts @@ -1,11 +1,15 @@ -const google = require("../google") +import * as google from "../google" +import { Cookie, Config } from "../../../constants" +import { clearCookie, getCookie } from "../../../utils" +import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db" +import environment from "../../../environment" +import { getGlobalDB } from "../../../tenancy" +import { BBContext, Database, SSOProfile } from "@budibase/types" const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy -const { Cookie, Config } = require("../../../constants") -const { clearCookie, getCookie } = require("../../../utils") -const { getScopedConfig, getPlatformUrl } = require("../../../db/utils") -const { doWithDB } = require("../../../db") -const environment = require("../../../environment") -const { getGlobalDB } = require("../../../tenancy") + +type Passport = { + authenticate: any +} async function fetchGoogleCreds() { // try and get the config from the tenant @@ -22,7 +26,11 @@ async function fetchGoogleCreds() { ) } -async function preAuth(passport, ctx, next) { +export async function preAuth( + passport: Passport, + ctx: BBContext, + next: Function +) { // get the relevant config const googleConfig = await fetchGoogleCreds() const platformUrl = await getPlatformUrl({ tenantAware: false }) @@ -41,7 +49,11 @@ async function preAuth(passport, ctx, next) { })(ctx, next) } -async function postAuth(passport, ctx, next) { +export async function postAuth( + passport: Passport, + ctx: BBContext, + next: Function +) { // get the relevant config const config = await fetchGoogleCreds() const platformUrl = await getPlatformUrl({ tenantAware: false }) @@ -56,15 +68,20 @@ async function postAuth(passport, ctx, next) { clientSecret: config.clientSecret, callbackURL: callbackUrl, }, - (accessToken, refreshToken, profile, done) => { + ( + accessToken: string, + refreshToken: string, + profile: SSOProfile, + done: Function + ) => { clearCookie(ctx, Cookie.DatasourceAuth) done(null, { accessToken, refreshToken }) } ), { successRedirect: "/", failureRedirect: "/error" }, - async (err, tokens) => { + async (err: any, tokens: string[]) => { // update the DB for the datasource with all the user info - await doWithDB(authStateCookie.appId, async db => { + await doWithDB(authStateCookie.appId, async (db: Database) => { const datasource = await db.get(authStateCookie.datasourceId) if (!datasource.config) { datasource.config = {} @@ -78,6 +95,3 @@ async 
function postAuth(passport, ctx, next) { } )(ctx, next) } - -exports.preAuth = preAuth -exports.postAuth = postAuth diff --git a/packages/backend-core/src/middleware/passport/google.js b/packages/backend-core/src/middleware/passport/google.ts similarity index 63% rename from packages/backend-core/src/middleware/passport/google.js rename to packages/backend-core/src/middleware/passport/google.ts index 7eb1215c1f..deba849233 100644 --- a/packages/backend-core/src/middleware/passport/google.js +++ b/packages/backend-core/src/middleware/passport/google.ts @@ -1,10 +1,15 @@ +import { ssoCallbackUrl } from "./utils" +import { authenticateThirdParty } from "./third-party-common" +import { ConfigType, GoogleConfig, Database, SSOProfile } from "@budibase/types" const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy -const { ssoCallbackUrl } = require("./utils") -const { authenticateThirdParty } = require("./third-party-common") -const { Config } = require("../../../constants") -const buildVerifyFn = saveUserFn => { - return (accessToken, refreshToken, profile, done) => { +export function buildVerifyFn(saveUserFn?: Function) { + return ( + accessToken: string, + refreshToken: string, + profile: SSOProfile, + done: Function + ) => { const thirdPartyUser = { provider: profile.provider, // should always be 'google' providerType: "google", @@ -31,7 +36,11 @@ const buildVerifyFn = saveUserFn => { * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport. * @returns Dynamically configured Passport Google Strategy */ -exports.strategyFactory = async function (config, callbackUrl, saveUserFn) { +export async function strategyFactory( + config: GoogleConfig["config"], + callbackUrl: string, + saveUserFn?: Function +) { try { const { clientID, clientSecret } = config @@ -50,18 +59,15 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) { }, verify ) - } catch (err) { + } catch (err: any) { console.error(err) - throw new Error( - `Error constructing google authentication strategy: ${err}`, - err - ) + throw new Error(`Error constructing google authentication strategy: ${err}`) } } -exports.getCallbackUrl = async function (db, config) { - return ssoCallbackUrl(db, config, Config.GOOGLE) +export async function getCallbackUrl( + db: Database, + config: { callbackURL?: string } +) { + return ssoCallbackUrl(db, config, ConfigType.GOOGLE) } - -// expose for testing -exports.buildVerifyFn = buildVerifyFn diff --git a/packages/backend-core/src/middleware/passport/jwt.js b/packages/backend-core/src/middleware/passport/jwt.js deleted file mode 100644 index 36316264b0..0000000000 --- a/packages/backend-core/src/middleware/passport/jwt.js +++ /dev/null @@ -1,18 +0,0 @@ -const { Cookie } = require("../../constants") -const env = require("../../environment") -const { authError } = require("./utils") - -exports.options = { - secretOrKey: env.JWT_SECRET, - jwtFromRequest: function (ctx) { - return ctx.cookies.get(Cookie.Auth) - }, -} - -exports.authenticate = async function (jwt, done) { - try { - return done(null, jwt) - } catch (err) { - return authError(done, "JWT invalid", err) - } -} diff --git a/packages/backend-core/src/middleware/passport/jwt.ts b/packages/backend-core/src/middleware/passport/jwt.ts new file mode 100644 index 0000000000..95dc8f2656 --- /dev/null +++ b/packages/backend-core/src/middleware/passport/jwt.ts @@ -0,0 +1,19 @@ +import { Cookie } from "../../constants" +import env from "../../environment" 
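The Google strategy factory above is constructed at runtime from CouchDB-stored config rather than exported pre-built. A minimal wiring sketch, where the config values, `db`, and `saveUser` are all illustrative assumptions rather than Budibase's actual bootstrap code:

```ts
import { strategyFactory, getCallbackUrl } from "./passport/google"

// Hypothetical startup wiring. In reality clientID/clientSecret come from
// the scoped google config document, not literals.
async function registerGoogleStrategy(
  passport: any,
  db: any,
  saveUser: Function
) {
  const config: any = { clientID: "<id>", clientSecret: "<secret>", activated: true }
  const callbackUrl = await getCallbackUrl(db, config) // honours config.callbackURL if set
  const strategy = await strategyFactory(config, callbackUrl, saveUser)
  passport.use(strategy) // subsequently: passport.authenticate("google", ...)
}
```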
+import { authError } from "./utils" +import { BBContext } from "@budibase/types" + +export const options = { + secretOrKey: env.JWT_SECRET, + jwtFromRequest: function (ctx: BBContext) { + return ctx.cookies.get(Cookie.Auth) + }, +} + +export async function authenticate(jwt: Function, done: Function) { + try { + return done(null, jwt) + } catch (err) { + return authError(done, "JWT invalid", err) + } +} diff --git a/packages/backend-core/src/middleware/passport/local.js b/packages/backend-core/src/middleware/passport/local.ts similarity index 73% rename from packages/backend-core/src/middleware/passport/local.js rename to packages/backend-core/src/middleware/passport/local.ts index b955d29102..8b85d3734c 100644 --- a/packages/backend-core/src/middleware/passport/local.js +++ b/packages/backend-core/src/middleware/passport/local.ts @@ -1,18 +1,18 @@ +import { UserStatus } from "../../constants" +import { compare, newid } from "../../utils" +import env from "../../environment" +import * as users from "../../users" +import { authError } from "./utils" +import { createASession } from "../../security/sessions" +import { getTenantId } from "../../tenancy" +import { BBContext } from "@budibase/types" const jwt = require("jsonwebtoken") -const { UserStatus } = require("../../constants") -const { compare } = require("../../hashing") -const env = require("../../environment") -const users = require("../../users") -const { authError } = require("./utils") -const { newid } = require("../../hashing") -const { createASession } = require("../../security/sessions") -const { getTenantId } = require("../../tenancy") const INVALID_ERR = "Invalid credentials" const SSO_NO_PASSWORD = "SSO user does not have a password set" const EXPIRED = "This account has expired. Please reset your password" -exports.options = { +export const options = { passReqToCallback: true, } @@ -24,7 +24,12 @@ exports.options = { * @param {*} done callback from passport to return user information and errors * @returns The authenticated user, or errors if they occur */ -exports.authenticate = async function (ctx, email, password, done) { +export async function authenticate( + ctx: BBContext, + email: string, + password: string, + done: Function +) { if (!email) return authError(done, "Email Required") if (!password) return authError(done, "Password Required") @@ -56,9 +61,9 @@ exports.authenticate = async function (ctx, email, password, done) { const sessionId = newid() const tenantId = getTenantId() - await createASession(dbUser._id, { sessionId, tenantId }) + await createASession(dbUser._id!, { sessionId, tenantId }) - dbUser.token = jwt.sign( + const token = jwt.sign( { userId: dbUser._id, sessionId, @@ -69,7 +74,10 @@ exports.authenticate = async function (ctx, email, password, done) { // Remove users password in payload delete dbUser.password - return done(null, dbUser) + return done(null, { + ...dbUser, + token, + }) } else { return authError(done, INVALID_ERR) } diff --git a/packages/backend-core/src/middleware/passport/oidc.js b/packages/backend-core/src/middleware/passport/oidc.ts similarity index 72% rename from packages/backend-core/src/middleware/passport/oidc.js rename to packages/backend-core/src/middleware/passport/oidc.ts index 55a7033e40..27c3c647b7 100644 --- a/packages/backend-core/src/middleware/passport/oidc.js +++ b/packages/backend-core/src/middleware/passport/oidc.ts @@ -1,10 +1,23 @@ -const fetch = require("node-fetch") +import fetch from "node-fetch" +import { authenticateThirdParty } from "./third-party-common" 
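A behavioural nuance in the `local.ts` rewrite above: the signed JWT is no longer mutated onto `dbUser.token` but spread onto the returned user as `token`. A small sketch of reading such a token back, assuming the same `env.JWT_SECRET` used by the `jwt.sign` call (`decodeSessionToken` is a hypothetical helper, not part of this diff):

```ts
const jwt = require("jsonwebtoken")

// Claims mirror the jwt.sign() payload in authenticate():
// userId, sessionId, tenantId
interface SessionClaims {
  userId: string
  sessionId: string
  tenantId: string
}

function decodeSessionToken(token: string, secret: string): SessionClaims {
  // jwt.verify throws on a bad signature or an expired token
  return jwt.verify(token, secret) as SessionClaims
}
```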
+import { ssoCallbackUrl } from "./utils" +import { + Config, + ConfigType, + OIDCInnerCfg, + Database, + SSOProfile, + ThirdPartyUser, + OIDCConfiguration, +} from "@budibase/types" const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy -const { authenticateThirdParty } = require("./third-party-common") -const { ssoCallbackUrl } = require("./utils") -const { Config } = require("../../../constants") -const buildVerifyFn = saveUserFn => { +type JwtClaims = { + preferred_username: string + email: string +} + +export function buildVerifyFn(saveUserFn?: Function) { /** * @param {*} issuer The identity provider base URL * @param {*} sub The user ID @@ -17,17 +30,17 @@ const buildVerifyFn = saveUserFn => { * @param {*} done The passport callback: err, user, info */ return async ( - issuer, - sub, - profile, - jwtClaims, - accessToken, - refreshToken, - idToken, - params, - done + issuer: string, + sub: string, + profile: SSOProfile, + jwtClaims: JwtClaims, + accessToken: string, + refreshToken: string, + idToken: string, + params: any, + done: Function ) => { - const thirdPartyUser = { + const thirdPartyUser: ThirdPartyUser = { // store the issuer info to enable sync in future provider: issuer, providerType: "oidc", @@ -53,7 +66,7 @@ const buildVerifyFn = saveUserFn => { * @param {*} profile The structured profile created by passport using the user info endpoint * @param {*} jwtClaims The claims returned in the id token */ -function getEmail(profile, jwtClaims) { +function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) { // profile not guaranteed to contain email e.g. github connected azure ad account if (profile._json.email) { return profile._json.email @@ -77,7 +90,7 @@ function getEmail(profile, jwtClaims) { ) } -function validEmail(value) { +function validEmail(value: string) { return ( value && !!value.match( @@ -91,19 +104,25 @@ function validEmail(value) { * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport. 
* @returns Dynamically configured Passport OIDC Strategy */ -exports.strategyFactory = async function (config, saveUserFn) { +export async function strategyFactory( + config: OIDCConfiguration, + saveUserFn?: Function +) { try { const verify = buildVerifyFn(saveUserFn) const strategy = new OIDCStrategy(config, verify) strategy.name = "oidc" return strategy - } catch (err) { + } catch (err: any) { console.error(err) - throw new Error("Error constructing OIDC authentication strategy", err) + throw new Error(`Error constructing OIDC authentication strategy - ${err}`) } } -exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) { +export async function fetchStrategyConfig( + enrichedConfig: OIDCInnerCfg, + callbackUrl?: string +): Promise<OIDCConfiguration> { try { const { clientID, clientSecret, configUrl } = enrichedConfig @@ -135,13 +154,15 @@ exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) { } } catch (err) { console.error(err) - throw new Error("Error constructing OIDC authentication configuration", err) + throw new Error( `Error constructing OIDC authentication configuration - ${err}` ) } } -exports.getCallbackUrl = async function (db, config) { - return ssoCallbackUrl(db, config, Config.OIDC) +export async function getCallbackUrl( + db: Database, + config: { callbackURL?: string } +) { + return ssoCallbackUrl(db, config, ConfigType.OIDC) } - -// expose for testing -exports.buildVerifyFn = buildVerifyFn diff --git a/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js b/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js index 9799045ffc..d377d602f1 100644 --- a/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js +++ b/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js @@ -4,7 +4,7 @@ const { data } = require("./utilities/mock-data") const { DEFAULT_TENANT_ID } = require("../../../constants") const { generateGlobalUserID } = require("../../../db/utils") -const { newid } = require("../../../hashing") +const { newid } = require("../../../utils") const { doWithGlobalDB, doInTenant } = require("../../../tenancy") const done = jest.fn() diff --git a/packages/backend-core/src/middleware/passport/third-party-common.js b/packages/backend-core/src/middleware/passport/third-party-common.ts similarity index 77% rename from packages/backend-core/src/middleware/passport/third-party-common.js rename to packages/backend-core/src/middleware/passport/third-party-common.ts index 1c5891fce7..8798ce5298 100644 --- a/packages/backend-core/src/middleware/passport/third-party-common.js +++ b/packages/backend-core/src/middleware/passport/third-party-common.ts @@ -1,21 +1,22 @@ -const env = require("../../environment") +import env from "../../environment" +import { generateGlobalUserID } from "../../db" +import { authError } from "./utils" +import { newid } from "../../utils" +import { createASession } from "../../security/sessions" +import * as users from "../../users" +import { getGlobalDB, getTenantId } from "../../tenancy" +import fetch from "node-fetch" +import { ThirdPartyUser } from "@budibase/types" const jwt = require("jsonwebtoken") -const { generateGlobalUserID } = require("../../db/utils") -const { authError } = require("./utils") -const { newid } = require("../../hashing") -const { createASession } = require("../../security/sessions") -const users = require("../../users") -const { getGlobalDB, getTenantId } = require("../../tenancy") -const fetch = 
require("node-fetch") /** * Common authentication logic for third parties. e.g. OAuth, OIDC. */ -exports.authenticateThirdParty = async function ( - thirdPartyUser, - requireLocalAccount = true, - done, - saveUserFn +export async function authenticateThirdParty( + thirdPartyUser: ThirdPartyUser, + requireLocalAccount: boolean = true, + done: Function, + saveUserFn?: Function ) { if (!saveUserFn) { throw new Error("Save user function must be provided") @@ -39,7 +40,7 @@ exports.authenticateThirdParty = async function ( // try to load by id try { dbUser = await db.get(userId) - } catch (err) { + } catch (err: any) { // abort when not 404 error if (!err.status || err.status !== 404) { return authError( @@ -81,7 +82,7 @@ exports.authenticateThirdParty = async function ( // create or sync the user try { await saveUserFn(dbUser, false, false) - } catch (err) { + } catch (err: any) { return authError(done, err) } @@ -104,13 +105,16 @@ exports.authenticateThirdParty = async function ( return done(null, dbUser) } -async function syncProfilePicture(user, thirdPartyUser) { - const pictureUrl = thirdPartyUser.profile._json.picture +async function syncProfilePicture( + user: ThirdPartyUser, + thirdPartyUser: ThirdPartyUser +) { + const pictureUrl = thirdPartyUser.profile?._json.picture if (pictureUrl) { const response = await fetch(pictureUrl) if (response.status === 200) { - const type = response.headers.get("content-type") + const type = response.headers.get("content-type") as string if (type.startsWith("image/")) { user.pictureUrl = pictureUrl } @@ -123,7 +127,7 @@ async function syncProfilePicture(user, thirdPartyUser) { /** * @returns a user that has been sync'd with third party information */ -async function syncUser(user, thirdPartyUser) { +async function syncUser(user: ThirdPartyUser, thirdPartyUser: ThirdPartyUser) { // provider user.provider = thirdPartyUser.provider user.providerType = thirdPartyUser.providerType diff --git a/packages/backend-core/src/middleware/passport/utils.js b/packages/backend-core/src/middleware/passport/utils.ts similarity index 64% rename from packages/backend-core/src/middleware/passport/utils.js rename to packages/backend-core/src/middleware/passport/utils.ts index ab199b9f2f..3d79aada28 100644 --- a/packages/backend-core/src/middleware/passport/utils.js +++ b/packages/backend-core/src/middleware/passport/utils.ts @@ -1,6 +1,6 @@ -const { isMultiTenant, getTenantId } = require("../../tenancy") -const { getScopedConfig } = require("../../db/utils") -const { Config } = require("../../constants") +import { isMultiTenant, getTenantId } from "../../tenancy" +import { getScopedConfig } from "../../db" +import { ConfigType, Database, Config } from "@budibase/types" /** * Utility to handle authentication errors. 
@@ -10,7 +10,7 @@ const { Config } = require("../../constants") * @param {*} err (Optional) error that will be logged */ -exports.authError = function (done, message, err = null) { +export function authError(done: Function, message: string, err?: any) { return done( err, null, // never return a user @@ -18,13 +18,17 @@ exports.authError = function (done, message, err = null) { ) } -exports.ssoCallbackUrl = async (db, config, type) => { +export async function ssoCallbackUrl( + db: Database, + config?: { callbackURL?: string }, + type?: ConfigType +) { // incase there is a callback URL from before if (config && config.callbackURL) { return config.callbackURL } const publicConfig = await getScopedConfig(db, { - type: Config.SETTINGS, + type: ConfigType.SETTINGS, }) let callbackUrl = `/api/global/auth` diff --git a/packages/backend-core/src/middleware/tenancy.ts b/packages/backend-core/src/middleware/tenancy.ts index 0aaacef139..78da2bb3e8 100644 --- a/packages/backend-core/src/middleware/tenancy.ts +++ b/packages/backend-core/src/middleware/tenancy.ts @@ -8,15 +8,15 @@ import { TenantResolutionStrategy, } from "@budibase/types" -const tenancy = ( +export = function ( allowQueryStringPatterns: EndpointMatcher[], noTenancyPatterns: EndpointMatcher[], - opts = { noTenancyRequired: false } -) => { + opts: { noTenancyRequired?: boolean } = { noTenancyRequired: false } +) { const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns) const noTenancyOptions = buildMatcherRegex(noTenancyPatterns) - return async function (ctx: BBContext, next: any) { + return async function (ctx: BBContext | any, next: any) { const allowNoTenant = opts.noTenancyRequired || !!matches(ctx, noTenancyOptions) const tenantOpts: GetTenantIdOptions = { @@ -33,5 +33,3 @@ const tenancy = ( return doInTenant(tenantId, next) } } - -export = tenancy diff --git a/packages/backend-core/src/migrations/tests/index.spec.js b/packages/backend-core/src/migrations/tests/index.spec.js index 8fbc244cd6..b7d2e14ea5 100644 --- a/packages/backend-core/src/migrations/tests/index.spec.js +++ b/packages/backend-core/src/migrations/tests/index.spec.js @@ -3,7 +3,7 @@ const { runMigrations, getMigrationsDoc } = require("../index") const { getDB } = require("../../db") const { StaticDatabases, -} = require("../../db/utils") +} = require("../../constants") let db diff --git a/packages/backend-core/src/newid.ts b/packages/backend-core/src/newid.ts new file mode 100644 index 0000000000..5676c23f48 --- /dev/null +++ b/packages/backend-core/src/newid.ts @@ -0,0 +1,5 @@ +import { v4 } from "uuid" + +export function newid() { + return v4().replace(/-/g, "") +} diff --git a/packages/backend-core/src/objectStore/index.ts b/packages/backend-core/src/objectStore/index.ts index a1193c0303..2971834f0e 100644 --- a/packages/backend-core/src/objectStore/index.ts +++ b/packages/backend-core/src/objectStore/index.ts @@ -1,426 +1,2 @@ -const sanitize = require("sanitize-s3-objectkey") -import AWS from "aws-sdk" -import stream from "stream" -import fetch from "node-fetch" -import tar from "tar-fs" -const zlib = require("zlib") -import { promisify } from "util" -import { join } from "path" -import fs from "fs" -import env from "../environment" -import { budibaseTempDir, ObjectStoreBuckets } from "./utils" -import { v4 } from "uuid" -import { APP_PREFIX, APP_DEV_PREFIX } from "../db/utils" - -const streamPipeline = promisify(stream.pipeline) -// use this as a temporary store of buckets that are being created -const STATE = { - bucketCreationPromises: {}, -} - 
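Since the tenancy middleware is now the module's direct `export =`, consumers build it from matcher arrays. A hypothetical Koa wiring, assuming `EndpointMatcher` entries take the `{ route, method }` shape consumed by `buildMatcherRegex` (the routes below are illustrative):

```ts
import Koa from "koa"
import tenancy = require("./middleware/tenancy") // export = needs require-style import

const app = new Koa()
app.use(
  tenancy(
    // these endpoints may pass ?tenantId= in the query string
    [{ route: "/api/system/accounts", method: "ALL" }],
    // and these may run with no tenant context at all
    [{ route: "/api/system/status", method: "GET" }],
    { noTenancyRequired: false }
  )
)
```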
-type ListParams = { - ContinuationToken?: string -} - -type UploadParams = { - bucket: string - filename: string - path: string - type?: string - // can be undefined, we will remove it - metadata?: { - [key: string]: string | undefined - } -} - -const CONTENT_TYPE_MAP: any = { - txt: "text/plain", - html: "text/html", - css: "text/css", - js: "application/javascript", - json: "application/json", - gz: "application/gzip", -} -const STRING_CONTENT_TYPES = [ - CONTENT_TYPE_MAP.html, - CONTENT_TYPE_MAP.css, - CONTENT_TYPE_MAP.js, - CONTENT_TYPE_MAP.json, -] - -// does normal sanitization and then swaps dev apps to apps -export function sanitizeKey(input: string) { - return sanitize(sanitizeBucket(input)).replace(/\\/g, "/") -} - -// simply handles the dev app to app conversion -export function sanitizeBucket(input: string) { - return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX) -} - -function publicPolicy(bucketName: string) { - return { - Version: "2012-10-17", - Statement: [ - { - Effect: "Allow", - Principal: { - AWS: ["*"], - }, - Action: "s3:GetObject", - Resource: [`arn:aws:s3:::${bucketName}/*`], - }, - ], - } -} - -const PUBLIC_BUCKETS = [ - ObjectStoreBuckets.APPS, - ObjectStoreBuckets.GLOBAL, - ObjectStoreBuckets.PLUGINS, -] - -/** - * Gets a connection to the object store using the S3 SDK. - * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from. - * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage. - * @constructor - */ -export const ObjectStore = (bucket: string) => { - const config: any = { - s3ForcePathStyle: true, - signatureVersion: "v4", - apiVersion: "2006-03-01", - accessKeyId: env.MINIO_ACCESS_KEY, - secretAccessKey: env.MINIO_SECRET_KEY, - region: env.AWS_REGION, - } - if (bucket) { - config.params = { - Bucket: sanitizeBucket(bucket), - } - } - if (env.MINIO_URL) { - config.endpoint = env.MINIO_URL - } - return new AWS.S3(config) -} - -/** - * Given an object store and a bucket name this will make sure the bucket exists, - * if it does not exist then it will create it. - */ -export const makeSureBucketExists = async (client: any, bucketName: string) => { - bucketName = sanitizeBucket(bucketName) - try { - await client - .headBucket({ - Bucket: bucketName, - }) - .promise() - } catch (err: any) { - const promises: any = STATE.bucketCreationPromises - const doesntExist = err.statusCode === 404, - noAccess = err.statusCode === 403 - if (promises[bucketName]) { - await promises[bucketName] - } else if (doesntExist || noAccess) { - if (doesntExist) { - // bucket doesn't exist create it - promises[bucketName] = client - .createBucket({ - Bucket: bucketName, - }) - .promise() - await promises[bucketName] - delete promises[bucketName] - } - // public buckets are quite hidden in the system, make sure - // no bucket is set accidentally - if (PUBLIC_BUCKETS.includes(bucketName)) { - await client - .putBucketPolicy({ - Bucket: bucketName, - Policy: JSON.stringify(publicPolicy(bucketName)), - }) - .promise() - } - } else { - throw new Error("Unable to write to object store bucket.") - } - } -} - -/** - * Uploads the contents of a file given the required parameters, useful when - * temp files in use (for example file uploaded as an attachment). 
- */ -export const upload = async ({ - bucket: bucketName, - filename, - path, - type, - metadata, -}: UploadParams) => { - const extension = filename.split(".").pop() - const fileBytes = fs.readFileSync(path) - - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - - let contentType = type - if (!contentType) { - contentType = extension - ? CONTENT_TYPE_MAP[extension.toLowerCase()] - : CONTENT_TYPE_MAP.txt - } - const config: any = { - // windows file paths need to be converted to forward slashes for s3 - Key: sanitizeKey(filename), - Body: fileBytes, - ContentType: contentType, - } - if (metadata && typeof metadata === "object") { - // remove any nullish keys from the metadata object, as these may be considered invalid - for (let key of Object.keys(metadata)) { - if (!metadata[key] || typeof metadata[key] !== "string") { - delete metadata[key] - } - } - config.Metadata = metadata - } - return objectStore.upload(config).promise() -} - -/** - * Similar to the upload function but can be used to send a file stream - * through to the object store. - */ -export const streamUpload = async ( - bucketName: string, - filename: string, - stream: any, - extra = {} -) => { - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - - // Set content type for certain known extensions - if (filename?.endsWith(".js")) { - extra = { - ...extra, - ContentType: "application/javascript", - } - } else if (filename?.endsWith(".svg")) { - extra = { - ...extra, - ContentType: "image", - } - } - - const params = { - Bucket: sanitizeBucket(bucketName), - Key: sanitizeKey(filename), - Body: stream, - ...extra, - } - return objectStore.upload(params).promise() -} - -/** - * retrieves the contents of a file from the object store, if it is a known content type it - * will be converted, otherwise it will be returned as a buffer stream. - */ -export const retrieve = async (bucketName: string, filepath: string) => { - const objectStore = ObjectStore(bucketName) - const params = { - Bucket: sanitizeBucket(bucketName), - Key: sanitizeKey(filepath), - } - const response: any = await objectStore.getObject(params).promise() - // currently these are all strings - if (STRING_CONTENT_TYPES.includes(response.ContentType)) { - return response.Body.toString("utf8") - } else { - return response.Body - } -} - -export const listAllObjects = async (bucketName: string, path: string) => { - const objectStore = ObjectStore(bucketName) - const list = (params: ListParams = {}) => { - return objectStore - .listObjectsV2({ - ...params, - Bucket: sanitizeBucket(bucketName), - Prefix: sanitizeKey(path), - }) - .promise() - } - let isTruncated = false, - token, - objects: AWS.S3.Types.Object[] = [] - do { - let params: ListParams = {} - if (token) { - params.ContinuationToken = token - } - const response = await list(params) - if (response.Contents) { - objects = objects.concat(response.Contents) - } - isTruncated = !!response.IsTruncated - } while (isTruncated) - return objects -} - -/** - * Same as retrieval function but puts to a temporary file. 
- */ -export const retrieveToTmp = async (bucketName: string, filepath: string) => { - bucketName = sanitizeBucket(bucketName) - filepath = sanitizeKey(filepath) - const data = await retrieve(bucketName, filepath) - const outputPath = join(budibaseTempDir(), v4()) - fs.writeFileSync(outputPath, data) - return outputPath -} - -export const retrieveDirectory = async (bucketName: string, path: string) => { - let writePath = join(budibaseTempDir(), v4()) - fs.mkdirSync(writePath) - const objects = await listAllObjects(bucketName, path) - let fullObjects = await Promise.all( - objects.map(obj => retrieve(bucketName, obj.Key!)) - ) - let count = 0 - for (let obj of objects) { - const filename = obj.Key! - const data = fullObjects[count++] - const possiblePath = filename.split("/") - if (possiblePath.length > 1) { - const dirs = possiblePath.slice(0, possiblePath.length - 1) - fs.mkdirSync(join(writePath, ...dirs), { recursive: true }) - } - fs.writeFileSync(join(writePath, ...possiblePath), data) - } - return writePath -} - -/** - * Delete a single file. - */ -export const deleteFile = async (bucketName: string, filepath: string) => { - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - const params = { - Bucket: bucketName, - Key: filepath, - } - return objectStore.deleteObject(params) -} - -export const deleteFiles = async (bucketName: string, filepaths: string[]) => { - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - const params = { - Bucket: bucketName, - Delete: { - Objects: filepaths.map((path: any) => ({ Key: path })), - }, - } - return objectStore.deleteObjects(params).promise() -} - -/** - * Delete a path, including everything within. - */ -export const deleteFolder = async ( - bucketName: string, - folder: string -): Promise => { - bucketName = sanitizeBucket(bucketName) - folder = sanitizeKey(folder) - const client = ObjectStore(bucketName) - const listParams = { - Bucket: bucketName, - Prefix: folder, - } - - let response: any = await client.listObjects(listParams).promise() - if (response.Contents.length === 0) { - return - } - const deleteParams: any = { - Bucket: bucketName, - Delete: { - Objects: [], - }, - } - - response.Contents.forEach((content: any) => { - deleteParams.Delete.Objects.push({ Key: content.Key }) - }) - - response = await client.deleteObjects(deleteParams).promise() - // can only empty 1000 items at once - if (response.Deleted.length === 1000) { - return deleteFolder(bucketName, folder) - } -} - -export const uploadDirectory = async ( - bucketName: string, - localPath: string, - bucketPath: string -) => { - bucketName = sanitizeBucket(bucketName) - let uploads = [] - const files = fs.readdirSync(localPath, { withFileTypes: true }) - for (let file of files) { - const path = sanitizeKey(join(bucketPath, file.name)) - const local = join(localPath, file.name) - if (file.isDirectory()) { - uploads.push(uploadDirectory(bucketName, local, path)) - } else { - uploads.push(streamUpload(bucketName, path, fs.createReadStream(local))) - } - } - await Promise.all(uploads) - return files -} - -export const downloadTarballDirect = async ( - url: string, - path: string, - headers = {} -) => { - path = sanitizeKey(path) - const response = await fetch(url, { headers }) - if (!response.ok) { - throw new Error(`unexpected response ${response.statusText}`) - } - - await streamPipeline(response.body, zlib.Unzip(), tar.extract(path)) -} - -export const downloadTarball = async ( - url: 
string, - bucketName: string, - path: string -) => { - bucketName = sanitizeBucket(bucketName) - path = sanitizeKey(path) - const response = await fetch(url) - if (!response.ok) { - throw new Error(`unexpected response ${response.statusText}`) - } - - const tmpPath = join(budibaseTempDir(), path) - await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath)) - if (!env.isTest() && env.SELF_HOSTED) { - await uploadDirectory(bucketName, tmpPath, path) - } - // return the temporary path incase there is a use for it - return tmpPath -} +export * from "./objectStore" +export * from "./utils" diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts new file mode 100644 index 0000000000..2ae8848c53 --- /dev/null +++ b/packages/backend-core/src/objectStore/objectStore.ts @@ -0,0 +1,426 @@ +const sanitize = require("sanitize-s3-objectkey") +import AWS from "aws-sdk" +import stream from "stream" +import fetch from "node-fetch" +import tar from "tar-fs" +const zlib = require("zlib") +import { promisify } from "util" +import { join } from "path" +import fs from "fs" +import env from "../environment" +import { budibaseTempDir, ObjectStoreBuckets } from "./utils" +import { v4 } from "uuid" +import { APP_PREFIX, APP_DEV_PREFIX } from "../db" + +const streamPipeline = promisify(stream.pipeline) +// use this as a temporary store of buckets that are being created +const STATE = { + bucketCreationPromises: {}, +} + +type ListParams = { + ContinuationToken?: string +} + +type UploadParams = { + bucket: string + filename: string + path: string + type?: string + // can be undefined, we will remove it + metadata?: { + [key: string]: string | undefined + } +} + +const CONTENT_TYPE_MAP: any = { + txt: "text/plain", + html: "text/html", + css: "text/css", + js: "application/javascript", + json: "application/json", + gz: "application/gzip", +} +const STRING_CONTENT_TYPES = [ + CONTENT_TYPE_MAP.html, + CONTENT_TYPE_MAP.css, + CONTENT_TYPE_MAP.js, + CONTENT_TYPE_MAP.json, +] + +// does normal sanitization and then swaps dev apps to apps +export function sanitizeKey(input: string) { + return sanitize(sanitizeBucket(input)).replace(/\\/g, "/") +} + +// simply handles the dev app to app conversion +export function sanitizeBucket(input: string) { + return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX) +} + +function publicPolicy(bucketName: string) { + return { + Version: "2012-10-17", + Statement: [ + { + Effect: "Allow", + Principal: { + AWS: ["*"], + }, + Action: "s3:GetObject", + Resource: [`arn:aws:s3:::${bucketName}/*`], + }, + ], + } +} + +const PUBLIC_BUCKETS = [ + ObjectStoreBuckets.APPS, + ObjectStoreBuckets.GLOBAL, + ObjectStoreBuckets.PLUGINS, +] + +/** + * Gets a connection to the object store using the S3 SDK. + * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from. + * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage. 
+ * @constructor + */ +export const ObjectStore = (bucket: string) => { + const config: any = { + s3ForcePathStyle: true, + signatureVersion: "v4", + apiVersion: "2006-03-01", + accessKeyId: env.MINIO_ACCESS_KEY, + secretAccessKey: env.MINIO_SECRET_KEY, + region: env.AWS_REGION, + } + if (bucket) { + config.params = { + Bucket: sanitizeBucket(bucket), + } + } + if (env.MINIO_URL) { + config.endpoint = env.MINIO_URL + } + return new AWS.S3(config) +} + +/** + * Given an object store and a bucket name this will make sure the bucket exists, + * if it does not exist then it will create it. + */ +export const makeSureBucketExists = async (client: any, bucketName: string) => { + bucketName = sanitizeBucket(bucketName) + try { + await client + .headBucket({ + Bucket: bucketName, + }) + .promise() + } catch (err: any) { + const promises: any = STATE.bucketCreationPromises + const doesntExist = err.statusCode === 404, + noAccess = err.statusCode === 403 + if (promises[bucketName]) { + await promises[bucketName] + } else if (doesntExist || noAccess) { + if (doesntExist) { + // bucket doesn't exist create it + promises[bucketName] = client + .createBucket({ + Bucket: bucketName, + }) + .promise() + await promises[bucketName] + delete promises[bucketName] + } + // public buckets are quite hidden in the system, make sure + // no bucket is set accidentally + if (PUBLIC_BUCKETS.includes(bucketName)) { + await client + .putBucketPolicy({ + Bucket: bucketName, + Policy: JSON.stringify(publicPolicy(bucketName)), + }) + .promise() + } + } else { + throw new Error("Unable to write to object store bucket.") + } + } +} + +/** + * Uploads the contents of a file given the required parameters, useful when + * temp files in use (for example file uploaded as an attachment). + */ +export const upload = async ({ + bucket: bucketName, + filename, + path, + type, + metadata, +}: UploadParams) => { + const extension = filename.split(".").pop() + const fileBytes = fs.readFileSync(path) + + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + + let contentType = type + if (!contentType) { + contentType = extension + ? CONTENT_TYPE_MAP[extension.toLowerCase()] + : CONTENT_TYPE_MAP.txt + } + const config: any = { + // windows file paths need to be converted to forward slashes for s3 + Key: sanitizeKey(filename), + Body: fileBytes, + ContentType: contentType, + } + if (metadata && typeof metadata === "object") { + // remove any nullish keys from the metadata object, as these may be considered invalid + for (let key of Object.keys(metadata)) { + if (!metadata[key] || typeof metadata[key] !== "string") { + delete metadata[key] + } + } + config.Metadata = metadata + } + return objectStore.upload(config).promise() +} + +/** + * Similar to the upload function but can be used to send a file stream + * through to the object store. 
+ */ +export const streamUpload = async ( + bucketName: string, + filename: string, + stream: any, + extra = {} +) => { + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + + // Set content type for certain known extensions + if (filename?.endsWith(".js")) { + extra = { + ...extra, + ContentType: "application/javascript", + } + } else if (filename?.endsWith(".svg")) { + extra = { + ...extra, + ContentType: "image", + } + } + + const params = { + Bucket: sanitizeBucket(bucketName), + Key: sanitizeKey(filename), + Body: stream, + ...extra, + } + return objectStore.upload(params).promise() +} + +/** + * retrieves the contents of a file from the object store, if it is a known content type it + * will be converted, otherwise it will be returned as a buffer stream. + */ +export const retrieve = async (bucketName: string, filepath: string) => { + const objectStore = ObjectStore(bucketName) + const params = { + Bucket: sanitizeBucket(bucketName), + Key: sanitizeKey(filepath), + } + const response: any = await objectStore.getObject(params).promise() + // currently these are all strings + if (STRING_CONTENT_TYPES.includes(response.ContentType)) { + return response.Body.toString("utf8") + } else { + return response.Body + } +} + +export const listAllObjects = async (bucketName: string, path: string) => { + const objectStore = ObjectStore(bucketName) + const list = (params: ListParams = {}) => { + return objectStore + .listObjectsV2({ + ...params, + Bucket: sanitizeBucket(bucketName), + Prefix: sanitizeKey(path), + }) + .promise() + } + let isTruncated = false, + token, + objects: AWS.S3.Types.Object[] = [] + do { + let params: ListParams = {} + if (token) { + params.ContinuationToken = token + } + const response = await list(params) + if (response.Contents) { + objects = objects.concat(response.Contents) + } + isTruncated = !!response.IsTruncated + } while (isTruncated) + return objects +} + +/** + * Same as retrieval function but puts to a temporary file. + */ +export const retrieveToTmp = async (bucketName: string, filepath: string) => { + bucketName = sanitizeBucket(bucketName) + filepath = sanitizeKey(filepath) + const data = await retrieve(bucketName, filepath) + const outputPath = join(budibaseTempDir(), v4()) + fs.writeFileSync(outputPath, data) + return outputPath +} + +export const retrieveDirectory = async (bucketName: string, path: string) => { + let writePath = join(budibaseTempDir(), v4()) + fs.mkdirSync(writePath) + const objects = await listAllObjects(bucketName, path) + let fullObjects = await Promise.all( + objects.map(obj => retrieve(bucketName, obj.Key!)) + ) + let count = 0 + for (let obj of objects) { + const filename = obj.Key! + const data = fullObjects[count++] + const possiblePath = filename.split("/") + if (possiblePath.length > 1) { + const dirs = possiblePath.slice(0, possiblePath.length - 1) + fs.mkdirSync(join(writePath, ...dirs), { recursive: true }) + } + fs.writeFileSync(join(writePath, ...possiblePath), data) + } + return writePath +} + +/** + * Delete a single file. 
+ */ +export const deleteFile = async (bucketName: string, filepath: string) => { + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + const params = { + Bucket: bucketName, + Key: filepath, + } + return objectStore.deleteObject(params) +} + +export const deleteFiles = async (bucketName: string, filepaths: string[]) => { + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + const params = { + Bucket: bucketName, + Delete: { + Objects: filepaths.map((path: any) => ({ Key: path })), + }, + } + return objectStore.deleteObjects(params).promise() +} + +/** + * Delete a path, including everything within. + */ +export const deleteFolder = async ( + bucketName: string, + folder: string +): Promise => { + bucketName = sanitizeBucket(bucketName) + folder = sanitizeKey(folder) + const client = ObjectStore(bucketName) + const listParams = { + Bucket: bucketName, + Prefix: folder, + } + + let response: any = await client.listObjects(listParams).promise() + if (response.Contents.length === 0) { + return + } + const deleteParams: any = { + Bucket: bucketName, + Delete: { + Objects: [], + }, + } + + response.Contents.forEach((content: any) => { + deleteParams.Delete.Objects.push({ Key: content.Key }) + }) + + response = await client.deleteObjects(deleteParams).promise() + // can only empty 1000 items at once + if (response.Deleted.length === 1000) { + return deleteFolder(bucketName, folder) + } +} + +export const uploadDirectory = async ( + bucketName: string, + localPath: string, + bucketPath: string +) => { + bucketName = sanitizeBucket(bucketName) + let uploads = [] + const files = fs.readdirSync(localPath, { withFileTypes: true }) + for (let file of files) { + const path = sanitizeKey(join(bucketPath, file.name)) + const local = join(localPath, file.name) + if (file.isDirectory()) { + uploads.push(uploadDirectory(bucketName, local, path)) + } else { + uploads.push(streamUpload(bucketName, path, fs.createReadStream(local))) + } + } + await Promise.all(uploads) + return files +} + +export const downloadTarballDirect = async ( + url: string, + path: string, + headers = {} +) => { + path = sanitizeKey(path) + const response = await fetch(url, { headers }) + if (!response.ok) { + throw new Error(`unexpected response ${response.statusText}`) + } + + await streamPipeline(response.body, zlib.Unzip(), tar.extract(path)) +} + +export const downloadTarball = async ( + url: string, + bucketName: string, + path: string +) => { + bucketName = sanitizeBucket(bucketName) + path = sanitizeKey(path) + const response = await fetch(url) + if (!response.ok) { + throw new Error(`unexpected response ${response.statusText}`) + } + + const tmpPath = join(budibaseTempDir(), path) + await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath)) + if (!env.isTest() && env.SELF_HOSTED) { + await uploadDirectory(bucketName, tmpPath, path) + } + // return the temporary path incase there is a use for it + return tmpPath +} diff --git a/packages/backend-core/src/objectStore/utils.js b/packages/backend-core/src/objectStore/utils.ts similarity index 71% rename from packages/backend-core/src/objectStore/utils.js rename to packages/backend-core/src/objectStore/utils.ts index 2d4faf55d1..f3c9e93943 100644 --- a/packages/backend-core/src/objectStore/utils.js +++ b/packages/backend-core/src/objectStore/utils.ts @@ -1,14 +1,15 @@ -const { join } = require("path") -const { tmpdir } = require("os") -const fs = require("fs") -const env = 
require("../environment") +import { join } from "path" +import { tmpdir } from "os" +import fs from "fs" +import env from "../environment" /**************************************************** * NOTE: When adding a new bucket - name * * sure that S3 usages (like budibase-infra) * * have been updated to have a unique bucket name. * ****************************************************/ -exports.ObjectStoreBuckets = { +// can't be an enum - only numbers can be used for computed types +export const ObjectStoreBuckets = { BACKUPS: env.BACKUPS_BUCKET_NAME, APPS: env.APPS_BUCKET_NAME, TEMPLATES: env.TEMPLATES_BUCKET_NAME, @@ -22,6 +23,6 @@ if (!fs.existsSync(bbTmp)) { fs.mkdirSync(bbTmp) } -exports.budibaseTempDir = function () { +export function budibaseTempDir() { return bbTmp } diff --git a/packages/backend-core/src/pino.js b/packages/backend-core/src/pino.js deleted file mode 100644 index 69962b3841..0000000000 --- a/packages/backend-core/src/pino.js +++ /dev/null @@ -1,11 +0,0 @@ -const env = require("./environment") - -exports.pinoSettings = () => ({ - prettyPrint: { - levelFirst: true, - }, - level: env.LOG_LEVEL || "error", - autoLogging: { - ignore: req => req.url.includes("/health"), - }, -}) diff --git a/packages/backend-core/src/pino.ts b/packages/backend-core/src/pino.ts new file mode 100644 index 0000000000..4140f428e1 --- /dev/null +++ b/packages/backend-core/src/pino.ts @@ -0,0 +1,13 @@ +import env from "./environment" + +export function pinoSettings() { + return { + prettyPrint: { + levelFirst: true, + }, + level: env.LOG_LEVEL || "error", + autoLogging: { + ignore: (req: { url: string }) => req.url.includes("/health"), + }, + } +} diff --git a/packages/backend-core/src/pkg/cache.ts b/packages/backend-core/src/pkg/cache.ts deleted file mode 100644 index c40a686260..0000000000 --- a/packages/backend-core/src/pkg/cache.ts +++ /dev/null @@ -1,13 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -import * as generic from "../cache/generic" -import * as user from "../cache/user" -import * as app from "../cache/appMetadata" -import * as writethrough from "../cache/writethrough" - -export = { - app, - user, - writethrough, - ...generic, -} diff --git a/packages/backend-core/src/pkg/context.ts b/packages/backend-core/src/pkg/context.ts deleted file mode 100644 index 4915cc6e41..0000000000 --- a/packages/backend-core/src/pkg/context.ts +++ /dev/null @@ -1,26 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -import { - getAppDB, - getDevAppDB, - getProdAppDB, - getAppId, - updateAppId, - doInAppContext, - doInTenant, - doInContext, -} from "../context" - -import * as identity from "../context/identity" - -export = { - getAppDB, - getDevAppDB, - getProdAppDB, - getAppId, - updateAppId, - doInAppContext, - doInTenant, - doInContext, - identity, -} diff --git a/packages/backend-core/src/pkg/objectStore.ts b/packages/backend-core/src/pkg/objectStore.ts deleted file mode 100644 index 0447c6b3c2..0000000000 --- a/packages/backend-core/src/pkg/objectStore.ts +++ /dev/null @@ -1,4 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -export * from "../objectStore" -export * from "../objectStore/utils" diff --git a/packages/backend-core/src/pkg/redis.ts b/packages/backend-core/src/pkg/redis.ts deleted file mode 100644 index 
297c2b54f4..0000000000 --- a/packages/backend-core/src/pkg/redis.ts +++ /dev/null @@ -1,13 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -import Client from "../redis" -import utils from "../redis/utils" -import clients from "../redis/init" -import * as redlock from "../redis/redlock" - -export = { - Client, - utils, - clients, - redlock, -} diff --git a/packages/backend-core/src/pkg/utils.ts b/packages/backend-core/src/pkg/utils.ts deleted file mode 100644 index 5272046524..0000000000 --- a/packages/backend-core/src/pkg/utils.ts +++ /dev/null @@ -1,4 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -export * from "../utils" -export * from "../hashing" diff --git a/packages/backend-core/src/plugin/index.ts b/packages/backend-core/src/plugin/index.ts index a6d1853007..3eeaeaa90c 100644 --- a/packages/backend-core/src/plugin/index.ts +++ b/packages/backend-core/src/plugin/index.ts @@ -1,7 +1 @@ -import * as utils from "./utils" - -const pkg = { - ...utils, -} - -export = pkg +export * from "./utils" diff --git a/packages/backend-core/src/plugin/utils.js b/packages/backend-core/src/plugin/utils.ts similarity index 89% rename from packages/backend-core/src/plugin/utils.js rename to packages/backend-core/src/plugin/utils.ts index b943747483..7b62248bb5 100644 --- a/packages/backend-core/src/plugin/utils.js +++ b/packages/backend-core/src/plugin/utils.ts @@ -1,9 +1,5 @@ -const { - DatasourceFieldType, - QueryType, - PluginType, -} = require("@budibase/types") -const joi = require("joi") +import { DatasourceFieldType, QueryType, PluginType } from "@budibase/types" +import joi from "joi" const DATASOURCE_TYPES = [ "Relational", @@ -14,14 +10,14 @@ const DATASOURCE_TYPES = [ "API", ] -function runJoi(validator, schema) { +function runJoi(validator: joi.Schema, schema: any) { const { error } = validator.validate(schema) if (error) { throw error } } -function validateComponent(schema) { +function validateComponent(schema: any) { const validator = joi.object({ type: joi.string().allow("component").required(), metadata: joi.object().unknown(true).required(), @@ -37,7 +33,7 @@ function validateComponent(schema) { runJoi(validator, schema) } -function validateDatasource(schema) { +function validateDatasource(schema: any) { const fieldValidator = joi.object({ type: joi .string() @@ -86,7 +82,7 @@ function validateDatasource(schema) { runJoi(validator, schema) } -exports.validate = schema => { +export function validate(schema: any) { switch (schema?.type) { case PluginType.COMPONENT: validateComponent(schema) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index eb054766d7..acfff1c7b8 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,5 @@ import events from "events" -import { timeout } from "../../utils" +import { timeout } from "../utils" /** * Bull works with a Job wrapper around all messages that contains a lot more information about diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index b4eeeb31aa..b34d46e463 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -39,7 +39,7 @@ export function createQueue( return queue } -exports.shutdown = async () => { +export async function shutdown() { 
if (QUEUES.length) { clearInterval(cleanupInterval) for (let queue of QUEUES) { diff --git a/packages/backend-core/src/redis/index.ts b/packages/backend-core/src/redis/index.ts index 8a15320ff3..ea4379f048 100644 --- a/packages/backend-core/src/redis/index.ts +++ b/packages/backend-core/src/redis/index.ts @@ -1,278 +1,6 @@ -import RedisWrapper from "../redis" -const env = require("../environment") -// ioredis mock is all in memory -const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis") -const { - addDbPrefix, - removeDbPrefix, - getRedisOptions, - SEPARATOR, - SelectableDatabases, -} = require("./utils") - -const RETRY_PERIOD_MS = 2000 -const STARTUP_TIMEOUT_MS = 5000 -const CLUSTERED = false -const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT - -// for testing just generate the client once -let CLOSED = false -let CLIENTS: { [key: number]: any } = {} -// if in test always connected -let CONNECTED = env.isTest() - -function pickClient(selectDb: number): any { - return CLIENTS[selectDb] -} - -function connectionError( - selectDb: number, - timeout: NodeJS.Timeout, - err: Error | string -) { - // manually shut down, ignore errors - if (CLOSED) { - return - } - pickClient(selectDb).disconnect() - CLOSED = true - // always clear this on error - clearTimeout(timeout) - CONNECTED = false - console.error("Redis connection failed - " + err) - setTimeout(() => { - init() - }, RETRY_PERIOD_MS) -} - -/** - * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise - * will return the ioredis client which will be ready to use. - */ -function init(selectDb = DEFAULT_SELECT_DB) { - let timeout: NodeJS.Timeout - CLOSED = false - let client = pickClient(selectDb) - // already connected, ignore - if (client && CONNECTED) { - return - } - // testing uses a single in memory client - if (env.isTest()) { - CLIENTS[selectDb] = new Redis(getRedisOptions()) - } - // start the timer - only allowed 5 seconds to connect - timeout = setTimeout(() => { - if (!CONNECTED) { - connectionError( - selectDb, - timeout, - "Did not successfully connect in timeout" - ) - } - }, STARTUP_TIMEOUT_MS) - - // disconnect any lingering client - if (client) { - client.disconnect() - } - const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED) - - if (CLUSTERED) { - client = new Redis.Cluster([{ host, port }], opts) - } else if (redisProtocolUrl) { - client = new Redis(redisProtocolUrl) - } else { - client = new Redis(opts) - } - // attach handlers - client.on("end", (err: Error) => { - connectionError(selectDb, timeout, err) - }) - client.on("error", (err: Error) => { - connectionError(selectDb, timeout, err) - }) - client.on("connect", () => { - clearTimeout(timeout) - CONNECTED = true - }) - CLIENTS[selectDb] = client -} - -function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) { - return new Promise(resolve => { - if (pickClient(selectDb) == null) { - init() - } else if (CONNECTED) { - resolve("") - return - } - // check if the connection is ready - const interval = setInterval(() => { - if (CONNECTED) { - clearInterval(interval) - resolve("") - } - }, 500) - }) -} - -/** - * Utility function, takes a redis stream and converts it to a promisified response - - * this can only be done with redis streams because they will have an end. - * @param stream A redis stream, specifically as this type of stream will have an end. - * @param client The client to use for further lookups. 
- * @return {Promise} The final output of the stream - */ -function promisifyStream(stream: any, client: RedisWrapper) { - return new Promise((resolve, reject) => { - const outputKeys = new Set() - stream.on("data", (keys: string[]) => { - keys.forEach(key => { - outputKeys.add(key) - }) - }) - stream.on("error", (err: Error) => { - reject(err) - }) - stream.on("end", async () => { - const keysArray: string[] = Array.from(outputKeys) as string[] - try { - let getPromises = [] - for (let key of keysArray) { - getPromises.push(client.get(key)) - } - const jsonArray = await Promise.all(getPromises) - resolve( - keysArray.map(key => ({ - key: removeDbPrefix(key), - value: JSON.parse(jsonArray.shift()), - })) - ) - } catch (err) { - reject(err) - } - }) - }) -} - -export = class RedisWrapper { - _db: string - _select: number - - constructor(db: string, selectDb: number | null = null) { - this._db = db - this._select = selectDb || DEFAULT_SELECT_DB - } - - getClient() { - return pickClient(this._select) - } - - async init() { - CLOSED = false - init(this._select) - await waitForConnection(this._select) - return this - } - - async finish() { - CLOSED = true - this.getClient().disconnect() - } - - async scan(key = ""): Promise { - const db = this._db - key = `${db}${SEPARATOR}${key}` - let stream - if (CLUSTERED) { - let node = this.getClient().nodes("master") - stream = node[0].scanStream({ match: key + "*", count: 100 }) - } else { - stream = this.getClient().scanStream({ match: key + "*", count: 100 }) - } - return promisifyStream(stream, this.getClient()) - } - - async keys(pattern: string) { - const db = this._db - return this.getClient().keys(addDbPrefix(db, pattern)) - } - - async get(key: string) { - const db = this._db - let response = await this.getClient().get(addDbPrefix(db, key)) - // overwrite the prefixed key - if (response != null && response.key) { - response.key = key - } - // if its not an object just return the response - try { - return JSON.parse(response) - } catch (err) { - return response - } - } - - async bulkGet(keys: string[]) { - const db = this._db - if (keys.length === 0) { - return {} - } - const prefixedKeys = keys.map(key => addDbPrefix(db, key)) - let response = await this.getClient().mget(prefixedKeys) - if (Array.isArray(response)) { - let final: any = {} - let count = 0 - for (let result of response) { - if (result) { - let parsed - try { - parsed = JSON.parse(result) - } catch (err) { - parsed = result - } - final[keys[count]] = parsed - } - count++ - } - return final - } else { - throw new Error(`Invalid response: ${response}`) - } - } - - async store(key: string, value: any, expirySeconds: number | null = null) { - const db = this._db - if (typeof value === "object") { - value = JSON.stringify(value) - } - const prefixedKey = addDbPrefix(db, key) - await this.getClient().set(prefixedKey, value) - if (expirySeconds) { - await this.getClient().expire(prefixedKey, expirySeconds) - } - } - - async getTTL(key: string) { - const db = this._db - const prefixedKey = addDbPrefix(db, key) - return this.getClient().ttl(prefixedKey) - } - - async setExpiry(key: string, expirySeconds: number | null) { - const db = this._db - const prefixedKey = addDbPrefix(db, key) - await this.getClient().expire(prefixedKey, expirySeconds) - } - - async delete(key: string) { - const db = this._db - await this.getClient().del(addDbPrefix(db, key)) - } - - async clear() { - let items = await this.scan() - await Promise.all(items.map((obj: any) => this.delete(obj.key))) - } -} +// Mimic 
the outer package export for usage in index.ts +// The outer exports can't be used as they now reference dist directly +export { default as Client } from "./redis" +export * as utils from "./utils" +export * as clients from "./init" +export * as redlock from "./redlock" diff --git a/packages/backend-core/src/redis/init.js b/packages/backend-core/src/redis/init.js deleted file mode 100644 index 3150ef2c1c..0000000000 --- a/packages/backend-core/src/redis/init.js +++ /dev/null @@ -1,69 +0,0 @@ -const Client = require("./index") -const utils = require("./utils") - -let userClient, - sessionClient, - appClient, - cacheClient, - writethroughClient, - lockClient - -async function init() { - userClient = await new Client(utils.Databases.USER_CACHE).init() - sessionClient = await new Client(utils.Databases.SESSIONS).init() - appClient = await new Client(utils.Databases.APP_METADATA).init() - cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init() - lockClient = await new Client(utils.Databases.LOCKS).init() - writethroughClient = await new Client( - utils.Databases.WRITE_THROUGH, - utils.SelectableDatabases.WRITE_THROUGH - ).init() -} - -process.on("exit", async () => { - if (userClient) await userClient.finish() - if (sessionClient) await sessionClient.finish() - if (appClient) await appClient.finish() - if (cacheClient) await cacheClient.finish() - if (writethroughClient) await writethroughClient.finish() - if (lockClient) await lockClient.finish() -}) - -module.exports = { - getUserClient: async () => { - if (!userClient) { - await init() - } - return userClient - }, - getSessionClient: async () => { - if (!sessionClient) { - await init() - } - return sessionClient - }, - getAppClient: async () => { - if (!appClient) { - await init() - } - return appClient - }, - getCacheClient: async () => { - if (!cacheClient) { - await init() - } - return cacheClient - }, - getWritethroughClient: async () => { - if (!writethroughClient) { - await init() - } - return writethroughClient - }, - getLockClient: async () => { - if (!lockClient) { - await init() - } - return lockClient - }, -} diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts new file mode 100644 index 0000000000..00329ffb84 --- /dev/null +++ b/packages/backend-core/src/redis/init.ts @@ -0,0 +1,72 @@ +import Client from "./redis" +import * as utils from "./utils" + +let userClient: Client, + sessionClient: Client, + appClient: Client, + cacheClient: Client, + writethroughClient: Client, + lockClient: Client + +async function init() { + userClient = await new Client(utils.Databases.USER_CACHE).init() + sessionClient = await new Client(utils.Databases.SESSIONS).init() + appClient = await new Client(utils.Databases.APP_METADATA).init() + cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init() + lockClient = await new Client(utils.Databases.LOCKS).init() + writethroughClient = await new Client( + utils.Databases.WRITE_THROUGH, + utils.SelectableDatabase.WRITE_THROUGH + ).init() +} + +process.on("exit", async () => { + if (userClient) await userClient.finish() + if (sessionClient) await sessionClient.finish() + if (appClient) await appClient.finish() + if (cacheClient) await cacheClient.finish() + if (writethroughClient) await writethroughClient.finish() + if (lockClient) await lockClient.finish() +}) + +export async function getUserClient() { + if (!userClient) { + await init() + } + return userClient +} + +export async function getSessionClient() { + if (!sessionClient) { + await 
init() + } + return sessionClient +} + +export async function getAppClient() { + if (!appClient) { + await init() + } + return appClient +} + +export async function getCacheClient() { + if (!cacheClient) { + await init() + } + return cacheClient +} + +export async function getWritethroughClient() { + if (!writethroughClient) { + await init() + } + return writethroughClient +} + +export async function getLockClient() { + if (!lockClient) { + await init() + } + return lockClient +} diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts new file mode 100644 index 0000000000..58734fc4f1 --- /dev/null +++ b/packages/backend-core/src/redis/redis.ts @@ -0,0 +1,279 @@ +import env from "../environment" +// ioredis mock is all in memory +const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis") +import { + addDbPrefix, + removeDbPrefix, + getRedisOptions, + SEPARATOR, + SelectableDatabase, +} from "./utils" + +const RETRY_PERIOD_MS = 2000 +const STARTUP_TIMEOUT_MS = 5000 +const CLUSTERED = false +const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT + +// for testing just generate the client once +let CLOSED = false +let CLIENTS: { [key: number]: any } = {} +// if in test always connected +let CONNECTED = env.isTest() + +function pickClient(selectDb: number): any { + return CLIENTS[selectDb] +} + +function connectionError( + selectDb: number, + timeout: NodeJS.Timeout, + err: Error | string +) { + // manually shut down, ignore errors + if (CLOSED) { + return + } + pickClient(selectDb).disconnect() + CLOSED = true + // always clear this on error + clearTimeout(timeout) + CONNECTED = false + console.error("Redis connection failed - " + err) + setTimeout(() => { + init() + }, RETRY_PERIOD_MS) +} + +/** + * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise + * will return the ioredis client which will be ready to use. 
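
Note: the getters above lazily initialise every client on first use and then hand back cached singletons. A usage sketch (assumes Redis is reachable, or the ioredis-mock used in tests; cacheUser is a hypothetical caller):

import { getUserClient } from "./init"

async function cacheUser(userId: string, user: object) {
  const client = await getUserClient() // first call triggers init() for all clients
  await client.store(userId, user, 300) // stored under the "users" keyspace, 300s TTL
}
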
+ */ +function init(selectDb = DEFAULT_SELECT_DB) { + let timeout: NodeJS.Timeout + CLOSED = false + let client = pickClient(selectDb) + // already connected, ignore + if (client && CONNECTED) { + return + } + // testing uses a single in memory client + if (env.isTest()) { + CLIENTS[selectDb] = new Redis(getRedisOptions()) + } + // start the timer - only allowed 5 seconds to connect + timeout = setTimeout(() => { + if (!CONNECTED) { + connectionError( + selectDb, + timeout, + "Did not successfully connect in timeout" + ) + } + }, STARTUP_TIMEOUT_MS) + + // disconnect any lingering client + if (client) { + client.disconnect() + } + const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED) + + if (CLUSTERED) { + client = new Redis.Cluster([{ host, port }], opts) + } else if (redisProtocolUrl) { + client = new Redis(redisProtocolUrl) + } else { + client = new Redis(opts) + } + // attach handlers + client.on("end", (err: Error) => { + connectionError(selectDb, timeout, err) + }) + client.on("error", (err: Error) => { + connectionError(selectDb, timeout, err) + }) + client.on("connect", () => { + clearTimeout(timeout) + CONNECTED = true + }) + CLIENTS[selectDb] = client +} + +function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) { + return new Promise(resolve => { + if (pickClient(selectDb) == null) { + init() + } else if (CONNECTED) { + resolve("") + return + } + // check if the connection is ready + const interval = setInterval(() => { + if (CONNECTED) { + clearInterval(interval) + resolve("") + } + }, 500) + }) +} + +/** + * Utility function, takes a redis stream and converts it to a promisified response - + * this can only be done with redis streams because they will have an end. + * @param stream A redis stream, specifically as this type of stream will have an end. + * @param client The client to use for further lookups. 
+ * @return {Promise} The final output of the stream + */ +function promisifyStream(stream: any, client: RedisWrapper) { + return new Promise((resolve, reject) => { + const outputKeys = new Set() + stream.on("data", (keys: string[]) => { + keys.forEach(key => { + outputKeys.add(key) + }) + }) + stream.on("error", (err: Error) => { + reject(err) + }) + stream.on("end", async () => { + const keysArray: string[] = Array.from(outputKeys) as string[] + try { + let getPromises = [] + for (let key of keysArray) { + getPromises.push(client.get(key)) + } + const jsonArray = await Promise.all(getPromises) + resolve( + keysArray.map(key => ({ + key: removeDbPrefix(key), + value: JSON.parse(jsonArray.shift()), + })) + ) + } catch (err) { + reject(err) + } + }) + }) +} + +class RedisWrapper { + _db: string + _select: number + + constructor(db: string, selectDb: number | null = null) { + this._db = db + this._select = selectDb || DEFAULT_SELECT_DB + } + + getClient() { + return pickClient(this._select) + } + + async init() { + CLOSED = false + init(this._select) + await waitForConnection(this._select) + return this + } + + async finish() { + CLOSED = true + this.getClient().disconnect() + } + + async scan(key = ""): Promise { + const db = this._db + key = `${db}${SEPARATOR}${key}` + let stream + if (CLUSTERED) { + let node = this.getClient().nodes("master") + stream = node[0].scanStream({ match: key + "*", count: 100 }) + } else { + stream = this.getClient().scanStream({ match: key + "*", count: 100 }) + } + return promisifyStream(stream, this.getClient()) + } + + async keys(pattern: string) { + const db = this._db + return this.getClient().keys(addDbPrefix(db, pattern)) + } + + async get(key: string) { + const db = this._db + let response = await this.getClient().get(addDbPrefix(db, key)) + // overwrite the prefixed key + if (response != null && response.key) { + response.key = key + } + // if its not an object just return the response + try { + return JSON.parse(response) + } catch (err) { + return response + } + } + + async bulkGet(keys: string[]) { + const db = this._db + if (keys.length === 0) { + return {} + } + const prefixedKeys = keys.map(key => addDbPrefix(db, key)) + let response = await this.getClient().mget(prefixedKeys) + if (Array.isArray(response)) { + let final: any = {} + let count = 0 + for (let result of response) { + if (result) { + let parsed + try { + parsed = JSON.parse(result) + } catch (err) { + parsed = result + } + final[keys[count]] = parsed + } + count++ + } + return final + } else { + throw new Error(`Invalid response: ${response}`) + } + } + + async store(key: string, value: any, expirySeconds: number | null = null) { + const db = this._db + if (typeof value === "object") { + value = JSON.stringify(value) + } + const prefixedKey = addDbPrefix(db, key) + await this.getClient().set(prefixedKey, value) + if (expirySeconds) { + await this.getClient().expire(prefixedKey, expirySeconds) + } + } + + async getTTL(key: string) { + const db = this._db + const prefixedKey = addDbPrefix(db, key) + return this.getClient().ttl(prefixedKey) + } + + async setExpiry(key: string, expirySeconds: number | null) { + const db = this._db + const prefixedKey = addDbPrefix(db, key) + await this.getClient().expire(prefixedKey, expirySeconds) + } + + async delete(key: string) { + const db = this._db + await this.getClient().del(addDbPrefix(db, key)) + } + + async clear() { + let items = await this.scan() + await Promise.all(items.map((obj: any) => this.delete(obj.key))) + } +} + +export = 
RedisWrapper diff --git a/packages/backend-core/src/redis/utils.js b/packages/backend-core/src/redis/utils.ts similarity index 68% rename from packages/backend-core/src/redis/utils.js rename to packages/backend-core/src/redis/utils.ts index af719197b5..4c556ebd54 100644 --- a/packages/backend-core/src/redis/utils.js +++ b/packages/backend-core/src/redis/utils.ts @@ -1,10 +1,10 @@ -const env = require("../environment") +import env from "../environment" const SLOT_REFRESH_MS = 2000 const CONNECT_TIMEOUT_MS = 10000 -const SEPARATOR = "-" const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD +export const SEPARATOR = "-" /** * These Redis databases help us to segment up a Redis keyspace by prepending the @@ -12,23 +12,23 @@ const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD * can be split up a bit; allowing us to use scans on small databases to find some particular * keys within. * If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out - * of the default keyspace and use a separate one - the SelectableDatabases can be used for this. + * of the default keyspace and use a separate one - the SelectableDatabase can be used for this. */ -exports.Databases = { - PW_RESETS: "pwReset", - VERIFICATIONS: "verification", - INVITATIONS: "invitation", - DEV_LOCKS: "devLocks", - DEBOUNCE: "debounce", - SESSIONS: "session", - USER_CACHE: "users", - FLAGS: "flags", - APP_METADATA: "appMetadata", - QUERY_VARS: "queryVars", - LICENSES: "license", - GENERIC_CACHE: "data_cache", - WRITE_THROUGH: "writeThrough", - LOCKS: "locks", +export enum Databases { + PW_RESETS = "pwReset", + VERIFICATIONS = "verification", + INVITATIONS = "invitation", + DEV_LOCKS = "devLocks", + DEBOUNCE = "debounce", + SESSIONS = "session", + USER_CACHE = "users", + FLAGS = "flags", + APP_METADATA = "appMetadata", + QUERY_VARS = "queryVars", + LICENSES = "license", + GENERIC_CACHE = "data_cache", + WRITE_THROUGH = "writeThrough", + LOCKS = "locks", } /** @@ -40,30 +40,28 @@ exports.Databases = { * but if you need to walk through all values in a database periodically then a separate selectable * keyspace should be used. */ -exports.SelectableDatabases = { - DEFAULT: 0, - WRITE_THROUGH: 1, - UNUSED_1: 2, - UNUSED_2: 3, - UNUSED_3: 4, - UNUSED_4: 5, - UNUSED_5: 6, - UNUSED_6: 7, - UNUSED_7: 8, - UNUSED_8: 9, - UNUSED_9: 10, - UNUSED_10: 11, - UNUSED_11: 12, - UNUSED_12: 13, - UNUSED_13: 14, - UNUSED_14: 15, +export enum SelectableDatabase { + DEFAULT = 0, + WRITE_THROUGH = 1, + UNUSED_1 = 2, + UNUSED_2 = 3, + UNUSED_3 = 4, + UNUSED_4 = 5, + UNUSED_5 = 6, + UNUSED_6 = 7, + UNUSED_7 = 8, + UNUSED_8 = 9, + UNUSED_9 = 10, + UNUSED_10 = 11, + UNUSED_11 = 12, + UNUSED_12 = 13, + UNUSED_13 = 14, + UNUSED_14 = 15, } -exports.SEPARATOR = SEPARATOR - -exports.getRedisOptions = (clustered = false) => { +export function getRedisOptions(clustered = false) { let password = REDIS_PASSWORD - let url = REDIS_URL.split("//") + let url: string[] | string = REDIS_URL.split("//") // get rid of the protocol url = url.length > 1 ? 
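
Note: a worked example of the keyspace convention these enums encode (outputs follow from Databases.USER_CACHE = "users" and SEPARATOR = "-"):

import { Databases, addDbPrefix, removeDbPrefix } from "./utils"

const prefixed = addDbPrefix(Databases.USER_CACHE, "user_123")
console.log(prefixed) // "users-user_123"
console.log(removeDbPrefix(prefixed)) // "user_123"
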
url[1] : url[0] // check for a password etc @@ -84,7 +82,7 @@ exports.getRedisOptions = (clustered = false) => { redisProtocolUrl = REDIS_URL } - const opts = { + const opts: any = { connectTimeout: CONNECT_TIMEOUT_MS, } if (clustered) { @@ -92,7 +90,7 @@ exports.getRedisOptions = (clustered = false) => { opts.redisOptions.tls = {} opts.redisOptions.password = password opts.slotsRefreshTimeout = SLOT_REFRESH_MS - opts.dnsLookup = (address, callback) => callback(null, address) + opts.dnsLookup = (address: string, callback: any) => callback(null, address) } else { opts.host = host opts.port = port @@ -101,14 +99,14 @@ exports.getRedisOptions = (clustered = false) => { return { opts, host, port, redisProtocolUrl } } -exports.addDbPrefix = (db, key) => { +export function addDbPrefix(db: string, key: string) { if (key.includes(db)) { return key } return `${db}${SEPARATOR}${key}` } -exports.removeDbPrefix = key => { +export function removeDbPrefix(key: string) { let parts = key.split(SEPARATOR) if (parts.length >= 2) { parts.shift() diff --git a/packages/backend-core/src/security/apiKeys.js b/packages/backend-core/src/security/apiKeys.js deleted file mode 100644 index e90418abb8..0000000000 --- a/packages/backend-core/src/security/apiKeys.js +++ /dev/null @@ -1 +0,0 @@ -exports.lookupApiKey = async () => {} diff --git a/packages/backend-core/src/security/encryption.js b/packages/backend-core/src/security/encryption.ts similarity index 73% rename from packages/backend-core/src/security/encryption.js rename to packages/backend-core/src/security/encryption.ts index c31f597652..a9006f302d 100644 --- a/packages/backend-core/src/security/encryption.js +++ b/packages/backend-core/src/security/encryption.ts @@ -1,5 +1,5 @@ -const crypto = require("crypto") -const env = require("../environment") +import crypto from "crypto" +import env from "../environment" const ALGO = "aes-256-ctr" const SECRET = env.JWT_SECRET @@ -8,13 +8,13 @@ const ITERATIONS = 10000 const RANDOM_BYTES = 16 const STRETCH_LENGTH = 32 -function stretchString(string, salt) { +function stretchString(string: string, salt: Buffer) { return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512") } -exports.encrypt = input => { +export function encrypt(input: string) { const salt = crypto.randomBytes(RANDOM_BYTES) - const stretched = stretchString(SECRET, salt) + const stretched = stretchString(SECRET!, salt) const cipher = crypto.createCipheriv(ALGO, stretched, salt) const base = cipher.update(input) const final = cipher.final() @@ -22,10 +22,10 @@ exports.encrypt = input => { return `${salt.toString("hex")}${SEPARATOR}${encrypted}` } -exports.decrypt = input => { +export function decrypt(input: string) { const [salt, encrypted] = input.split(SEPARATOR) const saltBuffer = Buffer.from(salt, "hex") - const stretched = stretchString(SECRET, saltBuffer) + const stretched = stretchString(SECRET!, saltBuffer) const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer) const base = decipher.update(Buffer.from(encrypted, "hex")) const final = decipher.final() diff --git a/packages/backend-core/src/tenancy/index.ts b/packages/backend-core/src/tenancy/index.ts index e0006abab2..1618a136dd 100644 --- a/packages/backend-core/src/tenancy/index.ts +++ b/packages/backend-core/src/tenancy/index.ts @@ -1,9 +1,2 @@ -import * as context from "../context" -import * as tenancy from "./tenancy" - -const pkg = { - ...context, - ...tenancy, -} - -export = pkg +export * from "../context" +export * from "./tenancy" diff --git 
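
Note: a round-trip sketch for the encryption helpers converted above (requires env.JWT_SECRET to be set, since SECRET derives from it):

import { encrypt, decrypt } from "./encryption"

// output format is "<hex salt><SEPARATOR><hex ciphertext>"; a fresh random
// salt per call means two encryptions of the same input differ
const ciphertext = encrypt("connection-string")
console.log(decrypt(ciphertext)) // "connection-string"
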
a/packages/backend-core/src/tenancy/tenancy.ts b/packages/backend-core/src/tenancy/tenancy.ts index cc1088ab08..e0e0703433 100644 --- a/packages/backend-core/src/tenancy/tenancy.ts +++ b/packages/backend-core/src/tenancy/tenancy.ts @@ -1,10 +1,4 @@ -import { - doWithDB, - queryPlatformView, - StaticDatabases, - getGlobalDBName, - ViewName, -} from "../db" +import { doWithDB, queryPlatformView, getGlobalDBName } from "../db" import { DEFAULT_TENANT_ID, getTenantId, @@ -18,7 +12,7 @@ import { TenantResolutionStrategy, GetTenantIdOptions, } from "@budibase/types" -import { Header } from "../constants" +import { Header, StaticDatabases, ViewName } from "../constants" const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name diff --git a/packages/backend-core/src/tests/utils.spec.js b/packages/backend-core/src/tests/utils.spec.js index 76fc7b4481..fb3828921d 100644 --- a/packages/backend-core/src/tests/utils.spec.js +++ b/packages/backend-core/src/tests/utils.spec.js @@ -1,7 +1,8 @@ const { structures } = require("../../tests") const utils = require("../utils") const events = require("../events") -const { doInTenant, DEFAULT_TENANT_ID }= require("../context") +const { DEFAULT_TENANT_ID } = require("../constants") +const { doInTenant } = require("../context") describe("utils", () => { describe("platformLogout", () => { diff --git a/packages/backend-core/src/hashing.js b/packages/backend-core/src/utils/hashing.ts similarity index 53% rename from packages/backend-core/src/hashing.js rename to packages/backend-core/src/utils/hashing.ts index 7524e66043..220ffea47f 100644 --- a/packages/backend-core/src/hashing.js +++ b/packages/backend-core/src/utils/hashing.ts @@ -1,18 +1,14 @@ -const env = require("./environment") +import env from "../environment" +export * from "../newid" const bcrypt = env.JS_BCRYPT ? 
require("bcryptjs") : require("bcrypt") -const { v4 } = require("uuid") const SALT_ROUNDS = env.SALT_ROUNDS || 10 -exports.hash = async data => { +export async function hash(data: string) { const salt = await bcrypt.genSalt(SALT_ROUNDS) return bcrypt.hash(data, salt) } -exports.compare = async (data, encrypted) => { +export async function compare(data: string, encrypted: string) { return bcrypt.compare(data, encrypted) } - -exports.newid = function () { - return v4().replace(/-/g, "") -} diff --git a/packages/backend-core/src/utils/index.ts b/packages/backend-core/src/utils/index.ts new file mode 100644 index 0000000000..8e663bce52 --- /dev/null +++ b/packages/backend-core/src/utils/index.ts @@ -0,0 +1,2 @@ +export * from "./hashing" +export * from "./utils" diff --git a/packages/backend-core/src/utils.ts b/packages/backend-core/src/utils/utils.ts similarity index 92% rename from packages/backend-core/src/utils.ts rename to packages/backend-core/src/utils/utils.ts index c04d6196b3..3e9fbb177a 100644 --- a/packages/backend-core/src/utils.ts +++ b/packages/backend-core/src/utils/utils.ts @@ -1,17 +1,11 @@ -import { - DocumentType, - SEPARATOR, - ViewName, - getAllApps, - queryGlobalView, -} from "./db" -import { options } from "./middleware/passport/jwt" -import { Header, Cookie, MAX_VALID_DATE } from "./constants" -import env from "./environment" -import userCache from "./cache/user" -import { getSessionsForUser, invalidateSessions } from "./security/sessions" -import * as events from "./events" -import tenancy from "./tenancy" +import { getAllApps, queryGlobalView } from "../db" +import { options } from "../middleware/passport/jwt" +import { Header, Cookie, MAX_VALID_DATE } from "../constants" +import env from "../environment" +import * as userCache from "../cache/user" +import { getSessionsForUser, invalidateSessions } from "../security/sessions" +import * as events from "../events" +import * as tenancy from "../tenancy" import { App, BBContext, @@ -19,6 +13,7 @@ import { TenantResolutionStrategy, } from "@budibase/types" import { SetOption } from "cookies" +import { DocumentType, SEPARATOR, ViewName } from "../constants" const jwt = require("jsonwebtoken") const APP_PREFIX = DocumentType.APP + SEPARATOR diff --git a/packages/backend-core/tenancy.js b/packages/backend-core/tenancy.js deleted file mode 100644 index 9ca808b74e..0000000000 --- a/packages/backend-core/tenancy.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/tenancy") diff --git a/packages/backend-core/tsconfig.build.json b/packages/backend-core/tsconfig.build.json index f5b16eda1a..9682f3e32f 100644 --- a/packages/backend-core/tsconfig.build.json +++ b/packages/backend-core/tsconfig.build.json @@ -3,7 +3,6 @@ "target": "es6", "module": "commonjs", "lib": ["es2020"], - "allowJs": true, "strict": true, "noImplicitAny": true, "esModuleInterop": true, diff --git a/packages/backend-core/utils.js b/packages/backend-core/utils.js deleted file mode 100644 index 2ef920e103..0000000000 --- a/packages/backend-core/utils.js +++ /dev/null @@ -1,4 +0,0 @@ -module.exports = { - ...require("./src/utils"), - ...require("./src/hashing"), -} diff --git a/packages/cli/src/backups/objectStore.js b/packages/cli/src/backups/objectStore.js index 8d616d276a..407659d54a 100644 --- a/packages/cli/src/backups/objectStore.js +++ b/packages/cli/src/backups/objectStore.js @@ -1,14 +1,15 @@ +const { objectStore } = require("@budibase/backend-core") +const fs = require("fs") +const { join } = require("path") +const { TEMP_DIR, MINIO_DIR } = 
require("./utils") +const { progressBar } = require("../utils") const { ObjectStoreBuckets, ObjectStore, retrieve, uploadDirectory, makeSureBucketExists, -} = require("@budibase/backend-core/objectStore") -const fs = require("fs") -const { join } = require("path") -const { TEMP_DIR, MINIO_DIR } = require("./utils") -const { progressBar } = require("../utils") +} = objectStore const bucketList = Object.values(ObjectStoreBuckets) diff --git a/packages/cli/src/plugins/index.js b/packages/cli/src/plugins/index.js index 873be10612..5786c521b2 100644 --- a/packages/cli/src/plugins/index.js +++ b/packages/cli/src/plugins/index.js @@ -4,7 +4,7 @@ const { getSkeleton, fleshOutSkeleton } = require("./skeleton") const questions = require("../questions") const fs = require("fs") const { PLUGIN_TYPE_ARR } = require("@budibase/types") -const { validate } = require("@budibase/backend-core/plugins") +const { plugins } = require("@budibase/backend-core") const { runPkgCommand } = require("../exec") const { join } = require("path") const { success, error, info, moveDirectory } = require("../utils") @@ -107,7 +107,7 @@ async function verify() { } name = pkgJson.name version = pkgJson.version - validate(schemaJson) + plugins.validate(schemaJson) return { name, version } } catch (err) { if (err && err.message && err.message.includes("not valid JSON")) { diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts index 83d7bb0b8d..752353893f 100644 --- a/packages/server/src/api/controllers/application.ts +++ b/packages/server/src/api/controllers/application.ts @@ -390,7 +390,7 @@ const appPostCreate = async (ctx: any, app: App) => { export const create = async (ctx: any) => { const newApplication = await quotas.addApp(() => performAppCreate(ctx)) await appPostCreate(ctx, newApplication) - await cache.bustCache(cache.CacheKeys.CHECKLIST) + await cache.bustCache(cache.CacheKey.CHECKLIST) ctx.body = newApplication ctx.status = 200 } diff --git a/packages/server/src/api/controllers/automation.ts b/packages/server/src/api/controllers/automation.ts index 185da80216..0e6624d125 100644 --- a/packages/server/src/api/controllers/automation.ts +++ b/packages/server/src/api/controllers/automation.ts @@ -1,5 +1,5 @@ -import actions from "../../automations/actions" -import triggers from "../../automations/triggers" +import * as actions from "../../automations/actions" +import * as triggers from "../../automations/triggers" import { getAutomationParams, generateAutomationID, diff --git a/packages/server/src/api/controllers/integration.js b/packages/server/src/api/controllers/integration.js deleted file mode 100644 index 2f11ec19ed..0000000000 --- a/packages/server/src/api/controllers/integration.js +++ /dev/null @@ -1,14 +0,0 @@ -const { getDefinitions } = require("../../integrations") - -exports.fetch = async function (ctx) { - ctx.status = 200 - const defs = await getDefinitions() - - ctx.body = defs -} - -exports.find = async function (ctx) { - const defs = await getDefinitions() - ctx.status = 200 - ctx.body = defs[ctx.params.type] -} diff --git a/packages/server/src/api/controllers/integration.ts b/packages/server/src/api/controllers/integration.ts new file mode 100644 index 0000000000..743d216da7 --- /dev/null +++ b/packages/server/src/api/controllers/integration.ts @@ -0,0 +1,13 @@ +import { getDefinitions } from "../../integrations" +import { BBContext } from "@budibase/types" + +export async function fetch(ctx: BBContext) { + ctx.status = 200 + ctx.body = await 
getDefinitions() +} + +export async function find(ctx: BBContext) { + const defs = await getDefinitions() + ctx.status = 200 + ctx.body = defs[ctx.params.type] +} diff --git a/packages/server/src/api/controllers/migrations.js b/packages/server/src/api/controllers/migrations.js deleted file mode 100644 index 6a890349c3..0000000000 --- a/packages/server/src/api/controllers/migrations.js +++ /dev/null @@ -1,13 +0,0 @@ -const { migrate, MIGRATIONS } = require("../../migrations") - -exports.migrate = async ctx => { - const options = ctx.request.body - // don't await as can take a while, just return - migrate(options) - ctx.status = 200 -} - -exports.fetchDefinitions = async ctx => { - ctx.body = MIGRATIONS - ctx.status = 200 -} diff --git a/packages/server/src/api/controllers/migrations.ts b/packages/server/src/api/controllers/migrations.ts new file mode 100644 index 0000000000..8f1bfa22db --- /dev/null +++ b/packages/server/src/api/controllers/migrations.ts @@ -0,0 +1,14 @@ +import { migrate as migrationImpl, MIGRATIONS } from "../../migrations" +import { BBContext } from "@budibase/types" + +export async function migrate(ctx: BBContext) { + const options = ctx.request.body + // don't await as can take a while, just return + migrationImpl(options) + ctx.status = 200 +} + +export async function fetchDefinitions(ctx: BBContext) { + ctx.body = MIGRATIONS + ctx.status = 200 +} diff --git a/packages/server/src/api/controllers/plugin/utils.ts b/packages/server/src/api/controllers/plugin/utils.ts index 0e92fbb987..ff696b1aa1 100644 --- a/packages/server/src/api/controllers/plugin/utils.ts +++ b/packages/server/src/api/controllers/plugin/utils.ts @@ -1,7 +1,5 @@ -import { - createTempFolder, - downloadTarballDirect, -} from "../../../utilities/fileSystem" +import { createTempFolder } from "../../../utilities/fileSystem" +import { objectStore } from "@budibase/backend-core" export async function downloadUnzipTarball( url: string, @@ -10,7 +8,7 @@ export async function downloadUnzipTarball( ) { try { const path = createTempFolder(name) - await downloadTarballDirect(url, path, headers) + await objectStore.downloadTarballDirect(url, path, headers) return path } catch (e: any) { diff --git a/packages/server/src/api/controllers/public/rows.ts b/packages/server/src/api/controllers/public/rows.ts index 67059ec2f5..df856f1fe0 100644 --- a/packages/server/src/api/controllers/public/rows.ts +++ b/packages/server/src/api/controllers/public/rows.ts @@ -1,6 +1,6 @@ import * as rowController from "../row" import { addRev } from "./utils" -import { Row } from "../../../definitions/common" +import { Row } from "@budibase/types" import { convertBookmark } from "../../../utilities" // makes sure that the user doesn't need to pass in the type, tableId or _id params for diff --git a/packages/server/src/api/controllers/public/utils.ts b/packages/server/src/api/controllers/public/utils.ts index 6909db9628..a51b6b5fff 100644 --- a/packages/server/src/api/controllers/public/utils.ts +++ b/packages/server/src/api/controllers/public/utils.ts @@ -1,4 +1,4 @@ -const { getAppDB } = require("@budibase/backend-core/context") +import { context } from "@budibase/backend-core" import { isExternalTable } from "../../../integrations/utils" import { APP_PREFIX, DocumentType } from "../../../db/utils" @@ -13,7 +13,7 @@ export async function addRev( if (body._id.startsWith(APP_PREFIX)) { id = DocumentType.APP_METADATA } - const db = getAppDB() + const db = context.getAppDB() const dbDoc = await db.get(id) body._rev = dbDoc._rev // update ID 
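
Note: migrate above intentionally fires migrationImpl without awaiting it, so the HTTP request returns immediately. A sketch of the same pattern with an explicit catch (the error handling and the migrateInBackground name are assumptions, not part of the diff), since a rejected, un-awaited promise would otherwise go unhandled:

import { migrate as migrationImpl } from "../../migrations"
import { BBContext } from "@budibase/types"

export async function migrateInBackground(ctx: BBContext) {
  migrationImpl(ctx.request.body).catch(err => {
    // keep the failure out of the unhandled-rejection path
    console.error("migration failed", err)
  })
  ctx.status = 200
}
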
in case it is an app ID diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 2ef7953437..de4ce317ef 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -18,11 +18,7 @@ import { convertRowId, } from "../../../integrations/utils" import { getDatasourceAndQuery } from "./utils" -import { - DataSourceOperation, - FieldTypes, - RelationshipTypes, -} from "../../../constants" +import { FieldTypes, RelationshipTypes } from "../../../constants" import { breakExternalTableId, isSQL } from "../../../integrations/utils" import { processObjectSync } from "@budibase/string-templates" // @ts-ignore @@ -30,7 +26,7 @@ import { cloneDeep } from "lodash/fp" import { processFormulas, processDates } from "../../../utilities/rowProcessor" import { context } from "@budibase/backend-core" -interface ManyRelationship { +export interface ManyRelationship { tableId?: string id?: string isUpdate?: boolean @@ -38,708 +34,692 @@ interface ManyRelationship { [key: string]: any } -interface RunConfig { - id?: string +export interface RunConfig { + id?: any[] filters?: SearchFilters sort?: SortJson paginate?: PaginationJson + datasource?: Datasource row?: Row rows?: Row[] + tables?: Record } -module External { - function buildFilters( - id: string | undefined | string[], - filters: SearchFilters, - table: Table - ) { - const primary = table.primary - // if passed in array need to copy for shifting etc - let idCopy: undefined | string | any[] = cloneDeep(id) - if (filters) { - // need to map over the filters and make sure the _id field isn't present - for (let filter of Object.values(filters)) { - if (filter._id && primary) { - const parts = breakRowIdField(filter._id) - for (let field of primary) { - filter[field] = parts.shift() - } +function buildFilters( + id: string | undefined | string[], + filters: SearchFilters, + table: Table +) { + const primary = table.primary + // if passed in array need to copy for shifting etc + let idCopy: undefined | string | any[] = cloneDeep(id) + if (filters) { + // need to map over the filters and make sure the _id field isn't present + for (let filter of Object.values(filters)) { + if (filter._id && primary) { + const parts = breakRowIdField(filter._id) + for (let field of primary) { + filter[field] = parts.shift() } - // make sure this field doesn't exist on any filter - delete filter._id } - } - // there is no id, just use the user provided filters - if (!idCopy || !table) { - return filters - } - // if used as URL parameter it will have been joined - if (!Array.isArray(idCopy)) { - idCopy = breakRowIdField(idCopy) - } - const equal: any = {} - if (primary && idCopy) { - for (let field of primary) { - // work through the ID and get the parts - equal[field] = idCopy.shift() - } - } - return { - equal, + // make sure this field doesn't exist on any filter + delete filter._id } } - - /** - * This function checks the incoming parameters to make sure all the inputs are - * valid based on on the table schema. The main thing this is looking for is when a - * user has made use of the _id field of a row for a foreign key or a search parameter. - * In these cases the key will be sent up as [1], rather than 1. In these cases we will - * simplify it down to the requirements. This function is quite complex as we try to be - * relatively restrictive over what types of columns we will perform this action for. 
- */ - function cleanupConfig(config: RunConfig, table: Table): RunConfig { - const primaryOptions = [ - FieldTypes.STRING, - FieldTypes.LONGFORM, - FieldTypes.OPTIONS, - FieldTypes.NUMBER, - ] - // filter out fields which cannot be keys - const fieldNames = Object.entries(table.schema) - .filter(schema => primaryOptions.find(val => val === schema[1].type)) - .map(([fieldName]) => fieldName) - const iterateObject = (obj: { [key: string]: any }) => { - for (let [field, value] of Object.entries(obj)) { - if (fieldNames.find(name => name === field) && isRowId(value)) { - obj[field] = convertRowId(value) - } - } - } - // check the row and filters to make sure they aren't a key of some sort - if (config.filters) { - for (let [key, filter] of Object.entries(config.filters)) { - // oneOf is an array, don't iterate it - if ( - typeof filter !== "object" || - Object.keys(filter).length === 0 || - key === FilterType.ONE_OF - ) { - continue - } - iterateObject(filter) - } - } - if (config.row) { - iterateObject(config.row) - } - - return config + // there is no id, just use the user provided filters + if (!idCopy || !table) { + return filters } - - function generateIdForRow(row: Row | undefined, table: Table): string { - const primary = table.primary - if (!row || !primary) { - return "" - } - // build id array - let idParts = [] + // if used as URL parameter it will have been joined + if (!Array.isArray(idCopy)) { + idCopy = breakRowIdField(idCopy) + } + const equal: any = {} + if (primary && idCopy) { for (let field of primary) { - // need to handle table name + field or just field, depending on if relationships used - const fieldValue = row[`${table.name}.${field}`] || row[field] - if (fieldValue) { - idParts.push(fieldValue) + // work through the ID and get the parts + equal[field] = idCopy.shift() + } + } + return { + equal, + } +} + +/** + * This function checks the incoming parameters to make sure all the inputs are + * valid based on on the table schema. The main thing this is looking for is when a + * user has made use of the _id field of a row for a foreign key or a search parameter. + * In these cases the key will be sent up as [1], rather than 1. In these cases we will + * simplify it down to the requirements. This function is quite complex as we try to be + * relatively restrictive over what types of columns we will perform this action for. 
+ */ +function cleanupConfig(config: RunConfig, table: Table): RunConfig { + const primaryOptions = [ + FieldTypes.STRING, + FieldTypes.LONGFORM, + FieldTypes.OPTIONS, + FieldTypes.NUMBER, + ] + // filter out fields which cannot be keys + const fieldNames = Object.entries(table.schema) + .filter(schema => primaryOptions.find(val => val === schema[1].type)) + .map(([fieldName]) => fieldName) + const iterateObject = (obj: { [key: string]: any }) => { + for (let [field, value] of Object.entries(obj)) { + if (fieldNames.find(name => name === field) && isRowId(value)) { + obj[field] = convertRowId(value) } } - if (idParts.length === 0) { - return "" - } - return generateRowIdField(idParts) } - - function getEndpoint(tableId: string | undefined, operation: string) { - if (!tableId) { - return {} - } - const { datasourceId, tableName } = breakExternalTableId(tableId) - return { - datasourceId, - entityId: tableName, - operation, - } - } - - function basicProcessing(row: Row, table: Table): Row { - const thisRow: Row = {} - // filter the row down to what is actually the row (not joined) - for (let fieldName of Object.keys(table.schema)) { - const pathValue = row[`${table.name}.${fieldName}`] - const value = pathValue != null ? pathValue : row[fieldName] - // all responses include "select col as table.col" so that overlaps are handled - if (value != null) { - thisRow[fieldName] = value - } - } - thisRow._id = generateIdForRow(row, table) - thisRow.tableId = table._id - thisRow._rev = "rev" - return processFormulas(table, thisRow) - } - - function fixArrayTypes(row: Row, table: Table) { - for (let [fieldName, schema] of Object.entries(table.schema)) { + // check the row and filters to make sure they aren't a key of some sort + if (config.filters) { + for (let [key, filter] of Object.entries(config.filters)) { + // oneOf is an array, don't iterate it if ( - schema.type === FieldTypes.ARRAY && - typeof row[fieldName] === "string" + typeof filter !== "object" || + Object.keys(filter).length === 0 || + key === FilterType.ONE_OF ) { - try { - row[fieldName] = JSON.parse(row[fieldName]) - } catch (err) { - // couldn't convert back to array, ignore - delete row[fieldName] + continue + } + iterateObject(filter) + } + } + if (config.row) { + iterateObject(config.row) + } + + return config +} + +function generateIdForRow(row: Row | undefined, table: Table): string { + const primary = table.primary + if (!row || !primary) { + return "" + } + // build id array + let idParts = [] + for (let field of primary) { + // need to handle table name + field or just field, depending on if relationships used + const fieldValue = row[`${table.name}.${field}`] || row[field] + if (fieldValue) { + idParts.push(fieldValue) + } + } + if (idParts.length === 0) { + return "" + } + return generateRowIdField(idParts) +} + +function getEndpoint(tableId: string | undefined, operation: string) { + if (!tableId) { + return {} + } + const { datasourceId, tableName } = breakExternalTableId(tableId) + return { + datasourceId, + entityId: tableName, + operation, + } +} + +function basicProcessing(row: Row, table: Table): Row { + const thisRow: Row = {} + // filter the row down to what is actually the row (not joined) + for (let fieldName of Object.keys(table.schema)) { + const pathValue = row[`${table.name}.${fieldName}`] + const value = pathValue != null ? 
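
Note: a worked example of the ID-to-filter translation that buildFilters implements above (values illustrative; breakRowIdField's exact parsing lives in integrations/utils):

// table.primary = ["id"]             one primary key column
// id = "[42]"                        _id as sent up from the client
// breakRowIdField(id) -> [42]        unpacked primary key parts
// result -> { equal: { id: 42 } }    per-column equality filters
//
// With a composite key ["tenant", "id"] and id "[5,42]", the parts are
// consumed in order: { equal: { tenant: 5, id: 42 } }
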
pathValue : row[fieldName] + // all responses include "select col as table.col" so that overlaps are handled + if (value != null) { + thisRow[fieldName] = value + } + } + thisRow._id = generateIdForRow(row, table) + thisRow.tableId = table._id + thisRow._rev = "rev" + return processFormulas(table, thisRow) +} + +function fixArrayTypes(row: Row, table: Table) { + for (let [fieldName, schema] of Object.entries(table.schema)) { + if ( + schema.type === FieldTypes.ARRAY && + typeof row[fieldName] === "string" + ) { + try { + row[fieldName] = JSON.parse(row[fieldName]) + } catch (err) { + // couldn't convert back to array, ignore + delete row[fieldName] + } + } + } + return row +} + +function isOneSide(field: FieldSchema) { + return ( + field.relationshipType && field.relationshipType.split("-")[0] === "one" + ) +} + +export class ExternalRequest { + private operation: Operation + private tableId: string + private datasource?: Datasource + private tables: { [key: string]: Table } = {} + + constructor(operation: Operation, tableId: string, datasource?: Datasource) { + this.operation = operation + this.tableId = tableId + this.datasource = datasource + if (datasource && datasource.entities) { + this.tables = datasource.entities + } + } + + getTable(tableId: string | undefined): Table | undefined { + if (!tableId) { + throw "Table ID is unknown, cannot find table" + } + const { tableName } = breakExternalTableId(tableId) + if (tableName) { + return this.tables[tableName] + } + } + + inputProcessing(row: Row | undefined, table: Table) { + if (!row) { + return { row, manyRelationships: [] } + } + // we don't really support composite keys for relationships, this is why [0] is used + // @ts-ignore + const tablePrimary: string = table.primary[0] + let newRow: Row = {}, + manyRelationships: ManyRelationship[] = [] + for (let [key, field] of Object.entries(table.schema)) { + // if set already, or not set just skip it + if ( + row[key] == null || + newRow[key] || + field.autocolumn || + field.type === FieldTypes.FORMULA + ) { + continue + } + // if its an empty string then it means return the column to null (if possible) + if (row[key] === "") { + newRow[key] = null + continue + } + // parse floats/numbers + if (field.type === FieldTypes.NUMBER && !isNaN(parseFloat(row[key]))) { + newRow[key] = parseFloat(row[key]) + } + // if its not a link then just copy it over + if (field.type !== FieldTypes.LINK) { + newRow[key] = row[key] + continue + } + const { tableName: linkTableName } = breakExternalTableId(field?.tableId) + // table has to exist for many to many + if (!linkTableName || !this.tables[linkTableName]) { + continue + } + const linkTable = this.tables[linkTableName] + // @ts-ignore + const linkTablePrimary = linkTable.primary[0] + // one to many + if (isOneSide(field)) { + let id = row[key][0] + if (typeof row[key] === "string") { + id = decodeURIComponent(row[key]).match(/\[(.*?)\]/)?.[1] + } + newRow[field.foreignKey || linkTablePrimary] = breakRowIdField(id)[0] + } + // many to many + else if (field.through) { + // we're not inserting a doc, will be a bunch of update calls + const otherKey: string = field.throughFrom || linkTablePrimary + const thisKey: string = field.throughTo || tablePrimary + row[key].map((relationship: any) => { + manyRelationships.push({ + tableId: field.through || field.tableId, + isUpdate: false, + key: otherKey, + [otherKey]: breakRowIdField(relationship)[0], + // leave the ID for enrichment later + [thisKey]: `{{ literal ${tablePrimary} }}`, + }) + }) + } + // many to 
one + else { + const thisKey: string = "id" + // @ts-ignore + const otherKey: string = field.fieldName + row[key].map((relationship: any) => { + manyRelationships.push({ + tableId: field.tableId, + isUpdate: true, + key: otherKey, + [thisKey]: breakRowIdField(relationship)[0], + // leave the ID for enrichment later + [otherKey]: `{{ literal ${tablePrimary} }}`, + }) + }) + } + } + // we return the relationships that may need to be created in the through table + // we do this so that if the ID is generated by the DB it can be inserted + // after the fact + return { row: newRow, manyRelationships } + } + + squashRelationshipColumns( + table: Table, + row: Row, + relationships: RelationshipsJson[] + ): Row { + for (let relationship of relationships) { + const linkedTable = this.tables[relationship.tableName] + if (!linkedTable || !row[relationship.column]) { + continue + } + const display = linkedTable.primaryDisplay + for (let key of Object.keys(row[relationship.column])) { + const related: Row = row[relationship.column][key] + row[relationship.column][key] = { + primaryDisplay: display ? related[display] : undefined, + _id: related._id, } } } return row } - function isOneSide(field: FieldSchema) { - return ( - field.relationshipType && field.relationshipType.split("-")[0] === "one" - ) + /** + * This iterates through the returned rows and works out what elements of the rows + * actually match up to another row (based on primary keys) - this is pretty specific + * to SQL and the way that SQL relationships are returned based on joins. + * This is complicated, but the idea is that when a SQL query returns all the relations + * will be separate rows, with all of the data in each row. We have to decipher what comes + * from where (which tables) and how to convert that into budibase columns. 
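
Note: a worked example (table and column names illustrative) of what the many-to-many branch above builds - linking a product row to category 7 through a product_categories junction table, with this row's primary key deferred as a handlebars literal until the insert returns:

const manyRelationships = [
  {
    tableId: "product_categories", // field.through
    isUpdate: false, // junction rows are inserted, not updated
    key: "category_id", // otherKey = field.throughFrom || linkTablePrimary
    category_id: 7, // breakRowIdField(relationship)[0]
    product_id: "{{ literal id }}", // thisKey = field.throughTo || tablePrimary, enriched later
  },
]
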
+ */ + updateRelationshipColumns( + table: Table, + row: Row, + rows: { [key: string]: Row }, + relationships: RelationshipsJson[] + ) { + const columns: { [key: string]: any } = {} + for (let relationship of relationships) { + const linkedTable = this.tables[relationship.tableName] + if (!linkedTable) { + continue + } + const fromColumn = `${table.name}.${relationship.from}` + const toColumn = `${linkedTable.name}.${relationship.to}` + // this is important when working with multiple relationships + // between the same tables, don't want to overlap/multiply the relations + if ( + !relationship.through && + row[fromColumn]?.toString() !== row[toColumn]?.toString() + ) { + continue + } + let linked = basicProcessing(row, linkedTable) + if (!linked._id) { + continue + } + columns[relationship.column] = linked + } + for (let [column, related] of Object.entries(columns)) { + if (!row._id) { + continue + } + const rowId: string = row._id + if (!Array.isArray(rows[rowId][column])) { + rows[rowId][column] = [] + } + // make sure relationship hasn't been found already + if ( + !rows[rowId][column].find( + (relation: Row) => relation._id === related._id + ) + ) { + rows[rowId][column].push(related) + } + } + return rows } - class ExternalRequest { - private operation: Operation - private tableId: string - private datasource: Datasource - private tables: { [key: string]: Table } = {} - - constructor(operation: Operation, tableId: string, datasource: Datasource) { - this.operation = operation - this.tableId = tableId - this.datasource = datasource - if (datasource && datasource.entities) { - this.tables = datasource.entities - } + outputProcessing( + rows: Row[] = [], + table: Table, + relationships: RelationshipsJson[] + ) { + if (!rows || rows.length === 0 || rows[0].read === true) { + return [] } - - getTable(tableId: string | undefined): Table | undefined { - if (!tableId) { - throw "Table ID is unknown, cannot find table" - } - const { tableName } = breakExternalTableId(tableId) - if (tableName) { - return this.tables[tableName] - } - } - - inputProcessing(row: Row | undefined, table: Table) { - if (!row) { - return { row, manyRelationships: [] } - } - // we don't really support composite keys for relationships, this is why [0] is used - // @ts-ignore - const tablePrimary: string = table.primary[0] - let newRow: Row = {}, - manyRelationships: ManyRelationship[] = [] - for (let [key, field] of Object.entries(table.schema)) { - // if set already, or not set just skip it - if ( - row[key] == null || - newRow[key] || - field.autocolumn || - field.type === FieldTypes.FORMULA - ) { - continue - } - // if its an empty string then it means return the column to null (if possible) - if (row[key] === "") { - newRow[key] = null - continue - } - // parse floats/numbers - if (field.type === FieldTypes.NUMBER && !isNaN(parseFloat(row[key]))) { - newRow[key] = parseFloat(row[key]) - } - // if its not a link then just copy it over - if (field.type !== FieldTypes.LINK) { - newRow[key] = row[key] - continue - } - const { tableName: linkTableName } = breakExternalTableId( - field?.tableId - ) - // table has to exist for many to many - if (!linkTableName || !this.tables[linkTableName]) { - continue - } - const linkTable = this.tables[linkTableName] - // @ts-ignore - const linkTablePrimary = linkTable.primary[0] - // one to many - if (isOneSide(field)) { - let id = row[key][0] - if (typeof row[key] === "string") { - id = decodeURIComponent(row[key]).match(/\[(.*?)\]/)?.[1] - } - newRow[field.foreignKey || 
linkTablePrimary] = breakRowIdField(id)[0] - } - // many to many - else if (field.through) { - // we're not inserting a doc, will be a bunch of update calls - const otherKey: string = field.throughFrom || linkTablePrimary - const thisKey: string = field.throughTo || tablePrimary - row[key].map((relationship: any) => { - manyRelationships.push({ - tableId: field.through || field.tableId, - isUpdate: false, - key: otherKey, - [otherKey]: breakRowIdField(relationship)[0], - // leave the ID for enrichment later - [thisKey]: `{{ literal ${tablePrimary} }}`, - }) - }) - } - // many to one - else { - const thisKey: string = "id" - // @ts-ignore - const otherKey: string = field.fieldName - row[key].map((relationship: any) => { - manyRelationships.push({ - tableId: field.tableId, - isUpdate: true, - key: otherKey, - [thisKey]: breakRowIdField(relationship)[0], - // leave the ID for enrichment later - [otherKey]: `{{ literal ${tablePrimary} }}`, - }) - }) - } - } - // we return the relationships that may need to be created in the through table - // we do this so that if the ID is generated by the DB it can be inserted - // after the fact - return { row: newRow, manyRelationships } - } - - squashRelationshipColumns( - table: Table, - row: Row, - relationships: RelationshipsJson[] - ): Row { - for (let relationship of relationships) { - const linkedTable = this.tables[relationship.tableName] - if (!linkedTable || !row[relationship.column]) { - continue - } - const display = linkedTable.primaryDisplay - for (let key of Object.keys(row[relationship.column])) { - const related: Row = row[relationship.column][key] - row[relationship.column][key] = { - primaryDisplay: display ? related[display] : undefined, - _id: related._id, - } - } - } - return row - } - - /** - * This iterates through the returned rows and works out what elements of the rows - * actually match up to another row (based on primary keys) - this is pretty specific - * to SQL and the way that SQL relationships are returned based on joins. - * This is complicated, but the idea is that when a SQL query returns all the relations - * will be separate rows, with all of the data in each row. We have to decipher what comes - * from where (which tables) and how to convert that into budibase columns. 
- */ - updateRelationshipColumns( - table: Table, - row: Row, - rows: { [key: string]: Row }, - relationships: RelationshipsJson[] - ) { - const columns: { [key: string]: any } = {} - for (let relationship of relationships) { - const linkedTable = this.tables[relationship.tableName] - if (!linkedTable) { - continue - } - const fromColumn = `${table.name}.${relationship.from}` - const toColumn = `${linkedTable.name}.${relationship.to}` - // this is important when working with multiple relationships - // between the same tables, don't want to overlap/multiply the relations - if ( - !relationship.through && - row[fromColumn]?.toString() !== row[toColumn]?.toString() - ) { - continue - } - let linked = basicProcessing(row, linkedTable) - if (!linked._id) { - continue - } - columns[relationship.column] = linked - } - for (let [column, related] of Object.entries(columns)) { - if (!row._id) { - continue - } - const rowId: string = row._id - if (!Array.isArray(rows[rowId][column])) { - rows[rowId][column] = [] - } - // make sure relationship hasn't been found already - if ( - !rows[rowId][column].find( - (relation: Row) => relation._id === related._id - ) - ) { - rows[rowId][column].push(related) - } - } - return rows - } - - outputProcessing( - rows: Row[] = [], - table: Table, - relationships: RelationshipsJson[] - ) { - if (!rows || rows.length === 0 || rows[0].read === true) { - return [] - } - let finalRows: { [key: string]: Row } = {} - for (let row of rows) { - const rowId = generateIdForRow(row, table) - row._id = rowId - // this is a relationship of some sort - if (finalRows[rowId]) { - finalRows = this.updateRelationshipColumns( - table, - row, - finalRows, - relationships - ) - continue - } - const thisRow = fixArrayTypes(basicProcessing(row, table), table) - if (thisRow._id == null) { - throw "Unable to generate row ID for SQL rows" - } - finalRows[thisRow._id] = thisRow - // do this at end once its been added to the final rows + let finalRows: { [key: string]: Row } = {} + for (let row of rows) { + const rowId = generateIdForRow(row, table) + row._id = rowId + // this is a relationship of some sort + if (finalRows[rowId]) { finalRows = this.updateRelationshipColumns( table, row, finalRows, relationships ) + continue } - - // Process some additional data types - let finalRowArray = Object.values(finalRows) - finalRowArray = processDates(table, finalRowArray) - finalRowArray = processFormulas(table, finalRowArray) as Row[] - - return finalRowArray.map((row: Row) => - this.squashRelationshipColumns(table, row, relationships) + const thisRow = fixArrayTypes(basicProcessing(row, table), table) + if (thisRow._id == null) { + throw "Unable to generate row ID for SQL rows" + } + finalRows[thisRow._id] = thisRow + // do this at end once its been added to the final rows + finalRows = this.updateRelationshipColumns( + table, + row, + finalRows, + relationships ) } - /** - * Gets the list of relationship JSON structures based on the columns in the table, - * this will be used by the underlying library to build whatever relationship mechanism - * it has (e.g. SQL joins). 
- */ - buildRelationships(table: Table): RelationshipsJson[] { - const relationships = [] - for (let [fieldName, field] of Object.entries(table.schema)) { - if (field.type !== FieldTypes.LINK) { - continue - } - const { tableName: linkTableName } = breakExternalTableId(field.tableId) - // no table to link to, this is not a valid relationships - if (!linkTableName || !this.tables[linkTableName]) { - continue - } - const linkTable = this.tables[linkTableName] - if (!table.primary || !linkTable.primary) { - continue - } - const definition: any = { - // if no foreign key specified then use the name of the field in other table - from: field.foreignKey || table.primary[0], - to: field.fieldName, - tableName: linkTableName, - // need to specify where to put this back into - column: fieldName, - } - if (field.through) { - const { tableName: throughTableName } = breakExternalTableId( - field.through - ) - definition.through = throughTableName - // don't support composite keys for relationships - definition.from = field.throughTo || table.primary[0] - definition.to = field.throughFrom || linkTable.primary[0] - definition.fromPrimary = table.primary[0] - definition.toPrimary = linkTable.primary[0] - } - relationships.push(definition) - } - return relationships - } + // Process some additional data types + let finalRowArray = Object.values(finalRows) + finalRowArray = processDates(table, finalRowArray) + finalRowArray = processFormulas(table, finalRowArray) as Row[] - /** - * This is a cached lookup, of relationship records, this is mainly for creating/deleting junction - * information. - */ - async lookupRelations(tableId: string, row: Row) { - const related: { [key: string]: any } = {} - const { tableName } = breakExternalTableId(tableId) - if (!tableName) { - return related - } - const table = this.tables[tableName] - // @ts-ignore - const primaryKey = table.primary[0] - // make a new request to get the row with all its relationships - // we need this to work out if any relationships need removed - for (let field of Object.values(table.schema)) { - if ( - field.type !== FieldTypes.LINK || - !field.fieldName || - isOneSide(field) - ) { - continue - } - const isMany = field.relationshipType === RelationshipTypes.MANY_TO_MANY - const tableId = isMany ? field.through : field.tableId - const { tableName: relatedTableName } = breakExternalTableId(tableId) - // @ts-ignore - const linkPrimaryKey = this.tables[relatedTableName].primary[0] - const manyKey = field.throughTo || primaryKey - const lookupField = isMany ? primaryKey : field.foreignKey - const fieldName = isMany ? manyKey : field.fieldName - if (!lookupField || !row[lookupField]) { - continue - } - const response = await getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, DataSourceOperation.READ), - filters: { - equal: { - [fieldName]: row[lookupField], - }, - }, - }) - // this is the response from knex if no rows found - const rows = !response[0].read ? response : [] - const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName - related[storeTo] = { rows, isMany, tableId } - } - return related - } - - /** - * Once a row has been written we may need to update a many field, e.g. updating foreign keys - * in a bunch of rows in another table, or inserting/deleting rows from a junction table (many to many). - * This is quite a complex process and is handled by this function, there are a few things going on here: - * 1. 
If updating foreign keys its relatively simple, just create a filter for the row that needs updated - * and write the various components. - * 2. If junction table, then we lookup what exists already, write what doesn't exist, work out what - * isn't supposed to exist anymore and delete those. This is better than the usual method of delete them - * all and then re-create, as theres no chance of losing data (e.g. delete succeed, but write fail). - */ - async handleManyRelationships( - mainTableId: string, - row: Row, - relationships: ManyRelationship[] - ) { - // if we're creating (in a through table) need to wipe the existing ones first - const promises = [] - const related = await this.lookupRelations(mainTableId, row) - for (let relationship of relationships) { - const { key, tableId, isUpdate, id, ...rest } = relationship - const body: { [key: string]: any } = processObjectSync(rest, row, {}) - const linkTable = this.getTable(tableId) - // @ts-ignore - const linkPrimary = linkTable?.primary[0] - if (!linkTable || !linkPrimary) { - return - } - const rows = related[key]?.rows || [] - const found = rows.find( - (row: { [key: string]: any }) => - row[linkPrimary] === relationship.id || - row[linkPrimary] === body?.[linkPrimary] - ) - const operation = isUpdate - ? DataSourceOperation.UPDATE - : DataSourceOperation.CREATE - if (!found) { - promises.push( - getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, operation), - // if we're doing many relationships then we're writing, only one response - body, - filters: buildFilters(id, {}, linkTable), - }) - ) - } else { - // remove the relationship from cache so it isn't adjusted again - rows.splice(rows.indexOf(found), 1) - } - } - // finally cleanup anything that needs to be removed - for (let [colName, { isMany, rows, tableId }] of Object.entries( - related - )) { - const table: Table | undefined = this.getTable(tableId) - // if its not the foreign key skip it, nothing to do - if ( - !table || - (table.primary && table.primary.indexOf(colName) !== -1) - ) { - continue - } - for (let row of rows) { - const filters = buildFilters(generateIdForRow(row, table), {}, table) - // safety check, if there are no filters on deletion bad things happen - if (Object.keys(filters).length !== 0) { - const op = isMany - ? DataSourceOperation.DELETE - : DataSourceOperation.UPDATE - const body = isMany ? null : { [colName]: null } - promises.push( - getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, op), - body, - filters, - }) - ) - } - } - } - await Promise.all(promises) - } - - /** - * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which - * you have column overlap in relationships, e.g. we join a few different tables and they all have the - * concept of an ID, but for some of them it will be null (if they say don't have a relationship). - * Creating the specific list of fields that we desire, and excluding the ones that are no use to us - * is more performant and has the added benefit of protecting against this scenario. 
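Stepping back to handleManyRelationships above: for junction tables it reconciles against what already exists rather than deleting everything and re-creating, so a failed write can never wipe existing links. A standalone sketch of that diffing, under assumed simplified row shapes (orderId/productId are illustrative names only):

```ts
// Sketch of the reconcile-don't-recreate approach for junction tables:
// compute what to insert and what to delete, leaving untouched links alone.
type JunctionRow = { orderId: string; productId: string }

function reconcileJunction(
  existing: JunctionRow[],
  desired: JunctionRow[]
): { toCreate: JunctionRow[]; toDelete: JunctionRow[] } {
  const key = (row: JunctionRow) => `${row.orderId}:${row.productId}`
  const existingKeys = new Set(existing.map(key))
  const desiredKeys = new Set(desired.map(key))
  return {
    // in the desired state but not yet written
    toCreate: desired.filter(row => !existingKeys.has(key(row))),
    // written previously but no longer wanted
    toDelete: existing.filter(row => !desiredKeys.has(key(row))),
  }
}

// nothing is dropped until the new state is known, unlike
// delete-all-then-recreate where a failed write loses data
const { toCreate, toDelete } = reconcileJunction(
  [{ orderId: "1", productId: "a" }, { orderId: "1", productId: "b" }],
  [{ orderId: "1", productId: "b" }, { orderId: "1", productId: "c" }]
)
console.log(toCreate) // [{ orderId: "1", productId: "c" }]
console.log(toDelete) // [{ orderId: "1", productId: "a" }]
```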
- */ - buildFields( - table: Table, - includeRelations: IncludeRelationship = IncludeRelationship.INCLUDE - ) { - function extractRealFields(table: Table, existing: string[] = []) { - return Object.entries(table.schema) - .filter( - column => - column[1].type !== FieldTypes.LINK && - column[1].type !== FieldTypes.FORMULA && - !existing.find((field: string) => field === column[0]) - ) - .map(column => `${table.name}.${column[0]}`) - } - let fields = extractRealFields(table) - for (let field of Object.values(table.schema)) { - if (field.type !== FieldTypes.LINK || !includeRelations) { - continue - } - const { tableName: linkTableName } = breakExternalTableId(field.tableId) - if (linkTableName) { - const linkTable = this.tables[linkTableName] - if (linkTable) { - const linkedFields = extractRealFields(linkTable, fields) - fields = fields.concat(linkedFields) - } - } - } - return fields - } - - async run(config: RunConfig) { - const { operation, tableId } = this - let { datasourceId, tableName } = breakExternalTableId(tableId) - if (!tableName) { - throw "Unable to run without a table name" - } - if (!this.datasource) { - const db = context.getAppDB() - this.datasource = await db.get(datasourceId) - if (!this.datasource || !this.datasource.entities) { - throw "No tables found, fetch tables before query." - } - this.tables = this.datasource.entities - } - const table = this.tables[tableName] - let isSql = isSQL(this.datasource) - if (!table) { - throw `Unable to process query, table "${tableName}" not defined.` - } - // look for specific components of config which may not be considered acceptable - let { id, row, filters, sort, paginate, rows } = cleanupConfig( - config, - table - ) - filters = buildFilters(id, filters || {}, table) - const relationships = this.buildRelationships(table) - // clean up row on ingress using schema - const processed = this.inputProcessing(row, table) - row = processed.row - if ( - operation === DataSourceOperation.DELETE && - (filters == null || Object.keys(filters).length === 0) - ) { - throw "Deletion must be filtered" - } - let json = { - endpoint: { - datasourceId, - entityId: tableName, - operation, - }, - resource: { - // have to specify the fields to avoid column overlap (for SQL) - fields: isSql ? this.buildFields(table) : [], - }, - filters, - sort, - paginate, - relationships, - body: row || rows, - // pass an id filter into extra, purely for mysql/returning - extra: { - idFilter: buildFilters(id || generateIdForRow(row, table), {}, table), - }, - meta: { - table, - }, - } - // can't really use response right now - const response = await getDatasourceAndQuery(json) - // handle many to many relationships now if we know the ID (could be auto increment) - if ( - operation !== DataSourceOperation.READ && - processed.manyRelationships - ) { - await this.handleManyRelationships( - table._id || "", - response[0], - processed.manyRelationships - ) - } - const output = this.outputProcessing(response, table, relationships) - // if reading it'll just be an array of rows, return whole thing - return operation === DataSourceOperation.READ && Array.isArray(response) - ? output - : { row: output[0], table } - } + return finalRowArray.map((row: Row) => + this.squashRelationshipColumns(table, row, relationships) + ) } - module.exports = ExternalRequest + /** + * Gets the list of relationship JSON structures based on the columns in the table, + * this will be used by the underlying library to build whatever relationship mechanism + * it has (e.g. SQL joins). 
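buildRelationships, which follows, reduces each LINK column to a plain join descriptor for the SQL layer. Sketched below are the two shapes it produces - a direct foreign-key link and a through (junction) link - with the descriptor fields taken from the code above and the table/column names invented purely for illustration:

```ts
// Sketch of the two descriptor shapes buildRelationships emits
// (simplified type; the real RelationshipsJson lives in @budibase/types).
type RelationshipDefinition = {
  from: string // column on this table (foreign key or primary key)
  to: string // column on the linked table
  tableName: string // the linked table
  column: string // Budibase column the joined data is nested under
  through?: string // junction table, many-to-many only
  fromPrimary?: string
  toPrimary?: string
}

// one-to-many: orders.customerId -> customers.id
const direct: RelationshipDefinition = {
  from: "customerId",
  to: "id",
  tableName: "customers",
  column: "customer",
}

// many-to-many: orders <-> products via an order_items junction table
const throughJunction: RelationshipDefinition = {
  from: "order_id", // junction column pointing back at this table
  to: "product_id", // junction column pointing at the linked table
  tableName: "products",
  column: "products",
  through: "order_items",
  fromPrimary: "id",
  toPrimary: "id",
}
```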
+ */ + buildRelationships(table: Table): RelationshipsJson[] { + const relationships = [] + for (let [fieldName, field] of Object.entries(table.schema)) { + if (field.type !== FieldTypes.LINK) { + continue + } + const { tableName: linkTableName } = breakExternalTableId(field.tableId) + // no table to link to, this is not a valid relationships + if (!linkTableName || !this.tables[linkTableName]) { + continue + } + const linkTable = this.tables[linkTableName] + if (!table.primary || !linkTable.primary) { + continue + } + const definition: any = { + // if no foreign key specified then use the name of the field in other table + from: field.foreignKey || table.primary[0], + to: field.fieldName, + tableName: linkTableName, + // need to specify where to put this back into + column: fieldName, + } + if (field.through) { + const { tableName: throughTableName } = breakExternalTableId( + field.through + ) + definition.through = throughTableName + // don't support composite keys for relationships + definition.from = field.throughTo || table.primary[0] + definition.to = field.throughFrom || linkTable.primary[0] + definition.fromPrimary = table.primary[0] + definition.toPrimary = linkTable.primary[0] + } + relationships.push(definition) + } + return relationships + } + + /** + * This is a cached lookup, of relationship records, this is mainly for creating/deleting junction + * information. + */ + async lookupRelations(tableId: string, row: Row) { + const related: { [key: string]: any } = {} + const { tableName } = breakExternalTableId(tableId) + if (!tableName) { + return related + } + const table = this.tables[tableName] + // @ts-ignore + const primaryKey = table.primary[0] + // make a new request to get the row with all its relationships + // we need this to work out if any relationships need removed + for (let field of Object.values(table.schema)) { + if ( + field.type !== FieldTypes.LINK || + !field.fieldName || + isOneSide(field) + ) { + continue + } + const isMany = field.relationshipType === RelationshipTypes.MANY_TO_MANY + const tableId = isMany ? field.through : field.tableId + const { tableName: relatedTableName } = breakExternalTableId(tableId) + // @ts-ignore + const linkPrimaryKey = this.tables[relatedTableName].primary[0] + const manyKey = field.throughTo || primaryKey + const lookupField = isMany ? primaryKey : field.foreignKey + const fieldName = isMany ? manyKey : field.fieldName + if (!lookupField || !row[lookupField]) { + continue + } + const response = await getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.READ), + filters: { + equal: { + [fieldName]: row[lookupField], + }, + }, + }) + // this is the response from knex if no rows found + const rows = !response[0].read ? response : [] + const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName + related[storeTo] = { rows, isMany, tableId } + } + return related + } + + /** + * Once a row has been written we may need to update a many field, e.g. updating foreign keys + * in a bunch of rows in another table, or inserting/deleting rows from a junction table (many to many). + * This is quite a complex process and is handled by this function, there are a few things going on here: + * 1. If updating foreign keys its relatively simple, just create a filter for the row that needs updated + * and write the various components. + * 2. If junction table, then we lookup what exists already, write what doesn't exist, work out what + * isn't supposed to exist anymore and delete those. 
This is better than the usual method of delete them + * all and then re-create, as theres no chance of losing data (e.g. delete succeed, but write fail). + */ + async handleManyRelationships( + mainTableId: string, + row: Row, + relationships: ManyRelationship[] + ) { + // if we're creating (in a through table) need to wipe the existing ones first + const promises = [] + const related = await this.lookupRelations(mainTableId, row) + for (let relationship of relationships) { + const { key, tableId, isUpdate, id, ...rest } = relationship + const body: { [key: string]: any } = processObjectSync(rest, row, {}) + const linkTable = this.getTable(tableId) + // @ts-ignore + const linkPrimary = linkTable?.primary[0] + if (!linkTable || !linkPrimary) { + return + } + const rows = related[key]?.rows || [] + const found = rows.find( + (row: { [key: string]: any }) => + row[linkPrimary] === relationship.id || + row[linkPrimary] === body?.[linkPrimary] + ) + const operation = isUpdate ? Operation.UPDATE : Operation.CREATE + if (!found) { + promises.push( + getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, operation), + // if we're doing many relationships then we're writing, only one response + body, + filters: buildFilters(id, {}, linkTable), + }) + ) + } else { + // remove the relationship from cache so it isn't adjusted again + rows.splice(rows.indexOf(found), 1) + } + } + // finally cleanup anything that needs to be removed + for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) { + const table: Table | undefined = this.getTable(tableId) + // if its not the foreign key skip it, nothing to do + if (!table || (table.primary && table.primary.indexOf(colName) !== -1)) { + continue + } + for (let row of rows) { + const filters = buildFilters(generateIdForRow(row, table), {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + const op = isMany ? Operation.DELETE : Operation.UPDATE + const body = isMany ? null : { [colName]: null } + promises.push( + getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, op), + body, + filters, + }) + ) + } + } + } + await Promise.all(promises) + } + + /** + * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which + * you have column overlap in relationships, e.g. we join a few different tables and they all have the + * concept of an ID, but for some of them it will be null (if they say don't have a relationship). + * Creating the specific list of fields that we desire, and excluding the ones that are no use to us + * is more performant and has the added benefit of protecting against this scenario. 
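That column-overlap problem is easiest to see in miniature: every joined table tends to have an "id" column, and an unqualified SELECT lets a null relationship clobber the main row's value. A sketch of the qualified field list buildFields assembles, under an assumed simplified schema shape:

```ts
// Sketch: build a qualified "table.column" field list so overlapping
// column names can't clobber each other in a join. SimpleTable is an
// assumed stand-in for Budibase's Table type.
type SimpleTable = {
  name: string
  schema: { [column: string]: { type: string } }
}

function qualifiedFields(table: SimpleTable, existing: string[] = []): string[] {
  return Object.entries(table.schema)
    .filter(
      ([name, field]) =>
        // links and formulas aren't real SQL columns, so skip them
        field.type !== "link" &&
        field.type !== "formula" &&
        !existing.includes(name)
    )
    .map(([name]) => `${table.name}.${name}`)
}

const orders: SimpleTable = {
  name: "orders",
  schema: { id: { type: "number" }, total: { type: "number" } },
}
const customers: SimpleTable = {
  name: "customers",
  schema: { id: { type: "number" }, name: { type: "string" } },
}
const fields = [...qualifiedFields(orders), ...qualifiedFields(customers)]
console.log(fields)
// ["orders.id", "orders.total", "customers.id", "customers.name"]
```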
+ */ + buildFields( + table: Table, + includeRelations: IncludeRelationship = IncludeRelationship.INCLUDE + ) { + function extractRealFields(table: Table, existing: string[] = []) { + return Object.entries(table.schema) + .filter( + column => + column[1].type !== FieldTypes.LINK && + column[1].type !== FieldTypes.FORMULA && + !existing.find((field: string) => field === column[0]) + ) + .map(column => `${table.name}.${column[0]}`) + } + let fields = extractRealFields(table) + for (let field of Object.values(table.schema)) { + if (field.type !== FieldTypes.LINK || !includeRelations) { + continue + } + const { tableName: linkTableName } = breakExternalTableId(field.tableId) + if (linkTableName) { + const linkTable = this.tables[linkTableName] + if (linkTable) { + const linkedFields = extractRealFields(linkTable, fields) + fields = fields.concat(linkedFields) + } + } + } + return fields + } + + async run(config: RunConfig) { + const { operation, tableId } = this + let { datasourceId, tableName } = breakExternalTableId(tableId) + if (!tableName) { + throw "Unable to run without a table name" + } + if (!this.datasource) { + const db = context.getAppDB() + this.datasource = await db.get(datasourceId) + if (!this.datasource || !this.datasource.entities) { + throw "No tables found, fetch tables before query." + } + this.tables = this.datasource.entities + } + const table = this.tables[tableName] + let isSql = isSQL(this.datasource) + if (!table) { + throw `Unable to process query, table "${tableName}" not defined.` + } + // look for specific components of config which may not be considered acceptable + let { id, row, filters, sort, paginate, rows } = cleanupConfig( + config, + table + ) + filters = buildFilters(id, filters || {}, table) + const relationships = this.buildRelationships(table) + // clean up row on ingress using schema + const processed = this.inputProcessing(row, table) + row = processed.row + if ( + operation === Operation.DELETE && + (filters == null || Object.keys(filters).length === 0) + ) { + throw "Deletion must be filtered" + } + let json = { + endpoint: { + datasourceId, + entityId: tableName, + operation, + }, + resource: { + // have to specify the fields to avoid column overlap (for SQL) + fields: isSql ? this.buildFields(table) : [], + }, + filters, + sort, + paginate, + relationships, + body: row || rows, + // pass an id filter into extra, purely for mysql/returning + extra: { + idFilter: buildFilters(id || generateIdForRow(row, table), {}, table), + }, + meta: { + table, + }, + } + // can't really use response right now + const response = await getDatasourceAndQuery(json) + // handle many to many relationships now if we know the ID (could be auto increment) + if (operation !== Operation.READ && processed.manyRelationships) { + await this.handleManyRelationships( + table._id || "", + response[0], + processed.manyRelationships + ) + } + const output = this.outputProcessing(response, table, relationships) + // if reading it'll just be an array of rows, return whole thing + return operation === Operation.READ && Array.isArray(response) + ? 
output + : { row: output[0], table } + } } diff --git a/packages/server/src/api/controllers/row/external.js b/packages/server/src/api/controllers/row/external.ts similarity index 63% rename from packages/server/src/api/controllers/row/external.js rename to packages/server/src/api/controllers/row/external.ts index e0c3a9ee4d..83564564b8 100644 --- a/packages/server/src/api/controllers/row/external.js +++ b/packages/server/src/api/controllers/row/external.ts @@ -1,104 +1,117 @@ -const { - DataSourceOperation, +import { SortDirection, FieldTypes, NoEmptyFilterStrings, -} = require("../../../constants") -const { +} from "../../../constants" +import { breakExternalTableId, breakRowIdField, -} = require("../../../integrations/utils") -const ExternalRequest = require("./ExternalRequest") -const { context } = require("@budibase/backend-core") -const exporters = require("../view/exporters") -const { apiFileReturn } = require("../../../utilities/fileSystem") +} from "../../../integrations/utils" +import { ExternalRequest, RunConfig } from "./ExternalRequest" +import { context } from "@budibase/backend-core" +import * as exporters from "../view/exporters" +import { apiFileReturn } from "../../../utilities/fileSystem" +import { + Operation, + BBContext, + Row, + PaginationJson, + Table, + Datasource, +} from "@budibase/types" -async function handleRequest(operation, tableId, opts = {}) { +export async function handleRequest( + operation: Operation, + tableId: string, + opts?: RunConfig +) { // make sure the filters are cleaned up, no empty strings for equals, fuzzy or string if (opts && opts.filters) { for (let filterField of NoEmptyFilterStrings) { if (!opts.filters[filterField]) { continue } + // @ts-ignore for (let [key, value] of Object.entries(opts.filters[filterField])) { if (!value || value === "") { + // @ts-ignore delete opts.filters[filterField][key] } } } } - return new ExternalRequest(operation, tableId, opts.datasource).run(opts) + return new ExternalRequest(operation, tableId, opts?.datasource).run( + opts || {} + ) } -exports.handleRequest = handleRequest - -exports.patch = async ctx => { +export async function patch(ctx: BBContext) { const inputs = ctx.request.body const tableId = ctx.params.tableId const id = inputs._id // don't save the ID to db delete inputs._id - return handleRequest(DataSourceOperation.UPDATE, tableId, { + return handleRequest(Operation.UPDATE, tableId, { id: breakRowIdField(id), row: inputs, }) } -exports.save = async ctx => { +export async function save(ctx: BBContext) { const inputs = ctx.request.body const tableId = ctx.params.tableId - return handleRequest(DataSourceOperation.CREATE, tableId, { + return handleRequest(Operation.CREATE, tableId, { row: inputs, }) } -exports.fetchView = async ctx => { +export async function fetchView(ctx: BBContext) { // there are no views in external datasources, shouldn't ever be called // for now just fetch const split = ctx.params.viewName.split("all_") ctx.params.tableId = split[1] ? 
split[1] : split[0] - return exports.fetch(ctx) + return fetch(ctx) } -exports.fetch = async ctx => { +export async function fetch(ctx: BBContext) { const tableId = ctx.params.tableId - return handleRequest(DataSourceOperation.READ, tableId) + return handleRequest(Operation.READ, tableId) } -exports.find = async ctx => { +export async function find(ctx: BBContext) { const id = ctx.params.rowId const tableId = ctx.params.tableId - const response = await handleRequest(DataSourceOperation.READ, tableId, { + const response = (await handleRequest(Operation.READ, tableId, { id: breakRowIdField(id), - }) + })) as Row[] return response ? response[0] : response } -exports.destroy = async ctx => { +export async function destroy(ctx: BBContext) { const tableId = ctx.params.tableId const id = ctx.request.body._id - const { row } = await handleRequest(DataSourceOperation.DELETE, tableId, { + const { row } = (await handleRequest(Operation.DELETE, tableId, { id: breakRowIdField(id), - }) + })) as { row: Row } return { response: { ok: true }, row } } -exports.bulkDestroy = async ctx => { +export async function bulkDestroy(ctx: BBContext) { const { rows } = ctx.request.body const tableId = ctx.params.tableId let promises = [] for (let row of rows) { promises.push( - handleRequest(DataSourceOperation.DELETE, tableId, { + handleRequest(Operation.DELETE, tableId, { id: breakRowIdField(row._id), }) ) } - const responses = await Promise.all(promises) + const responses = (await Promise.all(promises)) as { row: Row }[] return { response: { ok: true }, rows: responses.map(resp => resp.row) } } -exports.search = async ctx => { +export async function search(ctx: BBContext) { const tableId = ctx.params.tableId const { paginate, query, ...params } = ctx.request.body let { bookmark, limit } = params @@ -129,26 +142,26 @@ exports.search = async ctx => { } } try { - const rows = await handleRequest(DataSourceOperation.READ, tableId, { + const rows = (await handleRequest(Operation.READ, tableId, { filters: query, sort, - paginate: paginateObj, - }) + paginate: paginateObj as PaginationJson, + })) as Row[] let hasNextPage = false if (paginate && rows.length === limit) { - const nextRows = await handleRequest(DataSourceOperation.READ, tableId, { + const nextRows = (await handleRequest(Operation.READ, tableId, { filters: query, sort, paginate: { limit: 1, page: bookmark * limit + 1, }, - }) + })) as Row[] hasNextPage = nextRows.length > 0 } // need wrapper object for bookmarks etc when paginating return { rows, hasNextPage, bookmark: bookmark + 1 } - } catch (err) { + } catch (err: any) { if (err.message && err.message.includes("does not exist")) { throw new Error( `Table updated externally, please re-fetch - ${err.message}` @@ -159,12 +172,12 @@ exports.search = async ctx => { } } -exports.validate = async () => { +export async function validate(ctx: BBContext) { // can't validate external right now - maybe in future return { valid: true } } -exports.exportRows = async ctx => { +export async function exportRows(ctx: BBContext) { const { datasourceId } = breakExternalTableId(ctx.params.tableId) const db = context.getAppDB() const format = ctx.query.format @@ -176,13 +189,15 @@ exports.exportRows = async ctx => { ctx.request.body = { query: { oneOf: { - _id: ctx.request.body.rows.map(row => JSON.parse(decodeURI(row))[0]), + _id: ctx.request.body.rows.map( + (row: string) => JSON.parse(decodeURI(row))[0] + ), }, }, } - let result = await exports.search(ctx) - let rows = [] + let result = await search(ctx) + let rows: Row[] = 
[] // Filter data to only specified columns if required if (columns && columns.length) { @@ -197,6 +212,7 @@ exports.exportRows = async ctx => { } let headers = Object.keys(rows[0]) + // @ts-ignore const exporter = exporters[format] const filename = `export.${format}` @@ -205,21 +221,24 @@ exports.exportRows = async ctx => { return apiFileReturn(exporter(headers, rows)) } -exports.fetchEnrichedRow = async ctx => { +export async function fetchEnrichedRow(ctx: BBContext) { const id = ctx.params.rowId const tableId = ctx.params.tableId const { datasourceId, tableName } = breakExternalTableId(tableId) const db = context.getAppDB() - const datasource = await db.get(datasourceId) + const datasource: Datasource = await db.get(datasourceId) + if (!tableName) { + ctx.throw(400, "Unable to find table.") + } if (!datasource || !datasource.entities) { ctx.throw(400, "Datasource has not been configured for plus API.") } const tables = datasource.entities - const response = await handleRequest(DataSourceOperation.READ, tableId, { + const response = (await handleRequest(Operation.READ, tableId, { id, datasource, - }) - const table = tables[tableName] + })) as Row[] + const table: Table = tables[tableName] const row = response[0] // this seems like a lot of work, but basically we need to dig deeper for the enrich // for a single row, there is probably a better way to do this with some smart multi-layer joins @@ -233,21 +252,19 @@ exports.fetchEnrichedRow = async ctx => { } const links = row[fieldName] const linkedTableId = field.tableId - const linkedTable = tables[breakExternalTableId(linkedTableId).tableName] + const linkedTableName = breakExternalTableId(linkedTableId).tableName! + const linkedTable = tables[linkedTableName] // don't support composite keys right now - const linkedIds = links.map(link => breakRowIdField(link._id)[0]) - row[fieldName] = await handleRequest( - DataSourceOperation.READ, - linkedTableId, - { - tables, - filters: { - oneOf: { - [linkedTable.primary]: linkedIds, - }, + const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0]) + const primaryLink = linkedTable.primary?.[0] as string + row[fieldName] = await handleRequest(Operation.READ, linkedTableId!, { + tables, + filters: { + oneOf: { + [primaryLink]: linkedIds, }, - } - ) + }, + }) } return row } diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts index 09af24a9b1..b59f245098 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -1,6 +1,6 @@ import { quotas } from "@budibase/pro" -import internal from "./internal" -import external from "./external" +import * as internal from "./internal" +import * as external from "./external" import { isExternalTable } from "../../../integrations/utils" function pickApi(tableId: any) { diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.ts similarity index 77% rename from packages/server/src/api/controllers/row/internal.js rename to packages/server/src/api/controllers/row/internal.ts index 9b105fd3ae..ea3277cd59 100644 --- a/packages/server/src/api/controllers/row/internal.js +++ b/packages/server/src/api/controllers/row/internal.ts @@ -1,34 +1,41 @@ -const linkRows = require("../../../db/linkedRows") -const { +import * as linkRows from "../../../db/linkedRows" +import { generateRowID, getRowParams, getTableIDFromRowID, DocumentType, InternalTables, -} = require("../../../db/utils") 
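One detail of the external search handler above worth isolating is how hasNextPage is computed: fetch a full page, then peek a single row past it, so the client never gets a "next page" link that turns out to be empty. A self-contained sketch, treating page as a row offset and using a hypothetical queryRows stand-in for handleRequest(Operation.READ, ...):

```ts
// Sketch of the bookmark/peek pagination pattern. queryRows and the
// offset arithmetic here are simplified assumptions, not the exact API.
type Page = { limit: number; page: number }

async function searchWithBookmark(
  queryRows: (paginate: Page) => Promise<unknown[]>,
  bookmark: number, // 0-based page index supplied by the client
  limit: number
) {
  const rows = await queryRows({ limit, page: bookmark * limit })
  let hasNextPage = false
  if (rows.length === limit) {
    // a full page might still be the last one - peek one row into
    // the next page to find out without fetching it all
    const peek = await queryRows({ limit: 1, page: bookmark * limit + limit })
    hasNextPage = peek.length > 0
  }
  // wrapper object so the caller can hand the bookmark back for the next page
  return { rows, hasNextPage, bookmark: bookmark + 1 }
}
```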
-const userController = require("../user") -const { +} from "../../../db/utils" +import * as userController from "../user" +import { inputProcessing, outputProcessing, cleanupAttachments, -} = require("../../../utilities/rowProcessor") -const { FieldTypes } = require("../../../constants") -const { validate, findRow } = require("./utils") -const { fullSearch, paginatedSearch } = require("./internalSearch") -const { getGlobalUsersFromMetadata } = require("../../../utilities/global") -const inMemoryViews = require("../../../db/inMemoryView") -const env = require("../../../environment") -const { +} from "../../../utilities/rowProcessor" +import { FieldTypes } from "../../../constants" +import { validate as rowValidate, findRow } from "./utils" +import { fullSearch, paginatedSearch } from "./internalSearch" +import { getGlobalUsersFromMetadata } from "../../../utilities/global" +import * as inMemoryViews from "../../../db/inMemoryView" +import env from "../../../environment" +import { migrateToInMemoryView, migrateToDesignView, getFromDesignDoc, getFromMemoryDoc, -} = require("../view/utils") -const { cloneDeep } = require("lodash/fp") -const { context, db: dbCore } = require("@budibase/backend-core") -const { finaliseRow, updateRelatedFormula } = require("./staticFormula") -const exporters = require("../view/exporters") -const { apiFileReturn } = require("../../../utilities/fileSystem") +} from "../view/utils" +import { cloneDeep } from "lodash/fp" +import { context, db as dbCore } from "@budibase/backend-core" +import { finaliseRow, updateRelatedFormula } from "./staticFormula" +import * as exporters from "../view/exporters" +import { apiFileReturn } from "../../../utilities/fileSystem" +import { + BBContext, + Database, + LinkDocumentValue, + Row, + Table, +} from "@budibase/types" const CALCULATION_TYPES = { SUM: "sum", @@ -36,7 +43,7 @@ const CALCULATION_TYPES = { STATS: "stats", } -async function getView(db, viewName) { +async function getView(db: Database, viewName: string) { let mainGetter = env.SELF_HOSTED ? getFromDesignDoc : getFromMemoryDoc let secondaryGetter = env.SELF_HOSTED ? getFromMemoryDoc : getFromDesignDoc let migration = env.SELF_HOSTED ? 
migrateToDesignView : migrateToInMemoryView @@ -44,7 +51,7 @@ async function getView(db, viewName) { migrate = false try { viewInfo = await mainGetter(db, viewName) - } catch (err) { + } catch (err: any) { // check if it can be retrieved from design doc (needs migrated) if (err.status !== 404) { viewInfo = null @@ -62,7 +69,7 @@ async function getView(db, viewName) { return viewInfo } -async function getRawTableData(ctx, db, tableId) { +async function getRawTableData(ctx: BBContext, db: Database, tableId: string) { let rows if (tableId === InternalTables.USER_METADATA) { await userController.fetchMetadata(ctx) @@ -75,10 +82,10 @@ async function getRawTableData(ctx, db, tableId) { ) rows = response.rows.map(row => row.doc) } - return rows + return rows as Row[] } -exports.patch = async ctx => { +export async function patch(ctx: BBContext) { const db = context.getAppDB() const inputs = ctx.request.body const tableId = inputs.tableId @@ -103,15 +110,15 @@ exports.patch = async ctx => { } let dbTable = await db.get(tableId) // need to build up full patch fields before coerce - let combinedRow = cloneDeep(oldRow) + let combinedRow: any = cloneDeep(oldRow) for (let key of Object.keys(inputs)) { if (!dbTable.schema[key]) continue combinedRow[key] = inputs[key] } // this returns the table and row incase they have been updated - let { table, row } = inputProcessing(ctx.user, dbTable, combinedRow) - const validateResult = await validate({ + let { table, row } = inputProcessing(ctx.user!, dbTable, combinedRow) + const validateResult = await rowValidate({ row, table, }) @@ -121,12 +128,12 @@ exports.patch = async ctx => { } // returned row is cleaned and prepared for writing to DB - row = await linkRows.updateLinks({ + row = (await linkRows.updateLinks({ eventType: linkRows.EventType.ROW_UPDATE, row, tableId: row.tableId, table, - }) + })) as Row // check if any attachments removed await cleanupAttachments(table, { oldRow, row }) @@ -143,7 +150,7 @@ exports.patch = async ctx => { }) } -exports.save = async function (ctx) { +export async function save(ctx: BBContext) { const db = context.getAppDB() let inputs = ctx.request.body inputs.tableId = ctx.params.tableId @@ -154,8 +161,8 @@ exports.save = async function (ctx) { // this returns the table and row incase they have been updated const dbTable = await db.get(inputs.tableId) - let { table, row } = inputProcessing(ctx.user, dbTable, inputs) - const validateResult = await validate({ + let { table, row } = inputProcessing(ctx.user!, dbTable, inputs) + const validateResult = await rowValidate({ row, table, }) @@ -165,12 +172,12 @@ exports.save = async function (ctx) { } // make sure link rows are up to date - row = await linkRows.updateLinks({ + row = (await linkRows.updateLinks({ eventType: linkRows.EventType.ROW_SAVE, row, tableId: row.tableId, table, - }) + })) as Row return finaliseRow(table, row, { oldTable: dbTable, @@ -178,13 +185,13 @@ exports.save = async function (ctx) { }) } -exports.fetchView = async ctx => { +export async function fetchView(ctx: BBContext) { const viewName = ctx.params.viewName // if this is a table view being looked for just transfer to that if (viewName.startsWith(DocumentType.TABLE)) { ctx.params.tableId = viewName - return exports.fetch(ctx) + return fetch(ctx) } const db = context.getAppDB() @@ -199,7 +206,12 @@ exports.fetchView = async ctx => { } else { const tableId = viewInfo.meta.tableId const data = await getRawTableData(ctx, db, tableId) - response = await inMemoryViews.runView(viewInfo, calculation, group, data) 
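The getView logic above implements a prefer-and-migrate lookup: depending on SELF_HOSTED, one of the design-doc or in-memory stores is the preferred location, the other is a fallback, and a hit on the fallback triggers a migration so the next read lands in the right place. A simplified sketch, with the error handling reduced to the 404 case and the getter/migrator signatures assumed:

```ts
// Sketch of a prefer-and-migrate view lookup. ViewGetter and the
// migrate callback are assumed shapes, not the real utilities.
type ViewGetter = (name: string) => Promise<object>

async function getWithFallback(
  name: string,
  main: ViewGetter,
  secondary: ViewGetter,
  migrate: (name: string) => Promise<void>
): Promise<object> {
  try {
    return await main(name)
  } catch (err: any) {
    // only a missing view should trigger the fallback path
    if (err.status !== 404) throw err
  }
  // found only in the legacy location - migrate it for next time
  const view = await secondary(name)
  await migrate(name)
  return view
}
```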
+ response = await inMemoryViews.runView( + viewInfo, + calculation as string, + !!group, + data + ) } let rows @@ -240,7 +252,7 @@ exports.fetchView = async ctx => { return rows } -exports.fetch = async ctx => { +export async function fetch(ctx: BBContext) { const db = context.getAppDB() const tableId = ctx.params.tableId @@ -249,7 +261,7 @@ exports.fetch = async ctx => { return outputProcessing(table, rows) } -exports.find = async ctx => { +export async function find(ctx: BBContext) { const db = dbCore.getDB(ctx.appId) const table = await db.get(ctx.params.tableId) let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId) @@ -257,7 +269,7 @@ exports.find = async ctx => { return row } -exports.destroy = async function (ctx) { +export async function destroy(ctx: BBContext) { const db = context.getAppDB() const { _id } = ctx.request.body let row = await db.get(_id) @@ -293,7 +305,7 @@ exports.destroy = async function (ctx) { return { response, row } } -exports.bulkDestroy = async ctx => { +export async function bulkDestroy(ctx: BBContext) { const db = context.getAppDB() const tableId = ctx.params.tableId const table = await db.get(tableId) @@ -301,10 +313,12 @@ exports.bulkDestroy = async ctx => { // before carrying out any updates, make sure the rows are ready to be returned // they need to be the full rows (including previous relationships) for automations - rows = await outputProcessing(table, rows, { squash: false }) + const processedRows = (await outputProcessing(table, rows, { + squash: false, + })) as Row[] // remove the relationships first - let updates = rows.map(row => + let updates: Promise[] = processedRows.map(row => linkRows.updateLinks({ eventType: linkRows.EventType.ROW_DELETE, row, @@ -313,7 +327,7 @@ exports.bulkDestroy = async ctx => { ) if (tableId === InternalTables.USER_METADATA) { updates = updates.concat( - rows.map(row => { + processedRows.map(row => { ctx.params = { id: row._id, } @@ -321,19 +335,19 @@ exports.bulkDestroy = async ctx => { }) ) } else { - await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true }))) + await db.bulkDocs(processedRows.map(row => ({ ...row, _deleted: true }))) } // remove any attachments that were on the rows from object storage - await cleanupAttachments(table, { rows }) - await updateRelatedFormula(table, rows) + await cleanupAttachments(table, { rows: processedRows }) + await updateRelatedFormula(table, processedRows) await Promise.all(updates) - return { response: { ok: true }, rows } + return { response: { ok: true }, rows: processedRows } } -exports.search = async ctx => { +export async function search(ctx: BBContext) { // Fetch the whole table when running in cypress, as search doesn't work if (!env.COUCH_DB_URL && env.isCypress()) { - return { rows: await exports.fetch(ctx) } + return { rows: await fetch(ctx) } } const { tableId } = ctx.params @@ -362,14 +376,14 @@ exports.search = async ctx => { return response } -exports.validate = async ctx => { - return validate({ +export async function validate(ctx: BBContext) { + return rowValidate({ tableId: ctx.params.tableId, row: ctx.request.body, }) } -exports.exportRows = async ctx => { +export async function exportRows(ctx: BBContext) { const db = context.getAppDB() const table = await db.get(ctx.params.tableId) const rowIds = ctx.request.body.rows @@ -382,8 +396,8 @@ exports.exportRows = async ctx => { }) ).rows.map(row => row.doc) - let result = await outputProcessing(table, response) - let rows = [] + let result = (await outputProcessing(table, response)) as Row[] + 
let rows: Row[] = [] // Filter data to only specified columns if required if (columns && columns.length) { @@ -398,6 +412,7 @@ exports.exportRows = async ctx => { } let headers = Object.keys(rows[0]) + // @ts-ignore const exporter = exporters[format] const filename = `export.${format}` @@ -406,7 +421,7 @@ exports.exportRows = async ctx => { return apiFileReturn(exporter(headers, rows)) } -exports.fetchEnrichedRow = async ctx => { +export async function fetchEnrichedRow(ctx: BBContext) { const db = context.getAppDB() const tableId = ctx.params.tableId const rowId = ctx.params.rowId @@ -416,10 +431,10 @@ exports.fetchEnrichedRow = async ctx => { findRow(ctx, tableId, rowId), ]) // get the link docs - const linkVals = await linkRows.getLinkDocuments({ + const linkVals = (await linkRows.getLinkDocuments({ tableId, rowId, - }) + })) as LinkDocumentValue[] // look up the actual rows based on the ids let response = ( await db.allDocs({ @@ -428,8 +443,8 @@ exports.fetchEnrichedRow = async ctx => { }) ).rows.map(row => row.doc) // group responses by table - let groups = {}, - tables = {} + let groups: any = {}, + tables: Record = {} for (let row of response) { if (!row.tableId) { row.tableId = getTableIDFromRowID(row._id) @@ -442,11 +457,11 @@ exports.fetchEnrichedRow = async ctx => { groups[linkedTableId].push(row) } } - let linkedRows = [] + let linkedRows: Row[] = [] for (let [tableId, rows] of Object.entries(groups)) { // need to include the IDs in these rows for any links they may have linkedRows = linkedRows.concat( - await outputProcessing(tables[tableId], rows) + await outputProcessing(tables[tableId], rows as Row[]) ) } diff --git a/packages/server/src/api/controllers/row/staticFormula.ts b/packages/server/src/api/controllers/row/staticFormula.ts index ee09264af1..6e43f5822f 100644 --- a/packages/server/src/api/controllers/row/staticFormula.ts +++ b/packages/server/src/api/controllers/row/staticFormula.ts @@ -16,7 +16,7 @@ const { cloneDeep } = require("lodash/fp") * updated. * NOTE: this will only for affect static formulas. 
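The note above hinges on the static/dynamic formula split: a static formula is evaluated when a row is written and the result stored on the row, which is why updateRelatedFormula has to re-save related rows when their inputs change - the stored value goes stale otherwise. A small illustrative sketch, where the evaluate functions are hypothetical stand-ins for the templating step:

```ts
// Sketch of static formula evaluation: computed once at write time and
// stored, not recomputed on read. Shapes here are assumptions.
type FormulaRow = { _id: string; [key: string]: any }

function runStaticFormulas(
  row: FormulaRow,
  formulas: { [column: string]: (context: FormulaRow) => any }
): FormulaRow {
  const result = { ...row }
  for (const [column, evaluate] of Object.entries(formulas)) {
    // the evaluated value is persisted with the row
    result[column] = evaluate(result)
  }
  return result
}

const saved = runStaticFormulas(
  { _id: "order-1", items: [2, 3, 4] },
  { total: row => row.items.reduce((sum: number, n: number) => sum + n, 0) }
)
console.log(saved.total) // 9 - stays 9 until the row is written again
```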
*/ -exports.updateRelatedFormula = async (table: Table, enrichedRows: Row[]) => { +export async function updateRelatedFormula(table: Table, enrichedRows: Row[]) { const db = context.getAppDB() // no formula to update, we're done if (!table.relatedFormula) { @@ -122,9 +122,9 @@ export async function finaliseRow( const db = context.getAppDB() row.type = "row" // process the row before return, to include relationships - let enrichedRow = await outputProcessing(table, cloneDeep(row), { + let enrichedRow = (await outputProcessing(table, cloneDeep(row), { squash: false, - }) + })) as Row // use enriched row to generate formulas for saving, specifically only use as context row = processFormulas(table, row, { dynamic: false, diff --git a/packages/server/src/api/controllers/row/utils.ts b/packages/server/src/api/controllers/row/utils.ts index 3719c206e2..c003a16266 100644 --- a/packages/server/src/api/controllers/row/utils.ts +++ b/packages/server/src/api/controllers/row/utils.ts @@ -51,16 +51,19 @@ export async function validate({ }: { tableId?: string row: Row - table: Table + table?: Table }) { + let fetchedTable: Table if (!table) { const db = context.getAppDB() - table = await db.get(tableId) + fetchedTable = await db.get(tableId) + } else { + fetchedTable = table } const errors: any = {} - for (let fieldName of Object.keys(table.schema)) { - const constraints = cloneDeep(table.schema[fieldName].constraints) - const type = table.schema[fieldName].type + for (let fieldName of Object.keys(fetchedTable.schema)) { + const constraints = cloneDeep(fetchedTable.schema[fieldName].constraints) + const type = fetchedTable.schema[fieldName].type // formulas shouldn't validated, data will be deleted anyway if (type === FieldTypes.FORMULA) { continue diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts index 68c01557bc..32f5e33325 100644 --- a/packages/server/src/api/controllers/static/index.ts +++ b/packages/server/src/api/controllers/static/index.ts @@ -14,7 +14,6 @@ const { } = require("../../../utilities/fileSystem") const env = require("../../../environment") const { clientLibraryPath } = require("../../../utilities") -const { upload, deleteFiles } = require("../../../utilities/fileSystem") const { attachmentsRelativeURL } = require("../../../utilities") const { DocumentType } = require("../../../db/utils") const { context, objectStore, utils } = require("@budibase/backend-core") @@ -22,7 +21,7 @@ const AWS = require("aws-sdk") const fs = require("fs") async function prepareUpload({ s3Key, bucket, metadata, file }: any) { - const response = await upload({ + const response = await objectStore.upload({ bucket, metadata, filename: s3Key, @@ -95,7 +94,10 @@ export const uploadFile = async function (ctx: any) { } export const deleteObjects = async function (ctx: any) { - ctx.body = await deleteFiles(ObjectStoreBuckets.APPS, ctx.request.body.keys) + ctx.body = await objectStore.deleteFiles( + ObjectStoreBuckets.APPS, + ctx.request.body.keys + ) } export const serveApp = async function (ctx: any) { diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts index 8fd227e633..5257c4e39e 100644 --- a/packages/server/src/api/controllers/table/external.ts +++ b/packages/server/src/api/controllers/table/external.ts @@ -8,13 +8,9 @@ import { foreignKeyStructure, hasTypeChanged, } from "./utils" -import { - DataSourceOperation, - FieldTypes, - RelationshipTypes, -} from 
"../../../constants" +import { FieldTypes, RelationshipTypes } from "../../../constants" import { makeExternalQuery } from "../../../integrations/base/query" -import csvParser from "../../../utilities/csvParser" +import * as csvParser from "../../../utilities/csvParser" import { handleRequest } from "../row/external" import { events, context } from "@budibase/backend-core" import { @@ -347,7 +343,7 @@ export async function bulkImport(ctx: BBContext) { ...dataImport, existingTable: table, }) - await handleRequest(DataSourceOperation.BULK_CREATE, table._id, { + await handleRequest(Operation.BULK_CREATE, table._id!, { rows, }) await events.rows.imported(table, "csv", rows.length) diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts index 5d8ab5be3e..0df974adc4 100644 --- a/packages/server/src/api/controllers/table/index.ts +++ b/packages/server/src/api/controllers/table/index.ts @@ -1,6 +1,6 @@ import * as internal from "./internal" import * as external from "./external" -import csvParser from "../../../utilities/csvParser" +import * as csvParser from "../../../utilities/csvParser" import { isExternalTable, isSQL } from "../../../integrations/utils" import { getDatasourceParams } from "../../../db/utils" import { context, events } from "@budibase/backend-core" @@ -103,7 +103,10 @@ export async function validateCSVSchema(ctx: BBContext) { if (tableId) { existingTable = await sdk.tables.getTable(tableId) } - let result = await csvParser.parse(csvString, schema) + let result: Record | undefined = await csvParser.parse( + csvString, + schema + ) if (existingTable) { result = csvParser.updateSchema({ schema: result, existingTable }) } diff --git a/packages/server/src/api/controllers/view/viewBuilder.js b/packages/server/src/api/controllers/view/viewBuilder.ts similarity index 85% rename from packages/server/src/api/controllers/view/viewBuilder.js rename to packages/server/src/api/controllers/view/viewBuilder.ts index 6596e0d9e7..3076e311b1 100644 --- a/packages/server/src/api/controllers/view/viewBuilder.js +++ b/packages/server/src/api/controllers/view/viewBuilder.ts @@ -1,4 +1,14 @@ -const TOKEN_MAP = { +import { ViewFilter } from "@budibase/types" + +type ViewTemplateOpts = { + field: string + tableId: string + groupBy: string + filters: ViewFilter[] + calculation: string +} + +const TOKEN_MAP: Record = { EQUALS: "===", NOT_EQUALS: "!==", LT: "<", @@ -10,13 +20,13 @@ const TOKEN_MAP = { OR: "||", } -const CONDITIONS = { +const CONDITIONS: Record = { EMPTY: "EMPTY", NOT_EMPTY: "NOT_EMPTY", CONTAINS: "CONTAINS", } -const isEmptyExpression = key => { +function isEmptyExpression(key: string) { return `( doc["${key}"] === undefined || doc["${key}"] === null || @@ -25,19 +35,19 @@ const isEmptyExpression = key => { )` } -const GROUP_PROPERTY = { +const GROUP_PROPERTY: Record = { group: { type: "string", }, } -const FIELD_PROPERTY = { +const FIELD_PROPERTY: Record = { field: { type: "string", }, } -const SCHEMA_MAP = { +const SCHEMA_MAP: Record = { sum: { field: "string", value: "number", @@ -74,7 +84,7 @@ const SCHEMA_MAP = { * @param {Array} filters - an array of filter objects * @returns {String} JS Expression */ -function parseFilterExpression(filters) { +function parseFilterExpression(filters: ViewFilter[]) { const expression = [] let first = true @@ -111,7 +121,7 @@ function parseFilterExpression(filters) { * @param {String?} field - field to use for calculations, if any * @param {String?} groupBy - field to group calculation 
results on, if any */ -function parseEmitExpression(field, groupBy) { +function parseEmitExpression(field: string, groupBy: string) { return `emit(doc["${groupBy || "_id"}"], doc["${field}"]);` } @@ -126,7 +136,13 @@ function parseEmitExpression(field, groupBy) { * filters: Array of filter objects containing predicates that are parsed into a JS expression * calculation: an optional calculation to be performed over the view data. */ -function viewTemplate({ field, tableId, groupBy, filters = [], calculation }) { +export = function ({ + field, + tableId, + groupBy, + filters = [], + calculation, +}: ViewTemplateOpts) { // first filter can't have a conjunction if (filters && filters.length > 0 && filters[0].conjunction) { delete filters[0].conjunction @@ -179,5 +195,3 @@ function viewTemplate({ field, tableId, groupBy, filters = [], calculation }) { ...reduction, } } - -module.exports = viewTemplate diff --git a/packages/server/src/api/controllers/webhook.ts b/packages/server/src/api/controllers/webhook.ts index 26bf16bd4c..f877110646 100644 --- a/packages/server/src/api/controllers/webhook.ts +++ b/packages/server/src/api/controllers/webhook.ts @@ -1,5 +1,5 @@ import { getWebhookParams } from "../../db/utils" -import triggers from "../../automations/triggers" +import * as triggers from "../../automations/triggers" import { db as dbCore, context } from "@budibase/backend-core" import { Webhook, diff --git a/packages/server/src/api/index.ts b/packages/server/src/api/index.ts index a77fc62b9b..3375161dd8 100644 --- a/packages/server/src/api/index.ts +++ b/packages/server/src/api/index.ts @@ -37,7 +37,7 @@ router // re-direct before any middlewares occur .redirect("/", "/builder") .use( - auth.buildAuthMiddleware(null, { + auth.buildAuthMiddleware([], { publicAllowed: true, }) ) @@ -45,7 +45,7 @@ router // the server can be public anywhere, so nowhere should throw errors // if the tenancy has not been set, it'll have to be discovered at application layer .use( - auth.buildTenancyMiddleware(null, null, { + auth.buildTenancyMiddleware([], [], { noTenancyRequired: true, }) ) diff --git a/packages/server/src/api/routes/integration.ts b/packages/server/src/api/routes/integration.ts index 835cc5a896..b21915a6d1 100644 --- a/packages/server/src/api/routes/integration.ts +++ b/packages/server/src/api/routes/integration.ts @@ -1,5 +1,5 @@ import Router from "@koa/router" -import controller from "../controllers/integration" +import * as controller from "../controllers/integration" import authorized from "../../middleware/authorized" import { permissions } from "@budibase/backend-core" diff --git a/packages/server/src/api/routes/public/index.ts b/packages/server/src/api/routes/public/index.ts index 198ba2d2b2..79e7731752 100644 --- a/packages/server/src/api/routes/public/index.ts +++ b/packages/server/src/api/routes/public/index.ts @@ -12,11 +12,8 @@ import env from "../../../environment" // below imports don't have declaration files const Router = require("@koa/router") const { RateLimit, Stores } = require("koa2-ratelimit") -const { - PermissionLevel, - PermissionType, -} = require("@budibase/backend-core/permissions") -const { getRedisOptions } = require("@budibase/backend-core/redis").utils +import { redis, permissions } from "@budibase/backend-core" +const { PermissionType, PermissionLevel } = permissions const PREFIX = "/api/public/v1" // allow a lot more requests when in test @@ -31,7 +28,7 @@ function getApiLimitPerSecond(): number { let rateLimitStore: any = null if (!env.isTest()) { - const 
REDIS_OPTS = getRedisOptions() + const REDIS_OPTS = redis.utils.getRedisOptions() let options if (REDIS_OPTS.redisProtocolUrl) { // fully qualified redis URL diff --git a/packages/server/src/api/routes/tests/deployment.spec.ts b/packages/server/src/api/routes/tests/deployment.spec.ts index 0219e3f2b4..bd705f9f59 100644 --- a/packages/server/src/api/routes/tests/deployment.spec.ts +++ b/packages/server/src/api/routes/tests/deployment.spec.ts @@ -1,4 +1,4 @@ -import setup from "./utilities" +import * as setup from "./utilities" import { events } from "@budibase/backend-core" describe("/deployments", () => { diff --git a/packages/server/src/api/routes/tests/table.spec.js b/packages/server/src/api/routes/tests/table.spec.js index e42aaf07e4..4776878635 100644 --- a/packages/server/src/api/routes/tests/table.spec.js +++ b/packages/server/src/api/routes/tests/table.spec.js @@ -271,7 +271,7 @@ describe("/tables", () => { .expect(200) expect(res.body.message).toEqual(`Table ${testTable._id} deleted.`) expect(events.table.deleted).toBeCalledTimes(1) - expect(events.table.deleted).toBeCalledWith(testTable) + expect(events.table.deleted).toBeCalledWith({ ...testTable, tableId: testTable._id }) }) it("deletes linked references to the table after deletion", async () => { @@ -288,6 +288,7 @@ describe("/tables", () => { }, TestTable: { type: "link", + fieldName: "TestTable", tableId: testTable._id, constraints: { type: "array" diff --git a/packages/server/src/api/routes/tests/utilities/index.js b/packages/server/src/api/routes/tests/utilities/index.ts similarity index 73% rename from packages/server/src/api/routes/tests/utilities/index.js rename to packages/server/src/api/routes/tests/utilities/index.ts index b6dde0cb79..87a373a2c6 100644 --- a/packages/server/src/api/routes/tests/utilities/index.js +++ b/packages/server/src/api/routes/tests/utilities/index.ts @@ -1,6 +1,6 @@ -const TestConfig = require("../../../../tests/utilities/TestConfiguration") -const structures = require("../../../../tests/utilities/structures") -const env = require("../../../../environment") +import TestConfig from "../../../../tests/utilities/TestConfiguration" +import env from "../../../../environment" +export * as structures from "../../../../tests/utilities/structures" function user() { return { @@ -40,16 +40,18 @@ jest.mock("../../../../utilities/workerRequests", () => ({ removeAppFromUserRoles: jest.fn(), })) -exports.delay = ms => new Promise(resolve => setTimeout(resolve, ms)) +export function delay(ms: number) { + return new Promise(resolve => setTimeout(resolve, ms)) +} -let request, config +let request: any, config: any -exports.beforeAll = () => { +export function beforeAll() { config = new TestConfig() request = config.getRequest() } -exports.afterAll = () => { +export function afterAll() { if (config) { config.end() } @@ -59,21 +61,21 @@ exports.afterAll = () => { config = null } -exports.getRequest = () => { +export function getRequest() { if (!request) { exports.beforeAll() } return request } -exports.getConfig = () => { +export function getConfig() { if (!config) { exports.beforeAll() } return config } -exports.switchToSelfHosted = async func => { +export async function switchToSelfHosted(func: any) { // self hosted stops any attempts to Dynamo env._set("NODE_ENV", "production") env._set("SELF_HOSTED", true) @@ -90,5 +92,3 @@ exports.switchToSelfHosted = async func => { throw error } } - -exports.structures = structures diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts index 
8a9c1d5b24..0fb78d7525 100644 --- a/packages/server/src/app.ts +++ b/packages/server/src/app.ts @@ -10,20 +10,20 @@ if (process.env.ELASTIC_APM_ENABLED) { } import { ExtendableContext } from "koa" -import db from "./db" +import * as db from "./db" db.init() -const Koa = require("koa") -const destroyable = require("server-destroy") -const koaBody = require("koa-body") -const http = require("http") -const api = require("./api") -const automations = require("./automations/index") -const Sentry = require("@sentry/node") -const { Thread } = require("./threads") +import Koa from "koa" +import koaBody from "koa-body" +import http from "http" +import * as api from "./api" +import * as automations from "./automations" +import { Thread } from "./threads" import * as redis from "./utilities/redis" import { events, logging } from "@budibase/backend-core" import { initialise as initialiseWebsockets } from "./websocket" import { startup } from "./startup" +const Sentry = require("@sentry/node") +const destroyable = require("server-destroy") const app = new Koa() @@ -34,6 +34,7 @@ app.use( formLimit: "10mb", jsonLimit: "10mb", textLimit: "10mb", + // @ts-ignore enableTypes: ["json", "form", "text"], parsedMethods: ["POST", "PUT", "PATCH", "DELETE"], }) @@ -76,12 +77,13 @@ server.on("close", async () => { } }) -module.exports = server.listen(env.PORT || 0, async () => { +export = server.listen(env.PORT || 0, async () => { await startup(app, server) }) const shutdown = () => { server.close() + // @ts-ignore server.destroy() } diff --git a/packages/server/src/automations/actions.js b/packages/server/src/automations/actions.ts similarity index 58% rename from packages/server/src/automations/actions.js rename to packages/server/src/automations/actions.ts index e1ebff560a..456399bc68 100644 --- a/packages/server/src/automations/actions.js +++ b/packages/server/src/automations/actions.ts @@ -1,22 +1,26 @@ -const sendSmtpEmail = require("./steps/sendSmtpEmail") -const createRow = require("./steps/createRow") -const updateRow = require("./steps/updateRow") -const deleteRow = require("./steps/deleteRow") -const executeScript = require("./steps/executeScript") -const executeQuery = require("./steps/executeQuery") -const outgoingWebhook = require("./steps/outgoingWebhook") -const serverLog = require("./steps/serverLog") -const discord = require("./steps/discord") -const slack = require("./steps/slack") -const zapier = require("./steps/zapier") -const integromat = require("./steps/integromat") -let filter = require("./steps/filter") -let delay = require("./steps/delay") -let queryRow = require("./steps/queryRows") -let loop = require("./steps/loop") -const env = require("../environment") +import * as sendSmtpEmail from "./steps/sendSmtpEmail" +import * as createRow from "./steps/createRow" +import * as updateRow from "./steps/updateRow" +import * as deleteRow from "./steps/deleteRow" +import * as executeScript from "./steps/executeScript" +import * as executeQuery from "./steps/executeQuery" +import * as outgoingWebhook from "./steps/outgoingWebhook" +import * as serverLog from "./steps/serverLog" +import * as discord from "./steps/discord" +import * as slack from "./steps/slack" +import * as zapier from "./steps/zapier" +import * as integromat from "./steps/integromat" +import * as filter from "./steps/filter" +import * as delay from "./steps/delay" +import * as queryRow from "./steps/queryRows" +import * as loop from "./steps/loop" +import env from "../environment" +import { AutomationStepSchema, 
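
// Why the shutdown() above calls both close() and destroy(): server-destroy
// patches a destroy() method onto the HTTP server that also severs kept-alive
// sockets, which a plain close() would otherwise wait on. Self-contained sketch:
import http from "http"
const enableDestroy = require("server-destroy")

const srv = http.createServer((_req, res) => res.end("ok"))
enableDestroy(srv) // adds srv.destroy()
srv.listen(0, () => {
  srv.destroy() // stops listening and terminates open connections immediately
})
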
AutomationStepInput } from "@budibase/types" -const ACTION_IMPLS = { +const ACTION_IMPLS: Record< + string, + (opts: AutomationStepInput) => Promise +> = { SEND_EMAIL_SMTP: sendSmtpEmail.run, CREATE_ROW: createRow.run, UPDATE_ROW: updateRow.run, @@ -28,14 +32,13 @@ const ACTION_IMPLS = { DELAY: delay.run, FILTER: filter.run, QUERY_ROWS: queryRow.run, - LOOP: loop.run, // these used to be lowercase step IDs, maintain for backwards compat discord: discord.run, slack: slack.run, zapier: zapier.run, integromat: integromat.run, } -const ACTION_DEFINITIONS = { +export const ACTION_DEFINITIONS: Record = { SEND_EMAIL_SMTP: sendSmtpEmail.definition, CREATE_ROW: createRow.definition, UPDATE_ROW: updateRow.definition, @@ -60,15 +63,15 @@ const ACTION_DEFINITIONS = { // ran at all if (env.SELF_HOSTED) { const bash = require("./steps/bash") + // @ts-ignore ACTION_IMPLS["EXECUTE_BASH"] = bash.run + // @ts-ignore ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition } /* istanbul ignore next */ -exports.getAction = async function (actionName) { +export async function getAction(actionName: string) { if (ACTION_IMPLS[actionName] != null) { return ACTION_IMPLS[actionName] } } - -exports.ACTION_DEFINITIONS = ACTION_DEFINITIONS diff --git a/packages/server/src/automations/automationUtils.js b/packages/server/src/automations/automationUtils.ts similarity index 86% rename from packages/server/src/automations/automationUtils.js rename to packages/server/src/automations/automationUtils.ts index 12f7566b6d..8a75de83dd 100644 --- a/packages/server/src/automations/automationUtils.js +++ b/packages/server/src/automations/automationUtils.ts @@ -1,9 +1,10 @@ -const { +import { decodeJSBinding, isJSBinding, encodeJSBinding, -} = require("@budibase/string-templates") -const sdk = require("../sdk") +} from "@budibase/string-templates" +import sdk from "../sdk" +import { Row } from "@budibase/types" /** * When values are input to the system generally they will be of type string as this is required for template strings. @@ -21,7 +22,7 @@ const sdk = require("../sdk") * @returns {object} The inputs object which has had all the various types supported by this function converted to their * primitive types. */ -exports.cleanInputValues = (inputs, schema) => { +export function cleanInputValues(inputs: Record, schema: any) { if (schema == null) { return inputs } @@ -62,12 +63,12 @@ exports.cleanInputValues = (inputs, schema) => { * @param {object} row The input row structure which requires clean-up after having been through template statements. * @returns {Promise} The cleaned up rows object, will should now have all the required primitive types. */ -exports.cleanUpRow = async (tableId, row) => { +export async function cleanUpRow(tableId: string, row: Row) { let table = await sdk.tables.getTable(tableId) - return exports.cleanInputValues(row, { properties: table.schema }) + return cleanInputValues(row, { properties: table.schema }) } -exports.getError = err => { +export function getError(err: any) { if (err == null) { return "No error provided." } @@ -80,13 +81,13 @@ exports.getError = err => { return typeof err !== "string" ? err.toString() : err } -exports.substituteLoopStep = (hbsString, substitute) => { +export function substituteLoopStep(hbsString: string, substitute: string) { let checkForJS = isJSBinding(hbsString) let substitutedHbsString = "" let open = checkForJS ? `$("` : "{{" let closed = checkForJS ? 
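
// The coercion that the cleanInputValues doc comment above describes, shown
// against a hypothetical step schema; template bindings hand every value over
// as a string, and the schema drives conversion back to primitives.
import { cleanInputValues } from "./automationUtils"

const schema = {
  properties: {
    quantity: { type: "number" },
    confirmed: { type: "boolean" },
  },
}
const cleaned = cleanInputValues({ quantity: "3", confirmed: "true" }, schema)
// cleaned is now { quantity: 3, confirmed: true }
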
`")` : "}}" if (checkForJS) { - hbsString = decodeJSBinding(hbsString) + hbsString = decodeJSBinding(hbsString) as string } let pointer = 0, openPointer = 0, @@ -111,9 +112,9 @@ exports.substituteLoopStep = (hbsString, substitute) => { return substitutedHbsString } -exports.stringSplit = value => { - if (value == null) { - return [] +export function stringSplit(value: string | string[]) { + if (value == null || Array.isArray(value)) { + return value || [] } if (value.split("\n").length > 1) { value = value.split("\n") diff --git a/packages/server/src/automations/bullboard.js b/packages/server/src/automations/bullboard.js deleted file mode 100644 index dd4a6aa383..0000000000 --- a/packages/server/src/automations/bullboard.js +++ /dev/null @@ -1,39 +0,0 @@ -const { createBullBoard } = require("@bull-board/api") -const { BullAdapter } = require("@bull-board/api/bullAdapter") -const { KoaAdapter } = require("@bull-board/koa") -const { queue } = require("@budibase/backend-core") -const automation = require("../threads/automation") -const { backups } = require("@budibase/pro") - -let automationQueue = queue.createQueue( - queue.JobQueue.AUTOMATION, - automation.removeStalled -) - -const PATH_PREFIX = "/bulladmin" - -exports.init = async () => { - // Set up queues for bull board admin - const backupQueue = await backups.getBackupQueue() - const queues = [automationQueue] - if (backupQueue) { - queues.push(backupQueue) - } - const adapters = [] - const serverAdapter = new KoaAdapter() - for (let queue of queues) { - adapters.push(new BullAdapter(queue)) - } - createBullBoard({ - queues: adapters, - serverAdapter, - }) - serverAdapter.setBasePath(PATH_PREFIX) - return serverAdapter.registerPlugin() -} - -exports.shutdown = async () => { - await queue.shutdown() -} - -exports.automationQueue = automationQueue diff --git a/packages/server/src/automations/bullboard.ts b/packages/server/src/automations/bullboard.ts new file mode 100644 index 0000000000..df784eacff --- /dev/null +++ b/packages/server/src/automations/bullboard.ts @@ -0,0 +1,38 @@ +import { BullAdapter } from "@bull-board/api/bullAdapter" +import { KoaAdapter } from "@bull-board/koa" +import { queue } from "@budibase/backend-core" +import * as automation from "../threads/automation" +import { backups } from "@budibase/pro" +import { createBullBoard } from "@bull-board/api" +import BullQueue from "bull" + +export const automationQueue: BullQueue.Queue = queue.createQueue( + queue.JobQueue.AUTOMATION, + { removeStalledCb: automation.removeStalled } +) + +const PATH_PREFIX = "/bulladmin" + +export async function init() { + // Set up queues for bull board admin + const backupQueue = await backups.getBackupQueue() + const queues = [automationQueue] + if (backupQueue) { + queues.push(backupQueue) + } + const adapters = [] + const serverAdapter: any = new KoaAdapter() + for (let queue of queues) { + adapters.push(new BullAdapter(queue)) + } + createBullBoard({ + queues: adapters, + serverAdapter, + }) + serverAdapter.setBasePath(PATH_PREFIX) + return serverAdapter.registerPlugin() +} + +export async function shutdown() { + await queue.shutdown() +} diff --git a/packages/server/src/automations/index.js b/packages/server/src/automations/index.js deleted file mode 100644 index 521991dd2c..0000000000 --- a/packages/server/src/automations/index.js +++ /dev/null @@ -1,29 +0,0 @@ -const { processEvent } = require("./utils") -const { automationQueue, shutdown } = require("./bullboard") -const { TRIGGER_DEFINITIONS, rebootTrigger } = 
require("./triggers") -const { ACTION_DEFINITIONS } = require("./actions") - -/** - * This module is built purely to kick off the worker farm and manage the inputs/outputs - */ -exports.init = async function () { - // this promise will not complete - const promise = automationQueue.process(async job => { - await processEvent(job) - }) - // on init we need to trigger any reboot automations - await rebootTrigger() - return promise -} - -exports.getQueues = () => { - return [automationQueue] -} - -exports.shutdown = () => { - return shutdown() -} - -exports.automationQueue = automationQueue -exports.TRIGGER_DEFINITIONS = TRIGGER_DEFINITIONS -exports.ACTION_DEFINITIONS = ACTION_DEFINITIONS diff --git a/packages/server/src/automations/index.ts b/packages/server/src/automations/index.ts new file mode 100644 index 0000000000..e46500d33e --- /dev/null +++ b/packages/server/src/automations/index.ts @@ -0,0 +1,26 @@ +import { processEvent } from "./utils" +import { automationQueue } from "./bullboard" +import { rebootTrigger } from "./triggers" +import BullQueue from "bull" + +export { automationQueue } from "./bullboard" +export { shutdown } from "./bullboard" +export { TRIGGER_DEFINITIONS } from "./triggers" +export { ACTION_DEFINITIONS } from "./actions" + +/** + * This module is built purely to kick off the worker farm and manage the inputs/outputs + */ +export async function init() { + // this promise will not complete + const promise = automationQueue.process(async job => { + await processEvent(job) + }) + // on init we need to trigger any reboot automations + await rebootTrigger() + return promise +} + +export function getQueues(): BullQueue.Queue[] { + return [automationQueue] +} diff --git a/packages/server/src/automations/steps/bash.js b/packages/server/src/automations/steps/bash.ts similarity index 71% rename from packages/server/src/automations/steps/bash.js rename to packages/server/src/automations/steps/bash.ts index efa4295e35..e6deb8c38f 100644 --- a/packages/server/src/automations/steps/bash.js +++ b/packages/server/src/automations/steps/bash.ts @@ -1,16 +1,21 @@ -const { execSync } = require("child_process") -const { processStringSync } = require("@budibase/string-templates") -const automationUtils = require("../automationUtils") -const environment = require("../../environment") +import { execSync } from "child_process" +import { processStringSync } from "@budibase/string-templates" +import * as automationUtils from "../automationUtils" +import environment from "../../environment" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "Bash Scripting", tagline: "Execute a bash command", icon: "JourneyEvent", description: "Run a bash script", type: "ACTION", internal: true, - stepId: "EXECUTE_BASH", + stepId: AutomationActionStepId.EXECUTE_BASH, inputs: {}, schema: { inputs: { @@ -39,7 +44,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs, context }) { +export async function run({ inputs, context }: AutomationStepInput) { if (inputs.code == null) { return { stdout: "Budibase bash automation failed: Invalid inputs", @@ -55,7 +60,7 @@ exports.run = async function ({ inputs, context }) { stdout = execSync(command, { timeout: environment.QUERY_THREAD_TIMEOUT || 500, }).toString() - } catch (err) { + } catch (err: any) { stdout = err.message success = false } diff --git a/packages/server/src/automations/steps/createRow.ts 
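
// What the init() above sets in motion, reduced to a bare Bull sketch; the
// queue name and job payload are hypothetical (Budibase creates its queue via
// backend-core rather than instantiating Bull directly like this).
import BullQueue from "bull"

async function example() {
  const queue = new BullQueue("example-automation")
  queue.process(async job => {
    // the real worker calls processEvent(job) here to run the automation steps
    console.log("processing", job.data)
  })
  await queue.add({ automationId: "auto_123" }, { removeOnComplete: true })
}
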
b/packages/server/src/automations/steps/createRow.ts index 906a1529ea..d529127360 100644 --- a/packages/server/src/automations/steps/createRow.ts +++ b/packages/server/src/automations/steps/createRow.ts @@ -1,15 +1,20 @@ import { save } from "../../api/controllers/row" import { cleanUpRow, getError } from "../automationUtils" import { buildCtx } from "./utils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -export const definition = { +export const definition: AutomationStepSchema = { name: "Create Row", tagline: "Create a {{inputs.enriched.table.name}} row", icon: "TableRowAddBottom", description: "Add a row to your database", type: "ACTION", internal: true, - stepId: "CREATE_ROW", + stepId: AutomationActionStepId.CREATE_ROW, inputs: {}, schema: { inputs: { @@ -58,7 +63,7 @@ export const definition = { }, } -export async function run({ inputs, appId, emitter }: any) { +export async function run({ inputs, appId, emitter }: AutomationStepInput) { if (inputs.row == null || inputs.row.tableId == null) { return { success: false, diff --git a/packages/server/src/automations/steps/delay.js b/packages/server/src/automations/steps/delay.ts similarity index 68% rename from packages/server/src/automations/steps/delay.js rename to packages/server/src/automations/steps/delay.ts index ad59df54c5..58ca383ac1 100644 --- a/packages/server/src/automations/steps/delay.js +++ b/packages/server/src/automations/steps/delay.ts @@ -1,11 +1,16 @@ -let { wait } = require("../../utilities") +import { wait } from "../../utilities" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "Delay", icon: "Clock", tagline: "Delay for {{inputs.time}} milliseconds", description: "Delay the automation until an amount of time has passed", - stepId: "DELAY", + stepId: AutomationActionStepId.DELAY, internal: true, inputs: {}, schema: { @@ -31,7 +36,7 @@ exports.definition = { type: "LOGIC", } -exports.run = async function delay({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { await wait(inputs.time) return { success: true, diff --git a/packages/server/src/automations/steps/deleteRow.ts b/packages/server/src/automations/steps/deleteRow.ts index 0537d60de7..540d95b94d 100644 --- a/packages/server/src/automations/steps/deleteRow.ts +++ b/packages/server/src/automations/steps/deleteRow.ts @@ -1,14 +1,19 @@ import { destroy } from "../../api/controllers/row" import { buildCtx } from "./utils" import { getError } from "../automationUtils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -export const definition = { +export const definition: AutomationStepSchema = { description: "Delete a row from your database", icon: "TableRowRemoveCenter", name: "Delete Row", tagline: "Delete a {{inputs.enriched.table.name}} row", type: "ACTION", - stepId: "DELETE_ROW", + stepId: AutomationActionStepId.DELETE_ROW, internal: true, inputs: {}, schema: { @@ -47,7 +52,7 @@ export const definition = { }, } -export async function run({ inputs, appId, emitter }: any) { +export async function run({ inputs, appId, emitter }: AutomationStepInput) { if (inputs.id == null) { return { success: false, diff --git a/packages/server/src/automations/steps/discord.js b/packages/server/src/automations/steps/discord.ts similarity index 83% rename from 
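
// The pattern every converted step above follows: a typed definition plus a
// run() function. Sketched as a minimal hypothetical step that reuses the
// existing SERVER_LOG step id; the schema shape mirrors the steps in this diff.
import {
  AutomationActionStepId,
  AutomationStepSchema,
  AutomationStepInput,
} from "@budibase/types"

export const definition: AutomationStepSchema = {
  name: "Example",
  tagline: "Log {{inputs.text}}",
  icon: "Monitoring",
  description: "A minimal step shape for illustration",
  type: "ACTION",
  internal: true,
  stepId: AutomationActionStepId.SERVER_LOG,
  inputs: {},
  schema: {
    inputs: { properties: { text: { type: "string" } }, required: ["text"] },
    outputs: { properties: { success: { type: "boolean" } }, required: [] },
  },
}

export async function run({ inputs }: AutomationStepInput) {
  return { success: true, message: inputs.text }
}
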
packages/server/src/automations/steps/discord.js rename to packages/server/src/automations/steps/discord.ts index a0b732b11f..ae484fa42e 100644 --- a/packages/server/src/automations/steps/discord.js +++ b/packages/server/src/automations/steps/discord.ts @@ -1,15 +1,20 @@ -const fetch = require("node-fetch") -const { getFetchResponse } = require("./utils") +import fetch from "node-fetch" +import { getFetchResponse } from "./utils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" const DEFAULT_USERNAME = "Budibase Automate" const DEFAULT_AVATAR_URL = "https://i.imgur.com/a1cmTKM.png" -exports.definition = { +export const definition: AutomationStepSchema = { name: "Discord Message", tagline: "Send a message to a Discord server", description: "Send a message to a Discord server", icon: "ri-discord-line", - stepId: "discord", + stepId: AutomationActionStepId.discord, type: "ACTION", internal: false, inputs: {}, @@ -54,7 +59,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { let { url, username, avatar_url, content } = inputs if (!username) { username = DEFAULT_USERNAME diff --git a/packages/server/src/automations/steps/executeQuery.js b/packages/server/src/automations/steps/executeQuery.ts similarity index 77% rename from packages/server/src/automations/steps/executeQuery.js rename to packages/server/src/automations/steps/executeQuery.ts index 93cc8e0fb8..72fb69b96c 100644 --- a/packages/server/src/automations/steps/executeQuery.js +++ b/packages/server/src/automations/steps/executeQuery.ts @@ -1,14 +1,19 @@ -const queryController = require("../../api/controllers/query") -const { buildCtx } = require("./utils") -const automationUtils = require("../automationUtils") +import * as queryController from "../../api/controllers/query" +import { buildCtx } from "./utils" +import * as automationUtils from "../automationUtils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "External Data Connector", tagline: "Execute Data Connector", icon: "Data", description: "Execute a query in an external data connector", type: "ACTION", - stepId: "EXECUTE_QUERY", + stepId: AutomationActionStepId.EXECUTE_QUERY, internal: true, inputs: {}, schema: { @@ -50,7 +55,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs, appId, emitter }) { +export async function run({ inputs, appId, emitter }: AutomationStepInput) { if (inputs.query == null) { return { success: false, @@ -62,7 +67,7 @@ exports.run = async function ({ inputs, appId, emitter }) { const { queryId, ...rest } = inputs.query - const ctx = buildCtx(appId, emitter, { + const ctx: any = buildCtx(appId, emitter, { body: { parameters: rest, }, diff --git a/packages/server/src/automations/steps/executeScript.js b/packages/server/src/automations/steps/executeScript.ts similarity index 69% rename from packages/server/src/automations/steps/executeScript.js rename to packages/server/src/automations/steps/executeScript.ts index 7a7296014b..84bdb0e2d5 100644 --- a/packages/server/src/automations/steps/executeScript.js +++ b/packages/server/src/automations/steps/executeScript.ts @@ -1,15 +1,20 @@ -const scriptController = require("../../api/controllers/script") -const { buildCtx } = require("./utils") -const automationUtils = require("../automationUtils") +import * as 
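
// The buildCtx pattern the query/script steps above share: fake just enough of
// a Koa ctx that a controller can run outside a real request. The controller
// here is hypothetical; real steps pass e.g. queryController handlers instead.
import { buildCtx } from "./utils"

async function exampleController(ctx: any) {
  ctx.body = { gotParams: ctx.params }
}

async function example(appId: string) {
  const ctx: any = buildCtx(appId, null, { params: { queryId: "query_abc" } })
  await exampleController(ctx)
  return ctx.body // controllers reply by assigning ctx.body
}
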
scriptController from "../../api/controllers/script" +import { buildCtx } from "./utils" +import * as automationUtils from "../automationUtils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "JS Scripting", tagline: "Execute JavaScript Code", icon: "Code", description: "Run a piece of JavaScript code in your automation", type: "ACTION", internal: true, - stepId: "EXECUTE_SCRIPT", + stepId: AutomationActionStepId.EXECUTE_SCRIPT, inputs: {}, schema: { inputs: { @@ -38,7 +43,12 @@ exports.definition = { }, } -exports.run = async function ({ inputs, appId, context, emitter }) { +export async function run({ + inputs, + appId, + context, + emitter, +}: AutomationStepInput) { if (inputs.code == null) { return { success: false, @@ -48,7 +58,7 @@ exports.run = async function ({ inputs, appId, context, emitter }) { } } - const ctx = buildCtx(appId, emitter, { + const ctx: any = buildCtx(appId, emitter, { body: { script: inputs.code, context, diff --git a/packages/server/src/automations/steps/filter.js b/packages/server/src/automations/steps/filter.ts similarity index 86% rename from packages/server/src/automations/steps/filter.js rename to packages/server/src/automations/steps/filter.ts index 566bb60f22..18914ddca6 100644 --- a/packages/server/src/automations/steps/filter.js +++ b/packages/server/src/automations/steps/filter.ts @@ -1,21 +1,24 @@ -const FilterConditions = { +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" + +export const FilterConditions = { EQUAL: "EQUAL", NOT_EQUAL: "NOT_EQUAL", GREATER_THAN: "GREATER_THAN", LESS_THAN: "LESS_THAN", } -const PrettyFilterConditions = { +export const PrettyFilterConditions = { [FilterConditions.EQUAL]: "Equals", [FilterConditions.NOT_EQUAL]: "Not equals", [FilterConditions.GREATER_THAN]: "Greater than", [FilterConditions.LESS_THAN]: "Less than", } -exports.FilterConditions = FilterConditions -exports.PrettyFilterConditions = PrettyFilterConditions - -exports.definition = { +export const definition: AutomationStepSchema = { name: "Condition", tagline: "{{inputs.field}} {{inputs.condition}} {{inputs.value}}", icon: "Branch2", @@ -23,9 +26,9 @@ exports.definition = { "Conditionally halt automations which do not meet certain conditions", type: "LOGIC", internal: true, - stepId: "FILTER", + stepId: AutomationActionStepId.FILTER, inputs: { - condition: FilterConditions.EQUALS, + condition: FilterConditions.EQUAL, }, schema: { inputs: { @@ -63,7 +66,7 @@ exports.definition = { }, } -exports.run = async function filter({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { try { let { field, condition, value } = inputs // coerce types so that we can use them diff --git a/packages/server/src/automations/steps/integromat.js b/packages/server/src/automations/steps/integromat.ts similarity index 84% rename from packages/server/src/automations/steps/integromat.js rename to packages/server/src/automations/steps/integromat.ts index e7ea03efca..dd897b5429 100644 --- a/packages/server/src/automations/steps/integromat.js +++ b/packages/server/src/automations/steps/integromat.ts @@ -1,13 +1,18 @@ -const fetch = require("node-fetch") -const { getFetchResponse } = require("./utils") +import fetch from "node-fetch" +import { getFetchResponse } from "./utils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from 
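
// How the FILTER step's four conditions above boil down once both sides are
// coerced to a common type; compare() is an illustrative helper, not the step's
// actual implementation (which the diff does not show in full).
import { FilterConditions } from "./filter"

function compare(field: any, condition: string, value: any): boolean {
  switch (condition) {
    case FilterConditions.EQUAL:
      return field === value
    case FilterConditions.NOT_EQUAL:
      return field !== value
    case FilterConditions.GREATER_THAN:
      return field > value
    case FilterConditions.LESS_THAN:
      return field < value
    default:
      return false
  }
}

compare(10, FilterConditions.GREATER_THAN, 5) // => true
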
"@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "Integromat Integration", tagline: "Trigger an Integromat scenario", description: "Performs a webhook call to Integromat and gets the response (if configured)", icon: "ri-shut-down-line", - stepId: "integromat", + stepId: AutomationActionStepId.integromat, type: "ACTION", internal: false, inputs: {}, @@ -61,7 +66,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { const { url, value1, value2, value3, value4, value5 } = inputs const response = await fetch(url, { diff --git a/packages/server/src/automations/steps/loop.js b/packages/server/src/automations/steps/loop.ts similarity index 86% rename from packages/server/src/automations/steps/loop.js rename to packages/server/src/automations/steps/loop.ts index c77def022a..72087ae357 100644 --- a/packages/server/src/automations/steps/loop.js +++ b/packages/server/src/automations/steps/loop.ts @@ -1,9 +1,11 @@ -exports.definition = { +import { AutomationActionStepId, AutomationStepSchema } from "@budibase/types" + +export const definition: AutomationStepSchema = { name: "Looping", icon: "Reuse", tagline: "Loop the block", description: "Loop", - stepId: "LOOP", + stepId: AutomationActionStepId.LOOP, internal: true, inputs: {}, schema: { diff --git a/packages/server/src/automations/steps/outgoingWebhook.js b/packages/server/src/automations/steps/outgoingWebhook.ts similarity index 84% rename from packages/server/src/automations/steps/outgoingWebhook.js rename to packages/server/src/automations/steps/outgoingWebhook.ts index 01d1e8d6be..ea1ffeb339 100644 --- a/packages/server/src/automations/steps/outgoingWebhook.js +++ b/packages/server/src/automations/steps/outgoingWebhook.ts @@ -1,13 +1,18 @@ -const fetch = require("node-fetch") -const { getFetchResponse } = require("./utils") -const automationUtils = require("../automationUtils") +import fetch from "node-fetch" +import { getFetchResponse } from "./utils" +import * as automationUtils from "../automationUtils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -const RequestType = { - POST: "POST", - GET: "GET", - PUT: "PUT", - DELETE: "DELETE", - PATCH: "PATCH", +enum RequestType { + POST = "POST", + GET = "GET", + PUT = "PUT", + DELETE = "DELETE", + PATCH = "PATCH", } const BODY_REQUESTS = [RequestType.POST, RequestType.PUT, RequestType.PATCH] @@ -16,7 +21,7 @@ const BODY_REQUESTS = [RequestType.POST, RequestType.PUT, RequestType.PATCH] * NOTE: this functionality is deprecated - it no longer should be used. 
*/ -exports.definition = { +export const definition: AutomationStepSchema = { deprecated: true, name: "Outgoing webhook", tagline: "Send a {{inputs.requestMethod}} request", @@ -24,7 +29,7 @@ exports.definition = { description: "Send a request of specified method to a URL", type: "ACTION", internal: true, - stepId: "OUTGOING_WEBHOOK", + stepId: AutomationActionStepId.OUTGOING_WEBHOOK, inputs: { requestMethod: "POST", url: "http://", @@ -76,12 +81,12 @@ exports.definition = { }, } -exports.run = async function ({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { let { requestMethod, url, requestBody, headers } = inputs if (!url.startsWith("http")) { url = `http://${url}` } - const request = { + const request: any = { method: requestMethod, } if (headers) { diff --git a/packages/server/src/automations/steps/queryRows.js b/packages/server/src/automations/steps/queryRows.ts similarity index 61% rename from packages/server/src/automations/steps/queryRows.js rename to packages/server/src/automations/steps/queryRows.ts index 23a7ce55fa..6de518e931 100644 --- a/packages/server/src/automations/steps/queryRows.js +++ b/packages/server/src/automations/steps/queryRows.ts @@ -1,36 +1,43 @@ -const rowController = require("../../api/controllers/row") -const tableController = require("../../api/controllers/table") -const { FieldTypes } = require("../../constants") -const { buildCtx } = require("./utils") -const automationUtils = require("../automationUtils") +import * as rowController from "../../api/controllers/row" +import * as tableController from "../../api/controllers/table" +import { FieldTypes } from "../../constants" +import { buildCtx } from "./utils" +import * as automationUtils from "../automationUtils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, + SearchFilters, + Table, +} from "@budibase/types" -const SortOrders = { - ASCENDING: "ascending", - DESCENDING: "descending", +enum SortOrder { + ASCENDING = "ascending", + DESCENDING = "descending", } -const SortOrdersPretty = { - [SortOrders.ASCENDING]: "Ascending", - [SortOrders.DESCENDING]: "Descending", +const SortOrderPretty = { + [SortOrder.ASCENDING]: "Ascending", + [SortOrder.DESCENDING]: "Descending", } -const EmptyFilterOptions = { - RETURN_ALL: "all", - RETURN_NONE: "none", +enum EmptyFilterOption { + RETURN_ALL = "all", + RETURN_NONE = "none", } -const EmptyFilterOptionsPretty = { - [EmptyFilterOptions.RETURN_ALL]: "Return all table rows", - [EmptyFilterOptions.RETURN_NONE]: "Return no rows", +const EmptyFilterOptionPretty = { + [EmptyFilterOption.RETURN_ALL]: "Return all table rows", + [EmptyFilterOption.RETURN_NONE]: "Return no rows", } -exports.definition = { +export const definition: AutomationStepSchema = { description: "Query rows from the database", icon: "Search", name: "Query rows", tagline: "Query rows from {{inputs.enriched.table.name}} table", type: "ACTION", - stepId: "QUERY_ROWS", + stepId: AutomationActionStepId.QUERY_ROWS, internal: true, inputs: {}, schema: { @@ -54,8 +61,8 @@ exports.definition = { sortOrder: { type: "string", title: "Sort Order", - enum: Object.values(SortOrders), - pretty: Object.values(SortOrdersPretty), + enum: Object.values(SortOrder), + pretty: Object.values(SortOrderPretty), }, limit: { type: "number", @@ -63,8 +70,8 @@ exports.definition = { customType: "queryLimit", }, onEmptyFilter: { - pretty: Object.values(EmptyFilterOptionsPretty), - enum: Object.values(EmptyFilterOptions), + pretty: Object.values(EmptyFilterOptionPretty), + 
enum: Object.values(EmptyFilterOption), type: "string", title: "When Filter Empty", }, @@ -88,8 +95,8 @@ exports.definition = { }, } -async function getTable(appId, tableId) { - const ctx = buildCtx(appId, null, { +async function getTable(appId: string, tableId: string) { + const ctx: any = buildCtx(appId, null, { params: { tableId, }, @@ -98,20 +105,22 @@ async function getTable(appId, tableId) { return ctx.body } -function typeCoercion(filters, table) { +function typeCoercion(filters: SearchFilters, table: Table) { if (!filters || !table) { return filters } for (let key of Object.keys(filters)) { - if (typeof filters[key] === "object") { - for (let [property, value] of Object.entries(filters[key])) { + // @ts-ignore + const searchParam = filters[key] + if (typeof searchParam === "object") { + for (let [property, value] of Object.entries(searchParam)) { const column = table.schema[property] // convert string inputs if (!column || typeof value !== "string") { continue } if (column.type === FieldTypes.NUMBER) { - filters[key][property] = parseFloat(value) + searchParam[property] = parseFloat(value) } } } @@ -119,11 +128,14 @@ function typeCoercion(filters, table) { return filters } -const hasNullFilters = filters => - filters.length === 0 || - filters.some(filter => filter.value === null || filter.value === "") +function hasNullFilters(filters: any[]) { + return ( + filters.length === 0 || + filters.some(filter => filter.value === null || filter.value === "") + ) +} -exports.run = async function ({ inputs, appId }) { +export async function run({ inputs, appId }: AutomationStepInput) { const { tableId, filters, sortColumn, sortOrder, limit } = inputs if (!tableId) { return { @@ -140,7 +152,7 @@ exports.run = async function ({ inputs, appId }) { sortType = fieldType === FieldTypes.NUMBER ? 
FieldTypes.NUMBER : FieldTypes.STRING } - const ctx = buildCtx(appId, null, { + const ctx: any = buildCtx(appId, null, { params: { tableId, }, @@ -150,7 +162,7 @@ exports.run = async function ({ inputs, appId }) { sort: sortColumn, query: typeCoercion(filters || {}, table), // default to ascending, like data tab - sortOrder: sortOrder || SortOrders.ASCENDING, + sortOrder: sortOrder || SortOrder.ASCENDING, }, version: "1", }) @@ -158,7 +170,7 @@ exports.run = async function ({ inputs, appId }) { let rows if ( - inputs.onEmptyFilter === EmptyFilterOptions.RETURN_NONE && + inputs.onEmptyFilter === EmptyFilterOption.RETURN_NONE && inputs["filters-def"] && hasNullFilters(inputs["filters-def"]) ) { diff --git a/packages/server/src/automations/steps/sendSmtpEmail.js b/packages/server/src/automations/steps/sendSmtpEmail.ts similarity index 80% rename from packages/server/src/automations/steps/sendSmtpEmail.js rename to packages/server/src/automations/steps/sendSmtpEmail.ts index 423363701b..67516c803d 100644 --- a/packages/server/src/automations/steps/sendSmtpEmail.js +++ b/packages/server/src/automations/steps/sendSmtpEmail.ts @@ -1,14 +1,19 @@ -const { sendSmtpEmail } = require("../../utilities/workerRequests") -const automationUtils = require("../automationUtils") +import { sendSmtpEmail } from "../../utilities/workerRequests" +import * as automationUtils from "../automationUtils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { description: "Send an email using SMTP", tagline: "Send SMTP email to {{inputs.to}}", icon: "Email", name: "Send Email (SMTP)", type: "ACTION", internal: true, - stepId: "SEND_EMAIL_SMTP", + stepId: AutomationActionStepId.SEND_EMAIL_SMTP, inputs: {}, schema: { inputs: { @@ -56,7 +61,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { let { to, from, subject, contents, cc, bcc } = inputs if (!contents) { contents = "
<h1>No content</h1>
" diff --git a/packages/server/src/automations/steps/serverLog.js b/packages/server/src/automations/steps/serverLog.ts similarity index 79% rename from packages/server/src/automations/steps/serverLog.js rename to packages/server/src/automations/steps/serverLog.ts index 19bbc36abf..bb2f49ede8 100644 --- a/packages/server/src/automations/steps/serverLog.js +++ b/packages/server/src/automations/steps/serverLog.ts @@ -1,17 +1,23 @@ +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" + /** * Note, there is some functionality in this that is not currently exposed as it * is complex and maybe better to be opinionated here. * GET/DELETE requests cannot handle body elements so they will not be sent if configured. */ -exports.definition = { +export const definition: AutomationStepSchema = { name: "Backend log", tagline: "Console log a value in the backend", icon: "Monitoring", description: "Logs the given text to the server (using console.log)", type: "ACTION", internal: true, - stepId: "SERVER_LOG", + stepId: AutomationActionStepId.SERVER_LOG, inputs: { text: "", }, @@ -41,7 +47,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs, appId }) { +export async function run({ inputs, appId }: AutomationStepInput) { const message = `App ${appId} - ${inputs.text}` console.log(message) return { diff --git a/packages/server/src/automations/steps/slack.js b/packages/server/src/automations/steps/slack.ts similarity index 78% rename from packages/server/src/automations/steps/slack.js rename to packages/server/src/automations/steps/slack.ts index ec6341a26f..47c66bebf3 100644 --- a/packages/server/src/automations/steps/slack.js +++ b/packages/server/src/automations/steps/slack.ts @@ -1,12 +1,17 @@ -const fetch = require("node-fetch") -const { getFetchResponse } = require("./utils") +import fetch from "node-fetch" +import { getFetchResponse } from "./utils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "Slack Message", tagline: "Send a message to Slack", description: "Send a message to Slack", icon: "ri-slack-line", - stepId: "slack", + stepId: AutomationActionStepId.slack, type: "ACTION", internal: false, inputs: {}, @@ -43,7 +48,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { let { url, text } = inputs const response = await fetch(url, { method: "post", diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.ts similarity index 82% rename from packages/server/src/automations/steps/updateRow.js rename to packages/server/src/automations/steps/updateRow.ts index 5a2c158c5f..953313986e 100644 --- a/packages/server/src/automations/steps/updateRow.js +++ b/packages/server/src/automations/steps/updateRow.ts @@ -1,15 +1,20 @@ -const rowController = require("../../api/controllers/row") -const automationUtils = require("../automationUtils") -const { buildCtx } = require("./utils") +import * as rowController from "../../api/controllers/row" +import * as automationUtils from "../automationUtils" +import { buildCtx } from "./utils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "Update Row", tagline: "Update a 
{{inputs.enriched.table.name}} row", icon: "Refresh", description: "Update a row in your database", type: "ACTION", internal: true, - stepId: "UPDATE_ROW", + stepId: AutomationActionStepId.UPDATE_ROW, inputs: {}, schema: { inputs: { @@ -55,7 +60,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs, appId, emitter }) { +export async function run({ inputs, appId, emitter }: AutomationStepInput) { if (inputs.rowId == null || inputs.row == null) { return { success: false, @@ -74,7 +79,7 @@ exports.run = async function ({ inputs, appId, emitter }) { } // have to clean up the row, remove the table from it - const ctx = buildCtx(appId, emitter, { + const ctx: any = buildCtx(appId, emitter, { body: { ...inputs.row, _id: inputs.rowId, diff --git a/packages/server/src/automations/steps/utils.js b/packages/server/src/automations/steps/utils.ts similarity index 76% rename from packages/server/src/automations/steps/utils.js rename to packages/server/src/automations/steps/utils.ts index ed9a441499..8b99044303 100644 --- a/packages/server/src/automations/steps/utils.js +++ b/packages/server/src/automations/steps/utils.ts @@ -1,4 +1,6 @@ -exports.getFetchResponse = async fetched => { +import { EventEmitter } from "events" + +export async function getFetchResponse(fetched: any) { let status = fetched.status, message const contentType = fetched.headers.get("content-type") @@ -18,12 +20,16 @@ exports.getFetchResponse = async fetched => { // throw added to them, so that controllers don't // throw a ctx.throw undefined when error occurs // opts can contain, body, params and version -exports.buildCtx = (appId, emitter, opts = {}) => { - const ctx = { +export function buildCtx( + appId: string, + emitter?: EventEmitter | null, + opts: any = {} +) { + const ctx: any = { appId, user: { appId }, eventEmitter: emitter, - throw: (code, error) => { + throw: (code: string, error: any) => { throw error }, } diff --git a/packages/server/src/automations/steps/zapier.js b/packages/server/src/automations/steps/zapier.ts similarity index 82% rename from packages/server/src/automations/steps/zapier.js rename to packages/server/src/automations/steps/zapier.ts index bec90497cd..1a48c1ec92 100644 --- a/packages/server/src/automations/steps/zapier.js +++ b/packages/server/src/automations/steps/zapier.ts @@ -1,14 +1,20 @@ -const fetch = require("node-fetch") -const { getFetchResponse } = require("./utils") +import fetch from "node-fetch" +import { getFetchResponse } from "./utils" +import { + AutomationActionStepId, + AutomationStepSchema, + AutomationStepInput, +} from "@budibase/types" -exports.definition = { +export const definition: AutomationStepSchema = { name: "Zapier Webhook", - stepId: "zapier", + stepId: AutomationActionStepId.zapier, type: "ACTION", internal: false, description: "Trigger a Zapier Zap via webhooks", tagline: "Trigger a Zapier Zap", icon: "ri-flashlight-line", + inputs: {}, schema: { inputs: { properties: { @@ -54,7 +60,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs }) { +export async function run({ inputs }: AutomationStepInput) { const { url, value1, value2, value3, value4, value5 } = inputs // send the platform to make sure zaps always work, even diff --git a/packages/server/src/automations/tests/utilities/index.js b/packages/server/src/automations/tests/utilities/index.ts similarity index 50% rename from packages/server/src/automations/tests/utilities/index.js rename to packages/server/src/automations/tests/utilities/index.ts index 1e705182dc..a18e931bab 
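
// getFetchResponse above normalises a node-fetch response into a status plus a
// parsed message (JSON only when the content-type allows it); usage sketch with
// a hypothetical webhook URL, matching how the discord/slack/zapier steps use it.
import fetch from "node-fetch"
import { getFetchResponse } from "./utils"

async function example() {
  const response = await fetch("https://example.com/hook", { method: "post" })
  const { status, message } = await getFetchResponse(response)
  return { httpStatus: status, response: message, success: status === 200 }
}
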
100644 --- a/packages/server/src/automations/tests/utilities/index.js +++ b/packages/server/src/automations/tests/utilities/index.ts @@ -1,23 +1,23 @@ -const TestConfig = require("../../../tests/utilities/TestConfiguration") -const { context } = require("@budibase/backend-core") -const actions = require("../../actions") -const emitter = require("../../../events/index") -const env = require("../../../environment") +import TestConfig from "../../../tests/utilities/TestConfiguration" +import { context } from "@budibase/backend-core" +import { ACTION_DEFINITIONS, getAction } from "../../actions" +import emitter from "../../../events/index" +import env from "../../../environment" -let config +let config: any -exports.getConfig = () => { +export function getConfig() { if (!config) { config = new TestConfig(false) } return config } -exports.afterAll = () => { +export function afterAll() { config.end() } -exports.runInProd = async fn => { +export async function runInProd(fn: any) { env._set("NODE_ENV", "production") let error try { @@ -31,15 +31,19 @@ exports.runInProd = async fn => { } } -exports.runStep = async function runStep(stepId, inputs) { +export async function runStep(stepId: string, inputs: any) { async function run() { - let step = await actions.getAction(stepId) + let step = await getAction(stepId) expect(step).toBeDefined() + if (!step) { + throw new Error("No step found") + } return step({ + context: {}, inputs, appId: config ? config.getAppId() : null, // don't really need an API key, mocked out usage quota, not being tested here - apiKey: exports.apiKey, + apiKey, emitter, }) } @@ -52,6 +56,5 @@ exports.runStep = async function runStep(stepId, inputs) { } } -exports.apiKey = "test" - -exports.actions = actions.ACTION_DEFINITIONS +export const apiKey = "test" +export const actions = ACTION_DEFINITIONS diff --git a/packages/server/src/automations/triggerInfo/app.js b/packages/server/src/automations/triggerInfo/app.ts similarity index 78% rename from packages/server/src/automations/triggerInfo/app.js rename to packages/server/src/automations/triggerInfo/app.ts index b1b07e9d5b..fca9acaef8 100644 --- a/packages/server/src/automations/triggerInfo/app.js +++ b/packages/server/src/automations/triggerInfo/app.ts @@ -1,10 +1,15 @@ -exports.definition = { +import { + AutomationTriggerSchema, + AutomationTriggerStepId, +} from "@budibase/types" + +export const definition: AutomationTriggerSchema = { name: "App Action", event: "app:trigger", icon: "Apps", tagline: "Automation fired from the frontend", description: "Trigger an automation from an action inside your app", - stepId: "APP", + stepId: AutomationTriggerStepId.APP, inputs: {}, schema: { inputs: { diff --git a/packages/server/src/automations/triggerInfo/cron.js b/packages/server/src/automations/triggerInfo/cron.ts similarity index 77% rename from packages/server/src/automations/triggerInfo/cron.js rename to packages/server/src/automations/triggerInfo/cron.ts index 9547da69fa..91b41f7243 100644 --- a/packages/server/src/automations/triggerInfo/cron.js +++ b/packages/server/src/automations/triggerInfo/cron.ts @@ -1,10 +1,15 @@ -exports.definition = { +import { + AutomationTriggerSchema, + AutomationTriggerStepId, +} from "@budibase/types" + +export const definition: AutomationTriggerSchema = { name: "Cron Trigger", event: "cron:trigger", icon: "Clock", tagline: "Cron Trigger ({{inputs.cron}})", description: "Triggers automation on a cron schedule.", - stepId: "CRON", + stepId: AutomationTriggerStepId.CRON, inputs: {}, schema: { inputs: 
{ diff --git a/packages/server/src/automations/triggerInfo/index.js b/packages/server/src/automations/triggerInfo/index.js deleted file mode 100644 index 066993d324..0000000000 --- a/packages/server/src/automations/triggerInfo/index.js +++ /dev/null @@ -1,15 +0,0 @@ -const app = require("./app") -const cron = require("./cron") -const rowDeleted = require("./rowDeleted") -const rowSaved = require("./rowSaved") -const rowUpdated = require("./rowUpdated") -const webhook = require("./webhook") - -exports.definitions = { - ROW_SAVED: rowSaved.definition, - ROW_UPDATED: rowUpdated.definition, - ROW_DELETED: rowDeleted.definition, - WEBHOOK: webhook.definition, - APP: app.definition, - CRON: cron.definition, -} diff --git a/packages/server/src/automations/triggerInfo/index.ts b/packages/server/src/automations/triggerInfo/index.ts new file mode 100644 index 0000000000..b35d915ea8 --- /dev/null +++ b/packages/server/src/automations/triggerInfo/index.ts @@ -0,0 +1,15 @@ +import * as app from "./app" +import * as cron from "./cron" +import * as rowDeleted from "./rowDeleted" +import * as rowSaved from "./rowSaved" +import * as rowUpdated from "./rowUpdated" +import * as webhook from "./webhook" + +export const definitions = { + ROW_SAVED: rowSaved.definition, + ROW_UPDATED: rowUpdated.definition, + ROW_DELETED: rowDeleted.definition, + WEBHOOK: webhook.definition, + APP: app.definition, + CRON: cron.definition, +} diff --git a/packages/server/src/automations/triggerInfo/rowDeleted.js b/packages/server/src/automations/triggerInfo/rowDeleted.ts similarity index 77% rename from packages/server/src/automations/triggerInfo/rowDeleted.js rename to packages/server/src/automations/triggerInfo/rowDeleted.ts index 7a7de13b11..de4a1b0412 100644 --- a/packages/server/src/automations/triggerInfo/rowDeleted.js +++ b/packages/server/src/automations/triggerInfo/rowDeleted.ts @@ -1,10 +1,15 @@ -exports.definition = { +import { + AutomationTriggerSchema, + AutomationTriggerStepId, +} from "@budibase/types" + +export const definition: AutomationTriggerSchema = { name: "Row Deleted", event: "row:delete", icon: "TableRowRemoveCenter", tagline: "Row is deleted from {{inputs.enriched.table.name}}", description: "Fired when a row is deleted from your database", - stepId: "ROW_DELETED", + stepId: AutomationTriggerStepId.ROW_DELETED, inputs: {}, schema: { inputs: { diff --git a/packages/server/src/automations/triggerInfo/rowSaved.js b/packages/server/src/automations/triggerInfo/rowSaved.ts similarity index 82% rename from packages/server/src/automations/triggerInfo/rowSaved.js rename to packages/server/src/automations/triggerInfo/rowSaved.ts index d763ca0a8a..c1dde25eef 100644 --- a/packages/server/src/automations/triggerInfo/rowSaved.js +++ b/packages/server/src/automations/triggerInfo/rowSaved.ts @@ -1,10 +1,15 @@ -exports.definition = { +import { + AutomationTriggerSchema, + AutomationTriggerStepId, +} from "@budibase/types" + +export const definition: AutomationTriggerSchema = { name: "Row Created", event: "row:save", icon: "TableRowAddBottom", tagline: "Row is added to {{inputs.enriched.table.name}}", description: "Fired when a row is added to your database", - stepId: "ROW_SAVED", + stepId: AutomationTriggerStepId.ROW_SAVED, inputs: {}, schema: { inputs: { diff --git a/packages/server/src/automations/triggerInfo/rowUpdated.js b/packages/server/src/automations/triggerInfo/rowUpdated.ts similarity index 81% rename from packages/server/src/automations/triggerInfo/rowUpdated.js rename to 
packages/server/src/automations/triggerInfo/rowUpdated.ts index 182c7c810e..1bc8811d54 100644 --- a/packages/server/src/automations/triggerInfo/rowUpdated.js +++ b/packages/server/src/automations/triggerInfo/rowUpdated.ts @@ -1,10 +1,15 @@ -exports.definition = { +import { + AutomationTriggerSchema, + AutomationTriggerStepId, +} from "@budibase/types" + +export const definition: AutomationTriggerSchema = { name: "Row Updated", event: "row:update", icon: "Refresh", tagline: "Row is updated in {{inputs.enriched.table.name}}", description: "Fired when a row is updated in your database", - stepId: "ROW_UPDATED", + stepId: AutomationTriggerStepId.ROW_UPDATED, inputs: {}, schema: { inputs: { diff --git a/packages/server/src/automations/triggerInfo/webhook.js b/packages/server/src/automations/triggerInfo/webhook.ts similarity index 80% rename from packages/server/src/automations/triggerInfo/webhook.js rename to packages/server/src/automations/triggerInfo/webhook.ts index 56e139311c..906967a02a 100644 --- a/packages/server/src/automations/triggerInfo/webhook.js +++ b/packages/server/src/automations/triggerInfo/webhook.ts @@ -1,10 +1,15 @@ -exports.definition = { +import { + AutomationTriggerSchema, + AutomationTriggerStepId, +} from "@budibase/types" + +export const definition: AutomationTriggerSchema = { name: "Webhook", event: "web:trigger", icon: "Send", tagline: "Webhook endpoint is hit", description: "Trigger an automation when a HTTP POST webhook is hit", - stepId: "WEBHOOK", + stepId: AutomationTriggerStepId.WEBHOOK, inputs: {}, schema: { inputs: { diff --git a/packages/server/src/automations/triggers.js b/packages/server/src/automations/triggers.ts similarity index 77% rename from packages/server/src/automations/triggers.js rename to packages/server/src/automations/triggers.ts index 4f865d4df8..f4b34bc9e8 100644 --- a/packages/server/src/automations/triggers.js +++ b/packages/server/src/automations/triggers.ts @@ -1,16 +1,17 @@ -const emitter = require("../events/index") -const { getAutomationParams } = require("../db/utils") -const { coerce } = require("../utilities/rowProcessor") -const { definitions } = require("./triggerInfo") -const { isDevAppID } = require("../db/utils") +import emitter from "../events/index" +import { getAutomationParams } from "../db/utils" +import { coerce } from "../utilities/rowProcessor" +import { definitions } from "./triggerInfo" +import { isDevAppID } from "../db/utils" // need this to call directly, so we can get a response -const { automationQueue } = require("./bullboard") -const { checkTestFlag } = require("../utilities/redis") -const utils = require("./utils") -const env = require("../environment") -const { context, db: dbCore } = require("@budibase/backend-core") +import { automationQueue } from "./bullboard" +import { checkTestFlag } from "../utilities/redis" +import * as utils from "./utils" +import env from "../environment" +import { context, db as dbCore } from "@budibase/backend-core" +import { Automation, Row } from "@budibase/types" -const TRIGGER_DEFINITIONS = definitions +export const TRIGGER_DEFINITIONS = definitions const JOB_OPTS = { removeOnComplete: true, removeOnFail: true, @@ -24,12 +25,15 @@ async function getAllAutomations() { return automations.rows.map(row => row.doc) } -async function queueRelevantRowAutomations(event, eventType) { +async function queueRelevantRowAutomations( + event: { appId: string; row: Row }, + eventType: string +) { if (event.appId == null) { throw `No appId specified for ${eventType} - check event emitters.` 
} - context.doInAppContext(event.appId, async () => { + await context.doInAppContext(event.appId, async () => { let automations = await getAllAutomations() // filter down to the correct event type @@ -85,20 +89,20 @@ emitter.on("row:delete", async function (event) { await queueRelevantRowAutomations(event, "row:delete") }) -exports.externalTrigger = async function ( - automation, - params, - { getResponses } = {} +export async function externalTrigger( + automation: Automation, + params: { fields: Record }, + { getResponses }: { getResponses?: boolean } = {} ) { if ( automation.definition != null && automation.definition.trigger != null && automation.definition.trigger.stepId === definitions.APP.stepId && automation.definition.trigger.stepId === "APP" && - !(await checkTestFlag(automation._id)) + !(await checkTestFlag(automation._id!)) ) { // values are likely to be submitted as strings, so we shall convert to correct type - const coercedFields = {} + const coercedFields: any = {} const fields = automation.definition.trigger.inputs.fields for (let key of Object.keys(fields || {})) { coercedFields[key] = coerce(params.fields[key], fields[key]) @@ -113,7 +117,7 @@ exports.externalTrigger = async function ( } } -exports.rebootTrigger = async () => { +export async function rebootTrigger() { // reboot cron option is only available on the main thread at // startup and only usable in self host and single tenant environments if (env.isInThread() || !env.SELF_HOSTED || env.MULTI_TENANCY) { @@ -121,7 +125,10 @@ exports.rebootTrigger = async () => { } // iterate through all production apps, find the reboot crons // and trigger events for them - const appIds = await dbCore.getAllApps({ dev: false, idsOnly: true }) + const appIds = (await dbCore.getAllApps({ + dev: false, + idsOnly: true, + })) as string[] for (let prodAppId of appIds) { await context.doInAppContext(prodAppId, async () => { let automations = await getAllAutomations() @@ -142,5 +149,3 @@ exports.rebootTrigger = async () => { }) } } - -exports.TRIGGER_DEFINITIONS = TRIGGER_DEFINITIONS diff --git a/packages/server/src/constants/index.js b/packages/server/src/constants/index.js deleted file mode 100644 index 5f68cc26ed..0000000000 --- a/packages/server/src/constants/index.js +++ /dev/null @@ -1,206 +0,0 @@ -const { objectStore, roles, constants } = require("@budibase/backend-core") - -const FilterTypes = { - STRING: "string", - FUZZY: "fuzzy", - RANGE: "range", - EQUAL: "equal", - NOT_EQUAL: "notEqual", - EMPTY: "empty", - NOT_EMPTY: "notEmpty", - CONTAINS: "contains", - NOT_CONTAINS: "notContains", - ONE_OF: "oneOf", -} - -exports.FilterTypes = FilterTypes -exports.NoEmptyFilterStrings = [ - FilterTypes.STRING, - FilterTypes.FUZZY, - FilterTypes.EQUAL, - FilterTypes.NOT_EQUAL, - FilterTypes.CONTAINS, - FilterTypes.NOT_CONTAINS, -] - -exports.FieldTypes = { - STRING: "string", - BARCODEQR: "barcodeqr", - LONGFORM: "longform", - OPTIONS: "options", - NUMBER: "number", - BOOLEAN: "boolean", - ARRAY: "array", - DATETIME: "datetime", - ATTACHMENT: "attachment", - LINK: "link", - FORMULA: "formula", - AUTO: "auto", - JSON: "json", - INTERNAL: "internal", -} - -exports.CanSwitchTypes = [ - [exports.FieldTypes.JSON, exports.FieldTypes.ARRAY], - [ - exports.FieldTypes.STRING, - exports.FieldTypes.OPTIONS, - exports.FieldTypes.LONGFORM, - exports.FieldTypes.BARCODEQR, - ], - [exports.FieldTypes.BOOLEAN, exports.FieldTypes.NUMBER], -] - -exports.SwitchableTypes = exports.CanSwitchTypes.reduce((prev, current) => - prev ? 
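
// Invoking the converted externalTrigger above for an APP trigger; in practice
// the automation document is loaded from the app database, and the field names
// here are hypothetical.
import { externalTrigger } from "./triggers"
import { Automation } from "@budibase/types"

async function example(automation: Automation) {
  return externalTrigger(
    automation,
    { fields: { userId: "us_123" } },
    { getResponses: true }
  )
}
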
prev.concat(current) : current -) - -exports.RelationshipTypes = { - ONE_TO_MANY: "one-to-many", - MANY_TO_ONE: "many-to-one", - MANY_TO_MANY: "many-to-many", -} - -exports.FormulaTypes = { - STATIC: "static", - DYNAMIC: "dynamic", -} - -exports.AuthTypes = { - APP: "app", - BUILDER: "builder", - EXTERNAL: "external", -} - -exports.DataSourceOperation = { - CREATE: "CREATE", - READ: "READ", - UPDATE: "UPDATE", - DELETE: "DELETE", - BULK_CREATE: "BULK_CREATE", - CREATE_TABLE: "CREATE_TABLE", - UPDATE_TABLE: "UPDATE_TABLE", - DELETE_TABLE: "DELETE_TABLE", -} - -exports.DatasourceAuthTypes = { - GOOGLE: "google", -} - -exports.SortDirection = { - ASCENDING: "ASCENDING", - DESCENDING: "DESCENDING", -} - -exports.USERS_TABLE_SCHEMA = { - _id: "ta_users", - type: "table", - views: {}, - name: "Users", - // TODO: ADMIN PANEL - when implemented this doesn't need to be carried out - schema: { - email: { - type: exports.FieldTypes.STRING, - constraints: { - type: exports.FieldTypes.STRING, - email: true, - length: { - maximum: "", - }, - presence: true, - }, - fieldName: "email", - name: "email", - }, - firstName: { - name: "firstName", - fieldName: "firstName", - type: exports.FieldTypes.STRING, - constraints: { - type: exports.FieldTypes.STRING, - presence: false, - }, - }, - lastName: { - name: "lastName", - fieldName: "lastName", - type: exports.FieldTypes.STRING, - constraints: { - type: exports.FieldTypes.STRING, - presence: false, - }, - }, - roleId: { - fieldName: "roleId", - name: "roleId", - type: exports.FieldTypes.OPTIONS, - constraints: { - type: exports.FieldTypes.STRING, - presence: false, - inclusion: Object.values(roles.BUILTIN_ROLE_IDS), - }, - }, - status: { - fieldName: "status", - name: "status", - type: exports.FieldTypes.OPTIONS, - constraints: { - type: exports.FieldTypes.STRING, - presence: false, - inclusion: Object.values(constants.UserStatus), - }, - }, - }, - primaryDisplay: "email", -} - -exports.AutoFieldSubTypes = { - CREATED_BY: "createdBy", - CREATED_AT: "createdAt", - UPDATED_BY: "updatedBy", - UPDATED_AT: "updatedAt", - AUTO_ID: "autoID", -} - -exports.AutoFieldDefaultNames = { - CREATED_BY: "Created By", - CREATED_AT: "Created At", - UPDATED_BY: "Updated By", - UPDATED_AT: "Updated At", - AUTO_ID: "Auto ID", -} - -exports.OBJ_STORE_DIRECTORY = "/prod-budi-app-assets" -exports.BaseQueryVerbs = { - CREATE: "create", - READ: "read", - UPDATE: "update", - DELETE: "delete", -} - -exports.MetadataTypes = { - AUTOMATION_TEST_INPUT: "automationTestInput", - AUTOMATION_TEST_HISTORY: "automationTestHistory", -} - -exports.InvalidColumns = { - ID: "_id", - REV: "_rev", - TABLE_ID: "tableId", -} - -exports.BuildSchemaErrors = { - NO_KEY: "no_key", - INVALID_COLUMN: "invalid_column", -} - -exports.AutomationErrors = { - INCORRECT_TYPE: "INCORRECT_TYPE", - MAX_ITERATIONS: "MAX_ITERATIONS_REACHED", - FAILURE_CONDITION: "FAILURE_CONDITION_MET", -} - -// pass through the list from the auth/core lib -exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets - -exports.MAX_AUTOMATION_RECURRING_ERRORS = 5 diff --git a/packages/server/src/constants/index.ts b/packages/server/src/constants/index.ts new file mode 100644 index 0000000000..eb4c6211c6 --- /dev/null +++ b/packages/server/src/constants/index.ts @@ -0,0 +1,204 @@ +import { objectStore, roles, constants } from "@budibase/backend-core" + +export enum FilterTypes { + STRING = "string", + FUZZY = "fuzzy", + RANGE = "range", + EQUAL = "equal", + NOT_EQUAL = "notEqual", + EMPTY = "empty", + NOT_EMPTY = "notEmpty", + CONTAINS = 
"contains", + NOT_CONTAINS = "notContains", + ONE_OF = "oneOf", +} + +export const NoEmptyFilterStrings = [ + FilterTypes.STRING, + FilterTypes.FUZZY, + FilterTypes.EQUAL, + FilterTypes.NOT_EQUAL, + FilterTypes.CONTAINS, + FilterTypes.NOT_CONTAINS, +] + +export enum FieldTypes { + STRING = "string", + BARCODEQR = "barcodeqr", + LONGFORM = "longform", + OPTIONS = "options", + NUMBER = "number", + BOOLEAN = "boolean", + ARRAY = "array", + DATETIME = "datetime", + ATTACHMENT = "attachment", + LINK = "link", + FORMULA = "formula", + AUTO = "auto", + JSON = "json", + INTERNAL = "internal", +} + +export const CanSwitchTypes = [ + [exports.FieldTypes.JSON, exports.FieldTypes.ARRAY], + [ + exports.FieldTypes.STRING, + exports.FieldTypes.OPTIONS, + exports.FieldTypes.LONGFORM, + exports.FieldTypes.BARCODEQR, + ], + [exports.FieldTypes.BOOLEAN, exports.FieldTypes.NUMBER], +] + +export const SwitchableTypes = CanSwitchTypes.reduce((prev, current) => + prev ? prev.concat(current) : current +) + +export enum RelationshipTypes { + ONE_TO_MANY = "one-to-many", + MANY_TO_ONE = "many-to-one", + MANY_TO_MANY = "many-to-many", +} + +export enum FormulaTypes { + STATIC = "static", + DYNAMIC = "dynamic", +} + +export enum AuthTypes { + APP = "app", + BUILDER = "builder", + EXTERNAL = "external", +} + +export enum DataSourceOperation { + CREATE = "CREATE", + READ = "READ", + UPDATE = "UPDATE", + DELETE = "DELETE", + BULK_CREATE = "BULK_CREATE", + CREATE_TABLE = "CREATE_TABLE", + UPDATE_TABLE = "UPDATE_TABLE", + DELETE_TABLE = "DELETE_TABLE", +} + +export enum DatasourceAuthTypes { + GOOGLE = "google", +} + +export enum SortDirection { + ASCENDING = "ASCENDING", + DESCENDING = "DESCENDING", +} + +export const USERS_TABLE_SCHEMA = { + _id: "ta_users", + type: "table", + views: {}, + name: "Users", + // TODO: ADMIN PANEL - when implemented this doesn't need to be carried out + schema: { + email: { + type: exports.FieldTypes.STRING, + constraints: { + type: exports.FieldTypes.STRING, + email: true, + length: { + maximum: "", + }, + presence: true, + }, + fieldName: "email", + name: "email", + }, + firstName: { + name: "firstName", + fieldName: "firstName", + type: exports.FieldTypes.STRING, + constraints: { + type: exports.FieldTypes.STRING, + presence: false, + }, + }, + lastName: { + name: "lastName", + fieldName: "lastName", + type: exports.FieldTypes.STRING, + constraints: { + type: exports.FieldTypes.STRING, + presence: false, + }, + }, + roleId: { + fieldName: "roleId", + name: "roleId", + type: exports.FieldTypes.OPTIONS, + constraints: { + type: exports.FieldTypes.STRING, + presence: false, + inclusion: Object.values(roles.BUILTIN_ROLE_IDS), + }, + }, + status: { + fieldName: "status", + name: "status", + type: exports.FieldTypes.OPTIONS, + constraints: { + type: exports.FieldTypes.STRING, + presence: false, + inclusion: Object.values(constants.UserStatus), + }, + }, + }, + primaryDisplay: "email", +} + +export enum AutoFieldSubTypes { + CREATED_BY = "createdBy", + CREATED_AT = "createdAt", + UPDATED_BY = "updatedBy", + UPDATED_AT = "updatedAt", + AUTO_ID = "autoID", +} + +export enum AutoFieldDefaultNames { + CREATED_BY = "Created By", + CREATED_AT = "Created At", + UPDATED_BY = "Updated By", + UPDATED_AT = "Updated At", + AUTO_ID = "Auto ID", +} + +export const OBJ_STORE_DIRECTORY = "/prod-budi-app-assets" +export enum BaseQueryVerbs { + CREATE = "create", + READ = "read", + UPDATE = "update", + DELETE = "delete", +} + +export enum MetadataTypes { + AUTOMATION_TEST_INPUT = "automationTestInput", + 
AUTOMATION_TEST_HISTORY = "automationTestHistory", +} + +export enum InvalidColumns { + ID = "_id", + REV = "_rev", + TABLE_ID = "tableId", +} + +export enum BuildSchemaErrors { + NO_KEY = "no_key", + INVALID_COLUMN = "invalid_column", +} + +export enum AutomationErrors { + INCORRECT_TYPE = "INCORRECT_TYPE", + MAX_ITERATIONS = "MAX_ITERATIONS_REACHED", + FAILURE_CONDITION = "FAILURE_CONDITION_MET", +} + +// pass through the list from the auth/core lib +export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets +export const MAX_AUTOMATION_RECURRING_ERRORS = 5 diff --git a/packages/server/src/constants/layouts.js b/packages/server/src/constants/layouts.ts similarity index 95% rename from packages/server/src/constants/layouts.js rename to packages/server/src/constants/layouts.ts index 2402a1f7db..835a5d2e15 100644 --- a/packages/server/src/constants/layouts.js +++ b/packages/server/src/constants/layouts.ts @@ -1,9 +1,9 @@ -const BASE_LAYOUT_PROP_IDS = { +export const BASE_LAYOUT_PROP_IDS = { PRIVATE: "layout_private_master", PUBLIC: "layout_public_master", } -const EMPTY_LAYOUT = { +export const EMPTY_LAYOUT = { componentLibraries: ["@budibase/standard-components"], title: "{{ name }}", favicon: "./_shared/favicon.png", @@ -48,7 +48,7 @@ const EMPTY_LAYOUT = { }, } -const BASE_LAYOUTS = [ +export const BASE_LAYOUTS = [ { _id: BASE_LAYOUT_PROP_IDS.PRIVATE, componentLibraries: ["@budibase/standard-components"], @@ -145,9 +145,3 @@ const BASE_LAYOUTS = [ }, }, ] - -module.exports = { - BASE_LAYOUTS, - BASE_LAYOUT_PROP_IDS, - EMPTY_LAYOUT, -} diff --git a/packages/server/src/constants/screens.js b/packages/server/src/constants/screens.js deleted file mode 100644 index dc21c0d9bd..0000000000 --- a/packages/server/src/constants/screens.js +++ /dev/null @@ -1,46 +0,0 @@ -const { roles } = require("@budibase/backend-core") -const { BASE_LAYOUT_PROP_IDS } = require("./layouts") - -exports.createHomeScreen = () => ({ - description: "", - url: "", - layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE, - props: { - _id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59", - _component: "@budibase/standard-components/container", - _styles: { - normal: {}, - hover: {}, - active: {}, - selected: {}, - }, - _transition: "fade", - _children: [ - { - _id: "ef60083f-4a02-4df3-80f3-a0d3d16847e7", - _component: "@budibase/standard-components/heading", - _styles: { - hover: {}, - active: {}, - selected: {}, - }, - text: "Welcome to your Budibase App 👋", - size: "M", - align: "left", - _instanceName: "Heading", - _children: [], - }, - ], - _instanceName: "Home", - direction: "column", - hAlign: "stretch", - vAlign: "top", - size: "grow", - gap: "M", - }, - routing: { - route: "/", - roleId: roles.BUILTIN_ROLE_IDS.BASIC, - }, - name: "home-screen", -}) diff --git a/packages/server/src/constants/screens.ts b/packages/server/src/constants/screens.ts new file mode 100644 index 0000000000..23e36a65b8 --- /dev/null +++ b/packages/server/src/constants/screens.ts @@ -0,0 +1,48 @@ +import { roles } from "@budibase/backend-core" +import { BASE_LAYOUT_PROP_IDS } from "./layouts" + +export function createHomeScreen() { + return { + description: "", + url: "", + layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE, + props: { + _id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59", + _component: "@budibase/standard-components/container", + _styles: { + normal: {}, + hover: {}, + active: {}, + selected: {}, + }, + _transition: "fade", + _children: [ + { + _id: "ef60083f-4a02-4df3-80f3-a0d3d16847e7", + _component: "@budibase/standard-components/heading", + _styles: { + 
hover: {}, + active: {}, + selected: {}, + }, + text: "Welcome to your Budibase App 👋", + size: "M", + align: "left", + _instanceName: "Heading", + _children: [], + }, + ], + _instanceName: "Home", + direction: "column", + hAlign: "stretch", + vAlign: "top", + size: "grow", + gap: "M", + }, + routing: { + route: "/", + roleId: roles.BUILTIN_ROLE_IDS.BASIC, + }, + name: "home-screen", + } +} diff --git a/packages/server/src/db/defaultData/datasource_bb_default.js b/packages/server/src/db/defaultData/datasource_bb_default.ts similarity index 85% rename from packages/server/src/db/defaultData/datasource_bb_default.js rename to packages/server/src/db/defaultData/datasource_bb_default.ts index f12ee2c97d..a9fb214cb2 100644 --- a/packages/server/src/db/defaultData/datasource_bb_default.js +++ b/packages/server/src/db/defaultData/datasource_bb_default.ts @@ -1,31 +1,32 @@ -const { +import { FieldTypes, AutoFieldSubTypes, RelationshipTypes, -} = require("../../constants/index") -const { importToRows } = require("../../api/controllers/table/utils") -const { cloneDeep } = require("lodash/fp") -const LinkDocument = require("../linkedRows/LinkDocument") -const { inventoryImport } = require("./inventoryImport") -const { employeeImport } = require("./employeeImport") -const { jobsImport } = require("./jobsImport") -const { expensesImport } = require("./expensesImport") -const { db: dbCore } = require("@budibase/backend-core") +} from "../../constants" +import { importToRows } from "../../api/controllers/table/utils" +import { cloneDeep } from "lodash/fp" +import LinkDocument from "../linkedRows/LinkDocument" +import { inventoryImport } from "./inventoryImport" +import { employeeImport } from "./employeeImport" +import { jobsImport } from "./jobsImport" +import { expensesImport } from "./expensesImport" +import { db as dbCore } from "@budibase/backend-core" +import { Table, Row } from "@budibase/types" -exports.DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs" -exports.DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory" -exports.DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses" -exports.DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee" -exports.DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default" -exports.DEFAULT_BB_DATASOURCE = { - _id: this.DEFAULT_BB_DATASOURCE_ID, +export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs" +export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory" +export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses" +export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee" +export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default" +export const DEFAULT_BB_DATASOURCE = { + _id: DEFAULT_BB_DATASOURCE_ID, type: dbCore.BUDIBASE_DATASOURCE_TYPE, name: "Sample Data", source: "BUDIBASE", config: {}, } -const syncLastIds = (table, rowCount) => { +function syncLastIds(table: Table, rowCount: number) { Object.keys(table.schema).forEach(key => { const entry = table.schema[key] if (entry.autocolumn && entry.subtype == "autoID") { @@ -34,7 +35,7 @@ const syncLastIds = (table, rowCount) => { }) } -const tableImport = (table, data) => { +function tableImport(table: Table, data: Row) { const cloneTable = cloneDeep(table) const rowDocs = importToRows(data, cloneTable) syncLastIds(cloneTable, rowDocs.length) @@ -77,11 +78,11 @@ const AUTO_COLUMNS = { }, } -exports.DEFAULT_INVENTORY_TABLE_SCHEMA = { - _id: this.DEFAULT_INVENTORY_TABLE_ID, +export const DEFAULT_INVENTORY_TABLE_SCHEMA: Table = { + _id: DEFAULT_INVENTORY_TABLE_ID, type: "internal", views: {}, - sourceId: exports.DEFAULT_BB_DATASOURCE_ID, + 
sourceId: DEFAULT_BB_DATASOURCE_ID, primaryDisplay: "Item Name", name: "Inventory", schema: { @@ -186,12 +187,12 @@ exports.DEFAULT_INVENTORY_TABLE_SCHEMA = { }, } -exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = { - _id: this.DEFAULT_EMPLOYEE_TABLE_ID, +export const DEFAULT_EMPLOYEE_TABLE_SCHEMA = { + _id: DEFAULT_EMPLOYEE_TABLE_ID, type: "internal", views: {}, name: "Employees", - sourceId: exports.DEFAULT_BB_DATASOURCE_ID, + sourceId: DEFAULT_BB_DATASOURCE_ID, primaryDisplay: "First Name", schema: { "First Name": { @@ -300,7 +301,7 @@ exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = { fieldName: "Assigned", name: "Jobs", relationshipType: RelationshipTypes.MANY_TO_MANY, - tableId: this.DEFAULT_JOBS_TABLE_ID, + tableId: DEFAULT_JOBS_TABLE_ID, }, "Start Date": { type: FieldTypes.DATETIME, @@ -334,11 +335,11 @@ exports.DEFAULT_EMPLOYEE_TABLE_SCHEMA = { }, } -exports.DEFAULT_JOBS_TABLE_SCHEMA = { - _id: this.DEFAULT_JOBS_TABLE_ID, +export const DEFAULT_JOBS_TABLE_SCHEMA: Table = { + _id: DEFAULT_JOBS_TABLE_ID, type: "internal", name: "Jobs", - sourceId: exports.DEFAULT_BB_DATASOURCE_ID, + sourceId: DEFAULT_BB_DATASOURCE_ID, primaryDisplay: "Job ID", schema: { "Job ID": { @@ -456,7 +457,7 @@ exports.DEFAULT_JOBS_TABLE_SCHEMA = { Assigned: { name: "Assigned", type: FieldTypes.LINK, - tableId: this.DEFAULT_EMPLOYEE_TABLE_ID, + tableId: DEFAULT_EMPLOYEE_TABLE_ID, fieldName: "Jobs", relationshipType: RelationshipTypes.MANY_TO_MANY, // sortable: true, @@ -491,12 +492,12 @@ exports.DEFAULT_JOBS_TABLE_SCHEMA = { }, } -exports.DEFAULT_EXPENSES_TABLE_SCHEMA = { - _id: this.DEFAULT_EXPENSES_TABLE_ID, +export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = { + _id: DEFAULT_EXPENSES_TABLE_ID, type: "internal", views: {}, name: "Expenses", - sourceId: exports.DEFAULT_BB_DATASOURCE_ID, + sourceId: DEFAULT_BB_DATASOURCE_ID, primaryDisplay: "Expense ID", schema: { "Expense ID": { @@ -601,38 +602,40 @@ exports.DEFAULT_EXPENSES_TABLE_SCHEMA = { }, } -exports.buildDefaultDocs = () => { +export function buildDefaultDocs() { const inventoryData = tableImport( - this.DEFAULT_INVENTORY_TABLE_SCHEMA, + DEFAULT_INVENTORY_TABLE_SCHEMA, inventoryImport ) const employeeData = tableImport( - this.DEFAULT_EMPLOYEE_TABLE_SCHEMA, + DEFAULT_EMPLOYEE_TABLE_SCHEMA, employeeImport ) - const jobData = tableImport(this.DEFAULT_JOBS_TABLE_SCHEMA, jobsImport) + const jobData = tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport) const expensesData = tableImport( - this.DEFAULT_EXPENSES_TABLE_SCHEMA, + DEFAULT_EXPENSES_TABLE_SCHEMA, expensesImport ) // Build one link doc for each employee/job - const jobEmployeeLinks = employeeData.rows.map((employee, index) => { - return new LinkDocument( - employeeData.table._id, - "Jobs", - employeeData.rows[index]._id, - jobData.table._id, - "Assigned", - jobData.rows[index]._id - ) - }) + const jobEmployeeLinks = employeeData.rows.map( + (employee: any, index: any) => { + return new LinkDocument( + employeeData.table._id!, + "Jobs", + employeeData.rows[index]._id, + jobData.table._id!, + "Assigned", + jobData.rows[index]._id + ) + } + ) return [ - this.DEFAULT_BB_DATASOURCE, + DEFAULT_BB_DATASOURCE, inventoryData.table, employeeData.table, jobData.table, diff --git a/packages/server/src/db/defaultData/employeeImport.js b/packages/server/src/db/defaultData/employeeImport.ts similarity index 99% rename from packages/server/src/db/defaultData/employeeImport.js rename to packages/server/src/db/defaultData/employeeImport.ts index eae43e182a..d2bab601a0 100644 --- a/packages/server/src/db/defaultData/employeeImport.js 
+++ b/packages/server/src/db/defaultData/employeeImport.ts @@ -1,4 +1,4 @@ -exports.employeeImport = [ +export const employeeImport = [ { "First Name": "Julie", "Last Name": "Jimenez", diff --git a/packages/server/src/db/defaultData/expensesImport.js b/packages/server/src/db/defaultData/expensesImport.ts similarity index 99% rename from packages/server/src/db/defaultData/expensesImport.js rename to packages/server/src/db/defaultData/expensesImport.ts index 2afa147321..002be3e1f9 100644 --- a/packages/server/src/db/defaultData/expensesImport.js +++ b/packages/server/src/db/defaultData/expensesImport.ts @@ -1,4 +1,4 @@ -exports.expensesImport = [ +export const expensesImport = [ { "Date Paid": "2022-11-12T12:00:00.000", "Payment Due": "2022-11-01T12:00:00.000", diff --git a/packages/server/src/db/defaultData/inventoryImport.js b/packages/server/src/db/defaultData/inventoryImport.ts similarity index 98% rename from packages/server/src/db/defaultData/inventoryImport.js rename to packages/server/src/db/defaultData/inventoryImport.ts index 3d4cf98ee4..dae5a09feb 100644 --- a/packages/server/src/db/defaultData/inventoryImport.js +++ b/packages/server/src/db/defaultData/inventoryImport.ts @@ -1,4 +1,4 @@ -exports.inventoryImport = [ +export const inventoryImport = [ { Status: ["Available"], "Item Name": "Little Blue Van", diff --git a/packages/server/src/db/defaultData/jobsImport.js b/packages/server/src/db/defaultData/jobsImport.ts similarity index 99% rename from packages/server/src/db/defaultData/jobsImport.js rename to packages/server/src/db/defaultData/jobsImport.ts index 537cc2a006..ac530afb17 100644 --- a/packages/server/src/db/defaultData/jobsImport.js +++ b/packages/server/src/db/defaultData/jobsImport.ts @@ -1,4 +1,4 @@ -exports.jobsImport = [ +export const jobsImport = [ { "Works End": "2023-01-28T12:00:00.000", "Customer Email": "susie.peterson@example.com", diff --git a/packages/server/src/db/dynamoClient.js b/packages/server/src/db/dynamoClient.ts similarity index 73% rename from packages/server/src/db/dynamoClient.js rename to packages/server/src/db/dynamoClient.ts index 12e53ff1fd..cb045b7d6f 100644 --- a/packages/server/src/db/dynamoClient.js +++ b/packages/server/src/db/dynamoClient.ts @@ -1,8 +1,7 @@ -let { merge } = require("lodash") -let env = require("../environment") +import { merge } from "lodash" +import env from "../environment" -const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1" -exports.AWS_REGION = AWS_REGION +export const AWS_REGION = env.AWS_REGION ? 
env.AWS_REGION : "eu-west-1" const TableInfo = { API_KEYS: { @@ -16,10 +15,36 @@ const TableInfo = { }, } -let docClient = null +let docClient: any = null + +type GetOpts = { + primary: string + sort?: string + otherProps?: any +} + +type UpdateOpts = { + primary: string + sort?: string + expression?: string + condition?: string + names?: string[] + values?: any[] + exists?: boolean + otherProps?: any +} + +type PutOpts = { + item: any + otherProps?: any +} class Table { - constructor(tableInfo) { + _name: string + _primary: string + _sort?: string + + constructor(tableInfo: { name: string; primary: string; sort?: string }) { if (!tableInfo.name || !tableInfo.primary) { throw "Table info must specify a name and a primary key" } @@ -28,7 +53,7 @@ class Table { this._sort = tableInfo.sort } - async get({ primary, sort, otherProps }) { + async get({ primary, sort, otherProps }: GetOpts) { let params = { TableName: this._name, Key: { @@ -54,8 +79,8 @@ class Table { values, exists, otherProps, - }) { - let params = { + }: UpdateOpts) { + let params: any = { TableName: this._name, Key: { [this._primary]: primary, @@ -83,7 +108,7 @@ class Table { return docClient.update(params).promise() } - async put({ item, otherProps }) { + async put({ item, otherProps }: PutOpts) { if ( item[this._primary] == null || (this._sort && item[this._sort] == null) @@ -101,9 +126,9 @@ class Table { } } -exports.init = endpoint => { +export function init(endpoint: string) { let AWS = require("aws-sdk") - let docClientParams = { + let docClientParams: any = { correctClockSkew: true, region: AWS_REGION, } @@ -115,13 +140,8 @@ exports.init = endpoint => { docClient = new AWS.DynamoDB.DocumentClient(docClientParams) } -exports.apiKeyTable = new Table(TableInfo.API_KEYS) -exports.userTable = new Table(TableInfo.USERS) - -if (env.isProd()) { - exports.init(`https://dynamodb.${AWS_REGION}.amazonaws.com`) -} else { +if (!env.isProd()) { env._set("AWS_ACCESS_KEY_ID", "KEY_ID") env._set("AWS_SECRET_ACCESS_KEY", "SECRET_KEY") - exports.init("http://localhost:8333") + init("http://localhost:8333") } diff --git a/packages/server/src/db/inMemoryView.js b/packages/server/src/db/inMemoryView.ts similarity index 68% rename from packages/server/src/db/inMemoryView.js rename to packages/server/src/db/inMemoryView.ts index 278b906e24..3c96aa26bd 100644 --- a/packages/server/src/db/inMemoryView.js +++ b/packages/server/src/db/inMemoryView.ts @@ -1,11 +1,17 @@ -const newid = require("./newid") +import newid from "./newid" +import { Row, View, Document } from "@budibase/types" // bypass the main application db config // use in memory pouchdb directly -const { db: dbCore } = require("@budibase/backend-core") +import { db as dbCore } from "@budibase/backend-core" const Pouch = dbCore.getPouch({ inMemory: true }) -exports.runView = async (view, calculation, group, data) => { +export async function runView( + view: View, + calculation: string, + group: boolean, + data: Row[] +) { // use a different ID each time for the DB, make sure they // are always unique for each query, don't want overlap // which could cause 409s @@ -18,16 +24,16 @@ exports.runView = async (view, calculation, group, data) => { _rev: undefined, })) ) - let fn = (doc, emit) => emit(doc._id) - eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)")) - const queryFns = { + let fn = (doc: Document, emit: any) => emit(doc._id) + eval("fn = " + view?.map?.replace("function (doc)", "function (doc, emit)")) + const queryFns: any = { meta: view.meta, map: fn, } if 
(view.reduce) { queryFns.reduce = view.reduce } - const response = await db.query(queryFns, { + const response: { rows: Row[] } = await db.query(queryFns, { include_docs: !calculation, group: !!group, }) diff --git a/packages/server/src/db/index.js b/packages/server/src/db/index.js deleted file mode 100644 index 381c295d18..0000000000 --- a/packages/server/src/db/index.js +++ /dev/null @@ -1,16 +0,0 @@ -const core = require("@budibase/backend-core") -const env = require("../environment") - -exports.init = () => { - const dbConfig = { - replication: true, - find: true, - } - - if (env.isTest() && !env.COUCH_DB_URL) { - dbConfig.inMemory = true - dbConfig.allDbs = true - } - - core.init({ db: dbConfig }) -} diff --git a/packages/server/src/db/index.ts b/packages/server/src/db/index.ts new file mode 100644 index 0000000000..60ea2f9987 --- /dev/null +++ b/packages/server/src/db/index.ts @@ -0,0 +1,16 @@ +import { init as coreInit } from "@budibase/backend-core" +import env = require("../environment") + +export function init() { + const dbConfig: any = { + replication: true, + find: true, + } + + if (env.isTest() && !env.COUCH_DB_URL) { + dbConfig.inMemory = true + dbConfig.allDbs = true + } + + coreInit({ db: dbConfig }) +} diff --git a/packages/server/src/db/linkedRows/LinkController.js b/packages/server/src/db/linkedRows/LinkController.ts similarity index 84% rename from packages/server/src/db/linkedRows/LinkController.js rename to packages/server/src/db/linkedRows/LinkController.ts index df24b97e85..690abc1feb 100644 --- a/packages/server/src/db/linkedRows/LinkController.js +++ b/packages/server/src/db/linkedRows/LinkController.ts @@ -1,12 +1,32 @@ -const { IncludeDocs, getLinkDocuments } = require("./linkUtils") -const { InternalTables, getUserMetadataParams } = require("../utils") -const Sentry = require("@sentry/node") -const { FieldTypes, RelationshipTypes } = require("../../constants") -const { context } = require("@budibase/backend-core") -const LinkDocument = require("./LinkDocument") +import { IncludeDocs, getLinkDocuments } from "./linkUtils" +import { InternalTables, getUserMetadataParams } from "../utils" +import Sentry from "@sentry/node" +import { FieldTypes, RelationshipTypes } from "../../constants" +import { context } from "@budibase/backend-core" +import LinkDocument from "./LinkDocument" +import { + Database, + FieldSchema, + LinkDocumentValue, + Row, + Table, +} from "@budibase/types" + +type LinkControllerOpts = { + tableId?: string + row?: Row + table?: Table + oldTable?: Table +} class LinkController { - constructor({ tableId, row, table, oldTable }) { + _db: Database + _tableId?: string + _row?: Row + _table?: Table + _oldTable?: Table + + constructor({ tableId, row, table, oldTable }: LinkControllerOpts) { this._db = context.getAppDB() this._tableId = tableId this._row = row @@ -24,7 +44,7 @@ class LinkController { this._table = this._table == null ? await this._db.get(this._tableId) : this._table } - return this._table + return this._table! } /** @@ -34,7 +54,7 @@ class LinkController { * @returns {Promise} True if there are any linked fields, otherwise it will return * false. */ - async doesTableHaveLinkedFields(table = null) { + async doesTableHaveLinkedFields(table?: Table) { if (table == null) { table = await this.table() } @@ -50,7 +70,7 @@ class LinkController { /** * Utility function for main getLinkDocuments function - refer to it for functionality. 
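   * @example
   * // Hedged sketch, not part of the upstream change: assuming an app context
   * // is active and "ro_abc" is a row in this controller's table,
   * //   const links = await controller.getRowLinkDocs("ro_abc")
   * // resolves to LinkDocumentValue entries of the shape
   * //   { id: "ro_def", thisId: "ro_abc", fieldName: "Jobs" }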
*/ - getRowLinkDocs(rowId) { + getRowLinkDocs(rowId: string) { return getLinkDocuments({ tableId: this._tableId, rowId, @@ -61,23 +81,23 @@ class LinkController { /** * Utility function for main getLinkDocuments function - refer to it for functionality. */ - getTableLinkDocs() { - return getLinkDocuments({ + async getTableLinkDocs() { + return (await getLinkDocuments({ tableId: this._tableId, includeDocs: IncludeDocs.INCLUDE, - }) + })) as LinkDocument[] } /** * Makes sure the passed in table schema contains valid relationship structures. */ - validateTable(table) { + validateTable(table: Table) { const usedAlready = [] for (let schema of Object.values(table.schema)) { if (schema.type !== FieldTypes.LINK) { continue } - const unique = schema.tableId + schema.fieldName + const unique = schema.tableId! + schema?.fieldName if (usedAlready.indexOf(unique) !== -1) { throw new Error( "Cannot re-use the linked column name for a linked table." @@ -90,7 +110,7 @@ class LinkController { /** * Returns whether the two link schemas are equal (in the important parts, not a pure equality check) */ - areLinkSchemasEqual(linkSchema1, linkSchema2) { + areLinkSchemasEqual(linkSchema1: FieldSchema, linkSchema2: FieldSchema) { const compareFields = [ "name", "type", @@ -100,6 +120,7 @@ class LinkController { "relationshipType", ] for (let field of compareFields) { + // @ts-ignore if (linkSchema1[field] !== linkSchema2[field]) { return false } @@ -111,7 +132,7 @@ class LinkController { * Given the link field of this table, and the link field of the linked table, this makes sure * the state of relationship type is accurate on both. */ - handleRelationshipType(linkerField, linkedField) { + handleRelationshipType(linkerField: FieldSchema, linkedField: FieldSchema) { if ( !linkerField.relationshipType || linkerField.relationshipType === RelationshipTypes.MANY_TO_MANY @@ -138,10 +159,10 @@ class LinkController { */ async rowSaved() { const table = await this.table() - const row = this._row + const row = this._row! const operations = [] // get link docs to compare against - const linkDocs = await this.getRowLinkDocs(row._id) + const linkDocs = (await this.getRowLinkDocs(row._id!)) as LinkDocument[] for (let fieldName of Object.keys(table.schema)) { // get the links this row wants to make const rowField = row[fieldName] @@ -161,30 +182,32 @@ class LinkController { // if 1:N, ensure that this ID is not already attached to another record const linkedTable = await this._db.get(field.tableId) - const linkedSchema = linkedTable.schema[field.fieldName] + const linkedSchema = linkedTable.schema[field.fieldName!] 
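      // Editor's note (hedged sketch, not part of the upstream change):
      // linkedSchema is the reverse link column on the other table. When that
      // reverse column is one-to-many, the lookup below fetches the link docs
      // already attached to the candidate row (excluding this row itself) so
      // the controller can enforce that the "one" side is never linked twice.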
// We need to map the global users to metadata in each app for relationships if (field.tableId === InternalTables.USER_METADATA) { const users = await this._db.allDocs(getUserMetadataParams(null, {})) const metadataRequired = rowField.filter( - userId => !users.rows.some(user => user.id === userId) + (userId: string) => !users.rows.some(user => user.id === userId) ) // ensure non-existing user metadata is created in the app DB await this._db.bulkDocs( - metadataRequired.map(userId => ({ _id: userId })) + metadataRequired.map((userId: string) => ({ _id: userId })) ) } // iterate through the link IDs in the row field, see if any don't exist already for (let linkId of rowField) { - if (linkedSchema.relationshipType === RelationshipTypes.ONE_TO_MANY) { + if ( + linkedSchema?.relationshipType === RelationshipTypes.ONE_TO_MANY + ) { let links = ( - await getLinkDocuments({ + (await getLinkDocuments({ tableId: field.tableId, rowId: linkId, includeDocs: IncludeDocs.EXCLUDE, - }) + })) as LinkDocumentValue[] ).filter( link => link.id !== row._id && link.fieldName === linkedSchema.name @@ -209,11 +232,11 @@ class LinkController { } operations.push( new LinkDocument( - table._id, + table._id!, fieldName, - row._id, - field.tableId, - field.fieldName, + row._id!, + field.tableId!, + field.fieldName!, linkId ) ) @@ -246,9 +269,9 @@ class LinkController { * be accurate. This also returns the row that was deleted. */ async rowDeleted() { - const row = this._row + const row = this._row! // need to get the full link docs to be be able to delete it - const linkDocs = await this.getRowLinkDocs(row._id) + const linkDocs = await this.getRowLinkDocs(row._id!) if (linkDocs.length === 0) { return null } @@ -267,13 +290,13 @@ class LinkController { * @param {string} fieldName The field to be removed from the table. * @returns {Promise} The table has now been updated. */ - async removeFieldFromTable(fieldName) { + async removeFieldFromTable(fieldName: string) { let oldTable = this._oldTable - let field = oldTable.schema[fieldName] + let field = oldTable?.schema[fieldName] as FieldSchema const linkDocs = await this.getTableLinkDocs() let toDelete = linkDocs.filter(linkDoc => { let correctFieldName = - linkDoc.doc1.tableId === oldTable._id + linkDoc.doc1.tableId === oldTable?._id ? 
linkDoc.doc1.fieldName : linkDoc.doc2.fieldName return correctFieldName === fieldName @@ -288,7 +311,9 @@ class LinkController { ) // remove schema from other table let linkedTable = await this._db.get(field.tableId) - delete linkedTable.schema[field.fieldName] + if (field.fieldName) { + delete linkedTable.schema[field.fieldName] + } await this._db.put(linkedTable) } @@ -305,7 +330,7 @@ class LinkController { const schema = table.schema for (let fieldName of Object.keys(schema)) { const field = schema[fieldName] - if (field.type === FieldTypes.LINK) { + if (field.type === FieldTypes.LINK && field.fieldName) { // handle this in a separate try catch, want // the put to bubble up as an error, if can't update // table for some reason @@ -362,8 +387,8 @@ class LinkController { const oldTable = this._oldTable // first start by checking if any link columns have been deleted const newTable = await this.table() - for (let fieldName of Object.keys(oldTable.schema)) { - const field = oldTable.schema[fieldName] + for (let fieldName of Object.keys(oldTable?.schema || {})) { + const field = oldTable?.schema[fieldName] as FieldSchema // this field has been removed from the table schema if ( field.type === FieldTypes.LINK && @@ -389,7 +414,7 @@ class LinkController { for (let fieldName of Object.keys(schema)) { const field = schema[fieldName] try { - if (field.type === FieldTypes.LINK) { + if (field.type === FieldTypes.LINK && field.fieldName) { const linkedTable = await this._db.get(field.tableId) delete linkedTable.schema[field.fieldName] await this._db.put(linkedTable) @@ -416,4 +441,4 @@ class LinkController { } } -module.exports = LinkController +export = LinkController diff --git a/packages/server/src/db/linkedRows/LinkDocument.js b/packages/server/src/db/linkedRows/LinkDocument.js deleted file mode 100644 index 58aa630adc..0000000000 --- a/packages/server/src/db/linkedRows/LinkDocument.js +++ /dev/null @@ -1,47 +0,0 @@ -const { generateLinkID } = require("../utils") -const { FieldTypes } = require("../../constants") - -/** - * Creates a new link document structure which can be put to the database. It is important to - * note that while this talks about linker/linked the link is bi-directional and for all intent - * and purposes it does not matter from which direction the link was initiated. - * @param {string} tableId1 The ID of the first table (the linker). - * @param {string} tableId2 The ID of the second table (the linked). - * @param {string} fieldName1 The name of the field in the linker table. - * @param {string} fieldName2 The name of the field in the linked table. - * @param {string} rowId1 The ID of the row which is acting as the linker. - * @param {string} rowId2 The ID of the row which is acting as the linked. 
- * @constructor - */ -function LinkDocument( - tableId1, - fieldName1, - rowId1, - tableId2, - fieldName2, - rowId2 -) { - // build the ID out of unique references to this link document - this._id = generateLinkID( - tableId1, - tableId2, - rowId1, - rowId2, - fieldName1, - fieldName2 - ) - // required for referencing in view - this.type = FieldTypes.LINK - this.doc1 = { - tableId: tableId1, - fieldName: fieldName1, - rowId: rowId1, - } - this.doc2 = { - tableId: tableId2, - fieldName: fieldName2, - rowId: rowId2, - } -} - -module.exports = LinkDocument diff --git a/packages/server/src/db/linkedRows/LinkDocument.ts b/packages/server/src/db/linkedRows/LinkDocument.ts new file mode 100644 index 0000000000..d90f08e78c --- /dev/null +++ b/packages/server/src/db/linkedRows/LinkDocument.ts @@ -0,0 +1,60 @@ +import { generateLinkID } from "../utils" +import { FieldTypes } from "../../constants" +import { LinkDocument } from "@budibase/types" + +/** + * Creates a new link document structure which can be put to the database. It is important to + * note that while this talks about linker/linked the link is bi-directional and for all intent + * and purposes it does not matter from which direction the link was initiated. + * @param {string} tableId1 The ID of the first table (the linker). + * @param {string} tableId2 The ID of the second table (the linked). + * @param {string} fieldName1 The name of the field in the linker table. + * @param {string} fieldName2 The name of the field in the linked table. + * @param {string} rowId1 The ID of the row which is acting as the linker. + * @param {string} rowId2 The ID of the row which is acting as the linked. + * @constructor + */ +class LinkDocumentImpl implements LinkDocument { + _id: string + type: string + doc1: { + rowId: string + fieldName: string + tableId: string + } + doc2: { + rowId: string + fieldName: string + tableId: string + } + constructor( + tableId1: string, + fieldName1: string, + rowId1: string, + tableId2: string, + fieldName2: string, + rowId2: string + ) { + this._id = generateLinkID( + tableId1, + tableId2, + rowId1, + rowId2, + fieldName1, + fieldName2 + ) + this.type = FieldTypes.LINK + this.doc1 = { + tableId: tableId1, + fieldName: fieldName1, + rowId: rowId1, + } + this.doc2 = { + tableId: tableId2, + fieldName: fieldName2, + rowId: rowId2, + } + } +} + +export = LinkDocumentImpl diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.ts similarity index 80% rename from packages/server/src/db/linkedRows/index.js rename to packages/server/src/db/linkedRows/index.ts index 1ee98f6148..a6ed7de161 100644 --- a/packages/server/src/db/linkedRows/index.js +++ b/packages/server/src/db/linkedRows/index.ts @@ -1,5 +1,5 @@ -const LinkController = require("./LinkController") -const { +import LinkController from "./LinkController" +import { IncludeDocs, getLinkDocuments, createLinkView, @@ -7,21 +7,24 @@ const { getRelatedTableForField, getLinkedTableIDs, getLinkedTable, -} = require("./linkUtils") -const { flatten } = require("lodash") -const { FieldTypes } = require("../../constants") -const { getMultiIDParams, USER_METDATA_PREFIX } = require("../../db/utils") -const { partition } = require("lodash") -const { getGlobalUsersFromMetadata } = require("../../utilities/global") -const { processFormulas } = require("../../utilities/rowProcessor/utils") -const { context } = require("@budibase/backend-core") +} from "./linkUtils" +import { flatten } from "lodash" +import { FieldTypes } from "../../constants" 
+import { getMultiIDParams, USER_METDATA_PREFIX } from "../utils" +import { partition } from "lodash" +import { getGlobalUsersFromMetadata } from "../../utilities/global" +import { processFormulas } from "../../utilities/rowProcessor" +import { context } from "@budibase/backend-core" +import { Table, Row, LinkDocumentValue } from "@budibase/types" + +export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils" /** * This functionality makes sure that when rows with links are created, updated or deleted they are processed * correctly - making sure that no stale links are left around and that all links have been made successfully. */ -const EventType = { +export const EventType = { ROW_SAVE: "row:save", ROW_UPDATE: "row:update", ROW_DELETE: "row:delete", @@ -30,13 +33,7 @@ const EventType = { TABLE_DELETE: "table:delete", } -exports.EventType = EventType -// re-export search here for ease of use -exports.IncludeDocs = IncludeDocs -exports.getLinkDocuments = getLinkDocuments -exports.createLinkView = createLinkView - -function clearRelationshipFields(table, rows) { +function clearRelationshipFields(table: Table, rows: Row[]) { for (let [key, field] of Object.entries(table.schema)) { if (field.type === FieldTypes.LINK) { rows = rows.map(row => { @@ -48,18 +45,17 @@ function clearRelationshipFields(table, rows) { return rows } -async function getLinksForRows(rows) { +async function getLinksForRows(rows: Row[]) { const tableIds = [...new Set(rows.map(el => el.tableId))] // start by getting all the link values for performance reasons + const promises = tableIds.map(tableId => + getLinkDocuments({ + tableId: tableId, + includeDocs: IncludeDocs.EXCLUDE, + }) + ) const responses = flatten( - await Promise.all( - tableIds.map(tableId => - getLinkDocuments({ - tableId: tableId, - includeDocs: IncludeDocs.EXCLUDE, - }) - ) - ) + (await Promise.all(promises)) as LinkDocumentValue[][] ) // have to get unique as the previous table query can // return duplicates, could be querying for both tables in a relation @@ -72,7 +68,7 @@ async function getLinksForRows(rows) { ) } -async function getFullLinkedDocs(links) { +async function getFullLinkedDocs(links: LinkDocumentValue[]) { // create DBs const db = context.getAppDB() const linkedRowIds = links.map(link => link.id) @@ -103,12 +99,18 @@ async function getFullLinkedDocs(links) { * @returns {Promise} When the update is complete this will respond successfully. Returns the row for * row operations and the table for table operations. */ -exports.updateLinks = async function (args) { +export async function updateLinks(args: { + tableId?: string + eventType: string + row?: Row + table?: Table + oldTable?: Table +}) { const { eventType, row, tableId, table, oldTable } = args const baseReturnObj = row == null ? table : row // make sure table ID is set if (tableId == null && table != null) { - args.tableId = table._id + args.tableId = table._id! } let linkController = new LinkController(args) try { @@ -146,7 +148,7 @@ exports.updateLinks = async function (args) { * @param {array} rows The rows which are to be enriched. * @return {Promise<*>} returns the rows with all of the enriched relationships on it. 
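 * @example
 * // hedged usage sketch (row source hypothetical, behaviour per the code below):
 * //   let rows = await attachFullLinkedDocs(table, fetchedRows)
 * //   rows[0]["Jobs"] -> array of full linked row documents rather than bare IDs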
*/ -exports.attachFullLinkedDocs = async (table, rows) => { +export async function attachFullLinkedDocs(table: Table, rows: Row[]) { const linkedTableIds = getLinkedTableIDs(table) if (linkedTableIds.length === 0) { return rows @@ -159,7 +161,7 @@ exports.attachFullLinkedDocs = async (table, rows) => { rows = clearRelationshipFields(table, rows) // now get the docs and combine into the rows let linked = await getFullLinkedDocs(links) - const linkedTables = [] + const linkedTables: Table[] = [] for (let row of rows) { for (let link of links.filter(link => link.thisId === row._id)) { if (row[link.fieldName] == null) { @@ -185,13 +187,16 @@ exports.attachFullLinkedDocs = async (table, rows) => { * @param {array} enriched The pre-enriched rows (full docs) which are to be squashed. * @returns {Promise} The rows after having their links squashed to only contain the ID and primary display. */ -exports.squashLinksToPrimaryDisplay = async (table, enriched) => { +export async function squashLinksToPrimaryDisplay( + table: Table, + enriched: Row[] +) { // will populate this as we find them const linkedTables = [table] for (let row of enriched) { // this only fetches the table if its not already in array - const rowTable = await getLinkedTable(row.tableId, linkedTables) - for (let [column, schema] of Object.entries(rowTable.schema)) { + const rowTable = await getLinkedTable(row.tableId!, linkedTables) + for (let [column, schema] of Object.entries(rowTable?.schema || {})) { if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) { continue } @@ -199,8 +204,8 @@ exports.squashLinksToPrimaryDisplay = async (table, enriched) => { for (let link of row[column]) { const linkTblId = link.tableId || getRelatedTableForField(table, column) const linkedTable = await getLinkedTable(linkTblId, linkedTables) - const obj = { _id: link._id } - if (link[linkedTable.primaryDisplay]) { + const obj: any = { _id: link._id } + if (linkedTable?.primaryDisplay && link[linkedTable.primaryDisplay]) { obj.primaryDisplay = link[linkedTable.primaryDisplay] } newLinks.push(obj) diff --git a/packages/server/src/db/linkedRows/linkUtils.js b/packages/server/src/db/linkedRows/linkUtils.ts similarity index 70% rename from packages/server/src/db/linkedRows/linkUtils.js rename to packages/server/src/db/linkedRows/linkUtils.ts index 25a1b5fcf8..c7db7d522a 100644 --- a/packages/server/src/db/linkedRows/linkUtils.js +++ b/packages/server/src/db/linkedRows/linkUtils.ts @@ -1,20 +1,24 @@ -const Sentry = require("@sentry/node") -const { ViewName, getQueryIndex } = require("../utils") -const { FieldTypes } = require("../../constants") -const { createLinkView } = require("../views/staticViews") -const { context } = require("@budibase/backend-core") +import { ViewName, getQueryIndex } from "../utils" +import { FieldTypes } from "../../constants" +import { createLinkView } from "../views/staticViews" +import { context, logging } from "@budibase/backend-core" +import { + FieldSchema, + LinkDocument, + LinkDocumentValue, + Table, +} from "@budibase/types" +export { createLinkView } from "../views/staticViews" /** * Only needed so that boolean parameters are being used for includeDocs * @type {{EXCLUDE: boolean, INCLUDE: boolean}} */ -exports.IncludeDocs = { +export const IncludeDocs = { INCLUDE: true, EXCLUDE: false, } -exports.createLinkView = createLinkView - /** * Gets the linking documents, not the linked documents themselves. * @param {string} args.tableId The table which we are searching for linked rows against. 
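 * @example
 * // sketch mirroring the call sites in index.ts above:
 * //   getLinkDocuments({ tableId, rowId, includeDocs: IncludeDocs.EXCLUDE })
 * // resolves to lightweight LinkDocumentValue entries ({ id, thisId, fieldName }),
 * // while IncludeDocs.INCLUDE returns the full LinkDocument rows.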
@@ -28,10 +32,14 @@ exports.createLinkView
  * @returns {Promise} This will return an array of the linking documents that were found
  * (if any).
  */
-exports.getLinkDocuments = async function (args) {
+export async function getLinkDocuments(args: {
+  tableId?: string
+  rowId?: string
+  includeDocs?: any
+}): Promise<LinkDocumentValue[] | LinkDocument[]> {
   const { tableId, rowId, includeDocs } = args
   const db = context.getAppDB()
-  let params
+  let params: any
   if (rowId != null) {
     params = { key: [tableId, rowId] }
   }
@@ -43,7 +51,7 @@
   try {
     let linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
     // filter to get unique entries
-    const foundIds = []
+    const foundIds: string[] = []
     linkRows = linkRows.filter(link => {
       // make sure anything unique is the correct key
      if (
@@ -60,35 +68,36 @@
    })
 
    if (includeDocs) {
-      return linkRows.map(row => row.doc)
+      return linkRows.map(row => row.doc) as LinkDocument[]
    } else {
-      return linkRows.map(row => row.value)
+      return linkRows.map(row => row.value) as LinkDocumentValue[]
    }
-  } catch (err) {
+  } catch (err: any) {
    // check if the view doesn't exist, it should for all new instances
    if (err != null && err.name === "not_found") {
-      await exports.createLinkView()
-      return exports.getLinkDocuments(arguments[0])
+      await createLinkView()
+      return getLinkDocuments(arguments[0])
    } else {
      /* istanbul ignore next */
-      Sentry.captureException(err)
+      logging.logAlert("Failed to get link documents", err)
+      throw err
    }
  }
}
 
-exports.getUniqueByProp = (array, prop) => {
+export function getUniqueByProp(array: any[], prop: string) {
   return array.filter((obj, pos, arr) => {
     return arr.map(mapObj => mapObj[prop]).indexOf(obj[prop]) === pos
   })
 }
 
-exports.getLinkedTableIDs = table => {
+export function getLinkedTableIDs(table: Table) {
   return Object.values(table.schema)
-    .filter(column => column.type === FieldTypes.LINK)
+    .filter((column: FieldSchema) => column.type === FieldTypes.LINK)
     .map(column => column.tableId)
 }
 
-exports.getLinkedTable = async (id, tables) => {
+export async function getLinkedTable(id: string, tables: Table[]) {
   const db = context.getAppDB()
   let linkedTable = tables.find(table => table._id === id)
   if (linkedTable) {
@@ -101,7 +110,7 @@
   return linkedTable
 }
 
-exports.getRelatedTableForField = (table, fieldName) => {
+export function getRelatedTableForField(table: Table, fieldName: string) {
   // look to see if its on the table, straight in the schema
   const field = table.schema[fieldName]
   if (field != null) {
diff --git a/packages/server/src/db/newid.js b/packages/server/src/db/newid.ts
similarity index 68%
rename from packages/server/src/db/newid.js
rename to packages/server/src/db/newid.ts
index b4f5e51c07..14a8305e8d 100644
--- a/packages/server/src/db/newid.js
+++ b/packages/server/src/db/newid.ts
@@ -1,5 +1,5 @@
 const { v4 } = require("uuid")
 
-module.exports = function () {
+export = function (): string {
   return v4().replace(/-/g, "")
 }
diff --git a/packages/server/src/db/views/staticViews.js b/packages/server/src/db/views/staticViews.ts
similarity index 84%
rename from packages/server/src/db/views/staticViews.js
rename to packages/server/src/db/views/staticViews.ts
index 10ad8fd410..4bccfebeee 100644
--- a/packages/server/src/db/views/staticViews.js
+++ b/packages/server/src/db/views/staticViews.ts
@@ -1,5 +1,6 @@
-const { context } = require("@budibase/backend-core")
-const { DocumentType, SEPARATOR,
ViewName, SearchIndexes } = require("../utils") +import { context } from "@budibase/backend-core" +import { DocumentType, SEPARATOR, ViewName, SearchIndexes } from "../utils" +import { LinkDocument, Row } from "@budibase/types" const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR /************************************************** @@ -19,16 +20,17 @@ const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR * @returns {Promise} The view now exists, please note that the next view of this query will actually build it, * so it may be slow. */ -exports.createLinkView = async () => { +export async function createLinkView() { const db = context.getAppDB() const designDoc = await db.get("_design/database") const view = { - map: function (doc) { + map: function (doc: LinkDocument) { // everything in this must remain constant as its going to Pouch, no external variables if (doc.type === "link") { let doc1 = doc.doc1 let doc2 = doc.doc2 // eslint-disable-next-line no-undef + // @ts-ignore emit([doc1.tableId, doc1.rowId], { id: doc2.rowId, thisId: doc1.rowId, @@ -37,6 +39,7 @@ exports.createLinkView = async () => { // if linking to same table can't emit twice if (doc1.tableId !== doc2.tableId) { // eslint-disable-next-line no-undef + // @ts-ignore emit([doc2.tableId, doc2.rowId], { id: doc1.rowId, thisId: doc2.rowId, @@ -53,7 +56,7 @@ exports.createLinkView = async () => { await db.put(designDoc) } -exports.createRoutingView = async () => { +export async function createRoutingView() { const db = context.getAppDB() const designDoc = await db.get("_design/database") const view = { @@ -74,7 +77,7 @@ exports.createRoutingView = async () => { await db.put(designDoc) } -async function searchIndex(indexName, fnString) { +async function searchIndex(indexName: string, fnString: string) { const db = context.getAppDB() const designDoc = await db.get("_design/database") designDoc.indexes = { @@ -86,11 +89,11 @@ async function searchIndex(indexName, fnString) { await db.put(designDoc) } -exports.createAllSearchIndex = async () => { +export async function createAllSearchIndex() { await searchIndex( SearchIndexes.ROWS, - function (doc) { - function idx(input, prev) { + function (doc: Row) { + function idx(input: Row, prev?: string) { for (let key of Object.keys(input)) { let idxKey = prev != null ? 
`${prev}.${key}` : key idxKey = idxKey.replace(/ /g, "_") @@ -98,6 +101,7 @@ exports.createAllSearchIndex = async () => { for (let val of input[key]) { if (typeof val !== "object") { // eslint-disable-next-line no-undef + // @ts-ignore index(idxKey, val, { store: true }) } } @@ -106,17 +110,20 @@ exports.createAllSearchIndex = async () => { } if (typeof input[key] === "string") { // eslint-disable-next-line no-undef + // @ts-ignore index(idxKey, input[key].toLowerCase(), { store: true }) } else if (typeof input[key] !== "object") { // eslint-disable-next-line no-undef + // @ts-ignore index(idxKey, input[key], { store: true }) } else { idx(input[key], idxKey) } } } - if (doc._id.startsWith("ro_")) { + if (doc._id!.startsWith("ro_")) { // eslint-disable-next-line no-undef + // @ts-ignore index("default", doc._id) idx(doc) } diff --git a/packages/server/src/environment.js b/packages/server/src/environment.ts similarity index 92% rename from packages/server/src/environment.js rename to packages/server/src/environment.ts index 72baab9167..bf5330f9b6 100644 --- a/packages/server/src/environment.js +++ b/packages/server/src/environment.ts @@ -1,4 +1,4 @@ -const { join } = require("path") +import { join } from "path" function isTest() { return isCypress() || isJest() @@ -28,7 +28,7 @@ if (!LOADED && isDev() && !isTest()) { LOADED = true } -function parseIntSafe(number) { +function parseIntSafe(number?: string) { if (number) { return parseInt(number) } @@ -36,7 +36,7 @@ function parseIntSafe(number) { let inThread = false -module.exports = { +const environment = { // important - prefer app port to generic port PORT: process.env.APP_PORT || process.env.PORT, JWT_SECRET: process.env.JWT_SECRET, @@ -86,9 +86,10 @@ module.exports = { SELF_HOSTED: process.env.SELF_HOSTED, // old CLIENT_ID: process.env.CLIENT_ID, - _set(key, value) { + _set(key: string, value: any) { process.env[key] = value - module.exports[key] = value + // @ts-ignore + environment[key] = value }, isTest, isJest, @@ -108,13 +109,16 @@ module.exports = { // threading can cause memory issues with node-ts in development if (isDev() && module.exports.DISABLE_THREADING == null) { - module.exports._set("DISABLE_THREADING", "1") + environment._set("DISABLE_THREADING", "1") } // clean up any environment variable edge cases for (let [key, value] of Object.entries(module.exports)) { // handle the edge case of "0" to disable an environment variable if (value === "0") { - module.exports[key] = 0 + // @ts-ignore + environment[key] = 0 } } + +export = environment diff --git a/packages/server/src/events/AutomationEmitter.js b/packages/server/src/events/AutomationEmitter.ts similarity index 73% rename from packages/server/src/events/AutomationEmitter.js rename to packages/server/src/events/AutomationEmitter.ts index 99345228ff..d5ebc041ff 100644 --- a/packages/server/src/events/AutomationEmitter.js +++ b/packages/server/src/events/AutomationEmitter.ts @@ -1,6 +1,7 @@ -const { rowEmission, tableEmission } = require("./utils") -const mainEmitter = require("./index") -const env = require("../environment") +import { rowEmission, tableEmission } from "./utils" +import mainEmitter from "./index" +import env from "../environment" +import { Table, Row } from "@budibase/types" // max number of automations that can chain on top of each other // TODO: in future make this configurable at the automation level @@ -13,14 +14,17 @@ const MAX_AUTOMATION_CHAIN = env.SELF_HOSTED ? 5 : 0 * from getting stuck endlessly chaining. 
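 * @example
 * // grounded in MAX_AUTOMATION_CHAIN above: when self hosted the cap is 5 and
 * // in cloud it is 0, so for example
 * //   new AutomationEmitter(5).emitRow("row:save", appId, row, table)
 * // is a no-op (5 >= MAX_AUTOMATION_CHAIN) and the chain stops there.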
*/ class AutomationEmitter { - constructor(chainCount) { + chainCount: number + metadata: { automationChainCount: number } + + constructor(chainCount: number) { this.chainCount = chainCount this.metadata = { automationChainCount: chainCount, } } - emitRow(eventName, appId, row, table = null) { + emitRow(eventName: string, appId: string, row: Row, table?: Table) { // don't emit even if we've reached max automation chain if (this.chainCount >= MAX_AUTOMATION_CHAIN) { return @@ -35,11 +39,12 @@ class AutomationEmitter { }) } - emitTable(eventName, appId, table = null) { + emitTable(eventName: string, appId: string, table?: Table) { // don't emit even if we've reached max automation chain if (this.chainCount > MAX_AUTOMATION_CHAIN) { return } + tableEmission({ emitter: mainEmitter, eventName, @@ -50,4 +55,4 @@ class AutomationEmitter { } } -module.exports = AutomationEmitter +export = AutomationEmitter diff --git a/packages/server/src/events/index.js b/packages/server/src/events/BudibaseEmitter.ts similarity index 64% rename from packages/server/src/events/index.js rename to packages/server/src/events/BudibaseEmitter.ts index fe51b0780a..8eb7bffd96 100644 --- a/packages/server/src/events/index.js +++ b/packages/server/src/events/BudibaseEmitter.ts @@ -1,5 +1,6 @@ -const EventEmitter = require("events").EventEmitter -const { rowEmission, tableEmission } = require("./utils") +import { EventEmitter } from "events" +import { rowEmission, tableEmission } from "./utils" +import { Table, Row } from "@budibase/types" /** * keeping event emitter in one central location as it might be used for things other than @@ -12,19 +13,17 @@ const { rowEmission, tableEmission } = require("./utils") * This is specifically quite important for template strings used in automations. 
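 * @example
 * // hedged sketch of the intended flow: automations subscribe on the shared
 * // instance (events/index.ts below) and emitRow fans the event out, e.g.
 * //   emitter.on("row:save", event => queueRelevantRowAutomations(event, "row:save"))
 * //   emitter.emitRow("row:save", appId, row, table)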
*/ class BudibaseEmitter extends EventEmitter { - emitRow(eventName, appId, row, table = null) { + emitRow(eventName: string, appId: string, row: Row, table?: Table) { rowEmission({ emitter: this, eventName, appId, row, table }) } - emitTable(eventName, appId, table = null) { + emitTable(eventName: string, appId: string, table?: Table) { tableEmission({ emitter: this, eventName, appId, table }) } - emitPort(portNumber) { + emitPort(portNumber?: number | string) { this.emit("internal:port", portNumber) } } -const emitter = new BudibaseEmitter() - -module.exports = emitter +export = BudibaseEmitter diff --git a/packages/server/src/events/index.ts b/packages/server/src/events/index.ts new file mode 100644 index 0000000000..145d399f56 --- /dev/null +++ b/packages/server/src/events/index.ts @@ -0,0 +1,5 @@ +import BudibaseEmitter from "./BudibaseEmitter" + +const emitter = new BudibaseEmitter() + +export = emitter diff --git a/packages/server/src/events/utils.js b/packages/server/src/events/utils.js deleted file mode 100644 index 2d43139d27..0000000000 --- a/packages/server/src/events/utils.js +++ /dev/null @@ -1,38 +0,0 @@ -exports.rowEmission = ({ emitter, eventName, appId, row, table, metadata }) => { - let event = { - row, - appId, - tableId: row.tableId, - } - if (table) { - event.table = table - } - event.id = row._id - if (row._rev) { - event.revision = row._rev - } - if (metadata) { - event.metadata = metadata - } - emitter.emit(eventName, event) -} - -exports.tableEmission = ({ emitter, eventName, appId, table, metadata }) => { - const tableId = table._id - let event = { - table: { - ...table, - tableId: tableId, - }, - appId, - tableId: tableId, - } - event.id = tableId - if (table._rev) { - event.revision = table._rev - } - if (metadata) { - event.metadata = metadata - } - emitter.emit(eventName, event) -} diff --git a/packages/server/src/events/utils.ts b/packages/server/src/events/utils.ts new file mode 100644 index 0000000000..20efb453f2 --- /dev/null +++ b/packages/server/src/events/utils.ts @@ -0,0 +1,78 @@ +import { Table, Row } from "@budibase/types" +import BudibaseEmitter from "./BudibaseEmitter" + +type BBEventOpts = { + emitter: BudibaseEmitter + eventName: string + appId: string + table?: Table + row?: Row + metadata?: any +} + +interface BBEventTable extends Table { + tableId?: string +} + +type BBEvent = { + appId: string + tableId?: string + row?: Row + table?: BBEventTable + id?: string + revision?: string + metadata?: any +} + +export function rowEmission({ + emitter, + eventName, + appId, + row, + table, + metadata, +}: BBEventOpts) { + let event: BBEvent = { + row, + appId, + tableId: row?.tableId, + } + if (table) { + event.table = table + } + event.id = row?._id + if (row?._rev) { + event.revision = row._rev + } + if (metadata) { + event.metadata = metadata + } + emitter.emit(eventName, event) +} + +export function tableEmission({ + emitter, + eventName, + appId, + table, + metadata, +}: BBEventOpts) { + const tableId = table?._id + const inputTable: BBEventTable | undefined = table + if (inputTable) { + inputTable.tableId = tableId + } + let event: BBEvent = { + table: inputTable, + appId, + tableId: tableId, + } + event.id = tableId + if (table?._rev) { + event.revision = table._rev + } + if (metadata) { + event.metadata = metadata + } + emitter.emit(eventName, event) +} diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 3e17df4076..84c2deaa57 100644 --- 
a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -266,6 +266,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { this.deleteTable(json?.table?.name), } + // @ts-ignore const internalQueryMethod = handlers[json.endpoint.operation] return await internalQueryMethod() diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts index 287783eec6..1861cc9662 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils.ts @@ -230,7 +230,7 @@ function shouldCopySpecialColumn( const fetchedIsNumber = !fetchedColumn || fetchedColumn.type === FieldTypes.NUMBER return ( - specialTypes.indexOf(column.type) !== -1 || + specialTypes.indexOf(column.type as FieldTypes) !== -1 || (fetchedIsNumber && column.type === FieldTypes.BOOLEAN) ) } @@ -292,7 +292,11 @@ export function finaliseExternalTables( if (table.primary == null || table.primary.length === 0) { errors[name] = BuildSchemaErrors.NO_KEY continue - } else if (schemaFields.find(field => invalidColumns.includes(field))) { + } else if ( + schemaFields.find(field => + invalidColumns.includes(field as InvalidColumns) + ) + ) { errors[name] = BuildSchemaErrors.INVALID_COLUMN continue } diff --git a/packages/server/src/migrations/tests/index.spec.ts b/packages/server/src/migrations/tests/index.spec.ts index 8effaec52b..cf283cf518 100644 --- a/packages/server/src/migrations/tests/index.spec.ts +++ b/packages/server/src/migrations/tests/index.spec.ts @@ -6,7 +6,7 @@ import { context, } from "@budibase/backend-core" import TestConfig from "../../tests/utilities/TestConfiguration" -import structures from "../../tests/utilities/structures" +import * as structures from "../../tests/utilities/structures" import { MIGRATIONS } from "../" import * as helpers from "./helpers" diff --git a/packages/server/src/startup.ts b/packages/server/src/startup.ts index 53fcf3ebef..50c1122cef 100644 --- a/packages/server/src/startup.ts +++ b/packages/server/src/startup.ts @@ -13,11 +13,11 @@ import { } from "@budibase/backend-core" import fs from "fs" import { watch } from "./watch" -import automations from "./automations" -import fileSystem from "./utilities/fileSystem" +import * as automations from "./automations" +import * as fileSystem from "./utilities/fileSystem" import eventEmitter from "./events" import * as migrations from "./migrations" -import bullboard from "./automations/bullboard" +import * as bullboard from "./automations/bullboard" import * as pro from "@budibase/pro" import * as api from "./api" import sdk from "./sdk" diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.ts similarity index 84% rename from packages/server/src/tests/utilities/TestConfiguration.js rename to packages/server/src/tests/utilities/TestConfiguration.ts index 76c79b838f..bbd940150f 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.js +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -1,6 +1,7 @@ -require("../../db").init() -const env = require("../../environment") -const { +import { init as dbInit } from "../../db" +dbInit() +import env from "../../environment" +import { basicTable, basicRow, basicRole, @@ -11,24 +12,24 @@ const { basicLayout, basicWebhook, TENANT_ID, -} = require("./structures") -const { +} from "./structures" +import { constants, tenancy, sessions, cache, context, - db: dbCore, + db as dbCore, encryption, auth, roles, 
-} = require("@budibase/backend-core") -const controllers = require("./controllers") +} from "@budibase/backend-core" +import * as controllers from "./controllers" +import { cleanup } from "../../utilities/fileSystem" +import newid from "../../db/newid" +import { generateUserMetadataID } from "../../db/utils" +import { startup } from "../../startup" const supertest = require("supertest") -const { cleanup } = require("../../utilities/fileSystem") -const newid = require("../../db/newid") -const { DocumentType, generateUserMetadataID } = require("../../db/utils") -const { startup } = require("../../startup") const GLOBAL_USER_ID = "us_uuid1" const EMAIL = "babs@babs.com" @@ -37,10 +38,26 @@ const LASTNAME = "Barbington" const CSRF_TOKEN = "e3727778-7af0-4226-b5eb-f43cbe60a306" class TestConfiguration { + server: any + request: any + started: boolean + appId: string | null + allApps: any[] + app: any + prodApp: any + prodAppId: any + user: any + globalUserId: any + userMetadataId: any + table: any + linkedTable: any + automation: any + datasource: any + constructor(openServer = true) { if (openServer) { // use a random port because it doesn't matter - env.PORT = 0 + env.PORT = "0" this.server = require("../../app") // we need the request for logging in, involves cookies, hard to fake this.request = supertest(this.server) @@ -81,7 +98,7 @@ class TestConfiguration { } } - async doInContext(appId, task) { + async doInContext(appId: string | null, task: any) { if (!appId) { appId = this.appId } @@ -125,9 +142,9 @@ class TestConfiguration { // UTILS - async _req(body, params, controlFunc) { + async _req(body: any, params: any, controlFunc: any) { // create a fake request ctx - const request = {} + const request: any = {} const appId = this.appId request.appId = appId // fake cookies, we don't need them @@ -156,8 +173,8 @@ class TestConfiguration { admin = false, email = EMAIL, roles, - } = {}) { - return tenancy.doWithGlobalDB(TENANT_ID, async db => { + }: any = {}) { + return tenancy.doWithGlobalDB(TENANT_ID, async (db: any) => { let existing try { existing = await db.get(id) @@ -221,7 +238,7 @@ class TestConfiguration { } } - async login({ roleId, userId, builder, prodApp = false } = {}) { + async login({ roleId, userId, builder, prodApp = false }: any = {}) { const appId = prodApp ? this.prodAppId : this.appId return context.doInAppContext(appId, async () => { userId = !userId ? `us_uuid1` : userId @@ -278,7 +295,7 @@ class TestConfiguration { } const authToken = auth.jwt.sign(authObj, env.JWT_SECRET) const appToken = auth.jwt.sign(app, env.JWT_SECRET) - const headers = { + const headers: any = { Accept: "application/json", Cookie: [ `${constants.Cookie.Auth}=${authToken}`, @@ -296,7 +313,7 @@ class TestConfiguration { publicHeaders({ prodApp = true } = {}) { const appId = prodApp ? 
this.prodAppId : this.appId - const headers = { + const headers: any = { Accept: "application/json", } if (appId) { @@ -317,7 +334,7 @@ class TestConfiguration { // API async generateApiKey(userId = GLOBAL_USER_ID) { - return tenancy.doWithGlobalDB(TENANT_ID, async db => { + return tenancy.doWithGlobalDB(TENANT_ID, async (db: any) => { const id = dbCore.generateDevInfoID(userId) let devInfo try { @@ -335,13 +352,15 @@ class TestConfiguration { // APP - async createApp(appName) { + async createApp(appName: string) { // create dev app // clear any old app this.appId = null + // @ts-ignore await context.updateAppId(null) this.app = await this._req({ name: appName }, null, controllers.app.create) this.appId = this.app.appId + // @ts-ignore await context.updateAppId(this.appId) // create production app @@ -355,40 +374,41 @@ class TestConfiguration { async deploy() { await this._req(null, null, controllers.deploy.deployApp) + // @ts-ignore const prodAppId = this.getAppId().replace("_dev", "") this.prodAppId = prodAppId return context.doInAppContext(prodAppId, async () => { const db = context.getProdAppDB() - return await db.get(DocumentType.APP_METADATA) + return await db.get(dbCore.DocumentType.APP_METADATA) }) } // TABLE - async updateTable(config = null) { + async updateTable(config?: any) { config = config || basicTable() this.table = await this._req(config, null, controllers.table.save) return this.table } - async createTable(config = null) { + async createTable(config?: any) { if (config != null && config._id) { delete config._id } return this.updateTable(config) } - async getTable(tableId = null) { + async getTable(tableId?: string) { tableId = tableId || this.table._id return this._req(null, { tableId }, controllers.table.find) } - async createLinkedTable(relationshipType = null, links = ["link"]) { + async createLinkedTable(relationshipType?: string, links: any = ["link"]) { if (!this.table) { throw "Must have created a table first." } - const tableConfig = basicTable() + const tableConfig: any = basicTable() tableConfig.primaryDisplay = "name" for (let link of links) { tableConfig.schema[link] = { @@ -407,7 +427,7 @@ class TestConfiguration { } async createAttachmentTable() { - const table = basicTable() + const table: any = basicTable() table.schema.attachment = { type: "attachment", } @@ -416,7 +436,7 @@ class TestConfiguration { // ROW - async createRow(config = null) { + async createRow(config: any = null) { if (!this.table) { throw "Test requires table to be configured." } @@ -425,11 +445,11 @@ class TestConfiguration { return this._req(config, { tableId }, controllers.row.save) } - async getRow(tableId, rowId) { + async getRow(tableId: string, rowId: string) { return this._req(null, { tableId, rowId }, controllers.row.find) } - async getRows(tableId) { + async getRows(tableId: string) { if (!tableId && this.table) { tableId = this.table._id } @@ -438,12 +458,12 @@ class TestConfiguration { // ROLE - async createRole(config = null) { + async createRole(config?: any) { config = config || basicRole() return this._req(config, null, controllers.role.save) } - async addPermission(roleId, resourceId, level = "read") { + async addPermission(roleId: string, resourceId: string, level = "read") { return this._req( null, { @@ -457,7 +477,7 @@ class TestConfiguration { // VIEW - async createView(config) { + async createView(config?: any) { if (!this.table) { throw "Test requires table to be configured." 
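// Illustrative only — a minimal Jest-style spec built on the helpers in this
// class; the app name is a placeholder and teardown is omitted for brevity.
import TestConfiguration = require("./TestConfiguration")

describe("row helpers", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.createApp("unit test app")
    await config.createTable()
  })

  it("creates a row against the default table", async () => {
    const row = await config.createRow()
    expect(row.tableId).toEqual(config.table._id)
  })
})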
} @@ -470,7 +490,7 @@ class TestConfiguration { // AUTOMATION - async createAutomation(config) { + async createAutomation(config?: any) { config = config || basicAutomation() if (config._rev) { delete config._rev @@ -485,7 +505,7 @@ class TestConfiguration { return this._req(null, null, controllers.automation.fetch) } - async deleteAutomation(automation = null) { + async deleteAutomation(automation?: any) { automation = automation || this.automation if (!automation) { return @@ -497,7 +517,7 @@ class TestConfiguration { ) } - async createWebhook(config = null) { + async createWebhook(config?: any) { if (!this.automation) { throw "Must create an automation before creating webhook." } @@ -507,14 +527,14 @@ class TestConfiguration { // DATASOURCE - async createDatasource(config = null) { + async createDatasource(config?: any) { config = config || basicDatasource() const response = await this._req(config, null, controllers.datasource.save) this.datasource = response.datasource return this.datasource } - async updateDatasource(datasource) { + async updateDatasource(datasource: any) { const response = await this._req( datasource, { datasourceId: datasource._id }, @@ -524,7 +544,7 @@ class TestConfiguration { return this.datasource } - async restDatasource(cfg) { + async restDatasource(cfg?: any) { return this.createDatasource({ datasource: { ...basicDatasource().datasource, @@ -559,7 +579,14 @@ class TestConfiguration { // QUERY - async previewQuery(request, config, datasource, fields, params, verb) { + async previewQuery( + request: any, + config: any, + datasource: any, + fields: any, + params: any, + verb: string + ) { return request .post(`/api/queries/preview`) .send({ @@ -574,7 +601,7 @@ class TestConfiguration { .expect(200) } - async createQuery(config = null) { + async createQuery(config?: any) { if (!this.datasource && !config) { throw "No datasource created for query." 
} @@ -584,17 +611,17 @@ class TestConfiguration { // SCREEN - async createScreen(config = null) { + async createScreen(config?: any) { config = config || basicScreen() return this._req(config, null, controllers.screen.save) } // LAYOUT - async createLayout(config = null) { + async createLayout(config?: any) { config = config || basicLayout() return await this._req(config, null, controllers.layout.save) } } -module.exports = TestConfiguration +export = TestConfiguration diff --git a/packages/server/src/tests/utilities/controllers.js b/packages/server/src/tests/utilities/controllers.js deleted file mode 100644 index 8da6c97047..0000000000 --- a/packages/server/src/tests/utilities/controllers.js +++ /dev/null @@ -1,16 +0,0 @@ -module.exports = { - table: require("../../api/controllers/table"), - row: require("../../api/controllers/row"), - role: require("../../api/controllers/role"), - perms: require("../../api/controllers/permission"), - view: require("../../api/controllers/view"), - app: require("../../api/controllers/application"), - user: require("../../api/controllers/user"), - automation: require("../../api/controllers/automation"), - datasource: require("../../api/controllers/datasource"), - query: require("../../api/controllers/query"), - screen: require("../../api/controllers/screen"), - webhook: require("../../api/controllers/webhook"), - layout: require("../../api/controllers/layout"), - deploy: require("../../api/controllers/deploy"), -} diff --git a/packages/server/src/tests/utilities/controllers.ts b/packages/server/src/tests/utilities/controllers.ts new file mode 100644 index 0000000000..315b61551c --- /dev/null +++ b/packages/server/src/tests/utilities/controllers.ts @@ -0,0 +1,14 @@ +export * as table from "../../api/controllers/table" +export * as row from "../../api/controllers/row" +export * as role from "../../api/controllers/role" +export * as perms from "../../api/controllers/permission" +export * as view from "../../api/controllers/view" +export * as app from "../../api/controllers/application" +export * as user from "../../api/controllers/user" +export * as automation from "../../api/controllers/automation" +export * as datasource from "../../api/controllers/datasource" +export * as query from "../../api/controllers/query" +export * as screen from "../../api/controllers/screen" +export * as webhook from "../../api/controllers/webhook" +export * as layout from "../../api/controllers/layout" +export * as deploy from "../../api/controllers/deploy" diff --git a/packages/server/src/tests/utilities/index.js b/packages/server/src/tests/utilities/index.ts similarity index 77% rename from packages/server/src/tests/utilities/index.js rename to packages/server/src/tests/utilities/index.ts index aa8039ce2f..270e8b7b49 100644 --- a/packages/server/src/tests/utilities/index.js +++ b/packages/server/src/tests/utilities/index.ts @@ -1,5 +1,5 @@ -exports.makePartial = obj => { - const newObj = {} +export function makePartial(obj: any) { + const newObj: any = {} for (let key of Object.keys(obj)) { if (typeof obj[key] === "object") { newObj[key] = exports.makePartial(obj[key]) diff --git a/packages/server/src/tests/utilities/structures.js b/packages/server/src/tests/utilities/structures.ts similarity index 58% rename from packages/server/src/tests/utilities/structures.js rename to packages/server/src/tests/utilities/structures.ts index 610084b04e..a412be4931 100644 --- a/packages/server/src/tests/utilities/structures.js +++ b/packages/server/src/tests/utilities/structures.ts @@ -1,13 
+1,13 @@ -const { roles, permissions } = require("@budibase/backend-core") -const { createHomeScreen } = require("../../constants/screens") -const { EMPTY_LAYOUT } = require("../../constants/layouts") -const { cloneDeep } = require("lodash/fp") +import { roles, permissions } from "@budibase/backend-core" +import { createHomeScreen } from "../../constants/screens" +import { EMPTY_LAYOUT } from "../../constants/layouts" +import { cloneDeep } from "lodash/fp" +import { TRIGGER_DEFINITIONS, ACTION_DEFINITIONS } from "../../automations" const { v4: uuidv4 } = require("uuid") -const { TRIGGER_DEFINITIONS, ACTION_DEFINITIONS } = require("../../automations") -exports.TENANT_ID = "default" +export const TENANT_ID = "default" -exports.basicTable = () => { +export function basicTable() { return { name: "TestTable", type: "table", @@ -29,16 +29,16 @@ exports.basicTable = () => { } } -exports.basicView = tableId => { +export function basicView(tableId: string) { return { tableId, name: "ViewTest", } } -exports.filterView = tableId => { +export function filterView(tableId: string) { return { - ...this.basicView(tableId), + ...basicView(tableId), filters: [ { value: 0, @@ -49,56 +49,58 @@ exports.filterView = tableId => { } } -exports.calculationView = tableId => { +export function calculationView(tableId: string) { return { - ...this.basicView(tableId), + ...basicView(tableId), field: "count", calculation: "sum", } } -exports.view = tableId => { +export function view(tableId: string) { return { - ...this.filterView(tableId), - ...this.calculationView(tableId), + ...filterView(tableId), + ...calculationView(tableId), } } -exports.automationStep = (actionDefinition = ACTION_DEFINITIONS.CREATE_ROW) => { +export function automationStep( + actionDefinition = ACTION_DEFINITIONS.CREATE_ROW +) { return { id: uuidv4(), ...actionDefinition, } } -exports.automationTrigger = ( +export function automationTrigger( triggerDefinition = TRIGGER_DEFINITIONS.ROW_SAVED -) => { +) { return { id: uuidv4(), ...triggerDefinition, } } -exports.newAutomation = ({ steps, trigger } = {}) => { - const automation = exports.basicAutomation() +export function newAutomation({ steps, trigger }: any = {}) { + const automation: any = basicAutomation() if (trigger) { automation.definition.trigger = trigger } else { - automation.definition.trigger = exports.automationTrigger() + automation.definition.trigger = automationTrigger() } if (steps) { automation.definition.steps = steps } else { - automation.definition.steps = [exports.automationStep()] + automation.definition.steps = [automationStep()] } return automation } -exports.basicAutomation = () => { +export function basicAutomation() { return { name: "My Automation", screenId: "kasdkfldsafkl", @@ -114,7 +116,7 @@ exports.basicAutomation = () => { } } -exports.basicRow = tableId => { +export function basicRow(tableId: string) { return { name: "Test Contact", description: "original description", @@ -122,15 +124,19 @@ exports.basicRow = tableId => { } } -exports.basicLinkedRow = (tableId, linkedRowId, linkField = "link") => { +export function basicLinkedRow( + tableId: string, + linkedRowId: string, + linkField: string = "link" +) { // this is based on the basic linked tables you get from the test configuration return { - ...exports.basicRow(tableId), + ...basicRow(tableId), [linkField]: [linkedRowId], } } -exports.basicRole = () => { +export function basicRole() { return { name: "NewRole", inherits: roles.BUILTIN_ROLE_IDS.BASIC, @@ -138,7 +144,7 @@ exports.basicRole = () => { } } 
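// Why `...this.basicView(tableId)` became `...basicView(tableId)` above:
// under CommonJS, `exports.filterView = tableId => ...` happened to see
// `this` as the module's exports object, so `this.basicView` resolved at
// runtime; standalone exported functions have no such `this`, so the helpers
// now call each other directly. A self-contained illustration (not the
// project's code):
function basicView(tableId: string) {
  return { tableId, name: "ViewTest" }
}

function filterView(tableId: string) {
  // direct call — `this.basicView(tableId)` would not compile here
  return { ...basicView(tableId), filters: [] as any[] }
}

console.log(filterView("ta_test"))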
-exports.basicDatasource = () => { +export function basicDatasource() { return { datasource: { type: "datasource", @@ -149,7 +155,7 @@ exports.basicDatasource = () => { } } -exports.basicQuery = datasourceId => { +export function basicQuery(datasourceId: string) { return { datasourceId: datasourceId, name: "New Query", @@ -160,7 +166,7 @@ exports.basicQuery = datasourceId => { } } -exports.basicUser = role => { +export function basicUser(role: string) { return { email: "bill@bill.com", password: "yeeooo", @@ -168,15 +174,15 @@ exports.basicUser = role => { } } -exports.basicScreen = () => { +export function basicScreen() { return createHomeScreen() } -exports.basicLayout = () => { +export function basicLayout() { return cloneDeep(EMPTY_LAYOUT) } -exports.basicWebhook = automationId => { +export function basicWebhook(automationId: string) { return { live: true, name: "webhook", diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 5e2817ed06..8b343cdf8e 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -6,8 +6,8 @@ import { disableCronById, isErrorInOutput, } from "../automations/utils" -import { default as actions } from "../automations/actions" -import { default as automationUtils } from "../automations/automationUtils" +import * as actions from "../automations/actions" +import * as automationUtils from "../automations/automationUtils" import { default as AutomationEmitter } from "../events/AutomationEmitter" import { generateAutomationMetadataID, isProdAppID } from "../db/utils" import { definitions as triggerDefs } from "../automations/triggerInfo" @@ -335,7 +335,7 @@ class Orchestrator { )) { originalStepInput[key][innerKey][innerObject] = automationUtils.substituteLoopStep( - innerValue, + innerValue as string, `steps.${loopStepNumber}` ) } diff --git a/packages/server/src/threads/utils.ts b/packages/server/src/threads/utils.ts index 53120160a8..5caaeb83d7 100644 --- a/packages/server/src/threads/utils.ts +++ b/packages/server/src/threads/utils.ts @@ -1,6 +1,6 @@ import { QueryVariable } from "./definitions" import env from "../environment" -import db from "../db" +import * as db from "../db" import { redis, db as dbCore } from "@budibase/backend-core" const VARIABLE_TTL_SECONDS = 3600 diff --git a/packages/server/src/utilities/centralPath.js b/packages/server/src/utilities/centralPath.ts similarity index 83% rename from packages/server/src/utilities/centralPath.js rename to packages/server/src/utilities/centralPath.ts index 4ce7bcfbb3..b9c0a8aedf 100644 --- a/packages/server/src/utilities/centralPath.js +++ b/packages/server/src/utilities/centralPath.ts @@ -1,4 +1,4 @@ -const path = require("path") +import path from "path" // this simply runs all of our path join and resolve functions through // a central location incase we need to add some protection to file paths @@ -8,7 +8,7 @@ const path = require("path") * @param args Any number of string arguments to add to a path * @returns {string} The final path ready to use */ -exports.join = function (...args) { +export function join(...args: any) { return path.join(...args) } @@ -17,6 +17,6 @@ exports.join = function (...args) { * @param args Any number of string arguments to add to a path * @returns {string} The final path ready to use */ -exports.resolve = function (...args) { +export function resolve(...args: any) { return path.resolve(...args) } diff --git a/packages/server/src/utilities/csvParser.js 
b/packages/server/src/utilities/csvParser.ts similarity index 67% rename from packages/server/src/utilities/csvParser.js rename to packages/server/src/utilities/csvParser.ts index 09449f6fc1..0c138abc3e 100644 --- a/packages/server/src/utilities/csvParser.js +++ b/packages/server/src/utilities/csvParser.ts @@ -1,18 +1,25 @@ -const csv = require("csvtojson") -const { FieldTypes } = require("../constants") +import { FieldSchema, Table } from "@budibase/types" +import csv from "csvtojson" +import { FieldTypes } from "../constants" -const VALIDATORS = { +type CsvParseOpts = { + schema?: { [key: string]: any } + existingTable: Table + csvString?: string +} + +const VALIDATORS: any = { [FieldTypes.STRING]: () => true, [FieldTypes.OPTIONS]: () => true, [FieldTypes.BARCODEQR]: () => true, - [FieldTypes.NUMBER]: attribute => { + [FieldTypes.NUMBER]: (attribute?: string) => { // allow not to be present if (!attribute) { return true } return !isNaN(Number(attribute)) }, - [FieldTypes.DATETIME]: attribute => { + [FieldTypes.DATETIME]: (attribute?: string) => { // allow not to be present if (!attribute) { return true @@ -21,14 +28,14 @@ const VALIDATORS = { }, } -const PARSERS = { - [FieldTypes.NUMBER]: attribute => { +const PARSERS: any = { + [FieldTypes.NUMBER]: (attribute?: string) => { if (!attribute) { return attribute } return Number(attribute) }, - [FieldTypes.DATETIME]: attribute => { + [FieldTypes.DATETIME]: (attribute?: string) => { if (!attribute) { return attribute } @@ -36,10 +43,10 @@ const PARSERS = { }, } -function parse(csvString, parsers) { +export function parse(csvString: string, parsers: any): Record { const result = csv().fromString(csvString) - const schema = {} + const schema: Record = {} return new Promise((resolve, reject) => { result.on("header", headers => { @@ -77,16 +84,22 @@ function parse(csvString, parsers) { }) } -function updateSchema({ schema, existingTable }) { +export function updateSchema({ + schema, + existingTable, +}: { + schema?: Record + existingTable?: Table +}) { if (!schema) { return schema } - const finalSchema = {} - const schemaKeyMap = {} + const finalSchema: Record = {} + const schemaKeyMap: Record = {} Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key)) - for (let [key, field] of Object.entries(existingTable.schema)) { + for (let [key, field] of Object.entries(existingTable?.schema || {})) { const lcKey = key.toLowerCase() - const foundKey = schemaKeyMap[lcKey] + const foundKey: string = schemaKeyMap[lcKey] if (foundKey) { finalSchema[key] = schema[foundKey] finalSchema[key].type = field.type @@ -95,15 +108,22 @@ function updateSchema({ schema, existingTable }) { return finalSchema } -async function transform({ schema, csvString, existingTable }) { - const colParser = {} +export async function transform({ + schema, + csvString, + existingTable, +}: CsvParseOpts) { + if (!schema || !csvString) { + throw new Error("Unable to transform CSV without schema") + } + const colParser: any = {} // make sure the table has all the columns required for import if (existingTable) { schema = updateSchema({ schema, existingTable }) } - for (let [key, field] of Object.entries(schema)) { + for (let [key, field] of Object.entries(schema || {})) { // don't import data to auto columns if (!field.autocolumn) { colParser[key] = PARSERS[field.type] || field.type @@ -112,8 +132,10 @@ async function transform({ schema, csvString, existingTable }) { try { const data = await csv({ colParser }).fromString(csvString) - const schemaKeyMap = {} - 
Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key)) + const schemaKeyMap: any = {} + Object.keys(schema || {}).forEach( + key => (schemaKeyMap[key.toLowerCase()] = key) + ) for (let element of data) { if (!data) { continue @@ -137,9 +159,3 @@ async function transform({ schema, csvString, existingTable }) { throw err } } - -module.exports = { - parse, - transform, - updateSchema, -} diff --git a/packages/server/src/utilities/fileSystem/clientLibrary.js b/packages/server/src/utilities/fileSystem/clientLibrary.ts similarity index 91% rename from packages/server/src/utilities/fileSystem/clientLibrary.js rename to packages/server/src/utilities/fileSystem/clientLibrary.ts index 37faa4256f..9fb96ff9a5 100644 --- a/packages/server/src/utilities/fileSystem/clientLibrary.js +++ b/packages/server/src/utilities/fileSystem/clientLibrary.ts @@ -1,9 +1,9 @@ -const { join } = require("path") -const { ObjectStoreBuckets } = require("../../constants") -const fs = require("fs") -const { objectStore } = require("@budibase/backend-core") -const { resolve } = require("../centralPath") -const env = require("../../environment") +import { join } from "path" +import { ObjectStoreBuckets } from "../../constants" +import fs from "fs" +import { objectStore } from "@budibase/backend-core" +import { resolve } from "../centralPath" +import env from "../../environment" const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") /** @@ -33,7 +33,7 @@ const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") * @param appId The app ID to backup * @returns {Promise} */ -exports.backupClientLibrary = async appId => { +export async function backupClientLibrary(appId: string) { // Copy existing manifest to tmp let tmpManifestPath try { @@ -85,7 +85,7 @@ exports.backupClientLibrary = async appId => { * @param appId The app ID to update * @returns {Promise} */ -exports.updateClientLibrary = async appId => { +export async function updateClientLibrary(appId: string) { let manifest, client if (env.isDev()) { @@ -124,7 +124,7 @@ exports.updateClientLibrary = async appId => { * @param appId The app ID to revert * @returns {Promise} */ -exports.revertClientLibrary = async appId => { +export async function revertClientLibrary(appId: string) { // Copy backups manifest to tmp directory const tmpManifestPath = await objectStore.retrieveToTmp( ObjectStoreBuckets.APPS, diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.ts similarity index 78% rename from packages/server/src/utilities/fileSystem/index.js rename to packages/server/src/utilities/fileSystem/index.ts index 5b46565897..4af42488ef 100644 --- a/packages/server/src/utilities/fileSystem/index.js +++ b/packages/server/src/utilities/fileSystem/index.ts @@ -1,26 +1,17 @@ -const { budibaseTempDir } = require("../budibaseDir") -const fs = require("fs") -const { join } = require("path") +import { budibaseTempDir } from "../budibaseDir" +import fs from "fs" +import { join } from "path" +import { context, objectStore } from "@budibase/backend-core" +import { ObjectStoreBuckets } from "../../constants" +import { updateClientLibrary } from "./clientLibrary" +import { checkSlashesInUrl } from "../" +import env from "../../environment" +import fetch from "node-fetch" const uuid = require("uuid/v4") -const { context, objectStore } = require("@budibase/backend-core") -const { ObjectStoreBuckets } = require("../../constants") -const { updateClientLibrary } = require("./clientLibrary") -const { checkSlashesInUrl } = 
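// Hedged usage sketch for the converted csvParser above: parse() infers a
// schema from the CSV contents and transform() coerces row values against
// it. The CSV string is made up, and the cast is only there because
// transform's options type expects an existing table — both are assumptions
// for illustration.
import { parse, transform } from "./csvParser"

async function importCsv() {
  const csvString = "name,age\nBarb,29\nBill,31"
  const schema = await parse(csvString, {})
  const rows = await transform({ schema, csvString } as any)
  return rows
}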
require("../") -const env = require("../../environment") const tar = require("tar") -const fetch = require("node-fetch") -const { - upload, - retrieve, - retrieveToTmp, - deleteFolder, - downloadTarball, - downloadTarballDirect, - deleteFiles, -} = objectStore -const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") -const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") +export const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") +export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") const DATASOURCE_PATH = join(budibaseTempDir(), "datasource") /** @@ -34,14 +25,14 @@ const DATASOURCE_PATH = join(budibaseTempDir(), "datasource") /** * Upon first startup of instance there may not be everything we need in tmp directory, set it up. */ -exports.init = () => { +export function init() { const tempDir = budibaseTempDir() if (!fs.existsSync(tempDir)) { // some test cases fire this quickly enough that // synchronous cases can end up here at the same time try { fs.mkdirSync(tempDir) - } catch (err) { + } catch (err: any) { if (!err || err.code !== "EEXIST") { throw err } @@ -81,7 +72,7 @@ exports.checkDevelopmentEnvironment = () => { * @param {string} path The path to the handlebars file which is to be loaded. * @returns {string} The loaded handlebars file as a string - loaded as utf8. */ -exports.loadHandlebarsFile = path => { +export function loadHandlebarsFile(path: string) { return fs.readFileSync(path, "utf8") } @@ -91,13 +82,13 @@ exports.loadHandlebarsFile = path => { * @param {string} contents the contents of the file which is to be returned from the API. * @return {Object} the read stream which can be put into the koa context body. */ -exports.apiFileReturn = contents => { +export function apiFileReturn(contents: string) { const path = join(budibaseTempDir(), uuid()) fs.writeFileSync(path, contents) return fs.createReadStream(path) } -exports.streamFile = path => { +export function streamFile(path: string) { return fs.createReadStream(path) } @@ -106,7 +97,7 @@ exports.streamFile = path => { * @param {string} fileContents contents which will be written to a temp file. * @return {string} the path to the temp file. */ -exports.storeTempFile = fileContents => { +export function storeTempFile(fileContents: string) { const path = join(budibaseTempDir(), uuid()) fs.writeFileSync(path, fileContents) return path @@ -116,7 +107,7 @@ exports.storeTempFile = fileContents => { * Utility function for getting a file read stream - a simple in memory buffered read * stream doesn't work for pouchdb. */ -exports.stringToFileStream = contents => { +export function stringToFileStream(contents: string) { const path = exports.storeTempFile(contents) return fs.createReadStream(path) } @@ -125,7 +116,7 @@ exports.stringToFileStream = contents => { * Creates a temp file and returns it from the API. * @param {string} fileContents the contents to be returned in file. */ -exports.sendTempFile = fileContents => { +export function sendTempFile(fileContents: string) { const path = exports.storeTempFile(fileContents) return fs.createReadStream(path) } @@ -135,7 +126,7 @@ exports.sendTempFile = fileContents => { * @param {string} appId The ID of the app which is being created. * @return {Promise} once promise completes app resources should be ready in object store. 
*/ -exports.createApp = async appId => { +export async function createApp(appId: string) { await updateClientLibrary(appId) } @@ -144,8 +135,8 @@ exports.createApp = async appId => { * @param {string} appId The ID of the app which is being deleted. * @return {Promise} once promise completes the app resources will be removed from object store. */ -exports.deleteApp = async appId => { - await deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`) +export async function deleteApp(appId: string) { + await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`) } /** @@ -154,17 +145,21 @@ exports.deleteApp = async appId => { * @param name * @return {Promise<*>} */ -exports.downloadTemplate = async (type, name) => { +export async function downloadTemplate(type: string, name: string) { const DEFAULT_TEMPLATES_BUCKET = "prod-budi-templates.s3-eu-west-1.amazonaws.com" const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz` - return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type) + return objectStore.downloadTarball( + templateUrl, + ObjectStoreBuckets.TEMPLATES, + type + ) } /** * Retrieves component libraries from object store (or tmp symlink if in local) */ -exports.getComponentLibraryManifest = async library => { +export async function getComponentLibraryManifest(library: string) { const appId = context.getAppId() const filename = "manifest.json" /* istanbul ignore next */ @@ -182,12 +177,16 @@ exports.getComponentLibraryManifest = async library => { return require(path) } + if (!appId) { + throw new Error("No app ID found - cannot get component libraries") + } + let resp let path try { // Try to load the manifest from the new file location path = join(appId, filename) - resp = await retrieve(ObjectStoreBuckets.APPS, path) + resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path) } catch (error) { console.error( `component-manifest-objectstore=failed appId=${appId} path=${path}`, @@ -195,7 +194,7 @@ exports.getComponentLibraryManifest = async library => { ) // Fallback to loading it from the old location for old apps path = join(appId, "node_modules", library, "package", filename) - resp = await retrieve(ObjectStoreBuckets.APPS, path) + resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path) } if (typeof resp !== "string") { resp = resp.toString("utf8") @@ -207,14 +206,17 @@ exports.getComponentLibraryManifest = async library => { * All file reads come through here just to make sure all of them make sense * allows a centralised location to check logic is all good. */ -exports.readFileSync = (filepath, options = "utf8") => { - return fs.readFileSync(filepath, options) +export function readFileSync( + filepath: string, + options: BufferEncoding = "utf8" +) { + return fs.readFileSync(filepath, { encoding: options }) } /** * Given a set of app IDs makes sure file system is cleared of any of their temp info. 
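// Note on the readFileSync wrapper above: passing { encoding } keeps Node's
// return type as string (a Buffer only comes back when no encoding is set),
// which is why the TS version builds the options object. A standalone check:
import fs from "fs"

const contents: string = fs.readFileSync(__filename, { encoding: "utf8" })
console.log(contents.length)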
*/ -exports.cleanup = appIds => { +export function cleanup(appIds: string[]) { for (let appId of appIds) { const path = join(budibaseTempDir(), appId) if (fs.existsSync(path)) { @@ -223,7 +225,7 @@ exports.cleanup = appIds => { } } -const createTempFolder = item => { +export function createTempFolder(item: string) { const path = join(budibaseTempDir(), item) try { // remove old tmp directories automatically - don't combine @@ -231,24 +233,22 @@ const createTempFolder = item => { fs.rmSync(path, { recursive: true, force: true }) } fs.mkdirSync(path) - } catch (err) { + } catch (err: any) { throw new Error(`Path cannot be created: ${err.message}`) } return path } -exports.createTempFolder = createTempFolder -const extractTarball = async (fromFilePath, toPath) => { +export async function extractTarball(fromFilePath: string, toPath: string) { await tar.extract({ file: fromFilePath, C: toPath, }) } -exports.extractTarball = extractTarball -const getPluginMetadata = async path => { - let metadata = {} +export async function getPluginMetadata(path: string) { + let metadata: { schema?: any; package?: any } = {} try { const pkg = fs.readFileSync(join(path, "package.json"), "utf8") const schema = fs.readFileSync(join(path, "schema.json"), "utf8") @@ -265,7 +265,7 @@ const getPluginMetadata = async path => { "package.json is missing one of 'name', 'version' or 'description'." ) } - } catch (err) { + } catch (err: any) { throw new Error( `Unable to process schema.json/package.json in plugin. ${err.message}` ) @@ -273,9 +273,12 @@ const getPluginMetadata = async path => { return { metadata, directory: path } } -exports.getPluginMetadata = getPluginMetadata -exports.getDatasourcePlugin = async (name, url, hash) => { +export async function getDatasourcePlugin( + name: string, + url: string, + hash: string +) { if (!fs.existsSync(DATASOURCE_PATH)) { fs.mkdirSync(DATASOURCE_PATH) } @@ -311,7 +314,7 @@ exports.getDatasourcePlugin = async (name, url, hash) => { /** * Find for a file recursively from start path applying filter, return first match */ -exports.findFileRec = (startPath, filter) => { +export function findFileRec(startPath: string, filter: any) { if (!fs.existsSync(startPath)) { return } @@ -332,21 +335,10 @@ exports.findFileRec = (startPath, filter) => { /** * Remove a folder which is not empty from the file system */ -exports.deleteFolderFileSystem = path => { +export function deleteFolderFileSystem(path: string) { if (!fs.existsSync(path)) { return } fs.rmSync(path, { recursive: true, force: true }) } - -/** - * Full function definition for below can be found in the utilities. 
- */ -exports.upload = upload -exports.retrieve = retrieve -exports.retrieveToTmp = retrieveToTmp -exports.deleteFiles = deleteFiles -exports.downloadTarballDirect = downloadTarballDirect -exports.TOP_LEVEL_PATH = TOP_LEVEL_PATH -exports.NODE_MODULES_PATH = NODE_MODULES_PATH diff --git a/packages/server/src/utilities/fileSystem/processor.js b/packages/server/src/utilities/fileSystem/processor.ts similarity index 69% rename from packages/server/src/utilities/fileSystem/processor.js rename to packages/server/src/utilities/fileSystem/processor.ts index 3778b50168..a32a7568f4 100644 --- a/packages/server/src/utilities/fileSystem/processor.js +++ b/packages/server/src/utilities/fileSystem/processor.ts @@ -1,21 +1,19 @@ -const jimp = require("jimp") +import jimp from "jimp" const FORMATS = { IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"], } -function processImage(file) { +function processImage(file: { path: string }) { // this will overwrite the temp file return jimp.read(file.path).then(img => { return img.resize(300, jimp.AUTO).write(file.path) }) } -async function process(file) { +export async function process(file: { extension: string; path: string }) { if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) { await processImage(file) } return file } - -exports.process = process diff --git a/packages/server/src/utilities/plugins.js b/packages/server/src/utilities/plugins.ts similarity index 69% rename from packages/server/src/utilities/plugins.js rename to packages/server/src/utilities/plugins.ts index 139639e814..b3a9d50fff 100644 --- a/packages/server/src/utilities/plugins.js +++ b/packages/server/src/utilities/plugins.ts @@ -1,8 +1,9 @@ -const env = require("../environment") -const { plugins: ProPlugins } = require("@budibase/pro") -const { objectStore } = require("@budibase/backend-core") +import env from "../environment" +import { plugins as ProPlugins } from "@budibase/pro" +import { objectStore } from "@budibase/backend-core" +import { Plugin } from "@budibase/types" -exports.enrichPluginURLs = plugins => { +export function enrichPluginURLs(plugins: Plugin[]) { if (!plugins || !plugins.length) { return [] } diff --git a/packages/server/src/utilities/queue/inMemoryQueue.js b/packages/server/src/utilities/queue/inMemoryQueue.js deleted file mode 100644 index 79781f9283..0000000000 --- a/packages/server/src/utilities/queue/inMemoryQueue.js +++ /dev/null @@ -1,122 +0,0 @@ -let events = require("events") - -/** - * Bull works with a Job wrapper around all messages that contains a lot more information about - * the state of the message, this object constructor implements the same schema of Bull jobs - * for the sake of maintaining API consistency. - * @param {string} queue The name of the queue which the message will be carried on. - * @param {object} message The JSON message which will be passed back to the consumer. - * @returns {Object} A new job which can now be put onto the queue, this is mostly an - * internal structure so that an in memory queue can be easily swapped for a Bull queue. - */ -function newJob(queue, message) { - return { - timestamp: Date.now(), - queue: queue, - data: message, - } -} - -/** - * This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock. - * It is relatively simple, using an event emitter internally to register when messages are available - * to the consumers - in can support many inputs and many consumers. 
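// With the pass-through exports removed above, callers reach the object
// store through the backend-core namespace instead — a sketch, assuming a
// caller that formerly imported `retrieve` from fileSystem:
import { objectStore } from "@budibase/backend-core"
import { ObjectStoreBuckets } from "../../constants"

async function fetchAppFile(appId: string, filename: string) {
  return objectStore.retrieve(ObjectStoreBuckets.APPS, `${appId}/${filename}`)
}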
- */ -class InMemoryQueue { - /** - * The constructor the queue, exactly the same as that of Bulls. - * @param {string} name The name of the queue which is being configured. - * @param {object|null} opts This is not used by the in memory queue as there is no real use - * case when in memory, but is the same API as Bull - */ - constructor(name, opts = null) { - this._name = name - this._opts = opts - this._messages = [] - this._emitter = new events.EventEmitter() - } - - /** - * Same callback API as Bull, each callback passed to this will consume messages as they are - * available. Please note this is a queue service, not a notification service, so each - * consumer will receive different messages. - * @param {function} func The callback function which will return a "Job", the same - * as the Bull API, within this job the property "data" contains the JSON message. Please - * note this is incredibly limited compared to Bull as in reality the Job would contain - * a lot more information about the queue and current status of Bull cluster. - */ - process(func) { - this._emitter.on("message", async () => { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - }) - } - - // simply puts a message to the queue and emits to the queue for processing - /** - * Simple function to replicate the add message functionality of Bull, putting - * a new message on the queue. This then emits an event which will be used to - * return the message to a consumer (if one is attached). - * @param {object} msg A message to be transported over the queue, this should be - * a JSON message as this is required by Bull. - */ - // eslint-disable-next-line no-unused-vars - add(msg, repeat) { - if (typeof msg !== "object") { - throw "Queue only supports carrying JSON." - } - this._messages.push(newJob(this._name, msg)) - this._emitter.emit("message") - } - - /** - * replicating the close function from bull, which waits for jobs to finish. - */ - async close() { - return [] - } - - /** - * This removes a cron which has been implemented, this is part of Bull API. - * @param {string} cronJobId The cron which is to be removed. 
- */ - removeRepeatableByKey(cronJobId) { - // TODO: implement for testing - console.log(cronJobId) - } - - /** - * Implemented for tests - */ - getRepeatableJobs() { - return [] - } - - // eslint-disable-next-line no-unused-vars - removeJobs(pattern) { - // no-op - } - - /** - * Implemented for tests - */ - async clean() { - return [] - } - - async getJob() { - return {} - } - - on() { - // do nothing - } -} - -module.exports = InMemoryQueue diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index 9807fc7d61..e519c261c5 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ b/packages/server/src/utilities/rowProcessor/index.ts @@ -1,4 +1,4 @@ -import linkRows from "../../db/linkedRows" +import * as linkRows from "../../db/linkedRows" import { FieldTypes, AutoFieldSubTypes } from "../../constants" import { attachmentsRelativeURL } from "../index" import { processFormulas, fixAutoColumnSubType } from "./utils" @@ -6,7 +6,7 @@ import { ObjectStoreBuckets } from "../../constants" import { context, db as dbCore, objectStore } from "@budibase/backend-core" import { InternalTables } from "../../db/utils" import { TYPE_TRANSFORM_MAP } from "./map" -import { Row, User, Table } from "@budibase/types" +import { Row, Table, ContextUser } from "@budibase/types" const { cloneDeep } = require("lodash/fp") export * from "./utils" @@ -49,7 +49,7 @@ function getRemovedAttachmentKeys( * for automatic ID purposes. */ export function processAutoColumn( - user: User | null, + user: ContextUser | null, table: Table, row: Row, opts?: AutoColumnProcessingOpts @@ -132,10 +132,10 @@ export function coerce(row: any, type: any) { * @returns {object} the row which has been prepared to be written to the DB. */ export function inputProcessing( - user: User, + user: ContextUser, table: Table, row: Row, - opts: AutoColumnProcessingOpts + opts?: AutoColumnProcessingOpts ) { let clonedRow = cloneDeep(row) // need to copy the table so it can be differenced on way out @@ -189,10 +189,10 @@ export async function outputProcessing( wasArray = false } // attach any linked row information - let enriched = await linkRows.attachFullLinkedDocs(table, rows) + let enriched = await linkRows.attachFullLinkedDocs(table, rows as Row[]) // process formulas - enriched = processFormulas(table, enriched, { dynamic: true }) + enriched = processFormulas(table, enriched, { dynamic: true }) as Row[] // update the attachments URL depending on hosting for (let [property, column] of Object.entries(table.schema)) { diff --git a/packages/server/src/utilities/rowProcessor/map.js b/packages/server/src/utilities/rowProcessor/map.ts similarity index 94% rename from packages/server/src/utilities/rowProcessor/map.js rename to packages/server/src/utilities/rowProcessor/map.ts index 4e05868bfc..8911d62133 100644 --- a/packages/server/src/utilities/rowProcessor/map.js +++ b/packages/server/src/utilities/rowProcessor/map.ts @@ -1,9 +1,10 @@ -const { FieldTypes } = require("../../constants") +// @ts-nocheck +import { FieldTypes } from "../../constants" /** * A map of how we convert various properties in rows to each other based on the row type. 
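// On the @ts-nocheck added above: the transform map that follows relies on
// computed keys such as [null], which JavaScript coerces to the string
// "null" but TypeScript's computed-property rules reject. A typed
// equivalent would spell the key out — a sketch, not the project's code:
const LINK_TRANSFORM: Record<string, unknown[]> = {
  "": [],
  null: [], // what a computed [null] key coerces to at runtime
}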
*/ -exports.TYPE_TRANSFORM_MAP = { +export const TYPE_TRANSFORM_MAP: any = { [FieldTypes.LINK]: { "": [], [null]: [], diff --git a/packages/server/src/utilities/scriptRunner.js b/packages/server/src/utilities/scriptRunner.ts similarity index 70% rename from packages/server/src/utilities/scriptRunner.js rename to packages/server/src/utilities/scriptRunner.ts index f65e0e1b7b..c5ed5010d0 100644 --- a/packages/server/src/utilities/scriptRunner.js +++ b/packages/server/src/utilities/scriptRunner.ts @@ -1,9 +1,13 @@ -const fetch = require("node-fetch") -const { VM, VMScript } = require("vm2") +import fetch from "node-fetch" +import { VM, VMScript } from "vm2" const JS_TIMEOUT_MS = 1000 class ScriptRunner { - constructor(script, context) { + vm: VM + results: { out: string } + script: VMScript + + constructor(script: string, context: any) { const code = `let fn = () => {\n${script}\n}; results.out = fn();` this.vm = new VM({ timeout: JS_TIMEOUT_MS, @@ -21,4 +25,4 @@ class ScriptRunner { } } -module.exports = ScriptRunner +export = ScriptRunner diff --git a/packages/server/src/utilities/statusCodes.js b/packages/server/src/utilities/statusCodes.ts similarity index 82% rename from packages/server/src/utilities/statusCodes.js rename to packages/server/src/utilities/statusCodes.ts index f374ff504e..4c4469b375 100644 --- a/packages/server/src/utilities/statusCodes.js +++ b/packages/server/src/utilities/statusCodes.ts @@ -1,4 +1,4 @@ -module.exports = { +export = { OK: 200, UNAUTHORIZED: 401, FORBIDDEN: 403, diff --git a/packages/server/src/utilities/usageQuota/usageQuoteReset.js b/packages/server/src/utilities/usageQuota/usageQuoteReset.ts similarity index 81% rename from packages/server/src/utilities/usageQuota/usageQuoteReset.js rename to packages/server/src/utilities/usageQuota/usageQuoteReset.ts index ff5a1aa00e..579fa130d0 100644 --- a/packages/server/src/utilities/usageQuota/usageQuoteReset.js +++ b/packages/server/src/utilities/usageQuota/usageQuoteReset.ts @@ -7,7 +7,7 @@ function getNewQuotaReset() { return Date.now() + 2592000000 } -function resetQuotasIfRequired(quota) { +function resetQuotasIfRequired(quota: { quotaReset: number; usageQuota: any }) { // Check if the quota needs reset if (Date.now() >= quota.quotaReset) { quota.quotaReset = getNewQuotaReset() diff --git a/packages/server/src/utilities/workerRequests.ts b/packages/server/src/utilities/workerRequests.ts index b5554bbe6f..d1fd467025 100644 --- a/packages/server/src/utilities/workerRequests.ts +++ b/packages/server/src/utilities/workerRequests.ts @@ -62,7 +62,7 @@ export async function sendSmtpEmail( contents: string, cc: string, bcc: string, - automation: Automation + automation: boolean ) { // tenant ID will be set in header const response = await fetch( diff --git a/packages/server/tsconfig.build.json b/packages/server/tsconfig.build.json index 1ccdbfe0da..2212a5e100 100644 --- a/packages/server/tsconfig.build.json +++ b/packages/server/tsconfig.build.json @@ -3,7 +3,6 @@ "target": "es6", "module": "commonjs", "lib": ["es2020"], - "allowJs": true, "strict": true, "noImplicitAny": true, "esModuleInterop": true, @@ -23,4 +22,4 @@ "**/*.spec.ts", "**/*.spec.js" ] -} \ No newline at end of file +} diff --git a/packages/types/src/documents/app/automation.ts b/packages/types/src/documents/app/automation.ts index d7450e4b0d..184fed629d 100644 --- a/packages/types/src/documents/app/automation.ts +++ b/packages/types/src/documents/app/automation.ts @@ -1,4 +1,5 @@ import { Document } from "../document" +import { EventEmitter } from 
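// Hedged usage sketch for the vm2-backed ScriptRunner above: the script
// string becomes the body of a sandboxed function whose return value lands
// in results.out. The execute() call is assumed from the unchanged part of
// the class, and the script/context here are illustrative.
import ScriptRunner = require("./scriptRunner")

const runner = new ScriptRunner("return data.a + data.b", {
  data: { a: 1, b: 2 },
})
console.log(runner.execute()) // expected to yield 3 under the assumed API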
"events" export enum AutomationTriggerStepId { ROW_SAVED = "ROW_SAVED", @@ -14,6 +15,7 @@ export enum AutomationActionStepId { CREATE_ROW = "CREATE_ROW", UPDATE_ROW = "UPDATE_ROW", DELETE_ROW = "DELETE_ROW", + EXECUTE_BASH = "EXECUTE_BASH", OUTGOING_WEBHOOK = "OUTGOING_WEBHOOK", EXECUTE_SCRIPT = "EXECUTE_SCRIPT", EXECUTE_QUERY = "EXECUTE_QUERY", @@ -39,8 +41,14 @@ export interface Automation extends Document { name: string } -export interface AutomationStep { - id: string +export interface AutomationStepSchema { + name: string + tagline: string + icon: string + description: string + type: string + internal?: boolean + deprecated?: boolean stepId: AutomationTriggerStepId | AutomationActionStepId inputs: { [key: string]: any @@ -52,13 +60,23 @@ export interface AutomationStep { outputs: { [key: string]: any } + required?: string[] } } -export interface AutomationTrigger extends AutomationStep { +export interface AutomationStep extends AutomationStepSchema { + id: string +} + +export interface AutomationTriggerSchema extends AutomationStepSchema { + event?: string cronJobId?: string } +export interface AutomationTrigger extends AutomationTriggerSchema { + id: string +} + export enum AutomationStatus { SUCCESS = "success", ERROR = "error", @@ -91,3 +109,11 @@ export interface AutomationLogPage { hasNextPage: boolean nextPage?: string } + +export type AutomationStepInput = { + inputs: Record + context: Record + emitter: EventEmitter + appId: string + apiKey?: string +} diff --git a/packages/types/src/documents/app/index.ts b/packages/types/src/documents/app/index.ts index 25c150f9da..8544617c30 100644 --- a/packages/types/src/documents/app/index.ts +++ b/packages/types/src/documents/app/index.ts @@ -12,3 +12,4 @@ export * from "./row" export * from "./user" export * from "./backup" export * from "./webhook" +export * from "./links" diff --git a/packages/types/src/documents/app/links.ts b/packages/types/src/documents/app/links.ts new file mode 100644 index 0000000000..d6b2adddf8 --- /dev/null +++ b/packages/types/src/documents/app/links.ts @@ -0,0 +1,21 @@ +import { Document } from "../document" + +export interface LinkDocument extends Document { + type: string + doc1: { + rowId: string + fieldName: string + tableId: string + } + doc2: { + rowId: string + fieldName: string + tableId: string + } +} + +export interface LinkDocumentValue { + id: string + thisId: string + fieldName: string +} diff --git a/packages/types/src/documents/app/table.ts b/packages/types/src/documents/app/table.ts index 6b9a46d59f..23d77c5ad5 100644 --- a/packages/types/src/documents/app/table.ts +++ b/packages/types/src/documents/app/table.ts @@ -8,10 +8,12 @@ export interface FieldSchema { externalType?: string fieldName?: string name: string + sortable?: boolean tableId?: string relationshipType?: string through?: string foreignKey?: string + icon?: string autocolumn?: boolean subtype?: string throughFrom?: string @@ -22,6 +24,7 @@ export interface FieldSchema { ignoreTimezones?: boolean timeOnly?: boolean lastID?: number + useRichText?: boolean | null meta?: { toTable: string toKey: string @@ -31,10 +34,22 @@ export interface FieldSchema { email?: boolean inclusion?: string[] length?: { - minimum?: string | number - maximum?: string | number + minimum?: string | number | null + maximum?: string | number | null + } + numericality?: { + greaterThanOrEqualTo: string | null + lessThanOrEqualTo: string | null + } + presence?: + | boolean + | { + allowEmpty?: boolean + } + datetime?: { + latest: string + earliest: string } - 
presence?: boolean } } diff --git a/packages/types/src/documents/app/view.ts b/packages/types/src/documents/app/view.ts index 04566ee100..de0dfea7f5 100644 --- a/packages/types/src/documents/app/view.ts +++ b/packages/types/src/documents/app/view.ts @@ -5,6 +5,9 @@ export interface View { filters: ViewFilter[] schema: ViewSchema calculation?: ViewCalculation + map?: string + reduce?: any + meta?: Record<string, any> } export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema @@ -30,7 +33,7 @@ export interface ViewStatisticsSchema { } export interface ViewFilter { - value: any + value?: any condition: string key: string conjunction?: string diff --git a/packages/types/src/documents/global/config.ts b/packages/types/src/documents/global/config.ts index f62f0a12a8..65a8b15589 100644 --- a/packages/types/src/documents/global/config.ts +++ b/packages/types/src/documents/global/config.ts @@ -31,17 +31,29 @@ export interface GoogleConfig extends Config { } } +export interface OIDCConfiguration { + issuer: string + authorizationURL: string + tokenURL: string + userInfoURL: string + clientID: string + clientSecret: string + callbackURL: string +} + +export interface OIDCInnerCfg { + configUrl: string + clientID: string + clientSecret: string + logo: string + name: string + uuid: string + activated: boolean +} + export interface OIDCConfig extends Config { config: { - configs: { - configUrl: string - clientID: string - clientSecret: string - logo: string - name: string - uuid: string - activated: boolean - }[] + configs: OIDCInnerCfg[] } } @@ -62,7 +74,9 @@ export const isOIDCConfig = (config: Config): config is OIDCConfig => export enum ConfigType { SETTINGS = "settings", + ACCOUNT = "account", SMTP = "smtp", GOOGLE = "google", OIDC = "oidc", + OIDC_LOGOS = "logos_oidc", } diff --git a/packages/types/src/documents/global/index.ts b/packages/types/src/documents/global/index.ts index 9f779d4937..40a30ee477 100644 --- a/packages/types/src/documents/global/index.ts +++ b/packages/types/src/documents/global/index.ts @@ -4,3 +4,4 @@ export * from "./userGroup" export * from "./plugin" export * from "./quotas" export * from "./schedule" +export * from "./templates" diff --git a/packages/types/src/documents/global/templates.ts b/packages/types/src/documents/global/templates.ts new file mode 100644 index 0000000000..df620fc7b0 --- /dev/null +++ b/packages/types/src/documents/global/templates.ts @@ -0,0 +1,9 @@ +import { Document } from "../document" + +export interface Template extends Document { + ownerId?: string + name?: string + contents: string + purpose: string + type?: string +} diff --git a/packages/types/src/documents/global/user.ts b/packages/types/src/documents/global/user.ts index 9a1fb472f0..1778d6e7c6 100644 --- a/packages/types/src/documents/global/user.ts +++ b/packages/types/src/documents/global/user.ts @@ -1,8 +1,37 @@ import { Document } from "../document" -export interface User extends Document { +export interface SSOProfile { + id: string + name?: { + givenName?: string + familyName?: string + } + _json: { + email: string + picture: string + } + provider?: string +} + +export interface ThirdPartyUser extends Document { + thirdPartyProfile?: SSOProfile["_json"] + firstName?: string + lastName?: string + pictureUrl?: string + profile?: SSOProfile + oauth2?: any + provider?: string + providerType?: string + email: string + userId?: string + forceResetPassword?: boolean +} + +export interface User extends ThirdPartyUser { tenantId: string email: string + userId?: string + forceResetPassword?:
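// Sketch of the per-provider entry the extracted OIDCInnerCfg interface
// above now types (previously inlined in OIDCConfig). All values are
// placeholders.
import { OIDCInnerCfg } from "@budibase/types"

const entry: OIDCInnerCfg = {
  configUrl: "https://login.example.com/.well-known/openid-configuration",
  clientID: "example-client-id",
  clientSecret: "example-secret",
  logo: "Oidc",
  name: "Example SSO",
  uuid: "uuid-0001",
  activated: true,
}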
boolean roles: UserRoles builder?: { global: boolean @@ -10,14 +39,14 @@ export interface User extends Document { admin?: { global: boolean } - providerType?: string password?: string status?: string createdAt?: number // override the default createdAt behaviour - users sdk historically set this to Date.now() userGroups?: string[] - forceResetPassword?: boolean dayPassRecordedAt?: string - userId?: string + account?: { + authType: string + } } export interface UserRoles { diff --git a/packages/types/src/sdk/context.ts b/packages/types/src/sdk/context.ts index 940ac5a0c3..b3403df8af 100644 --- a/packages/types/src/sdk/context.ts +++ b/packages/types/src/sdk/context.ts @@ -1,5 +1,5 @@ import { User, Account } from "../documents" -import { IdentityType } from "./events/identification" +import { IdentityType } from "./events" export interface BaseContext { _id: string diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index 200b25c501..84daed0f90 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -57,6 +57,7 @@ export type DatabaseQueryOpts = { descending?: boolean key?: string keys?: string[] + group?: boolean } export const isDocument = (doc: any): doc is Document => { diff --git a/packages/types/tsconfig.build.json b/packages/types/tsconfig.build.json index 9c92cdbd05..86f128c056 100644 --- a/packages/types/tsconfig.build.json +++ b/packages/types/tsconfig.build.json @@ -3,7 +3,6 @@ "target": "es6", "module": "commonjs", "lib": ["es2020"], - "allowJs": true, "strict": true, "noImplicitAny": true, "esModuleInterop": true, diff --git a/packages/worker/src/api/controllers/global/auth.ts b/packages/worker/src/api/controllers/global/auth.ts index 8d36024634..b76200c1f7 100644 --- a/packages/worker/src/api/controllers/global/auth.ts +++ b/packages/worker/src/api/controllers/global/auth.ts @@ -1,26 +1,30 @@ -import core from "@budibase/backend-core" import { - events, - users as usersCore, + auth, + constants, context, + db as dbCore, + events, tenancy, + users as usersCore, + utils, } from "@budibase/backend-core" -import { Config, EmailTemplatePurpose } from "../../../constants" -import { sendEmail, isEmailConfigured } from "../../../utilities/email" +import { EmailTemplatePurpose } from "../../../constants" +import { isEmailConfigured, sendEmail } from "../../../utilities/email" import { checkResetPasswordCode } from "../../../utilities/redis" import env from "../../../environment" import sdk from "../../../sdk" -import { User } from "@budibase/types" -const { setCookie, getCookie, clearCookie, hash, platformLogout } = core.utils -const { Cookie, Header } = core.constants -const { passport, ssoCallbackUrl, google, oidc } = core.auth +import { Config, ConfigType, User } from "@budibase/types" -export const googleCallbackUrl = async (config: any) => { - return ssoCallbackUrl(tenancy.getGlobalDB(), config, "google") +const { setCookie, getCookie, clearCookie, hash, platformLogout } = utils +const { Cookie, Header } = constants +const { passport, ssoCallbackUrl, google, oidc } = auth + +export async function googleCallbackUrl(config?: { callbackURL?: string }) { + return ssoCallbackUrl(tenancy.getGlobalDB(), config, ConfigType.GOOGLE) } -export const oidcCallbackUrl = async (config: any) => { - return ssoCallbackUrl(tenancy.getGlobalDB(), config, "oidc") +export async function oidcCallbackUrl(config?: { callbackURL?: string }) { + return ssoCallbackUrl(tenancy.getGlobalDB(), config, ConfigType.OIDC) } async function authInternal(ctx: any, user: 
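// The new `group` flag on DatabaseQueryOpts above mirrors CouchDB/PouchDB
// reduce grouping — a sketch of an options object the widened type now
// accepts; the view name in the trailing comment is illustrative.
import { DatabaseQueryOpts } from "@budibase/types"

const opts: DatabaseQueryOpts = {
  group: true, // group reduced results by key
  descending: false,
}
// e.g. db.query("database/by_link", opts) would now type-check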
any, err = null, info = null) { @@ -135,7 +139,7 @@ export const logout = async (ctx: any) => { export const datasourcePreAuth = async (ctx: any, next: any) => { const provider = ctx.params.provider - const middleware = require(`@budibase/backend-core/middleware`) + const { middleware } = require(`@budibase/backend-core`) const handler = middleware.datasource[provider] setCookie( @@ -154,7 +158,7 @@ export const datasourcePreAuth = async (ctx: any, next: any) => { export const datasourceAuth = async (ctx: any, next: any) => { const authStateCookie = getCookie(ctx, Cookie.DatasourceAuth) const provider = authStateCookie.provider - const middleware = require(`@budibase/backend-core/middleware`) + const { middleware } = require(`@budibase/backend-core`) const handler = middleware.datasource[provider] return handler.postAuth(passport, ctx, next) } @@ -166,11 +170,11 @@ export const datasourceAuth = async (ctx: any, next: any) => { export const googlePreAuth = async (ctx: any, next: any) => { const db = tenancy.getGlobalDB() - const config = await core.db.getScopedConfig(db, { - type: Config.GOOGLE, + const config = await dbCore.getScopedConfig(db, { + type: ConfigType.GOOGLE, workspace: ctx.query.workspace, }) - let callbackUrl = await exports.googleCallbackUrl(config) + let callbackUrl = await googleCallbackUrl(config) const strategy = await google.strategyFactory( config, callbackUrl, @@ -187,11 +191,11 @@ export const googlePreAuth = async (ctx: any, next: any) => { export const googleAuth = async (ctx: any, next: any) => { const db = tenancy.getGlobalDB() - const config = await core.db.getScopedConfig(db, { - type: Config.GOOGLE, + const config = await dbCore.getScopedConfig(db, { + type: ConfigType.GOOGLE, workspace: ctx.query.workspace, }) - const callbackUrl = await exports.googleCallbackUrl(config) + const callbackUrl = await googleCallbackUrl(config) const strategy = await google.strategyFactory( config, callbackUrl, @@ -213,13 +217,13 @@ export const googleAuth = async (ctx: any, next: any) => { export const oidcStrategyFactory = async (ctx: any, configId: any) => { const db = tenancy.getGlobalDB() - const config = await core.db.getScopedConfig(db, { - type: Config.OIDC, + const config = await dbCore.getScopedConfig(db, { + type: ConfigType.OIDC, group: ctx.query.group, }) const chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0] - let callbackUrl = await exports.oidcCallbackUrl(chosenConfig) + let callbackUrl = await oidcCallbackUrl(chosenConfig) //Remote Config const enrichedConfig = await oidc.fetchStrategyConfig( @@ -240,8 +244,8 @@ export const oidcPreAuth = async (ctx: any, next: any) => { setCookie(ctx, configId, Cookie.OIDC_CONFIG) const db = tenancy.getGlobalDB() - const config = await core.db.getScopedConfig(db, { - type: Config.OIDC, + const config = await dbCore.getScopedConfig(db, { + type: ConfigType.OIDC, group: ctx.query.group, }) diff --git a/packages/worker/src/api/controllers/global/configs.js b/packages/worker/src/api/controllers/global/configs.ts similarity index 57% rename from packages/worker/src/api/controllers/global/configs.js rename to packages/worker/src/api/controllers/global/configs.ts index cb8dc48c25..1c4e57f0e1 100644 --- a/packages/worker/src/api/controllers/global/configs.js +++ b/packages/worker/src/api/controllers/global/configs.ts @@ -1,28 +1,26 @@ -const { - generateConfigID, - getConfigParams, - getScopedFullConfig, - getAllApps, -} = require("@budibase/backend-core/db") -const { Config } = require("../../../constants") 
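This configs controller rewrite is representative of the import migration applied throughout the changeset: deep requires into backend-core submodules ("@budibase/backend-core/db", "/tenancy", "/cache", "/objectStore") are replaced by named imports from the package root, aliased where a name would otherwise clash (db as dbCore). As a minimal sketch of the new pattern, using a hypothetical helper that is not part of this diff:

import { db as dbCore, tenancy } from "@budibase/backend-core"
import { ConfigType } from "@budibase/types"

// hypothetical helper: fetch the most granular "settings" config through
// the root-level exports rather than a deep submodule require
async function fetchSettingsConfig() {
  const db = tenancy.getGlobalDB()
  return dbCore.getScopedFullConfig(db, { type: ConfigType.SETTINGS })
}
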
-const email = require("../../../utilities/email") -const { - upload, - ObjectStoreBuckets, -} = require("@budibase/backend-core/objectStore") -const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") -const env = require("../../../environment") -const { googleCallbackUrl, oidcCallbackUrl } = require("./auth") -const { - withCache, - CacheKeys, - bustCache, +import * as email from "../../../utilities/email" +import env from "../../../environment" +import { googleCallbackUrl, oidcCallbackUrl } from "./auth" +import { + events, cache, -} = require("@budibase/backend-core/cache") -const { events } = require("@budibase/backend-core") -const { checkAnyUserExists } = require("../../../utilities/users") + objectStore, + tenancy, + db as dbCore, +} from "@budibase/backend-core" +import { checkAnyUserExists } from "../../../utilities/users" +import { + Database, + Config as ConfigDoc, + ConfigType, + SSOType, + GoogleConfig, + OIDCConfig, + SettingsConfig, + BBContext, +} from "@budibase/types" -const getEventFns = async (db, config) => { +const getEventFns = async (db: Database, config: ConfigDoc) => { const fns = [] const type = config.type @@ -31,41 +29,45 @@ const getEventFns = async (db, config) => { existing = await db.get(config._id) } + const ssoType = type as SSOType if (!existing) { switch (config.type) { - case Config.SMTP: { + case ConfigType.SMTP: { fns.push(events.email.SMTPCreated) break } - case Config.GOOGLE: { - fns.push(() => events.auth.SSOCreated(type)) - if (config.config.activated) { - fns.push(() => events.auth.SSOActivated(type)) + case ConfigType.GOOGLE: { + const googleCfg = config as GoogleConfig + fns.push(() => events.auth.SSOCreated(ssoType)) + if (googleCfg.config.activated) { + fns.push(() => events.auth.SSOActivated(ssoType)) } break } - case Config.OIDC: { - fns.push(() => events.auth.SSOCreated(type)) - if (config.config.configs[0].activated) { - fns.push(() => events.auth.SSOActivated(type)) + case ConfigType.OIDC: { + const oidcCfg = config as OIDCConfig + fns.push(() => events.auth.SSOCreated(ssoType)) + if (oidcCfg.config.configs[0].activated) { + fns.push(() => events.auth.SSOActivated(ssoType)) } break } - case Config.SETTINGS: { + case ConfigType.SETTINGS: { // company - const company = config.config.company + const settingsCfg = config as SettingsConfig + const company = settingsCfg.config.company if (company && company !== "Budibase") { fns.push(events.org.nameUpdated) } // logo - const logoUrl = config.config.logoUrl + const logoUrl = settingsCfg.config.logoUrl if (logoUrl) { fns.push(events.org.logoUpdated) } // platform url - const platformUrl = config.config.platformUrl + const platformUrl = settingsCfg.config.platformUrl if ( platformUrl && platformUrl !== "http://localhost:10000" && @@ -78,52 +80,55 @@ const getEventFns = async (db, config) => { } } else { switch (config.type) { - case Config.SMTP: { + case ConfigType.SMTP: { fns.push(events.email.SMTPUpdated) break } - case Config.GOOGLE: { - fns.push(() => events.auth.SSOUpdated(type)) - if (!existing.config.activated && config.config.activated) { - fns.push(() => events.auth.SSOActivated(type)) - } else if (existing.config.activated && !config.config.activated) { - fns.push(() => events.auth.SSODeactivated(type)) + case ConfigType.GOOGLE: { + const googleCfg = config as GoogleConfig + fns.push(() => events.auth.SSOUpdated(ssoType)) + if (!existing.config.activated && googleCfg.config.activated) { + fns.push(() => events.auth.SSOActivated(ssoType)) + } else if 
(existing.config.activated && !googleCfg.config.activated) { + fns.push(() => events.auth.SSODeactivated(ssoType)) } break } - case Config.OIDC: { - fns.push(() => events.auth.SSOUpdated(type)) + case ConfigType.OIDC: { + const oidcCfg = config as OIDCConfig + fns.push(() => events.auth.SSOUpdated(ssoType)) if ( !existing.config.configs[0].activated && - config.config.configs[0].activated + oidcCfg.config.configs[0].activated ) { - fns.push(() => events.auth.SSOActivated(type)) + fns.push(() => events.auth.SSOActivated(ssoType)) } else if ( existing.config.configs[0].activated && - !config.config.configs[0].activated + !oidcCfg.config.configs[0].activated ) { - fns.push(() => events.auth.SSODeactivated(type)) + fns.push(() => events.auth.SSODeactivated(ssoType)) } break } - case Config.SETTINGS: { + case ConfigType.SETTINGS: { // company + const settingsCfg = config as SettingsConfig const existingCompany = existing.config.company - const company = config.config.company + const company = settingsCfg.config.company if (company && company !== "Budibase" && existingCompany !== company) { fns.push(events.org.nameUpdated) } // logo const existingLogoUrl = existing.config.logoUrl - const logoUrl = config.config.logoUrl + const logoUrl = settingsCfg.config.logoUrl if (logoUrl && existingLogoUrl !== logoUrl) { fns.push(events.org.logoUpdated) } // platform url const existingPlatformUrl = existing.config.platformUrl - const platformUrl = config.config.platformUrl + const platformUrl = settingsCfg.config.platformUrl if ( platformUrl && platformUrl !== "http://localhost:10000" && @@ -140,13 +145,13 @@ const getEventFns = async (db, config) => { return fns } -exports.save = async function (ctx) { - const db = getGlobalDB() +export async function save(ctx: BBContext) { + const db = tenancy.getGlobalDB() const { type, workspace, user, config } = ctx.request.body let eventFns = await getEventFns(db, ctx.request.body) // Config does not exist yet if (!ctx.request.body._id) { - ctx.request.body._id = generateConfigID({ + ctx.request.body._id = dbCore.generateConfigID({ type, workspace, user, @@ -155,18 +160,18 @@ exports.save = async function (ctx) { try { // verify the configuration switch (type) { - case Config.SMTP: + case ConfigType.SMTP: await email.verifyConfig(config) break } - } catch (err) { + } catch (err: any) { ctx.throw(400, err) } try { const response = await db.put(ctx.request.body) - await bustCache(CacheKeys.CHECKLIST) - await bustCache(CacheKeys.ANALYTICS_ENABLED) + await cache.bustCache(cache.CacheKey.CHECKLIST) + await cache.bustCache(cache.CacheKey.ANALYTICS_ENABLED) for (const fn of eventFns) { await fn() @@ -177,15 +182,15 @@ exports.save = async function (ctx) { _id: response.id, _rev: response.rev, } - } catch (err) { + } catch (err: any) { ctx.throw(400, err) } } -exports.fetch = async function (ctx) { - const db = getGlobalDB() +export async function fetch(ctx: BBContext) { + const db = tenancy.getGlobalDB() const response = await db.allDocs( - getConfigParams( + dbCore.getConfigParams( { type: ctx.params.type }, { include_docs: true, @@ -199,23 +204,23 @@ exports.fetch = async function (ctx) { * Gets the most granular config for a particular configuration type. * The hierarchy is type -> workspace -> user. 
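 *
 * For illustration only (hypothetical IDs, not part of this changeset), a
 * user-scoped lookup that falls back to workspace level and then type level:
 *
 * @example
 * const config = await dbCore.getScopedFullConfig(db, {
 *   type: ctx.params.type,
 *   user: "us_1234",      // hypothetical user ID
 *   workspace: "ws_abcd", // hypothetical workspace ID
 * })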
*/ -exports.find = async function (ctx) { - const db = getGlobalDB() +export async function find(ctx: BBContext) { + const db = tenancy.getGlobalDB() const { userId, workspaceId } = ctx.query if (workspaceId && userId) { - const workspace = await db.get(workspaceId) + const workspace = await db.get(workspaceId as string) const userInWorkspace = workspace.users.some( - workspaceUser => workspaceUser === userId + (workspaceUser: any) => workspaceUser === userId ) - if (!ctx.user.admin && !userInWorkspace) { + if (!ctx.user!.admin && !userInWorkspace) { ctx.throw(400, `User is not in specified workspace: ${workspace}.`) } } try { // Find the config with the most granular scope based on context - const scopedConfig = await getScopedFullConfig(db, { + const scopedConfig = await dbCore.getScopedFullConfig(db, { type: ctx.params.type, user: userId, workspace: workspaceId, @@ -227,48 +232,48 @@ exports.find = async function (ctx) { // don't throw an error, there simply is nothing to return ctx.body = {} } - } catch (err) { - ctx.throw(err.status, err) + } catch (err: any) { + ctx.throw(err?.status || 400, err) } } -exports.publicOidc = async function (ctx) { - const db = getGlobalDB() +export async function publicOidc(ctx: BBContext) { + const db = tenancy.getGlobalDB() try { // Find the config with the most granular scope based on context - const oidcConfig = await getScopedFullConfig(db, { - type: Config.OIDC, + const oidcConfig = await dbCore.getScopedFullConfig(db, { + type: ConfigType.OIDC, }) if (!oidcConfig) { ctx.body = {} } else { - ctx.body = oidcConfig.config.configs.map(config => ({ + ctx.body = oidcConfig.config.configs.map((config: any) => ({ logo: config.logo, name: config.name, uuid: config.uuid, })) } - } catch (err) { + } catch (err: any) { ctx.throw(err.status, err) } } -exports.publicSettings = async function (ctx) { - const db = getGlobalDB() +export async function publicSettings(ctx: BBContext) { + const db = tenancy.getGlobalDB() try { // Find the config with the most granular scope based on context - const publicConfig = await getScopedFullConfig(db, { - type: Config.SETTINGS, + const publicConfig = await dbCore.getScopedFullConfig(db, { + type: ConfigType.SETTINGS, }) - const googleConfig = await getScopedFullConfig(db, { - type: Config.GOOGLE, + const googleConfig = await dbCore.getScopedFullConfig(db, { + type: ConfigType.GOOGLE, }) - const oidcConfig = await getScopedFullConfig(db, { - type: Config.OIDC, + const oidcConfig = await dbCore.getScopedFullConfig(db, { + type: ConfigType.OIDC, }) let config @@ -301,12 +306,12 @@ exports.publicSettings = async function (ctx) { } ctx.body = config - } catch (err) { + } catch (err: any) { ctx.throw(err.status, err) } } -exports.upload = async function (ctx) { +export async function upload(ctx: BBContext) { if (ctx.request.files == null || ctx.request.files.file.length > 1) { ctx.throw(400, "One file must be uploaded.") } @@ -315,19 +320,19 @@ exports.upload = async function (ctx) { let bucket if (env.SELF_HOSTED) { - bucket = ObjectStoreBuckets.GLOBAL + bucket = objectStore.ObjectStoreBuckets.GLOBAL } else { - bucket = ObjectStoreBuckets.GLOBAL_CLOUD + bucket = objectStore.ObjectStoreBuckets.GLOBAL_CLOUD } let key if (env.MULTI_TENANCY) { - key = `${getTenantId()}/${type}/${name}` + key = `${tenancy.getTenantId()}/${type}/${name}` } else { key = `${type}/${name}` } - await upload({ + await objectStore.upload({ bucket, filename: key, path: file.path, @@ -336,11 +341,11 @@ exports.upload = async function (ctx) { // add to 
configuration structure // TODO: right now this only does a global level - const db = getGlobalDB() - let cfgStructure = await getScopedFullConfig(db, { type }) + const db = tenancy.getGlobalDB() + let cfgStructure = await dbCore.getScopedFullConfig(db, { type }) if (!cfgStructure) { cfgStructure = { - _id: generateConfigID({ type }), + _id: dbCore.generateConfigID({ type }), config: {}, } } @@ -361,49 +366,49 @@ exports.upload = async function (ctx) { } } -exports.destroy = async function (ctx) { - const db = getGlobalDB() +export async function destroy(ctx: BBContext) { + const db = tenancy.getGlobalDB() const { id, rev } = ctx.params try { await db.remove(id, rev) - await cache.delete(CacheKeys.CHECKLIST) + await cache.destroy(cache.CacheKey.CHECKLIST) ctx.body = { message: "Config deleted successfully" } - } catch (err) { + } catch (err: any) { ctx.throw(err.status, err) } } -exports.configChecklist = async function (ctx) { - const db = getGlobalDB() - const tenantId = getTenantId() +export async function configChecklist(ctx: BBContext) { + const db = tenancy.getGlobalDB() + const tenantId = tenancy.getTenantId() try { - ctx.body = await withCache( - CacheKeys.CHECKLIST, + ctx.body = await cache.withCache( + cache.CacheKey.CHECKLIST, env.CHECKLIST_CACHE_TTL, async () => { let apps = [] if (!env.MULTI_TENANCY || tenantId) { // Apps exist - apps = await getAllApps({ idsOnly: true, efficient: true }) + apps = await dbCore.getAllApps({ idsOnly: true, efficient: true }) } // They have set up SMTP - const smtpConfig = await getScopedFullConfig(db, { - type: Config.SMTP, + const smtpConfig = await dbCore.getScopedFullConfig(db, { + type: ConfigType.SMTP, }) // They have set up Google Auth - const googleConfig = await getScopedFullConfig(db, { - type: Config.GOOGLE, + const googleConfig = await dbCore.getScopedFullConfig(db, { + type: ConfigType.GOOGLE, }) // They have set up OIDC - const oidcConfig = await getScopedFullConfig(db, { - type: Config.OIDC, + const oidcConfig = await dbCore.getScopedFullConfig(db, { + type: ConfigType.OIDC, }) - // They have set up an global user + // They have set up a global user const userExists = await checkAnyUserExists() return { apps: { @@ -429,7 +434,7 @@ exports.configChecklist = async function (ctx) { } } ) - } catch (err) { + } catch (err: any) { ctx.throw(err.status, err) } } diff --git a/packages/worker/src/api/controllers/global/email.js b/packages/worker/src/api/controllers/global/email.ts similarity index 56% rename from packages/worker/src/api/controllers/global/email.js rename to packages/worker/src/api/controllers/global/email.ts index 85e39be0da..f5acad9a66 100644 --- a/packages/worker/src/api/controllers/global/email.js +++ b/packages/worker/src/api/controllers/global/email.ts @@ -1,7 +1,8 @@ -const { sendEmail } = require("../../../utilities/email") -const { getGlobalDB } = require("@budibase/backend-core/tenancy") +import { sendEmail as sendEmailFn } from "../../../utilities/email" +import { tenancy } from "@budibase/backend-core" +import { BBContext } from "@budibase/types" -exports.sendEmail = async ctx => { +export async function sendEmail(ctx: BBContext) { let { workspaceId, email, @@ -16,10 +17,10 @@ exports.sendEmail = async ctx => { } = ctx.request.body let user if (userId) { - const db = getGlobalDB() + const db = tenancy.getGlobalDB() user = await db.get(userId) } - const response = await sendEmail(email, purpose, { + const response = await sendEmailFn(email, purpose, { workspaceId, user, contents, diff --git 
a/packages/worker/src/api/controllers/global/roles.js b/packages/worker/src/api/controllers/global/roles.js deleted file mode 100644 index d63f1a62b5..0000000000 --- a/packages/worker/src/api/controllers/global/roles.js +++ /dev/null @@ -1,68 +0,0 @@ -const { getAllRoles } = require("@budibase/backend-core/roles") -const { - getAllApps, - getProdAppID, - getDevAppID, - DocumentType, -} = require("@budibase/backend-core/db") -const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") -const { user: userCache } = require("@budibase/backend-core/cache") -const { getGlobalDB } = require("@budibase/backend-core/tenancy") -const { allUsers } = require("../../../sdk/users") - -exports.fetch = async ctx => { - const tenantId = ctx.user.tenantId - // always use the dev apps as they'll be most up to date (true) - const apps = await getAllApps({ tenantId, all: true }) - const promises = [] - for (let app of apps) { - // use dev app IDs - promises.push(getAllRoles(app.appId)) - } - const roles = await Promise.all(promises) - const response = {} - for (let app of apps) { - const deployedAppId = getProdAppID(app.appId) - response[deployedAppId] = { - roles: roles.shift(), - name: app.name, - version: app.version, - url: app.url, - } - } - ctx.body = response -} - -exports.find = async ctx => { - const appId = ctx.params.appId - await doInAppContext(getDevAppID(appId), async () => { - const db = getAppDB() - const app = await db.get(DocumentType.APP_METADATA) - ctx.body = { - roles: await getAllRoles(), - name: app.name, - version: app.version, - url: app.url, - } - }) -} - -exports.removeAppRole = async ctx => { - const { appId } = ctx.params - const db = getGlobalDB() - const users = await allUsers(ctx) - const bulk = [] - const cacheInvalidations = [] - for (let user of users) { - if (user.roles[appId]) { - cacheInvalidations.push(userCache.invalidateUser(user._id)) - delete user.roles[appId] - bulk.push(user) - } - } - await db.bulkDocs(bulk) - await Promise.all(cacheInvalidations) - ctx.body = { - message: "App role removed from all users", - } -} diff --git a/packages/worker/src/api/controllers/global/roles.ts b/packages/worker/src/api/controllers/global/roles.ts new file mode 100644 index 0000000000..e0cb52e1b0 --- /dev/null +++ b/packages/worker/src/api/controllers/global/roles.ts @@ -0,0 +1,66 @@ +import { + db as dbCore, + roles, + context, + cache, + tenancy, +} from "@budibase/backend-core" +import { BBContext, App } from "@budibase/types" +import { allUsers } from "../../../sdk/users" + +export async function fetch(ctx: BBContext) { + const tenantId = ctx.user!.tenantId + // always use the dev apps as they'll be most up to date (true) + const apps = (await dbCore.getAllApps({ tenantId, all: true })) as App[] + const promises = [] + for (let app of apps) { + // use dev app IDs + promises.push(roles.getAllRoles(app.appId)) + } + const roleList = await Promise.all(promises) + const response: any = {} + for (let app of apps) { + const deployedAppId = dbCore.getProdAppID(app.appId) + response[deployedAppId] = { + roles: roleList.shift(), + name: app.name, + version: app.version, + url: app.url, + } + } + ctx.body = response +} + +export async function find(ctx: BBContext) { + const appId = ctx.params.appId + await context.doInAppContext(dbCore.getDevAppID(appId), async () => { + const db = context.getAppDB() + const app = await db.get(dbCore.DocumentType.APP_METADATA) + ctx.body = { + roles: await roles.getAllRoles(), + name: app.name, + version: app.version, + url: app.url, + 
} + }) +} + +export async function removeAppRole(ctx: BBContext) { + const { appId } = ctx.params + const db = tenancy.getGlobalDB() + const users = await allUsers() + const bulk = [] + const cacheInvalidations = [] + for (let user of users) { + if (user.roles[appId]) { + cacheInvalidations.push(cache.user.invalidateUser(user._id)) + delete user.roles[appId] + bulk.push(user) + } + } + await db.bulkDocs(bulk) + await Promise.all(cacheInvalidations) + ctx.body = { + message: "App role removed from all users", + } +} diff --git a/packages/worker/src/api/controllers/global/users.ts b/packages/worker/src/api/controllers/global/users.ts index 7edb1b710a..e913ccee88 100644 --- a/packages/worker/src/api/controllers/global/users.ts +++ b/packages/worker/src/api/controllers/global/users.ts @@ -104,7 +104,7 @@ export const adminUser = async (ctx: any) => { try { // always bust checklist beforehand, if an error occurs but can proceed, don't get // stuck in a cycle - await cache.bustCache(cache.CacheKeys.CHECKLIST) + await cache.bustCache(cache.CacheKey.CHECKLIST) const finalUser = await sdk.users.save(user, { hashPassword, requirePassword, diff --git a/packages/worker/src/api/controllers/global/workspaces.js b/packages/worker/src/api/controllers/global/workspaces.ts similarity index 52% rename from packages/worker/src/api/controllers/global/workspaces.js rename to packages/worker/src/api/controllers/global/workspaces.ts index d5d1037d9e..cf6486fec9 100644 --- a/packages/worker/src/api/controllers/global/workspaces.js +++ b/packages/worker/src/api/controllers/global/workspaces.ts @@ -1,16 +1,13 @@ -const { - getWorkspaceParams, - generateWorkspaceID, -} = require("@budibase/backend-core/db") -const { getGlobalDB } = require("@budibase/backend-core/tenancy") +import { tenancy, db as dbCore } from "@budibase/backend-core" +import { BBContext } from "@budibase/types" -exports.save = async function (ctx) { - const db = getGlobalDB() +export async function save(ctx: BBContext) { + const db = tenancy.getGlobalDB() const workspaceDoc = ctx.request.body // workspace does not exist yet if (!workspaceDoc._id) { - workspaceDoc._id = generateWorkspaceID() + workspaceDoc._id = dbCore.generateWorkspaceID() } try { @@ -19,38 +16,38 @@ exports.save = async function (ctx) { _id: response.id, _rev: response.rev, } - } catch (err) { + } catch (err: any) { ctx.throw(err.status, err) } } -exports.fetch = async function (ctx) { - const db = getGlobalDB() +export async function fetch(ctx: BBContext) { + const db = tenancy.getGlobalDB() const response = await db.allDocs( - getWorkspaceParams(undefined, { + dbCore.getWorkspaceParams(undefined, { include_docs: true, }) ) ctx.body = response.rows.map(row => row.doc) } -exports.find = async function (ctx) { - const db = getGlobalDB() +export async function find(ctx: BBContext) { + const db = tenancy.getGlobalDB() try { ctx.body = await db.get(ctx.params.id) - } catch (err) { + } catch (err: any) { ctx.throw(err.status, err) } } -exports.destroy = async function (ctx) { - const db = getGlobalDB() +export async function destroy(ctx: BBContext) { + const db = tenancy.getGlobalDB() const { id, rev } = ctx.params try { await db.remove(id, rev) ctx.body = { message: "Workspace deleted successfully" } - } catch (err) { + } catch (err: any) { ctx.throw(err.status, err) } } diff --git a/packages/worker/src/api/controllers/system/restore.ts b/packages/worker/src/api/controllers/system/restore.ts index 96a7c61cb4..def6d58a5d 100644 --- 
a/packages/worker/src/api/controllers/system/restore.ts +++ b/packages/worker/src/api/controllers/system/restore.ts @@ -6,7 +6,7 @@ export async function systemRestored(ctx: BBContext) { if (!env.SELF_HOSTED) { ctx.throw(405, "This operation is not allowed in cloud.") } - await cache.bustCache(cache.CacheKeys.CHECKLIST) + await cache.bustCache(cache.CacheKey.CHECKLIST) ctx.body = { message: "System prepared after restore.", } diff --git a/packages/worker/src/api/index.ts b/packages/worker/src/api/index.ts index 9a32792691..e37c6c2d94 100644 --- a/packages/worker/src/api/index.ts +++ b/packages/worker/src/api/index.ts @@ -104,7 +104,7 @@ const NO_TENANCY_ENDPOINTS = [ // add them all to be safe const NO_CSRF_ENDPOINTS = [...PUBLIC_ENDPOINTS] -const router = new Router() +const router: Router = new Router() router .use( compress({ @@ -163,4 +163,4 @@ for (let route of routes) { router.use(route.allowedMethods()) } -module.exports = router +export default router diff --git a/packages/worker/src/api/routes/global/auth.js b/packages/worker/src/api/routes/global/auth.ts similarity index 85% rename from packages/worker/src/api/routes/global/auth.js rename to packages/worker/src/api/routes/global/auth.ts index 2bf6bb68bf..05c799beb0 100644 --- a/packages/worker/src/api/routes/global/auth.js +++ b/packages/worker/src/api/routes/global/auth.ts @@ -1,13 +1,13 @@ -const Router = require("@koa/router") -const authController = require("../../controllers/global/auth") -const { joiValidator } = require("@budibase/backend-core/auth") -const Joi = require("joi") +import Router from "@koa/router" +import * as authController from "../../controllers/global/auth" +import { auth } from "@budibase/backend-core" +import Joi from "joi" -const router = new Router() +const router: Router = new Router() function buildAuthValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ username: Joi.string().required(), password: Joi.string().required(), }).required().unknown(false)) @@ -15,14 +15,14 @@ function buildAuthValidation() { function buildResetValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ email: Joi.string().required(), }).required().unknown(false)) } function buildResetUpdateValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ resetCode: Joi.string().required(), password: Joi.string().required(), }).required().unknown(false)) @@ -85,4 +85,4 @@ router .get("/api/global/auth/oidc/callback", authController.oidcAuth) .get("/api/admin/auth/oidc/callback", authController.oidcAuth) -module.exports = router +export = router diff --git a/packages/worker/src/api/routes/global/configs.js b/packages/worker/src/api/routes/global/configs.ts similarity index 70% rename from packages/worker/src/api/routes/global/configs.js rename to packages/worker/src/api/routes/global/configs.ts index fe8b1f97af..a4794abcc6 100644 --- a/packages/worker/src/api/routes/global/configs.js +++ b/packages/worker/src/api/routes/global/configs.ts @@ -1,11 +1,10 @@ -const Router = require("@koa/router") -const controller = require("../../controllers/global/configs") -const { joiValidator } = require("@budibase/backend-core/auth") -const { adminOnly } = require("@budibase/backend-core/auth") -const Joi = require("joi") -const { Config } = require("@budibase/backend-core/constants") +import Router from "@koa/router" +import * as controller from 
"../../controllers/global/configs" +import { auth } from "@budibase/backend-core" +import Joi from "joi" +import { ConfigType } from "@budibase/types" -const router = new Router() +const router: Router = new Router() function smtpValidation() { // prettier-ignore @@ -55,27 +54,27 @@ function oidcValidation() { activated: Joi.boolean().required(), scopes: Joi.array().optional() }) - ).required(true) + ).required() }).unknown(true) } function buildConfigSaveValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ _id: Joi.string().optional(), _rev: Joi.string().optional(), workspace: Joi.string().optional(), - type: Joi.string().valid(...Object.values(Config)).required(), + type: Joi.string().valid(...Object.values(ConfigType)).required(), createdAt: Joi.string().optional(), updatedAt: Joi.string().optional(), config: Joi.alternatives() .conditional("type", { switch: [ - { is: Config.SMTP, then: smtpValidation() }, - { is: Config.SETTINGS, then: settingValidation() }, - { is: Config.ACCOUNT, then: Joi.object().unknown(true) }, - { is: Config.GOOGLE, then: googleValidation() }, - { is: Config.OIDC, then: oidcValidation() } + { is: ConfigType.SMTP, then: smtpValidation() }, + { is: ConfigType.SETTINGS, then: settingValidation() }, + { is: ConfigType.ACCOUNT, then: Joi.object().unknown(true) }, + { is: ConfigType.GOOGLE, then: googleValidation() }, + { is: ConfigType.OIDC, then: oidcValidation() } ], }), }).required().unknown(true), @@ -84,27 +83,27 @@ function buildConfigSaveValidation() { function buildUploadValidation() { // prettier-ignore - return joiValidator.params(Joi.object({ - type: Joi.string().valid(...Object.values(Config)).required(), + return auth.joiValidator.params(Joi.object({ + type: Joi.string().valid(...Object.values(ConfigType)).required(), name: Joi.string().required(), }).required().unknown(true)) } function buildConfigGetValidation() { // prettier-ignore - return joiValidator.params(Joi.object({ - type: Joi.string().valid(...Object.values(Config)).required() + return auth.joiValidator.params(Joi.object({ + type: Joi.string().valid(...Object.values(ConfigType)).required() }).required().unknown(true)) } router .post( "/api/global/configs", - adminOnly, + auth.adminOnly, buildConfigSaveValidation(), controller.save ) - .delete("/api/global/configs/:id/:rev", adminOnly, controller.destroy) + .delete("/api/global/configs/:id/:rev", auth.adminOnly, controller.destroy) .get("/api/global/configs", controller.fetch) .get("/api/global/configs/checklist", controller.configChecklist) .get( @@ -117,9 +116,9 @@ router .get("/api/global/configs/:type", buildConfigGetValidation(), controller.find) .post( "/api/global/configs/upload/:type/:name", - adminOnly, + auth.adminOnly, buildUploadValidation(), controller.upload ) -module.exports = router +export = router diff --git a/packages/worker/src/api/routes/global/email.js b/packages/worker/src/api/routes/global/email.ts similarity index 60% rename from packages/worker/src/api/routes/global/email.js rename to packages/worker/src/api/routes/global/email.ts index 962aea8d14..b49e5b5db2 100644 --- a/packages/worker/src/api/routes/global/email.js +++ b/packages/worker/src/api/routes/global/email.ts @@ -1,15 +1,14 @@ -const Router = require("@koa/router") -const controller = require("../../controllers/global/email") -const { EmailTemplatePurpose } = require("../../../constants") -const { joiValidator } = require("@budibase/backend-core/auth") -const { adminOnly } = 
require("@budibase/backend-core/auth") -const Joi = require("joi") +import Router from "@koa/router" +import * as controller from "../../controllers/global/email" +import { EmailTemplatePurpose } from "../../../constants" +import { auth } from "@budibase/backend-core" +import Joi from "joi" -const router = new Router() +const router: Router = new Router() function buildEmailSendValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ email: Joi.string().email({ multiple: true, }), @@ -30,8 +29,8 @@ function buildEmailSendValidation() { router.post( "/api/global/email/send", buildEmailSendValidation(), - adminOnly, + auth.adminOnly, controller.sendEmail ) -module.exports = router +export = router diff --git a/packages/worker/src/api/routes/global/license.ts b/packages/worker/src/api/routes/global/license.ts index 03908e052b..17b0d59e70 100644 --- a/packages/worker/src/api/routes/global/license.ts +++ b/packages/worker/src/api/routes/global/license.ts @@ -1,7 +1,7 @@ import Router from "@koa/router" import * as controller from "../../controllers/global/license" -const router = new Router() +const router: Router = new Router() router .post("/api/global/license/activate", controller.activate) diff --git a/packages/worker/src/api/routes/global/roles.js b/packages/worker/src/api/routes/global/roles.js deleted file mode 100644 index da7d5405ad..0000000000 --- a/packages/worker/src/api/routes/global/roles.js +++ /dev/null @@ -1,12 +0,0 @@ -const Router = require("@koa/router") -const controller = require("../../controllers/global/roles") -const { builderOrAdmin } = require("@budibase/backend-core/auth") - -const router = new Router() - -router - .get("/api/global/roles", builderOrAdmin, controller.fetch) - .get("/api/global/roles/:appId", builderOrAdmin, controller.find) - .delete("/api/global/roles/:appId", builderOrAdmin, controller.removeAppRole) - -module.exports = router diff --git a/packages/worker/src/api/routes/global/roles.ts b/packages/worker/src/api/routes/global/roles.ts new file mode 100644 index 0000000000..249d1b7076 --- /dev/null +++ b/packages/worker/src/api/routes/global/roles.ts @@ -0,0 +1,16 @@ +import Router from "@koa/router" +import * as controller from "../../controllers/global/roles" +import { auth } from "@budibase/backend-core" + +const router: Router = new Router() + +router + .get("/api/global/roles", auth.builderOrAdmin, controller.fetch) + .get("/api/global/roles/:appId", auth.builderOrAdmin, controller.find) + .delete( + "/api/global/roles/:appId", + auth.builderOrAdmin, + controller.removeAppRole + ) + +export = router diff --git a/packages/worker/src/api/routes/global/self.ts b/packages/worker/src/api/routes/global/self.ts index 4b52225783..bb7828c09d 100644 --- a/packages/worker/src/api/routes/global/self.ts +++ b/packages/worker/src/api/routes/global/self.ts @@ -3,7 +3,7 @@ import * as controller from "../../controllers/global/self" import { auth } from "@budibase/backend-core" import { users } from "../validation" -const router = new Router() +const router: Router = new Router() router .post("/api/global/self/api_key", auth.builderOnly, controller.generateAPIKey) @@ -15,4 +15,4 @@ router controller.updateSelf ) -export default router as any +export = router diff --git a/packages/worker/src/api/routes/global/templates.ts b/packages/worker/src/api/routes/global/templates.ts index 40600ce9aa..a45e244a0b 100644 --- a/packages/worker/src/api/routes/global/templates.ts +++ 
b/packages/worker/src/api/routes/global/templates.ts @@ -5,7 +5,7 @@ import { auth as authCore } from "@budibase/backend-core" import Joi from "joi" const { adminOnly, joiValidator } = authCore -const router = new Router() +const router: Router = new Router() function buildTemplateSaveValidation() { // prettier-ignore @@ -34,4 +34,4 @@ router .get("/api/global/template/:id", controller.find) .delete("/api/global/template/:id/:rev", adminOnly, controller.destroy) -export default router +export = router diff --git a/packages/worker/src/api/routes/global/users.js b/packages/worker/src/api/routes/global/users.ts similarity index 61% rename from packages/worker/src/api/routes/global/users.js rename to packages/worker/src/api/routes/global/users.ts index 2d9b1d9ac9..cbaba67ba3 100644 --- a/packages/worker/src/api/routes/global/users.js +++ b/packages/worker/src/api/routes/global/users.ts @@ -1,17 +1,15 @@ -const Router = require("@koa/router") -const controller = require("../../controllers/global/users") -const { joiValidator } = require("@budibase/backend-core/auth") -const { adminOnly } = require("@budibase/backend-core/auth") -const Joi = require("joi") -const cloudRestricted = require("../../../middleware/cloudRestricted") -const { users } = require("../validation") -const selfController = require("../../controllers/global/self") -const { builderOrAdmin } = require("@budibase/backend-core/auth") +import Router from "@koa/router" +import * as controller from "../../controllers/global/users" +import { auth } from "@budibase/backend-core" +import Joi from "joi" +import cloudRestricted from "../../../middleware/cloudRestricted" +import { users } from "../validation" +import * as selfController from "../../controllers/global/self" -const router = new Router() +const router: Router = new Router() function buildAdminInitValidation() { - return joiValidator.body( + return auth.joiValidator.body( Joi.object({ email: Joi.string().required(), password: Joi.string(), @@ -24,7 +22,7 @@ function buildAdminInitValidation() { function buildInviteValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ email: Joi.string().required(), userInfo: Joi.object().optional(), }).required()) @@ -32,7 +30,7 @@ function buildInviteValidation() { function buildInviteMultipleValidation() { // prettier-ignore - return joiValidator.body(Joi.array().required().items( + return auth.joiValidator.body(Joi.array().required().items( Joi.object({ email: Joi.string(), userInfo: Joi.object().optional(), @@ -42,7 +40,7 @@ function buildInviteMultipleValidation() { function buildInviteAcceptValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ inviteCode: Joi.string().required(), password: Joi.string().required(), }).required().unknown(true)) @@ -51,31 +49,35 @@ function buildInviteAcceptValidation() { router .post( "/api/global/users", - adminOnly, + auth.adminOnly, users.buildUserSaveValidation(), controller.save ) .post( "/api/global/users/bulk", - adminOnly, + auth.adminOnly, users.buildUserBulkUserValidation(), controller.bulkUpdate ) - .get("/api/global/users", builderOrAdmin, controller.fetch) - .post("/api/global/users/search", builderOrAdmin, controller.search) - .delete("/api/global/users/:id", adminOnly, controller.destroy) - .get("/api/global/users/count/:appId", builderOrAdmin, controller.countByApp) + .get("/api/global/users", auth.builderOrAdmin, controller.fetch) + 
.post("/api/global/users/search", auth.builderOrAdmin, controller.search) + .delete("/api/global/users/:id", auth.adminOnly, controller.destroy) + .get( + "/api/global/users/count/:appId", + auth.builderOrAdmin, + controller.countByApp + ) .get("/api/global/roles/:appId") .post( "/api/global/users/invite", - adminOnly, + auth.adminOnly, buildInviteValidation(), controller.invite ) .post( "/api/global/users/multi/invite", - adminOnly, + auth.adminOnly, buildInviteMultipleValidation(), controller.inviteMultiple ) @@ -94,7 +96,7 @@ router ) .get("/api/global/users/tenant/:id", controller.tenantUserLookup) // global endpoint but needs to come at end (blocks other endpoints otherwise) - .get("/api/global/users/:id", builderOrAdmin, controller.find) + .get("/api/global/users/:id", auth.builderOrAdmin, controller.find) // DEPRECATED - use new versions with self API .get("/api/global/users/self", selfController.getSelf) .post( @@ -103,4 +105,4 @@ router selfController.updateSelf ) -module.exports = router +export = router diff --git a/packages/worker/src/api/routes/global/workspaces.js b/packages/worker/src/api/routes/global/workspaces.ts similarity index 61% rename from packages/worker/src/api/routes/global/workspaces.js rename to packages/worker/src/api/routes/global/workspaces.ts index c0e172cd8d..82d4af9230 100644 --- a/packages/worker/src/api/routes/global/workspaces.js +++ b/packages/worker/src/api/routes/global/workspaces.ts @@ -1,14 +1,13 @@ -const Router = require("@koa/router") -const controller = require("../../controllers/global/workspaces") -const { joiValidator } = require("@budibase/backend-core/auth") -const { adminOnly } = require("@budibase/backend-core/auth") -const Joi = require("joi") +import Router from "@koa/router" +import * as controller from "../../controllers/global/workspaces" +import { auth } from "@budibase/backend-core" +import Joi from "joi" -const router = new Router() +const router: Router = new Router() function buildWorkspaceSaveValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ + return auth.joiValidator.body(Joi.object({ _id: Joi.string().optional(), _rev: Joi.string().optional(), name: Joi.string().required(), @@ -27,12 +26,12 @@ function buildWorkspaceSaveValidation() { router .post( "/api/global/workspaces", - adminOnly, + auth.adminOnly, buildWorkspaceSaveValidation(), controller.save ) - .delete("/api/global/workspaces/:id", adminOnly, controller.destroy) + .delete("/api/global/workspaces/:id", auth.adminOnly, controller.destroy) .get("/api/global/workspaces", controller.fetch) .get("/api/global/workspaces/:id", controller.find) -module.exports = router +export = router diff --git a/packages/worker/src/api/routes/index.ts b/packages/worker/src/api/routes/index.ts index 0c107aae26..f0d4911771 100644 --- a/packages/worker/src/api/routes/index.ts +++ b/packages/worker/src/api/routes/index.ts @@ -1,3 +1,4 @@ +import Router from "@koa/router" import { api } from "@budibase/pro" import userRoutes from "./global/users" import configRoutes from "./global/configs" @@ -16,7 +17,7 @@ import accountRoutes from "./system/accounts" import restoreRoutes from "./system/restore" let userGroupRoutes = api.groups -export const routes = [ +export const routes: Router[] = [ configRoutes, userRoutes, workspaceRoutes, diff --git a/packages/worker/src/api/routes/system/accounts.ts b/packages/worker/src/api/routes/system/accounts.ts index 61a46ae437..a5996f3934 100644 --- a/packages/worker/src/api/routes/system/accounts.ts +++ 
b/packages/worker/src/api/routes/system/accounts.ts @@ -2,7 +2,7 @@ import Router from "@koa/router" import * as controller from "../../controllers/system/accounts" import { middleware } from "@budibase/backend-core" -const router = new Router() +const router: Router = new Router() router .put( diff --git a/packages/worker/src/api/routes/system/environment.ts b/packages/worker/src/api/routes/system/environment.ts index 360ec7ed84..841ec33c82 100644 --- a/packages/worker/src/api/routes/system/environment.ts +++ b/packages/worker/src/api/routes/system/environment.ts @@ -1,8 +1,8 @@ import Router from "@koa/router" import * as controller from "../../controllers/system/environment" -const router = new Router() +const router: Router = new Router() router.get("/api/system/environment", controller.fetch) -export default router +export = router diff --git a/packages/worker/src/api/routes/system/migrations.ts b/packages/worker/src/api/routes/system/migrations.ts index 5dcf90c4de..958de6bb60 100644 --- a/packages/worker/src/api/routes/system/migrations.ts +++ b/packages/worker/src/api/routes/system/migrations.ts @@ -2,7 +2,7 @@ import Router from "@koa/router" import * as migrationsController from "../../controllers/system/migrations" import { auth } from "@budibase/backend-core" -const router = new Router() +const router: Router = new Router() router .post( diff --git a/packages/worker/src/api/routes/system/restore.ts b/packages/worker/src/api/routes/system/restore.ts index ee4bee091d..e348715405 100644 --- a/packages/worker/src/api/routes/system/restore.ts +++ b/packages/worker/src/api/routes/system/restore.ts @@ -1,7 +1,7 @@ import * as controller from "../../controllers/system/restore" import Router from "@koa/router" -const router = new Router() +const router: Router = new Router() router.post("/api/system/restored", controller.systemRestored) diff --git a/packages/worker/src/api/routes/system/status.ts b/packages/worker/src/api/routes/system/status.ts index a5b393b421..7ae6d57699 100644 --- a/packages/worker/src/api/routes/system/status.ts +++ b/packages/worker/src/api/routes/system/status.ts @@ -1,8 +1,8 @@ import Router from "@koa/router" import * as controller from "../../controllers/system/status" -const router = new Router() +const router: Router = new Router() router.get("/api/system/status", controller.fetch) -export default router +export = router diff --git a/packages/worker/src/api/routes/system/tenants.ts b/packages/worker/src/api/routes/system/tenants.ts index 7feb73a234..7967de34b3 100644 --- a/packages/worker/src/api/routes/system/tenants.ts +++ b/packages/worker/src/api/routes/system/tenants.ts @@ -2,7 +2,7 @@ import Router from "@koa/router" import * as controller from "../../controllers/system/tenants" import { middleware } from "@budibase/backend-core" -const router = new Router() +const router: Router = new Router() router.delete( "/api/system/tenants/:tenantId", @@ -10,4 +10,4 @@ router.delete( controller.delete ) -export default router +export = router diff --git a/packages/worker/src/api/routes/validation/users.ts b/packages/worker/src/api/routes/validation/users.ts index 0cb14c047e..35f293ce87 100644 --- a/packages/worker/src/api/routes/validation/users.ts +++ b/packages/worker/src/api/routes/validation/users.ts @@ -1,4 +1,4 @@ -const { joiValidator } = require("@budibase/backend-core/auth") +import { auth } from "@budibase/backend-core" import Joi from "joi" let schema: any = { @@ -25,7 +25,7 @@ export const buildUserSaveValidation = (isSelf = false) => { _rev: 
Joi.string(), } } - return joiValidator.body(Joi.object(schema).required().unknown(true)) + return auth.joiValidator.body(Joi.object(schema).required().unknown(true)) } export const buildUserBulkUserValidation = (isSelf = false) => { @@ -46,5 +46,5 @@ export const buildUserBulkUserValidation = (isSelf = false) => { }), } - return joiValidator.body(Joi.object(bulkSchema).required().unknown(true)) + return auth.joiValidator.body(Joi.object(bulkSchema).required().unknown(true)) } diff --git a/packages/worker/src/constants/templates/index.js b/packages/worker/src/constants/templates/index.ts similarity index 60% rename from packages/worker/src/constants/templates/index.js rename to packages/worker/src/constants/templates/index.ts index 0631df7011..b0b6029c36 100644 --- a/packages/worker/src/constants/templates/index.js +++ b/packages/worker/src/constants/templates/index.ts @@ -1,15 +1,15 @@ -const { readStaticFile } = require("../../utilities/fileSystem") -const { +import { readStaticFile } from "../../utilities/fileSystem" +import { EmailTemplatePurpose, TemplateType, TemplatePurpose, GLOBAL_OWNER, -} = require("../index") -const { join } = require("path") -const { getTemplateParams } = require("@budibase/backend-core/db") -const { getGlobalDB } = require("@budibase/backend-core/tenancy") +} from "../index" +import { join } from "path" +import { db as dbCore, tenancy } from "@budibase/backend-core" +import { Template } from "@budibase/types" -exports.EmailTemplates = { +export const EmailTemplates = { [EmailTemplatePurpose.PASSWORD_RECOVERY]: readStaticFile( join(__dirname, "passwordRecovery.hbs") ), @@ -23,7 +23,7 @@ exports.EmailTemplates = { [EmailTemplatePurpose.CUSTOM]: readStaticFile(join(__dirname, "custom.hbs")), } -exports.addBaseTemplates = (templates, type = null) => { +export function addBaseTemplates(templates: Template[], type?: string) { let purposeList switch (type) { case TemplateType.EMAIL: @@ -38,9 +38,9 @@ exports.addBaseTemplates = (templates, type = null) => { if (templates.find(template => template.purpose === purpose)) { continue } - if (exports.EmailTemplates[purpose]) { + if (EmailTemplates[purpose]) { templates.push({ - contents: exports.EmailTemplates[purpose], + contents: EmailTemplates[purpose], purpose, type, }) @@ -49,10 +49,14 @@ exports.addBaseTemplates = (templates, type = null) => { return templates } -exports.getTemplates = async ({ ownerId, type, id } = {}) => { - const db = getGlobalDB() +export async function getTemplates({ + ownerId, + type, + id, +}: { ownerId?: string; type?: string; id?: string } = {}) { + const db = tenancy.getGlobalDB() const response = await db.allDocs( - getTemplateParams(ownerId || GLOBAL_OWNER, id, { + dbCore.getTemplateParams(ownerId || GLOBAL_OWNER, id, { include_docs: true, }) ) @@ -64,10 +68,10 @@ exports.getTemplates = async ({ ownerId, type, id } = {}) => { if (type) { templates = templates.filter(template => template.type === type) } - return exports.addBaseTemplates(templates, type) + return addBaseTemplates(templates, type) } -exports.getTemplateByPurpose = async (type, purpose) => { - const templates = await exports.getTemplates({ type }) - return templates.find(template => template.purpose === purpose) +export async function getTemplateByPurpose(type: string, purpose: string) { + const templates = await getTemplates({ type }) + return templates.find((template: Template) => template.purpose === purpose) } diff --git a/packages/worker/src/environment.ts b/packages/worker/src/environment.ts index cfe284cb0a..f4c339ad6d 
100644 --- a/packages/worker/src/environment.ts +++ b/packages/worker/src/environment.ts @@ -26,7 +26,7 @@ function parseIntSafe(number: any) { } } -const env = { +const environment = { // auth MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY, @@ -68,7 +68,7 @@ const env = { _set(key: any, value: any) { process.env[key] = value // @ts-ignore - env[key] = value + environment[key] = value }, isDev, isTest, @@ -78,12 +78,14 @@ const env = { } // if some vars haven't been set, define them -if (!env.APPS_URL) { - env.APPS_URL = isDev() ? "http://localhost:4001" : "http://app-service:4002" +if (!environment.APPS_URL) { + environment.APPS_URL = isDev() + ? "http://localhost:4001" + : "http://app-service:4002" } // clean up any environment variable edge cases -for (let [key, value] of Object.entries(env)) { +for (let [key, value] of Object.entries(environment)) { // handle the edge case of "0" to disable an environment variable if (value === "0") { // @ts-ignore @@ -91,4 +93,4 @@ for (let [key, value] of Object.entries(env)) { } } -export = env +export = environment diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts index a718beabb8..b513b11279 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -1,5 +1,5 @@ // need to load environment first -const env = require("./environment") +import env from "./environment" // enable APM if configured if (process.env.ELASTIC_APM_ENABLED) { @@ -14,19 +14,17 @@ import { Event } from "@sentry/types/dist/event" import Application from "koa" import { bootstrap } from "global-agent" import * as db from "./db" +import { auth, logging, events, pinoSettings } from "@budibase/backend-core" db.init() -const Koa = require("koa") -const destroyable = require("server-destroy") -const koaBody = require("koa-body") +import Koa from "koa" +import koaBody from "koa-body" +import http from "http" +import api from "./api" +import * as redis from "./utilities/redis" +import Sentry from "@sentry/node" const koaSession = require("koa-session") -const { passport } = require("@budibase/backend-core/auth") -const { logAlert } = require("@budibase/backend-core/logging") const logger = require("koa-pino-logger") -const http = require("http") -const api = require("./api") -const redis = require("./utilities/redis") -const Sentry = require("@sentry/node") -import { events, pinoSettings } from "@budibase/backend-core" +const destroyable = require("server-destroy") // this will set up http and https proxies from env variables bootstrap() @@ -41,8 +39,8 @@ app.use(koaSession(app)) app.use(logger(pinoSettings())) // authentication -app.use(passport.initialize()) -app.use(passport.session()) +app.use(auth.passport.initialize()) +app.use(auth.passport.session()) // api routes app.use(api.routes()) @@ -81,17 +79,18 @@ server.on("close", async () => { const shutdown = () => { server.close() + // @ts-ignore server.destroy() } -export = server.listen(parseInt(env.PORT || 4002), async () => { +export = server.listen(parseInt(env.PORT || "4002"), async () => { console.log(`Worker running on ${JSON.stringify(server.address())}`) await redis.init() }) process.on("uncaughtException", err => { errCode = -1 - logAlert("Uncaught exception.", err) + logging.logAlert("Uncaught exception.", err) shutdown() }) diff --git a/packages/worker/src/middleware/cloudRestricted.js b/packages/worker/src/middleware/cloudRestricted.ts similarity index 53% rename from packages/worker/src/middleware/cloudRestricted.js rename to
packages/worker/src/middleware/cloudRestricted.ts index 5b9d64b92f..d2ca0c7964 100644 --- a/packages/worker/src/middleware/cloudRestricted.js +++ b/packages/worker/src/middleware/cloudRestricted.ts @@ -1,13 +1,14 @@ -const env = require("../environment") -const { Header } = require("@budibase/backend-core/constants") +import env from "../environment" +import { constants } from "@budibase/backend-core" +import { BBContext } from "@budibase/types" /** * This is a restricted endpoint in the cloud. * Ensure that the correct API key has been supplied. */ -module.exports = async (ctx, next) => { +export = async (ctx: BBContext, next: any) => { if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { - const apiKey = ctx.request.headers[Header.API_KEY] + const apiKey = ctx.request.headers[constants.Header.API_KEY] if (apiKey !== env.INTERNAL_API_KEY) { ctx.throw(403, "Unauthorized") } diff --git a/packages/worker/src/tests/TestConfiguration.ts b/packages/worker/src/tests/TestConfiguration.ts index 015ebb6258..871b7ec6a6 100644 --- a/packages/worker/src/tests/TestConfiguration.ts +++ b/packages/worker/src/tests/TestConfiguration.ts @@ -2,7 +2,7 @@ import "./mocks" import * as dbConfig from "../db" dbConfig.init() import env from "../environment" -import controllers from "./controllers" +import * as controllers from "./controllers" const supertest = require("supertest") import { Config } from "../constants" import { diff --git a/packages/worker/src/tests/controllers.js b/packages/worker/src/tests/controllers.js deleted file mode 100644 index 226355506f..0000000000 --- a/packages/worker/src/tests/controllers.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = { - email: require("../api/controllers/global/email"), - workspaces: require("../api/controllers/global/workspaces"), - config: require("../api/controllers/global/configs"), - templates: require("../api/controllers/global/templates"), - users: require("../api/controllers/global/users"), -} diff --git a/packages/worker/src/tests/controllers.ts b/packages/worker/src/tests/controllers.ts new file mode 100644 index 0000000000..bdcf11434b --- /dev/null +++ b/packages/worker/src/tests/controllers.ts @@ -0,0 +1,5 @@ +export * as email from "../api/controllers/global/email" +export * as workspaces from "../api/controllers/global/workspaces" +export * as config from "../api/controllers/global/configs" +export * as templates from "../api/controllers/global/templates" +export * as users from "../api/controllers/global/users" diff --git a/packages/worker/src/tests/structures/configs.js b/packages/worker/src/tests/structures/configs.ts similarity index 88% rename from packages/worker/src/tests/structures/configs.js rename to packages/worker/src/tests/structures/configs.ts index fbd82de0a7..a692e77987 100644 --- a/packages/worker/src/tests/structures/configs.js +++ b/packages/worker/src/tests/structures/configs.ts @@ -1,7 +1,7 @@ const { Config } = require("../../constants") const { utils } = require("@budibase/backend-core") -exports.oidc = conf => { +export function oidc(conf?: any) { return { type: Config.OIDC, config: { @@ -21,7 +21,7 @@ exports.oidc = conf => { } } -exports.google = conf => { +export function google(conf?: any) { return { type: Config.GOOGLE, config: { @@ -33,7 +33,7 @@ exports.google = conf => { } } -exports.smtp = conf => { +export function smtp(conf?: any) { return { type: Config.SMTP, config: { @@ -47,7 +47,7 @@ exports.smtp = conf => { } } -exports.smtpEthereal = () => { +export function smtpEthereal() { return { type: Config.SMTP, 
config: { @@ -63,7 +63,7 @@ exports.smtpEthereal = () => { } } -exports.settings = conf => { +export function settings(conf?: any) { return { type: Config.SETTINGS, config: { diff --git a/packages/worker/src/tests/structures/index.ts b/packages/worker/src/tests/structures/index.ts index 3a4c3324df..0ac32e77ee 100644 --- a/packages/worker/src/tests/structures/index.ts +++ b/packages/worker/src/tests/structures/index.ts @@ -1,5 +1,5 @@ import { structures } from "@budibase/backend-core/tests" -import configs from "./configs" +import * as configs from "./configs" import * as users from "./users" import * as groups from "./groups" import { v4 as uuid } from "uuid" diff --git a/packages/worker/src/utilities/appService.js b/packages/worker/src/utilities/appService.js deleted file mode 100644 index ae895b831c..0000000000 --- a/packages/worker/src/utilities/appService.js +++ /dev/null @@ -1,33 +0,0 @@ -const fetch = require("node-fetch") -const { Header } = require("@budibase/backend-core/constants") -const { getTenantId, isTenantIdSet } = require("@budibase/backend-core/tenancy") -const { checkSlashesInUrl } = require("../utilities") -const env = require("../environment") - -async function makeAppRequest(url, method, body) { - if (env.isTest()) { - return - } - const request = { headers: {} } - request.headers[Header.API_KEY] = env.INTERNAL_API_KEY - if (isTenantIdSet()) { - request.headers[Header.TENANT_ID] = getTenantId() - } - if (body) { - request.headers["Content-Type"] = "application/json" - request.body = JSON.stringify(body) - } - request.method = method - return fetch(checkSlashesInUrl(env.APPS_URL + url), request) -} - -exports.syncUserInApps = async userId => { - const response = await makeAppRequest( - `/api/users/metadata/sync/${userId}`, - "POST", - {} - ) - if (response && response.status !== 200) { - throw "Unable to sync user." - } -} diff --git a/packages/worker/src/utilities/appService.ts b/packages/worker/src/utilities/appService.ts new file mode 100644 index 0000000000..a0c4314f65 --- /dev/null +++ b/packages/worker/src/utilities/appService.ts @@ -0,0 +1,32 @@ +import fetch from "node-fetch" +import { constants, tenancy } from "@budibase/backend-core" +import { checkSlashesInUrl } from "../utilities" +import env from "../environment" + +async function makeAppRequest(url: string, method: string, body: any) { + if (env.isTest()) { + return + } + const request: any = { headers: {} } + request.headers[constants.Header.API_KEY] = env.INTERNAL_API_KEY + if (tenancy.isTenantIdSet()) { + request.headers[constants.Header.TENANT_ID] = tenancy.getTenantId() + } + if (body) { + request.headers["Content-Type"] = "application/json" + request.body = JSON.stringify(body) + } + request.method = method + return fetch(checkSlashesInUrl(env.APPS_URL + url), request) +} + +export async function syncUserInApps(userId: string) { + const response = await makeAppRequest( + `/api/users/metadata/sync/${userId}`, + "POST", + {} + ) + if (response && response.status !== 200) { + throw "Unable to sync user." 
+ } +} diff --git a/packages/worker/src/utilities/fileSystem.js b/packages/worker/src/utilities/fileSystem.js deleted file mode 100644 index 8f0bc8d3ed..0000000000 --- a/packages/worker/src/utilities/fileSystem.js +++ /dev/null @@ -1,5 +0,0 @@ -const { readFileSync } = require("fs") - -exports.readStaticFile = path => { - return readFileSync(path, "utf-8") -} diff --git a/packages/worker/src/utilities/fileSystem.ts b/packages/worker/src/utilities/fileSystem.ts new file mode 100644 index 0000000000..603a797407 --- /dev/null +++ b/packages/worker/src/utilities/fileSystem.ts @@ -0,0 +1,5 @@ +import { readFileSync } from "fs" + +export function readStaticFile(path: string) { + return readFileSync(path, "utf-8") +} diff --git a/packages/backend-core/src/helpers.js b/packages/worker/src/utilities/index.ts similarity index 85% rename from packages/backend-core/src/helpers.js rename to packages/worker/src/utilities/index.ts index b402a82cf3..e1e065bd4e 100644 --- a/packages/backend-core/src/helpers.js +++ b/packages/worker/src/utilities/index.ts @@ -4,6 +4,6 @@ * @param {string} url The URL to test and remove any extra double slashes. * @return {string} The updated url. */ -exports.checkSlashesInUrl = url => { +export function checkSlashesInUrl(url: string) { return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2") } diff --git a/packages/worker/src/utilities/users.js b/packages/worker/src/utilities/users.js deleted file mode 100644 index 93057ca34f..0000000000 --- a/packages/worker/src/utilities/users.js +++ /dev/null @@ -1,17 +0,0 @@ -const { getGlobalDB } = require("@budibase/backend-core/tenancy") -const { getGlobalUserParams } = require("@budibase/backend-core/db") - -exports.checkAnyUserExists = async () => { - try { - const db = getGlobalDB() - const users = await db.allDocs( - getGlobalUserParams(null, { - include_docs: true, - limit: 1, - }) - ) - return users && users.rows.length >= 1 - } catch (err) { - throw new Error("Unable to retrieve user list") - } -} diff --git a/packages/worker/src/utilities/users.ts b/packages/worker/src/utilities/users.ts new file mode 100644 index 0000000000..149d4d985c --- /dev/null +++ b/packages/worker/src/utilities/users.ts @@ -0,0 +1,16 @@ +import { tenancy, db as dbCore } from "@budibase/backend-core" + +export async function checkAnyUserExists() { + try { + const db = tenancy.getGlobalDB() + const users = await db.allDocs( + dbCore.getGlobalUserParams(null, { + include_docs: true, + limit: 1, + }) + ) + return users && users.rows.length >= 1 + } catch (err) { + throw new Error("Unable to retrieve user list") + } +} diff --git a/packages/worker/tsconfig.build.json b/packages/worker/tsconfig.build.json index 8071b4ad00..7ba657367e 100644 --- a/packages/worker/tsconfig.build.json +++ b/packages/worker/tsconfig.build.json @@ -3,7 +3,6 @@ "target": "es6", "module": "commonjs", "lib": ["es2020"], - "allowJs": true, "strict": true, "noImplicitAny": true, "esModuleInterop": true,
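
Across the route modules above, module.exports = router and export default router both become export = router, so CommonJS consumers that require() a route file keep receiving the router instance directly, which matches the commonjs/esModuleInterop settings these tsconfigs retain. A minimal sketch of the pattern, assuming a hypothetical routes/example.ts that is not part of this diff:

import Router from "@koa/router"

const router: Router = new Router()

// a trivial endpoint, purely for illustration
router.get("/api/example", ctx => {
  ctx.body = { status: "ok" }
})

export = router

// CommonJS callers keep working unchanged:
//   const exampleRoutes = require("./routes/example")
//   app.use(exampleRoutes.routes())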