diff --git a/lerna.json b/lerna.json
index 36e24a19de..e01e5ae03e 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
 {
-  "version": "2.11.37",
+  "version": "2.11.39",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
diff --git a/packages/backend-core/src/cache/appMetadata.ts b/packages/backend-core/src/cache/appMetadata.ts
index 420456fd41..bd3efc20db 100644
--- a/packages/backend-core/src/cache/appMetadata.ts
+++ b/packages/backend-core/src/cache/appMetadata.ts
@@ -33,8 +33,8 @@ function isInvalid(metadata?: { state: string }) {
  * Get the requested app metadata by id.
  * Use redis cache to first read the app metadata.
  * If not present fallback to loading the app metadata directly and re-caching.
- * @param {string} appId the id of the app to get metadata from.
- * @returns {object} the app metadata.
+ * @param appId the id of the app to get metadata from.
+ * @returns the app metadata.
  */
 export async function getAppMetadata(appId: string): Promise {
   const client = await getAppClient()
@@ -72,9 +72,9 @@ export async function getAppMetadata(appId: string): Promise {
 /**
  * Invalidate/reset the cached metadata when a change occurs in the db.
- * @param appId {string} the cache key to bust/update.
- * @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
- * @return {Promise} will respond with success when cache is updated.
+ * @param appId the cache key to bust/update.
+ * @param newMetadata optional - can simply provide the new metadata to update with.
+ * @return will respond with success when cache is updated.
  */
 export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
   if (!appId) {
diff --git a/packages/backend-core/src/cache/user.ts b/packages/backend-core/src/cache/user.ts
index 481d3691e4..313b9a4d4a 100644
--- a/packages/backend-core/src/cache/user.ts
+++ b/packages/backend-core/src/cache/user.ts
@@ -61,9 +61,9 @@ async function populateUsersFromDB(
  * Get the requested user by id.
  * Use redis cache to first read the user.
  * If not present fallback to loading the user directly and re-caching.
- * @param {*} userId the id of the user to get
- * @param {*} tenantId the tenant of the user to get
- * @param {*} populateUser function to provide the user for re-caching. default to couch db
+ * @param userId the id of the user to get
+ * @param tenantId the tenant of the user to get
+ * @param populateUser function to provide the user for re-caching. default to couch db
  * @returns
  */
 export async function getUser(
@@ -111,8 +111,8 @@ export async function getUser(
  * Get the requested users by id.
  * Use redis cache to first read the users.
  * If not present fallback to loading the users directly and re-caching.
- * @param {*} userIds the ids of the user to get
- * @param {*} tenantId the tenant of the users to get
+ * @param userIds the ids of the user to get
+ * @param tenantId the tenant of the users to get
  * @returns
  */
 export async function getUsers(
diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts
index e64c116663..c331d791a6 100644
--- a/packages/backend-core/src/cache/writethrough.ts
+++ b/packages/backend-core/src/cache/writethrough.ts
@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }
-  async put(doc: any) {
-    return put(this.db, doc, this.writeRateMs)
+  async put(doc: any, writeRateMs: number = this.writeRateMs) {
+    return put(this.db, doc, writeRateMs)
   }
   async get(id: string) {
diff --git a/packages/backend-core/src/configs/configs.ts b/packages/backend-core/src/configs/configs.ts
index 49ace84d52..0c83ed005d 100644
--- a/packages/backend-core/src/configs/configs.ts
+++ b/packages/backend-core/src/configs/configs.ts
@@ -23,7 +23,7 @@ import environment from "../environment"
 /**
  * Generates a new configuration ID.
- * @returns {string} The new configuration ID which the config doc can be stored under.
+ * @returns The new configuration ID which the config doc can be stored under.
  */
 export function generateConfigID(type: ConfigType) {
   return `${DocumentType.CONFIG}${SEPARATOR}${type}`
diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts
index 61d96bb4b0..609c18abb5 100644
--- a/packages/backend-core/src/context/mainContext.ts
+++ b/packages/backend-core/src/context/mainContext.ts
@@ -62,7 +62,7 @@ export function isTenancyEnabled() {
 /**
  * Given an app ID this will attempt to retrieve the tenant ID from it.
- * @return {null|string} The tenant ID found within the app ID.
+ * @return The tenant ID found within the app ID.
  */
 export function getTenantIDFromAppID(appId: string) {
   if (!appId) {
diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts
index e813722d98..f91a37ce8f 100644
--- a/packages/backend-core/src/db/Replication.ts
+++ b/packages/backend-core/src/db/Replication.ts
@@ -8,8 +8,8 @@ class Replication {
 /**
  *
- * @param {String} source - the DB you want to replicate or rollback to
- * @param {String} target - the DB you want to replicate to, or rollback from
+ * @param source - the DB you want to replicate or rollback to
+ * @param target - the DB you want to replicate to, or rollback from
  */
 constructor({ source, target }: any) {
   this.source = getPouchDB(source)
@@ -38,7 +38,7 @@ class Replication {
 /**
  * Two way replication operation, intended to be promise based.
- * @param {Object} opts - PouchDB replication options
+ * @param opts - PouchDB replication options
  */
 sync(opts = {}) {
   this.replication = this.promisify(this.source.sync, opts)
@@ -47,7 +47,7 @@ class Replication {
 /**
  * One way replication operation, intended to be promise based.
- * @param {Object} opts - PouchDB replication options
+ * @param opts - PouchDB replication options
  */
 replicate(opts = {}) {
   this.replication = this.promisify(this.source.replicate.to, opts)
diff --git a/packages/backend-core/src/db/lucene.ts b/packages/backend-core/src/db/lucene.ts
index 7451d581b5..f982ee67d0 100644
--- a/packages/backend-core/src/db/lucene.ts
+++ b/packages/backend-core/src/db/lucene.ts
@@ -599,10 +599,10 @@ async function runQuery(
  * Gets round the fixed limit of 200 results from a query by fetching as many
  * pages as required and concatenating the results. This recursively operates
  * until enough results have been found.
- * @param dbName {string} Which database to run a lucene query on
- * @param index {string} Which search index to utilise
- * @param query {object} The JSON query structure
- * @param params {object} The search params including:
+ * @param dbName Which database to run a lucene query on
+ * @param index Which search index to utilise
+ * @param query The JSON query structure
+ * @param params The search params including:
  * tableId {string} The table ID to search
  * sort {string} The sort column
  * sortOrder {string} The sort order ("ascending" or "descending")
@@ -655,10 +655,10 @@ async function recursiveSearch(
  * Performs a paginated search. A bookmark will be returned to allow the next
  * page to be fetched. There is a max limit off 200 results per page in a
  * paginated search.
- * @param dbName {string} Which database to run a lucene query on
- * @param index {string} Which search index to utilise
- * @param query {object} The JSON query structure
- * @param params {object} The search params including:
+ * @param dbName Which database to run a lucene query on
+ * @param index Which search index to utilise
+ * @param query The JSON query structure
+ * @param params The search params including:
  * tableId {string} The table ID to search
  * sort {string} The sort column
  * sortOrder {string} The sort order ("ascending" or "descending")
@@ -722,10 +722,10 @@ export async function paginatedSearch(
  * desired amount of results. There is a limit of 1000 results to avoid
  * heavy performance hits, and to avoid client components breaking from
  * handling too much data.
- * @param dbName {string} Which database to run a lucene query on
- * @param index {string} Which search index to utilise
- * @param query {object} The JSON query structure
- * @param params {object} The search params including:
+ * @param dbName Which database to run a lucene query on
+ * @param index Which search index to utilise
+ * @param query The JSON query structure
+ * @param params The search params including:
  * tableId {string} The table ID to search
  * sort {string} The sort column
  * sortOrder {string} The sort order ("ascending" or "descending")
diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts
index 4ebf8392b5..d7a4b8224a 100644
--- a/packages/backend-core/src/db/utils.ts
+++ b/packages/backend-core/src/db/utils.ts
@@ -45,7 +45,7 @@ export async function getAllDbs(opts = { efficient: false }) {
  * Lots of different points in the system need to find the full list of apps, this will
  * enumerate the entire CouchDB cluster and get the list of databases (every app).
  *
- * @return {Promise} returns the app information document stored in each app database.
+ * @return returns the app information document stored in each app database.
  */
 export async function getAllApps({
   dev,
diff --git a/packages/backend-core/src/docIds/conversions.ts b/packages/backend-core/src/docIds/conversions.ts
index 381c5cb90f..b168b74e16 100644
--- a/packages/backend-core/src/docIds/conversions.ts
+++ b/packages/backend-core/src/docIds/conversions.ts
@@ -25,7 +25,7 @@ export function isDevApp(app: App) {
 /**
  * Generates a development app ID from a real app ID.
- * @returns {string} the dev app ID which can be used for dev database.
+ * @returns the dev app ID which can be used for dev database.
  */
 export function getDevelopmentAppID(appId: string) {
   if (!appId || appId.startsWith(APP_DEV_PREFIX)) {
diff --git a/packages/backend-core/src/docIds/ids.ts b/packages/backend-core/src/docIds/ids.ts
index 4c9eb713c8..02176109da 100644
--- a/packages/backend-core/src/docIds/ids.ts
+++ b/packages/backend-core/src/docIds/ids.ts
@@ -8,7 +8,7 @@ import { newid } from "./newid"
 /**
  * Generates a new app ID.
- * @returns {string} The new app ID which the app doc can be stored under.
+ * @returns The new app ID which the app doc can be stored under.
  */
 export const generateAppID = (tenantId?: string | null) => {
   let id = APP_PREFIX
@@ -20,9 +20,9 @@ export const generateAppID = (tenantId?: string | null) => {
 /**
  * Gets a new row ID for the specified table.
- * @param {string} tableId The table which the row is being created for.
- * @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
- * @returns {string} The new ID which a row doc can be stored under.
+ * @param tableId The table which the row is being created for.
+ * @param id If an ID is to be used then the UUID can be substituted for this.
+ * @returns The new ID which a row doc can be stored under.
  */
 export function generateRowID(tableId: string, id?: string) {
   id = id || newid()
@@ -31,7 +31,7 @@ export function generateRowID(tableId: string, id?: string) {
 /**
  * Generates a new workspace ID.
- * @returns {string} The new workspace ID which the workspace doc can be stored under.
+ * @returns The new workspace ID which the workspace doc can be stored under.
  */
 export function generateWorkspaceID() {
   return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
@@ -39,7 +39,7 @@ export function generateWorkspaceID() {
 /**
  * Generates a new global user ID.
- * @returns {string} The new user ID which the user doc can be stored under.
+ * @returns The new user ID which the user doc can be stored under.
  */
 export function generateGlobalUserID(id?: any) {
   return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
@@ -52,8 +52,8 @@ export function isGlobalUserID(id: string) {
 /**
  * Generates a new user ID based on the passed in global ID.
- * @param {string} globalId The ID of the global user.
- * @returns {string} The new user ID which the user doc can be stored under.
+ * @param globalId The ID of the global user.
+ * @returns The new user ID which the user doc can be stored under.
  */
 export function generateUserMetadataID(globalId: string) {
   return generateRowID(InternalTable.USER_METADATA, globalId)
@@ -84,7 +84,7 @@ export function generateAppUserID(prodAppId: string, userId: string) {
 /**
  * Generates a new role ID.
- * @returns {string} The new role ID which the role doc can be stored under.
+ * @returns The new role ID which the role doc can be stored under.
  */
 export function generateRoleID(name: string) {
   const prefix = `${DocumentType.ROLE}${SEPARATOR}`
@@ -103,7 +103,7 @@ export function prefixRoleID(name: string) {
 /**
  * Generates a new dev info document ID - this is scoped to a user.
- * @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
+ * @returns The new dev info ID which info for dev (like api key) can be stored under.
  */
 export const generateDevInfoID = (userId: any) => {
   return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
@@ -111,7 +111,7 @@ export const generateDevInfoID = (userId: any) => {
 /**
  * Generates a new plugin ID - to be used in the global DB.
- * @returns {string} The new plugin ID which a plugin metadata document can be stored under.
+ * @returns The new plugin ID which a plugin metadata document can be stored under.
  */
 export const generatePluginID = (name: string) => {
   return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
diff --git a/packages/backend-core/src/docIds/params.ts b/packages/backend-core/src/docIds/params.ts
index 5d563952f7..36fd75622b 100644
--- a/packages/backend-core/src/docIds/params.ts
+++ b/packages/backend-core/src/docIds/params.ts
@@ -12,12 +12,12 @@ import { getProdAppID } from "./conversions"
  * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
  * More complex cases such as link docs and rows which have multiple levels of IDs that their
  * ID consists of need their own functions to build the allDocs parameters.
- * @param {string} docType The type of document which input params are being built for, e.g. user,
 * link, app, table and so on.
- * @param {string|null} docId The ID of the document minus its type - this is only needed if looking
 * for a singular document.
- * @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
- * @returns {object} Parameters which can then be used with an allDocs request.
+ * @param docType The type of document which input params are being built for, e.g. user,
 * link, app, table and so on.
+ * @param docId The ID of the document minus its type - this is only needed if looking
 * for a singular document.
+ * @param otherProps Add any other properties onto the request, e.g. include_docs.
+ * @returns Parameters which can then be used with an allDocs request.
  */
 export function getDocParams(
   docType: string,
@@ -36,11 +36,11 @@ export function getDocParams(
 /**
  * Gets the DB allDocs/query params for retrieving a row.
- * @param {string|null} tableId The table in which the rows have been stored.
- * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
 * left null to get all the rows in the table.
- * @param {object} otherProps Any other properties to add to the request.
- * @returns {object} Parameters which can then be used with an allDocs request.
+ * @param tableId The table in which the rows have been stored.
+ * @param rowId The ID of the row which is being specifically queried for. This can be
 * left null to get all the rows in the table.
+ * @param otherProps Any other properties to add to the request.
+ * @returns Parameters which can then be used with an allDocs request.
  */
 export function getRowParams(
   tableId?: string | null,
diff --git a/packages/backend-core/src/helpers.ts b/packages/backend-core/src/helpers.ts
index e1e065bd4e..dd241f4af7 100644
--- a/packages/backend-core/src/helpers.ts
+++ b/packages/backend-core/src/helpers.ts
@@ -1,8 +1,8 @@
 /**
  * Makes sure that a URL has the correct number of slashes, while maintaining the
  * http(s):// double slashes.
- * @param {string} url The URL to test and remove any extra double slashes. - * @return {string} The updated url. + * @param url The URL to test and remove any extra double slashes. + * @return The updated url. */ export function checkSlashesInUrl(url: string) { return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2") diff --git a/packages/backend-core/src/middleware/passport/local.ts b/packages/backend-core/src/middleware/passport/local.ts index e198032532..f1d72cab7a 100644 --- a/packages/backend-core/src/middleware/passport/local.ts +++ b/packages/backend-core/src/middleware/passport/local.ts @@ -13,10 +13,10 @@ export const options = { /** * Passport Local Authentication Middleware. - * @param {*} ctx the request structure - * @param {*} email username to login with - * @param {*} password plain text password to log in with - * @param {*} done callback from passport to return user information and errors + * @param ctx the request structure + * @param email username to login with + * @param password plain text password to log in with + * @param done callback from passport to return user information and errors * @returns The authenticated user, or errors if they occur */ export async function authenticate( diff --git a/packages/backend-core/src/middleware/passport/sso/oidc.ts b/packages/backend-core/src/middleware/passport/sso/oidc.ts index 83bfde28b6..061e0507aa 100644 --- a/packages/backend-core/src/middleware/passport/sso/oidc.ts +++ b/packages/backend-core/src/middleware/passport/sso/oidc.ts @@ -17,15 +17,15 @@ const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) { /** - * @param {*} issuer The identity provider base URL - * @param {*} sub The user ID - * @param {*} profile The user profile information. Created by passport from the /userinfo response - * @param {*} jwtClaims The parsed id_token claims - * @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT - * @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT - * @param {*} idToken The id_token - always a JWT - * @param {*} params The response body from requesting an access_token - * @param {*} done The passport callback: err, user, info + * @param issuer The identity provider base URL + * @param sub The user ID + * @param profile The user profile information. Created by passport from the /userinfo response + * @param jwtClaims The parsed id_token claims + * @param accessToken The access_token for contacting the identity provider - may or may not be a JWT + * @param refreshToken The refresh_token for obtaining a new access_token - usually not a JWT + * @param idToken The id_token - always a JWT + * @param params The response body from requesting an access_token + * @param done The passport callback: err, user, info */ return async ( issuer: string, @@ -61,8 +61,8 @@ export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) { } /** - * @param {*} profile The structured profile created by passport using the user info endpoint - * @param {*} jwtClaims The claims returned in the id token + * @param profile The structured profile created by passport using the user info endpoint + * @param jwtClaims The claims returned in the id token */ function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) { // profile not guaranteed to contain email e.g. 
github connected azure ad account diff --git a/packages/backend-core/src/middleware/passport/utils.ts b/packages/backend-core/src/middleware/passport/utils.ts index 7e0d3863a0..88642471b9 100644 --- a/packages/backend-core/src/middleware/passport/utils.ts +++ b/packages/backend-core/src/middleware/passport/utils.ts @@ -5,9 +5,9 @@ import { ConfigType, GoogleInnerConfig } from "@budibase/types" /** * Utility to handle authentication errors. * - * @param {*} done The passport callback. - * @param {*} message Message that will be returned in the response body - * @param {*} err (Optional) error that will be logged + * @param done The passport callback. + * @param message Message that will be returned in the response body + * @param err (Optional) error that will be logged */ export function authError(done: Function, message: string, err?: any) { diff --git a/packages/backend-core/src/objectStore/buckets/app.ts b/packages/backend-core/src/objectStore/buckets/app.ts index 9951058d6a..be9fddeaa6 100644 --- a/packages/backend-core/src/objectStore/buckets/app.ts +++ b/packages/backend-core/src/objectStore/buckets/app.ts @@ -6,10 +6,10 @@ import * as cloudfront from "../cloudfront" * In production the client library is stored in the object store, however in development * we use the symlinked version produced by lerna, located in node modules. We link to this * via a specific endpoint (under /api/assets/client). - * @param {string} appId In production we need the appId to look up the correct bucket, as the + * @param appId In production we need the appId to look up the correct bucket, as the * version of the client lib may differ between apps. - * @param {string} version The version to retrieve. - * @return {string} The URL to be inserted into appPackage response or server rendered + * @param version The version to retrieve. + * @return The URL to be inserted into appPackage response or server rendered * app index file. */ export const clientLibraryUrl = (appId: string, version: string) => { diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts index 4ac3641de1..c36a09915e 100644 --- a/packages/backend-core/src/objectStore/objectStore.ts +++ b/packages/backend-core/src/objectStore/objectStore.ts @@ -61,9 +61,9 @@ export function sanitizeBucket(input: string) { /** * Gets a connection to the object store using the S3 SDK. - * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from. - * @param {object} opts configuration for the object store. - * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage. + * @param bucket the name of the bucket which blobs will be uploaded/retrieved from. + * @param opts configuration for the object store. + * @return an S3 object store object, check S3 Nodejs SDK for usage. * @constructor */ export const ObjectStore = ( diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index ec1d9d4a90..af2ec6dbaa 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -5,9 +5,9 @@ import { timeout } from "../utils" * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs * for the sake of maintaining API consistency. - * @param {string} queue The name of the queue which the message will be carried on. 
- * @param {object} message The JSON message which will be passed back to the consumer. - * @returns {Object} A new job which can now be put onto the queue, this is mostly an + * @param queue The name of the queue which the message will be carried on. + * @param message The JSON message which will be passed back to the consumer. + * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ function newJob(queue: string, message: any) { @@ -32,8 +32,8 @@ class InMemoryQueue { _addCount: number /** * The constructor the queue, exactly the same as that of Bulls. - * @param {string} name The name of the queue which is being configured. - * @param {object|null} opts This is not used by the in memory queue as there is no real use + * @param name The name of the queue which is being configured. + * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ constructor(name: string, opts = null) { @@ -49,7 +49,7 @@ class InMemoryQueue { * Same callback API as Bull, each callback passed to this will consume messages as they are * available. Please note this is a queue service, not a notification service, so each * consumer will receive different messages. - * @param {function} func The callback function which will return a "Job", the same + * @param func The callback function which will return a "Job", the same * as the Bull API, within this job the property "data" contains the JSON message. Please * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. @@ -73,9 +73,9 @@ class InMemoryQueue { * Simple function to replicate the add message functionality of Bull, putting * a new message on the queue. This then emits an event which will be used to * return the message to a consumer (if one is attached). - * @param {object} msg A message to be transported over the queue, this should be + * @param msg A message to be transported over the queue, this should be * a JSON message as this is required by Bull. - * @param {boolean} repeat serves no purpose for the import queue. + * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars add(msg: any, repeat: boolean) { @@ -96,7 +96,7 @@ class InMemoryQueue { /** * This removes a cron which has been implemented, this is part of Bull API. - * @param {string} cronJobId The cron which is to be removed. + * @param cronJobId The cron which is to be removed. */ removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index e7755f275d..d1e2d8989e 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -142,7 +142,7 @@ function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) { * this can only be done with redis streams because they will have an end. * @param stream A redis stream, specifically as this type of stream will have an end. * @param client The client to use for further lookups. 
- * @return {Promise} The final output of the stream + * @return The final output of the stream */ function promisifyStream(stream: any, client: RedisWrapper) { return new Promise((resolve, reject) => { diff --git a/packages/backend-core/src/security/permissions.ts b/packages/backend-core/src/security/permissions.ts index 539bbaef27..fe4095d210 100644 --- a/packages/backend-core/src/security/permissions.ts +++ b/packages/backend-core/src/security/permissions.ts @@ -36,8 +36,8 @@ export function levelToNumber(perm: PermissionLevel) { /** * Given the specified permission level for the user return the levels they are allowed to carry out. - * @param {string} userPermLevel The permission level of the user. - * @return {string[]} All the permission levels this user is allowed to carry out. + * @param userPermLevel The permission level of the user. + * @return All the permission levels this user is allowed to carry out. */ export function getAllowedLevels(userPermLevel: PermissionLevel): string[] { switch (userPermLevel) { diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts index 24279e6b5c..b05cf79c8c 100644 --- a/packages/backend-core/src/security/roles.ts +++ b/packages/backend-core/src/security/roles.ts @@ -149,9 +149,9 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string { /** * Gets the role object, this is mainly useful for two purposes, to check if the level exists and * to check if the role inherits any others. - * @param {string|null} roleId The level ID to lookup. - * @param {object|null} opts options for the function, like whether to halt errors, instead return public. - * @returns {Promise} The role object, which may contain an "inherits" property. + * @param roleId The level ID to lookup. + * @param opts options for the function, like whether to halt errors, instead return public. + * @returns The role object, which may contain an "inherits" property. */ export async function getRole( roleId?: string, @@ -225,8 +225,8 @@ export async function getUserRoleIdHierarchy( /** * Returns an ordered array of the user's inherited role IDs, this can be used * to determine if a user can access something that requires a specific role. - * @param {string} userRoleId The user's role ID, this can be found in their access token. - * @returns {Promise} returns an ordered array of the roles, with the first being their + * @param userRoleId The user's role ID, this can be found in their access token. + * @returns returns an ordered array of the roles, with the first being their * highest level of access and the last being the lowest level. */ export async function getUserRoleHierarchy(userRoleId?: string) { @@ -258,7 +258,7 @@ export async function getAllRoleIds(appId?: string) { /** * Given an app ID this will retrieve all of the roles that are currently within that app. - * @return {Promise} An array of the role objects that were found. + * @return An array of the role objects that were found. 
*/ export async function getAllRoles(appId?: string): Promise { if (appId) { diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts index 1d02bebc32..8bb6300d4e 100644 --- a/packages/backend-core/src/users/db.ts +++ b/packages/backend-core/src/users/db.ts @@ -21,17 +21,21 @@ import { User, UserStatus, UserGroup, - ContextUser, } from "@budibase/types" import { getAccountHolderFromUserIds, isAdmin, + isCreator, validateUniqueUser, } from "./utils" import { searchExistingEmails } from "./lookup" import { hash } from "../utils" -type QuotaUpdateFn = (change: number, cb?: () => Promise) => Promise +type QuotaUpdateFn = ( + change: number, + creatorsChange: number, + cb?: () => Promise +) => Promise type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise type FeatureFn = () => Promise type GroupGetFn = (ids: string[]) => Promise @@ -135,7 +139,7 @@ export class UserDB { if (!fullUser.roles) { fullUser.roles = {} } - // add the active status to a user if its not provided + // add the active status to a user if it's not provided if (fullUser.status == null) { fullUser.status = UserStatus.ACTIVE } @@ -246,7 +250,8 @@ export class UserDB { } const change = dbUser ? 0 : 1 // no change if there is existing user - return UserDB.quotas.addUsers(change, async () => { + const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0 + return UserDB.quotas.addUsers(change, creatorsChange, async () => { await validateUniqueUser(email, tenantId) let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser) @@ -308,6 +313,7 @@ export class UserDB { let usersToSave: any[] = [] let newUsers: any[] = [] + let newCreators: any[] = [] const emails = newUsersRequested.map((user: User) => user.email) const existingEmails = await searchExistingEmails(emails) @@ -328,59 +334,66 @@ export class UserDB { } newUser.userGroups = groups newUsers.push(newUser) + if (isCreator(newUser)) { + newCreators.push(newUser) + } } const account = await accountSdk.getAccountByTenantId(tenantId) - return UserDB.quotas.addUsers(newUsers.length, async () => { - // create the promises array that will be called by bulkDocs - newUsers.forEach((user: any) => { - usersToSave.push( - UserDB.buildUser( - user, - { - hashPassword: true, - requirePassword: user.requirePassword, - }, - tenantId, - undefined, // no dbUser - account + return UserDB.quotas.addUsers( + newUsers.length, + newCreators.length, + async () => { + // create the promises array that will be called by bulkDocs + newUsers.forEach((user: any) => { + usersToSave.push( + UserDB.buildUser( + user, + { + hashPassword: true, + requirePassword: user.requirePassword, + }, + tenantId, + undefined, // no dbUser + account + ) ) - ) - }) + }) - const usersToBulkSave = await Promise.all(usersToSave) - await usersCore.bulkUpdateGlobalUsers(usersToBulkSave) + const usersToBulkSave = await Promise.all(usersToSave) + await usersCore.bulkUpdateGlobalUsers(usersToBulkSave) - // Post-processing of bulk added users, e.g. events and cache operations - for (const user of usersToBulkSave) { - // TODO: Refactor to bulk insert users into the info db - // instead of relying on looping tenant creation - await platform.users.addUser(tenantId, user._id, user.email) - await eventHelpers.handleSaveEvents(user, undefined) - } + // Post-processing of bulk added users, e.g. 
events and cache operations + for (const user of usersToBulkSave) { + // TODO: Refactor to bulk insert users into the info db + // instead of relying on looping tenant creation + await platform.users.addUser(tenantId, user._id, user.email) + await eventHelpers.handleSaveEvents(user, undefined) + } + + const saved = usersToBulkSave.map(user => { + return { + _id: user._id, + email: user.email, + } + }) + + // now update the groups + if (Array.isArray(saved) && groups) { + const groupPromises = [] + const createdUserIds = saved.map(user => user._id) + for (let groupId of groups) { + groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds)) + } + await Promise.all(groupPromises) + } - const saved = usersToBulkSave.map(user => { return { - _id: user._id, - email: user.email, + successful: saved, + unsuccessful, } - }) - - // now update the groups - if (Array.isArray(saved) && groups) { - const groupPromises = [] - const createdUserIds = saved.map(user => user._id) - for (let groupId of groups) { - groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds)) - } - await Promise.all(groupPromises) } - - return { - successful: saved, - unsuccessful, - } - }) + ) } static async bulkDelete(userIds: string[]): Promise { @@ -420,11 +433,12 @@ export class UserDB { _deleted: true, })) const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete) + const creatorsToDelete = usersToDelete.filter(isCreator) - await UserDB.quotas.removeUsers(toDelete.length) for (let user of usersToDelete) { await bulkDeleteProcessing(user) } + await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length) // Build Response // index users by id @@ -473,7 +487,8 @@ export class UserDB { await db.remove(userId, dbUser._rev) - await UserDB.quotas.removeUsers(1) + const creatorsToDelete = isCreator(dbUser) ? 
1 : 0 + await UserDB.quotas.removeUsers(1, creatorsToDelete) await eventHelpers.handleDeleteEvents(dbUser) await cache.user.invalidateUser(userId) await sessions.invalidateSessions(userId, { reason: "deletion" }) diff --git a/packages/backend-core/src/users/users.ts b/packages/backend-core/src/users/users.ts index b087a6b538..a64997224e 100644 --- a/packages/backend-core/src/users/users.ts +++ b/packages/backend-core/src/users/users.ts @@ -14,14 +14,15 @@ import { } from "../db" import { BulkDocsResponse, - ContextUser, SearchQuery, SearchQueryOperators, SearchUsersRequest, User, + ContextUser, } from "@budibase/types" -import * as context from "../context" import { getGlobalDB } from "../context" +import * as context from "../context" +import { isCreator } from "./utils" type GetOpts = { cleanup?: boolean } @@ -283,6 +284,19 @@ export async function getUserCount() { return response.total_rows } +export async function getCreatorCount() { + let creators = 0 + async function iterate(startPage?: string) { + const page = await paginatedUsers({ bookmark: startPage }) + creators += page.data.filter(isCreator).length + if (page.hasNextPage) { + await iterate(page.nextPage) + } + } + await iterate() + return creators +} + // used to remove the builder/admin permissions, for processing the // user as an app user (they may have some specific role/group export function removePortalUserPermissions(user: User | ContextUser) { diff --git a/packages/backend-core/src/users/utils.ts b/packages/backend-core/src/users/utils.ts index af0e8e10c7..0ef4b77998 100644 --- a/packages/backend-core/src/users/utils.ts +++ b/packages/backend-core/src/users/utils.ts @@ -10,6 +10,7 @@ import { getAccountByTenantId } from "../accounts" // extract from shared-core to make easily accessible from backend-core export const isBuilder = sdk.users.isBuilder export const isAdmin = sdk.users.isAdmin +export const isCreator = sdk.users.isCreator export const isGlobalBuilder = sdk.users.isGlobalBuilder export const isAdminOrBuilder = sdk.users.isAdminOrBuilder export const hasAdminPermissions = sdk.users.hasAdminPermissions diff --git a/packages/backend-core/src/utils/utils.ts b/packages/backend-core/src/utils/utils.ts index ac43fa1fdb..b92471a7a4 100644 --- a/packages/backend-core/src/utils/utils.ts +++ b/packages/backend-core/src/utils/utils.ts @@ -79,8 +79,8 @@ export function isPublicApiRequest(ctx: Ctx): boolean { /** * Given a request tries to find the appId, which can be located in various places - * @param {object} ctx The main request body to look through. - * @returns {string|undefined} If an appId was found it will be returned. + * @param ctx The main request body to look through. + * @returns If an appId was found it will be returned. */ export async function getAppIdFromCtx(ctx: Ctx) { // look in headers @@ -135,7 +135,7 @@ function parseAppIdFromUrl(url?: string) { /** * opens the contents of the specified encrypted JWT. - * @return {object} the contents of the token. + * @return the contents of the token. */ export function openJwt(token: string) { if (!token) { @@ -169,8 +169,8 @@ export function isValidInternalAPIKey(apiKey: string) { /** * Get a cookie from context, and decrypt if necessary. - * @param {object} ctx The request which is to be manipulated. - * @param {string} name The name of the cookie to get. + * @param ctx The request which is to be manipulated. + * @param name The name of the cookie to get. 
*/ export function getCookie(ctx: Ctx, name: string) { const cookie = ctx.cookies.get(name) @@ -184,10 +184,10 @@ export function getCookie(ctx: Ctx, name: string) { /** * Store a cookie for the request - it will not expire. - * @param {object} ctx The request which is to be manipulated. - * @param {string} name The name of the cookie to set. - * @param {string|object} value The value of cookie which will be set. - * @param {object} opts options like whether to sign. + * @param ctx The request which is to be manipulated. + * @param name The name of the cookie to set. + * @param value The value of cookie which will be set. + * @param opts options like whether to sign. */ export function setCookie( ctx: Ctx, @@ -223,8 +223,8 @@ export function clearCookie(ctx: Ctx, name: string) { /** * Checks if the API call being made (based on the provided ctx object) is from the client. If * the call is not from a client app then it is from the builder. - * @param {object} ctx The koa context object to be tested. - * @return {boolean} returns true if the call is from the client lib (a built app rather than the builder). + * @param ctx The koa context object to be tested. + * @return returns true if the call is from the client lib (a built app rather than the builder). */ export function isClient(ctx: Ctx) { return ctx.headers[Header.TYPE] === "client" diff --git a/packages/backend-core/tests/core/users/users.spec.js b/packages/backend-core/tests/core/users/users.spec.js new file mode 100644 index 0000000000..ae7109344a --- /dev/null +++ b/packages/backend-core/tests/core/users/users.spec.js @@ -0,0 +1,54 @@ +const _ = require('lodash/fp') +const {structures} = require("../../../tests") + +jest.mock("../../../src/context") +jest.mock("../../../src/db") + +const context = require("../../../src/context") +const db = require("../../../src/db") + +const {getCreatorCount} = require('../../../src/users/users') + +describe("Users", () => { + + let getGlobalDBMock + let getGlobalUserParamsMock + let paginationMock + + beforeEach(() => { + jest.resetAllMocks() + + getGlobalDBMock = jest.spyOn(context, "getGlobalDB") + getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams") + paginationMock = jest.spyOn(db, "pagination") + }) + + it("Retrieves the number of creators", async () => { + const getUsers = (offset, limit, creators = false) => { + const range = _.range(offset, limit) + const opts = creators ? 
{builder: {global: true}} : undefined + return range.map(() => structures.users.user(opts)) + } + const page1Data = getUsers(0, 8) + const page2Data = getUsers(8, 12, true) + getGlobalDBMock.mockImplementation(() => ({ + name : "fake-db", + allDocs: () => ({ + rows: [...page1Data, ...page2Data] + }) + })) + paginationMock.mockImplementationOnce(() => ({ + data: page1Data, + hasNextPage: true, + nextPage: "1" + })) + paginationMock.mockImplementation(() => ({ + data: page2Data, + hasNextPage: false, + nextPage: undefined + })) + const creatorsCount = await getCreatorCount() + expect(creatorsCount).toBe(4) + expect(paginationMock).toHaveBeenCalledTimes(2) + }) +}) diff --git a/packages/backend-core/tests/core/utilities/structures/licenses.ts b/packages/backend-core/tests/core/utilities/structures/licenses.ts index 5cce84edfd..bb452f9ad5 100644 --- a/packages/backend-core/tests/core/utilities/structures/licenses.ts +++ b/packages/backend-core/tests/core/utilities/structures/licenses.ts @@ -72,6 +72,11 @@ export function quotas(): Quotas { value: 1, triggers: [], }, + creators: { + name: "Creators", + value: 1, + triggers: [], + }, userGroups: { name: "User Groups", value: 1, @@ -118,6 +123,10 @@ export function customer(): Customer { export function subscription(): Subscription { return { amount: 10000, + amounts: { + user: 10000, + creator: 0, + }, cancelAt: undefined, currency: "usd", currentPeriodEnd: 0, @@ -126,6 +135,10 @@ export function subscription(): Subscription { duration: PriceDuration.MONTHLY, pastDueAt: undefined, quantity: 0, + quantities: { + user: 0, + creator: 0, + }, status: "active", } } diff --git a/packages/backend-core/tests/core/utilities/structures/quotas.ts b/packages/backend-core/tests/core/utilities/structures/quotas.ts index e82117053f..8d0b05fe1e 100644 --- a/packages/backend-core/tests/core/utilities/structures/quotas.ts +++ b/packages/backend-core/tests/core/utilities/structures/quotas.ts @@ -1,6 +1,6 @@ import { MonthlyQuotaName, QuotaUsage } from "@budibase/types" -export const usage = (): QuotaUsage => { +export const usage = (users: number = 0, creators: number = 0): QuotaUsage => { return { _id: "usage_quota", quotaReset: new Date().toISOString(), @@ -58,7 +58,8 @@ export const usage = (): QuotaUsage => { usageQuota: { apps: 0, plugins: 0, - users: 0, + users, + creators, userGroups: 0, rows: 0, triggers: {}, diff --git a/packages/builder/src/pages/builder/auth/forgot.svelte b/packages/builder/src/pages/builder/auth/forgot.svelte index 2ea8bf7a94..9df7196cfe 100644 --- a/packages/builder/src/pages/builder/auth/forgot.svelte +++ b/packages/builder/src/pages/builder/auth/forgot.svelte @@ -43,7 +43,7 @@ }) - + logo diff --git a/packages/builder/src/pages/builder/auth/reset.svelte b/packages/builder/src/pages/builder/auth/reset.svelte index 19bc1a1b7d..becc30d9a4 100644 --- a/packages/builder/src/pages/builder/auth/reset.svelte +++ b/packages/builder/src/pages/builder/auth/reset.svelte @@ -53,7 +53,7 @@ }) - + {#if loaded} logo diff --git a/packages/frontend-core/src/api/relationships.js b/packages/frontend-core/src/api/relationships.js index fbc727f8e1..45595750a8 100644 --- a/packages/frontend-core/src/api/relationships.js +++ b/packages/frontend-core/src/api/relationships.js @@ -9,7 +9,9 @@ export const buildRelationshipEndpoints = API => ({ if (!tableId || !rowId) { return [] } - const response = await API.get({ url: `/api/${tableId}/${rowId}/enrich` }) + const response = await API.get({ + url: `/api/${tableId}/${rowId}/enrich?field=${fieldName}`, + }) if 
(!fieldName) { return response || [] } else { diff --git a/packages/frontend-core/src/components/grid/cells/RelationshipCell.svelte b/packages/frontend-core/src/components/grid/cells/RelationshipCell.svelte index 925c840478..e6d83e0bea 100644 --- a/packages/frontend-core/src/components/grid/cells/RelationshipCell.svelte +++ b/packages/frontend-core/src/components/grid/cells/RelationshipCell.svelte @@ -260,29 +260,31 @@ class:wrap={editable || contentLines > 1} on:wheel={e => (focused ? e.stopPropagation() : null)} > - {#each value || [] as relationship} - {#if relationship[primaryDisplay] || relationship.primaryDisplay} -
- showRelationship(relationship._id) - : null} - > - {readable( - relationship[primaryDisplay] || relationship.primaryDisplay - )} - - {#if editable} - toggleRow(relationship)} - /> - {/if} -
- {/if} - {/each} + {#if Array.isArray(value) && value.length} + {#each value as relationship} + {#if relationship[primaryDisplay] || relationship.primaryDisplay} +
+ showRelationship(relationship._id) + : null} + > + {readable( + relationship[primaryDisplay] || relationship.primaryDisplay + )} + + {#if editable} + toggleRow(relationship)} + /> + {/if} +
+ {/if} + {/each} + {/if} {#if editable}
@@ -318,7 +320,7 @@
- {:else if searchResults?.length} + {:else if Array.isArray(searchResults) && searchResults.length}
{#each searchResults as row, idx}
} The routing structure, this is the full structure designed for use in the builder, + * @returns The routing structure, this is the full structure designed for use in the builder, * if the client routing is required then the updateRoutingStructureForUserRole should be used. */ async function getRoutingStructure() { diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 71532c37d5..c3c5468840 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -280,17 +280,8 @@ function isEditableColumn(column: FieldSchema) { return !(isExternalAutoColumn || isFormula) } -export type ExternalRequestReturnType = T extends Operation.READ - ? - | Row[] - | { - row: Row - table: Table - } - : { - row: Row - table: Table - } +export type ExternalRequestReturnType = + T extends Operation.READ ? Row[] : { row: Row; table: Table } export class ExternalRequest { private readonly operation: T @@ -857,11 +848,12 @@ export class ExternalRequest { } const output = this.outputProcessing(response, table, relationships) // if reading it'll just be an array of rows, return whole thing - const result = ( - operation === Operation.READ && Array.isArray(response) - ? output - : { row: output[0], table } - ) as ExternalRequestReturnType - return result + if (operation === Operation.READ) { + return ( + Array.isArray(output) ? output : [output] + ) as ExternalRequestReturnType + } else { + return { row: output[0], table } as ExternalRequestReturnType + } } } diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index ddc63e5790..0515b6b97e 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -44,7 +44,7 @@ export async function handleRequest( return [] as any } - return new ExternalRequest(operation, tableId, opts?.datasource).run( + return new ExternalRequest(operation, tableId, opts?.datasource).run( opts || {} ) } @@ -148,17 +148,17 @@ export async function find(ctx: UserCtx): Promise { export async function destroy(ctx: UserCtx) { const tableId = utils.getTableId(ctx) const _id = ctx.request.body._id - const { row } = (await handleRequest(Operation.DELETE, tableId, { + const { row } = await handleRequest(Operation.DELETE, tableId, { id: breakRowIdField(_id), includeSqlRelationships: IncludeRelationship.EXCLUDE, - })) as { row: Row } + }) return { response: { ok: true, id: _id }, row } } export async function bulkDestroy(ctx: UserCtx) { const { rows } = ctx.request.body const tableId = utils.getTableId(ctx) - let promises: Promise[] = [] + let promises: Promise<{ row: Row; table: Table }>[] = [] for (let row of rows) { promises.push( handleRequest(Operation.DELETE, tableId, { @@ -167,7 +167,7 @@ export async function bulkDestroy(ctx: UserCtx) { }) ) } - const responses = (await Promise.all(promises)) as { row: Row }[] + const responses = await Promise.all(promises) return { response: { ok: true }, rows: responses.map(resp => resp.row) } } @@ -183,11 +183,11 @@ export async function fetchEnrichedRow(ctx: UserCtx) { ctx.throw(400, "Datasource has not been configured for plus API.") } const tables = datasource.entities - const response = (await handleRequest(Operation.READ, tableId, { + const response = await handleRequest(Operation.READ, tableId, { id, datasource, includeSqlRelationships: IncludeRelationship.INCLUDE, - })) 
as Row[] + }) const table: Table = tables[tableName] const row = response[0] // this seems like a lot of work, but basically we need to dig deeper for the enrich diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts index 6e0a6d979e..0ccbf5cacf 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -26,6 +26,7 @@ import { fixRow } from "../public/rows" import sdk from "../../../sdk" import * as exporters from "../view/exporters" import { apiFileReturn } from "../../../utilities/fileSystem" +import { Format } from "../view/exporters" export * as views from "./views" function pickApi(tableId: any) { @@ -267,7 +268,7 @@ export const exportRows = async ( async () => { const { fileName, content } = await sdk.rows.exportRows({ tableId, - format, + format: format as Format, rowIds: rows, columns, query, diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts index c838208a3b..80a69cf92b 100644 --- a/packages/server/src/api/controllers/row/internal.ts +++ b/packages/server/src/api/controllers/row/internal.ts @@ -1,6 +1,7 @@ import * as linkRows from "../../../db/linkedRows" import { generateRowID, + getMultiIDParams, getTableIDFromRowID, InternalTables, } from "../../../db/utils" @@ -24,6 +25,8 @@ import { UserCtx, } from "@budibase/types" import sdk from "../../../sdk" +import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils" +import { flatten } from "lodash" export async function patch(ctx: UserCtx) { const tableId = utils.getTableId(ctx) @@ -154,7 +157,7 @@ export async function destroy(ctx: UserCtx) { if (row.tableId !== tableId) { throw "Supplied tableId doesn't match the row's tableId" } - const table = await sdk.tables.getTable(row.tableId) + const table = await sdk.tables.getTable(tableId) // update the row to include full relationships before deleting them row = await outputProcessing(table, row, { squash: false, @@ -164,7 +167,7 @@ export async function destroy(ctx: UserCtx) { await linkRows.updateLinks({ eventType: linkRows.EventType.ROW_DELETE, row, - tableId: row.tableId, + tableId, }) // remove any attachments that were on the row from object storage await cleanupAttachments(table, { row }) @@ -225,60 +228,52 @@ export async function bulkDestroy(ctx: UserCtx) { } export async function fetchEnrichedRow(ctx: UserCtx) { + const fieldName = ctx.request.query.field as string | undefined const db = context.getAppDB() const tableId = utils.getTableId(ctx) - const rowId = ctx.params.rowId - // need table to work out where links go in row - let [table, row] = await Promise.all([ + const rowId = ctx.params.rowId as string + // need table to work out where links go in row, as well as the link docs + let response = await Promise.all([ sdk.tables.getTable(tableId), utils.findRow(ctx, tableId, rowId), + linkRows.getLinkDocuments({ tableId, rowId, fieldName }), ]) - // get the link docs - const linkVals = (await linkRows.getLinkDocuments({ - tableId, - rowId, - })) as LinkDocumentValue[] + const table = response[0] as Table + const row = response[1] as Row + const linkVals = response[2] as LinkDocumentValue[] // look up the actual rows based on the ids - let response = ( - await db.allDocs({ - include_docs: true, - keys: linkVals.map(linkVal => linkVal.id), - }) - ).rows.map(row => row.doc) - // group responses by table - let groups: any = {}, - tables: Record = {} - for (let row of response) { - if 
(!row.tableId) { - row.tableId = getTableIDFromRowID(row._id) - } - const linkedTableId = row.tableId - if (groups[linkedTableId] == null) { - groups[linkedTableId] = [row] - tables[linkedTableId] = await db.get(linkedTableId) - } else { - groups[linkedTableId].push(row) - } - } - let linkedRows: Row[] = [] - for (let [tableId, rows] of Object.entries(groups)) { - // need to include the IDs in these rows for any links they may have - linkedRows = linkedRows.concat( - await outputProcessing(tables[tableId], rows as Row[]) + const params = getMultiIDParams(linkVals.map(linkVal => linkVal.id)) + let linkedRows = (await db.allDocs(params)).rows.map(row => row.doc) + + // get the linked tables + const linkTableIds = getLinkedTableIDs(table as Table) + const linkTables = await sdk.tables.getTables(linkTableIds) + + // perform output processing + let final: Promise[] = [] + for (let linkTable of linkTables) { + const relatedRows = linkedRows.filter(row => row.tableId === linkTable._id) + // include the row being enriched for performance reasons, don't need to fetch it to include + final = final.concat( + outputProcessing(linkTable, relatedRows, { + // have to clone to avoid JSON cycle + fromRow: cloneDeep(row), + squash: true, + }) ) } + // finalise the promises + linkedRows = flatten(await Promise.all(final)) // insert the link rows in the correct place throughout the main row for (let fieldName of Object.keys(table.schema)) { let field = table.schema[fieldName] if (field.type === FieldTypes.LINK) { - // find the links that pertain to this field, get their indexes - const linkIndexes = linkVals - .filter(link => link.fieldName === fieldName) - .map(link => linkVals.indexOf(link)) + // find the links that pertain to this field + const links = linkVals.filter(link => link.fieldName === fieldName) // find the rows that the links state are linked to this field - row[fieldName] = linkedRows.filter((linkRow, index) => - linkIndexes.includes(index) + row[fieldName] = linkedRows.filter(linkRow => + links.find(link => link.id === linkRow._id) ) } } diff --git a/packages/server/src/api/controllers/row/staticFormula.ts b/packages/server/src/api/controllers/row/staticFormula.ts index efe6f8719c..6f426c6fa0 100644 --- a/packages/server/src/api/controllers/row/staticFormula.ts +++ b/packages/server/src/api/controllers/row/staticFormula.ts @@ -149,7 +149,7 @@ export async function finaliseRow( await db.put(table) } catch (err: any) { if (err.status === 409) { - const updatedTable = await sdk.tables.getTable(table._id) + const updatedTable = await sdk.tables.getTable(table._id!) 
let response = processAutoColumn(null, updatedTable, row, { reprocessing: true, }) diff --git a/packages/server/src/api/controllers/row/utils.ts b/packages/server/src/api/controllers/row/utils.ts index 1243d18847..cd311fdf0f 100644 --- a/packages/server/src/api/controllers/row/utils.ts +++ b/packages/server/src/api/controllers/row/utils.ts @@ -17,20 +17,6 @@ import sdk from "../../../sdk" import validateJs from "validate.js" import { cloneDeep } from "lodash/fp" -function isForeignKey(key: string, table: Table) { - const relationships = Object.values(table.schema).filter( - column => column.type === FieldType.LINK - ) - return relationships.some( - relationship => - ( - relationship as - | OneToManyRelationshipFieldMetadata - | ManyToOneRelationshipFieldMetadata - ).foreignKey === key - ) -} - validateJs.extend(validateJs.validators.datetime, { parse: function (value: string) { return new Date(value).getTime() @@ -60,7 +46,7 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) { return row } -export function getTableId(ctx: Ctx) { +export function getTableId(ctx: Ctx): string { // top priority, use the URL first if (ctx.params?.sourceId) { return ctx.params.sourceId @@ -77,112 +63,7 @@ export function getTableId(ctx: Ctx) { if (ctx.params?.viewName) { return ctx.params.viewName } -} - -export async function validate({ - tableId, - row, - table, -}: { - tableId?: string - row: Row - table?: Table -}) { - let fetchedTable: Table - if (!table) { - fetchedTable = await sdk.tables.getTable(tableId) - } else { - fetchedTable = table - } - const errors: any = {} - for (let fieldName of Object.keys(fetchedTable.schema)) { - const column = fetchedTable.schema[fieldName] - const constraints = cloneDeep(column.constraints) - const type = column.type - // foreign keys are likely to be enriched - if (isForeignKey(fieldName, fetchedTable)) { - continue - } - // formulas shouldn't validated, data will be deleted anyway - if (type === FieldTypes.FORMULA || column.autocolumn) { - continue - } - // special case for options, need to always allow unselected (empty) - if (type === FieldTypes.OPTIONS && constraints?.inclusion) { - constraints.inclusion.push(null as any, "") - } - let res - - // Validate.js doesn't seem to handle array - if (type === FieldTypes.ARRAY && row[fieldName]) { - if (row[fieldName].length) { - if (!Array.isArray(row[fieldName])) { - row[fieldName] = row[fieldName].split(",") - } - row[fieldName].map((val: any) => { - if ( - !constraints?.inclusion?.includes(val) && - constraints?.inclusion?.length !== 0 - ) { - errors[fieldName] = "Field not in list" - } - }) - } else if (constraints?.presence && row[fieldName].length === 0) { - // non required MultiSelect creates an empty array, which should not throw errors - errors[fieldName] = [`${fieldName} is required`] - } - } else if ( - (type === FieldTypes.ATTACHMENT || type === FieldTypes.JSON) && - typeof row[fieldName] === "string" - ) { - // this should only happen if there is an error - try { - const json = JSON.parse(row[fieldName]) - if (type === FieldTypes.ATTACHMENT) { - if (Array.isArray(json)) { - row[fieldName] = json - } else { - errors[fieldName] = [`Must be an array`] - } - } - } catch (err) { - errors[fieldName] = [`Contains invalid JSON`] - } - } else { - res = validateJs.single(row[fieldName], constraints) - } - if (res) errors[fieldName] = res - } - return { valid: Object.keys(errors).length === 0, errors } -} - -// don't do a pure falsy check, as 0 is included -// 
https://github.com/Budibase/budibase/issues/10118 -export function removeEmptyFilters(filters: SearchFilters) { - for (let filterField of NoEmptyFilterStrings) { - if (!filters[filterField]) { - continue - } - - for (let filterType of Object.keys(filters)) { - if (filterType !== filterField) { - continue - } - // don't know which one we're checking, type could be anything - const value = filters[filterType] as unknown - if (typeof value === "object") { - for (let [key, value] of Object.entries( - filters[filterType] as object - )) { - if (value == null || value === "") { - // @ts-ignore - delete filters[filterField][key] - } - } - } - } - } - return filters + throw new Error("Unable to find table ID in request") } export function isUserMetadataTable(tableId: string) { diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts index 44f673f284..afb2a9d12d 100644 --- a/packages/server/src/api/controllers/table/index.ts +++ b/packages/server/src/api/controllers/table/index.ts @@ -16,6 +16,7 @@ import { Table, TableResponse, UserCtx, + Row, } from "@budibase/types" import sdk from "../../../sdk" import { jsonFromCsvString } from "../../../utilities/csv" @@ -139,8 +140,7 @@ export async function validateNewTableImport(ctx: UserCtx) { } export async function validateExistingTableImport(ctx: UserCtx) { - const { rows, tableId }: { rows: unknown; tableId: unknown } = - ctx.request.body + const { rows, tableId }: { rows: Row[]; tableId?: string } = ctx.request.body let schema = null if (tableId) { diff --git a/packages/server/src/api/controllers/user.ts b/packages/server/src/api/controllers/user.ts index dbbfc5c586..b6c3e7c6bd 100644 --- a/packages/server/src/api/controllers/user.ts +++ b/packages/server/src/api/controllers/user.ts @@ -1,13 +1,11 @@ -import { generateUserFlagID } from "../../db/utils" -import { InternalTables } from "../../db/utils" +import { generateUserFlagID, InternalTables } from "../../db/utils" import { getFullUser } from "../../utilities/users" import { context } from "@budibase/backend-core" import { Ctx, UserCtx } from "@budibase/types" import sdk from "../../sdk" export async function fetchMetadata(ctx: Ctx) { - const users = await sdk.users.fetchMetadata() - ctx.body = users + ctx.body = await sdk.users.fetchMetadata() } export async function updateSelfMetadata(ctx: UserCtx) { diff --git a/packages/server/src/api/controllers/view/viewBuilder.ts b/packages/server/src/api/controllers/view/viewBuilder.ts index 1e0390133d..cbe7e72d04 100644 --- a/packages/server/src/api/controllers/view/viewBuilder.ts +++ b/packages/server/src/api/controllers/view/viewBuilder.ts @@ -88,8 +88,8 @@ const SCHEMA_MAP: Record = { /** * Iterates through the array of filters to create a JS * expression that gets used in a CouchDB view. - * @param {Array} filters - an array of filter objects - * @returns {String} JS Expression + * @param filters - an array of filter objects + * @returns JS Expression */ function parseFilterExpression(filters: ViewFilter[]) { const expression = [] @@ -125,8 +125,8 @@ function parseFilterExpression(filters: ViewFilter[]) { /** * Returns a CouchDB compliant emit() expression that is used to emit the * correct key/value pairs for custom views. 
- * @param {String?} field - field to use for calculations, if any
- * @param {String?} groupBy - field to group calculation results on, if any
+ * @param field - field to use for calculations, if any
+ * @param groupBy - field to group calculation results on, if any
  */
 function parseEmitExpression(field: string, groupBy: string) {
   return `emit(doc["${groupBy || "_id"}"], doc["${field}"]);`
 }
@@ -136,7 +136,7 @@ function parseEmitExpression(field: string, groupBy: string) {
  * Return a fully parsed CouchDB compliant view definition
  * that will be stored in the design document in the database.
  *
- * @param {Object} viewDefinition - the JSON definition for a custom view.
+ * @param viewDefinition - the JSON definition for a custom view.
  *  field: field that calculations will be performed on
  *  tableId: tableId of the table this view was created from
  *  groupBy: field that calculations will be grouped by. Field must be present for this to be useful
diff --git a/packages/server/src/automations/automationUtils.ts b/packages/server/src/automations/automationUtils.ts
index 44de2e7d6b..3e25665a60 100644
--- a/packages/server/src/automations/automationUtils.ts
+++ b/packages/server/src/automations/automationUtils.ts
@@ -14,13 +14,13 @@ import { LoopStep, LoopStepType, LoopInput } from "../definitions/automations"
 * make sure that the post template statement can be cast into the correct type, this function does this for numbers
 * and booleans.
 *
- * @param {object} inputs An object of inputs, please note this will not recurse down into any objects within, it simply
+ * @param inputs An object of inputs, please note this will not recurse down into any objects within, it simply
 * cleanses the top level inputs, however it can be used by recursively calling it deeper into the object structures if
 * the schema is known.
- * @param {object} schema The defined schema of the inputs, in the form of JSON schema. The schema definition of an
+ * @param schema The defined schema of the inputs, in the form of JSON schema. The schema definition of an
 * automation is the likely use case of this, however validate.js syntax can be converted closely enough to use this by
 * wrapping the schema properties in a top level "properties" object.
- * @returns {object} The inputs object which has had all the various types supported by this function converted to their
+ * @returns The inputs object which has had all the various types supported by this function converted to their
 * primitive types.
 */
 export function cleanInputValues(inputs: Record<string, any>, schema?: any) {
@@ -74,9 +74,9 @@ export function cleanInputValues(inputs: Record<string, any>, schema?: any) {
 * the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead
 * perform the cleanInputValues function on the input row.
 *
- * @param {string} tableId The ID of the Table/Table which the schema is to be retrieved for.
- * @param {object} row The input row structure which requires clean-up after having been through template statements.
- * @returns {Promise} The cleaned up rows object, will should now have all the required primitive types.
+ * @param tableId The ID of the table which the schema is to be retrieved for.
+ * @param row The input row structure which requires clean-up after having been through template statements.
+ * @returns The cleaned up rows object, which should now have all the required primitive types.
*/ export async function cleanUpRow(tableId: string, row: Row) { let table = await sdk.tables.getTable(tableId) diff --git a/packages/server/src/automations/utils.ts b/packages/server/src/automations/utils.ts index 9522ad6ccd..53c4d9d3b7 100644 --- a/packages/server/src/automations/utils.ts +++ b/packages/server/src/automations/utils.ts @@ -148,8 +148,8 @@ export function isRebootTrigger(auto: Automation) { /** * This function handles checking of any cron jobs that need to be enabled/updated. - * @param {string} appId The ID of the app in which we are checking for webhooks - * @param {object|undefined} automation The automation object to be updated. + * @param appId The ID of the app in which we are checking for webhooks + * @param automation The automation object to be updated. */ export async function enableCronTrigger(appId: any, automation: Automation) { const trigger = automation ? automation.definition.trigger : null @@ -187,10 +187,10 @@ export async function enableCronTrigger(appId: any, automation: Automation) { /** * This function handles checking if any webhooks need to be created or deleted for automations. - * @param {string} appId The ID of the app in which we are checking for webhooks - * @param {object|undefined} oldAuto The old automation object if updating/deleting - * @param {object|undefined} newAuto The new automation object if creating/updating - * @returns {Promise} After this is complete the new automation object may have been updated and should be + * @param appId The ID of the app in which we are checking for webhooks + * @param oldAuto The old automation object if updating/deleting + * @param newAuto The new automation object if creating/updating + * @returns After this is complete the new automation object may have been updated and should be * written to DB (this does not write to DB as it would be wasteful to repeat). */ export async function checkForWebhooks({ oldAuto, newAuto }: any) { @@ -257,8 +257,8 @@ export async function checkForWebhooks({ oldAuto, newAuto }: any) { /** * When removing an app/unpublishing it need to make sure automations are cleaned up (cron). - * @param appId {string} the app that is being removed. - * @return {Promise} clean is complete if this succeeds. + * @param appId the app that is being removed. + * @return clean is complete if this succeeds. */ export async function cleanupAutomations(appId: any) { await disableAllCrons(appId) @@ -267,7 +267,7 @@ export async function cleanupAutomations(appId: any) { /** * Checks if the supplied automation is of a recurring type. * @param automation The automation to check. - * @return {boolean} if it is recurring (cron). + * @return if it is recurring (cron). */ export function isRecurring(automation: Automation) { return automation.definition.trigger.stepId === definitions.CRON.stepId diff --git a/packages/server/src/db/linkedRows/LinkController.ts b/packages/server/src/db/linkedRows/LinkController.ts index f18a507f5a..c4eed1169a 100644 --- a/packages/server/src/db/linkedRows/LinkController.ts +++ b/packages/server/src/db/linkedRows/LinkController.ts @@ -38,7 +38,7 @@ class LinkController { /** * Retrieves the table, if it was not already found in the eventData. - * @returns {Promise} This will return a table based on the event data, either + * @returns This will return a table based on the event data, either * if it was in the event already, or it uses the specified tableId to get it. 
*/ async table() { @@ -52,8 +52,8 @@ class LinkController { /** * Checks if the table this was constructed with has any linking columns currently. * If the table has not been retrieved this will retrieve it based on the eventData. - * @params {object|null} table If a table that is not known to the link controller is to be tested. - * @returns {Promise} True if there are any linked fields, otherwise it will return + * @params table If a table that is not known to the link controller is to be tested. + * @returns True if there are any linked fields, otherwise it will return * false. */ async doesTableHaveLinkedFields(table?: Table) { @@ -159,7 +159,7 @@ class LinkController { /** * When a row is saved this will carry out the necessary operations to make sure * the link has been created/updated. - * @returns {Promise} returns the row that has been cleaned and prepared to be written to the DB - links + * @returns returns the row that has been cleaned and prepared to be written to the DB - links * have also been created. */ async rowSaved() { @@ -271,7 +271,7 @@ class LinkController { /** * When a row is deleted this will carry out the necessary operations to make sure * any links that existed have been removed. - * @returns {Promise} The operation has been completed and the link documents should now + * @returns The operation has been completed and the link documents should now * be accurate. This also returns the row that was deleted. */ async rowDeleted() { @@ -293,8 +293,8 @@ class LinkController { /** * Remove a field from a table as well as any linked rows that pertained to it. - * @param {string} fieldName The field to be removed from the table. - * @returns {Promise} The table has now been updated. + * @param fieldName The field to be removed from the table. + * @returns The table has now been updated. */ async removeFieldFromTable(fieldName: string) { let oldTable = this._oldTable @@ -333,7 +333,7 @@ class LinkController { /** * When a table is saved this will carry out the necessary operations to make sure * any linked tables are notified and updated correctly. - * @returns {Promise} The operation has been completed and the link documents should now + * @returns The operation has been completed and the link documents should now * be accurate. Also returns the table that was operated on. */ async tableSaved() { @@ -394,7 +394,7 @@ class LinkController { /** * Update a table, this means if a field is removed need to handle removing from other table and removing * any link docs that pertained to it. - * @returns {Promise} The table which has been saved, same response as with the tableSaved function. + * @returns The table which has been saved, same response as with the tableSaved function. */ async tableUpdated() { const oldTable = this._oldTable @@ -418,7 +418,7 @@ class LinkController { * When a table is deleted this will carry out the necessary operations to make sure * any linked tables have the joining column correctly removed as well as removing any * now stale linking documents. - * @returns {Promise} The operation has been completed and the link documents should now + * @returns The operation has been completed and the link documents should now * be accurate. Also returns the table that was operated on. 
*/ async tableDeleted() { diff --git a/packages/server/src/db/linkedRows/LinkDocument.ts b/packages/server/src/db/linkedRows/LinkDocument.ts index 9035641d5f..234f43cb48 100644 --- a/packages/server/src/db/linkedRows/LinkDocument.ts +++ b/packages/server/src/db/linkedRows/LinkDocument.ts @@ -6,12 +6,12 @@ import { LinkDocument } from "@budibase/types" * Creates a new link document structure which can be put to the database. It is important to * note that while this talks about linker/linked the link is bi-directional and for all intent * and purposes it does not matter from which direction the link was initiated. - * @param {string} tableId1 The ID of the first table (the linker). - * @param {string} tableId2 The ID of the second table (the linked). - * @param {string} fieldName1 The name of the field in the linker table. - * @param {string} fieldName2 The name of the field in the linked table. - * @param {string} rowId1 The ID of the row which is acting as the linker. - * @param {string} rowId2 The ID of the row which is acting as the linked. + * @param tableId1 The ID of the first table (the linker). + * @param tableId2 The ID of the second table (the linked). + * @param fieldName1 The name of the field in the linker table. + * @param fieldName2 The name of the field in the linked table. + * @param rowId1 The ID of the row which is acting as the linker. + * @param rowId2 The ID of the row which is acting as the linked. * @constructor */ class LinkDocumentImpl implements LinkDocument { diff --git a/packages/server/src/db/linkedRows/index.ts b/packages/server/src/db/linkedRows/index.ts index 3c5379ac97..7a7a06551e 100644 --- a/packages/server/src/db/linkedRows/index.ts +++ b/packages/server/src/db/linkedRows/index.ts @@ -9,13 +9,13 @@ import { getLinkedTable, } from "./linkUtils" import flatten from "lodash/flatten" -import { FieldTypes } from "../../constants" import { getMultiIDParams, USER_METDATA_PREFIX } from "../utils" import partition from "lodash/partition" import { getGlobalUsersFromMetadata } from "../../utilities/global" import { processFormulas } from "../../utilities/rowProcessor" import { context } from "@budibase/backend-core" -import { Table, Row, LinkDocumentValue } from "@budibase/types" +import { Table, Row, LinkDocumentValue, FieldType } from "@budibase/types" +import sdk from "../../sdk" export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils" @@ -35,7 +35,7 @@ export const EventType = { function clearRelationshipFields(table: Table, rows: Row[]) { for (let [key, field] of Object.entries(table.schema)) { - if (field.type === FieldTypes.LINK) { + if (field.type === FieldType.LINK) { rows = rows.map(row => { delete row[key] return row @@ -45,7 +45,7 @@ function clearRelationshipFields(table: Table, rows: Row[]) { return rows } -async function getLinksForRows(rows: Row[]) { +async function getLinksForRows(rows: Row[]): Promise { const tableIds = [...new Set(rows.map(el => el.tableId))] // start by getting all the link values for performance reasons const promises = tableIds.map(tableId => @@ -90,13 +90,13 @@ async function getFullLinkedDocs(links: LinkDocumentValue[]) { /** * Update link documents for a row or table - this is to be called by the API controller when a change is occurring. 
- * @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the
+ * @param args.eventType states what type of change is occurring, meaning this can be expanded upon in the
 * future quite easily (all updates go through one function).
- * @param {string} args.tableId The ID of the of the table which is being changed.
- * @param {object|undefined} args.row The row which is changing, e.g. created, updated or deleted.
- * @param {object|undefined} args.table If the table has already been retrieved this can be used to reduce database gets.
- * @param {object|undefined} args.oldTable If the table is being updated then the old table can be provided for differencing.
- * @returns {Promise} When the update is complete this will respond successfully. Returns the row for
+ * @param args.tableId The ID of the table which is being changed.
+ * @param args.row The row which is changing, e.g. created, updated or deleted.
+ * @param args.table If the table has already been retrieved this can be used to reduce database gets.
+ * @param args.oldTable If the table is being updated then the old table can be provided for differencing.
+ * @returns When the update is complete this will respond successfully. Returns the row for
 * row operations and the table for table operations.
 */
 export async function updateLinks(args: {
@@ -144,34 +144,59 @@ export async function updateLinks(args: {
 /**
  * Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row.
  * This is required for formula fields, this may only be utilised internally (for now).
- * @param {object} table The table from which the rows originated.
- * @param {array} rows The rows which are to be enriched.
- * @return {Promise<*>} returns the rows with all of the enriched relationships on it.
+ * @param table The table from which the rows originated.
+ * @param rows The rows which are to be enriched.
+ * @param opts optional - options like passing in a base row to use for enrichment.
+ * @return returns the rows with all of the enriched relationships on them.
*/ -export async function attachFullLinkedDocs(table: Table, rows: Row[]) { +export async function attachFullLinkedDocs( + table: Table, + rows: Row[], + opts?: { fromRow?: Row } +) { const linkedTableIds = getLinkedTableIDs(table) if (linkedTableIds.length === 0) { return rows } - // get all the links - const links = (await getLinksForRows(rows)).filter(link => + // get tables and links + let response = await Promise.all([ + getLinksForRows(rows), + sdk.tables.getTables(linkedTableIds), + ]) + // find the links that pertain to one of the rows that is being enriched + const links = (response[0] as LinkDocumentValue[]).filter(link => rows.some(row => row._id === link.thisId) ) + // if fromRow has been passed in, then we don't need to fetch it (optimisation) + let linksWithoutFromRow = links + if (opts?.fromRow) { + linksWithoutFromRow = links.filter(link => link.id !== opts?.fromRow?._id) + } + const linkedTables = response[1] as Table[] // clear any existing links that could be dupe'd rows = clearRelationshipFields(table, rows) // now get the docs and combine into the rows - let linked = await getFullLinkedDocs(links) - const linkedTables: Table[] = [] + let linked = [] + if (linksWithoutFromRow.length > 0) { + linked = await getFullLinkedDocs(linksWithoutFromRow) + } for (let row of rows) { for (let link of links.filter(link => link.thisId === row._id)) { if (row[link.fieldName] == null) { row[link.fieldName] = [] } - const linkedRow = linked.find(row => row._id === link.id) + let linkedRow: Row + if (opts?.fromRow && opts?.fromRow?._id === link.id) { + linkedRow = opts.fromRow! + } else { + linkedRow = linked.find(row => row._id === link.id) + } if (linkedRow) { const linkedTableId = linkedRow.tableId || getRelatedTableForField(table, link.fieldName) - const linkedTable = await getLinkedTable(linkedTableId, linkedTables) + const linkedTable = linkedTables.find( + table => table._id === linkedTableId + ) if (linkedTable) { row[link.fieldName].push(processFormulas(linkedTable, linkedRow)) } @@ -183,9 +208,9 @@ export async function attachFullLinkedDocs(table: Table, rows: Row[]) { /** * This function will take the given enriched rows and squash the links to only contain the primary display field. - * @param {object} table The table from which the rows originated. - * @param {array} enriched The pre-enriched rows (full docs) which are to be squashed. - * @returns {Promise} The rows after having their links squashed to only contain the ID and primary display. + * @param table The table from which the rows originated. + * @param enriched The pre-enriched rows (full docs) which are to be squashed. + * @returns The rows after having their links squashed to only contain the ID and primary display. 
*/ export async function squashLinksToPrimaryDisplay( table: Table, @@ -199,13 +224,13 @@ export async function squashLinksToPrimaryDisplay( // this only fetches the table if its not already in array const rowTable = await getLinkedTable(row.tableId!, linkedTables) for (let [column, schema] of Object.entries(rowTable?.schema || {})) { - if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) { + if (schema.type !== FieldType.LINK || !Array.isArray(row[column])) { continue } const newLinks = [] for (let link of row[column]) { const linkTblId = link.tableId || getRelatedTableForField(table, column) - const linkedTable = await getLinkedTable(linkTblId, linkedTables) + const linkedTable = await getLinkedTable(linkTblId!, linkedTables) const obj: any = { _id: link._id } if (linkedTable?.primaryDisplay && link[linkedTable.primaryDisplay]) { obj.primaryDisplay = link[linkedTable.primaryDisplay] diff --git a/packages/server/src/db/linkedRows/linkUtils.ts b/packages/server/src/db/linkedRows/linkUtils.ts index 5129299520..c74674a865 100644 --- a/packages/server/src/db/linkedRows/linkUtils.ts +++ b/packages/server/src/db/linkedRows/linkUtils.ts @@ -17,33 +17,36 @@ export const IncludeDocs = { /** * Gets the linking documents, not the linked documents themselves. - * @param {string} args.tableId The table which we are searching for linked rows against. - * @param {string|null} args.fieldName The name of column/field which is being altered, only looking for + * @param args.tableId The table which we are searching for linked rows against. + * @param args.fieldName The name of column/field which is being altered, only looking for * linking documents that are related to it. If this is not specified then the table level will be assumed. - * @param {string|null} args.rowId The ID of the row which we want to find linking documents for - + * @param args.rowId The ID of the row which we want to find linking documents for - * if this is not specified then it will assume table or field level depending on whether the * field name has been specified. - * @param {boolean|null} args.includeDocs whether to include docs in the response call, this is considerably slower so only + * @param args.includeDocs whether to include docs in the response call, this is considerably slower so only * use this if actually interested in the docs themselves. - * @returns {Promise} This will return an array of the linking documents that were found + * @returns This will return an array of the linking documents that were found * (if any). 
*/ export async function getLinkDocuments(args: { tableId?: string rowId?: string - includeDocs?: any + fieldName?: string + includeDocs?: boolean }): Promise { - const { tableId, rowId, includeDocs } = args + const { tableId, rowId, fieldName, includeDocs } = args const db = context.getAppDB() let params: any - if (rowId != null) { + if (rowId) { params = { key: [tableId, rowId] } } // only table is known else { params = { startKey: [tableId], endKey: [tableId, {}] } } - params.include_docs = !!includeDocs + if (includeDocs) { + params.include_docs = true + } try { let linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows // filter to get unique entries @@ -63,6 +66,14 @@ export async function getLinkDocuments(args: { return unique }) + // filter down to just the required field name + if (fieldName) { + linkRows = linkRows.filter(link => { + const value = link.value as LinkDocumentValue + return value.fieldName === fieldName + }) + } + // return docs if docs requested, otherwise just the value information if (includeDocs) { return linkRows.map(row => row.doc) as LinkDocument[] } else { @@ -87,7 +98,7 @@ export function getUniqueByProp(array: any[], prop: string) { }) } -export function getLinkedTableIDs(table: Table) { +export function getLinkedTableIDs(table: Table): string[] { return Object.values(table.schema) .filter(isRelationshipColumn) .map(column => column.tableId) diff --git a/packages/server/src/db/utils.ts b/packages/server/src/db/utils.ts index 428c955eb2..2c07bd8d22 100644 --- a/packages/server/src/db/utils.ts +++ b/packages/server/src/db/utils.ts @@ -60,7 +60,7 @@ export function getTableParams(tableId?: Optional, otherProps = {}) { /** * Generates a new table ID. - * @returns {string} The new table ID which the table doc can be stored under. + * @returns The new table ID which the table doc can be stored under. */ export function generateTableID() { return `${DocumentType.TABLE}${SEPARATOR}${newid()}` @@ -68,8 +68,8 @@ export function generateTableID() { /** * Given a row ID this will find the table ID within it (only works for internal tables). - * @param {string} rowId The ID of the row. - * @returns {string} The table ID. + * @param rowId The ID of the row. + * @returns The table ID. */ export function getTableIDFromRowID(rowId: string) { const components = rowId @@ -90,7 +90,7 @@ export function getAutomationParams( /** * Generates a new automation ID. - * @returns {string} The new automation ID which the automation doc can be stored under. + * @returns The new automation ID which the automation doc can be stored under. */ export function generateAutomationID() { return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}` @@ -99,13 +99,13 @@ export function generateAutomationID() { /** * Generates a new link doc ID. This is currently not usable with the alldocs call, * instead a view is built to make walking to tree easier. - * @param {string} tableId1 The ID of the linker table. - * @param {string} tableId2 The ID of the linked table. - * @param {string} rowId1 The ID of the linker row. - * @param {string} rowId2 The ID of the linked row. - * @param {string} fieldName1 The name of the field in the linker row. - * @param {string} fieldName2 the name of the field in the linked row. - * @returns {string} The new link doc ID which the automation doc can be stored under. + * @param tableId1 The ID of the linker table. + * @param tableId2 The ID of the linked table. + * @param rowId1 The ID of the linker row. + * @param rowId2 The ID of the linked row. 
+ * @param fieldName1 The name of the field in the linker row. + * @param fieldName2 the name of the field in the linked row. + * @returns The new link doc ID which the automation doc can be stored under. */ export function generateLinkID( tableId1: string, @@ -130,7 +130,7 @@ export function getLinkParams(otherProps: any = {}) { /** * Generates a new layout ID. - * @returns {string} The new layout ID which the layout doc can be stored under. + * @returns The new layout ID which the layout doc can be stored under. */ export function generateLayoutID(id?: string) { return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}` @@ -145,7 +145,7 @@ export function getLayoutParams(layoutId?: Optional, otherProps: any = {}) { /** * Generates a new screen ID. - * @returns {string} The new screen ID which the screen doc can be stored under. + * @returns The new screen ID which the screen doc can be stored under. */ export function generateScreenID() { return `${DocumentType.SCREEN}${SEPARATOR}${newid()}` @@ -160,7 +160,7 @@ export function getScreenParams(screenId?: Optional, otherProps: any = {}) { /** * Generates a new webhook ID. - * @returns {string} The new webhook ID which the webhook doc can be stored under. + * @returns The new webhook ID which the webhook doc can be stored under. */ export function generateWebhookID() { return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}` @@ -175,7 +175,7 @@ export function getWebhookParams(webhookId?: Optional, otherProps: any = {}) { /** * Generates a new datasource ID. - * @returns {string} The new datasource ID which the webhook doc can be stored under. + * @returns The new datasource ID which the webhook doc can be stored under. */ export function generateDatasourceID({ plus = false } = {}) { return `${ @@ -202,7 +202,7 @@ export function getDatasourcePlusParams( /** * Generates a new query ID. - * @returns {string} The new query ID which the query doc can be stored under. + * @returns The new query ID which the query doc can be stored under. */ export function generateQueryID(datasourceId: string) { return `${ @@ -242,7 +242,7 @@ export function getQueryParams(datasourceId?: Optional, otherProps: any = {}) { /** * Generates a new flag document ID. - * @returns {string} The ID of the flag document that was generated. + * @returns The ID of the flag document that was generated. */ export function generateUserFlagID(userId: string) { return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}` @@ -288,7 +288,7 @@ export function getMultiIDParams(ids: string[]) { /** * Generates a new view ID. - * @returns {string} The new view ID which the view doc can be stored under. + * @returns The new view ID which the view doc can be stored under. */ export function generateViewID(tableId: string) { return `${ diff --git a/packages/server/src/db/views/staticViews.ts b/packages/server/src/db/views/staticViews.ts index 8952a4d6a1..126198f9dc 100644 --- a/packages/server/src/db/views/staticViews.ts +++ b/packages/server/src/db/views/staticViews.ts @@ -17,7 +17,7 @@ const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR /** * Creates the link view for the instance, this will overwrite the existing one, but this should only * be called if it is found that the view does not exist. - * @returns {Promise} The view now exists, please note that the next view of this query will actually build it, + * @returns The view now exists, please note that the next view of this query will actually build it, * so it may be slow. 
*/ export async function createLinkView() { diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index add7596165..b24be1987b 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -536,7 +536,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { * @param json The JSON query DSL which is to be converted to SQL. * @param opts extra options which are to be passed into the query builder, e.g. disableReturning * which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes. - * @return {{ sql: string, bindings: object }} the query ready to be passed to the driver. + * @return the query ready to be passed to the driver. */ _query(json: QueryJson, opts: QueryOptions = {}) { const sqlClient = this.getSqlClient() diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts index 43ee4f8805..05954f0d25 100644 --- a/packages/server/src/integrations/base/sqlTable.ts +++ b/packages/server/src/integrations/base/sqlTable.ts @@ -189,7 +189,7 @@ class SqlTableQueryBuilder { /** * @param json the input JSON structure from which an SQL query will be built. - * @return {string} the operation that was found in the JSON. + * @return the operation that was found in the JSON. */ _operation(json: QueryJson): Operation { return json.endpoint.operation diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index 06ffaf955d..ff68026369 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -375,7 +375,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { /** * Fetches the tables from the sql server database and assigns them to the datasource. - * @param {*} datasourceId - datasourceId to fetch + * @param datasourceId - datasourceId to fetch * @param entities - the tables that are to be built */ async buildSchema( diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 28d8fdd84d..b3936320ac 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -258,7 +258,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { /** * Fetches the tables from the oracle table and assigns them to the datasource. - * @param {*} datasourceId - datasourceId to fetch + * @param datasourceId - datasourceId to fetch * @param entities - the tables that are to be built */ async buildSchema( diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index ef63f39d87..8479cd05d8 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -268,7 +268,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus { /** * Fetches the tables from the postgres table and assigns them to the datasource. 
- * @param {*} datasourceId - datasourceId to fetch + * @param datasourceId - datasourceId to fetch * @param entities - the tables that are to be built */ async buildSchema( diff --git a/packages/server/src/middleware/tests/trimViewRowInfo.spec.ts b/packages/server/src/middleware/tests/trimViewRowInfo.spec.ts index 17b4cc7b93..bf717d5828 100644 --- a/packages/server/src/middleware/tests/trimViewRowInfo.spec.ts +++ b/packages/server/src/middleware/tests/trimViewRowInfo.spec.ts @@ -16,6 +16,7 @@ jest.mock("../../sdk", () => ({ import sdk from "../../sdk" import { Next } from "koa" +const tableId = utils.generateTableID() const mockGetView = sdk.views.get as jest.MockedFunction const mockGetTable = sdk.tables.getTable as jest.MockedFunction< typeof sdk.tables.getTable @@ -41,6 +42,7 @@ class TestConfiguration { body: ctxRequestBody, } this.params.viewId = viewId + this.params.sourceId = tableId return this.middleware( { request: this.request as any, @@ -69,7 +71,7 @@ describe("trimViewRowInfo middleware", () => { }) const table: Table = { - _id: utils.generateTableID(), + _id: tableId, name: generator.word(), type: "table", schema: { diff --git a/packages/server/src/migrations/functions/syncQuotas.ts b/packages/server/src/migrations/functions/syncQuotas.ts index 67f38ba929..83a7670e78 100644 --- a/packages/server/src/migrations/functions/syncQuotas.ts +++ b/packages/server/src/migrations/functions/syncQuotas.ts @@ -3,6 +3,7 @@ import * as syncApps from "./usageQuotas/syncApps" import * as syncRows from "./usageQuotas/syncRows" import * as syncPlugins from "./usageQuotas/syncPlugins" import * as syncUsers from "./usageQuotas/syncUsers" +import * as syncCreators from "./usageQuotas/syncCreators" /** * Synchronise quotas to the state of the db. @@ -13,5 +14,6 @@ export const run = async () => { await syncRows.run() await syncPlugins.run() await syncUsers.run() + await syncCreators.run() }) } diff --git a/packages/server/src/migrations/functions/usageQuotas/syncCreators.ts b/packages/server/src/migrations/functions/usageQuotas/syncCreators.ts new file mode 100644 index 0000000000..ce53be925a --- /dev/null +++ b/packages/server/src/migrations/functions/usageQuotas/syncCreators.ts @@ -0,0 +1,13 @@ +import { users } from "@budibase/backend-core" +import { quotas } from "@budibase/pro" +import { QuotaUsageType, StaticQuotaName } from "@budibase/types" + +export const run = async () => { + const creatorCount = await users.getCreatorCount() + console.log(`Syncing creator count: ${creatorCount}`) + await quotas.setUsage( + creatorCount, + StaticQuotaName.CREATORS, + QuotaUsageType.STATIC + ) +} diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts new file mode 100644 index 0000000000..75fa9f217e --- /dev/null +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts @@ -0,0 +1,26 @@ +import TestConfig from "../../../../tests/utilities/TestConfiguration" +import * as syncCreators from "../syncCreators" +import { quotas } from "@budibase/pro" + +describe("syncCreators", () => { + let config = new TestConfig(false) + + beforeEach(async () => { + await config.init() + }) + + afterAll(config.end) + + it("syncs creators", async () => { + return config.doInContext(null, async () => { + await config.createUser({ admin: true }) + + await syncCreators.run() + + const usageDoc = await quotas.getQuotaUsage() + // default + additional creator + const creatorsCount = 2 
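The new syncCreators migration above recounts creators from the source of truth and overwrites the stored static quota usage with that figure, rather than incrementing it. A rough sketch of that recount-and-set pattern, using a hypothetical in-memory UsageStore rather than the @budibase/pro quotas API:

// Hypothetical usage store for illustration - not the @budibase/pro quotas API.
interface UsageStore {
  setUsage(name: string, value: number): Promise<void>
  getUsage(name: string): Promise<number>
}

// Recompute a usage figure from the source of truth and overwrite the stored value,
// so a drifted counter is corrected rather than incremented further.
async function syncStaticUsage(
  store: UsageStore,
  name: string,
  count: () => Promise<number>
): Promise<number> {
  const actual = await count()
  console.log(`Syncing ${name} count: ${actual}`)
  await store.setUsage(name, actual)
  return actual
}

// Example usage with an in-memory store and a fixed count.
const memoryStore: UsageStore = (() => {
  const values = new Map<string, number>()
  return {
    async setUsage(name: string, value: number) {
      values.set(name, value)
    },
    async getUsage(name: string) {
      return values.get(name) ?? 0
    },
  }
})()

syncStaticUsage(memoryStore, "creators", async () => 2).then(async () => {
  console.log(await memoryStore.getUsage("creators")) // 2
})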
+ expect(usageDoc.usageQuota.creators).toBe(creatorsCount) + }) + }) +}) diff --git a/packages/server/src/sdk/app/backups/exports.ts b/packages/server/src/sdk/app/backups/exports.ts index fe875f0c3d..d5ea31cdf5 100644 --- a/packages/server/src/sdk/app/backups/exports.ts +++ b/packages/server/src/sdk/app/backups/exports.ts @@ -48,10 +48,10 @@ function tarFilesToTmp(tmpDir: string, files: string[]) { /** * Exports a DB to either file or a variable (memory). - * @param {string} dbName the DB which is to be exported. - * @param {object} opts various options for the export, e.g. whether to stream, + * @param dbName the DB which is to be exported. + * @param opts various options for the export, e.g. whether to stream, * a filter function or the name of the export. - * @return {*} either a readable stream or a string + * @return either a readable stream or a string */ export async function exportDB( dbName: string, @@ -98,9 +98,9 @@ function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) { /** * Local utility to back up the database state for an app, excluding global user * data or user relationships. - * @param {string} appId The app to back up - * @param {object} config Config to send to export DB/attachment export - * @returns {*} either a string or a stream of the backup + * @param appId The app to back up + * @param config Config to send to export DB/attachment export + * @returns either a string or a stream of the backup */ export async function exportApp(appId: string, config?: ExportOpts) { const prodAppId = dbCore.getProdAppID(appId) @@ -175,10 +175,10 @@ export async function exportApp(appId: string, config?: ExportOpts) { /** * Streams a backup of the database state for an app - * @param {string} appId The ID of the app which is to be backed up. - * @param {boolean} excludeRows Flag to state whether the export should include data. - * @param {string} encryptPassword password for encrypting the export. - * @returns {*} a readable stream of the backup which is written in real time + * @param appId The ID of the app which is to be backed up. + * @param excludeRows Flag to state whether the export should include data. + * @param encryptPassword password for encrypting the export. + * @returns a readable stream of the backup which is written in real time */ export async function streamExportApp({ appId, diff --git a/packages/server/src/sdk/app/backups/imports.ts b/packages/server/src/sdk/app/backups/imports.ts index c8e54e9e1d..5b481acdc3 100644 --- a/packages/server/src/sdk/app/backups/imports.ts +++ b/packages/server/src/sdk/app/backups/imports.ts @@ -96,8 +96,8 @@ async function updateAutomations(prodAppId: string, db: Database) { /** * This function manages temporary template files which are stored by Koa. - * @param {Object} template The template object retrieved from the Koa context object. - * @returns {Object} Returns a fs read stream which can be loaded into the database. + * @param template The template object retrieved from the Koa context object. + * @returns Returns a fs read stream which can be loaded into the database. 
*/ async function getTemplateStream(template: TemplateType) { if (template.file && template.file.type !== "text/plain") { diff --git a/packages/server/src/sdk/app/datasources/datasources.ts b/packages/server/src/sdk/app/datasources/datasources.ts index 35107fd6b8..fb5d04b03e 100644 --- a/packages/server/src/sdk/app/datasources/datasources.ts +++ b/packages/server/src/sdk/app/datasources/datasources.ts @@ -23,10 +23,13 @@ import { getTableParams, } from "../../../db/utils" import sdk from "../../index" +import datasource from "../../../api/routes/datasource" const ENV_VAR_PREFIX = "env." -export async function fetch() { +export async function fetch(opts?: { + enriched: boolean +}): Promise { // Get internal tables const db = context.getAppDB() const internalTables = await db.allDocs( @@ -44,7 +47,7 @@ export async function fetch() { const bbInternalDb = { ...BudibaseInternalDB, - } + } as Datasource // Get external datasources const datasources = ( @@ -66,7 +69,18 @@ export async function fetch() { } } - return [bbInternalDb, ...datasources] + if (opts?.enriched) { + const envVars = await getEnvironmentVariables() + const promises = datasources.map(datasource => + enrichDatasourceWithValues(datasource, envVars) + ) + const enriched = (await Promise.all(promises)).map( + result => result.datasource + ) + return [bbInternalDb, ...enriched] + } else { + return [bbInternalDb, ...datasources] + } } export function areRESTVariablesValid(datasource: Datasource) { @@ -107,9 +121,12 @@ export function checkDatasourceTypes(schema: Integration, config: any) { return config } -async function enrichDatasourceWithValues(datasource: Datasource) { +async function enrichDatasourceWithValues( + datasource: Datasource, + variables?: Record +) { const cloned = cloneDeep(datasource) - const env = await getEnvironmentVariables() + const env = variables ? variables : await getEnvironmentVariables() //Do not process entities, as we do not want to process formulas const { entities, ...clonedWithoutEntities } = cloned const processed = processObjectSync( @@ -235,9 +252,9 @@ export function mergeConfigs(update: Datasource, old: Datasource) { if (value !== PASSWORD_REPLACEMENT) { continue } - if (old.config?.[key]) { + if (update.config && old.config && old.config?.[key]) { update.config[key] = old.config?.[key] - } else { + } else if (update.config) { delete update.config[key] } } diff --git a/packages/server/src/sdk/app/rows/external.ts b/packages/server/src/sdk/app/rows/external.ts index 568bd07e9d..8bcf89a3f5 100644 --- a/packages/server/src/sdk/app/rows/external.ts +++ b/packages/server/src/sdk/app/rows/external.ts @@ -7,11 +7,11 @@ export async function getRow( rowId: string, opts?: { relationships?: boolean } ) { - const response = (await handleRequest(Operation.READ, tableId, { + const response = await handleRequest(Operation.READ, tableId, { id: breakRowIdField(rowId), includeSqlRelationships: opts?.relationships ? IncludeRelationship.INCLUDE : IncludeRelationship.EXCLUDE, - })) as Row[] + }) return response ? 
response[0] : response } diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts index f75bd07437..ced35db9be 100644 --- a/packages/server/src/sdk/app/rows/search.ts +++ b/packages/server/src/sdk/app/rows/search.ts @@ -1,4 +1,4 @@ -import { SearchFilters, SearchParams } from "@budibase/types" +import { SearchFilters, SearchParams, Row } from "@budibase/types" import { isExternalTable } from "../../../integrations/utils" import * as internal from "./search/internal" import * as external from "./search/external" @@ -45,7 +45,7 @@ export async function exportRows( return pickApi(options.tableId).exportRows(options) } -export async function fetch(tableId: string) { +export async function fetch(tableId: string): Promise { return pickApi(tableId).fetch(tableId) } @@ -53,6 +53,6 @@ export async function fetchView( tableId: string, viewName: string, params: ViewParams -) { +): Promise { return pickApi(tableId).fetchView(viewName, params) } diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 8dd141f8ef..c41efad171 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -55,15 +55,15 @@ export async function search(options: SearchParams) { try { const table = await sdk.tables.getTable(tableId) options = searchInputMapping(table, options) - let rows = (await handleRequest(Operation.READ, tableId, { + let rows = await handleRequest(Operation.READ, tableId, { filters: query, sort, paginate: paginateObj as PaginationJson, includeSqlRelationships: IncludeRelationship.INCLUDE, - })) as Row[] + }) let hasNextPage = false if (paginate && rows.length === limit) { - const nextRows = (await handleRequest(Operation.READ, tableId, { + const nextRows = await handleRequest(Operation.READ, tableId, { filters: query, sort, paginate: { @@ -71,7 +71,7 @@ export async function search(options: SearchParams) { page: bookmark! 
* limit + 1, }, includeSqlRelationships: IncludeRelationship.INCLUDE, - })) as Row[] + }) hasNextPage = nextRows.length > 0 } @@ -172,12 +172,18 @@ export async function exportRows( } } -export async function fetch(tableId: string) { - const response = await handleRequest(Operation.READ, tableId, { - includeSqlRelationships: IncludeRelationship.INCLUDE, - }) +export async function fetch(tableId: string): Promise { + const response = await handleRequest( + Operation.READ, + tableId, + { + includeSqlRelationships: IncludeRelationship.INCLUDE, + } + ) const table = await sdk.tables.getTable(tableId) - return await outputProcessing(table, response, { preserveLinks: true }) + return await outputProcessing(table, response, { + preserveLinks: true, + }) } export async function fetchView(viewName: string) { diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts index d78c0213b3..779ff5f777 100644 --- a/packages/server/src/sdk/app/rows/search/internal.ts +++ b/packages/server/src/sdk/app/rows/search/internal.ts @@ -6,26 +6,26 @@ import { import env from "../../../../environment" import { fullSearch, paginatedSearch } from "./internalSearch" import { - InternalTables, - getRowParams, DocumentType, + getRowParams, + InternalTables, } from "../../../../db/utils" import { getGlobalUsersFromMetadata } from "../../../../utilities/global" import { outputProcessing } from "../../../../utilities/rowProcessor" -import { Database, Row, Table, SearchParams } from "@budibase/types" +import { Database, Row, SearchParams, Table } from "@budibase/types" import { cleanExportRows } from "../utils" import { - Format, csv, + Format, json, jsonWithSchema, } from "../../../../api/controllers/view/exporters" import * as inMemoryViews from "../../../../db/inMemoryView" import { - migrateToInMemoryView, - migrateToDesignView, getFromDesignDoc, getFromMemoryDoc, + migrateToDesignView, + migrateToInMemoryView, } from "../../../../api/controllers/view/utils" import sdk from "../../../../sdk" import { ExportRowsParams, ExportRowsResult } from "../search" @@ -139,13 +139,12 @@ export async function exportRows( } } -export async function fetch(tableId: string) { +export async function fetch(tableId: string): Promise { const db = context.getAppDB() const table = await sdk.tables.getTable(tableId) const rows = await getRawTableData(db, tableId) - const result = await outputProcessing(table, rows) - return result + return await outputProcessing(table, rows) } async function getRawTableData(db: Database, tableId: string) { diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index 4049d898fb..d0227c7c6b 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -69,12 +69,15 @@ export async function validate({ valid: boolean errors: Record }> { - let fetchedTable: Table - if (!table) { + let fetchedTable: Table | undefined + if (!table && tableId) { fetchedTable = await sdk.tables.getTable(tableId) - } else { + } else if (table) { fetchedTable = table } + if (fetchedTable === undefined) { + throw new Error("Unable to fetch table for validation") + } const errors: Record = {} for (let fieldName of Object.keys(fetchedTable.schema)) { const column = fetchedTable.schema[fieldName] diff --git a/packages/server/src/sdk/app/tables/index.ts b/packages/server/src/sdk/app/tables/index.ts index 64fcde4bff..96cda05396 100644 --- a/packages/server/src/sdk/app/tables/index.ts +++ 
b/packages/server/src/sdk/app/tables/index.ts
@@ -1,11 +1,16 @@
 import { context } from "@budibase/backend-core"
-import { BudibaseInternalDB, getTableParams } from "../../../db/utils"
+import {
+  BudibaseInternalDB,
+  getMultiIDParams,
+  getTableParams,
+} from "../../../db/utils"
 import {
   breakExternalTableId,
   isExternalTable,
   isSQL,
 } from "../../../integrations/utils"
 import {
+  AllDocsResponse,
   Database,
   Table,
   TableResponse,
@@ -15,24 +20,70 @@ import datasources from "../datasources"
 import { populateExternalTableSchemas } from "./validation"
 import sdk from "../../../sdk"

-async function getAllInternalTables(db?: Database): Promise<Table[]> {
-  if (!db) {
-    db = context.getAppDB()
-  }
-  const internalTables = await db.allDocs(
-    getTableParams(null, {
-      include_docs: true,
-    })
-  )
-  return internalTables.rows.map((tableDoc: any) => ({
+function processInternalTables(docs: AllDocsResponse): Table[] {
+  return docs.rows.map((tableDoc: any) => ({
     ...tableDoc.doc,
     type: "internal",
     sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
   }))
 }

-async function getAllExternalTables(
-  datasourceId: any
+async function getAllInternalTables(db?: Database): Promise<Table[]> {
+  if (!db) {
+    db = context.getAppDB()
+  }
+  const internalTableDocs = await db.allDocs(
+    getTableParams(null, {
+      include_docs: true,
+    })
+  )
+  return processInternalTables(internalTableDocs)
+}
+
+async function getAllExternalTables(): Promise<Table[]> {
+  const datasources = await sdk.datasources.fetch({ enriched: true })
+  const allEntities = datasources.map(datasource => datasource.entities)
+  let final: Table[] = []
+  for (let entities of allEntities) {
+    if (entities) {
+      final = final.concat(Object.values(entities))
+    }
+  }
+  return final
+}
+
+async function getAllTables() {
+  const [internal, external] = await Promise.all([
+    getAllInternalTables(),
+    getAllExternalTables(),
+  ])
+  return [...internal, ...external]
+}
+
+async function getTables(tableIds: string[]): Promise<Table[]> {
+  const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)), internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
+  let tables: Table[] = []
+  if (externalTableIds.length) {
+    const externalTables = await getAllExternalTables()
+    tables = tables.concat(
+      externalTables.filter(
+        table => externalTableIds.indexOf(table._id!) !== -1
+      )
+    )
+  }
+  if (internalTableIds.length) {
+    const db = context.getAppDB()
+    const internalTableDocs = await db.allDocs(
+      getMultiIDParams(internalTableIds)
+    )
+    tables = tables.concat(processInternalTables(internalTableDocs))
+  }
+  return tables
+}
+
+async function getExternalTablesInDatasource(
+  datasourceId: string
 ): Promise<Record<string, Table>> {
   const datasource = await datasources.get(datasourceId, { enriched: true })
   if (!datasource || !datasource.entities) {
@@ -42,22 +93,22 @@ async function getAllExternalTables(
 }

 async function getExternalTable(
-  datasourceId: any,
-  tableName: any
+  datasourceId: string,
+  tableName: string
 ): Promise<Table> {
-  const entities = await getAllExternalTables(datasourceId)
+  const entities = await getExternalTablesInDatasource(datasourceId)
   return entities[tableName]
 }

-async function getTable(tableId: any): Promise<Table> {
+async function getTable(tableId: string): Promise<Table> {
   const db = context.getAppDB()
   if (isExternalTable(tableId)) {
     let { datasourceId, tableName } = breakExternalTableId(tableId)
     const datasource = await datasources.get(datasourceId!)
-    const table = await getExternalTable(datasourceId, tableName)
+    const table = await getExternalTable(datasourceId!, tableName!)
     return { ...table, sql: isSQL(datasource) }
   } else {
-    return db.get(tableId)
+    return db.get<Table>
(tableId) } } @@ -86,9 +137,11 @@ async function saveTable(table: Table) { export default { getAllInternalTables, - getAllExternalTables, + getExternalTablesInDatasource, getExternalTable, getTable, + getAllTables, + getTables, populateExternalTableSchemas, enrichViewSchemas, saveTable, diff --git a/packages/server/src/utilities/centralPath.ts b/packages/server/src/utilities/centralPath.ts index bd0578c7ce..0956cc042e 100644 --- a/packages/server/src/utilities/centralPath.ts +++ b/packages/server/src/utilities/centralPath.ts @@ -6,7 +6,7 @@ import path from "path" /** * Exactly the same as path.join * @param args Any number of string arguments to add to a path - * @returns {string} The final path ready to use + * @returns The final path ready to use */ export function join(...args: string[]) { return path.join(...args) @@ -15,7 +15,7 @@ export function join(...args: string[]) { /** * Exactly the same as path.resolve * @param args Any number of string arguments to add to a path - * @returns {string} The final path ready to use + * @returns The final path ready to use */ export function resolve(...args: string[]) { return path.resolve(...args) diff --git a/packages/server/src/utilities/fileSystem/app.ts b/packages/server/src/utilities/fileSystem/app.ts index 16681c2978..891f750d19 100644 --- a/packages/server/src/utilities/fileSystem/app.ts +++ b/packages/server/src/utilities/fileSystem/app.ts @@ -11,8 +11,8 @@ export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") /** * Uploads the latest client library to the object store. - * @param {string} appId The ID of the app which is being created. - * @return {Promise} once promise completes app resources should be ready in object store. + * @param appId The ID of the app which is being created. + * @return once promise completes app resources should be ready in object store. */ export const createApp = async (appId: string) => { await updateClientLibrary(appId) @@ -20,8 +20,8 @@ export const createApp = async (appId: string) => { /** * Removes all of the assets created for an app in the object store. - * @param {string} appId The ID of the app which is being deleted. - * @return {Promise} once promise completes the app resources will be removed from object store. + * @param appId The ID of the app which is being deleted. + * @return once promise completes the app resources will be removed from object store. */ export const deleteApp = async (appId: string) => { await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`) diff --git a/packages/server/src/utilities/fileSystem/filesystem.ts b/packages/server/src/utilities/fileSystem/filesystem.ts index a44fa03c28..e7f6611e87 100644 --- a/packages/server/src/utilities/fileSystem/filesystem.ts +++ b/packages/server/src/utilities/fileSystem/filesystem.ts @@ -53,8 +53,8 @@ export const checkDevelopmentEnvironment = () => { * Used to retrieve a handlebars file from the system which will be used as a template. * This is allowable as the template handlebars files should be static and identical across * the cluster. - * @param {string} path The path to the handlebars file which is to be loaded. - * @returns {string} The loaded handlebars file as a string - loaded as utf8. + * @param path The path to the handlebars file which is to be loaded. + * @returns The loaded handlebars file as a string - loaded as utf8. 
*/ export const loadHandlebarsFile = (path: PathLike) => { return fs.readFileSync(path, "utf8") @@ -63,8 +63,8 @@ export const loadHandlebarsFile = (path: PathLike) => { /** * When return a file from the API need to write the file to the system temporarily so we * can create a read stream to send. - * @param {string} contents the contents of the file which is to be returned from the API. - * @return {Object} the read stream which can be put into the koa context body. + * @param contents the contents of the file which is to be returned from the API. + * @return the read stream which can be put into the koa context body. */ export const apiFileReturn = (contents: any) => { const path = join(budibaseTempDir(), uuid()) @@ -78,8 +78,8 @@ export const streamFile = (path: string) => { /** * Writes the provided contents to a temporary file, which can be used briefly. - * @param {string} fileContents contents which will be written to a temp file. - * @return {string} the path to the temp file. + * @param fileContents contents which will be written to a temp file. + * @return the path to the temp file. */ export const storeTempFile = ( fileContents: string | NodeJS.ArrayBufferView @@ -100,7 +100,7 @@ export const stringToFileStream = (contents: any) => { /** * Creates a temp file and returns it from the API. - * @param {string} fileContents the contents to be returned in file. + * @param fileContents the contents to be returned in file. */ export const sendTempFile = (fileContents: any) => { const path = storeTempFile(fileContents) diff --git a/packages/server/src/utilities/fileSystem/template.ts b/packages/server/src/utilities/fileSystem/template.ts index 87e0b44e5c..2da91d3247 100644 --- a/packages/server/src/utilities/fileSystem/template.ts +++ b/packages/server/src/utilities/fileSystem/template.ts @@ -5,8 +5,8 @@ import { objectStore } from "@budibase/backend-core" /** * This function manages temporary template files which are stored by Koa. - * @param {Object} template The template object retrieved from the Koa context object. - * @returns {Object} Returns an fs read stream which can be loaded into the database. + * @param template The template object retrieved from the Koa context object. + * @returns Returns an fs read stream which can be loaded into the database. */ export const getTemplateStream = async (template: any) => { if (template.file) { @@ -20,7 +20,7 @@ export const getTemplateStream = async (template: any) => { /** * Retrieves a template and pipes it to minio as well as making it available temporarily. - * @param {string} type The type of template which is to be retrieved. + * @param type The type of template which is to be retrieved. * @param name * @return {Promise<*>} */ diff --git a/packages/server/src/utilities/index.ts b/packages/server/src/utilities/index.ts index 15e59f3b66..551299a88e 100644 --- a/packages/server/src/utilities/index.ts +++ b/packages/server/src/utilities/index.ts @@ -24,8 +24,8 @@ export function removeFromArray(array: any[], element: any) { /** * Makes sure that a URL has the correct number of slashes, while maintaining the * http(s):// double slashes. - * @param {string} url The URL to test and remove any extra double slashes. - * @return {string} The updated url. + * @param url The URL to test and remove any extra double slashes. + * @return The updated url. 
*/ export function checkSlashesInUrl(url: string) { return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2") diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index b02fc638e3..cf3875b2ea 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ b/packages/server/src/utilities/rowProcessor/index.ts @@ -51,11 +51,11 @@ function getRemovedAttachmentKeys( /** * This will update any auto columns that are found on the row/table with the correct information based on * time now and the current logged in user making the request. - * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields. - * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing. - * @param {Object} row The row which is to be updated with information for the auto columns. - * @param {Object} opts specific options for function to carry out optional features. - * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated + * @param user The user to be used for an appId as well as the createdBy and createdAt fields. + * @param table The table which is to be used for the schema, as well as handling auto IDs incrementing. + * @param row The row which is to be updated with information for the auto columns. + * @param opts specific options for the function to carry out optional features. + * @returns The updated row and table, the table may need to be updated * for automatic ID purposes. */ export function processAutoColumn( @@ -111,9 +111,9 @@ export function processAutoColumn( /** * This will coerce a value to the correct types based on the type transform map - * @param {object} row The value to coerce - * @param {object} type The type fo coerce to - * @returns {object} The coerced value + * @param row The value to coerce + * @param type The type to coerce to + * @returns The coerced value */ export function coerce(row: any, type: string) { // no coercion specified for type, skip it @@ -135,11 +135,11 @@ export function coerce(row: any, type: string) { /** * Given an input route this function will apply all the necessary pre-processing to it, such as coercion * of column values or adding auto-column values. - * @param {object} user the user which is performing the input. - * @param {object} row the row which is being created/updated. - * @param {object} table the table which the row is being saved to. - * @param {object} opts some input processing options (like disabling auto-column relationships). - * @returns {object} the row which has been prepared to be written to the DB. + * @param user the user which is performing the input. + * @param row the row which is being created/updated. + * @param table the table which the row is being saved to. + * @param opts some input processing options (like disabling auto-column relationships). + * @returns the row which has been prepared to be written to the DB. */ export async function inputProcessing( userId: string | null | undefined, @@ -198,11 +198,11 @@ export async function inputProcessing( /** * This function enriches the input rows with anything they are supposed to contain, for example * link records or attachment links. - * @param {object} table the table from which these rows came from originally, this is used to determine + * @param table the table from which these rows came from originally, this is used to determine * the schema of the rows and then enrich.
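For the fromRow option added to outputProcessing just below, a hedged call sketch: the (table, rows, opts) shape comes from this JSDoc, while the table, relatedRows and row variables are assumed to exist in the caller.

// Enrich fetched rows; passing the originating row through the new fromRow
// option hands it on to attachFullLinkedDocs so the link enrichment can take
// that row into account.
const enriched = await outputProcessing(table, relatedRows, {
  squash: true,
  fromRow: row, // assumption: the row the relationship lookup started from
})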
- * @param {object[]|object} rows the rows which are to be enriched. - * @param {object} opts used to set some options for the output, such as disabling relationship squashing. - * @returns {object[]|object} the enriched rows will be returned. + * @param rows the rows which are to be enriched. + * @param opts used to set some options for the output, such as disabling relationship squashing. + * @returns the enriched rows will be returned. */ export async function outputProcessing( table: Table, @@ -210,6 +210,7 @@ export async function outputProcessing( opts: { squash?: boolean preserveLinks?: boolean + fromRow?: Row skipBBReferences?: boolean } = { squash: true, @@ -227,7 +228,9 @@ export async function outputProcessing( } // attach any linked row information let enriched = !opts.preserveLinks - ? await linkRows.attachFullLinkedDocs(table, safeRows) + ? await linkRows.attachFullLinkedDocs(table, safeRows, { + fromRow: opts?.fromRow, + }) : safeRows // process complex types: attachements, bb references... @@ -278,13 +281,13 @@ export async function outputProcessing( /** * Clean up any attachments that were attached to a row. - * @param {object} table The table from which a row is being removed. - * @param {any} row optional - the row being removed. - * @param {any} rows optional - if multiple rows being deleted can do this in bulk. - * @param {any} oldRow optional - if updating a row this will determine the difference. - * @param {any} oldTable optional - if updating a table, can supply the old table to look for + * @param table The table from which a row is being removed. + * @param row optional - the row being removed. + * @param rows optional - if multiple rows being deleted can do this in bulk. + * @param oldRow optional - if updating a row this will determine the difference. + * @param oldTable optional - if updating a table, can supply the old table to look for * deleted attachment columns. - * @return {Promise} When all attachments have been removed this will return. + * @return When all attachments have been removed this will return. */ export async function cleanupAttachments( table: Table, diff --git a/packages/shared-core/src/helpers/helpers.ts b/packages/shared-core/src/helpers/helpers.ts index 8c4795f226..16891de35b 100644 --- a/packages/shared-core/src/helpers/helpers.ts +++ b/packages/shared-core/src/helpers/helpers.ts @@ -8,7 +8,7 @@ import { User } from "@budibase/types" * will return "foo" over "bar". 
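Reading the truncated comment above together with the deepGet helper below, a hedged illustration of the precedence it describes; the object shapes are invented and the dot-path behaviour is inferred from the "foo" over "bar" wording rather than from code shown in this diff.

// Assumed behaviour: a literal key containing a dot wins over the equivalent
// nested path, otherwise the dot path is walked into the object.
deepGet({ "a.b": "foo", a: { b: "bar" } }, "a.b") // -> "foo" (exact key wins)
deepGet({ a: { b: "bar" } }, "a.b") // -> "bar"
deepGet({ a: { b: "bar" } }, "a.c") // -> null/undefined, not found per the @return note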
* @param obj the object * @param key the key - * @return {*|null} the value or null if a value was not found for this key + * @return the value or null if a value was not found for this key */ export const deepGet = (obj: { [x: string]: any }, key: string) => { if (!obj || !key) { diff --git a/packages/shared-core/src/sdk/documents/users.ts b/packages/shared-core/src/sdk/documents/users.ts index 03d86daa85..b58994aa46 100644 --- a/packages/shared-core/src/sdk/documents/users.ts +++ b/packages/shared-core/src/sdk/documents/users.ts @@ -6,6 +6,7 @@ import { InternalTable, } from "@budibase/types" import { getProdAppID } from "./applications" +import * as _ from "lodash/fp" // checks if a user is specifically a builder, given an app ID export function isBuilder(user: User | ContextUser, appId?: string): boolean { @@ -58,6 +59,18 @@ export function hasAppBuilderPermissions(user?: User | ContextUser): boolean { return !isGlobalBuilder && appLength != null && appLength > 0 } +export function hasAppCreatorPermissions(user?: User | ContextUser): boolean { + if (!user) { + return false + } + return _.flow( + _.get("roles"), + _.values, + _.find(x => ["CREATOR", "ADMIN"].includes(x)), + x => !!x + )(user) +} + // checks if a user is capable of building any app export function hasBuilderPermissions(user?: User | ContextUser): boolean { if (!user) { @@ -74,6 +87,18 @@ export function hasAdminPermissions(user?: User | ContextUser): boolean { return !!user.admin?.global } +export function isCreator(user?: User | ContextUser): boolean { + if (!user) { + return false + } + return ( + isGlobalBuilder(user) || + hasAdminPermissions(user) || + hasAppBuilderPermissions(user) || + hasAppCreatorPermissions(user) + ) +} + export function getGlobalUserID(userId?: string): string | undefined { if (typeof userId !== "string") { return userId diff --git a/packages/types/src/documents/global/quotas.ts b/packages/types/src/documents/global/quotas.ts index 61410f7435..4eb1168f7d 100644 --- a/packages/types/src/documents/global/quotas.ts +++ b/packages/types/src/documents/global/quotas.ts @@ -32,6 +32,7 @@ export interface StaticUsage { [StaticQuotaName.APPS]: number [StaticQuotaName.PLUGINS]: number [StaticQuotaName.USERS]: number + [StaticQuotaName.CREATORS]: number [StaticQuotaName.USER_GROUPS]: number [StaticQuotaName.ROWS]: number triggers: { diff --git a/packages/types/src/sdk/featureFlag.ts b/packages/types/src/sdk/featureFlag.ts index 53aa4842c4..e3935bc7ee 100644 --- a/packages/types/src/sdk/featureFlag.ts +++ b/packages/types/src/sdk/featureFlag.ts @@ -1,5 +1,8 @@ export enum FeatureFlag { LICENSING = "LICENSING", + // Feature IDs in Posthog + PER_CREATOR_PER_USER_PRICE = "18873", + PER_CREATOR_PER_USER_PRICE_ALERT = "18530", } export interface TenantFeatureFlags { diff --git a/packages/types/src/sdk/licensing/billing.ts b/packages/types/src/sdk/licensing/billing.ts index 35f366c811..bcbc7abd18 100644 --- a/packages/types/src/sdk/licensing/billing.ts +++ b/packages/types/src/sdk/licensing/billing.ts @@ -5,10 +5,17 @@ export interface Customer { currency: string | null | undefined } +export interface SubscriptionItems { + user: number | undefined + creator: number | undefined +} + export interface Subscription { amount: number + amounts: SubscriptionItems | undefined currency: string quantity: number + quantities: SubscriptionItems | undefined duration: PriceDuration cancelAt: number | null | undefined currentPeriodStart: number diff --git a/packages/types/src/sdk/licensing/plan.ts 
b/packages/types/src/sdk/licensing/plan.ts index 3e214a01ff..1604dfb8af 100644 --- a/packages/types/src/sdk/licensing/plan.ts +++ b/packages/types/src/sdk/licensing/plan.ts @@ -4,7 +4,9 @@ export enum PlanType { PRO = "pro", /** @deprecated */ TEAM = "team", + /** @deprecated */ PREMIUM = "premium", + PREMIUM_PLUS = "premium_plus", BUSINESS = "business", ENTERPRISE = "enterprise", } @@ -26,10 +28,12 @@ export interface AvailablePrice { currency: string duration: PriceDuration priceId: string + type?: string } export enum PlanModel { PER_USER = "perUser", + PER_CREATOR_PER_USER = "per_creator_per_user", DAY_PASS = "dayPass", } diff --git a/packages/types/src/sdk/licensing/quota.ts b/packages/types/src/sdk/licensing/quota.ts index 73afa1ed05..85700f167b 100644 --- a/packages/types/src/sdk/licensing/quota.ts +++ b/packages/types/src/sdk/licensing/quota.ts @@ -14,6 +14,7 @@ export enum StaticQuotaName { ROWS = "rows", APPS = "apps", USERS = "users", + CREATORS = "creators", USER_GROUPS = "userGroups", PLUGINS = "plugins", } @@ -67,6 +68,7 @@ export type StaticQuotas = { [StaticQuotaName.ROWS]: Quota [StaticQuotaName.APPS]: Quota [StaticQuotaName.USERS]: Quota + [StaticQuotaName.CREATORS]: Quota [StaticQuotaName.USER_GROUPS]: Quota [StaticQuotaName.PLUGINS]: Quota } diff --git a/packages/worker/src/utilities/email.ts b/packages/worker/src/utilities/email.ts index 5d596b0bde..38a367c4d5 100644 --- a/packages/worker/src/utilities/email.ts +++ b/packages/worker/src/utilities/email.ts @@ -71,12 +71,12 @@ async function getLinkCode( /** * Builds an email using handlebars and the templates found in the system (default or otherwise). - * @param {string} purpose the purpose of the email being built, e.g. invitation, password reset. - * @param {string} email the address which it is being sent to for contextual purposes. - * @param {object} context the context which is being used for building the email (hbs context). - * @param {object|null} user if being sent to an existing user then the object can be provided for context. - * @param {string|null} contents if using a custom template can supply contents for context. - * @return {Promise} returns the built email HTML if all provided parameters were valid. + * @param purpose the purpose of the email being built, e.g. invitation, password reset. + * @param email the address which it is being sent to for contextual purposes. + * @param context the context which is being used for building the email (hbs context). + * @param user if being sent to an existing user then the object can be provided for context. + * @param contents if using a custom template can supply contents for context. + * @return returns the built email HTML if all provided parameters were valid. */ async function buildEmail( purpose: EmailTemplatePurpose, @@ -128,7 +128,7 @@ async function buildEmail( /** * Checks if a SMTP config exists based on passed in parameters. - * @return {Promise} returns true if there is a configuration that can be used. + * @return returns true if there is a configuration that can be used. */ export async function isEmailConfigured() { // when "testing" or smtp fallback is enabled simply return true @@ -142,10 +142,10 @@ export async function isEmailConfigured() { /** * Given an email address and an email purpose this will retrieve the SMTP configuration and * send an email using it. - * @param {string} email The email address to send to. - * @param {string} purpose The purpose of the email being sent (e.g. reset password). 
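Before moving on to the worker email utilities, a hedged sketch tying together the licensing additions above (SubscriptionItems, the CREATORS quota and the per-creator-per-user plan model); every number is invented and only the field and enum names come from the diff.

// Invented figures: a per-creator-per-user subscription carries a breakdown
// alongside the existing totals.
const quantities: SubscriptionItems = { user: 10, creator: 2 }
const amounts: SubscriptionItems = { user: 2000, creator: 1000 } // assumed to be in cents
// Which users count towards "creator" is presumably what the new isCreator()
// helper above decides: global builders, admins, app builders, and users
// holding a CREATOR or ADMIN app role all return true.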
- * @param {object} opts The options for sending the email. - * @return {Promise} returns details about the attempt to send email, e.g. if it is successful; based on + * @param email The email address to send to. + * @param purpose The purpose of the email being sent (e.g. reset password). + * @param opts The options for sending the email. + * @return returns details about the attempt to send email, e.g. if it is successful; based on * nodemailer response. */ export async function sendEmail( @@ -212,8 +212,8 @@ export async function sendEmail( /** * Given an SMTP configuration this runs it through nodemailer to see if it is in fact functional. - * @param {object} config an SMTP configuration - this is based on the nodemailer API. - * @return {Promise} returns true if the configuration is valid. + * @param config an SMTP configuration - this is based on the nodemailer API. + * @return returns true if the configuration is valid. */ export async function verifyConfig(config: SMTPInnerConfig) { const transport = createSMTPTransport(config) diff --git a/packages/worker/src/utilities/index.ts b/packages/worker/src/utilities/index.ts index e1e065bd4e..dd241f4af7 100644 --- a/packages/worker/src/utilities/index.ts +++ b/packages/worker/src/utilities/index.ts @@ -1,8 +1,8 @@ /** * Makes sure that a URL has the correct number of slashes, while maintaining the * http(s):// double slashes. - * @param {string} url The URL to test and remove any extra double slashes. - * @return {string} The updated url. + * @param url The URL to test and remove any extra double slashes. + * @return The updated url. */ export function checkSlashesInUrl(url: string) { return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2") diff --git a/packages/worker/src/utilities/redis.ts b/packages/worker/src/utilities/redis.ts index 6b82df3b57..993cdf97ce 100644 --- a/packages/worker/src/utilities/redis.ts +++ b/packages/worker/src/utilities/redis.ts @@ -37,8 +37,8 @@ async function updateACode(db: string, code: string, value: any) { /** * Given an invite code and invite body, allow the update an existing/valid invite in redis - * @param {string} inviteCode The invite code for an invite in redis - * @param {object} value The body of the updated user invitation + * @param inviteCode The invite code for an invite in redis + * @param value The body of the updated user invitation */ export async function updateInviteCode(inviteCode: string, value: string) { await updateACode(redis.utils.Databases.INVITATIONS, inviteCode, value) @@ -77,9 +77,9 @@ export async function shutdown() { /** * Given a user ID this will store a code (that is returned) for an hour in redis. * The user can then return this code for resetting their password (through their reset link). - * @param {string} userId the ID of the user which is to be reset. - * @param {object} info Info about the user/the reset process. - * @return {Promise} returns the code that was stored to redis. + * @param userId the ID of the user which is to be reset. + * @param info Info about the user/the reset process. + * @return returns the code that was stored to redis. */ export async function getResetPasswordCode(userId: string, info: any) { return writeACode(redis.utils.Databases.PW_RESETS, { userId, info }) @@ -87,9 +87,9 @@ export async function getResetPasswordCode(userId: string, info: any) { /** * Given a reset code this will lookup to redis, check if the code is valid and delete if required. - * @param {string} resetCode The code provided via the email link. 
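A hedged sketch of the password-reset round trip described by getResetPasswordCode and checkResetPasswordCode; the user variable and info payload are invented, and the return value of the check is taken from the JSDoc rather than from code shown here.

// Store a short-lived code for the user, send it out in the reset link, then
// validate (and, by default, consume) it when the link is followed.
const code = await getResetPasswordCode(user._id, { requestedAt: Date.now() }) // invented info payload
const userId = await checkResetPasswordCode(code) // deletes the code unless deleteCode is false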
- * @param {boolean} deleteCode If the code is used/finished with this will delete it - defaults to true. - * @return {Promise} returns the user ID if it is found + * @param resetCode The code provided via the email link. + * @param deleteCode If the code is used/finished with this will delete it - defaults to true. + * @return returns the user ID if it is found */ export async function checkResetPasswordCode( resetCode: string, @@ -104,9 +104,9 @@ export async function checkResetPasswordCode( /** * Generates an invitation code and writes it to redis - which can later be checked for user creation. - * @param {string} email the email address which the code is being sent to (for use later). - * @param {object|null} info Information to be carried along with the invitation. - * @return {Promise} returns the code that was stored to redis. + * @param email the email address which the code is being sent to (for use later). + * @param info Information to be carried along with the invitation. + * @return returns the code that was stored to redis. */ export async function getInviteCode(email: string, info: any) { return writeACode(redis.utils.Databases.INVITATIONS, { email, info }) @@ -114,9 +114,9 @@ export async function getInviteCode(email: string, info: any) { /** * Checks that the provided invite code is valid - will return the email address of user that was invited. - * @param {string} inviteCode the invite code that was provided as part of the link. - * @param {boolean} deleteCode whether or not the code should be deleted after retrieval - defaults to true. - * @return {Promise} If the code is valid then an email address will be returned. + * @param inviteCode the invite code that was provided as part of the link. + * @param deleteCode whether or not the code should be deleted after retrieval - defaults to true. + * @return If the code is valid then an email address will be returned. */ export async function checkInviteCode( inviteCode: string,
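And the equivalent hedged sketch for invitations, based on the JSDoc above; the info payload is invented and the return value of checkInviteCode is assumed from its description.

// Write an invitation code for an email address, mail the invite link out,
// then validate the code (deleting it by default) when the invite is accepted.
const inviteCode = await getInviteCode("new.user@example.com", { admin: false }) // invented info payload
const invitedEmail = await checkInviteCode(inviteCode) // the invited email address, per the JSDoc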