Merge pull request #11782 from Budibase/feature/user-column-type

Feature - User column type
Adria Navarro 2023-09-26 12:13:19 +02:00 committed by GitHub
commit a62ab66820
36 changed files with 930 additions and 68 deletions
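
For orientation before the file-by-file diff: the PR introduces a new "bb_reference" field type with a "user" subtype. Below is a minimal sketch of what such a column looks like in a table schema; the table name, column name and _id are invented for illustration, and the shape mirrors the fixtures used in the new rowProcessor tests further down in this diff.

import { FieldType, FieldSubtype, Table } from "@budibase/types"

// Hypothetical table with a single-user "assignee" column.
const ticketsTable: Table = {
  _id: "ta_tickets_example",
  name: "Tickets",
  type: "table",
  schema: {
    assignee: {
      name: "assignee",
      type: FieldType.BB_REFERENCE, // "bb_reference"
      subtype: FieldSubtype.USER, // "user"
      constraints: {
        presence: false,
        type: "string",
      },
      // The builder's "Allow multiple users" toggle additionally sets
      // relationshipType to many-to-many; one-to-many means a single user.
    },
  },
}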

View file

@ -120,7 +120,7 @@ export async function getUsers(
): Promise<{ users: User[]; notFoundIds?: string[] }> {
const client = await redis.getUserClient()
// try cache
let usersFromCache = await client.bulkGet(userIds)
let usersFromCache = await client.bulkGet<User>(userIds)
const missingUsersFromCache = userIds.filter(uid => !usersFromCache[uid])
const users = Object.values(usersFromCache)
let notFoundIds

View file

@ -242,7 +242,7 @@ class RedisWrapper {
}
}
async bulkGet(keys: string[]) {
async bulkGet<T>(keys: string[]) {
const db = this._db
if (keys.length === 0) {
return {}
@ -250,7 +250,7 @@ class RedisWrapper {
const prefixedKeys = keys.map(key => addDbPrefix(db, key))
let response = await this.getClient().mget(prefixedKeys)
if (Array.isArray(response)) {
let final: Record<string, any> = {}
let final: Record<string, T> = {}
let count = 0
for (let result of response) {
if (result) {

View file

@ -18,7 +18,7 @@ class DBTestConfiguration {
// TENANCY
doInTenant(task: any) {
doInTenant<T>(task: () => Promise<T>) {
return context.doInTenant(this.tenantId, () => {
return task()
})

View file

@ -1 +1,2 @@
export * from "./core/utilities"
export * from "./extra"

View file

@ -25,6 +25,7 @@
longform: StringRenderer,
array: ArrayRenderer,
internal: InternalRenderer,
bb_reference: RelationshipRenderer,
}
$: type = getType(schema)
$: customRenderer = customRenderers?.find(x => x.column === schema?.name)

View file

@ -33,6 +33,7 @@
import { getBindings } from "components/backend/DataTable/formula"
import JSONSchemaModal from "./JSONSchemaModal.svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldSubtype, FieldType } from "@budibase/types"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
const AUTO_TYPE = "auto"
@ -42,6 +43,11 @@
const NUMBER_TYPE = FIELDS.NUMBER.type
const JSON_TYPE = FIELDS.JSON.type
const DATE_TYPE = FIELDS.DATETIME.type
const BB_REFERENCE_TYPE = FieldType.BB_REFERENCE
const BB_USER_REFERENCE_TYPE = composeType(
BB_REFERENCE_TYPE,
FieldSubtype.USER
)
const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
@ -68,12 +74,39 @@
let jsonSchemaModal
let allowedTypes = []
let editableColumn = {
type: "string",
type: fieldDefinitions.STRING.type,
constraints: fieldDefinitions.STRING.constraints,
// Initial value for column name in other table for linked records
fieldName: $tables.selected.name,
}
const bbRefTypeMapping = {}
function composeType(fieldType, subtype) {
return `${fieldType}_${subtype}`
}
// Handling fields with subtypes
fieldDefinitions = Object.entries(fieldDefinitions).reduce(
(p, [key, field]) => {
if (field.type === BB_REFERENCE_TYPE) {
const composedType = composeType(field.type, field.subtype)
p[key] = {
...field,
type: composedType,
}
bbRefTypeMapping[composedType] = {
type: field.type,
subtype: field.subtype,
}
} else {
p[key] = field
}
return p
},
{}
)
$: if (primaryDisplay) {
editableColumn.constraints.presence = { allowEmpty: false }
}
@ -107,6 +140,7 @@
const initialiseField = (field, savingColumn) => {
isCreating = !field
if (field && !savingColumn) {
editableColumn = cloneDeep(field)
originalName = editableColumn.name ? editableColumn.name + "" : null
@ -114,6 +148,14 @@
primaryDisplay =
$tables.selected.primaryDisplay == null ||
$tables.selected.primaryDisplay === editableColumn.name
const mapped = Object.entries(bbRefTypeMapping).find(
([_, v]) => v.type === field.type && v.subtype === field.subtype
)
if (mapped) {
editableColumn.type = mapped[0]
delete editableColumn.subtype
}
} else if (!savingColumn) {
let highestNumber = 0
Object.keys(table.schema).forEach(columnName => {
@ -130,6 +172,7 @@
editableColumn.name = "Column 01"
}
}
allowedTypes = getAllowedTypes()
if (editableColumn.type === LINK_TYPE && editableColumn.tableId) {
@ -145,6 +188,8 @@
$: initialiseField(field, savingColumn)
$: isBBReference = !!bbRefTypeMapping[editableColumn.type]
$: checkConstraints(editableColumn)
$: required = !!editableColumn?.constraints?.presence || primaryDisplay
$: uneditable =
@ -220,6 +265,13 @@
let saveColumn = cloneDeep(editableColumn)
if (bbRefTypeMapping[saveColumn.type]) {
saveColumn = {
...saveColumn,
...bbRefTypeMapping[saveColumn.type],
}
}
if (saveColumn.type === AUTO_TYPE) {
saveColumn = buildAutoColumn(
$tables.selected.name,
@ -298,9 +350,10 @@
// Default relationships many to many
if (editableColumn.type === LINK_TYPE) {
editableColumn.relationshipType = RelationshipType.MANY_TO_MANY
}
if (editableColumn.type === FORMULA_TYPE) {
} else if (editableColumn.type === FORMULA_TYPE) {
editableColumn.formulaType = "dynamic"
} else if (editableColumn.type === BB_USER_REFERENCE_TYPE) {
editableColumn.relationshipType = RelationshipType.ONE_TO_MANY
}
}
@ -339,7 +392,9 @@
ALLOWABLE_NUMBER_TYPES.indexOf(editableColumn.type) !== -1
) {
return ALLOWABLE_NUMBER_OPTIONS
} else if (!external) {
}
if (!external) {
return [
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
@ -360,6 +415,9 @@
if (!external || table.sql) {
fields = [...fields, FIELDS.LINK, FIELDS.ARRAY]
}
if (fieldDefinitions.USER) {
fields.push(fieldDefinitions.USER)
}
return fields
}
}
@ -613,6 +671,17 @@
<Button primary text on:click={openJsonSchemaEditor}
>Open schema editor</Button
>
{:else if isBBReference}
<Toggle
value={editableColumn.relationshipType === RelationshipType.MANY_TO_MANY}
on:change={e =>
(editableColumn.relationshipType = e.detail
? RelationshipType.MANY_TO_MANY
: RelationshipType.ONE_TO_MANY)}
disabled={!isCreating}
thin
text="Allow multiple users"
/>
{/if}
{#if editableColumn.type === AUTO_TYPE || editableColumn.autocolumn}
<Select

View file

@ -65,6 +65,7 @@ const componentMap = {
"field/array": FormFieldSelect,
"field/json": FormFieldSelect,
"field/barcodeqr": FormFieldSelect,
"field/bb_reference": FormFieldSelect,
// Some validation types are the same as others, so not all types are
// explicitly listed here. e.g. options uses string validation
"validation/string": ValidationEditor,
@ -74,6 +75,7 @@ const componentMap = {
"validation/datetime": ValidationEditor,
"validation/attachment": ValidationEditor,
"validation/link": ValidationEditor,
"validation/bb_reference": ValidationEditor,
}
export const getComponentForSetting = setting => {

View file

@ -43,4 +43,5 @@ export const FieldTypeToComponentMap = {
link: "relationshipfield",
json: "jsonfield",
barcodeqr: "codescanner",
bb_reference: "bbreferencefield",
}

View file

@ -120,6 +120,12 @@ export const FIELDS = {
presence: false,
},
},
USER: {
name: "User",
type: "bb_reference",
subtype: "user",
icon: "User",
},
}
export const AUTO_COLUMN_SUB_TYPES = {

View file

@ -69,7 +69,8 @@
"datetimefield",
"multifieldselect",
"s3upload",
"codescanner"
"codescanner",
"bbreferencefield"
]
},
{

View file

@ -5623,5 +5623,73 @@
"defaultValue": false
}
]
},
"bbreferencefield": {
"devComment": "As bb reference is only used for user subtype for now, we are using user for icon and labels",
"name": "User Field",
"icon": "User",
"styles": ["size"],
"requiredAncestors": ["form"],
"editable": true,
"size": {
"width": 400,
"height": 50
},
"settings": [
{
"type": "field/bb_reference",
"label": "Field",
"key": "field",
"required": true
},
{
"type": "text",
"label": "Label",
"key": "label"
},
{
"type": "text",
"label": "Placeholder",
"key": "placeholder"
},
{
"type": "text",
"label": "Default value",
"key": "defaultValue"
},
{
"type": "event",
"label": "On change",
"key": "onChange",
"context": [
{
"label": "Field Value",
"key": "value"
}
]
},
{
"type": "validation/link",
"label": "Validation",
"key": "validation"
},
{
"type": "filter/relationship",
"label": "Filtering",
"key": "filter"
},
{
"type": "boolean",
"label": "Search",
"key": "autocomplete",
"defaultValue": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false
}
]
}
}

View file

@ -30,6 +30,7 @@
link: "relationshipfield",
json: "jsonfield",
barcodeqr: "codescanner",
bb_reference: "bbreferencefield",
}
let formId

View file

@ -0,0 +1,9 @@
<script>
import RelationshipField from "./RelationshipField.svelte"
</script>
<RelationshipField
{...$$props}
datasourceType={"user"}
primaryDisplay={"email"}
/>

View file

@ -16,6 +16,8 @@
export let defaultValue
export let onChange
export let filter
export let datasourceType = "table"
export let primaryDisplay
let fieldState
let fieldApi
@ -24,12 +26,15 @@
let searchTerm
let open
$: type =
datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE
$: multiselect = fieldSchema?.relationshipType !== "one-to-many"
$: linkedTableId = fieldSchema?.tableId
$: fetch = fetchData({
API,
datasource: {
type: "table",
type: datasourceType,
tableId: linkedTableId,
},
options: {
@ -44,7 +49,7 @@
: flatten(fieldState?.value)?.[0]
$: component = multiselect ? CoreMultiselect : CoreSelect
$: expandedDefaultValue = expand(defaultValue)
$: primaryDisplay = tableDefinition?.primaryDisplay
$: primaryDisplay = primaryDisplay || tableDefinition?.primaryDisplay
let optionsObj = {}
let initialValuesProcessed
@ -54,7 +59,7 @@
// Persist the initial values as options, allowing them to be present in the dropdown,
// even if they are not in the initial fetch results
initialValuesProcessed = true
optionsObj = fieldState?.value?.reduce((accumulator, value) => {
optionsObj = (fieldState?.value || []).reduce((accumulator, value) => {
accumulator[value._id] = {
_id: value._id,
[primaryDisplay]: value.primaryDisplay,
@ -161,7 +166,7 @@
{disabled}
{validation}
defaultValue={expandedDefaultValue}
type={FieldTypes.LINK}
{type}
bind:fieldState
bind:fieldApi
bind:fieldSchema

View file

@ -15,3 +15,4 @@ export { default as formstep } from "./FormStep.svelte"
export { default as jsonfield } from "./JSONField.svelte"
export { default as s3upload } from "./S3Upload.svelte"
export { default as codescanner } from "./CodeScannerField.svelte"
export { default as bbreferencefield } from "./BBReferenceField.svelte"

View file

@ -1,17 +1,5 @@
export const FieldTypes = {
STRING: "string",
BARCODEQR: "barcodeqr",
LONGFORM: "longform",
OPTIONS: "options",
NUMBER: "number",
BOOLEAN: "boolean",
ARRAY: "array",
DATETIME: "datetime",
ATTACHMENT: "attachment",
LINK: "link",
FORMULA: "formula",
JSON: "json",
}
import { FieldType as FieldTypes } from "@budibase/types"
export { FieldType as FieldTypes } from "@budibase/types"
export const UnsortableTypes = [
FieldTypes.FORMULA,

View file

@ -0,0 +1,38 @@
<script>
import { getContext } from "svelte"
import RelationshipCell from "./RelationshipCell.svelte"
import { FieldSubtype } from "@budibase/types"
const { API } = getContext("grid")
const { subtype } = $$props.schema
const schema = {
...$$props.schema,
// This is not really used, just adding some content to be able to render the relationship cell
tableId: "external",
}
async function searchFunction(searchParams) {
if (subtype !== FieldSubtype.USER) {
throw `Search for '${subtype}' not implemented`
}
const results = await API.searchUsers({
...searchParams,
})
// Mapping to the expected data within RelationshipCell
return {
...results,
data: undefined,
rows: results.data,
}
}
</script>
<RelationshipCell
{...$$props}
{schema}
{searchFunction}
primaryDisplay={"email"}
/>

View file

@ -21,6 +21,8 @@
import { Icon, Input, ProgressCircle, clickOutside } from "@budibase/bbui"
import { debounce } from "../../../utils/utils"
const { API, dispatch } = getContext("grid")
export let value
export let api
export let readonly
@ -30,15 +32,15 @@
export let invertX = false
export let invertY = false
export let contentLines = 1
export let searchFunction = API.searchTable
export let primaryDisplay
const { API, dispatch } = getContext("grid")
const color = getColor(0)
let isOpen = false
let searchResults
let searchString
let lastSearchString
let primaryDisplay
let candidateIndex
let lastSearchId
let searching = false
@ -96,7 +98,7 @@
lastSearchId = Math.random()
searching = true
const thisSearchId = lastSearchId
const results = await API.searchTable({
const results = await searchFunction({
paginate: false,
tableId: schema.tableId,
limit: 20,
@ -259,14 +261,16 @@
on:wheel={e => (focused ? e.stopPropagation() : null)}
>
{#each value || [] as relationship}
{#if relationship.primaryDisplay}
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
<div class="badge">
<span
on:click={editable
? () => showRelationship(relationship._id)
: null}
>
{readable(relationship.primaryDisplay)}
{readable(
relationship[primaryDisplay] || relationship.primaryDisplay
)}
</span>
{#if editable}
<Icon

View file

@ -9,6 +9,7 @@ import BooleanCell from "../cells/BooleanCell.svelte"
import FormulaCell from "../cells/FormulaCell.svelte"
import JSONCell from "../cells/JSONCell.svelte"
import AttachmentCell from "../cells/AttachmentCell.svelte"
import BBReferenceCell from "../cells/BBReferenceCell.svelte"
const TypeComponentMap = {
text: TextCell,
@ -23,6 +24,7 @@ const TypeComponentMap = {
link: RelationshipCell,
formula: FormulaCell,
json: JSONCell,
bb_reference: BBReferenceCell,
}
export const getCellRenderer = column => {
return TypeComponentMap[column?.schema?.type] || TextCell

View file

@ -19,12 +19,21 @@ const TypeIconMap = {
formula: "Calculator",
json: "Brackets",
bigint: "TagBold",
bb_reference: {
user: "User",
},
}
export const getColumnIcon = column => {
if (column.schema.autocolumn) {
return "MagicWand"
}
const type = column.schema.type
return TypeIconMap[type] || "Text"
const { type, subtype } = column.schema
const result =
typeof TypeIconMap[type] === "object" && subtype
? TypeIconMap[type][subtype]
: TypeIconMap[type]
return result || "Text"
}

View file

@ -166,7 +166,7 @@ export const addSampleData = async (ctx: UserCtx) => {
// Check if default datasource exists before creating it
await sdk.datasources.get(DEFAULT_BB_DATASOURCE_ID)
} catch (err: any) {
const defaultDbDocs = buildDefaultDocs()
const defaultDbDocs = await buildDefaultDocs()
// add in the default db data docs - tables, datasource, rows and links
await db.bulkDocs([...defaultDbDocs])

View file

@ -80,8 +80,8 @@ export async function save(ctx: UserCtx) {
}
const table = await sdk.tables.getTable(tableId)
const { table: updatedTable, row } = inputProcessing(
ctx.user,
const { table: updatedTable, row } = await inputProcessing(
ctx.user?._id,
cloneDeep(table),
inputs
)

View file

@ -59,7 +59,11 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const tableClone = cloneDeep(dbTable)
// this returns the table and row in case they have been updated
let { table, row } = inputProcessing(ctx.user, tableClone, combinedRow)
let { table, row } = await inputProcessing(
ctx.user?._id,
tableClone,
combinedRow
)
const validateResult = await sdk.rows.utils.validate({
row,
table,
@ -106,7 +110,7 @@ export async function save(ctx: UserCtx) {
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
let { table, row } = inputProcessing(ctx.user, tableClone, inputs)
let { table, row } = await inputProcessing(ctx.user?._id, tableClone, inputs)
const validateResult = await sdk.rows.utils.validate({
row,

View file

@ -42,7 +42,7 @@ describe("utils", () => {
const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
const result = importToRows(data, table, config.user)
const result = await importToRows(data, table, config.user)
expect(result).toEqual([
expect.objectContaining({
autoId: 1,
@ -89,7 +89,7 @@ describe("utils", () => {
const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
const result = importToRows(data, table)
const result = await importToRows(data, table)
expect(result).toHaveLength(3)
})
})

View file

@ -99,7 +99,7 @@ export function makeSureTableUpToDate(table: any, tableToSave: any) {
return tableToSave
}
export function importToRows(
export async function importToRows(
data: any[],
table: Table,
user: ContextUser | null = null
@ -113,7 +113,7 @@ export function importToRows(
// We use a reference to table here and update it after input processing,
// so that we can auto increment auto IDs in imported data properly
const processed = inputProcessing(user, table, row, {
const processed = await inputProcessing(user?._id, table, row, {
noAutoRelationships: true,
})
row = processed.row
@ -158,7 +158,7 @@ export async function handleDataImport(
const db = context.getAppDB()
const data = parse(rows, schema)
let finalData: any = importToRows(data, table, user)
let finalData: any = await importToRows(data, table, user)
//Set IDs of finalData to match existing row if an update is expected
if (identifierFields.length > 0) {
@ -422,13 +422,11 @@ export function hasTypeChanged(table: Table, oldTable: Table | undefined) {
if (!oldTable) {
return false
}
let key: any
let field: any
for ([key, field] of Object.entries(oldTable.schema)) {
const oldType = field.type
for (let [key, field] of Object.entries(oldTable.schema)) {
if (!table.schema[key]) {
continue
}
const oldType = field.type
const newType = table.schema[key].type
if (oldType !== newType && !areSwitchableTypes(oldType, newType)) {
return true

View file

@ -34,9 +34,9 @@ function syncLastIds(table: Table, rowCount: number) {
})
}
function tableImport(table: Table, data: Row[]) {
async function tableImport(table: Table, data: Row[]) {
const cloneTable = cloneDeep(table)
const rowDocs = importToRows(data, cloneTable)
const rowDocs = await importToRows(data, cloneTable)
syncLastIds(cloneTable, rowDocs.length)
return { rows: rowDocs, table: cloneTable }
}
@ -601,20 +601,20 @@ export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
},
}
export function buildDefaultDocs() {
const inventoryData = tableImport(
export async function buildDefaultDocs() {
const inventoryData = await tableImport(
DEFAULT_INVENTORY_TABLE_SCHEMA,
inventoryImport
)
const employeeData = tableImport(
const employeeData = await tableImport(
DEFAULT_EMPLOYEE_TABLE_SCHEMA,
employeeImport
)
const jobData = tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
const jobData = await tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
const expensesData = tableImport(
const expensesData = await tableImport(
DEFAULT_EXPENSES_TABLE_SCHEMA,
expensesImport
)

View file

@ -41,6 +41,7 @@ function generateSchema(
case FieldTypes.OPTIONS:
case FieldTypes.LONGFORM:
case FieldTypes.BARCODEQR:
case FieldTypes.BB_REFERENCE:
schema.text(key)
break
case FieldTypes.NUMBER:

View file

@ -41,7 +41,7 @@ export async function fetch(status: AppStatus, user: ContextUser) {
for (let app of apps) {
const lock = locks[app.appId]
if (lock) {
app.lockedBy = lock
app.lockedBy = lock as any
} else {
// make sure its definitely not present
delete app.lockedBy

View file

@ -0,0 +1,74 @@
import { cache } from "@budibase/backend-core"
import { utils } from "@budibase/shared-core"
import { FieldSubtype } from "@budibase/types"
import { InvalidBBRefError } from "./errors"
export async function processInputBBReferences(
value: string | string[] | { _id: string } | { _id: string }[],
subtype: FieldSubtype
): Promise<string | undefined> {
const referenceIds: string[] = []
if (Array.isArray(value)) {
referenceIds.push(
...value.map(idOrDoc =>
typeof idOrDoc === "string" ? idOrDoc : idOrDoc._id
)
)
} else if (typeof value !== "string") {
referenceIds.push(value._id)
} else {
referenceIds.push(
...value
.split(",")
.filter(x => x)
.map((id: string) => id.trim())
)
}
switch (subtype) {
case FieldSubtype.USER:
const { notFoundIds } = await cache.user.getUsers(referenceIds)
if (notFoundIds?.length) {
throw new InvalidBBRefError(notFoundIds[0], FieldSubtype.USER)
}
break
default:
throw utils.unreachable(subtype)
}
return referenceIds.join(",") || undefined
}
export async function processOutputBBReferences(
value: string,
subtype: FieldSubtype
) {
if (typeof value !== "string") {
// Already processed or nothing to process
return value
}
const ids = value.split(",").filter(id => !!id)
switch (subtype) {
case FieldSubtype.USER:
const { users } = await cache.user.getUsers(ids)
if (!users.length) {
return undefined
}
return users.map(u => ({
_id: u._id,
primaryDisplay: u.email,
email: u.email,
firstName: u.firstName,
lastName: u.lastName,
}))
default:
throw utils.unreachable(subtype)
}
}
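
A hedged usage sketch of the round trip these two helpers perform. It assumes a tenant context is already active (as in the tests below) and uses invented user ids:

import { FieldSubtype } from "@budibase/types"
import {
  processInputBBReferences,
  processOutputBBReferences,
} from "./bbReferenceProcessor"

async function roundTripExample() {
  // On write: ids may arrive as a single id, a csv string, a doc or an array;
  // they are validated against the user cache and stored as a csv string.
  // An unknown id causes an InvalidBBRefError to be thrown.
  const stored = await processInputBBReferences(
    [{ _id: "us_aaa" }, { _id: "us_bbb" }], // invented ids
    FieldSubtype.USER
  )
  // stored === "us_aaa,us_bbb"

  // On read: the stored csv is expanded back into user summaries for display.
  const users = await processOutputBBReferences(stored!, FieldSubtype.USER)
  // users: [{ _id, primaryDisplay: email, email, firstName, lastName }, ...]
  return users
}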

View file

@ -0,0 +1,7 @@
import { FieldSubtype } from "@budibase/types"
export class InvalidBBRefError extends Error {
constructor(id: string, subtype: FieldSubtype) {
super(`Id "${id}" is not valid for the subtype "${subtype}"`)
}
}

View file

@ -5,8 +5,12 @@ import { ObjectStoreBuckets } from "../../constants"
import { context, db as dbCore, objectStore } from "@budibase/backend-core"
import { InternalTables } from "../../db/utils"
import { TYPE_TRANSFORM_MAP } from "./map"
import { Row, RowAttachment, Table, ContextUser } from "@budibase/types"
const { cloneDeep } = require("lodash/fp")
import { FieldSubtype, Row, RowAttachment, Table } from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import {
processInputBBReferences,
processOutputBBReferences,
} from "./bbReferenceProcessor"
export * from "./utils"
type AutoColumnProcessingOpts = {
@ -48,12 +52,12 @@ function getRemovedAttachmentKeys(
* for automatic ID purposes.
*/
export function processAutoColumn(
user: ContextUser | null,
userId: string | null | undefined,
table: Table,
row: Row,
opts?: AutoColumnProcessingOpts
) {
let noUser = !user || !user.userId
let noUser = !userId
let isUserTable = table._id === InternalTables.USER_METADATA
let now = new Date().toISOString()
// if a row doesn't have a revision then it doesn't exist yet
@ -70,8 +74,8 @@ export function processAutoColumn(
}
switch (schema.subtype) {
case AutoFieldSubTypes.CREATED_BY:
if (creating && shouldUpdateUserFields && user) {
row[key] = [user.userId]
if (creating && shouldUpdateUserFields && userId) {
row[key] = [userId]
}
break
case AutoFieldSubTypes.CREATED_AT:
@ -80,8 +84,8 @@ export function processAutoColumn(
}
break
case AutoFieldSubTypes.UPDATED_BY:
if (shouldUpdateUserFields && user) {
row[key] = [user.userId]
if (shouldUpdateUserFields && userId) {
row[key] = [userId]
}
break
case AutoFieldSubTypes.UPDATED_AT:
@ -130,8 +134,8 @@ export function coerce(row: any, type: string) {
* @param {object} opts some input processing options (like disabling auto-column relationships).
* @returns {object} the row which has been prepared to be written to the DB.
*/
export function inputProcessing(
user: ContextUser | null,
export async function inputProcessing(
userId: string | null | undefined,
table: Table,
row: Row,
opts?: AutoColumnProcessingOpts
@ -166,6 +170,13 @@ export function inputProcessing(
})
}
}
if (field.type === FieldTypes.BB_REFERENCE && value) {
clonedRow[key] = await processInputBBReferences(
value,
field.subtype as FieldSubtype
)
}
}
if (!clonedRow._id || !clonedRow._rev) {
@ -174,7 +185,7 @@ export function inputProcessing(
}
// handle auto columns - this returns an object like {table, row}
return processAutoColumn(user, table, clonedRow, opts)
return processAutoColumn(userId, table, clonedRow, opts)
}
/**
@ -216,6 +227,16 @@ export async function outputProcessing<T extends Row[] | Row>(
attachment.url = objectStore.getAppFileUrl(attachment.key)
})
}
} else if (column.type == FieldTypes.BB_REFERENCE) {
for (let row of enriched) {
if (row[property] == null) {
continue
}
row[property] = await processOutputBBReferences(
row[property],
column.subtype as FieldSubtype
)
}
}
}
if (opts.squash) {

View file

@ -0,0 +1,212 @@
import _ from "lodash"
import * as backendCore from "@budibase/backend-core"
import { FieldSubtype, User } from "@budibase/types"
import {
processInputBBReferences,
processOutputBBReferences,
} from "../bbReferenceProcessor"
import {
DBTestConfiguration,
generator,
structures,
} from "@budibase/backend-core/tests"
import { InvalidBBRefError } from "../errors"
jest.mock("@budibase/backend-core", (): typeof backendCore => {
const actual: typeof backendCore = jest.requireActual(
"@budibase/backend-core"
)
return {
...actual,
cache: {
...actual.cache,
user: {
...actual.cache.user,
getUsers: jest.fn(actual.cache.user.getUsers),
},
},
}
})
const config = new DBTestConfiguration()
describe("bbReferenceProcessor", () => {
const cacheGetUsersSpy = backendCore.cache.user
.getUsers as jest.MockedFunction<typeof backendCore.cache.user.getUsers>
const users: User[] = []
beforeAll(async () => {
const userCount = 10
const userIds = generator.arrayOf(() => generator.guid(), {
min: userCount,
max: userCount,
})
await config.doInTenant(async () => {
const db = backendCore.context.getGlobalDB()
for (const userId of userIds) {
const user = structures.users.user({ _id: userId })
await db.put(user)
users.push(user)
}
})
})
beforeEach(() => {
jest.clearAllMocks()
})
describe("processInputBBReferences", () => {
describe("subtype user", () => {
it("validate valid string id", async () => {
const user = _.sample(users)
const userId = user!._id!
const result = await config.doInTenant(() =>
processInputBBReferences(userId, FieldSubtype.USER)
)
expect(result).toEqual(userId)
expect(cacheGetUsersSpy).toBeCalledTimes(1)
expect(cacheGetUsersSpy).toBeCalledWith([userId])
})
it("throws an error given an invalid id", async () => {
const userId = generator.guid()
await expect(
config.doInTenant(() =>
processInputBBReferences(userId, FieldSubtype.USER)
)
).rejects.toThrowError(new InvalidBBRefError(userId, FieldSubtype.USER))
expect(cacheGetUsersSpy).toBeCalledTimes(1)
expect(cacheGetUsersSpy).toBeCalledWith([userId])
})
it("validates valid user ids as csv", async () => {
const userIds = _.sampleSize(users, 5).map(x => x._id!)
const userIdCsv = userIds.join(" , ")
const result = await config.doInTenant(() =>
processInputBBReferences(userIdCsv, FieldSubtype.USER)
)
expect(result).toEqual(userIds.join(","))
expect(cacheGetUsersSpy).toBeCalledTimes(1)
expect(cacheGetUsersSpy).toBeCalledWith(userIds)
})
it("throws an error given an invalid id in a csv", async () => {
const expectedUserIds = _.sampleSize(users, 2).map(x => x._id!)
const wrongId = generator.guid()
const userIdCsv = [
expectedUserIds[0],
wrongId,
expectedUserIds[1],
].join(" , ")
await expect(
config.doInTenant(() =>
processInputBBReferences(userIdCsv, FieldSubtype.USER)
)
).rejects.toThrowError(
new InvalidBBRefError(wrongId, FieldSubtype.USER)
)
})
it("validate valid user object", async () => {
const userId = _.sample(users)!._id!
const result = await config.doInTenant(() =>
processInputBBReferences({ _id: userId }, FieldSubtype.USER)
)
expect(result).toEqual(userId)
expect(cacheGetUsersSpy).toBeCalledTimes(1)
expect(cacheGetUsersSpy).toBeCalledWith([userId])
})
it("validate valid user object array", async () => {
const userIds = _.sampleSize(users, 3).map(x => x._id!)
const result = await config.doInTenant(() =>
processInputBBReferences(userIds, FieldSubtype.USER)
)
expect(result).toEqual(userIds.join(","))
expect(cacheGetUsersSpy).toBeCalledTimes(1)
expect(cacheGetUsersSpy).toBeCalledWith(userIds)
})
it("empty strings will return undefined", async () => {
const result = await config.doInTenant(() =>
processInputBBReferences("", FieldSubtype.USER)
)
expect(result).toEqual(undefined)
})
it("empty arrays will return undefined", async () => {
const result = await config.doInTenant(() =>
processInputBBReferences([], FieldSubtype.USER)
)
expect(result).toEqual(undefined)
})
})
})
describe("processOutputBBReferences", () => {
describe("subtype user", () => {
it("fetches user given a valid string id", async () => {
const user = _.sample(users)!
const userId = user._id!
const result = await config.doInTenant(() =>
processOutputBBReferences(userId, FieldSubtype.USER)
)
expect(result).toEqual([
{
_id: user._id,
primaryDisplay: user.email,
email: user.email,
firstName: user.firstName,
lastName: user.lastName,
},
])
expect(cacheGetUsersSpy).toBeCalledTimes(1)
expect(cacheGetUsersSpy).toBeCalledWith([userId])
})
it("fetches user given a valid string id csv", async () => {
const [user1, user2] = _.sampleSize(users, 2)
const userId1 = user1._id!
const userId2 = user2._id!
const result = await config.doInTenant(() =>
processOutputBBReferences(
[userId1, userId2].join(","),
FieldSubtype.USER
)
)
expect(result).toHaveLength(2)
expect(result).toEqual(
expect.arrayContaining(
[user1, user2].map(u => ({
_id: u._id,
primaryDisplay: u.email,
email: u.email,
firstName: u.firstName,
lastName: u.lastName,
}))
)
)
expect(cacheGetUsersSpy).toBeCalledTimes(1)
expect(cacheGetUsersSpy).toBeCalledWith([userId1, userId2])
})
})
})
})

View file

@ -0,0 +1,186 @@
import { inputProcessing } from ".."
import { generator, structures } from "@budibase/backend-core/tests"
import { FieldType, FieldTypeSubtypes, Table } from "@budibase/types"
import * as bbReferenceProcessor from "../bbReferenceProcessor"
jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
processInputBBReferences: jest.fn(),
processOutputBBReferences: jest.fn(),
}))
describe("rowProcessor - inputProcessing", () => {
beforeEach(() => {
jest.resetAllMocks()
})
it("processes BB references if on the schema and it's populated", async () => {
const userId = generator.guid()
const table: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
presence: true,
type: "string",
},
},
user: {
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
name: "user",
constraints: {
presence: true,
type: "string",
},
},
},
}
const newRow = {
name: "Jack",
user: "123",
}
const user = structures.users.user()
;(
bbReferenceProcessor.processInputBBReferences as jest.Mock
).mockResolvedValue(user)
const { row } = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).toBeCalledTimes(1)
expect(bbReferenceProcessor.processInputBBReferences).toBeCalledWith(
"123",
"user"
)
expect(row).toEqual({ ...newRow, user })
})
it("it does not process BB references if on the schema but it is not populated", async () => {
const userId = generator.guid()
const table: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
presence: true,
type: "string",
},
},
user: {
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
name: "user",
constraints: {
presence: false,
type: "string",
},
},
},
}
const newRow = {
name: "Jack",
}
const { row } = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toBeCalled()
expect(row).toEqual({ ...newRow, user: undefined })
})
it.each([undefined, null, ""])(
"it does not process BB references the field is $%",
async userValue => {
const userId = generator.guid()
const table: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
presence: true,
type: "string",
},
},
user: {
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
name: "user",
constraints: {
presence: false,
type: "string",
},
},
},
}
const newRow = {
name: "Jack",
user: userValue,
}
const { row } = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toBeCalled()
expect(row).toEqual(newRow)
}
)
it("it does not process BB references if not in the schema", async () => {
const userId = generator.guid()
const table: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
presence: true,
type: "string",
},
},
user: {
type: FieldType.NUMBER,
name: "user",
constraints: {
presence: true,
type: "string",
},
},
},
}
const newRow = {
name: "Jack",
user: "123",
}
const { row } = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toBeCalled()
expect(row).toEqual({
name: "Jack",
user: 123,
})
})
})

View file

@ -0,0 +1,142 @@
import {
FieldSubtype,
FieldType,
FieldTypeSubtypes,
Table,
} from "@budibase/types"
import { outputProcessing } from ".."
import { generator, structures } from "@budibase/backend-core/tests"
import * as bbReferenceProcessor from "../bbReferenceProcessor"
jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
processInputBBReferences: jest.fn(),
processOutputBBReferences: jest.fn(),
}))
describe("rowProcessor - outputProcessing", () => {
beforeEach(() => {
jest.resetAllMocks()
})
const processOutputBBReferencesMock =
bbReferenceProcessor.processOutputBBReferences as jest.Mock
it("fetches bb user references given a populated field", async () => {
const table: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
presence: true,
type: "string",
},
},
user: {
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
name: "user",
constraints: {
presence: false,
type: "string",
},
},
},
}
const row = {
name: "Jack",
user: "123",
}
const user = structures.users.user()
processOutputBBReferencesMock.mockResolvedValue(user)
const result = await outputProcessing(table, row, { squash: false })
expect(result).toEqual({ name: "Jack", user })
expect(bbReferenceProcessor.processOutputBBReferences).toBeCalledTimes(1)
expect(bbReferenceProcessor.processOutputBBReferences).toBeCalledWith(
"123",
FieldSubtype.USER
)
})
it("does not fetch bb references when fields are empty", async () => {
const table: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
presence: true,
type: "string",
},
},
user: {
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
name: "user",
constraints: {
presence: false,
type: "string",
},
},
},
}
const row = {
name: "Jack",
}
const result = await outputProcessing(table, row, { squash: false })
expect(result).toEqual({ name: "Jack" })
expect(bbReferenceProcessor.processOutputBBReferences).not.toBeCalled()
})
it("does not fetch bb references when not in the schema", async () => {
const table: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
presence: true,
type: "string",
},
},
user: {
type: FieldType.NUMBER,
name: "user",
constraints: {
presence: false,
type: "string",
},
},
},
}
const row = {
name: "Jack",
user: "123",
}
const result = await outputProcessing(table, row, { squash: false })
expect(result).toEqual({ name: "Jack", user: "123" })
expect(bbReferenceProcessor.processOutputBBReferences).not.toBeCalled()
})
})

View file

@ -150,7 +150,7 @@ export class BaseSocket {
if (room) {
const sessionIds = await this.getRoomSessionIds(room)
const keys = sessionIds.map(this.getSessionKey.bind(this))
const sessions = await this.redisClient?.bulkGet(keys)
const sessions = await this.redisClient?.bulkGet<SocketSession>(keys)
return Object.values(sessions || {})
} else {
return []

View file

@ -16,6 +16,7 @@ export enum FieldType {
INTERNAL = "internal",
BARCODEQR = "barcodeqr",
BIGINT = "bigint",
BB_REFERENCE = "bb_reference",
}
export interface RowAttachment {
@ -33,3 +34,13 @@ export interface Row extends Document {
_viewId?: string
[key: string]: any
}
export enum FieldSubtype {
USER = "user",
}
export const FieldTypeSubtypes = {
BB_REFERENCE: {
USER: FieldSubtype.USER,
},
}