
Merge branch 'master' of github.com:Budibase/budibase into BUDI-7573/use_existing_image_cache

mike12345567 2023-10-12 11:08:12 +01:00
commit 7b8e23eb82
92 changed files with 1261 additions and 877 deletions

View file

@ -20,18 +20,12 @@ env:
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
root-reserve-mb: 35000
swap-size-mb: 1024
remove-android: "true"
remove-dotnet: "true"
- name: Checkout repo and submodules
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
@ -270,18 +264,23 @@ jobs:
if [[ $branch == "master" ]]; then
base_commit=$(git rev-parse origin/master)
else
elif [[ $branch == "develop" ]]; then
base_commit=$(git rev-parse origin/develop)
fi
echo "target_branch=$branch"
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
if [[ ! -z $base_commit ]]; then
echo "target_branch=$branch"
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
else
echo "Nothing to do - branch to branch merge."
fi
- name: Check submodule merged to develop
- name: Check submodule merged to base branch
if: ${{ steps.get_pro_commits.outputs.base_commit != '' }}
uses: actions/github-script@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -290,7 +289,7 @@ jobs:
const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}"" branch.');
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
process.exit(1);
} else {

View file

@ -1,29 +0,0 @@
name: check_unreleased_changes
on:
pull_request:
branches:
- master
jobs:
check_unreleased:
runs-on: ubuntu-latest
steps:
- name: Check for unreleased changes
env:
REPO: "Budibase/budibase"
TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/releases/latest" | \
jq -r .published_at)
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/commits/master" | \
jq -r .commit.committer.date)
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
echo "There are unreleased changes. Please release these changes before merging."
exit 1
fi
echo "No unreleased changes detected."

View file

@ -14,7 +14,7 @@ jobs:
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_BRANCH: ${{ github.head_ref }}
PAYLOAD_PR_NUMBER: ${{ github.ref }}
PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
with:
repository: budibase/budibase-deploys
event: featurebranch-qa-deploy

View file

@ -1,5 +1,5 @@
{
"version": "2.11.12",
"version": "2.11.30",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -67,7 +67,6 @@
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"build:docs": "lerna run --stream build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",
"env:multi:disable": "lerna run --stream env:multi:disable",

View file

@ -62,7 +62,7 @@
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/chance": "1.1.3",
"@types/cookies": "0.7.8",
"@types/jest": "29.5.3",
"@types/jest": "29.5.5",
"@types/lodash": "4.14.180",
"@types/node": "18.17.0",
"@types/node-fetch": "2.6.4",

View file

@ -1,5 +1,10 @@
import { prefixed, DocumentType } from "@budibase/types"
export { SEPARATOR, UNICODE_MAX, DocumentType } from "@budibase/types"
export {
SEPARATOR,
UNICODE_MAX,
DocumentType,
InternalTable,
} from "@budibase/types"
/**
* Can be used to create a few different forms of querying a view.
@ -30,10 +35,6 @@ export const DeprecatedViews = {
],
}
export enum InternalTable {
USER_METADATA = "ta_users",
}
export const StaticDatabases = {
GLOBAL: {
name: "global-db",

View file

@ -45,6 +45,11 @@ export function generateGlobalUserID(id?: any) {
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
}
const isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)
export function isGlobalUserID(id: string) {
return isGlobalUserIDRegex.test(id)
}
/**
* Generates a new user ID based on the passed in global ID.
* @param {string} globalId The ID of the global user.

View file

@ -948,12 +948,15 @@ export const buildFormSchema = (component, asset) => {
if (component._component.endsWith("formblock")) {
let schema = {}
const datasource = getDatasourceForProvider(asset, component)
const info = getSchemaForDatasource(component, datasource)
if (!info?.schema) {
return schema
}
if (!component.fields) {
Object.values(info?.schema)
Object.values(info.schema)
.filter(
({ autocolumn, name }) =>
!autocolumn && !["_rev", "_id"].includes(name)

View file

@ -64,6 +64,7 @@ const INITIAL_FRONTEND_STATE = {
},
features: {
componentValidation: false,
disableUserMetadata: false,
},
errors: [],
hasAppPackage: false,

View file

@ -4,6 +4,7 @@
import { TableNames } from "constants"
import { Grid } from "@budibase/frontend-core"
import { API } from "api"
import { store } from "builderStore"
import GridAddColumnModal from "components/backend/DataTable/modals/grid/GridCreateColumnModal.svelte"
import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
import GridEditUserModal from "components/backend/DataTable/modals/grid/GridEditUserModal.svelte"
@ -17,11 +18,11 @@
import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte"
const userSchemaOverrides = {
firstName: { displayName: "First name" },
lastName: { displayName: "Last name" },
email: { displayName: "Email" },
roleId: { displayName: "Role" },
status: { displayName: "Status" },
firstName: { displayName: "First name", disabled: true },
lastName: { displayName: "Last name", disabled: true },
email: { displayName: "Email", disabled: true },
roleId: { displayName: "Role", disabled: true },
status: { displayName: "Status", disabled: true },
}
$: id = $tables.selected?._id
@ -60,14 +61,14 @@
datasource={gridDatasource}
canAddRows={!isUsersTable}
canDeleteRows={!isUsersTable}
canEditRows={!isUsersTable}
canEditColumns={!isUsersTable}
canEditRows={!isUsersTable || !$store.features.disableUserMetadata}
canEditColumns={!isUsersTable || !$store.features.disableUserMetadata}
schemaOverrides={isUsersTable ? userSchemaOverrides : null}
showAvatars={false}
on:updatedatasource={handleGridTableUpdate}
>
<svelte:fragment slot="filter">
{#if isUsersTable}
{#if isUsersTable && $store.features.disableUserMetadata}
<GridUsersTableButton />
{/if}
<GridFilterButton />

View file

@ -33,7 +33,7 @@
import { getBindings } from "components/backend/DataTable/formula"
import JSONSchemaModal from "./JSONSchemaModal.svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldType } from "@budibase/types"
import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
const AUTO_TYPE = "auto"
@ -43,7 +43,6 @@
const NUMBER_TYPE = FIELDS.NUMBER.type
const JSON_TYPE = FIELDS.JSON.type
const DATE_TYPE = FIELDS.DATETIME.type
const USER_REFRENCE_TYPE = FIELDS.BB_REFERENCE_USER.compositeType
const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
@ -52,7 +51,19 @@
export let field
let mounted = false
let fieldDefinitions = cloneDeep(FIELDS)
const fieldDefinitions = Object.values(FIELDS).reduce(
// Storing the fields by complex field id
(acc, field) => ({
...acc,
[makeFieldId(field.type, field.subtype)]: field,
}),
{}
)
function makeFieldId(type, subtype) {
return `${type}${subtype || ""}`.toUpperCase()
}
let originalName
let linkEditDisabled
let primaryDisplay
@ -72,8 +83,8 @@
let jsonSchemaModal
let allowedTypes = []
let editableColumn = {
type: fieldDefinitions.STRING.type,
constraints: fieldDefinitions.STRING.constraints,
type: FIELDS.STRING.type,
constraints: FIELDS.STRING.constraints,
// Initial value for column name in other table for linked records
fieldName: $tables.selected.name,
}
@ -139,9 +150,6 @@
$tables.selected.primaryDisplay == null ||
$tables.selected.primaryDisplay === editableColumn.name
if (editableColumn.type === FieldType.BB_REFERENCE) {
editableColumn.type = `${editableColumn.type}_${editableColumn.subtype}`
}
// Here we are setting the relationship values based on the editableColumn
// This part of the code is used when viewing an existing field hence the check
// for the tableId
@ -172,7 +180,17 @@
}
}
allowedTypes = getAllowedTypes()
if (!savingColumn) {
editableColumn.fieldId = makeFieldId(
editableColumn.type,
editableColumn.subtype
)
allowedTypes = getAllowedTypes().map(t => ({
fieldId: makeFieldId(t.type, t.subtype),
...t,
}))
}
}
$: initialiseField(field, savingColumn)
@ -249,13 +267,7 @@
let saveColumn = cloneDeep(editableColumn)
// Handle types on composite types
const definition = fieldDefinitions[saveColumn.type.toUpperCase()]
if (definition && saveColumn.type === definition.compositeType) {
saveColumn.type = definition.type
saveColumn.subtype = definition.subtype
delete saveColumn.compositeType
}
delete saveColumn.fieldId
if (saveColumn.type === AUTO_TYPE) {
saveColumn = buildAutoColumn(
@ -320,27 +332,33 @@
}
}
function handleTypeChange(event) {
function onHandleTypeChange(event) {
handleTypeChange(event.detail)
}
function handleTypeChange(type) {
// remove any extra fields that may not be related to this type
delete editableColumn.autocolumn
delete editableColumn.subtype
delete editableColumn.tableId
delete editableColumn.relationshipType
delete editableColumn.formulaType
delete editableColumn.constraints
// Add in defaults and initial definition
const definition = fieldDefinitions[event.detail?.toUpperCase()]
const definition = fieldDefinitions[type?.toUpperCase()]
if (definition?.constraints) {
editableColumn.constraints = definition.constraints
}
editableColumn.type = definition.type
editableColumn.subtype = definition.subtype
// Default relationships many to many
if (editableColumn.type === LINK_TYPE) {
editableColumn.relationshipType = RelationshipType.MANY_TO_MANY
} else if (editableColumn.type === FORMULA_TYPE) {
editableColumn.formulaType = "dynamic"
} else if (editableColumn.type === USER_REFRENCE_TYPE) {
editableColumn.relationshipType = RelationshipType.ONE_TO_MANY
}
}
@ -381,9 +399,26 @@
return ALLOWABLE_NUMBER_OPTIONS
}
const isUsers =
editableColumn.type === FieldType.BB_REFERENCE &&
editableColumn.subtype === FieldSubtype.USERS
if (!external) {
return [
...Object.values(fieldDefinitions),
FIELDS.STRING,
FIELDS.BARCODEQR,
FIELDS.LONGFORM,
FIELDS.OPTIONS,
FIELDS.ARRAY,
FIELDS.NUMBER,
FIELDS.BIGINT,
FIELDS.BOOLEAN,
FIELDS.DATETIME,
FIELDS.ATTACHMENT,
FIELDS.LINK,
FIELDS.FORMULA,
FIELDS.JSON,
isUsers ? FIELDS.USERS : FIELDS.USER,
{ name: "Auto Column", type: AUTO_TYPE },
]
} else {
@ -397,7 +432,7 @@
FIELDS.BOOLEAN,
FIELDS.FORMULA,
FIELDS.BIGINT,
FIELDS.BB_REFERENCE_USER,
isUsers ? FIELDS.USERS : FIELDS.USER,
]
// no-sql or a spreadsheet
if (!external || table.sql) {
@ -472,6 +507,13 @@
return newError
}
function isUsersColumn(column) {
return (
column.type === FieldType.BB_REFERENCE &&
[FieldSubtype.USER, FieldSubtype.USERS].includes(column.subtype)
)
}
onMount(() => {
mounted = true
})
@ -489,11 +531,11 @@
{/if}
<Select
disabled={!typeEnabled}
bind:value={editableColumn.type}
on:change={handleTypeChange}
bind:value={editableColumn.fieldId}
on:change={onHandleTypeChange}
options={allowedTypes}
getOptionLabel={field => field.name}
getOptionValue={field => field.compositeType || field.type}
getOptionValue={field => field.fieldId}
getOptionIcon={field => field.icon}
isOptionEnabled={option => {
if (option.type == AUTO_TYPE) {
@ -555,7 +597,7 @@
<DatePicker bind:value={editableColumn.constraints.datetime.latest} />
</div>
</div>
{#if datasource?.source !== "ORACLE" && datasource?.source !== "SQL_SERVER" && !editableColumn.dateOnly}
{#if datasource?.source !== SourceName.ORACLE && datasource?.source !== SourceName.SQL_SERVER && !editableColumn.dateOnly}
<div>
<div class="row">
<Label>Time zones</Label>
@ -659,18 +701,20 @@
<Button primary text on:click={openJsonSchemaEditor}
>Open schema editor</Button
>
{:else if editableColumn.type === USER_REFRENCE_TYPE}
<!-- Disabled temporally -->
<!-- <Toggle
value={editableColumn.relationshipType === RelationshipType.MANY_TO_MANY}
{:else if isUsersColumn(editableColumn) && datasource?.source !== SourceName.GOOGLE_SHEETS}
<Toggle
value={editableColumn.subtype === FieldSubtype.USERS}
on:change={e =>
(editableColumn.relationshipType = e.detail
? RelationshipType.MANY_TO_MANY
: RelationshipType.ONE_TO_MANY)}
handleTypeChange(
makeFieldId(
FieldType.BB_REFERENCE,
e.detail ? FieldSubtype.USERS : FieldSubtype.USER
)
)}
disabled={!isCreating}
thin
text="Allow multiple users"
/> -->
/>
{/if}
{#if editableColumn.type === AUTO_TYPE || editableColumn.autocolumn}
<Select
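For reference, the composite field id introduced in this file keys every definition in FIELDS by type plus optional subtype, and the same id is used as the Select option value. A minimal standalone sketch of that keying (the FIELDS constant is reduced to a few illustrative entries; the real definitions live in the builder's backend constants):

```ts
// Sketch only: FIELDS is simplified to a few illustrative entries.
type FieldDefinition = { type: string; subtype?: string }

const FIELDS: Record<string, FieldDefinition> = {
  STRING: { type: "string" },
  USER: { type: "bb_reference", subtype: "user" },
  USERS: { type: "bb_reference", subtype: "users" },
}

// Build the composite id used as both the option value and the map key.
function makeFieldId(type: string, subtype?: string): string {
  return `${type}${subtype || ""}`.toUpperCase()
}

// Store the definitions keyed by composite id, as the reduce in the diff does.
const fieldDefinitions = Object.values(FIELDS).reduce<Record<string, FieldDefinition>>(
  (acc, field) => ({ ...acc, [makeFieldId(field.type, field.subtype)]: field }),
  {}
)

console.log(Object.keys(fieldDefinitions)) // ["STRING", "BB_REFERENCEUSER", "BB_REFERENCEUSERS"]
```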

View file

@ -49,6 +49,15 @@
label: "Long Form Text",
value: FIELDS.LONGFORM.type,
},
{
label: "User",
value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
},
{
label: "Users",
value: `${FIELDS.USERS.type}${FIELDS.USERS.subtype}`,
},
]
$: {
@ -143,7 +152,7 @@
<div class="field">
<span>{name}</span>
<Select
value={schema[name]?.type}
value={`${schema[name]?.type}${schema[name]?.subtype || ""}`}
options={typeOptions}
placeholder={null}
getOptionLabel={option => option.label}

View file

@ -3,6 +3,7 @@
import { FIELDS } from "constants/backend"
import { API } from "api"
import { parseFile } from "./utils"
import { canBeDisplayColumn } from "@budibase/shared-core"
export let rows = []
export let schema = {}
@ -10,36 +11,82 @@
export let displayColumn = null
export let promptUpload = false
const typeOptions = [
{
const typeOptions = {
[FIELDS.STRING.type]: {
label: "Text",
value: FIELDS.STRING.type,
config: {
type: FIELDS.STRING.type,
constraints: FIELDS.STRING.constraints,
},
},
{
[FIELDS.NUMBER.type]: {
label: "Number",
value: FIELDS.NUMBER.type,
config: {
type: FIELDS.NUMBER.type,
constraints: FIELDS.NUMBER.constraints,
},
},
{
[FIELDS.DATETIME.type]: {
label: "Date",
value: FIELDS.DATETIME.type,
config: {
type: FIELDS.DATETIME.type,
constraints: FIELDS.DATETIME.constraints,
},
},
{
[FIELDS.OPTIONS.type]: {
label: "Options",
value: FIELDS.OPTIONS.type,
config: {
type: FIELDS.OPTIONS.type,
constraints: FIELDS.OPTIONS.constraints,
},
},
{
[FIELDS.ARRAY.type]: {
label: "Multi-select",
value: FIELDS.ARRAY.type,
config: {
type: FIELDS.ARRAY.type,
constraints: FIELDS.ARRAY.constraints,
},
},
{
[FIELDS.BARCODEQR.type]: {
label: "Barcode/QR",
value: FIELDS.BARCODEQR.type,
config: {
type: FIELDS.BARCODEQR.type,
constraints: FIELDS.BARCODEQR.constraints,
},
},
{
[FIELDS.LONGFORM.type]: {
label: "Long Form Text",
value: FIELDS.LONGFORM.type,
config: {
type: FIELDS.LONGFORM.type,
constraints: FIELDS.LONGFORM.constraints,
},
},
]
user: {
label: "User",
value: "user",
config: {
type: FIELDS.USER.type,
subtype: FIELDS.USER.subtype,
constraints: FIELDS.USER.constraints,
},
},
users: {
label: "Users",
value: "users",
config: {
type: FIELDS.USERS.type,
subtype: FIELDS.USERS.subtype,
constraints: FIELDS.USERS.constraints,
},
},
}
let fileInput
let error = null
@ -48,10 +95,16 @@
let validation = {}
let validateHash = ""
let errors = {}
let selectedColumnTypes = {}
$: displayColumnOptions = Object.keys(schema || {}).filter(column => {
return validation[column]
return validation[column] && canBeDisplayColumn(schema[column].type)
})
$: if (displayColumn && !canBeDisplayColumn(schema[displayColumn].type)) {
displayColumn = null
}
$: {
// binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
@ -72,6 +125,13 @@
rows = response.rows
schema = response.schema
fileName = response.fileName
selectedColumnTypes = Object.entries(response.schema).reduce(
(acc, [colName, fieldConfig]) => ({
...acc,
[colName]: fieldConfig.type,
}),
{}
)
} catch (e) {
loading = false
error = e
@ -98,8 +158,10 @@
}
const handleChange = (name, e) => {
schema[name].type = e.detail
schema[name].constraints = FIELDS[e.detail.toUpperCase()].constraints
const { config } = typeOptions[e.detail]
schema[name].type = config.type
schema[name].subtype = config.subtype
schema[name].constraints = config.constraints
}
const openFileUpload = (promptUpload, fileInput) => {
@ -142,9 +204,9 @@
<div class="field">
<span>{column.name}</span>
<Select
bind:value={column.type}
bind:value={selectedColumnTypes[column.name]}
on:change={e => handleChange(name, e)}
options={typeOptions}
options={Object.values(typeOptions)}
placeholder={null}
getOptionLabel={option => option.label}
getOptionValue={option => option.value}

View file

@ -102,7 +102,7 @@
</div>
{/if}
<div class="text" title={showTooltip ? text : null}>
{text}
<span title={text}>{text}</span>
{#if selectedBy}
<UserAvatars size="XS" users={selectedBy} />
{/if}
@ -227,9 +227,6 @@
.text {
font-weight: 600;
font-size: 12px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
flex: 1 1 auto;
color: var(--spectrum-global-color-gray-900);
order: 2;
@ -238,6 +235,11 @@
align-items: center;
gap: 8px;
}
.text span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.scrollable .text {
flex: 0 0 auto;
max-width: 160px;

View file

@ -37,7 +37,7 @@
}
$: datasource = getDatasourceForProvider($currentAsset, componentInstance)
$: resourceId = datasource.resourceId || datasource.tableId
$: resourceId = datasource?.resourceId || datasource?.tableId
$: if (!isEqual(value, cachedValue)) {
cachedValue = cloneDeep(value)

View file

@ -20,7 +20,6 @@
import { FieldType } from "@budibase/types"
import { createEventDispatcher, onMount } from "svelte"
import FilterUsers from "./FilterUsers.svelte"
import { RelationshipType } from "constants/backend"
export let schemaFields
export let filters = []
@ -126,6 +125,7 @@
// Update type based on field
const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field)
filter.type = fieldSchema?.type
filter.subtype = fieldSchema?.subtype
// Update external type based on field
filter.externalType = getSchema(filter)?.externalType
@ -191,12 +191,12 @@
}
const getValidOperatorsForType = filter => {
if (!filter) {
if (!filter?.field) {
return []
}
return LuceneUtils.getValidOperatorsForType(
filter.type,
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)
@ -301,9 +301,10 @@
{:else if filter.type === FieldType.BB_REFERENCE}
<FilterUsers
bind:value={filter.value}
multiselect={getSchema(filter).relationshipType ===
RelationshipType.MANY_TO_MANY ||
filter.operator === OperatorOptions.In.value}
multiselect={[
OperatorOptions.In.value,
OperatorOptions.ContainsAny.value,
].includes(filter.operator)}
disabled={filter.noValue}
/>
{:else}

View file

@ -1,7 +1,9 @@
import { FieldType, FieldSubtype } from "@budibase/types"
export const FIELDS = {
STRING: {
name: "Text",
type: "string",
type: FieldType.STRING,
icon: "Text",
constraints: {
type: "string",
@ -11,7 +13,7 @@ export const FIELDS = {
},
BARCODEQR: {
name: "Barcode/QR",
type: "barcodeqr",
type: FieldType.BARCODEQR,
icon: "Camera",
constraints: {
type: "string",
@ -21,7 +23,7 @@ export const FIELDS = {
},
LONGFORM: {
name: "Long Form Text",
type: "longform",
type: FieldType.LONGFORM,
icon: "TextAlignLeft",
constraints: {
type: "string",
@ -31,7 +33,7 @@ export const FIELDS = {
},
OPTIONS: {
name: "Options",
type: "options",
type: FieldType.OPTIONS,
icon: "Dropdown",
constraints: {
type: "string",
@ -41,7 +43,7 @@ export const FIELDS = {
},
ARRAY: {
name: "Multi-select",
type: "array",
type: FieldType.ARRAY,
icon: "Duplicate",
constraints: {
type: "array",
@ -51,7 +53,7 @@ export const FIELDS = {
},
NUMBER: {
name: "Number",
type: "number",
type: FieldType.NUMBER,
icon: "123",
constraints: {
type: "number",
@ -61,12 +63,12 @@ export const FIELDS = {
},
BIGINT: {
name: "BigInt",
type: "bigint",
type: FieldType.BIGINT,
icon: "TagBold",
},
BOOLEAN: {
name: "Boolean",
type: "boolean",
type: FieldType.BOOLEAN,
icon: "Boolean",
constraints: {
type: "boolean",
@ -75,7 +77,7 @@ export const FIELDS = {
},
DATETIME: {
name: "Date/Time",
type: "datetime",
type: FieldType.DATETIME,
icon: "Calendar",
constraints: {
type: "string",
@ -89,7 +91,7 @@ export const FIELDS = {
},
ATTACHMENT: {
name: "Attachment",
type: "attachment",
type: FieldType.ATTACHMENT,
icon: "Folder",
constraints: {
type: "array",
@ -98,7 +100,7 @@ export const FIELDS = {
},
LINK: {
name: "Relationship",
type: "link",
type: FieldType.LINK,
icon: "Link",
constraints: {
type: "array",
@ -107,26 +109,34 @@ export const FIELDS = {
},
FORMULA: {
name: "Formula",
type: "formula",
type: FieldType.FORMULA,
icon: "Calculator",
constraints: {},
},
JSON: {
name: "JSON",
type: "json",
type: FieldType.JSON,
icon: "Brackets",
constraints: {
type: "object",
presence: false,
},
},
BB_REFERENCE_USER: {
USER: {
name: "User",
type: "bb_reference",
subtype: "user",
compositeType: "bb_reference_user", // Used for working with the subtype on CreateEditColumn as is it was a primary type
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USER,
icon: "User",
},
USERS: {
name: "Users",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
icon: "User",
constraints: {
type: "array",
},
},
}
export const AUTO_COLUMN_SUB_TYPES = {

View file

@ -3,16 +3,17 @@
* e.g.
* name all names result
* ------ ----------- --------
* ("foo") ["foo"] "foo (1)"
* ("foo") ["foo", "foo (1)"] "foo (2)"
* ("foo (1)") ["foo", "foo (1)"] "foo (2)"
* ("foo") ["foo", "foo (2)"] "foo (1)"
* ("foo") ["foo"] "foo 1"
* ("foo") ["foo", "foo 1"] "foo 2"
* ("foo 1") ["foo", "foo 1"] "foo 2"
* ("foo") ["foo", "foo 2"] "foo 1"
*
* Repl
*/
export const duplicateName = (name, allNames) => {
const baseName = name.split(" (")[0]
const isDuplicate = new RegExp(`${baseName}\\s\\((\\d+)\\)$`)
const duplicatePattern = new RegExp(`\\s(\\d+)$`)
const baseName = name.split(duplicatePattern)[0]
const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)
// get the sequence from matched names
const sequence = []
@ -28,7 +29,6 @@ export const duplicateName = (name, allNames) => {
return false
})
sequence.sort((a, b) => a - b)
// get the next number in the sequence
let number
if (sequence.length === 0) {
@ -46,5 +46,5 @@ export const duplicateName = (name, allNames) => {
}
}
return `${baseName} (${number})`
return `${baseName} ${number}`
}

View file

@ -9,34 +9,34 @@ describe("duplicate", () => {
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (1)")
expect(duplicate).toBe("foo 1")
})
it("with multiple existing", async () => {
const names = ["foo", "foo (1)", "foo (2)"]
const names = ["foo", "foo 1", "foo 2"]
const name = "foo"
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (3)")
expect(duplicate).toBe("foo 3")
})
it("with mixed multiple existing", async () => {
const names = ["foo", "foo (1)", "foo (2)", "bar", "bar (1)", "bar (2)"]
const names = ["foo", "foo 1", "foo 2", "bar", "bar 1", "bar 2"]
const name = "foo"
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (3)")
expect(duplicate).toBe("foo 3")
})
it("with incomplete sequence", async () => {
const names = ["foo", "foo (2)", "foo (3)"]
const names = ["foo", "foo 2", "foo 3"]
const name = "foo"
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (1)")
expect(duplicate).toBe("foo 1")
})
})
})

View file

@ -118,7 +118,7 @@
}
const getOperatorOptions = condition => {
return LuceneUtils.getValidOperatorsForType(condition.valueType)
return LuceneUtils.getValidOperatorsForType({ type: condition.valueType })
}
const onOperatorChange = (condition, newOperator) => {
@ -137,9 +137,9 @@
condition.referenceValue = null
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(newType).map(
x => x.value
)
const validOperators = LuceneUtils.getValidOperatorsForType({
type: newType,
}).map(x => x.value)
if (!validOperators.includes(condition.operator)) {
condition.operator =
validOperators[0] ?? Constants.OperatorOptions.Equals.value

View file

@ -3419,6 +3419,17 @@
"value": "custom"
}
},
{
"type": "event",
"label": "On change",
"key": "onChange",
"context": [
{
"label": "Field Value",
"key": "value"
}
]
},
{
"type": "validation/string",
"label": "Validation",
@ -5673,11 +5684,6 @@
"label": "Validation",
"key": "validation"
},
{
"type": "filter/relationship",
"label": "Filtering",
"key": "filter"
},
{
"type": "boolean",
"label": "Search",

View file

@ -63,7 +63,7 @@
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(
expression.type,
{ type: expression.type },
expression.field,
datasource
).map(x => x.value)
@ -125,7 +125,7 @@
<Select
disabled={!filter.field}
options={LuceneUtils.getValidOperatorsForType(
filter.type,
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)}

View file

@ -1,9 +1,28 @@
<script>
import RelationshipField from "./RelationshipField.svelte"
import { sdk } from "@budibase/shared-core"
export let defaultValue
function updateUserIDs(value) {
if (Array.isArray(value)) {
return value.map(val => sdk.users.getGlobalUserID(val))
} else {
return sdk.users.getGlobalUserID(value)
}
}
function updateReferences(value) {
if (sdk.users.containsUserID(value)) {
return updateUserIDs(value)
}
return value
}
</script>
<RelationshipField
{...$$props}
datasourceType={"user"}
primaryDisplay={"email"}
defaultValue={updateReferences(defaultValue)}
/>

View file

@ -128,6 +128,7 @@
<div class="manual-input">
<Input
bind:value
updateOnChange={false}
on:change={() => {
dispatch("change", value)
}}

View file

@ -160,7 +160,9 @@
const handleChange = value => {
const changed = fieldApi.setValue(value)
if (onChange && changed) {
onChange({ value })
onChange({
value,
})
}
}

View file

@ -1,7 +1,7 @@
<script>
import { getContext } from "svelte"
import RelationshipCell from "./RelationshipCell.svelte"
import { FieldSubtype } from "@budibase/types"
import { FieldSubtype, RelationshipType } from "@budibase/types"
export let api
@ -12,10 +12,14 @@
...$$props.schema,
// This is not really used, just adding some content to be able to render the relationship cell
tableId: "external",
relationshipType:
subtype === FieldSubtype.USER
? RelationshipType.ONE_TO_MANY
: RelationshipType.MANY_TO_MANY,
}
async function searchFunction(searchParams) {
if (subtype !== FieldSubtype.USER) {
if (subtype !== FieldSubtype.USER && subtype !== FieldSubtype.USERS) {
throw `Search for '${subtype}' not implemented`
}

View file

@ -1,7 +1,8 @@
<script>
import { getContext, onMount, tick } from "svelte"
import GridCell from "./GridCell.svelte"
import { canBeDisplayColumn } from "@budibase/shared-core"
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils"
export let column
@ -24,14 +25,6 @@
datasource,
} = getContext("grid")
const bannedDisplayColumnTypes = [
"link",
"array",
"attachment",
"boolean",
"json",
]
let anchor
let open = false
let editIsOpen = false
@ -231,8 +224,7 @@
<MenuItem
icon="Label"
on:click={makeDisplayColumn}
disabled={idx === "sticky" ||
bannedDisplayColumnTypes.includes(column.schema.type)}
disabled={idx === "sticky" || !canBeDisplayColumn(column.schema.type)}
>
Use as display column
</MenuItem>
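The hard-coded banned-type list is replaced here by the shared canBeDisplayColumn helper from @budibase/shared-core. Its actual implementation is not shown in this diff; a minimal sketch consistent with the list removed above would be:

```ts
// Sketch only: assumes the helper rejects the same types the grid previously
// hard-coded; the real shared-core implementation may differ.
const BANNED_DISPLAY_COLUMN_TYPES = ["link", "array", "attachment", "boolean", "json"]

export function canBeDisplayColumn(type: string): boolean {
  return !BANNED_DISPLAY_COLUMN_TYPES.includes(type)
}
```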

View file

@ -21,6 +21,7 @@ const TypeIconMap = {
bigint: "TagBold",
bb_reference: {
user: "User",
users: "UserGroup",
},
}

@ -1 +1 @@
Subproject commit 7040ae5282cc23d7ae56ac1be8a369d1c32aab2f
Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e

View file

@ -18,8 +18,13 @@
"test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch",
<<<<<<< HEAD
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docs": "node ./scripts/docs/generate.js open",
=======
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
"build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
>>>>>>> c762d8727f69b3154eb14429b0842f3d0f62ad76
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",
@ -109,7 +114,7 @@
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"validate.js": "0.13.1",
"vm2": "3.9.19",
"vm2": "^3.9.19",
"worker-farm": "1.7.0",
"xml2js": "0.5.0"
},
@ -121,7 +126,7 @@
"@trendyol/jest-testcontainers": "2.1.1",
"@types/global-agent": "2.1.1",
"@types/google-spreadsheet": "3.1.5",
"@types/jest": "29.5.3",
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.8",
"@types/lodash": "4.14.180",

View file

@ -1,31 +0,0 @@
### Documentation
This directory contains the scripts required to generate the APIDoc based documentation.
You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).
In general most API endpoints will look like:
```js
/**
* @api {post} /api/:param/url Give it a name
* @apiName Give it a name
* @apiGroup group
* @apiPermission permission
* @apiDescription Describe what the endpoint does, any special cases the user
* should be aware of.
*
* @apiParam {string} param describe a URL parameter.
*
* @apiParam (Body) input describe a field on the body.
*
* @apiSuccess {object} output describe the output.
*/
```
There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Make sure to always have an `@api` definition at the start, which must always have the
HTTP verb, the endpoint URL and the name.
3. There are three ways you can specify parameters used as inputs for your endpoint,
`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
for query string parameters.
4. The `@apiGroup` should be the same for all API Doc comments in a route file.

View file

@ -1,74 +0,0 @@
const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")
const config = {
name: "Budibase API",
version: packageJson.version,
description: "Documenting the Budibase backend API",
title: "Budibase app service API",
}
const shouldOpen = process.argv[2]
const disallowed = []
function filter(parsedRouteFiles) {
const tagToSearch = "url"
for (let routeFile of parsedRouteFiles) {
for (let route of routeFile) {
let routeInfo = route["local"]
if (disallowed.includes(routeInfo[tagToSearch])) {
const idx = routeFile.indexOf(route)
routeFile.splice(idx, 1)
}
}
}
}
async function generate() {
// start by writing a config file
const configPath = join(__dirname, "config.json")
fs.writeFileSync(configPath, JSON.stringify(config))
const mainPath = join(__dirname, "..", "..")
const srcPath = join(mainPath, "src", "api", "routes")
const assetsPath = join(mainPath, "builder", "assets", "docs")
if (!fs.existsSync(assetsPath)) {
fs.mkdirSync(assetsPath, { recursive: true })
}
const options = {
src: [srcPath],
dest: assetsPath,
filters: {
main: {
postFilter: filter,
},
},
config: configPath,
}
const doc = createDoc(options)
if (typeof doc !== "boolean") {
const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
console.log(
`Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
)
} else {
throw "Unable to generate docs."
}
// delete the temporary config file
fs.unlinkSync(configPath)
setTimeout(async () => {
if (shouldOpen === "open") {
await open(join(assetsPath, "index.html"), { wait: false })
}
}, 2000)
}
generate().catch(err => {
console.error(err)
})

View file

@ -1,320 +0,0 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")
/********************************************************
* Based on: https://github.com/fsbahman/apidoc-swagger *
********************************************************/
let swagger = {
swagger: "2.0",
info: {},
paths: {},
definitions: {},
}
function toSwagger(apidocJson, projectJson) {
swagger.info = addInfo(projectJson)
swagger.paths = extractPaths(apidocJson)
return swagger
}
let tagsRegex = /(<([^>]+)>)/gi
// Removes <p> </p> tags from text
function removeTags(text) {
return text ? text.replace(tagsRegex, "") : text
}
function addInfo(projectJson) {
let info = {}
info["title"] = projectJson.title || projectJson.name
info["version"] = projectJson.version
info["description"] = projectJson.description
return info
}
/**
* Extracts paths provided in json format
* post, patch, put request parameters are extracted in body
* get and delete are extracted to path parameters
* @param apidocJson
* @returns {{}}
*/
function extractPaths(apidocJson) {
let apiPaths = groupByUrl(apidocJson)
let paths = {}
for (let i = 0; i < apiPaths.length; i++) {
let verbs = apiPaths[i].verbs
let url = verbs[0].url
let pattern = pathToRegexp(url, null)
let matches = pattern.exec(url)
// Surrounds URL parameters with curly brackets -> :email with {email}
let pathKeys = []
for (let j = 1; j < matches.length; j++) {
let key = matches[j].slice(1)
url = url.replace(matches[j], "{" + key + "}")
pathKeys.push(key)
}
for (let j = 0; j < verbs.length; j++) {
let verb = verbs[j]
let type = verb.type
let obj = (paths[url] = paths[url] || {})
if (type === "post" || type === "patch" || type === "put") {
_.extend(
obj,
createPostPushPutOutput(verb, swagger.definitions, pathKeys)
)
} else {
_.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
}
}
}
return paths
}
function createPostPushPutOutput(verbs, definitions, pathKeys) {
let pathItemObject = {}
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
let params = []
let pathParams = createPathParameters(verbs, pathKeys)
pathParams = _.filter(pathParams, function (param) {
let hasKey = pathKeys.indexOf(param.name) !== -1
return !(param.in === "path" && !hasKey)
})
params = params.concat(pathParams)
let required =
verbs.parameter &&
verbs.parameter.fields &&
verbs.parameter.fields.Parameter &&
verbs.parameter.fields.Parameter.length > 0
params.push({
in: "body",
name: "body",
description: removeTags(verbs.description),
required: required,
schema: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
},
})
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: params,
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
function createVerbDefinitions(verbs, definitions) {
let result = {
topLevelParametersRef: null,
topLevelSuccessRef: null,
topLevelSuccessRefType: null,
}
let defaultObjectName = verbs.name
let fieldArrayResult = {}
if (verbs && verbs.parameter && verbs.parameter.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.parameter.fields.Parameter,
definitions,
verbs.name,
defaultObjectName
)
result.topLevelParametersRef = fieldArrayResult.topLevelRef
}
if (verbs && verbs.success && verbs.success.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.success.fields["Success 200"],
definitions,
verbs.name,
defaultObjectName
)
result.topLevelSuccessRef = fieldArrayResult.topLevelRef
result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
}
return result
}
function createFieldArrayDefinitions(
fieldArray,
definitions,
topLevelRef,
defaultObjectName
) {
let result = {
topLevelRef: topLevelRef,
topLevelRefType: null,
}
if (!fieldArray) {
return result
}
for (let i = 0; i < fieldArray.length; i++) {
let parameter = fieldArray[i]
let nestedName = createNestedName(parameter.field)
let objectName = nestedName.objectName
if (!objectName) {
objectName = defaultObjectName
}
let type = parameter.type
if (i === 0) {
result.topLevelRefType = type
if (parameter.type === "Object") {
objectName = nestedName.propertyName
nestedName.propertyName = null
} else if (parameter.type === "Array") {
objectName = nestedName.propertyName
nestedName.propertyName = null
result.topLevelRefType = "array"
}
result.topLevelRef = objectName
}
definitions[objectName] = definitions[objectName] || {
properties: {},
required: [],
}
if (nestedName.propertyName) {
let prop = {
type: (parameter.type || "").toLowerCase(),
description: removeTags(parameter.description),
}
if (parameter.type === "Object") {
prop.$ref = "#/definitions/" + parameter.field
}
let typeIndex = type.indexOf("[]")
if (typeIndex !== -1 && typeIndex === type.length - 2) {
prop.type = "array"
prop.items = {
type: type.slice(0, type.length - 2),
}
}
definitions[objectName]["properties"][nestedName.propertyName] = prop
if (!parameter.optional) {
let arr = definitions[objectName]["required"]
if (arr.indexOf(nestedName.propertyName) === -1) {
arr.push(nestedName.propertyName)
}
}
}
}
return result
}
function createNestedName(field) {
let propertyName = field
let objectName
let propertyNames = field.split(".")
if (propertyNames && propertyNames.length > 1) {
propertyName = propertyNames[propertyNames.length - 1]
propertyNames.pop()
objectName = propertyNames.join(".")
}
return {
propertyName: propertyName,
objectName: objectName,
}
}
/**
* Generate get, delete method output
* @param verbs
* @param definitions
* @returns {{}}
*/
function createGetDeleteOutput(verbs, definitions) {
let pathItemObject = {}
verbs.type = verbs.type === "del" ? "delete" : verbs.type
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: createPathParameters(verbs),
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
/**
* Iterate through all method parameters and create array of parameter objects which are stored as path parameters
* @param verbs
* @returns {Array}
*/
function createPathParameters(verbs) {
let pathItemObject = []
if (verbs.parameter && verbs.parameter.fields.Parameter) {
for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
let param = verbs.parameter.fields.Parameter[i]
let field = param.field
let type = param.type
pathItemObject.push({
name: field,
in: type === "file" ? "formData" : "path",
required: !param.optional,
type: param.type.toLowerCase(),
description: removeTags(param.description),
})
}
}
return pathItemObject
}
function groupByUrl(apidocJson) {
return _.chain(apidocJson)
.groupBy("url")
.toPairs()
.map(function (element) {
return _.zipObject(["url", "verbs"], element)
})
.value()
}
module.exports = toSwagger

View file

@ -859,7 +859,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@ -1064,7 +1065,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@ -1280,7 +1282,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},

View file

@ -782,6 +782,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@ -946,6 +947,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@ -1117,6 +1119,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:

View file

@ -289,6 +289,7 @@ async function performAppCreate(ctx: UserCtx) {
},
features: {
componentValidation: true,
disableUserMetadata: true,
},
}
@ -310,10 +311,13 @@ async function performAppCreate(ctx: UserCtx) {
}
})
// Keep existing validation setting
// Keep existing feature flags
if (!existing.features?.componentValidation) {
newApplication.features!.componentValidation = false
}
if (!existing.features?.disableUserMetadata) {
newApplication.features!.disableUserMetadata = false
}
// Migrate navigation settings and screens if required
if (existing) {

View file

@ -5,8 +5,11 @@ import {
FieldType,
FilterType,
IncludeRelationship,
ManyToManyRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Operation,
PaginationJson,
RelationshipFieldMetadata,
RelationshipsJson,
RelationshipType,
Row,
@ -254,12 +257,20 @@ function fixArrayTypes(row: Row, table: Table) {
return row
}
function isOneSide(field: FieldSchema) {
function isOneSide(
field: RelationshipFieldMetadata
): field is OneToManyRelationshipFieldMetadata {
return (
field.relationshipType && field.relationshipType.split("-")[0] === "one"
)
}
function isManyToMany(
field: RelationshipFieldMetadata
): field is ManyToManyRelationshipFieldMetadata {
return !!(field as ManyToManyRelationshipFieldMetadata).through
}
function isEditableColumn(column: FieldSchema) {
const isExternalAutoColumn =
column.autocolumn &&
@ -352,11 +363,11 @@ export class ExternalRequest<T extends Operation> {
}
}
// many to many
else if (field.through) {
else if (isManyToMany(field)) {
// we're not inserting a doc, will be a bunch of update calls
const otherKey: string = field.throughFrom || linkTablePrimary
const thisKey: string = field.throughTo || tablePrimary
row[key].forEach((relationship: any) => {
for (const relationship of row[key]) {
manyRelationships.push({
tableId: field.through || field.tableId,
isUpdate: false,
@ -365,14 +376,14 @@ export class ExternalRequest<T extends Operation> {
// leave the ID for enrichment later
[thisKey]: `{{ literal ${tablePrimary} }}`,
})
})
}
}
// many to one
else {
const thisKey: string = "id"
// @ts-ignore
const otherKey: string = field.fieldName
row[key].forEach((relationship: any) => {
for (const relationship of row[key]) {
manyRelationships.push({
tableId: field.tableId,
isUpdate: true,
@ -381,7 +392,7 @@ export class ExternalRequest<T extends Operation> {
// leave the ID for enrichment later
[otherKey]: `{{ literal ${tablePrimary} }}`,
})
})
}
}
}
// we return the relationships that may need to be created in the through table
@ -549,15 +560,12 @@ export class ExternalRequest<T extends Operation> {
if (!table.primary || !linkTable.primary) {
continue
}
const definition: any = {
// if no foreign key specified then use the name of the field in other table
from: field.foreignKey || table.primary[0],
to: field.fieldName,
const definition: RelationshipsJson = {
tableName: linkTableName,
// need to specify where to put this back into
column: fieldName,
}
if (field.through) {
if (isManyToMany(field)) {
const { tableName: throughTableName } = breakExternalTableId(
field.through
)
@ -567,6 +575,10 @@ export class ExternalRequest<T extends Operation> {
definition.to = field.throughFrom || linkTable.primary[0]
definition.fromPrimary = table.primary[0]
definition.toPrimary = linkTable.primary[0]
} else {
// if no foreign key specified then use the name of the field in other table
definition.from = field.foreignKey || table.primary[0]
definition.to = field.fieldName
}
relationships.push(definition)
}
@ -588,7 +600,7 @@ export class ExternalRequest<T extends Operation> {
const primaryKey = table.primary[0]
// make a new request to get the row with all its relationships
// we need this to work out if any relationships need removed
for (let field of Object.values(table.schema)) {
for (const field of Object.values(table.schema)) {
if (
field.type !== FieldTypes.LINK ||
!field.fieldName ||
@ -601,9 +613,9 @@ export class ExternalRequest<T extends Operation> {
const { tableName: relatedTableName } = breakExternalTableId(tableId)
// @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0]
const manyKey = field.throughTo || primaryKey
const lookupField = isMany ? primaryKey : field.foreignKey
const fieldName = isMany ? manyKey : field.fieldName
const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName
if (!lookupField || !row[lookupField]) {
continue
}
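The isOneSide/isManyToMany helpers added above narrow the relationship metadata union via TypeScript type guards, which lets the many-to-many branch access `through`/`throughFrom`/`throughTo` without casts. A standalone sketch of the pattern (types simplified; the real RelationshipFieldMetadata variants come from @budibase/types and carry more fields):

```ts
// Sketch only: simplified stand-ins for the metadata types in @budibase/types.
type OneToManyRelationship = {
  relationshipType: "one-to-many"
  foreignKey: string
}
type ManyToManyRelationship = {
  relationshipType: "many-to-many"
  through: string
  throughFrom?: string
  throughTo?: string
}
type Relationship = OneToManyRelationship | ManyToManyRelationship

// "one-to-many" is the only variant whose relationshipType starts with "one".
function isOneSide(field: Relationship): field is OneToManyRelationship {
  return field.relationshipType.split("-")[0] === "one"
}

// Many-to-many is the only variant that records a junction ("through") table.
function isManyToMany(field: Relationship): field is ManyToManyRelationship {
  return "through" in field
}
```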

View file

@ -156,7 +156,10 @@ export async function destroy(ctx: UserCtx) {
}
const table = await sdk.tables.getTable(row.tableId)
// update the row to include full relationships before deleting them
row = await outputProcessing(table, row, { squash: false })
row = await outputProcessing(table, row, {
squash: false,
skipBBReferences: true,
})
// now remove the relationships
await linkRows.updateLinks({
eventType: linkRows.EventType.ROW_DELETE,
@ -190,6 +193,7 @@ export async function bulkDestroy(ctx: UserCtx) {
// they need to be the full rows (including previous relationships) for automations
const processedRows = (await outputProcessing(table, rows, {
squash: false,
skipBBReferences: true,
})) as Row[]
// remove the relationships first

View file

@ -4,6 +4,8 @@ import { context } from "@budibase/backend-core"
import {
Ctx,
FieldType,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Row,
SearchFilters,
Table,
@ -19,7 +21,14 @@ function isForeignKey(key: string, table: Table) {
const relationships = Object.values(table.schema).filter(
column => column.type === FieldType.LINK
)
return relationships.some(relationship => relationship.foreignKey === key)
return relationships.some(
relationship =>
(
relationship as
| OneToManyRelationshipFieldMetadata
| ManyToOneRelationshipFieldMetadata
).foreignKey === key
)
}
validateJs.extend(validateJs.validators.datetime, {

View file

@ -1,4 +1,4 @@
import { FieldTypes, FormulaTypes } from "../../../constants"
import { FormulaTypes } from "../../../constants"
import { clearColumns } from "./utils"
import { doesContainStrings } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
@ -6,12 +6,20 @@ import isEqual from "lodash/isEqual"
import uniq from "lodash/uniq"
import { updateAllFormulasInTable } from "../row/staticFormula"
import { context } from "@budibase/backend-core"
import { FieldSchema, Table } from "@budibase/types"
import {
FieldSchema,
FieldType,
FormulaFieldMetadata,
Table,
} from "@budibase/types"
import sdk from "../../../sdk"
import { isRelationshipColumn } from "../../../db/utils"
function isStaticFormula(column: FieldSchema) {
function isStaticFormula(
column: FieldSchema
): column is FormulaFieldMetadata & { formulaType: FormulaTypes.STATIC } {
return (
column.type === FieldTypes.FORMULA &&
column.type === FieldType.FORMULA &&
column.formulaType === FormulaTypes.STATIC
)
}
@ -56,8 +64,9 @@ async function checkIfFormulaNeedsCleared(
for (let removed of removedColumns) {
let tableToUse: Table | undefined = table
// if relationship, get the related table
if (removed.type === FieldTypes.LINK) {
tableToUse = tables.find(table => table._id === removed.tableId)
if (removed.type === FieldType.LINK) {
const removedTableId = removed.tableId
tableToUse = tables.find(table => table._id === removedTableId)
}
if (!tableToUse) {
continue
@ -73,17 +82,18 @@ async function checkIfFormulaNeedsCleared(
}
for (let relatedTableId of table.relatedFormula) {
const relatedColumns = Object.values(table.schema).filter(
column => column.tableId === relatedTableId
column =>
column.type === FieldType.LINK && column.tableId === relatedTableId
)
const relatedTable = tables.find(table => table._id === relatedTableId)
// look to see if the column was used in a relationship formula,
// relationships won't be used for this
if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) {
if (relatedTable && relatedColumns && removed.type !== FieldType.LINK) {
let relatedFormulaToRemove: string[] = []
for (let column of relatedColumns) {
relatedFormulaToRemove = relatedFormulaToRemove.concat(
getFormulaThatUseColumn(relatedTable, [
column.fieldName!,
(column as any).fieldName!,
removed.name,
])
)
@ -116,7 +126,7 @@ async function updateRelatedFormulaLinksOnTables(
const initialTables = cloneDeep(tables)
// first find the related column names
const relatedColumns = Object.values(table.schema).filter(
col => col.type === FieldTypes.LINK
isRelationshipColumn
)
// we start by removing the formula field from all tables
for (let otherTable of tables) {
@ -135,6 +145,7 @@ async function updateRelatedFormulaLinksOnTables(
if (!columns || columns.length === 0) {
continue
}
const relatedTable = tables.find(
related => related._id === relatedCol.tableId
)

View file

@ -15,11 +15,16 @@ import { handleRequest } from "../row/external"
import { context, events } from "@budibase/backend-core"
import { isRows, isSchema, parse } from "../../../utilities/schema"
import {
AutoReason,
BulkImportRequest,
BulkImportResponse,
Datasource,
FieldSchema,
ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Operation,
QueryJson,
RelationshipFieldMetadata,
RelationshipType,
RenameColumn,
SaveTableRequest,
@ -74,10 +79,13 @@ function cleanupRelationships(
schema.type === FieldTypes.LINK &&
(!oldTable || table.schema[key] == null)
) {
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
table => table._id === schemaTableId
)
const foreignKey = schema.foreignKey
const foreignKey =
schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
schema.foreignKey
if (!relatedTable || !foreignKey) {
continue
}
@ -116,7 +124,7 @@ function otherRelationshipType(type?: string) {
function generateManyLinkSchema(
datasource: Datasource,
column: FieldSchema,
column: ManyToManyRelationshipFieldMetadata,
table: Table,
relatedTable: Table
): Table {
@ -151,10 +159,12 @@ function generateManyLinkSchema(
}
function generateLinkSchema(
column: FieldSchema,
column:
| OneToManyRelationshipFieldMetadata
| ManyToOneRelationshipFieldMetadata,
table: Table,
relatedTable: Table,
type: RelationshipType
type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
) {
if (!table.primary || !relatedTable.primary) {
throw new Error("Unable to generate link schema, no primary keys")
@ -170,20 +180,22 @@ function generateLinkSchema(
}
function generateRelatedSchema(
linkColumn: FieldSchema,
linkColumn: RelationshipFieldMetadata,
table: Table,
relatedTable: Table,
columnName: string
) {
// generate column for other table
const relatedSchema = cloneDeep(linkColumn)
const isMany2Many =
linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
// swap them from the main link
if (linkColumn.foreignKey) {
if (!isMany2Many && linkColumn.foreignKey) {
relatedSchema.fieldName = linkColumn.foreignKey
relatedSchema.foreignKey = linkColumn.fieldName
}
// is many to many
else {
else if (isMany2Many) {
// don't need to copy through, already got it
relatedSchema.fieldName = linkColumn.throughTo
relatedSchema.throughTo = linkColumn.throughFrom
@ -197,8 +209,8 @@ function generateRelatedSchema(
table.schema[columnName] = relatedSchema
}
function isRelationshipSetup(column: FieldSchema) {
return column.foreignKey || column.through
function isRelationshipSetup(column: RelationshipFieldMetadata) {
return (column as any).foreignKey || (column as any).through
}
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
@ -257,14 +269,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
continue
}
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
table => table._id === schemaTableId
)
if (!relatedTable) {
continue
}
const relatedColumnName = schema.fieldName!
const relationType = schema.relationshipType!
const relationType = schema.relationshipType
if (relationType === RelationshipType.MANY_TO_MANY) {
const junctionTable = generateManyLinkSchema(
datasource,
@ -374,10 +387,12 @@ export async function destroy(ctx: UserCtx) {
return tableToDelete
}
export async function bulkImport(ctx: UserCtx) {
export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows }: { rows: unknown } = ctx.request.body
const schema: unknown = table.schema
const { rows } = ctx.request.body
const schema = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) {
ctx.throw(400, "Provided data import information is invalid.")

View file

@ -8,6 +8,8 @@ import {
import { isExternalTable, isSQL } from "../../../integrations/utils"
import { events } from "@budibase/backend-core"
import {
BulkImportRequest,
BulkImportResponse,
FetchTablesResponse,
SaveTableRequest,
SaveTableResponse,
@ -18,7 +20,7 @@ import {
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep } from "lodash"
import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) {
@ -97,9 +99,17 @@ export async function destroy(ctx: UserCtx) {
builderSocket?.emitTableDeletion(ctx, deletedTable)
}
export async function bulkImport(ctx: UserCtx) {
export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const tableId = ctx.params.tableId
await pickApi({ tableId }).bulkImport(ctx)
let tableBefore = await sdk.tables.getTable(tableId)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to
// think about events for bulk items

View file

@ -10,6 +10,8 @@ import {
} from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula"
import {
BulkImportRequest,
BulkImportResponse,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
@ -78,10 +80,10 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
// make sure that types don't change of a column, have to remove
// the column if you want to change the type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
for (const propKey of Object.keys(tableToSave.schema)) {
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) {
oldColumn.type = FieldTypes.AUTO
oldTable.schema[propKey].type = FieldTypes.AUTO
}
}
}
@ -206,7 +208,9 @@ export async function destroy(ctx: any) {
return tableToDelete
}
export async function bulkImport(ctx: any) {
export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body
await handleDataImport(ctx.user, table, rows, identifierFields)

View file

@ -20,7 +20,13 @@ import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { events, context } from "@budibase/backend-core"
import { ContextUser, Datasource, SourceName, Table } from "@budibase/types"
import {
ContextUser,
Datasource,
Row,
SourceName,
Table,
} from "@budibase/types"
export async function clearColumns(table: any, columnNames: any) {
const db = context.getAppDB()
@ -144,12 +150,12 @@ export async function importToRows(
}
export async function handleDataImport(
user: any,
table: any,
rows: any,
user: ContextUser,
table: Table,
rows: Row[],
identifierFields: Array<string> = []
) {
const schema: unknown = table.schema
const schema = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) {
return table

View file

@ -43,3 +43,7 @@ export enum Format {
export function isFormat(format: any): format is Format {
return Object.values(Format).includes(format as Format)
}
export function parseCsvExport<T>(value: string) {
return JSON.parse(value?.replace(/'/g, '"')) as T
}

View file

@ -6,6 +6,8 @@ import * as setup from "./utilities"
import { context, InternalTable, roles, tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import {
AutoFieldSubTypes,
FieldSchema,
FieldType,
FieldTypeSubtypes,
MonthlyQuotaName,
@ -171,7 +173,7 @@ describe.each([
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: "autoID",
subtype: AutoFieldSubTypes.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
@ -272,27 +274,27 @@ describe.each([
isInternal &&
it("row values are coerced", async () => {
const str = {
const str: FieldSchema = {
type: FieldType.STRING,
name: "str",
constraints: { type: "string", presence: false },
}
const attachment = {
const attachment: FieldSchema = {
type: FieldType.ATTACHMENT,
name: "attachment",
constraints: { type: "array", presence: false },
}
const bool = {
const bool: FieldSchema = {
type: FieldType.BOOLEAN,
name: "boolean",
constraints: { type: "boolean", presence: false },
}
const number = {
const number: FieldSchema = {
type: FieldType.NUMBER,
name: "str",
constraints: { type: "number", presence: false },
}
const datetime = {
const datetime: FieldSchema = {
type: FieldType.DATETIME,
name: "datetime",
constraints: {
@ -301,7 +303,7 @@ describe.each([
datetime: { earliest: "", latest: "" },
},
}
const arrayField = {
const arrayField: FieldSchema = {
type: FieldType.ARRAY,
constraints: {
type: "array",
@ -311,8 +313,7 @@ describe.each([
name: "Sample Tags",
sortable: false,
}
const optsField = {
fieldName: "Sample Opts",
const optsField: FieldSchema = {
name: "Sample Opts",
type: FieldType.OPTIONS,
constraints: {
@ -1534,7 +1535,7 @@ describe.each([
describe.each([
[
"relationship fields",
() => ({
(): Record<string, FieldSchema> => ({
user: {
name: "user",
relationshipType: RelationshipType.ONE_TO_MANY,
@ -1563,27 +1564,25 @@ describe.each([
],
[
"bb reference fields",
() => ({
(): Record<string, FieldSchema> => ({
user: {
name: "user",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
},
users: {
name: "users",
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
relationshipType: RelationshipType.MANY_TO_MANY,
subtype: FieldTypeSubtypes.BB_REFERENCE.USERS,
},
}),
() => config.createUser(),
(row: Row) => ({
_id: row._id,
primaryDisplay: row.email,
email: row.email,
firstName: row.firstName,
lastName: row.lastName,
primaryDisplay: row.email,
}),
],
])("links - %s", (__, relSchema, dataGenerator, resultMapper) => {

View file

@ -1,6 +1,12 @@
import { generator } from "@budibase/backend-core/tests"
import { events, context } from "@budibase/backend-core"
import { FieldType, Table, ViewCalculation } from "@budibase/types"
import {
FieldType,
SaveTableRequest,
RelationshipType,
Table,
ViewCalculation,
AutoFieldSubTypes,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
const { basicTable } = setup.structures
@ -47,7 +53,7 @@ describe("/tables", () => {
})
it("creates a table via data import", async () => {
const table = basicTable()
const table: SaveTableRequest = basicTable()
table.rows = [{ name: "test-name", description: "test-desc" }]
const res = await createTable(table)
@ -182,6 +188,36 @@ describe("/tables", () => {
1
)
})
it("should update Auto ID field after bulk import", async () => {
const table = await config.createTable({
name: "TestTable",
type: "table",
schema: {
autoId: {
name: "id",
type: FieldType.NUMBER,
subtype: AutoFieldSubTypes.AUTO_ID,
autocolumn: true,
constraints: {
type: "number",
presence: false,
},
},
},
})
let row = await config.api.row.save(table._id!, {})
expect(row.autoId).toEqual(1)
await config.api.row.bulkImport(table._id!, {
rows: [{ autoId: 2 }],
identifierFields: [],
})
row = await config.api.row.save(table._id!, {})
expect(row.autoId).toEqual(3)
})
})
describe("fetch", () => {
@ -352,9 +388,10 @@ describe("/tables", () => {
},
TestTable: {
type: FieldType.LINK,
relationshipType: RelationshipType.ONE_TO_MANY,
name: "TestTable",
fieldName: "TestTable",
tableId: testTable._id,
tableId: testTable._id!,
constraints: {
type: "array",
},

View file

@ -1,6 +1,11 @@
import { objectStore, roles, constants } from "@budibase/backend-core"
import { FieldType as FieldTypes } from "@budibase/types"
export { FieldType as FieldTypes, RelationshipType } from "@budibase/types"
export {
FieldType as FieldTypes,
RelationshipType,
AutoFieldSubTypes,
FormulaTypes,
} from "@budibase/types"
export enum FilterTypes {
STRING = "string",
@ -39,11 +44,6 @@ export const SwitchableTypes = CanSwitchTypes.reduce((prev, current) =>
prev ? prev.concat(current) : current
)
export enum FormulaTypes {
STATIC = "static",
DYNAMIC = "dynamic",
}
export enum AuthTypes {
APP = "app",
BUILDER = "builder",
@ -132,14 +132,6 @@ export const USERS_TABLE_SCHEMA = {
primaryDisplay: "email",
}
export enum AutoFieldSubTypes {
CREATED_BY = "createdBy",
CREATED_AT = "createdAt",
UPDATED_BY = "updatedBy",
UPDATED_AT = "updatedAt",
AUTO_ID = "autoID",
}
export enum AutoFieldDefaultNames {
CREATED_BY = "Created By",
CREATED_AT = "Created At",

View file

@ -7,7 +7,13 @@ import { employeeImport } from "./employeeImport"
import { jobsImport } from "./jobsImport"
import { expensesImport } from "./expensesImport"
import { db as dbCore } from "@budibase/backend-core"
import { Table, Row, RelationshipType } from "@budibase/types"
import {
Table,
Row,
RelationshipType,
FieldType,
TableSchema,
} from "@budibase/types"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
@ -28,7 +34,11 @@ export const DEFAULT_BB_DATASOURCE = defaultDatasource
function syncLastIds(table: Table, rowCount: number) {
Object.keys(table.schema).forEach(key => {
const entry = table.schema[key]
if (entry.autocolumn && entry.subtype == "autoID") {
if (
entry.autocolumn &&
entry.type === FieldType.NUMBER &&
entry.subtype == AutoFieldSubTypes.AUTO_ID
) {
entry.lastID = rowCount
}
})
@ -42,7 +52,7 @@ async function tableImport(table: Table, data: Row[]) {
}
// AUTO COLUMNS
const AUTO_COLUMNS = {
const AUTO_COLUMNS: TableSchema = {
"Created At": {
name: "Created At",
type: FieldTypes.DATETIME,

View file

@ -7,7 +7,9 @@ import LinkDocument from "./LinkDocument"
import {
Database,
FieldSchema,
FieldType,
LinkDocumentValue,
RelationshipFieldMetadata,
RelationshipType,
Row,
Table,
@ -133,7 +135,10 @@ class LinkController {
* Given the link field of this table, and the link field of the linked table, this makes sure
* the state of relationship type is accurate on both.
*/
handleRelationshipType(linkerField: FieldSchema, linkedField: FieldSchema) {
handleRelationshipType(
linkerField: RelationshipFieldMetadata,
linkedField: RelationshipFieldMetadata
) {
if (
!linkerField.relationshipType ||
linkerField.relationshipType === RelationshipType.MANY_TO_MANY
@ -183,7 +188,7 @@ class LinkController {
// if 1:N, ensure that this ID is not already attached to another record
const linkedTable = await this._db.get<Table>(field.tableId)
const linkedSchema = linkedTable.schema[field.fieldName!]
const linkedSchema = linkedTable.schema[field.fieldName]
// We need to map the global users to metadata in each app for relationships
if (field.tableId === InternalTables.USER_METADATA) {
@ -200,7 +205,10 @@ class LinkController {
// iterate through the link IDs in the row field, see if any don't exist already
for (let linkId of rowField) {
if (linkedSchema?.relationshipType === RelationshipType.ONE_TO_MANY) {
if (
linkedSchema?.type === FieldType.LINK &&
linkedSchema?.relationshipType === RelationshipType.ONE_TO_MANY
) {
let links = (
(await getLinkDocuments({
tableId: field.tableId,
@ -291,7 +299,7 @@ class LinkController {
*/
async removeFieldFromTable(fieldName: string) {
let oldTable = this._oldTable
let field = oldTable?.schema[fieldName] as FieldSchema
let field = oldTable?.schema[fieldName] as RelationshipFieldMetadata
const linkDocs = await this.getTableLinkDocs()
let toDelete = linkDocs.filter(linkDoc => {
let correctFieldName =
@ -308,12 +316,19 @@ class LinkController {
}
})
)
// remove schema from other table
let linkedTable = await this._db.get<Table>(field.tableId)
if (field.fieldName) {
delete linkedTable.schema[field.fieldName]
try {
// remove schema from other table, if it exists
let linkedTable = await this._db.get<Table>(field.tableId)
if (field.fieldName) {
delete linkedTable.schema[field.fieldName]
}
await this._db.put(linkedTable)
} catch (error: any) {
// ignore missing to ensure broken relationship columns can be deleted
if (error.statusCode !== 404) {
throw error
}
}
await this._db.put(linkedTable)
}
/**
@ -344,9 +359,9 @@ class LinkController {
name: field.fieldName,
type: FieldTypes.LINK,
// these are the props of the table that initiated the link
tableId: table._id,
tableId: table._id!,
fieldName: fieldName,
})
} as RelationshipFieldMetadata)
// update table schema after checking relationship types
schema[fieldName] = fields.linkerField

View file

@ -1,13 +1,9 @@
import { ViewName, getQueryIndex } from "../utils"
import { ViewName, getQueryIndex, isRelationshipColumn } from "../utils"
import { FieldTypes } from "../../constants"
import { createLinkView } from "../views/staticViews"
import { context, logging } from "@budibase/backend-core"
import {
FieldSchema,
LinkDocument,
LinkDocumentValue,
Table,
} from "@budibase/types"
import { LinkDocument, LinkDocumentValue, Table } from "@budibase/types"
export { createLinkView } from "../views/staticViews"
/**
@ -93,7 +89,7 @@ export function getUniqueByProp(array: any[], prop: string) {
export function getLinkedTableIDs(table: Table) {
return Object.values(table.schema)
.filter((column: FieldSchema) => column.type === FieldTypes.LINK)
.filter(isRelationshipColumn)
.map(column => column.tableId)
}
@ -113,7 +109,7 @@ export async function getLinkedTable(id: string, tables: Table[]) {
export function getRelatedTableForField(table: Table, fieldName: string) {
// look to see if it's on the table, straight in the schema
const field = table.schema[fieldName]
if (field != null) {
if (field?.type === FieldTypes.LINK) {
return field.tableId
}
for (let column of Object.values(table.schema)) {

View file

@ -233,4 +233,19 @@ describe("test the link controller", () => {
}
await config.updateTable(table)
})
it("should be able to remove a linked field from a table, even if the linked table does not exist", async () => {
await createLinkedRow()
await createLinkedRow("link2")
table1.schema["link"].tableId = "not_found"
const controller = await createLinkController(table1, null, table1)
await context.doInAppContext(appId, async () => {
let before = await controller.getTableLinkDocs()
await controller.removeFieldFromTable("link")
let after = await controller.getTableLinkDocs()
expect(before.length).toEqual(2)
// shouldn't delete the other field
expect(after.length).toEqual(1)
})
})
})

View file

@ -1,6 +1,12 @@
import newid from "./newid"
import { db as dbCore } from "@budibase/backend-core"
import { DocumentType, VirtualDocumentType } from "@budibase/types"
import {
DocumentType,
FieldSchema,
RelationshipFieldMetadata,
VirtualDocumentType,
} from "@budibase/types"
import { FieldTypes } from "../constants"
export { DocumentType, VirtualDocumentType } from "@budibase/types"
type Optional = string | null
@ -307,3 +313,9 @@ export function extractViewInfoFromID(viewId: string) {
tableId: res!.groups!["tableId"],
}
}
export function isRelationshipColumn(
column: FieldSchema
): column is RelationshipFieldMetadata {
return column.type === FieldTypes.LINK
}

View file

@ -279,7 +279,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@ -386,7 +387,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@ -495,7 +497,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */

View file

@ -111,7 +111,7 @@ describe("postgres integrations", () => {
fieldName: oneToManyRelationshipInfo.fieldName,
name: "oneToManyRelation",
relationshipType: RelationshipType.ONE_TO_MANY,
tableId: oneToManyRelationshipInfo.table._id,
tableId: oneToManyRelationshipInfo.table._id!,
main: true,
},
manyToOneRelation: {
@ -122,7 +122,7 @@ describe("postgres integrations", () => {
fieldName: manyToOneRelationshipInfo.fieldName,
name: "manyToOneRelation",
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: manyToOneRelationshipInfo.table._id,
tableId: manyToOneRelationshipInfo.table._id!,
main: true,
},
manyToManyRelation: {
@ -133,7 +133,7 @@ describe("postgres integrations", () => {
fieldName: manyToManyRelationshipInfo.fieldName,
name: "manyToManyRelation",
relationshipType: RelationshipType.MANY_TO_MANY,
tableId: manyToManyRelationshipInfo.table._id,
tableId: manyToManyRelationshipInfo.table._id!,
main: true,
},
},
@ -250,6 +250,7 @@ describe("postgres integrations", () => {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
},
sourceId: postgresDatasource._id,

View file

@ -1,9 +1,17 @@
import { Knex, knex } from "knex"
import { Operation, QueryJson, RenameColumn, Table } from "@budibase/types"
import {
FieldSubtype,
NumberFieldMetadata,
Operation,
QueryJson,
RenameColumn,
Table,
} from "@budibase/types"
import { breakExternalTableId } from "../utils"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
import { FieldTypes, RelationshipType } from "../../constants"
import { utils } from "@budibase/shared-core"
function generateSchema(
schema: CreateTableBuilder,
@ -15,7 +23,7 @@ function generateSchema(
let primaryKey = table && table.primary ? table.primary[0] : null
const columns = Object.values(table.schema)
// all columns in a junction table will be meta
let metaCols = columns.filter(col => col.meta)
let metaCols = columns.filter(col => (col as NumberFieldMetadata).meta)
let isJunction = metaCols.length === columns.length
// can't change primary once it's set for now
if (primaryKey && !oldTable && !isJunction) {
@ -25,7 +33,9 @@ function generateSchema(
}
// check if any columns need to be added
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
const foreignKeys = Object.values(table.schema).map(
col => (col as any).foreignKey
)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
@ -41,9 +51,21 @@ function generateSchema(
case FieldTypes.OPTIONS:
case FieldTypes.LONGFORM:
case FieldTypes.BARCODEQR:
case FieldTypes.BB_REFERENCE:
schema.text(key)
break
case FieldTypes.BB_REFERENCE:
const subtype = column.subtype as FieldSubtype
switch (subtype) {
case FieldSubtype.USER:
schema.text(key)
break
case FieldSubtype.USERS:
schema.json(key)
break
default:
throw utils.unreachable(subtype)
}
break
case FieldTypes.NUMBER:
// if meta is specified then this is a junction table entry
if (column.meta && column.meta.toKey && column.meta.toTable) {

View file

@ -249,7 +249,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
)
}
private internalConvertType(column: OracleColumn): { type: FieldTypes } {
private internalConvertType(column: OracleColumn) {
if (this.isBooleanType(column)) {
return { type: FieldTypes.BOOLEAN }
}
@ -307,6 +307,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
},
...this.internalConvertType(oracleColumn),
}
table.schema[columnName] = fieldSchema
}

View file

@ -1,7 +1,12 @@
import { SqlQuery, Table, SearchFilters, Datasource } from "@budibase/types"
import {
SqlQuery,
Table,
SearchFilters,
Datasource,
FieldType,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import {
FieldTypes,
BuildSchemaErrors,
InvalidColumns,
NoEmptyFilterStrings,
@ -13,57 +18,57 @@ const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ")
const SQL_NUMBER_TYPE_MAP = {
integer: FieldTypes.NUMBER,
int: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
real: FieldTypes.NUMBER,
float: FieldTypes.NUMBER,
numeric: FieldTypes.NUMBER,
mediumint: FieldTypes.NUMBER,
dec: FieldTypes.NUMBER,
double: FieldTypes.NUMBER,
fixed: FieldTypes.NUMBER,
"double precision": FieldTypes.NUMBER,
number: FieldTypes.NUMBER,
binary_float: FieldTypes.NUMBER,
binary_double: FieldTypes.NUMBER,
money: FieldTypes.NUMBER,
smallmoney: FieldTypes.NUMBER,
integer: FieldType.NUMBER,
int: FieldType.NUMBER,
decimal: FieldType.NUMBER,
smallint: FieldType.NUMBER,
real: FieldType.NUMBER,
float: FieldType.NUMBER,
numeric: FieldType.NUMBER,
mediumint: FieldType.NUMBER,
dec: FieldType.NUMBER,
double: FieldType.NUMBER,
fixed: FieldType.NUMBER,
"double precision": FieldType.NUMBER,
number: FieldType.NUMBER,
binary_float: FieldType.NUMBER,
binary_double: FieldType.NUMBER,
money: FieldType.NUMBER,
smallmoney: FieldType.NUMBER,
}
const SQL_DATE_TYPE_MAP = {
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
datetime: FieldTypes.DATETIME,
smalldatetime: FieldTypes.DATETIME,
date: FieldTypes.DATETIME,
timestamp: FieldType.DATETIME,
time: FieldType.DATETIME,
datetime: FieldType.DATETIME,
smalldatetime: FieldType.DATETIME,
date: FieldType.DATETIME,
}
const SQL_DATE_ONLY_TYPES = ["date"]
const SQL_TIME_ONLY_TYPES = ["time"]
const SQL_STRING_TYPE_MAP = {
varchar: FieldTypes.STRING,
char: FieldTypes.STRING,
nchar: FieldTypes.STRING,
nvarchar: FieldTypes.STRING,
ntext: FieldTypes.STRING,
enum: FieldTypes.STRING,
blob: FieldTypes.STRING,
long: FieldTypes.STRING,
text: FieldTypes.STRING,
varchar: FieldType.STRING,
char: FieldType.STRING,
nchar: FieldType.STRING,
nvarchar: FieldType.STRING,
ntext: FieldType.STRING,
enum: FieldType.STRING,
blob: FieldType.STRING,
long: FieldType.STRING,
text: FieldType.STRING,
}
const SQL_BOOLEAN_TYPE_MAP = {
boolean: FieldTypes.BOOLEAN,
bit: FieldTypes.BOOLEAN,
tinyint: FieldTypes.BOOLEAN,
boolean: FieldType.BOOLEAN,
bit: FieldType.BOOLEAN,
tinyint: FieldType.BOOLEAN,
}
const SQL_MISC_TYPE_MAP = {
json: FieldTypes.JSON,
bigint: FieldTypes.BIGINT,
json: FieldType.JSON,
bigint: FieldType.BIGINT,
}
const SQL_TYPE_MAP = {
@ -154,7 +159,7 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
}
export function convertSqlType(type: string) {
let foundType = FieldTypes.STRING
let foundType = FieldType.STRING
const lcType = type.toLowerCase()
let matchingTypes = []
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
@ -169,7 +174,7 @@ export function convertSqlType(type: string) {
}).internal
}
const schema: any = { type: foundType }
if (foundType === FieldTypes.DATETIME) {
if (foundType === FieldType.DATETIME) {
schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lcType)
schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lcType)
}
@ -212,7 +217,7 @@ export function shouldCopyRelationship(
tableIds: string[]
) {
return (
column.type === FieldTypes.LINK &&
column.type === FieldType.LINK &&
column.tableId &&
tableIds.includes(column.tableId)
)
@ -230,22 +235,23 @@ export function shouldCopySpecialColumn(
column: { type: string },
fetchedColumn: { type: string } | undefined
) {
const isFormula = column.type === FieldTypes.FORMULA
const isFormula = column.type === FieldType.FORMULA
const specialTypes = [
FieldTypes.OPTIONS,
FieldTypes.LONGFORM,
FieldTypes.ARRAY,
FieldTypes.FORMULA,
FieldType.OPTIONS,
FieldType.LONGFORM,
FieldType.ARRAY,
FieldType.FORMULA,
FieldType.BB_REFERENCE,
]
// column has been deleted, remove - formulas will never exist, always copy
if (!isFormula && column && !fetchedColumn) {
return false
}
const fetchedIsNumber =
!fetchedColumn || fetchedColumn.type === FieldTypes.NUMBER
!fetchedColumn || fetchedColumn.type === FieldType.NUMBER
return (
specialTypes.indexOf(column.type as FieldTypes) !== -1 ||
(fetchedIsNumber && column.type === FieldTypes.BOOLEAN)
specialTypes.indexOf(column.type as FieldType) !== -1 ||
(fetchedIsNumber && column.type === FieldType.BOOLEAN)
)
}

View file

@ -16,6 +16,7 @@ import { cleanExportRows } from "../utils"
import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "../search"
import { HTTPError, db } from "@budibase/backend-core"
import { searchInputMapping } from "./utils"
import pick from "lodash/pick"
import { outputProcessing } from "../../../../utilities/rowProcessor"
@ -50,7 +51,10 @@ export async function search(options: SearchParams) {
[params.sort]: { direction },
}
}
try {
const table = await sdk.tables.getTable(tableId)
options = searchInputMapping(table, options)
let rows = (await handleRequest(Operation.READ, tableId, {
filters: query,
sort,
@ -76,7 +80,6 @@ export async function search(options: SearchParams) {
rows = rows.map((r: any) => pick(r, fields))
}
const table = await sdk.tables.getTable(tableId)
rows = await outputProcessing(table, rows, { preserveLinks: true })
// need wrapper object for bookmarks etc when paginating

View file

@ -29,6 +29,7 @@ import {
} from "../../../../api/controllers/view/utils"
import sdk from "../../../../sdk"
import { ExportRowsParams, ExportRowsResult } from "../search"
import { searchInputMapping } from "./utils"
import pick from "lodash/pick"
export async function search(options: SearchParams) {
@ -47,9 +48,9 @@ export async function search(options: SearchParams) {
disableEscaping: options.disableEscaping,
}
let table
let table = await sdk.tables.getTable(tableId)
options = searchInputMapping(table, options)
if (params.sort && !params.sortType) {
table = await sdk.tables.getTable(tableId)
const schema = table.schema
const sortField = schema[params.sort]
params.sortType = sortField.type === "number" ? "number" : "string"
@ -68,7 +69,6 @@ export async function search(options: SearchParams) {
if (tableId === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows)
}
table = table || (await sdk.tables.getTable(tableId))
if (options.fields) {
const fields = [...options.fields, ...db.CONSTANT_INTERNAL_ROW_COLS]

View file

@ -0,0 +1,92 @@
import { searchInputMapping } from "../utils"
import { db as dbCore } from "@budibase/backend-core"
import {
FieldType,
FieldTypeSubtypes,
Table,
SearchParams,
} from "@budibase/types"
const tableId = "ta_a"
const tableWithUserCol: Table = {
_id: tableId,
name: "table",
schema: {
user: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
},
},
}
const tableWithUsersCol: Table = {
_id: tableId,
name: "table",
schema: {
user: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USERS,
},
},
}
describe.each([tableWithUserCol, tableWithUsersCol])(
"searchInputMapping",
col => {
const globalUserId = dbCore.generateGlobalUserID()
const userMetadataId = dbCore.generateUserMetadataID(globalUserId)
it("should be able to map ro_ to global user IDs", () => {
const params: SearchParams = {
tableId,
query: {
equal: {
"1:user": userMetadataId,
},
},
}
const output = searchInputMapping(col, params)
expect(output.query.equal!["1:user"]).toBe(globalUserId)
})
it("should handle array of user IDs", () => {
const params: SearchParams = {
tableId,
query: {
oneOf: {
"1:user": [userMetadataId, globalUserId],
},
},
}
const output = searchInputMapping(col, params)
expect(output.query.oneOf!["1:user"]).toStrictEqual([
globalUserId,
globalUserId,
])
})
it("shouldn't change any other input", () => {
const email = "test@test.com"
const params: SearchParams = {
tableId,
query: {
equal: {
"1:user": email,
},
},
}
const output = searchInputMapping(col, params)
expect(output.query.equal!["1:user"]).toBe(email)
})
it("shouldn't error if no query supplied", () => {
const params: any = {
tableId,
}
const output = searchInputMapping(col, params)
expect(output.query).toBeUndefined()
})
}
)

View file

@ -0,0 +1,84 @@
import {
FieldType,
FieldTypeSubtypes,
SearchParams,
Table,
DocumentType,
SEPARATOR,
FieldSubtype,
} from "@budibase/types"
import { db as dbCore } from "@budibase/backend-core"
import { utils } from "@budibase/shared-core"
function findColumnInQueries(
column: string,
options: SearchParams,
callback: (filter: any) => any
) {
if (!options.query) {
return
}
for (let filterBlock of Object.values(options.query)) {
if (typeof filterBlock !== "object") {
continue
}
for (let [key, filter] of Object.entries(filterBlock)) {
if (key.endsWith(column)) {
filterBlock[key] = callback(filter)
}
}
}
}
function userColumnMapping(column: string, options: SearchParams) {
findColumnInQueries(column, options, (filterValue: any): any => {
const isArray = Array.isArray(filterValue),
isString = typeof filterValue === "string"
if (!isString && !isArray) {
return filterValue
}
const processString = (input: string) => {
const rowPrefix = DocumentType.ROW + SEPARATOR
if (input.startsWith(rowPrefix)) {
return dbCore.getGlobalIDFromUserMetadataID(input)
} else {
return input
}
}
if (isArray) {
return filterValue.map(el => {
if (typeof el === "string") {
return processString(el)
} else {
return el
}
})
} else {
return processString(filterValue)
}
})
}
// maps through the search parameters to check if any of the inputs are invalid
// based on the table schema, and converts them to something that is valid.
export function searchInputMapping(table: Table, options: SearchParams) {
if (!table?.schema) {
return options
}
for (let [key, column] of Object.entries(table.schema)) {
switch (column.type) {
case FieldType.BB_REFERENCE:
const subtype = column.subtype as FieldSubtype
switch (subtype) {
case FieldSubtype.USER:
case FieldSubtype.USERS:
userColumnMapping(key, options)
break
default:
utils.unreachable(subtype)
}
break
}
}
return options
}
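A minimal sketch of how this mapping is used: the internal and external row search SDKs above import it from "./utils" and call it just before querying. The table, column name, user ID and the "1:owner" Lucene key are illustrative; the SearchParams and BB reference column shapes follow the test file earlier in this diff.

import { searchInputMapping } from "./utils"
import { db as dbCore } from "@budibase/backend-core"
import { FieldType, FieldTypeSubtypes, SearchParams, Table } from "@budibase/types"

// A table with a single-user BB reference column.
const table: Table = {
  _id: "ta_example",
  name: "example",
  schema: {
    owner: {
      name: "owner",
      type: FieldType.BB_REFERENCE,
      subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
    },
  },
}

// Clients may filter by the app-level user metadata row ID...
const params: SearchParams = {
  tableId: table._id!,
  query: {
    equal: { "1:owner": dbCore.generateUserMetadataID("us_abc123") },
  },
}

// ...and searchInputMapping rewrites it to the global user ID the column actually stores.
const mapped = searchInputMapping(table, params)
// mapped.query.equal!["1:owner"] === "us_abc123"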

View file

@ -1,10 +1,11 @@
import cloneDeep from "lodash/cloneDeep"
import validateJs from "validate.js"
import { FieldType, Row, Table, TableSchema } from "@budibase/types"
import { Row, Table, TableSchema } from "@budibase/types"
import { FieldTypes } from "../../../constants"
import { makeExternalQuery } from "../../../integrations/base/query"
import { Format } from "../../../api/controllers/view/exporters"
import sdk from "../.."
import { isRelationshipColumn } from "../../../db/utils"
export async function getDatasourceAndQuery(json: any) {
const datasourceId = json.endpoint.datasourceId
@ -50,10 +51,10 @@ export function cleanExportRows(
}
function isForeignKey(key: string, table: Table) {
const relationships = Object.values(table.schema).filter(
column => column.type === FieldType.LINK
const relationships = Object.values(table.schema).filter(isRelationshipColumn)
return relationships.some(
relationship => (relationship as any).foreignKey === key
)
return relationships.some(relationship => relationship.foreignKey === key)
}
export async function validate({

View file

@ -1,6 +1,6 @@
import { populateExternalTableSchemas } from "../validation"
import { cloneDeep } from "lodash/fp"
import { Datasource, Table } from "@budibase/types"
import { AutoReason, Datasource, Table } from "@budibase/types"
import { isEqual } from "lodash"
const SCHEMA = {
@ -109,7 +109,7 @@ describe("validation and update of external table schemas", () => {
const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
const foreignKey = getForeignKeyColumn(response)
expect(foreignKey.autocolumn).toBe(true)
expect(foreignKey.autoReason).toBe("foreign_key")
expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY)
noOtherTableChanges(response)
})

View file

@ -1,11 +1,9 @@
import {
AutoReason,
Datasource,
FieldSchema,
FieldType,
RelationshipType,
} from "@budibase/types"
import { FieldTypes } from "../../../constants"
function checkForeignKeysAreAutoColumns(datasource: Datasource) {
if (!datasource.entities) {
@ -15,10 +13,11 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
// make sure all foreign key columns are marked as auto columns
const foreignKeys: { tableId: string; key: string }[] = []
for (let table of tables) {
const relationships = Object.values(table.schema).filter(
column => column.type === FieldType.LINK
)
relationships.forEach(relationship => {
Object.values(table.schema).forEach(column => {
if (column.type !== FieldType.LINK) {
return
}
const relationship = column
if (relationship.relationshipType === RelationshipType.MANY_TO_MANY) {
const tableId = relationship.through!
foreignKeys.push({ key: relationship.throughTo!, tableId })
@ -36,7 +35,7 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
}
// now make sure schemas are all accurate
for (let table of tables) {
for (const table of tables) {
for (let column of Object.values(table.schema)) {
const shouldBeForeign = foreignKeys.find(
options => options.tableId === table._id && options.key === column.name

View file

@ -1,5 +1,11 @@
import _ from "lodash"
import { FieldType, Table, TableSchema, ViewV2 } from "@budibase/types"
import {
FieldSchema,
FieldType,
Table,
TableSchema,
ViewV2,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests"
import { enrichSchema, syncSchema } from ".."
@ -316,7 +322,7 @@ describe("table sdk", () => {
...basicView,
}
const newTableSchema = {
const newTableSchema: TableSchema = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
@ -403,7 +409,7 @@ describe("table sdk", () => {
},
}
const newTableSchema = {
const newTableSchema: TableSchema = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
@ -531,7 +537,7 @@ describe("table sdk", () => {
id: {
...basicTable.schema.id,
type: FieldType.NUMBER,
},
} as FieldSchema,
},
undefined
)

View file

@ -54,6 +54,7 @@ import {
FieldType,
RelationshipType,
CreateViewRequest,
RelationshipFieldMetadata,
} from "@budibase/types"
import API from "./api"
@ -584,10 +585,10 @@ class TestConfiguration {
tableConfig.schema[link] = {
type: FieldType.LINK,
fieldName: link,
tableId: this.table._id,
tableId: this.table._id!,
name: link,
relationshipType,
}
} as RelationshipFieldMetadata
}
if (this.datasource && !tableConfig.sourceId) {

View file

@ -4,6 +4,8 @@ import {
Row,
ValidateResponse,
ExportRowsRequest,
BulkImportRequest,
BulkImportResponse,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
@ -123,6 +125,19 @@ export class RowAPI extends TestAPI {
return request
}
bulkImport = async (
tableId: string,
body: BulkImportRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<BulkImportResponse> => {
let request = this.request
.post(`/api/tables/${tableId}/import`)
.send(body)
.set(this.config.defaultHeaders())
.expect(expectStatus)
return (await request).body
}
search = async (
sourceId: string,
{ expectStatus } = { expectStatus: 200 }

View file

@ -1,13 +1,15 @@
import { cache } from "@budibase/backend-core"
import { cache, db as dbCore } from "@budibase/backend-core"
import { utils } from "@budibase/shared-core"
import { FieldSubtype } from "@budibase/types"
import { FieldSubtype, DocumentType, SEPARATOR } from "@budibase/types"
import { InvalidBBRefError } from "./errors"
const ROW_PREFIX = DocumentType.ROW + SEPARATOR
export async function processInputBBReferences(
value: string | string[] | { _id: string } | { _id: string }[],
subtype: FieldSubtype
): Promise<string | null> {
const referenceIds: string[] = []
): Promise<string | string[] | null> {
let referenceIds: string[] = []
if (Array.isArray(value)) {
referenceIds.push(
@ -26,35 +28,52 @@ export async function processInputBBReferences(
)
}
// make sure all reference IDs are correct global user IDs
// they may be user metadata references (start with row prefix)
// and these need to be converted to global IDs
referenceIds = referenceIds.map(id => {
if (id?.startsWith(ROW_PREFIX)) {
return dbCore.getGlobalIDFromUserMetadataID(id)
} else {
return id
}
})
switch (subtype) {
case FieldSubtype.USER:
case FieldSubtype.USERS:
const { notFoundIds } = await cache.user.getUsers(referenceIds)
if (notFoundIds?.length) {
throw new InvalidBBRefError(notFoundIds[0], FieldSubtype.USER)
}
break
if (subtype === FieldSubtype.USERS) {
return referenceIds
}
return referenceIds.join(",") || null
default:
throw utils.unreachable(subtype)
}
return referenceIds.join(",") || null
}
export async function processOutputBBReferences(
value: string,
value: string | string[],
subtype: FieldSubtype
) {
if (typeof value !== "string") {
if (value === null || value === undefined) {
// Already processed or nothing to process
return value || undefined
}
const ids = value.split(",").filter(id => !!id)
const ids =
typeof value === "string" ? value.split(",").filter(id => !!id) : value
switch (subtype) {
case FieldSubtype.USER:
case FieldSubtype.USERS:
const { users } = await cache.user.getUsers(ids)
if (!users.length) {
return undefined

View file

@ -5,7 +5,13 @@ import { ObjectStoreBuckets } from "../../constants"
import { context, db as dbCore, objectStore } from "@budibase/backend-core"
import { InternalTables } from "../../db/utils"
import { TYPE_TRANSFORM_MAP } from "./map"
import { FieldSubtype, Row, RowAttachment, Table } from "@budibase/types"
import {
AutoColumnFieldMetadata,
FieldSubtype,
Row,
RowAttachment,
Table,
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import {
processInputBBReferences,
@ -201,9 +207,14 @@ export async function inputProcessing(
export async function outputProcessing<T extends Row[] | Row>(
table: Table,
rows: T,
opts: { squash?: boolean; preserveLinks?: boolean } = {
opts: {
squash?: boolean
preserveLinks?: boolean
skipBBReferences?: boolean
} = {
squash: true,
preserveLinks: false,
skipBBReferences: false,
}
): Promise<T> {
let safeRows: Row[]
@ -219,10 +230,7 @@ export async function outputProcessing<T extends Row[] | Row>(
? await linkRows.attachFullLinkedDocs(table, safeRows)
: safeRows
// process formulas
enriched = processFormulas(table, enriched, { dynamic: true }) as Row[]
// set the attachments URLs
// process complex types: attachments, bb references...
for (let [property, column] of Object.entries(table.schema)) {
if (column.type === FieldTypes.ATTACHMENT) {
for (let row of enriched) {
@ -233,7 +241,10 @@ export async function outputProcessing<T extends Row[] | Row>(
attachment.url = objectStore.getAppFileUrl(attachment.key)
})
}
} else if (column.type == FieldTypes.BB_REFERENCE) {
} else if (
!opts.skipBBReferences &&
column.type == FieldTypes.BB_REFERENCE
) {
for (let row of enriched) {
row[property] = await processOutputBBReferences(
row[property],
@ -242,6 +253,10 @@ export async function outputProcessing<T extends Row[] | Row>(
}
}
}
// process formulas after the complex types have been processed
enriched = processFormulas(table, enriched, { dynamic: true }) as Row[]
if (opts.squash) {
enriched = (await linkRows.squashLinksToPrimaryDisplay(
table,

View file

@ -154,6 +154,15 @@ describe("bbReferenceProcessor", () => {
expect(result).toEqual(null)
})
it("should convert user metadata IDs to global IDs", async () => {
const userId = _.sample(users)!._id!
const userMetadataId = backendCore.db.generateUserMetadataID(userId)
const result = await config.doInTenant(() =>
processInputBBReferences(userMetadataId, FieldSubtype.USER)
)
expect(result).toBe(userId)
})
})
})

View file

@ -4,10 +4,10 @@ import { FieldSchema, FieldType, RelationshipType } from "@budibase/types"
describe("rowProcessor utility", () => {
describe("fixAutoColumnSubType", () => {
let schema: FieldSchema = {
const schema: FieldSchema = {
name: "",
type: FieldType.LINK,
subtype: "", // missing subtype
subtype: undefined, // missing subtype
icon: "ri-magic-line",
autocolumn: true,
constraints: { type: "array", presence: false },
@ -22,31 +22,31 @@ describe("rowProcessor utility", () => {
expect(fixAutoColumnSubType(schema).subtype).toEqual(
AutoFieldSubTypes.CREATED_BY
)
schema.subtype = ""
schema.subtype = undefined
schema.name = AutoFieldDefaultNames.UPDATED_BY
expect(fixAutoColumnSubType(schema).subtype).toEqual(
AutoFieldSubTypes.UPDATED_BY
)
schema.subtype = ""
schema.subtype = undefined
schema.name = AutoFieldDefaultNames.CREATED_AT
expect(fixAutoColumnSubType(schema).subtype).toEqual(
AutoFieldSubTypes.CREATED_AT
)
schema.subtype = ""
schema.subtype = undefined
schema.name = AutoFieldDefaultNames.UPDATED_AT
expect(fixAutoColumnSubType(schema).subtype).toEqual(
AutoFieldSubTypes.UPDATED_AT
)
schema.subtype = ""
schema.subtype = undefined
schema.name = AutoFieldDefaultNames.AUTO_ID
expect(fixAutoColumnSubType(schema).subtype).toEqual(
AutoFieldSubTypes.AUTO_ID
)
schema.subtype = ""
schema.subtype = undefined
})
it("returns the column if subtype exists", async () => {

View file

@ -5,13 +5,20 @@ import {
FormulaTypes,
} from "../../constants"
import { processStringSync } from "@budibase/string-templates"
import { FieldSchema, Row, Table } from "@budibase/types"
import {
AutoColumnFieldMetadata,
FieldSchema,
Row,
Table,
} from "@budibase/types"
/**
* If the subtype has been lost for any reason this works out what
* subtype the auto column should be.
*/
export function fixAutoColumnSubType(column: FieldSchema) {
export function fixAutoColumnSubType(
column: FieldSchema
): AutoColumnFieldMetadata | FieldSchema {
if (!column.autocolumn || !column.name || column.subtype) {
return column
}
@ -47,9 +54,13 @@ export function processFormulas(
rowArray = rows
}
for (let [column, schema] of Object.entries(table.schema)) {
if (schema.type !== FieldTypes.FORMULA) {
continue
}
const isStatic = schema.formulaType === FormulaTypes.STATIC
if (
schema.type !== FieldTypes.FORMULA ||
schema.formula == null ||
(dynamic && isStatic) ||
(!dynamic && !isStatic)

View file

@ -1,9 +1,13 @@
import { FieldSubtype } from "@budibase/types"
import { FieldTypes } from "../constants"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { ValidColumnNameRegex, utils } from "@budibase/shared-core"
import { db } from "@budibase/backend-core"
import { parseCsvExport } from "../api/controllers/view/exporters"
interface SchemaColumn {
readonly name: string
readonly type: FieldTypes
readonly subtype: FieldSubtype
readonly autocolumn?: boolean
readonly constraints?: {
presence: boolean
@ -77,8 +81,14 @@ export function validate(rows: Rows, schema: Schema): ValidationResults {
rows.forEach(row => {
Object.entries(row).forEach(([columnName, columnData]) => {
const columnType = schema[columnName]?.type
const columnSubtype = schema[columnName]?.subtype
const isAutoColumn = schema[columnName]?.autocolumn
// If the column had an invalid value we don't want to override it
if (results.schemaValidation[columnName] === false) {
return
}
// If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
if (typeof columnType !== "string") {
results.invalidColumns.push(columnName)
@ -112,6 +122,11 @@ export function validate(rows: Rows, schema: Schema): ValidationResults {
isNaN(new Date(columnData).getTime())
) {
results.schemaValidation[columnName] = false
} else if (
columnType === FieldTypes.BB_REFERENCE &&
!isValidBBReference(columnData, columnSubtype)
) {
results.schemaValidation[columnName] = false
} else {
results.schemaValidation[columnName] = true
}
@ -138,6 +153,7 @@ export function parse(rows: Rows, schema: Schema): Rows {
}
const columnType = schema[columnName].type
const columnSubtype = schema[columnName].subtype
if (columnType === FieldTypes.NUMBER) {
// If provided must be a valid number
@ -147,6 +163,23 @@ export function parse(rows: Rows, schema: Schema): Rows {
parsedRow[columnName] = columnData
? new Date(columnData).toISOString()
: columnData
} else if (columnType === FieldTypes.BB_REFERENCE) {
const parsedValues =
!!columnData && parseCsvExport<{ _id: string }[]>(columnData)
if (!parsedValues) {
parsedRow[columnName] = undefined
} else {
switch (columnSubtype) {
case FieldSubtype.USER:
parsedRow[columnName] = parsedValues[0]?._id
break
case FieldSubtype.USERS:
parsedRow[columnName] = parsedValues.map(u => u._id)
break
default:
utils.unreachable(columnSubtype)
}
}
} else {
parsedRow[columnName] = columnData
}
@ -155,3 +188,32 @@ export function parse(rows: Rows, schema: Schema): Rows {
return parsedRow
})
}
function isValidBBReference(
columnData: any,
columnSubtype: FieldSubtype
): boolean {
switch (columnSubtype) {
case FieldSubtype.USER:
case FieldSubtype.USERS:
if (typeof columnData !== "string") {
return false
}
const userArray = parseCsvExport<{ _id: string }[]>(columnData)
if (!Array.isArray(userArray)) {
return false
}
if (columnSubtype === FieldSubtype.USER && userArray.length > 1) {
return false
}
const containsWrongId = userArray.find(
user => !db.isGlobalUserID(user._id)
)
return !containsWrongId
default:
throw utils.unreachable(columnSubtype)
}
}

View file

@ -6,6 +6,7 @@ import {
SearchFilter,
SearchQuery,
SearchQueryFields,
FieldSubtype,
} from "@budibase/types"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
import { deepGet } from "./helpers"
@ -14,10 +15,9 @@ const HBS_REGEX = /{{([^{].*?)}}/g
/**
* Returns the valid operator options for a certain data type
* @param type the data type
*/
export const getValidOperatorsForType = (
type: FieldType,
fieldType: { type: FieldType; subtype?: FieldSubtype },
field: string,
datasource: Datasource & { tableId: any } // TODO: is this table id ever populated?
) => {
@ -44,6 +44,7 @@ export const getValidOperatorsForType = (
value: string
label: string
}[] = []
const { type, subtype } = fieldType
if (type === FieldType.STRING) {
ops = stringOps
} else if (type === FieldType.NUMBER || type === FieldType.BIGINT) {
@ -60,8 +61,10 @@ export const getValidOperatorsForType = (
ops = numOps
} else if (type === FieldType.FORMULA) {
ops = stringOps.concat([Op.MoreThan, Op.LessThan])
} else if (type === FieldType.BB_REFERENCE) {
} else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USER) {
ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]
} else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USERS) {
ops = [Op.Contains, Op.NotContains, Op.ContainsAny, Op.Empty, Op.NotEmpty]
}
// Only allow equal/not equal for _id in SQL tables

View file

@ -3,3 +3,4 @@ export * as dataFilters from "./filters"
export * as helpers from "./helpers"
export * as utils from "./utils"
export * as sdk from "./sdk"
export * from "./table"

View file

@ -1,4 +1,10 @@
import { ContextUser, User } from "@budibase/types"
import {
ContextUser,
DocumentType,
SEPARATOR,
User,
InternalTable,
} from "@budibase/types"
import { getProdAppID } from "./applications"
// checks if a user is specifically a builder, given an app ID
@ -67,3 +73,21 @@ export function hasAdminPermissions(user?: User | ContextUser): boolean {
}
return !!user.admin?.global
}
export function getGlobalUserID(userId?: string): string | undefined {
if (typeof userId !== "string") {
return userId
}
const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
if (!userId.startsWith(prefix)) {
return userId
}
return userId.split(prefix)[1]
}
export function containsUserID(value: string | undefined): boolean {
if (typeof value !== "string") {
return false
}
return value.includes(`${DocumentType.USER}${SEPARATOR}`)
}
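A short sketch of the two helpers above, assuming the usual Budibase ID prefixes ("ro_" for rows, "ta_users" for the user metadata table, "us_" for global users); the IDs are made up.

// getGlobalUserID strips the app-level user metadata prefix, if present.
getGlobalUserID("ro_ta_users_us_abc123") // => "us_abc123"
getGlobalUserID("us_abc123") // => "us_abc123" (already global, returned unchanged)
getGlobalUserID(undefined) // => undefined

// containsUserID only checks for the global user document prefix anywhere in the value.
containsUserID("us_abc123") // => true
containsUserID("ro_ta_users_us_abc123") // => true
containsUserID("Updated by admin") // => false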

View file

@ -0,0 +1,25 @@
import { FieldType } from "@budibase/types"
const allowDisplayColumnByType: Record<FieldType, boolean> = {
[FieldType.STRING]: true,
[FieldType.LONGFORM]: true,
[FieldType.OPTIONS]: true,
[FieldType.NUMBER]: true,
[FieldType.DATETIME]: true,
[FieldType.FORMULA]: true,
[FieldType.AUTO]: true,
[FieldType.INTERNAL]: true,
[FieldType.BARCODEQR]: true,
[FieldType.BIGINT]: true,
[FieldType.BOOLEAN]: false,
[FieldType.ARRAY]: false,
[FieldType.ATTACHMENT]: false,
[FieldType.LINK]: false,
[FieldType.JSON]: false,
[FieldType.BB_REFERENCE]: false,
}
export function canBeDisplayColumn(type: FieldType): boolean {
return !!allowDisplayColumnByType[type]
}
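A couple of illustrative calls for the guard above; the map itself is the source of truth, and the intent is presumably to gate which column types can be picked as a table's display column.

canBeDisplayColumn(FieldType.STRING) // => true, simple scalar values read well as a label
canBeDisplayColumn(FieldType.FORMULA) // => true
canBeDisplayColumn(FieldType.ATTACHMENT) // => false, complex values can't be summarised into a label
canBeDisplayColumn(FieldType.LINK) // => false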

View file

@ -15,7 +15,8 @@
"skipLibCheck": true,
"paths": {
"@budibase/types": ["../types/src"]
}
},
"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
},
"include": ["**/*.js", "**/*.ts"],
"exclude": [

View file

@ -3,8 +3,7 @@
"compilerOptions": {
"baseUrl": "..",
"rootDir": "src",
"composite": true,
"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
"composite": true
},
"exclude": ["node_modules", "dist"]
}

View file

@ -1,4 +1,5 @@
import {
Row,
Table,
TableRequest,
TableSchema,
@ -18,6 +19,17 @@ export interface TableResponse extends Table {
export type FetchTablesResponse = TableResponse[]
export interface SaveTableRequest extends TableRequest {}
export interface SaveTableRequest extends TableRequest {
rows?: Row[]
}
export type SaveTableResponse = Table
export interface BulkImportRequest {
rows: Row[]
identifierFields?: Array<string>
}
export interface BulkImportResponse {
message: string
}

View file

@ -66,4 +66,5 @@ export interface AppIcon {
export interface AppFeatures {
componentValidation?: boolean
disableUserMetadata?: boolean
}

View file

@ -37,10 +37,12 @@ export interface Row extends Document {
export enum FieldSubtype {
USER = "user",
USERS = "users",
}
export const FieldTypeSubtypes = {
BB_REFERENCE: {
USER: FieldSubtype.USER,
USERS: FieldSubtype.USERS,
},
}

View file

@ -7,3 +7,16 @@ export enum RelationshipType {
export enum AutoReason {
FOREIGN_KEY = "foreign_key",
}
export enum AutoFieldSubTypes {
CREATED_BY = "createdBy",
CREATED_AT = "createdAt",
UPDATED_BY = "updatedBy",
UPDATED_AT = "updatedAt",
AUTO_ID = "autoID",
}
export enum FormulaTypes {
STATIC = "static",
DYNAMIC = "dynamic",
}

View file

@ -1,7 +1,12 @@
// all added by grid/table when defining the
// column size, position and whether it can be viewed
import { FieldType } from "../row"
import { AutoReason, RelationshipType } from "./constants"
import { FieldSubtype, FieldType } from "../row"
import {
AutoFieldSubTypes,
AutoReason,
FormulaTypes,
RelationshipType,
} from "./constants"
export interface UIFieldMetadata {
order?: number
@ -10,28 +15,63 @@ export interface UIFieldMetadata {
icon?: string
}
export interface RelationshipFieldMetadata {
interface BaseRelationshipFieldMetadata
extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.LINK
main?: boolean
fieldName?: string
tableId?: string
// below is used for SQL relationships, needed to define the foreign keys
// or the tables used for many-to-many relationships (through)
relationshipType?: RelationshipType
through?: string
foreignKey?: string
throughFrom?: string
throughTo?: string
fieldName: string
tableId: string
subtype?: AutoFieldSubTypes.CREATED_BY | AutoFieldSubTypes.UPDATED_BY
}
export interface AutoColumnFieldMetadata {
autocolumn?: boolean
subtype?: string
// External tables use junction tables, internal tables don't require them
type ManyToManyJunctionTableMetadata =
| {
through: string
throughFrom: string
throughTo: string
}
| {
through?: never
throughFrom?: never
throughTo?: never
}
export type ManyToManyRelationshipFieldMetadata =
BaseRelationshipFieldMetadata & {
relationshipType: RelationshipType.MANY_TO_MANY
} & ManyToManyJunctionTableMetadata
export interface OneToManyRelationshipFieldMetadata
extends BaseRelationshipFieldMetadata {
relationshipType: RelationshipType.ONE_TO_MANY
foreignKey?: string
}
export interface ManyToOneRelationshipFieldMetadata
extends BaseRelationshipFieldMetadata {
relationshipType: RelationshipType.MANY_TO_ONE
foreignKey?: string
}
export type RelationshipFieldMetadata =
| ManyToManyRelationshipFieldMetadata
| OneToManyRelationshipFieldMetadata
| ManyToOneRelationshipFieldMetadata
export interface AutoColumnFieldMetadata
extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.AUTO
autocolumn: true
subtype?: AutoFieldSubTypes
lastID?: number
// if the column was turned into an auto-column for SQL, this explains why (primary, foreign etc)
autoReason?: AutoReason
}
export interface NumberFieldMetadata {
export interface NumberFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.NUMBER
subtype?: AutoFieldSubTypes.AUTO_ID
lastID?: number
autoReason?: AutoReason.FOREIGN_KEY
// used specifically when Budibase generates external tables, this denotes if a number field
// is a foreign key used for a many-to-many relationship
meta?: {
@ -40,18 +80,28 @@ export interface NumberFieldMetadata {
}
}
export interface DateFieldMetadata {
export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.DATETIME
ignoreTimezones?: boolean
timeOnly?: boolean
subtype?: AutoFieldSubTypes.CREATED_AT | AutoFieldSubTypes.UPDATED_AT
}
export interface StringFieldMetadata {
export interface LongFormFieldMetadata extends BaseFieldSchema {
type: FieldType.LONGFORM
useRichText?: boolean | null
}
export interface FormulaFieldMetadata {
formula?: string
formulaType?: string
export interface FormulaFieldMetadata extends BaseFieldSchema {
type: FieldType.FORMULA
formula: string
formulaType?: FormulaTypes
}
export interface BBReferenceFieldMetadata
extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.BB_REFERENCE
subtype: FieldSubtype.USER | FieldSubtype.USERS
}
export interface FieldConstraints {
@ -77,22 +127,40 @@ export interface FieldConstraints {
}
}
export interface FieldSchema
extends UIFieldMetadata,
DateFieldMetadata,
RelationshipFieldMetadata,
AutoColumnFieldMetadata,
StringFieldMetadata,
FormulaFieldMetadata,
NumberFieldMetadata {
interface BaseFieldSchema extends UIFieldMetadata {
type: FieldType
name: string
sortable?: boolean
// only used by external databases, to denote the real type
externalType?: string
constraints?: FieldConstraints
autocolumn?: boolean
autoReason?: AutoReason.FOREIGN_KEY
subtype?: never
}
interface OtherFieldMetadata extends BaseFieldSchema {
type: Exclude<
FieldType,
| FieldType.DATETIME
| FieldType.LINK
| FieldType.AUTO
| FieldType.FORMULA
| FieldType.NUMBER
| FieldType.LONGFORM
>
}
export type FieldSchema =
| OtherFieldMetadata
| DateFieldMetadata
| RelationshipFieldMetadata
| AutoColumnFieldMetadata
| FormulaFieldMetadata
| NumberFieldMetadata
| LongFormFieldMetadata
| BBReferenceFieldMetadata
export interface TableSchema {
[key: string]: FieldSchema
}
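A brief sketch of how the new FieldSchema union is meant to be consumed: narrowing on the type discriminant (or via a guard such as isRelationshipColumn earlier in this diff) exposes the variant-specific properties that the old merged interface allowed on every column. The function below is illustrative only, not part of this change.

import { FieldSchema, FieldType } from "@budibase/types"

function describeColumn(column: FieldSchema): string {
  switch (column.type) {
    case FieldType.LINK:
      // narrowed to RelationshipFieldMetadata: tableId and fieldName are required here
      return `"${column.name}" links to ${column.tableId} via ${column.fieldName}`
    case FieldType.FORMULA:
      // narrowed to FormulaFieldMetadata: formula is required, formulaType is optional
      return `"${column.name}" is a ${column.formulaType ?? "dynamic"} formula`
    case FieldType.NUMBER:
      // narrowed to NumberFieldMetadata: may carry lastID for auto IDs or junction meta
      return `"${column.name}" is a number column (lastID: ${column.lastID ?? "n/a"})`
    default:
      return `"${column.name}" is a ${column.type} column`
  }
}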

View file

@ -15,7 +15,6 @@ export interface Table extends Document {
constrained?: string[]
sql?: boolean
indexes?: { [key: string]: any }
rows?: { [key: string]: any }
created?: boolean
rowHeight?: number
}

View file

@ -58,6 +58,10 @@ export const DocumentTypesToImport: DocumentType[] = [
DocumentType.LAYOUT,
]
export enum InternalTable {
USER_METADATA = "ta_users",
}
// these documents don't really exist, they are part of other
// documents or enriched into existence as part of get requests
export enum VirtualDocumentType {

View file

@ -11,7 +11,8 @@
"sourceMap": true,
"declaration": true,
"skipLibCheck": true,
"outDir": "dist"
"outDir": "dist",
"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.spec.ts", "**/*.spec.js"]

View file

@ -3,8 +3,7 @@
"compilerOptions": {
"baseUrl": ".",
"rootDir": "./src",
"composite": true,
"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
"composite": true
},
"exclude": ["node_modules", "dist"]
}

View file

@ -74,7 +74,7 @@
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@trendyol/jest-testcontainers": "2.1.1",
"@types/jest": "29.5.3",
"@types/jest": "29.5.5",
"@types/jsonwebtoken": "8.5.1",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.8",

View file

@ -5638,6 +5638,14 @@
expect "^29.0.0"
pretty-format "^29.0.0"
"@types/jest@29.5.5":
version "29.5.5"
resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.5.tgz#727204e06228fe24373df9bae76b90f3e8236a2a"
integrity sha512-ebylz2hnsWR9mYvmBFbXJXr+33UPc4+ZdxyDXh5w0FlPBTfCVN3wPL+kuOiQt3xvrK419v7XWeAs+AeOksafXg==
dependencies:
expect "^29.0.0"
pretty-format "^29.0.0"
"@types/json-schema@*", "@types/json-schema@^7.0.6", "@types/json-schema@^7.0.8":
version "7.0.11"
resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3"
@ -21854,7 +21862,7 @@ vlq@^0.2.2:
resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==
vm2@3.9.19, vm2@^3.9.19:
vm2@^3.9.19:
version "3.9.19"
resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==