
Merge branch 'develop' into grid-all-datasources

This commit is contained in:
Andrew Kingston 2023-10-12 08:31:09 +01:00 committed by GitHub
commit 4ed3767324
80 changed files with 1055 additions and 894 deletions

View file

@@ -264,18 +264,23 @@ jobs:
           if [[ $branch == "master" ]]; then
             base_commit=$(git rev-parse origin/master)
-          else
+          elif [[ $branch == "develop" ]]; then
             base_commit=$(git rev-parse origin/develop)
           fi
-          echo "target_branch=$branch"
-          echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
-          echo "pro_commit=$pro_commit"
-          echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
-          echo "base_commit=$base_commit"
-          echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
+          if [[ ! -z $base_commit ]]; then
+            echo "target_branch=$branch"
+            echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
+            echo "pro_commit=$pro_commit"
+            echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
+            echo "base_commit=$base_commit"
+            echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
+          else
+            echo "Nothing to do - branch to branch merge."
+          fi
-      - name: Check submodule merged to develop
+      - name: Check submodule merged to base branch
+        if: ${{ steps.get_pro_commits.outputs.base_commit != '' }}
         uses: actions/github-script@v4
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
@@ -284,7 +289,7 @@ jobs:
             const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
             if (submoduleCommit !== baseCommit) {
-              console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}"" branch.');
+              console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
               console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
               process.exit(1);
             } else {

View file

@@ -1,29 +0,0 @@
name: check_unreleased_changes
on:
pull_request:
branches:
- master
jobs:
check_unreleased:
runs-on: ubuntu-latest
steps:
- name: Check for unreleased changes
env:
REPO: "Budibase/budibase"
TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/releases/latest" | \
jq -r .published_at)
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/commits/master" | \
jq -r .commit.committer.date)
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
echo "There are unreleased changes. Please release these changes before merging."
exit 1
fi
echo "No unreleased changes detected."

View file

@@ -1,5 +1,5 @@
 {
-  "version": "2.11.20-alpha.0",
+  "version": "2.11.27-alpha.0",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

View file

@@ -74,7 +74,6 @@
   "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
   "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
   "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
-  "build:docs": "lerna run --stream build:docs",
   "release:helm": "node scripts/releaseHelmChart",
   "env:multi:enable": "lerna run --stream env:multi:enable",
   "env:multi:disable": "lerna run --stream env:multi:disable",

View file

@@ -62,7 +62,7 @@
   "@trendyol/jest-testcontainers": "^2.1.1",
   "@types/chance": "1.1.3",
   "@types/cookies": "0.7.8",
-  "@types/jest": "29.5.3",
+  "@types/jest": "29.5.5",
   "@types/lodash": "4.14.180",
   "@types/node": "18.17.0",
   "@types/node-fetch": "2.6.4",

View file

@@ -1,5 +1,10 @@
 import { prefixed, DocumentType } from "@budibase/types"
-export { SEPARATOR, UNICODE_MAX, DocumentType } from "@budibase/types"
+export {
+  SEPARATOR,
+  UNICODE_MAX,
+  DocumentType,
+  InternalTable,
+} from "@budibase/types"

 /**
  * Can be used to create a few different forms of querying a view.
@@ -30,10 +35,6 @@ export const DeprecatedViews = {
   ],
 }

-export enum InternalTable {
-  USER_METADATA = "ta_users",
-}
-
 export const StaticDatabases = {
   GLOBAL: {
     name: "global-db",

View file

@@ -45,6 +45,11 @@ export function generateGlobalUserID(id?: any) {
   return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
 }

+const isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)
+export function isGlobalUserID(id: string) {
+  return isGlobalUserIDRegex.test(id)
+}
+
 /**
  * Generates a new user ID based on the passed in global ID.
  * @param {string} globalId The ID of the global user.

View file

@ -33,7 +33,7 @@
import { getBindings } from "components/backend/DataTable/formula" import { getBindings } from "components/backend/DataTable/formula"
import JSONSchemaModal from "./JSONSchemaModal.svelte" import JSONSchemaModal from "./JSONSchemaModal.svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core" import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldType } from "@budibase/types" import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
import RelationshipSelector from "components/common/RelationshipSelector.svelte" import RelationshipSelector from "components/common/RelationshipSelector.svelte"
const AUTO_TYPE = "auto" const AUTO_TYPE = "auto"
@ -43,7 +43,6 @@
const NUMBER_TYPE = FIELDS.NUMBER.type const NUMBER_TYPE = FIELDS.NUMBER.type
const JSON_TYPE = FIELDS.JSON.type const JSON_TYPE = FIELDS.JSON.type
const DATE_TYPE = FIELDS.DATETIME.type const DATE_TYPE = FIELDS.DATETIME.type
const USER_REFRENCE_TYPE = FIELDS.BB_REFERENCE_USER.compositeType
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"] const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
@ -52,7 +51,19 @@
export let field export let field
let mounted = false let mounted = false
let fieldDefinitions = cloneDeep(FIELDS) const fieldDefinitions = Object.values(FIELDS).reduce(
// Storing the fields by complex field id
(acc, field) => ({
...acc,
[makeFieldId(field.type, field.subtype)]: field,
}),
{}
)
function makeFieldId(type, subtype) {
return `${type}${subtype || ""}`.toUpperCase()
}
let originalName let originalName
let linkEditDisabled let linkEditDisabled
let primaryDisplay let primaryDisplay
@ -72,8 +83,8 @@
let jsonSchemaModal let jsonSchemaModal
let allowedTypes = [] let allowedTypes = []
let editableColumn = { let editableColumn = {
type: fieldDefinitions.STRING.type, type: FIELDS.STRING.type,
constraints: fieldDefinitions.STRING.constraints, constraints: FIELDS.STRING.constraints,
// Initial value for column name in other table for linked records // Initial value for column name in other table for linked records
fieldName: $tables.selected.name, fieldName: $tables.selected.name,
} }
@ -139,9 +150,6 @@
$tables.selected.primaryDisplay == null || $tables.selected.primaryDisplay == null ||
$tables.selected.primaryDisplay === editableColumn.name $tables.selected.primaryDisplay === editableColumn.name
if (editableColumn.type === FieldType.BB_REFERENCE) {
editableColumn.type = `${editableColumn.type}_${editableColumn.subtype}`
}
// Here we are setting the relationship values based on the editableColumn // Here we are setting the relationship values based on the editableColumn
// This part of the code is used when viewing an existing field hence the check // This part of the code is used when viewing an existing field hence the check
// for the tableId // for the tableId
@ -172,7 +180,17 @@
} }
} }
allowedTypes = getAllowedTypes() if (!savingColumn) {
editableColumn.fieldId = makeFieldId(
editableColumn.type,
editableColumn.subtype
)
allowedTypes = getAllowedTypes().map(t => ({
fieldId: makeFieldId(t.type, t.subtype),
...t,
}))
}
} }
$: initialiseField(field, savingColumn) $: initialiseField(field, savingColumn)
@ -249,13 +267,7 @@
let saveColumn = cloneDeep(editableColumn) let saveColumn = cloneDeep(editableColumn)
// Handle types on composite types delete saveColumn.fieldId
const definition = fieldDefinitions[saveColumn.type.toUpperCase()]
if (definition && saveColumn.type === definition.compositeType) {
saveColumn.type = definition.type
saveColumn.subtype = definition.subtype
delete saveColumn.compositeType
}
if (saveColumn.type === AUTO_TYPE) { if (saveColumn.type === AUTO_TYPE) {
saveColumn = buildAutoColumn( saveColumn = buildAutoColumn(
@ -320,27 +332,33 @@
} }
} }
function handleTypeChange(event) { function onHandleTypeChange(event) {
handleTypeChange(event.detail)
}
function handleTypeChange(type) {
// remove any extra fields that may not be related to this type // remove any extra fields that may not be related to this type
delete editableColumn.autocolumn delete editableColumn.autocolumn
delete editableColumn.subtype delete editableColumn.subtype
delete editableColumn.tableId delete editableColumn.tableId
delete editableColumn.relationshipType delete editableColumn.relationshipType
delete editableColumn.formulaType delete editableColumn.formulaType
delete editableColumn.constraints
// Add in defaults and initial definition // Add in defaults and initial definition
const definition = fieldDefinitions[event.detail?.toUpperCase()] const definition = fieldDefinitions[type?.toUpperCase()]
if (definition?.constraints) { if (definition?.constraints) {
editableColumn.constraints = definition.constraints editableColumn.constraints = definition.constraints
} }
editableColumn.type = definition.type
editableColumn.subtype = definition.subtype
// Default relationships many to many // Default relationships many to many
if (editableColumn.type === LINK_TYPE) { if (editableColumn.type === LINK_TYPE) {
editableColumn.relationshipType = RelationshipType.MANY_TO_MANY editableColumn.relationshipType = RelationshipType.MANY_TO_MANY
} else if (editableColumn.type === FORMULA_TYPE) { } else if (editableColumn.type === FORMULA_TYPE) {
editableColumn.formulaType = "dynamic" editableColumn.formulaType = "dynamic"
} else if (editableColumn.type === USER_REFRENCE_TYPE) {
editableColumn.relationshipType = RelationshipType.ONE_TO_MANY
} }
} }
@ -381,9 +399,26 @@
return ALLOWABLE_NUMBER_OPTIONS return ALLOWABLE_NUMBER_OPTIONS
} }
const isUsers =
editableColumn.type === FieldType.BB_REFERENCE &&
editableColumn.subtype === FieldSubtype.USERS
if (!external) { if (!external) {
return [ return [
...Object.values(fieldDefinitions), FIELDS.STRING,
FIELDS.BARCODEQR,
FIELDS.LONGFORM,
FIELDS.OPTIONS,
FIELDS.ARRAY,
FIELDS.NUMBER,
FIELDS.BIGINT,
FIELDS.BOOLEAN,
FIELDS.DATETIME,
FIELDS.ATTACHMENT,
FIELDS.LINK,
FIELDS.FORMULA,
FIELDS.JSON,
isUsers ? FIELDS.USERS : FIELDS.USER,
{ name: "Auto Column", type: AUTO_TYPE }, { name: "Auto Column", type: AUTO_TYPE },
] ]
} else { } else {
@ -397,7 +432,7 @@
FIELDS.BOOLEAN, FIELDS.BOOLEAN,
FIELDS.FORMULA, FIELDS.FORMULA,
FIELDS.BIGINT, FIELDS.BIGINT,
FIELDS.BB_REFERENCE_USER, isUsers ? FIELDS.USERS : FIELDS.USER,
] ]
// no-sql or a spreadsheet // no-sql or a spreadsheet
if (!external || table.sql) { if (!external || table.sql) {
@ -472,6 +507,13 @@
return newError return newError
} }
function isUsersColumn(column) {
return (
column.type === FieldType.BB_REFERENCE &&
[FieldSubtype.USER, FieldSubtype.USERS].includes(column.subtype)
)
}
onMount(() => { onMount(() => {
mounted = true mounted = true
}) })
@ -489,11 +531,11 @@
{/if} {/if}
<Select <Select
disabled={!typeEnabled} disabled={!typeEnabled}
bind:value={editableColumn.type} bind:value={editableColumn.fieldId}
on:change={handleTypeChange} on:change={onHandleTypeChange}
options={allowedTypes} options={allowedTypes}
getOptionLabel={field => field.name} getOptionLabel={field => field.name}
getOptionValue={field => field.compositeType || field.type} getOptionValue={field => field.fieldId}
getOptionIcon={field => field.icon} getOptionIcon={field => field.icon}
isOptionEnabled={option => { isOptionEnabled={option => {
if (option.type == AUTO_TYPE) { if (option.type == AUTO_TYPE) {
@ -555,7 +597,7 @@
<DatePicker bind:value={editableColumn.constraints.datetime.latest} /> <DatePicker bind:value={editableColumn.constraints.datetime.latest} />
</div> </div>
</div> </div>
{#if datasource?.source !== "ORACLE" && datasource?.source !== "SQL_SERVER" && !editableColumn.dateOnly} {#if datasource?.source !== SourceName.ORACLE && datasource?.source !== SourceName.SQL_SERVER && !editableColumn.dateOnly}
<div> <div>
<div class="row"> <div class="row">
<Label>Time zones</Label> <Label>Time zones</Label>
@ -659,18 +701,20 @@
<Button primary text on:click={openJsonSchemaEditor} <Button primary text on:click={openJsonSchemaEditor}
>Open schema editor</Button >Open schema editor</Button
> >
{:else if editableColumn.type === USER_REFRENCE_TYPE} {:else if isUsersColumn(editableColumn) && datasource?.source !== SourceName.GOOGLE_SHEETS}
<!-- Disabled temporally --> <Toggle
<!-- <Toggle value={editableColumn.subtype === FieldSubtype.USERS}
value={editableColumn.relationshipType === RelationshipType.MANY_TO_MANY}
on:change={e => on:change={e =>
(editableColumn.relationshipType = e.detail handleTypeChange(
? RelationshipType.MANY_TO_MANY makeFieldId(
: RelationshipType.ONE_TO_MANY)} FieldType.BB_REFERENCE,
e.detail ? FieldSubtype.USERS : FieldSubtype.USER
)
)}
disabled={!isCreating} disabled={!isCreating}
thin thin
text="Allow multiple users" text="Allow multiple users"
/> --> />
{/if} {/if}
{#if editableColumn.type === AUTO_TYPE || editableColumn.autocolumn} {#if editableColumn.type === AUTO_TYPE || editableColumn.autocolumn}
<Select <Select

View file

@@ -49,6 +49,15 @@
     label: "Long Form Text",
     value: FIELDS.LONGFORM.type,
   },
+  {
+    label: "User",
+    value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
+  },
+  {
+    label: "Users",
+    value: `${FIELDS.USERS.type}${FIELDS.USERS.subtype}`,
+  },
 ]

 $: {
@@ -143,7 +152,7 @@
   <div class="field">
     <span>{name}</span>
     <Select
-      value={schema[name]?.type}
+      value={`${schema[name]?.type}${schema[name]?.subtype || ""}`}
       options={typeOptions}
       placeholder={null}
       getOptionLabel={option => option.label}

View file

@ -3,6 +3,7 @@
import { FIELDS } from "constants/backend" import { FIELDS } from "constants/backend"
import { API } from "api" import { API } from "api"
import { parseFile } from "./utils" import { parseFile } from "./utils"
import { canBeDisplayColumn } from "@budibase/shared-core"
export let rows = [] export let rows = []
export let schema = {} export let schema = {}
@ -10,36 +11,82 @@
export let displayColumn = null export let displayColumn = null
export let promptUpload = false export let promptUpload = false
const typeOptions = [ const typeOptions = {
{ [FIELDS.STRING.type]: {
label: "Text", label: "Text",
value: FIELDS.STRING.type, value: FIELDS.STRING.type,
config: {
type: FIELDS.STRING.type,
constraints: FIELDS.STRING.constraints,
},
}, },
{ [FIELDS.NUMBER.type]: {
label: "Number", label: "Number",
value: FIELDS.NUMBER.type, value: FIELDS.NUMBER.type,
config: {
type: FIELDS.NUMBER.type,
constraints: FIELDS.NUMBER.constraints,
},
}, },
{ [FIELDS.DATETIME.type]: {
label: "Date", label: "Date",
value: FIELDS.DATETIME.type, value: FIELDS.DATETIME.type,
config: {
type: FIELDS.DATETIME.type,
constraints: FIELDS.DATETIME.constraints,
},
}, },
{ [FIELDS.OPTIONS.type]: {
label: "Options", label: "Options",
value: FIELDS.OPTIONS.type, value: FIELDS.OPTIONS.type,
config: {
type: FIELDS.OPTIONS.type,
constraints: FIELDS.OPTIONS.constraints,
},
}, },
{ [FIELDS.ARRAY.type]: {
label: "Multi-select", label: "Multi-select",
value: FIELDS.ARRAY.type, value: FIELDS.ARRAY.type,
config: {
type: FIELDS.ARRAY.type,
constraints: FIELDS.ARRAY.constraints,
},
}, },
{ [FIELDS.BARCODEQR.type]: {
label: "Barcode/QR", label: "Barcode/QR",
value: FIELDS.BARCODEQR.type, value: FIELDS.BARCODEQR.type,
config: {
type: FIELDS.BARCODEQR.type,
constraints: FIELDS.BARCODEQR.constraints,
},
}, },
{ [FIELDS.LONGFORM.type]: {
label: "Long Form Text", label: "Long Form Text",
value: FIELDS.LONGFORM.type, value: FIELDS.LONGFORM.type,
config: {
type: FIELDS.LONGFORM.type,
constraints: FIELDS.LONGFORM.constraints,
},
}, },
] user: {
label: "User",
value: "user",
config: {
type: FIELDS.USER.type,
subtype: FIELDS.USER.subtype,
constraints: FIELDS.USER.constraints,
},
},
users: {
label: "Users",
value: "users",
config: {
type: FIELDS.USERS.type,
subtype: FIELDS.USERS.subtype,
constraints: FIELDS.USERS.constraints,
},
},
}
let fileInput let fileInput
let error = null let error = null
@ -48,10 +95,16 @@
let validation = {} let validation = {}
let validateHash = "" let validateHash = ""
let errors = {} let errors = {}
let selectedColumnTypes = {}
$: displayColumnOptions = Object.keys(schema || {}).filter(column => { $: displayColumnOptions = Object.keys(schema || {}).filter(column => {
return validation[column] return validation[column] && canBeDisplayColumn(schema[column].type)
}) })
$: if (displayColumn && !canBeDisplayColumn(schema[displayColumn].type)) {
displayColumn = null
}
$: { $: {
// binding in consumer is causing double renders here // binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema) const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
@ -72,6 +125,13 @@
rows = response.rows rows = response.rows
schema = response.schema schema = response.schema
fileName = response.fileName fileName = response.fileName
selectedColumnTypes = Object.entries(response.schema).reduce(
(acc, [colName, fieldConfig]) => ({
...acc,
[colName]: fieldConfig.type,
}),
{}
)
} catch (e) { } catch (e) {
loading = false loading = false
error = e error = e
@ -98,8 +158,10 @@
} }
const handleChange = (name, e) => { const handleChange = (name, e) => {
schema[name].type = e.detail const { config } = typeOptions[e.detail]
schema[name].constraints = FIELDS[e.detail.toUpperCase()].constraints schema[name].type = config.type
schema[name].subtype = config.subtype
schema[name].constraints = config.constraints
} }
const openFileUpload = (promptUpload, fileInput) => { const openFileUpload = (promptUpload, fileInput) => {
@ -142,9 +204,9 @@
<div class="field"> <div class="field">
<span>{column.name}</span> <span>{column.name}</span>
<Select <Select
bind:value={column.type} bind:value={selectedColumnTypes[column.name]}
on:change={e => handleChange(name, e)} on:change={e => handleChange(name, e)}
options={typeOptions} options={Object.values(typeOptions)}
placeholder={null} placeholder={null}
getOptionLabel={option => option.label} getOptionLabel={option => option.label}
getOptionValue={option => option.value} getOptionValue={option => option.value}

View file

@@ -20,7 +20,6 @@
   import { FieldType } from "@budibase/types"
   import { createEventDispatcher, onMount } from "svelte"
   import FilterUsers from "./FilterUsers.svelte"
-  import { RelationshipType } from "constants/backend"

   export let schemaFields
   export let filters = []
@@ -126,6 +125,7 @@
     // Update type based on field
     const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field)
     filter.type = fieldSchema?.type
+    filter.subtype = fieldSchema?.subtype

     // Update external type based on field
     filter.externalType = getSchema(filter)?.externalType
@@ -196,7 +196,7 @@
     }
     return LuceneUtils.getValidOperatorsForType(
-      filter.type,
+      { type: filter.type, subtype: filter.subtype },
       filter.field,
       datasource
     )
@@ -301,9 +301,10 @@
   {:else if filter.type === FieldType.BB_REFERENCE}
     <FilterUsers
       bind:value={filter.value}
-      multiselect={getSchema(filter).relationshipType ===
-        RelationshipType.MANY_TO_MANY ||
-        filter.operator === OperatorOptions.In.value}
+      multiselect={[
+        OperatorOptions.In.value,
+        OperatorOptions.ContainsAny.value,
+      ].includes(filter.operator)}
      disabled={filter.noValue}
     />
   {:else}

View file

@ -1,7 +1,9 @@
import { FieldType, FieldSubtype } from "@budibase/types"
export const FIELDS = { export const FIELDS = {
STRING: { STRING: {
name: "Text", name: "Text",
type: "string", type: FieldType.STRING,
icon: "Text", icon: "Text",
constraints: { constraints: {
type: "string", type: "string",
@ -11,7 +13,7 @@ export const FIELDS = {
}, },
BARCODEQR: { BARCODEQR: {
name: "Barcode/QR", name: "Barcode/QR",
type: "barcodeqr", type: FieldType.BARCODEQR,
icon: "Camera", icon: "Camera",
constraints: { constraints: {
type: "string", type: "string",
@ -21,7 +23,7 @@ export const FIELDS = {
}, },
LONGFORM: { LONGFORM: {
name: "Long Form Text", name: "Long Form Text",
type: "longform", type: FieldType.LONGFORM,
icon: "TextAlignLeft", icon: "TextAlignLeft",
constraints: { constraints: {
type: "string", type: "string",
@ -31,7 +33,7 @@ export const FIELDS = {
}, },
OPTIONS: { OPTIONS: {
name: "Options", name: "Options",
type: "options", type: FieldType.OPTIONS,
icon: "Dropdown", icon: "Dropdown",
constraints: { constraints: {
type: "string", type: "string",
@ -41,7 +43,7 @@ export const FIELDS = {
}, },
ARRAY: { ARRAY: {
name: "Multi-select", name: "Multi-select",
type: "array", type: FieldType.ARRAY,
icon: "Duplicate", icon: "Duplicate",
constraints: { constraints: {
type: "array", type: "array",
@ -51,7 +53,7 @@ export const FIELDS = {
}, },
NUMBER: { NUMBER: {
name: "Number", name: "Number",
type: "number", type: FieldType.NUMBER,
icon: "123", icon: "123",
constraints: { constraints: {
type: "number", type: "number",
@ -61,12 +63,12 @@ export const FIELDS = {
}, },
BIGINT: { BIGINT: {
name: "BigInt", name: "BigInt",
type: "bigint", type: FieldType.BIGINT,
icon: "TagBold", icon: "TagBold",
}, },
BOOLEAN: { BOOLEAN: {
name: "Boolean", name: "Boolean",
type: "boolean", type: FieldType.BOOLEAN,
icon: "Boolean", icon: "Boolean",
constraints: { constraints: {
type: "boolean", type: "boolean",
@ -75,7 +77,7 @@ export const FIELDS = {
}, },
DATETIME: { DATETIME: {
name: "Date/Time", name: "Date/Time",
type: "datetime", type: FieldType.DATETIME,
icon: "Calendar", icon: "Calendar",
constraints: { constraints: {
type: "string", type: "string",
@ -89,7 +91,7 @@ export const FIELDS = {
}, },
ATTACHMENT: { ATTACHMENT: {
name: "Attachment", name: "Attachment",
type: "attachment", type: FieldType.ATTACHMENT,
icon: "Folder", icon: "Folder",
constraints: { constraints: {
type: "array", type: "array",
@ -98,7 +100,7 @@ export const FIELDS = {
}, },
LINK: { LINK: {
name: "Relationship", name: "Relationship",
type: "link", type: FieldType.LINK,
icon: "Link", icon: "Link",
constraints: { constraints: {
type: "array", type: "array",
@ -107,26 +109,34 @@ export const FIELDS = {
}, },
FORMULA: { FORMULA: {
name: "Formula", name: "Formula",
type: "formula", type: FieldType.FORMULA,
icon: "Calculator", icon: "Calculator",
constraints: {}, constraints: {},
}, },
JSON: { JSON: {
name: "JSON", name: "JSON",
type: "json", type: FieldType.JSON,
icon: "Brackets", icon: "Brackets",
constraints: { constraints: {
type: "object", type: "object",
presence: false, presence: false,
}, },
}, },
BB_REFERENCE_USER: { USER: {
name: "User", name: "User",
type: "bb_reference", type: FieldType.BB_REFERENCE,
subtype: "user", subtype: FieldSubtype.USER,
compositeType: "bb_reference_user", // Used for working with the subtype on CreateEditColumn as is it was a primary type
icon: "User", icon: "User",
}, },
USERS: {
name: "Users",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
icon: "User",
constraints: {
type: "array",
},
},
} }
export const AUTO_COLUMN_SUB_TYPES = { export const AUTO_COLUMN_SUB_TYPES = {

View file

@@ -3,16 +3,17 @@
  * e.g.
  *  name        all names            result
  *  ------      -----------          --------
- *  ("foo")     ["foo"]              "foo (1)"
- *  ("foo")     ["foo", "foo (1)"]   "foo (2)"
- *  ("foo (1)") ["foo", "foo (1)"]   "foo (2)"
- *  ("foo")     ["foo", "foo (2)"]   "foo (1)"
+ *  ("foo")     ["foo"]              "foo 1"
+ *  ("foo")     ["foo", "foo 1"]     "foo 2"
+ *  ("foo 1")   ["foo", "foo 1"]     "foo 2"
+ *  ("foo")     ["foo", "foo 2"]     "foo 1"
  *
  * Repl
  */
 export const duplicateName = (name, allNames) => {
-  const baseName = name.split(" (")[0]
-  const isDuplicate = new RegExp(`${baseName}\\s\\((\\d+)\\)$`)
+  const duplicatePattern = new RegExp(`\\s(\\d+)$`)
+  const baseName = name.split(duplicatePattern)[0]
+  const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)

   // get the sequence from matched names
   const sequence = []
@@ -28,7 +29,6 @@ export const duplicateName = (name, allNames) => {
     return false
   })
   sequence.sort((a, b) => a - b)
-
   // get the next number in the sequence
   let number
   if (sequence.length === 0) {
@@ -46,5 +46,5 @@
     }
   }

-  return `${baseName} (${number})`
+  return `${baseName} ${number}`
 }

View file

@@ -9,34 +9,34 @@ describe("duplicate", () => {
     const duplicate = duplicateName(name, names)
-    expect(duplicate).toBe("foo (1)")
+    expect(duplicate).toBe("foo 1")
   })

   it("with multiple existing", async () => {
-    const names = ["foo", "foo (1)", "foo (2)"]
+    const names = ["foo", "foo 1", "foo 2"]
     const name = "foo"
     const duplicate = duplicateName(name, names)
-    expect(duplicate).toBe("foo (3)")
+    expect(duplicate).toBe("foo 3")
   })

   it("with mixed multiple existing", async () => {
-    const names = ["foo", "foo (1)", "foo (2)", "bar", "bar (1)", "bar (2)"]
+    const names = ["foo", "foo 1", "foo 2", "bar", "bar 1", "bar 2"]
     const name = "foo"
     const duplicate = duplicateName(name, names)
-    expect(duplicate).toBe("foo (3)")
+    expect(duplicate).toBe("foo 3")
   })

   it("with incomplete sequence", async () => {
-    const names = ["foo", "foo (2)", "foo (3)"]
+    const names = ["foo", "foo 2", "foo 3"]
     const name = "foo"
     const duplicate = duplicateName(name, names)
-    expect(duplicate).toBe("foo (1)")
+    expect(duplicate).toBe("foo 1")
   })
  })
 })

View file

@@ -118,7 +118,7 @@
   }

   const getOperatorOptions = condition => {
-    return LuceneUtils.getValidOperatorsForType(condition.valueType)
+    return LuceneUtils.getValidOperatorsForType({ type: condition.valueType })
   }

   const onOperatorChange = (condition, newOperator) => {
@@ -137,9 +137,9 @@
     condition.referenceValue = null

     // Ensure a valid operator is set
-    const validOperators = LuceneUtils.getValidOperatorsForType(newType).map(
-      x => x.value
-    )
+    const validOperators = LuceneUtils.getValidOperatorsForType({
+      type: newType,
+    }).map(x => x.value)
     if (!validOperators.includes(condition.operator)) {
       condition.operator =
         validOperators[0] ?? Constants.OperatorOptions.Equals.value

View file

@@ -5687,11 +5687,6 @@
   "label": "Validation",
   "key": "validation"
 },
-{
-  "type": "filter/relationship",
-  "label": "Filtering",
-  "key": "filter"
-},
 {
   "type": "boolean",
   "label": "Search",

View file

@@ -63,7 +63,7 @@
     // Ensure a valid operator is set
     const validOperators = LuceneUtils.getValidOperatorsForType(
-      expression.type,
+      { type: expression.type },
       expression.field,
       datasource
     ).map(x => x.value)
@@ -125,7 +125,7 @@
   <Select
     disabled={!filter.field}
     options={LuceneUtils.getValidOperatorsForType(
-      filter.type,
+      { type: filter.type, subtype: filter.subtype },
      filter.field,
      datasource
     )}

View file

@@ -1,9 +1,28 @@
 <script>
   import RelationshipField from "./RelationshipField.svelte"
+  import { sdk } from "@budibase/shared-core"
+
+  export let defaultValue
+
+  function updateUserIDs(value) {
+    if (Array.isArray(value)) {
+      return value.map(val => sdk.users.getGlobalUserID(val))
+    } else {
+      return sdk.users.getGlobalUserID(value)
+    }
+  }
+
+  function updateReferences(value) {
+    if (sdk.users.containsUserID(value)) {
+      return updateUserIDs(value)
+    }
+    return value
+  }
 </script>

 <RelationshipField
   {...$$props}
   datasourceType={"user"}
   primaryDisplay={"email"}
+  defaultValue={updateReferences(defaultValue)}
 />

View file

@@ -160,7 +160,9 @@
   const handleChange = value => {
     const changed = fieldApi.setValue(value)
     if (onChange && changed) {
-      onChange({ value })
+      onChange({
+        value,
+      })
     }
   }

View file

@@ -1,7 +1,7 @@
 <script>
   import { getContext } from "svelte"
   import RelationshipCell from "./RelationshipCell.svelte"
-  import { FieldSubtype } from "@budibase/types"
+  import { FieldSubtype, RelationshipType } from "@budibase/types"

   export let api
@@ -12,10 +12,14 @@
     ...$$props.schema,
     // This is not really used, just adding some content to be able to render the relationship cell
     tableId: "external",
+    relationshipType:
+      subtype === FieldSubtype.USER
+        ? RelationshipType.ONE_TO_MANY
+        : RelationshipType.MANY_TO_MANY,
   }

   async function searchFunction(searchParams) {
-    if (subtype !== FieldSubtype.USER) {
+    if (subtype !== FieldSubtype.USER && subtype !== FieldSubtype.USERS) {
       throw `Search for '${subtype}' not implemented`
     }

View file

@@ -1,7 +1,8 @@
 <script>
   import { getContext, onMount, tick } from "svelte"
-  import GridCell from "./GridCell.svelte"
+  import { canBeDisplayColumn } from "@budibase/shared-core"
   import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
+  import GridCell from "./GridCell.svelte"
   import { getColumnIcon } from "../lib/utils"

   export let column
@@ -25,14 +26,6 @@
     schema,
   } = getContext("grid")

-  const bannedDisplayColumnTypes = [
-    "link",
-    "array",
-    "attachment",
-    "boolean",
-    "json",
-  ]
-
   let anchor
   let open = false
   let editIsOpen = false
@@ -233,8 +226,7 @@
         <MenuItem
           icon="Label"
           on:click={makeDisplayColumn}
-          disabled={idx === "sticky" ||
-            bannedDisplayColumnTypes.includes(column.schema.type)}
+          disabled={idx === "sticky" || !canBeDisplayColumn(column.schema.type)}
         >
           Use as display column
         </MenuItem>

View file

@@ -21,6 +21,7 @@ const TypeIconMap = {
   bigint: "TagBold",
   bb_reference: {
     user: "User",
+    users: "UserGroup",
   },
 }

@@ -1 +1 @@
-Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e
+Subproject commit 1d581a85e6a31d9aa767a79325cdde74ed02516f

View file

@@ -20,7 +20,6 @@
   "test:watch": "jest --watch",
   "predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
   "build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
-  "build:docs": "node ./scripts/docs/generate.js open",
   "run:docker": "node dist/index.js",
   "run:docker:cluster": "pm2-runtime start pm2.config.js",
   "dev:stack:up": "node scripts/dev/manage.js up",
@@ -112,7 +111,7 @@
   "to-json-schema": "0.2.5",
   "uuid": "3.3.2",
   "validate.js": "0.13.1",
-  "vm2": "3.9.17",
+  "vm2": "^3.9.19",
   "worker-farm": "1.7.0",
   "xml2js": "0.5.0",
   "yargs": "13.2.4"
@@ -125,7 +124,7 @@
   "@trendyol/jest-testcontainers": "2.1.1",
   "@types/global-agent": "2.1.1",
   "@types/google-spreadsheet": "3.1.5",
-  "@types/jest": "29.5.3",
+  "@types/jest": "29.5.5",
   "@types/koa": "2.13.4",
   "@types/koa__router": "8.0.8",
   "@types/lodash": "4.14.180",

View file

@@ -1,31 +0,0 @@
### Documentation
This directory contains the scripts required to generate the APIDoc based documentation.
You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).
In general most API endpoints will look like:
```js
/**
* @api {post} /api/:param/url Give it a name
* @apiName Give it a name
* @apiGroup group
* @apiPermission permission
* @apiDescription Describe what the endpoint does, any special cases the user
* should be aware of.
*
* @apiParam {string} param describe a URL parameter.
*
* @apiParam (Body) input describe a field on the body.
*
* @apiSuccess {object} output describe the output.
*/
```
There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Make sure to always have an `@api` definition at the start, which must always have the
HTTP verb, the endpoint URL and the name.
3. There are three ways you can specify parameters used as inputs for your endpoint,
`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
for query string parameters.
4. The `@apiGroup` should be the same for all API Doc comments in a route file.

View file

@@ -1,74 +0,0 @@
const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")
const config = {
name: "Budibase API",
version: packageJson.version,
description: "Documenting the Budibase backend API",
title: "Budibase app service API",
}
const shouldOpen = process.argv[2]
const disallowed = []
function filter(parsedRouteFiles) {
const tagToSearch = "url"
for (let routeFile of parsedRouteFiles) {
for (let route of routeFile) {
let routeInfo = route["local"]
if (disallowed.includes(routeInfo[tagToSearch])) {
const idx = routeFile.indexOf(route)
routeFile.splice(idx, 1)
}
}
}
}
async function generate() {
// start by writing a config file
const configPath = join(__dirname, "config.json")
fs.writeFileSync(configPath, JSON.stringify(config))
const mainPath = join(__dirname, "..", "..")
const srcPath = join(mainPath, "src", "api", "routes")
const assetsPath = join(mainPath, "builder", "assets", "docs")
if (!fs.existsSync(assetsPath)) {
fs.mkdirSync(assetsPath, { recursive: true })
}
const options = {
src: [srcPath],
dest: assetsPath,
filters: {
main: {
postFilter: filter,
},
},
config: configPath,
}
const doc = createDoc(options)
if (typeof doc !== "boolean") {
const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
console.log(
`Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
)
} else {
throw "Unable to generate docs."
}
// delete the temporary config file
fs.unlinkSync(configPath)
setTimeout(async () => {
if (shouldOpen === "open") {
await open(join(assetsPath, "index.html"), { wait: false })
}
}, 2000)
}
generate().catch(err => {
console.error(err)
})

View file

@@ -1,320 +0,0 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")
/********************************************************
* Based on: https://github.com/fsbahman/apidoc-swagger *
********************************************************/
let swagger = {
swagger: "2.0",
info: {},
paths: {},
definitions: {},
}
function toSwagger(apidocJson, projectJson) {
swagger.info = addInfo(projectJson)
swagger.paths = extractPaths(apidocJson)
return swagger
}
let tagsRegex = /(<([^>]+)>)/gi
// Removes <p> </p> tags from text
function removeTags(text) {
return text ? text.replace(tagsRegex, "") : text
}
function addInfo(projectJson) {
let info = {}
info["title"] = projectJson.title || projectJson.name
info["version"] = projectJson.version
info["description"] = projectJson.description
return info
}
/**
* Extracts paths provided in json format
* post, patch, put request parameters are extracted in body
* get and delete are extracted to path parameters
* @param apidocJson
* @returns {{}}
*/
function extractPaths(apidocJson) {
let apiPaths = groupByUrl(apidocJson)
let paths = {}
for (let i = 0; i < apiPaths.length; i++) {
let verbs = apiPaths[i].verbs
let url = verbs[0].url
let pattern = pathToRegexp(url, null)
let matches = pattern.exec(url)
// Surrounds URL parameters with curly brackets -> :email with {email}
let pathKeys = []
for (let j = 1; j < matches.length; j++) {
let key = matches[j].slice(1)
url = url.replace(matches[j], "{" + key + "}")
pathKeys.push(key)
}
for (let j = 0; j < verbs.length; j++) {
let verb = verbs[j]
let type = verb.type
let obj = (paths[url] = paths[url] || {})
if (type === "post" || type === "patch" || type === "put") {
_.extend(
obj,
createPostPushPutOutput(verb, swagger.definitions, pathKeys)
)
} else {
_.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
}
}
}
return paths
}
function createPostPushPutOutput(verbs, definitions, pathKeys) {
let pathItemObject = {}
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
let params = []
let pathParams = createPathParameters(verbs, pathKeys)
pathParams = _.filter(pathParams, function (param) {
let hasKey = pathKeys.indexOf(param.name) !== -1
return !(param.in === "path" && !hasKey)
})
params = params.concat(pathParams)
let required =
verbs.parameter &&
verbs.parameter.fields &&
verbs.parameter.fields.Parameter &&
verbs.parameter.fields.Parameter.length > 0
params.push({
in: "body",
name: "body",
description: removeTags(verbs.description),
required: required,
schema: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
},
})
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: params,
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
function createVerbDefinitions(verbs, definitions) {
let result = {
topLevelParametersRef: null,
topLevelSuccessRef: null,
topLevelSuccessRefType: null,
}
let defaultObjectName = verbs.name
let fieldArrayResult = {}
if (verbs && verbs.parameter && verbs.parameter.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.parameter.fields.Parameter,
definitions,
verbs.name,
defaultObjectName
)
result.topLevelParametersRef = fieldArrayResult.topLevelRef
}
if (verbs && verbs.success && verbs.success.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.success.fields["Success 200"],
definitions,
verbs.name,
defaultObjectName
)
result.topLevelSuccessRef = fieldArrayResult.topLevelRef
result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
}
return result
}
function createFieldArrayDefinitions(
fieldArray,
definitions,
topLevelRef,
defaultObjectName
) {
let result = {
topLevelRef: topLevelRef,
topLevelRefType: null,
}
if (!fieldArray) {
return result
}
for (let i = 0; i < fieldArray.length; i++) {
let parameter = fieldArray[i]
let nestedName = createNestedName(parameter.field)
let objectName = nestedName.objectName
if (!objectName) {
objectName = defaultObjectName
}
let type = parameter.type
if (i === 0) {
result.topLevelRefType = type
if (parameter.type === "Object") {
objectName = nestedName.propertyName
nestedName.propertyName = null
} else if (parameter.type === "Array") {
objectName = nestedName.propertyName
nestedName.propertyName = null
result.topLevelRefType = "array"
}
result.topLevelRef = objectName
}
definitions[objectName] = definitions[objectName] || {
properties: {},
required: [],
}
if (nestedName.propertyName) {
let prop = {
type: (parameter.type || "").toLowerCase(),
description: removeTags(parameter.description),
}
if (parameter.type === "Object") {
prop.$ref = "#/definitions/" + parameter.field
}
let typeIndex = type.indexOf("[]")
if (typeIndex !== -1 && typeIndex === type.length - 2) {
prop.type = "array"
prop.items = {
type: type.slice(0, type.length - 2),
}
}
definitions[objectName]["properties"][nestedName.propertyName] = prop
if (!parameter.optional) {
let arr = definitions[objectName]["required"]
if (arr.indexOf(nestedName.propertyName) === -1) {
arr.push(nestedName.propertyName)
}
}
}
}
return result
}
function createNestedName(field) {
let propertyName = field
let objectName
let propertyNames = field.split(".")
if (propertyNames && propertyNames.length > 1) {
propertyName = propertyNames[propertyNames.length - 1]
propertyNames.pop()
objectName = propertyNames.join(".")
}
return {
propertyName: propertyName,
objectName: objectName,
}
}
/**
* Generate get, delete method output
* @param verbs
* @param definitions
* @returns {{}}
*/
function createGetDeleteOutput(verbs, definitions) {
let pathItemObject = {}
verbs.type = verbs.type === "del" ? "delete" : verbs.type
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: createPathParameters(verbs),
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
/**
* Iterate through all method parameters and create array of parameter objects which are stored as path parameters
* @param verbs
* @returns {Array}
*/
function createPathParameters(verbs) {
let pathItemObject = []
if (verbs.parameter && verbs.parameter.fields.Parameter) {
for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
let param = verbs.parameter.fields.Parameter[i]
let field = param.field
let type = param.type
pathItemObject.push({
name: field,
in: type === "file" ? "formData" : "path",
required: !param.optional,
type: param.type.toLowerCase(),
description: removeTags(param.description),
})
}
}
return pathItemObject
}
function groupByUrl(apidocJson) {
return _.chain(apidocJson)
.groupBy("url")
.toPairs()
.map(function (element) {
return _.zipObject(["url", "verbs"], element)
})
.value()
}
module.exports = toSwagger

View file

@@ -859,7 +859,8 @@
   "json",
   "internal",
   "barcodeqr",
-  "bigint"
+  "bigint",
+  "bb_reference"
 ],
 "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
 },
@@ -1064,7 +1065,8 @@
   "json",
   "internal",
   "barcodeqr",
-  "bigint"
+  "bigint",
+  "bb_reference"
 ],
 "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
 },
@@ -1280,7 +1282,8 @@
   "json",
   "internal",
   "barcodeqr",
-  "bigint"
+  "bigint",
+  "bb_reference"
 ],
 "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
 },

View file

@@ -782,6 +782,7 @@ components:
   - internal
   - barcodeqr
   - bigint
+  - bb_reference
   description: Defines the type of the column, most explain themselves, a link
     column is a relationship.
   constraints:
@@ -946,6 +947,7 @@ components:
   - internal
   - barcodeqr
   - bigint
+  - bb_reference
   description: Defines the type of the column, most explain themselves, a link
     column is a relationship.
   constraints:
@@ -1117,6 +1119,7 @@ components:
   - internal
   - barcodeqr
   - bigint
+  - bb_reference
   description: Defines the type of the column, most explain themselves, a link
     column is a relationship.
   constraints:

View file

@ -5,8 +5,11 @@ import {
FieldType, FieldType,
FilterType, FilterType,
IncludeRelationship, IncludeRelationship,
ManyToManyRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Operation, Operation,
PaginationJson, PaginationJson,
RelationshipFieldMetadata,
RelationshipsJson, RelationshipsJson,
RelationshipType, RelationshipType,
Row, Row,
@ -254,12 +257,20 @@ function fixArrayTypes(row: Row, table: Table) {
return row return row
} }
function isOneSide(field: FieldSchema) { function isOneSide(
field: RelationshipFieldMetadata
): field is OneToManyRelationshipFieldMetadata {
return ( return (
field.relationshipType && field.relationshipType.split("-")[0] === "one" field.relationshipType && field.relationshipType.split("-")[0] === "one"
) )
} }
function isManyToMany(
field: RelationshipFieldMetadata
): field is ManyToManyRelationshipFieldMetadata {
return !!(field as ManyToManyRelationshipFieldMetadata).through
}
function isEditableColumn(column: FieldSchema) { function isEditableColumn(column: FieldSchema) {
const isExternalAutoColumn = const isExternalAutoColumn =
column.autocolumn && column.autocolumn &&
@ -352,11 +363,11 @@ export class ExternalRequest<T extends Operation> {
} }
} }
// many to many // many to many
else if (field.through) { else if (isManyToMany(field)) {
// we're not inserting a doc, will be a bunch of update calls // we're not inserting a doc, will be a bunch of update calls
const otherKey: string = field.throughFrom || linkTablePrimary const otherKey: string = field.throughFrom || linkTablePrimary
const thisKey: string = field.throughTo || tablePrimary const thisKey: string = field.throughTo || tablePrimary
row[key].forEach((relationship: any) => { for (const relationship of row[key]) {
manyRelationships.push({ manyRelationships.push({
tableId: field.through || field.tableId, tableId: field.through || field.tableId,
isUpdate: false, isUpdate: false,
@ -365,14 +376,14 @@ export class ExternalRequest<T extends Operation> {
// leave the ID for enrichment later // leave the ID for enrichment later
[thisKey]: `{{ literal ${tablePrimary} }}`, [thisKey]: `{{ literal ${tablePrimary} }}`,
}) })
}) }
} }
// many to one // many to one
else { else {
const thisKey: string = "id" const thisKey: string = "id"
// @ts-ignore // @ts-ignore
const otherKey: string = field.fieldName const otherKey: string = field.fieldName
row[key].forEach((relationship: any) => { for (const relationship of row[key]) {
manyRelationships.push({ manyRelationships.push({
tableId: field.tableId, tableId: field.tableId,
isUpdate: true, isUpdate: true,
@ -381,7 +392,7 @@ export class ExternalRequest<T extends Operation> {
// leave the ID for enrichment later // leave the ID for enrichment later
[otherKey]: `{{ literal ${tablePrimary} }}`, [otherKey]: `{{ literal ${tablePrimary} }}`,
}) })
}) }
} }
} }
// we return the relationships that may need to be created in the through table // we return the relationships that may need to be created in the through table
@ -549,15 +560,12 @@ export class ExternalRequest<T extends Operation> {
if (!table.primary || !linkTable.primary) { if (!table.primary || !linkTable.primary) {
continue continue
} }
const definition: any = { const definition: RelationshipsJson = {
// if no foreign key specified then use the name of the field in other table
from: field.foreignKey || table.primary[0],
to: field.fieldName,
tableName: linkTableName, tableName: linkTableName,
// need to specify where to put this back into // need to specify where to put this back into
column: fieldName, column: fieldName,
} }
if (field.through) { if (isManyToMany(field)) {
const { tableName: throughTableName } = breakExternalTableId( const { tableName: throughTableName } = breakExternalTableId(
field.through field.through
) )
@ -567,6 +575,10 @@ export class ExternalRequest<T extends Operation> {
definition.to = field.throughFrom || linkTable.primary[0] definition.to = field.throughFrom || linkTable.primary[0]
definition.fromPrimary = table.primary[0] definition.fromPrimary = table.primary[0]
definition.toPrimary = linkTable.primary[0] definition.toPrimary = linkTable.primary[0]
} else {
// if no foreign key specified then use the name of the field in other table
definition.from = field.foreignKey || table.primary[0]
definition.to = field.fieldName
} }
relationships.push(definition) relationships.push(definition)
} }
@ -588,7 +600,7 @@ export class ExternalRequest<T extends Operation> {
const primaryKey = table.primary[0] const primaryKey = table.primary[0]
// make a new request to get the row with all its relationships // make a new request to get the row with all its relationships
// we need this to work out if any relationships need removed // we need this to work out if any relationships need removed
for (let field of Object.values(table.schema)) { for (const field of Object.values(table.schema)) {
if ( if (
field.type !== FieldTypes.LINK || field.type !== FieldTypes.LINK ||
!field.fieldName || !field.fieldName ||
@ -601,9 +613,9 @@ export class ExternalRequest<T extends Operation> {
const { tableName: relatedTableName } = breakExternalTableId(tableId) const { tableName: relatedTableName } = breakExternalTableId(tableId)
// @ts-ignore // @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0] const linkPrimaryKey = this.tables[relatedTableName].primary[0]
const manyKey = field.throughTo || primaryKey
const lookupField = isMany ? primaryKey : field.foreignKey const lookupField = isMany ? primaryKey : field.foreignKey
const fieldName = isMany ? manyKey : field.fieldName const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName
if (!lookupField || !row[lookupField]) { if (!lookupField || !row[lookupField]) {
continue continue
} }

View file

@@ -156,7 +156,10 @@ export async function destroy(ctx: UserCtx) {
   }
   const table = await sdk.tables.getTable(row.tableId)
   // update the row to include full relationships before deleting them
-  row = await outputProcessing(table, row, { squash: false })
+  row = await outputProcessing(table, row, {
+    squash: false,
+    skipBBReferences: true,
+  })
   // now remove the relationships
   await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_DELETE,
@@ -190,6 +193,7 @@ export async function bulkDestroy(ctx: UserCtx) {
   // they need to be the full rows (including previous relationships) for automations
   const processedRows = (await outputProcessing(table, rows, {
     squash: false,
+    skipBBReferences: true,
   })) as Row[]

   // remove the relationships first

View file

@@ -4,6 +4,8 @@ import { context } from "@budibase/backend-core"
 import {
   Ctx,
   FieldType,
+  ManyToOneRelationshipFieldMetadata,
+  OneToManyRelationshipFieldMetadata,
   Row,
   SearchFilters,
   Table,
@@ -19,7 +21,14 @@ function isForeignKey(key: string, table: Table) {
   const relationships = Object.values(table.schema).filter(
     column => column.type === FieldType.LINK
   )
-  return relationships.some(relationship => relationship.foreignKey === key)
+  return relationships.some(
+    relationship =>
+      (
+        relationship as
+          | OneToManyRelationshipFieldMetadata
+          | ManyToOneRelationshipFieldMetadata
+      ).foreignKey === key
+  )
 }

 validateJs.extend(validateJs.validators.datetime, {

View file

@@ -1,4 +1,4 @@
-import { FieldTypes, FormulaTypes } from "../../../constants"
+import { FormulaTypes } from "../../../constants"
 import { clearColumns } from "./utils"
 import { doesContainStrings } from "@budibase/string-templates"
 import { cloneDeep } from "lodash/fp"
@@ -6,12 +6,20 @@ import isEqual from "lodash/isEqual"
 import uniq from "lodash/uniq"
 import { updateAllFormulasInTable } from "../row/staticFormula"
 import { context } from "@budibase/backend-core"
-import { FieldSchema, Table } from "@budibase/types"
+import {
+  FieldSchema,
+  FieldType,
+  FormulaFieldMetadata,
+  Table,
+} from "@budibase/types"
 import sdk from "../../../sdk"
+import { isRelationshipColumn } from "../../../db/utils"

-function isStaticFormula(column: FieldSchema) {
+function isStaticFormula(
+  column: FieldSchema
+): column is FormulaFieldMetadata & { formulaType: FormulaTypes.STATIC } {
   return (
-    column.type === FieldTypes.FORMULA &&
+    column.type === FieldType.FORMULA &&
     column.formulaType === FormulaTypes.STATIC
   )
 }
@@ -56,8 +64,9 @@ async function checkIfFormulaNeedsCleared(
   for (let removed of removedColumns) {
     let tableToUse: Table | undefined = table
     // if relationship, get the related table
-    if (removed.type === FieldTypes.LINK) {
-      tableToUse = tables.find(table => table._id === removed.tableId)
+    if (removed.type === FieldType.LINK) {
+      const removedTableId = removed.tableId
+      tableToUse = tables.find(table => table._id === removedTableId)
     }
     if (!tableToUse) {
       continue
@@ -73,17 +82,18 @@
   }
   for (let relatedTableId of table.relatedFormula) {
     const relatedColumns = Object.values(table.schema).filter(
-      column => column.tableId === relatedTableId
+      column =>
+        column.type === FieldType.LINK && column.tableId === relatedTableId
     )
     const relatedTable = tables.find(table => table._id === relatedTableId)
     // look to see if the column was used in a relationship formula,
     // relationships won't be used for this
-    if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) {
+    if (relatedTable && relatedColumns && removed.type !== FieldType.LINK) {
       let relatedFormulaToRemove: string[] = []
       for (let column of relatedColumns) {
         relatedFormulaToRemove = relatedFormulaToRemove.concat(
           getFormulaThatUseColumn(relatedTable, [
-            column.fieldName!,
+            (column as any).fieldName!,
             removed.name,
           ])
         )
@@ -116,7 +126,7 @@ async function updateRelatedFormulaLinksOnTables(
   const initialTables = cloneDeep(tables)
   // first find the related column names
   const relatedColumns = Object.values(table.schema).filter(
-    col => col.type === FieldTypes.LINK
+    isRelationshipColumn
   )
   // we start by removing the formula field from all tables
   for (let otherTable of tables) {
@ -135,6 +145,7 @@ async function updateRelatedFormulaLinksOnTables(
if (!columns || columns.length === 0) { if (!columns || columns.length === 0) {
continue continue
} }
const relatedTable = tables.find( const relatedTable = tables.find(
related => related._id === relatedCol.tableId related => related._id === relatedCol.tableId
) )
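isStaticFormula above is now a user-defined type guard, so after the check TypeScript treats the column as a formula column with a static formulaType and no casts are needed downstream. A self-contained sketch of the same pattern with simplified stand-in types (not the real @budibase/types shapes):

enum FormulaTypes {
  STATIC = "static",
  DYNAMIC = "dynamic",
}

type TextColumn = { type: "text"; name: string }
type FormulaColumn = {
  type: "formula"
  name: string
  formula: string
  formulaType: FormulaTypes
}
type Column = TextColumn | FormulaColumn

// User-defined type guard: narrows Column to a static formula column.
function isStaticFormula(
  column: Column
): column is FormulaColumn & { formulaType: FormulaTypes.STATIC } {
  return (
    column.type === "formula" && column.formulaType === FormulaTypes.STATIC
  )
}

const columns: Column[] = [
  { type: "text", name: "title" },
  {
    type: "formula",
    name: "total",
    formula: "{{ sum }}",
    formulaType: FormulaTypes.STATIC,
  },
]

// After the guard, formula-specific fields are available without casts.
for (const column of columns) {
  if (isStaticFormula(column)) {
    console.log(column.name, column.formula)
  }
}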


@ -15,11 +15,16 @@ import { handleRequest } from "../row/external"
import { context, events } from "@budibase/backend-core" import { context, events } from "@budibase/backend-core"
import { isRows, isSchema, parse } from "../../../utilities/schema" import { isRows, isSchema, parse } from "../../../utilities/schema"
import { import {
AutoReason, BulkImportRequest,
BulkImportResponse,
Datasource, Datasource,
FieldSchema, FieldSchema,
ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Operation, Operation,
QueryJson, QueryJson,
RelationshipFieldMetadata,
RelationshipType, RelationshipType,
RenameColumn, RenameColumn,
SaveTableRequest, SaveTableRequest,
@ -74,10 +79,13 @@ function cleanupRelationships(
schema.type === FieldTypes.LINK && schema.type === FieldTypes.LINK &&
(!oldTable || table.schema[key] == null) (!oldTable || table.schema[key] == null)
) { ) {
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find( const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId table => table._id === schemaTableId
) )
const foreignKey = schema.foreignKey const foreignKey =
schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
schema.foreignKey
if (!relatedTable || !foreignKey) { if (!relatedTable || !foreignKey) {
continue continue
} }
@ -116,7 +124,7 @@ function otherRelationshipType(type?: string) {
function generateManyLinkSchema( function generateManyLinkSchema(
datasource: Datasource, datasource: Datasource,
column: FieldSchema, column: ManyToManyRelationshipFieldMetadata,
table: Table, table: Table,
relatedTable: Table relatedTable: Table
): Table { ): Table {
@ -151,10 +159,12 @@ function generateManyLinkSchema(
} }
function generateLinkSchema( function generateLinkSchema(
column: FieldSchema, column:
| OneToManyRelationshipFieldMetadata
| ManyToOneRelationshipFieldMetadata,
table: Table, table: Table,
relatedTable: Table, relatedTable: Table,
type: RelationshipType type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
) { ) {
if (!table.primary || !relatedTable.primary) { if (!table.primary || !relatedTable.primary) {
throw new Error("Unable to generate link schema, no primary keys") throw new Error("Unable to generate link schema, no primary keys")
@ -170,20 +180,22 @@ function generateLinkSchema(
} }
function generateRelatedSchema( function generateRelatedSchema(
linkColumn: FieldSchema, linkColumn: RelationshipFieldMetadata,
table: Table, table: Table,
relatedTable: Table, relatedTable: Table,
columnName: string columnName: string
) { ) {
// generate column for other table // generate column for other table
const relatedSchema = cloneDeep(linkColumn) const relatedSchema = cloneDeep(linkColumn)
const isMany2Many =
linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
// swap them from the main link // swap them from the main link
if (linkColumn.foreignKey) { if (!isMany2Many && linkColumn.foreignKey) {
relatedSchema.fieldName = linkColumn.foreignKey relatedSchema.fieldName = linkColumn.foreignKey
relatedSchema.foreignKey = linkColumn.fieldName relatedSchema.foreignKey = linkColumn.fieldName
} }
// is many to many // is many to many
else { else if (isMany2Many) {
// don't need to copy through, already got it // don't need to copy through, already got it
relatedSchema.fieldName = linkColumn.throughTo relatedSchema.fieldName = linkColumn.throughTo
relatedSchema.throughTo = linkColumn.throughFrom relatedSchema.throughTo = linkColumn.throughFrom
@ -197,8 +209,8 @@ function generateRelatedSchema(
table.schema[columnName] = relatedSchema table.schema[columnName] = relatedSchema
} }
function isRelationshipSetup(column: FieldSchema) { function isRelationshipSetup(column: RelationshipFieldMetadata) {
return column.foreignKey || column.through return (column as any).foreignKey || (column as any).through
} }
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) { export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
@ -257,14 +269,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) { if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
continue continue
} }
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find( const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId table => table._id === schemaTableId
) )
if (!relatedTable) { if (!relatedTable) {
continue continue
} }
const relatedColumnName = schema.fieldName! const relatedColumnName = schema.fieldName!
const relationType = schema.relationshipType! const relationType = schema.relationshipType
if (relationType === RelationshipType.MANY_TO_MANY) { if (relationType === RelationshipType.MANY_TO_MANY) {
const junctionTable = generateManyLinkSchema( const junctionTable = generateManyLinkSchema(
datasource, datasource,
@ -374,10 +387,12 @@ export async function destroy(ctx: UserCtx) {
return tableToDelete return tableToDelete
} }
export async function bulkImport(ctx: UserCtx) { export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const table = await sdk.tables.getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows }: { rows: unknown } = ctx.request.body const { rows } = ctx.request.body
const schema: unknown = table.schema const schema = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) { if (!rows || !isRows(rows) || !isSchema(schema)) {
ctx.throw(400, "Provided data import information is invalid.") ctx.throw(400, "Provided data import information is invalid.")
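Several of the hunks above swap the loose FieldSchema typing for the concrete relationship metadata types, which discriminate on relationshipType: many-to-many columns carry through/throughFrom/throughTo, while one-to-many and many-to-one columns carry foreignKey. A simplified model of that union and of how checking the discriminant narrows it (illustrative shapes only, not the exact @budibase/types definitions):

enum RelationshipType {
  ONE_TO_MANY = "one-to-many",
  MANY_TO_ONE = "many-to-one",
  MANY_TO_MANY = "many-to-many",
}

type ManyToMany = {
  relationshipType: RelationshipType.MANY_TO_MANY
  through: string // junction table id
  throughFrom: string
  throughTo: string
}
type WithForeignKey = {
  relationshipType: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
  foreignKey: string
}
type RelationshipColumn = ManyToMany | WithForeignKey

function describeRelationship(column: RelationshipColumn): string {
  if (column.relationshipType === RelationshipType.MANY_TO_MANY) {
    // narrowed to ManyToMany, so the through* fields are available
    return `junction ${column.through} (${column.throughFrom} -> ${column.throughTo})`
  }
  // narrowed to WithForeignKey, so foreignKey is available
  return `foreign key ${column.foreignKey}`
}

console.log(
  describeRelationship({
    relationshipType: RelationshipType.ONE_TO_MANY,
    foreignKey: "product_id",
  })
)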


@ -8,6 +8,8 @@ import {
import { isExternalTable, isSQL } from "../../../integrations/utils" import { isExternalTable, isSQL } from "../../../integrations/utils"
import { events } from "@budibase/backend-core" import { events } from "@budibase/backend-core"
import { import {
BulkImportRequest,
BulkImportResponse,
FetchTablesResponse, FetchTablesResponse,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
@ -18,7 +20,7 @@ import {
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv" import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets" import { builderSocket } from "../../../websockets"
import { cloneDeep } from "lodash" import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) { function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) { if (table && !tableId) {
@ -97,9 +99,17 @@ export async function destroy(ctx: UserCtx) {
builderSocket?.emitTableDeletion(ctx, deletedTable) builderSocket?.emitTableDeletion(ctx, deletedTable)
} }
export async function bulkImport(ctx: UserCtx) { export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
await pickApi({ tableId }).bulkImport(ctx) let tableBefore = await sdk.tables.getTable(tableId)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
// right now we don't trigger anything for bulk import because it // right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to // can only be done in the builder, but in the future we may need to
// think about events for bulk items // think about events for bulk items
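The generic bulkImport endpoint above now compares the table before and after the per-source import and persists it only when something changed, for example when an auto ID counter was bumped by the imported rows. A rough sketch of that pattern; the getTable/runImport/saveTable parameters are illustrative stand-ins for the sdk calls in the hunk:

import { isEqual } from "lodash"

// Sketch: run an import step and persist the table only when it changed.
async function importAndSyncTable<T>(
  tableId: string,
  getTable: (id: string) => Promise<T>,
  runImport: (table: T) => Promise<T>,
  saveTable: (table: T) => Promise<T>
): Promise<T> {
  const before = await getTable(tableId)
  const after = await runImport(before)
  if (!isEqual(before, after)) {
    // e.g. an auto ID column's lastID counter was advanced by the imported rows
    await saveTable(after)
  }
  return after
}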


@ -10,6 +10,8 @@ import {
} from "../../../utilities/rowProcessor" } from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula" import { runStaticFormulaChecks } from "./bulkFormula"
import { import {
BulkImportRequest,
BulkImportResponse,
RenameColumn, RenameColumn,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
@ -78,10 +80,10 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
// make sure that a column's type doesn't change, the column has to be removed // make sure that a column's type doesn't change, the column has to be removed
// if you want to change the type // if you want to change the type
if (oldTable && oldTable.schema) { if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) { for (const propKey of Object.keys(tableToSave.schema)) {
let oldColumn = oldTable.schema[propKey] let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) { if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) {
oldColumn.type = FieldTypes.AUTO oldTable.schema[propKey].type = FieldTypes.AUTO
} }
} }
} }
@ -206,7 +208,9 @@ export async function destroy(ctx: any) {
return tableToDelete return tableToDelete
} }
export async function bulkImport(ctx: any) { export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const table = await sdk.tables.getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body const { rows, identifierFields } = ctx.request.body
await handleDataImport(ctx.user, table, rows, identifierFields) await handleDataImport(ctx.user, table, rows, identifierFields)
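The bulkImport handlers are now typed as UserCtx<BulkImportRequest, BulkImportResponse>, giving a typed request body and response instead of any. A minimal sketch of the idea using simplified local types; the real UserCtx and request/response interfaces live in @budibase/types and carry more fields:

// Simplified stand-ins for the real @budibase/types definitions.
interface BulkImportRequest {
  rows: Record<string, any>[]
  identifierFields?: string[]
}
interface BulkImportResponse {
  message: string
}
interface Ctx<RequestBody, ResponseBody> {
  params: Record<string, string>
  request: { body: RequestBody }
  body?: ResponseBody
}

async function bulkImport(ctx: Ctx<BulkImportRequest, BulkImportResponse>) {
  // The body is typed, so rows is known to be an array here.
  const { rows, identifierFields } = ctx.request.body
  console.log(`importing ${rows.length} rows into ${ctx.params.tableId}`, identifierFields)
  ctx.body = { message: "import complete" }
}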


@ -20,7 +20,13 @@ import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { events, context } from "@budibase/backend-core" import { events, context } from "@budibase/backend-core"
import { ContextUser, Datasource, SourceName, Table } from "@budibase/types" import {
ContextUser,
Datasource,
Row,
SourceName,
Table,
} from "@budibase/types"
export async function clearColumns(table: any, columnNames: any) { export async function clearColumns(table: any, columnNames: any) {
const db = context.getAppDB() const db = context.getAppDB()
@ -144,12 +150,12 @@ export async function importToRows(
} }
export async function handleDataImport( export async function handleDataImport(
user: any, user: ContextUser,
table: any, table: Table,
rows: any, rows: Row[],
identifierFields: Array<string> = [] identifierFields: Array<string> = []
) { ) {
const schema: unknown = table.schema const schema = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) { if (!rows || !isRows(rows) || !isSchema(schema)) {
return table return table


@ -43,3 +43,7 @@ export enum Format {
export function isFormat(format: any): format is Format { export function isFormat(format: any): format is Format {
return Object.values(Format).includes(format as Format) return Object.values(Format).includes(format as Format)
} }
export function parseCsvExport<T>(value: string) {
return JSON.parse(value?.replace(/'/g, '"')) as T
}
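parseCsvExport above reverses the quote swap applied when complex values are written to CSV exports: single quotes are turned back into double quotes and the result is parsed as JSON. A hedged usage example; the exported cell value below is made up for illustration:

function parseCsvExport<T>(value: string) {
  return JSON.parse(value?.replace(/'/g, '"')) as T
}

// A relationship cell exported to CSV with its quotes swapped to single quotes:
const cell = "[{'_id': 'ro_ta_users_us_123', 'primaryDisplay': 'user@example.com'}]"
const parsed = parseCsvExport<{ _id: string; primaryDisplay: string }[]>(cell)
console.log(parsed[0]._id) // ro_ta_users_us_123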


@ -6,6 +6,8 @@ import * as setup from "./utilities"
import { context, InternalTable, roles, tenancy } from "@budibase/backend-core" import { context, InternalTable, roles, tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { import {
AutoFieldSubTypes,
FieldSchema,
FieldType, FieldType,
FieldTypeSubtypes, FieldTypeSubtypes,
MonthlyQuotaName, MonthlyQuotaName,
@ -171,7 +173,7 @@ describe.each([
"Row ID": { "Row ID": {
name: "Row ID", name: "Row ID",
type: FieldType.NUMBER, type: FieldType.NUMBER,
subtype: "autoID", subtype: AutoFieldSubTypes.AUTO_ID,
icon: "ri-magic-line", icon: "ri-magic-line",
autocolumn: true, autocolumn: true,
constraints: { constraints: {
@ -272,27 +274,27 @@ describe.each([
isInternal && isInternal &&
it("row values are coerced", async () => { it("row values are coerced", async () => {
const str = { const str: FieldSchema = {
type: FieldType.STRING, type: FieldType.STRING,
name: "str", name: "str",
constraints: { type: "string", presence: false }, constraints: { type: "string", presence: false },
} }
const attachment = { const attachment: FieldSchema = {
type: FieldType.ATTACHMENT, type: FieldType.ATTACHMENT,
name: "attachment", name: "attachment",
constraints: { type: "array", presence: false }, constraints: { type: "array", presence: false },
} }
const bool = { const bool: FieldSchema = {
type: FieldType.BOOLEAN, type: FieldType.BOOLEAN,
name: "boolean", name: "boolean",
constraints: { type: "boolean", presence: false }, constraints: { type: "boolean", presence: false },
} }
const number = { const number: FieldSchema = {
type: FieldType.NUMBER, type: FieldType.NUMBER,
name: "str", name: "str",
constraints: { type: "number", presence: false }, constraints: { type: "number", presence: false },
} }
const datetime = { const datetime: FieldSchema = {
type: FieldType.DATETIME, type: FieldType.DATETIME,
name: "datetime", name: "datetime",
constraints: { constraints: {
@ -301,7 +303,7 @@ describe.each([
datetime: { earliest: "", latest: "" }, datetime: { earliest: "", latest: "" },
}, },
} }
const arrayField = { const arrayField: FieldSchema = {
type: FieldType.ARRAY, type: FieldType.ARRAY,
constraints: { constraints: {
type: "array", type: "array",
@ -311,8 +313,7 @@ describe.each([
name: "Sample Tags", name: "Sample Tags",
sortable: false, sortable: false,
} }
const optsField = { const optsField: FieldSchema = {
fieldName: "Sample Opts",
name: "Sample Opts", name: "Sample Opts",
type: FieldType.OPTIONS, type: FieldType.OPTIONS,
constraints: { constraints: {
@ -1534,7 +1535,7 @@ describe.each([
describe.each([ describe.each([
[ [
"relationship fields", "relationship fields",
() => ({ (): Record<string, FieldSchema> => ({
user: { user: {
name: "user", name: "user",
relationshipType: RelationshipType.ONE_TO_MANY, relationshipType: RelationshipType.ONE_TO_MANY,
@ -1563,27 +1564,25 @@ describe.each([
], ],
[ [
"bb reference fields", "bb reference fields",
() => ({ (): Record<string, FieldSchema> => ({
user: { user: {
name: "user", name: "user",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.BB_REFERENCE, type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER, subtype: FieldTypeSubtypes.BB_REFERENCE.USER,
}, },
users: { users: {
name: "users", name: "users",
type: FieldType.BB_REFERENCE, type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USER, subtype: FieldTypeSubtypes.BB_REFERENCE.USERS,
relationshipType: RelationshipType.MANY_TO_MANY,
}, },
}), }),
() => config.createUser(), () => config.createUser(),
(row: Row) => ({ (row: Row) => ({
_id: row._id, _id: row._id,
primaryDisplay: row.email,
email: row.email, email: row.email,
firstName: row.firstName, firstName: row.firstName,
lastName: row.lastName, lastName: row.lastName,
primaryDisplay: row.email,
}), }),
], ],
])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => {


@ -1,6 +1,12 @@
import { generator } from "@budibase/backend-core/tests"
import { events, context } from "@budibase/backend-core" import { events, context } from "@budibase/backend-core"
import { FieldType, Table, ViewCalculation } from "@budibase/types" import {
FieldType,
SaveTableRequest,
RelationshipType,
Table,
ViewCalculation,
AutoFieldSubTypes,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions" import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities" import * as setup from "./utilities"
const { basicTable } = setup.structures const { basicTable } = setup.structures
@ -47,7 +53,7 @@ describe("/tables", () => {
}) })
it("creates a table via data import", async () => { it("creates a table via data import", async () => {
const table = basicTable() const table: SaveTableRequest = basicTable()
table.rows = [{ name: "test-name", description: "test-desc" }] table.rows = [{ name: "test-name", description: "test-desc" }]
const res = await createTable(table) const res = await createTable(table)
@ -182,6 +188,36 @@ describe("/tables", () => {
1 1
) )
}) })
it("should update Auto ID field after bulk import", async () => {
const table = await config.createTable({
name: "TestTable",
type: "table",
schema: {
autoId: {
name: "id",
type: FieldType.NUMBER,
subtype: AutoFieldSubTypes.AUTO_ID,
autocolumn: true,
constraints: {
type: "number",
presence: false,
},
},
},
})
let row = await config.api.row.save(table._id!, {})
expect(row.autoId).toEqual(1)
await config.api.row.bulkImport(table._id!, {
rows: [{ autoId: 2 }],
identifierFields: [],
})
row = await config.api.row.save(table._id!, {})
expect(row.autoId).toEqual(3)
})
}) })
describe("fetch", () => { describe("fetch", () => {
@ -352,9 +388,10 @@ describe("/tables", () => {
}, },
TestTable: { TestTable: {
type: FieldType.LINK, type: FieldType.LINK,
relationshipType: RelationshipType.ONE_TO_MANY,
name: "TestTable", name: "TestTable",
fieldName: "TestTable", fieldName: "TestTable",
tableId: testTable._id, tableId: testTable._id!,
constraints: { constraints: {
type: "array", type: "array",
}, },


@ -1,6 +1,11 @@
import { objectStore, roles, constants } from "@budibase/backend-core" import { objectStore, roles, constants } from "@budibase/backend-core"
import { FieldType as FieldTypes } from "@budibase/types" import { FieldType as FieldTypes } from "@budibase/types"
export { FieldType as FieldTypes, RelationshipType } from "@budibase/types" export {
FieldType as FieldTypes,
RelationshipType,
AutoFieldSubTypes,
FormulaTypes,
} from "@budibase/types"
export enum FilterTypes { export enum FilterTypes {
STRING = "string", STRING = "string",
@ -39,11 +44,6 @@ export const SwitchableTypes = CanSwitchTypes.reduce((prev, current) =>
prev ? prev.concat(current) : current prev ? prev.concat(current) : current
) )
export enum FormulaTypes {
STATIC = "static",
DYNAMIC = "dynamic",
}
export enum AuthTypes { export enum AuthTypes {
APP = "app", APP = "app",
BUILDER = "builder", BUILDER = "builder",
@ -132,14 +132,6 @@ export const USERS_TABLE_SCHEMA = {
primaryDisplay: "email", primaryDisplay: "email",
} }
export enum AutoFieldSubTypes {
CREATED_BY = "createdBy",
CREATED_AT = "createdAt",
UPDATED_BY = "updatedBy",
UPDATED_AT = "updatedAt",
AUTO_ID = "autoID",
}
export enum AutoFieldDefaultNames { export enum AutoFieldDefaultNames {
CREATED_BY = "Created By", CREATED_BY = "Created By",
CREATED_AT = "Created At", CREATED_AT = "Created At",


@ -7,7 +7,13 @@ import { employeeImport } from "./employeeImport"
import { jobsImport } from "./jobsImport" import { jobsImport } from "./jobsImport"
import { expensesImport } from "./expensesImport" import { expensesImport } from "./expensesImport"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { Table, Row, RelationshipType } from "@budibase/types" import {
Table,
Row,
RelationshipType,
FieldType,
TableSchema,
} from "@budibase/types"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs" export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory" export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
@ -28,7 +34,11 @@ export const DEFAULT_BB_DATASOURCE = defaultDatasource
function syncLastIds(table: Table, rowCount: number) { function syncLastIds(table: Table, rowCount: number) {
Object.keys(table.schema).forEach(key => { Object.keys(table.schema).forEach(key => {
const entry = table.schema[key] const entry = table.schema[key]
if (entry.autocolumn && entry.subtype == "autoID") { if (
entry.autocolumn &&
entry.type === FieldType.NUMBER &&
entry.subtype == AutoFieldSubTypes.AUTO_ID
) {
entry.lastID = rowCount entry.lastID = rowCount
} }
}) })
@ -42,7 +52,7 @@ async function tableImport(table: Table, data: Row[]) {
} }
// AUTO COLUMNS // AUTO COLUMNS
const AUTO_COLUMNS = { const AUTO_COLUMNS: TableSchema = {
"Created At": { "Created At": {
name: "Created At", name: "Created At",
type: FieldTypes.DATETIME, type: FieldTypes.DATETIME,


@ -7,7 +7,9 @@ import LinkDocument from "./LinkDocument"
import { import {
Database, Database,
FieldSchema, FieldSchema,
FieldType,
LinkDocumentValue, LinkDocumentValue,
RelationshipFieldMetadata,
RelationshipType, RelationshipType,
Row, Row,
Table, Table,
@ -133,7 +135,10 @@ class LinkController {
* Given the link field of this table, and the link field of the linked table, this makes sure * Given the link field of this table, and the link field of the linked table, this makes sure
* the state of relationship type is accurate on both. * the state of relationship type is accurate on both.
*/ */
handleRelationshipType(linkerField: FieldSchema, linkedField: FieldSchema) { handleRelationshipType(
linkerField: RelationshipFieldMetadata,
linkedField: RelationshipFieldMetadata
) {
if ( if (
!linkerField.relationshipType || !linkerField.relationshipType ||
linkerField.relationshipType === RelationshipType.MANY_TO_MANY linkerField.relationshipType === RelationshipType.MANY_TO_MANY
@ -183,7 +188,7 @@ class LinkController {
// if 1:N, ensure that this ID is not already attached to another record // if 1:N, ensure that this ID is not already attached to another record
const linkedTable = await this._db.get<Table>(field.tableId) const linkedTable = await this._db.get<Table>(field.tableId)
const linkedSchema = linkedTable.schema[field.fieldName!] const linkedSchema = linkedTable.schema[field.fieldName]
// We need to map the global users to metadata in each app for relationships // We need to map the global users to metadata in each app for relationships
if (field.tableId === InternalTables.USER_METADATA) { if (field.tableId === InternalTables.USER_METADATA) {
@ -200,7 +205,10 @@ class LinkController {
// iterate through the link IDs in the row field, see if any don't exist already // iterate through the link IDs in the row field, see if any don't exist already
for (let linkId of rowField) { for (let linkId of rowField) {
if (linkedSchema?.relationshipType === RelationshipType.ONE_TO_MANY) { if (
linkedSchema?.type === FieldType.LINK &&
linkedSchema?.relationshipType === RelationshipType.ONE_TO_MANY
) {
let links = ( let links = (
(await getLinkDocuments({ (await getLinkDocuments({
tableId: field.tableId, tableId: field.tableId,
@ -291,7 +299,7 @@ class LinkController {
*/ */
async removeFieldFromTable(fieldName: string) { async removeFieldFromTable(fieldName: string) {
let oldTable = this._oldTable let oldTable = this._oldTable
let field = oldTable?.schema[fieldName] as FieldSchema let field = oldTable?.schema[fieldName] as RelationshipFieldMetadata
const linkDocs = await this.getTableLinkDocs() const linkDocs = await this.getTableLinkDocs()
let toDelete = linkDocs.filter(linkDoc => { let toDelete = linkDocs.filter(linkDoc => {
let correctFieldName = let correctFieldName =
@ -351,9 +359,9 @@ class LinkController {
name: field.fieldName, name: field.fieldName,
type: FieldTypes.LINK, type: FieldTypes.LINK,
// these are the props of the table that initiated the link // these are the props of the table that initiated the link
tableId: table._id, tableId: table._id!,
fieldName: fieldName, fieldName: fieldName,
}) } as RelationshipFieldMetadata)
// update table schema after checking relationship types // update table schema after checking relationship types
schema[fieldName] = fields.linkerField schema[fieldName] = fields.linkerField


@ -1,13 +1,9 @@
import { ViewName, getQueryIndex } from "../utils" import { ViewName, getQueryIndex, isRelationshipColumn } from "../utils"
import { FieldTypes } from "../../constants" import { FieldTypes } from "../../constants"
import { createLinkView } from "../views/staticViews" import { createLinkView } from "../views/staticViews"
import { context, logging } from "@budibase/backend-core" import { context, logging } from "@budibase/backend-core"
import { import { LinkDocument, LinkDocumentValue, Table } from "@budibase/types"
FieldSchema,
LinkDocument,
LinkDocumentValue,
Table,
} from "@budibase/types"
export { createLinkView } from "../views/staticViews" export { createLinkView } from "../views/staticViews"
/** /**
@ -93,7 +89,7 @@ export function getUniqueByProp(array: any[], prop: string) {
export function getLinkedTableIDs(table: Table) { export function getLinkedTableIDs(table: Table) {
return Object.values(table.schema) return Object.values(table.schema)
.filter((column: FieldSchema) => column.type === FieldTypes.LINK) .filter(isRelationshipColumn)
.map(column => column.tableId) .map(column => column.tableId)
} }
@ -113,7 +109,7 @@ export async function getLinkedTable(id: string, tables: Table[]) {
export function getRelatedTableForField(table: Table, fieldName: string) { export function getRelatedTableForField(table: Table, fieldName: string) {
// look to see if its on the table, straight in the schema // look to see if its on the table, straight in the schema
const field = table.schema[fieldName] const field = table.schema[fieldName]
if (field != null) { if (field?.type === FieldTypes.LINK) {
return field.tableId return field.tableId
} }
for (let column of Object.values(table.schema)) { for (let column of Object.values(table.schema)) {


@ -1,6 +1,12 @@
import newid from "./newid" import newid from "./newid"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { DocumentType, VirtualDocumentType } from "@budibase/types" import {
DocumentType,
FieldSchema,
RelationshipFieldMetadata,
VirtualDocumentType,
} from "@budibase/types"
import { FieldTypes } from "../constants"
export { DocumentType, VirtualDocumentType } from "@budibase/types" export { DocumentType, VirtualDocumentType } from "@budibase/types"
type Optional = string | null type Optional = string | null
@ -307,3 +313,9 @@ export function extractViewInfoFromID(viewId: string) {
tableId: res!.groups!["tableId"], tableId: res!.groups!["tableId"],
} }
} }
export function isRelationshipColumn(
column: FieldSchema
): column is RelationshipFieldMetadata {
return column.type === FieldTypes.LINK
}
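The new isRelationshipColumn guard is what lets the linkUtils hunk above call .filter(isRelationshipColumn).map(column => column.tableId) without casts, because filter with a type guard returns an array of the narrowed type. A compact illustration with simplified column shapes (not the real metadata types):

type LinkColumn = { type: "link"; name: string; tableId: string }
type TextColumn = { type: "string"; name: string }
type AnyColumn = LinkColumn | TextColumn

// Same shape as the guard above, over simplified column types.
function isRelationshipColumn(column: AnyColumn): column is LinkColumn {
  return column.type === "link"
}

const schema: Record<string, AnyColumn> = {
  name: { type: "string", name: "name" },
  orders: { type: "link", name: "orders", tableId: "ta_orders" },
}

// filter() with the guard yields LinkColumn[], so tableId needs no cast.
const linkedTableIds = Object.values(schema)
  .filter(isRelationshipColumn)
  .map(column => column.tableId)

console.log(linkedTableIds) // ["ta_orders"]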


@ -279,7 +279,8 @@ export interface components {
| "json" | "json"
| "internal" | "internal"
| "barcodeqr" | "barcodeqr"
| "bigint"; | "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */ /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: { constraints?: {
/** @enum {string} */ /** @enum {string} */
@ -386,7 +387,8 @@ export interface components {
| "json" | "json"
| "internal" | "internal"
| "barcodeqr" | "barcodeqr"
| "bigint"; | "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */ /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: { constraints?: {
/** @enum {string} */ /** @enum {string} */
@ -495,7 +497,8 @@ export interface components {
| "json" | "json"
| "internal" | "internal"
| "barcodeqr" | "barcodeqr"
| "bigint"; | "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */ /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: { constraints?: {
/** @enum {string} */ /** @enum {string} */


@ -111,7 +111,7 @@ describe("postgres integrations", () => {
fieldName: oneToManyRelationshipInfo.fieldName, fieldName: oneToManyRelationshipInfo.fieldName,
name: "oneToManyRelation", name: "oneToManyRelation",
relationshipType: RelationshipType.ONE_TO_MANY, relationshipType: RelationshipType.ONE_TO_MANY,
tableId: oneToManyRelationshipInfo.table._id, tableId: oneToManyRelationshipInfo.table._id!,
main: true, main: true,
}, },
manyToOneRelation: { manyToOneRelation: {
@ -122,7 +122,7 @@ describe("postgres integrations", () => {
fieldName: manyToOneRelationshipInfo.fieldName, fieldName: manyToOneRelationshipInfo.fieldName,
name: "manyToOneRelation", name: "manyToOneRelation",
relationshipType: RelationshipType.MANY_TO_ONE, relationshipType: RelationshipType.MANY_TO_ONE,
tableId: manyToOneRelationshipInfo.table._id, tableId: manyToOneRelationshipInfo.table._id!,
main: true, main: true,
}, },
manyToManyRelation: { manyToManyRelation: {
@ -133,7 +133,7 @@ describe("postgres integrations", () => {
fieldName: manyToManyRelationshipInfo.fieldName, fieldName: manyToManyRelationshipInfo.fieldName,
name: "manyToManyRelation", name: "manyToManyRelation",
relationshipType: RelationshipType.MANY_TO_MANY, relationshipType: RelationshipType.MANY_TO_MANY,
tableId: manyToManyRelationshipInfo.table._id, tableId: manyToManyRelationshipInfo.table._id!,
main: true, main: true,
}, },
}, },
@ -250,6 +250,7 @@ describe("postgres integrations", () => {
id: { id: {
name: "id", name: "id",
type: FieldType.AUTO, type: FieldType.AUTO,
autocolumn: true,
}, },
}, },
sourceId: postgresDatasource._id, sourceId: postgresDatasource._id,


@ -1,9 +1,17 @@
import { Knex, knex } from "knex" import { Knex, knex } from "knex"
import { Operation, QueryJson, RenameColumn, Table } from "@budibase/types" import {
FieldSubtype,
NumberFieldMetadata,
Operation,
QueryJson,
RenameColumn,
Table,
} from "@budibase/types"
import { breakExternalTableId } from "../utils" import { breakExternalTableId } from "../utils"
import SchemaBuilder = Knex.SchemaBuilder import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder import CreateTableBuilder = Knex.CreateTableBuilder
import { FieldTypes, RelationshipType } from "../../constants" import { FieldTypes, RelationshipType } from "../../constants"
import { utils } from "@budibase/shared-core"
function generateSchema( function generateSchema(
schema: CreateTableBuilder, schema: CreateTableBuilder,
@ -15,7 +23,7 @@ function generateSchema(
let primaryKey = table && table.primary ? table.primary[0] : null let primaryKey = table && table.primary ? table.primary[0] : null
const columns = Object.values(table.schema) const columns = Object.values(table.schema)
// all columns in a junction table will be meta // all columns in a junction table will be meta
let metaCols = columns.filter(col => col.meta) let metaCols = columns.filter(col => (col as NumberFieldMetadata).meta)
let isJunction = metaCols.length === columns.length let isJunction = metaCols.length === columns.length
// can't change primary once its set for now // can't change primary once its set for now
if (primaryKey && !oldTable && !isJunction) { if (primaryKey && !oldTable && !isJunction) {
@ -25,7 +33,9 @@ function generateSchema(
} }
// check if any columns need added // check if any columns need added
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey) const foreignKeys = Object.values(table.schema).map(
col => (col as any).foreignKey
)
for (let [key, column] of Object.entries(table.schema)) { for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct // skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null const oldColumn = oldTable ? oldTable.schema[key] : null
@ -41,9 +51,21 @@ function generateSchema(
case FieldTypes.OPTIONS: case FieldTypes.OPTIONS:
case FieldTypes.LONGFORM: case FieldTypes.LONGFORM:
case FieldTypes.BARCODEQR: case FieldTypes.BARCODEQR:
case FieldTypes.BB_REFERENCE:
schema.text(key) schema.text(key)
break break
case FieldTypes.BB_REFERENCE:
const subtype = column.subtype as FieldSubtype
switch (subtype) {
case FieldSubtype.USER:
schema.text(key)
break
case FieldSubtype.USERS:
schema.json(key)
break
default:
throw utils.unreachable(subtype)
}
break
case FieldTypes.NUMBER: case FieldTypes.NUMBER:
// if meta is specified then this is a junction table entry // if meta is specified then this is a junction table entry
if (column.meta && column.meta.toKey && column.meta.toTable) { if (column.meta && column.meta.toKey && column.meta.toTable) {


@ -249,7 +249,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
) )
} }
private internalConvertType(column: OracleColumn): { type: FieldTypes } { private internalConvertType(column: OracleColumn) {
if (this.isBooleanType(column)) { if (this.isBooleanType(column)) {
return { type: FieldTypes.BOOLEAN } return { type: FieldTypes.BOOLEAN }
} }
@ -307,6 +307,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
}, },
...this.internalConvertType(oracleColumn), ...this.internalConvertType(oracleColumn),
} }
table.schema[columnName] = fieldSchema table.schema[columnName] = fieldSchema
} }


@ -1,7 +1,12 @@
import { SqlQuery, Table, SearchFilters, Datasource } from "@budibase/types" import {
SqlQuery,
Table,
SearchFilters,
Datasource,
FieldType,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils" import { DocumentType, SEPARATOR } from "../db/utils"
import { import {
FieldTypes,
BuildSchemaErrors, BuildSchemaErrors,
InvalidColumns, InvalidColumns,
NoEmptyFilterStrings, NoEmptyFilterStrings,
@ -13,57 +18,57 @@ const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ") const ENCODED_SPACE = encodeURIComponent(" ")
const SQL_NUMBER_TYPE_MAP = { const SQL_NUMBER_TYPE_MAP = {
integer: FieldTypes.NUMBER, integer: FieldType.NUMBER,
int: FieldTypes.NUMBER, int: FieldType.NUMBER,
decimal: FieldTypes.NUMBER, decimal: FieldType.NUMBER,
smallint: FieldTypes.NUMBER, smallint: FieldType.NUMBER,
real: FieldTypes.NUMBER, real: FieldType.NUMBER,
float: FieldTypes.NUMBER, float: FieldType.NUMBER,
numeric: FieldTypes.NUMBER, numeric: FieldType.NUMBER,
mediumint: FieldTypes.NUMBER, mediumint: FieldType.NUMBER,
dec: FieldTypes.NUMBER, dec: FieldType.NUMBER,
double: FieldTypes.NUMBER, double: FieldType.NUMBER,
fixed: FieldTypes.NUMBER, fixed: FieldType.NUMBER,
"double precision": FieldTypes.NUMBER, "double precision": FieldType.NUMBER,
number: FieldTypes.NUMBER, number: FieldType.NUMBER,
binary_float: FieldTypes.NUMBER, binary_float: FieldType.NUMBER,
binary_double: FieldTypes.NUMBER, binary_double: FieldType.NUMBER,
money: FieldTypes.NUMBER, money: FieldType.NUMBER,
smallmoney: FieldTypes.NUMBER, smallmoney: FieldType.NUMBER,
} }
const SQL_DATE_TYPE_MAP = { const SQL_DATE_TYPE_MAP = {
timestamp: FieldTypes.DATETIME, timestamp: FieldType.DATETIME,
time: FieldTypes.DATETIME, time: FieldType.DATETIME,
datetime: FieldTypes.DATETIME, datetime: FieldType.DATETIME,
smalldatetime: FieldTypes.DATETIME, smalldatetime: FieldType.DATETIME,
date: FieldTypes.DATETIME, date: FieldType.DATETIME,
} }
const SQL_DATE_ONLY_TYPES = ["date"] const SQL_DATE_ONLY_TYPES = ["date"]
const SQL_TIME_ONLY_TYPES = ["time"] const SQL_TIME_ONLY_TYPES = ["time"]
const SQL_STRING_TYPE_MAP = { const SQL_STRING_TYPE_MAP = {
varchar: FieldTypes.STRING, varchar: FieldType.STRING,
char: FieldTypes.STRING, char: FieldType.STRING,
nchar: FieldTypes.STRING, nchar: FieldType.STRING,
nvarchar: FieldTypes.STRING, nvarchar: FieldType.STRING,
ntext: FieldTypes.STRING, ntext: FieldType.STRING,
enum: FieldTypes.STRING, enum: FieldType.STRING,
blob: FieldTypes.STRING, blob: FieldType.STRING,
long: FieldTypes.STRING, long: FieldType.STRING,
text: FieldTypes.STRING, text: FieldType.STRING,
} }
const SQL_BOOLEAN_TYPE_MAP = { const SQL_BOOLEAN_TYPE_MAP = {
boolean: FieldTypes.BOOLEAN, boolean: FieldType.BOOLEAN,
bit: FieldTypes.BOOLEAN, bit: FieldType.BOOLEAN,
tinyint: FieldTypes.BOOLEAN, tinyint: FieldType.BOOLEAN,
} }
const SQL_MISC_TYPE_MAP = { const SQL_MISC_TYPE_MAP = {
json: FieldTypes.JSON, json: FieldType.JSON,
bigint: FieldTypes.BIGINT, bigint: FieldType.BIGINT,
} }
const SQL_TYPE_MAP = { const SQL_TYPE_MAP = {
@ -154,7 +159,7 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
} }
export function convertSqlType(type: string) { export function convertSqlType(type: string) {
let foundType = FieldTypes.STRING let foundType = FieldType.STRING
const lcType = type.toLowerCase() const lcType = type.toLowerCase()
let matchingTypes = [] let matchingTypes = []
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) { for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
@ -169,7 +174,7 @@ export function convertSqlType(type: string) {
}).internal }).internal
} }
const schema: any = { type: foundType } const schema: any = { type: foundType }
if (foundType === FieldTypes.DATETIME) { if (foundType === FieldType.DATETIME) {
schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lcType) schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lcType)
schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lcType) schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lcType)
} }
@ -212,7 +217,7 @@ export function shouldCopyRelationship(
tableIds: string[] tableIds: string[]
) { ) {
return ( return (
column.type === FieldTypes.LINK && column.type === FieldType.LINK &&
column.tableId && column.tableId &&
tableIds.includes(column.tableId) tableIds.includes(column.tableId)
) )
@ -230,22 +235,23 @@ export function shouldCopySpecialColumn(
column: { type: string }, column: { type: string },
fetchedColumn: { type: string } | undefined fetchedColumn: { type: string } | undefined
) { ) {
const isFormula = column.type === FieldTypes.FORMULA const isFormula = column.type === FieldType.FORMULA
const specialTypes = [ const specialTypes = [
FieldTypes.OPTIONS, FieldType.OPTIONS,
FieldTypes.LONGFORM, FieldType.LONGFORM,
FieldTypes.ARRAY, FieldType.ARRAY,
FieldTypes.FORMULA, FieldType.FORMULA,
FieldType.BB_REFERENCE,
] ]
// column has been deleted, remove - formulas will never exist, always copy // column has been deleted, remove - formulas will never exist, always copy
if (!isFormula && column && !fetchedColumn) { if (!isFormula && column && !fetchedColumn) {
return false return false
} }
const fetchedIsNumber = const fetchedIsNumber =
!fetchedColumn || fetchedColumn.type === FieldTypes.NUMBER !fetchedColumn || fetchedColumn.type === FieldType.NUMBER
return ( return (
specialTypes.indexOf(column.type as FieldTypes) !== -1 || specialTypes.indexOf(column.type as FieldType) !== -1 ||
(fetchedIsNumber && column.type === FieldTypes.BOOLEAN) (fetchedIsNumber && column.type === FieldType.BOOLEAN)
) )
} }


@ -20,58 +20,73 @@ const tableWithUserCol: Table = {
}, },
} }
describe("searchInputMapping", () => { const tableWithUsersCol: Table = {
const globalUserId = dbCore.generateGlobalUserID() _id: tableId,
const userMedataId = dbCore.generateUserMetadataID(globalUserId) name: "table",
schema: {
user: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: FieldTypeSubtypes.BB_REFERENCE.USERS,
},
},
}
it("should be able to map ro_ to global user IDs", () => { describe.each([tableWithUserCol, tableWithUsersCol])(
const params: SearchParams = { "searchInputMapping",
tableId, col => {
query: { const globalUserId = dbCore.generateGlobalUserID()
equal: { const userMedataId = dbCore.generateUserMetadataID(globalUserId)
"1:user": userMedataId,
it("should be able to map ro_ to global user IDs", () => {
const params: SearchParams = {
tableId,
query: {
equal: {
"1:user": userMedataId,
},
}, },
}, }
} const output = searchInputMapping(col, params)
const output = searchInputMapping(tableWithUserCol, params) expect(output.query.equal!["1:user"]).toBe(globalUserId)
expect(output.query.equal!["1:user"]).toBe(globalUserId) })
})
it("should handle array of user IDs", () => { it("should handle array of user IDs", () => {
const params: SearchParams = { const params: SearchParams = {
tableId, tableId,
query: { query: {
oneOf: { oneOf: {
"1:user": [userMedataId, globalUserId], "1:user": [userMedataId, globalUserId],
},
}, },
}, }
} const output = searchInputMapping(col, params)
const output = searchInputMapping(tableWithUserCol, params) expect(output.query.oneOf!["1:user"]).toStrictEqual([
expect(output.query.oneOf!["1:user"]).toStrictEqual([ globalUserId,
globalUserId, globalUserId,
globalUserId, ])
]) })
})
it("shouldn't change any other input", () => { it("shouldn't change any other input", () => {
const email = "test@test.com" const email = "test@test.com"
const params: SearchParams = { const params: SearchParams = {
tableId, tableId,
query: { query: {
equal: { equal: {
"1:user": email, "1:user": email,
},
}, },
}, }
} const output = searchInputMapping(col, params)
const output = searchInputMapping(tableWithUserCol, params) expect(output.query.equal!["1:user"]).toBe(email)
expect(output.query.equal!["1:user"]).toBe(email) })
})
it("shouldn't error if no query supplied", () => { it("shouldn't error if no query supplied", () => {
const params: any = { const params: any = {
tableId, tableId,
} }
const output = searchInputMapping(tableWithUserCol, params) const output = searchInputMapping(col, params)
expect(output.query).toBeUndefined() expect(output.query).toBeUndefined()
}) })
}) }
)
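The test refactor above wraps the existing assertions in describe.each so the same searchInputMapping suite runs once for the single-user column table and once for the USERS column table. A small generic sketch of the pattern, assuming Jest globals; the schemas below are placeholders, not the real Table objects:

// Placeholder table shapes; the real suite builds full Table objects.
type TestTable = {
  name: string
  schema: Record<string, { type: string; subtype?: string }>
}

const tableWithUserCol: TestTable = {
  name: "user column",
  schema: { user: { type: "bb_reference", subtype: "user" } },
}
const tableWithUsersCol: TestTable = {
  name: "users column",
  schema: { user: { type: "bb_reference", subtype: "users" } },
}

// Each entry produces its own describe block running the same test bodies.
describe.each([tableWithUserCol, tableWithUsersCol])(
  "searchInputMapping",
  table => {
    it("keeps a user reference column in the schema", () => {
      expect(table.schema.user.type).toBe("bb_reference")
    })
  }
)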


@ -5,8 +5,10 @@ import {
Table, Table,
DocumentType, DocumentType,
SEPARATOR, SEPARATOR,
FieldSubtype,
} from "@budibase/types" } from "@budibase/types"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { utils } from "@budibase/shared-core"
function findColumnInQueries( function findColumnInQueries(
column: string, column: string,
@ -66,8 +68,14 @@ export function searchInputMapping(table: Table, options: SearchParams) {
for (let [key, column] of Object.entries(table.schema)) { for (let [key, column] of Object.entries(table.schema)) {
switch (column.type) { switch (column.type) {
case FieldType.BB_REFERENCE: case FieldType.BB_REFERENCE:
if (column.subtype === FieldTypeSubtypes.BB_REFERENCE.USER) { const subtype = column.subtype as FieldSubtype
userColumnMapping(key, options) switch (subtype) {
case FieldSubtype.USER:
case FieldSubtype.USERS:
userColumnMapping(key, options)
break
default:
utils.unreachable(subtype)
} }
break break
} }
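Both this searchInputMapping hunk and the SQL schema builder hunk earlier switch over the column subtype and call utils.unreachable in the default branch, the usual never-based exhaustiveness check: a new subtype without a matching case stops the code from compiling. A self-contained sketch with a local stand-in for the shared-core helper:

enum FieldSubtype {
  USER = "user",
  USERS = "users",
}

// Local stand-in for the shared-core unreachable helper: value is never here.
function unreachable(value: never, message = `No such case: ${value}`): never {
  throw new Error(message)
}

function columnStorage(subtype: FieldSubtype): "text" | "json" {
  switch (subtype) {
    case FieldSubtype.USER:
      return "text" // single reference stored as plain text
    case FieldSubtype.USERS:
      return "json" // list of references stored as JSON
    default:
      // Adding a new FieldSubtype without handling it turns this into a type error.
      return unreachable(subtype)
  }
}

console.log(columnStorage(FieldSubtype.USER)) // "text"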


@ -1,10 +1,11 @@
import cloneDeep from "lodash/cloneDeep" import cloneDeep from "lodash/cloneDeep"
import validateJs from "validate.js" import validateJs from "validate.js"
import { FieldType, Row, Table, TableSchema } from "@budibase/types" import { Row, Table, TableSchema } from "@budibase/types"
import { FieldTypes } from "../../../constants" import { FieldTypes } from "../../../constants"
import { makeExternalQuery } from "../../../integrations/base/query" import { makeExternalQuery } from "../../../integrations/base/query"
import { Format } from "../../../api/controllers/view/exporters" import { Format } from "../../../api/controllers/view/exporters"
import sdk from "../.." import sdk from "../.."
import { isRelationshipColumn } from "../../../db/utils"
export async function getDatasourceAndQuery(json: any) { export async function getDatasourceAndQuery(json: any) {
const datasourceId = json.endpoint.datasourceId const datasourceId = json.endpoint.datasourceId
@ -50,10 +51,10 @@ export function cleanExportRows(
} }
function isForeignKey(key: string, table: Table) { function isForeignKey(key: string, table: Table) {
const relationships = Object.values(table.schema).filter( const relationships = Object.values(table.schema).filter(isRelationshipColumn)
column => column.type === FieldType.LINK return relationships.some(
relationship => (relationship as any).foreignKey === key
) )
return relationships.some(relationship => relationship.foreignKey === key)
} }
export async function validate({ export async function validate({


@ -1,6 +1,6 @@
import { populateExternalTableSchemas } from "../validation" import { populateExternalTableSchemas } from "../validation"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { Datasource, Table } from "@budibase/types" import { AutoReason, Datasource, Table } from "@budibase/types"
import { isEqual } from "lodash" import { isEqual } from "lodash"
const SCHEMA = { const SCHEMA = {
@ -109,7 +109,7 @@ describe("validation and update of external table schemas", () => {
const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any) const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
const foreignKey = getForeignKeyColumn(response) const foreignKey = getForeignKeyColumn(response)
expect(foreignKey.autocolumn).toBe(true) expect(foreignKey.autocolumn).toBe(true)
expect(foreignKey.autoReason).toBe("foreign_key") expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY)
noOtherTableChanges(response) noOtherTableChanges(response)
}) })


@ -1,11 +1,9 @@
import { import {
AutoReason, AutoReason,
Datasource, Datasource,
FieldSchema,
FieldType, FieldType,
RelationshipType, RelationshipType,
} from "@budibase/types" } from "@budibase/types"
import { FieldTypes } from "../../../constants"
function checkForeignKeysAreAutoColumns(datasource: Datasource) { function checkForeignKeysAreAutoColumns(datasource: Datasource) {
if (!datasource.entities) { if (!datasource.entities) {
@ -15,10 +13,11 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
// make sure all foreign key columns are marked as auto columns // make sure all foreign key columns are marked as auto columns
const foreignKeys: { tableId: string; key: string }[] = [] const foreignKeys: { tableId: string; key: string }[] = []
for (let table of tables) { for (let table of tables) {
const relationships = Object.values(table.schema).filter( Object.values(table.schema).forEach(column => {
column => column.type === FieldType.LINK if (column.type !== FieldType.LINK) {
) return
relationships.forEach(relationship => { }
const relationship = column
if (relationship.relationshipType === RelationshipType.MANY_TO_MANY) { if (relationship.relationshipType === RelationshipType.MANY_TO_MANY) {
const tableId = relationship.through! const tableId = relationship.through!
foreignKeys.push({ key: relationship.throughTo!, tableId }) foreignKeys.push({ key: relationship.throughTo!, tableId })
@ -36,7 +35,7 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
} }
// now make sure schemas are all accurate // now make sure schemas are all accurate
for (let table of tables) { for (const table of tables) {
for (let column of Object.values(table.schema)) { for (let column of Object.values(table.schema)) {
const shouldBeForeign = foreignKeys.find( const shouldBeForeign = foreignKeys.find(
options => options.tableId === table._id && options.key === column.name options => options.tableId === table._id && options.key === column.name


@ -1,5 +1,11 @@
import _ from "lodash" import _ from "lodash"
import { FieldType, Table, TableSchema, ViewV2 } from "@budibase/types" import {
FieldSchema,
FieldType,
Table,
TableSchema,
ViewV2,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { enrichSchema, syncSchema } from ".." import { enrichSchema, syncSchema } from ".."
@ -316,7 +322,7 @@ describe("table sdk", () => {
...basicView, ...basicView,
} }
const newTableSchema = { const newTableSchema: TableSchema = {
...basicTable.schema, ...basicTable.schema,
newField1: { newField1: {
type: FieldType.STRING, type: FieldType.STRING,
@ -403,7 +409,7 @@ describe("table sdk", () => {
}, },
} }
const newTableSchema = { const newTableSchema: TableSchema = {
...basicTable.schema, ...basicTable.schema,
newField1: { newField1: {
type: FieldType.STRING, type: FieldType.STRING,
@ -531,7 +537,7 @@ describe("table sdk", () => {
id: { id: {
...basicTable.schema.id, ...basicTable.schema.id,
type: FieldType.NUMBER, type: FieldType.NUMBER,
}, } as FieldSchema,
}, },
undefined undefined
) )


@ -54,6 +54,7 @@ import {
FieldType, FieldType,
RelationshipType, RelationshipType,
CreateViewRequest, CreateViewRequest,
RelationshipFieldMetadata,
} from "@budibase/types" } from "@budibase/types"
import API from "./api" import API from "./api"
@ -584,10 +585,10 @@ class TestConfiguration {
tableConfig.schema[link] = { tableConfig.schema[link] = {
type: FieldType.LINK, type: FieldType.LINK,
fieldName: link, fieldName: link,
tableId: this.table._id, tableId: this.table._id!,
name: link, name: link,
relationshipType, relationshipType,
} } as RelationshipFieldMetadata
} }
if (this.datasource && !tableConfig.sourceId) { if (this.datasource && !tableConfig.sourceId) {


@ -4,6 +4,8 @@ import {
Row, Row,
ValidateResponse, ValidateResponse,
ExportRowsRequest, ExportRowsRequest,
BulkImportRequest,
BulkImportResponse,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base" import { TestAPI } from "./base"
@ -123,6 +125,19 @@ export class RowAPI extends TestAPI {
return request return request
} }
bulkImport = async (
tableId: string,
body: BulkImportRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<BulkImportResponse> => {
let request = this.request
.post(`/api/tables/${tableId}/import`)
.send(body)
.set(this.config.defaultHeaders())
.expect(expectStatus)
return (await request).body
}
search = async ( search = async (
sourceId: string, sourceId: string,
{ expectStatus } = { expectStatus: 200 } { expectStatus } = { expectStatus: 200 }


@ -8,7 +8,7 @@ const ROW_PREFIX = DocumentType.ROW + SEPARATOR
export async function processInputBBReferences( export async function processInputBBReferences(
value: string | string[] | { _id: string } | { _id: string }[], value: string | string[] | { _id: string } | { _id: string }[],
subtype: FieldSubtype subtype: FieldSubtype
): Promise<string | null> { ): Promise<string | string[] | null> {
let referenceIds: string[] = [] let referenceIds: string[] = []
if (Array.isArray(value)) { if (Array.isArray(value)) {
@ -41,33 +41,39 @@ export async function processInputBBReferences(
switch (subtype) { switch (subtype) {
case FieldSubtype.USER: case FieldSubtype.USER:
case FieldSubtype.USERS:
const { notFoundIds } = await cache.user.getUsers(referenceIds) const { notFoundIds } = await cache.user.getUsers(referenceIds)
if (notFoundIds?.length) { if (notFoundIds?.length) {
throw new InvalidBBRefError(notFoundIds[0], FieldSubtype.USER) throw new InvalidBBRefError(notFoundIds[0], FieldSubtype.USER)
} }
break if (subtype === FieldSubtype.USERS) {
return referenceIds
}
return referenceIds.join(",") || null
default: default:
throw utils.unreachable(subtype) throw utils.unreachable(subtype)
} }
return referenceIds.join(",") || null
} }
export async function processOutputBBReferences( export async function processOutputBBReferences(
value: string, value: string | string[],
subtype: FieldSubtype subtype: FieldSubtype
) { ) {
if (typeof value !== "string") { if (value === null || value === undefined) {
// Already processed or nothing to process // Already processed or nothing to process
return value || undefined return value || undefined
} }
const ids = value.split(",").filter(id => !!id) const ids =
typeof value === "string" ? value.split(",").filter(id => !!id) : value
switch (subtype) { switch (subtype) {
case FieldSubtype.USER: case FieldSubtype.USER:
case FieldSubtype.USERS:
const { users } = await cache.user.getUsers(ids) const { users } = await cache.user.getUsers(ids)
if (!users.length) { if (!users.length) {
return undefined return undefined

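The bbReferenceProcessor changes above make the USERS subtype keep an array of user IDs while the single USER subtype still collapses to a comma-joined string, and the output side now accepts either shape. A rough sketch of the input normalization, with validateUserIds standing in for the real user cache lookup:

enum FieldSubtype {
  USER = "user",
  USERS = "users",
}

// Stand-in for cache.user.getUsers: reports which IDs could not be found.
async function validateUserIds(ids: string[]): Promise<{ notFoundIds: string[] }> {
  return { notFoundIds: [] }
}

async function processInputUserReferences(
  value: string | string[],
  subtype: FieldSubtype
): Promise<string | string[] | null> {
  const referenceIds = Array.isArray(value)
    ? value
    : value.split(",").filter(id => !!id)

  const { notFoundIds } = await validateUserIds(referenceIds)
  if (notFoundIds.length) {
    throw new Error(`Invalid user reference: ${notFoundIds[0]}`)
  }
  // USERS keeps the array; USER collapses to a comma-joined string (or null when empty).
  if (subtype === FieldSubtype.USERS) {
    return referenceIds
  }
  return referenceIds.join(",") || null
}

processInputUserReferences("us_1,us_2", FieldSubtype.USERS).then(console.log) // ["us_1", "us_2"]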

@@ -5,7 +5,13 @@ import { ObjectStoreBuckets } from "../../constants"
 import { context, db as dbCore, objectStore } from "@budibase/backend-core"
 import { InternalTables } from "../../db/utils"
 import { TYPE_TRANSFORM_MAP } from "./map"
-import { FieldSubtype, Row, RowAttachment, Table } from "@budibase/types"
+import {
+  AutoColumnFieldMetadata,
+  FieldSubtype,
+  Row,
+  RowAttachment,
+  Table,
+} from "@budibase/types"
 import { cloneDeep } from "lodash/fp"
 import {
   processInputBBReferences,
@@ -201,9 +207,14 @@ export async function inputProcessing(
 export async function outputProcessing<T extends Row[] | Row>(
   table: Table,
   rows: T,
-  opts: { squash?: boolean; preserveLinks?: boolean } = {
+  opts: {
+    squash?: boolean
+    preserveLinks?: boolean
+    skipBBReferences?: boolean
+  } = {
     squash: true,
     preserveLinks: false,
+    skipBBReferences: false,
   }
 ): Promise<T> {
   let safeRows: Row[]
@@ -219,10 +230,7 @@ export async function outputProcessing<T extends Row[] | Row>(
     ? await linkRows.attachFullLinkedDocs(table, safeRows)
     : safeRows
-  // process formulas
-  enriched = processFormulas(table, enriched, { dynamic: true }) as Row[]
-  // set the attachments URLs
+  // process complex types: attachements, bb references...
   for (let [property, column] of Object.entries(table.schema)) {
     if (column.type === FieldTypes.ATTACHMENT) {
       for (let row of enriched) {
@@ -233,7 +241,10 @@ export async function outputProcessing<T extends Row[] | Row>(
           attachment.url = objectStore.getAppFileUrl(attachment.key)
         })
       }
-    } else if (column.type == FieldTypes.BB_REFERENCE) {
+    } else if (
+      !opts.skipBBReferences &&
+      column.type == FieldTypes.BB_REFERENCE
+    ) {
       for (let row of enriched) {
         row[property] = await processOutputBBReferences(
           row[property],
@@ -242,6 +253,10 @@ export async function outputProcessing<T extends Row[] | Row>(
       }
     }
   }
+  // process formulas after the complex types had been processed
+  enriched = processFormulas(table, enriched, { dynamic: true }) as Row[]
   if (opts.squash) {
     enriched = (await linkRows.squashLinksToPrimaryDisplay(
       table,
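
A hedged example of the new skipBBReferences option (the call site and variables are hypothetical); it leaves the stored user-reference IDs untouched instead of enriching them.

// assumes `table` and `rows` are already loaded elsewhere
const processed = await outputProcessing(table, rows, {
  squash: false,
  preserveLinks: true,
  skipBBReferences: true, // keep raw user IDs, e.g. when re-importing rows
})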

View file

@@ -4,10 +4,10 @@ import { FieldSchema, FieldType, RelationshipType } from "@budibase/types"
 describe("rowProcessor utility", () => {
   describe("fixAutoColumnSubType", () => {
-    let schema: FieldSchema = {
+    const schema: FieldSchema = {
       name: "",
       type: FieldType.LINK,
-      subtype: "", // missing subtype
+      subtype: undefined, // missing subtype
       icon: "ri-magic-line",
       autocolumn: true,
       constraints: { type: "array", presence: false },
@@ -22,31 +22,31 @@ describe("rowProcessor utility", () => {
       expect(fixAutoColumnSubType(schema).subtype).toEqual(
         AutoFieldSubTypes.CREATED_BY
       )
-      schema.subtype = ""
+      schema.subtype = undefined
       schema.name = AutoFieldDefaultNames.UPDATED_BY
       expect(fixAutoColumnSubType(schema).subtype).toEqual(
         AutoFieldSubTypes.UPDATED_BY
       )
-      schema.subtype = ""
+      schema.subtype = undefined
       schema.name = AutoFieldDefaultNames.CREATED_AT
       expect(fixAutoColumnSubType(schema).subtype).toEqual(
         AutoFieldSubTypes.CREATED_AT
       )
-      schema.subtype = ""
+      schema.subtype = undefined
       schema.name = AutoFieldDefaultNames.UPDATED_AT
       expect(fixAutoColumnSubType(schema).subtype).toEqual(
         AutoFieldSubTypes.UPDATED_AT
       )
-      schema.subtype = ""
+      schema.subtype = undefined
       schema.name = AutoFieldDefaultNames.AUTO_ID
       expect(fixAutoColumnSubType(schema).subtype).toEqual(
         AutoFieldSubTypes.AUTO_ID
       )
-      schema.subtype = ""
+      schema.subtype = undefined
     })
     it("returns the column if subtype exists", async () => {

View file

@@ -5,13 +5,20 @@ import {
   FormulaTypes,
 } from "../../constants"
 import { processStringSync } from "@budibase/string-templates"
-import { FieldSchema, Row, Table } from "@budibase/types"
+import {
+  AutoColumnFieldMetadata,
+  FieldSchema,
+  Row,
+  Table,
+} from "@budibase/types"
 /**
  * If the subtype has been lost for any reason this works out what
  * subtype the auto column should be.
  */
-export function fixAutoColumnSubType(column: FieldSchema) {
+export function fixAutoColumnSubType(
+  column: FieldSchema
+): AutoColumnFieldMetadata | FieldSchema {
   if (!column.autocolumn || !column.name || column.subtype) {
     return column
   }
@@ -47,9 +54,13 @@ export function processFormulas(
     rowArray = rows
   }
   for (let [column, schema] of Object.entries(table.schema)) {
+    if (schema.type !== FieldTypes.FORMULA) {
+      continue
+    }
     const isStatic = schema.formulaType === FormulaTypes.STATIC
     if (
-      schema.type !== FieldTypes.FORMULA ||
       schema.formula == null ||
       (dynamic && isStatic) ||
       (!dynamic && !isStatic)

View file

@@ -1,9 +1,13 @@
+import { FieldSubtype } from "@budibase/types"
 import { FieldTypes } from "../constants"
-import { ValidColumnNameRegex } from "@budibase/shared-core"
+import { ValidColumnNameRegex, utils } from "@budibase/shared-core"
+import { db } from "@budibase/backend-core"
+import { parseCsvExport } from "../api/controllers/view/exporters"
 interface SchemaColumn {
   readonly name: string
   readonly type: FieldTypes
+  readonly subtype: FieldSubtype
   readonly autocolumn?: boolean
   readonly constraints?: {
     presence: boolean
@@ -77,8 +81,14 @@ export function validate(rows: Rows, schema: Schema): ValidationResults {
   rows.forEach(row => {
     Object.entries(row).forEach(([columnName, columnData]) => {
       const columnType = schema[columnName]?.type
+      const columnSubtype = schema[columnName]?.subtype
       const isAutoColumn = schema[columnName]?.autocolumn
+      // If the column had an invalid value we don't want to override it
+      if (results.schemaValidation[columnName] === false) {
+        return
+      }
       // If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
       if (typeof columnType !== "string") {
         results.invalidColumns.push(columnName)
@@ -112,6 +122,11 @@ export function validate(rows: Rows, schema: Schema): ValidationResults {
         isNaN(new Date(columnData).getTime())
       ) {
         results.schemaValidation[columnName] = false
+      } else if (
+        columnType === FieldTypes.BB_REFERENCE &&
+        !isValidBBReference(columnData, columnSubtype)
+      ) {
+        results.schemaValidation[columnName] = false
       } else {
         results.schemaValidation[columnName] = true
       }
@@ -138,6 +153,7 @@ export function parse(rows: Rows, schema: Schema): Rows {
       }
       const columnType = schema[columnName].type
+      const columnSubtype = schema[columnName].subtype
       if (columnType === FieldTypes.NUMBER) {
         // If provided must be a valid number
@@ -147,6 +163,23 @@ export function parse(rows: Rows, schema: Schema): Rows {
         parsedRow[columnName] = columnData
           ? new Date(columnData).toISOString()
           : columnData
+      } else if (columnType === FieldTypes.BB_REFERENCE) {
+        const parsedValues =
+          !!columnData && parseCsvExport<{ _id: string }[]>(columnData)
+        if (!parsedValues) {
+          parsedRow[columnName] = undefined
+        } else {
+          switch (columnSubtype) {
+            case FieldSubtype.USER:
+              parsedRow[columnName] = parsedValues[0]?._id
+              break
+            case FieldSubtype.USERS:
+              parsedRow[columnName] = parsedValues.map(u => u._id)
+              break
+            default:
+              utils.unreachable(columnSubtype)
+          }
+        }
       } else {
         parsedRow[columnName] = columnData
       }
@@ -155,3 +188,32 @@ export function parse(rows: Rows, schema: Schema): Rows {
     return parsedRow
   })
 }
+function isValidBBReference(
+  columnData: any,
+  columnSubtype: FieldSubtype
+): boolean {
+  switch (columnSubtype) {
+    case FieldSubtype.USER:
+    case FieldSubtype.USERS:
+      if (typeof columnData !== "string") {
+        return false
+      }
+      const userArray = parseCsvExport<{ _id: string }[]>(columnData)
+      if (!Array.isArray(userArray)) {
+        return false
+      }
+      if (columnSubtype === FieldSubtype.USER && userArray.length > 1) {
+        return false
+      }
+      const constainsWrongId = userArray.find(
+        user => !db.isGlobalUserID(user._id)
+      )
+      return !constainsWrongId
+    default:
+      throw utils.unreachable(columnSubtype)
+  }
+}
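
To illustrate the CSV cell shape the new validation and parsing appear to expect, a standalone sketch (hypothetical IDs; parseCsvExport is assumed to JSON-parse the exported cell).

// an exported user-reference cell, as written by the row exporter
const cell = '[{"_id":"us_1111"},{"_id":"us_2222"}]'

const parsed = JSON.parse(cell) as { _id: string }[]

// "user" columns keep only the first id, "users" columns keep them all
const singleUser = parsed[0]?._id         // "us_1111"
const multiUsers = parsed.map(u => u._id) // ["us_1111", "us_2222"]

console.log(singleUser, multiUsers)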

View file

@@ -6,6 +6,7 @@ import {
   SearchFilter,
   SearchQuery,
   SearchQueryFields,
+  FieldSubtype,
 } from "@budibase/types"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet } from "./helpers"
@@ -16,7 +17,7 @@ const HBS_REGEX = /{{([^{].*?)}}/g
  * Returns the valid operator options for a certain data type
  */
 export const getValidOperatorsForType = (
-  type: FieldType,
+  fieldType: { type: FieldType; subtype?: FieldSubtype },
   field: string,
   datasource: Datasource & { tableId: any } // TODO: is this table id ever populated?
 ) => {
@@ -43,6 +44,7 @@ export const getValidOperatorsForType = (
     value: string
     label: string
   }[] = []
+  const { type, subtype } = fieldType
   if (type === FieldType.STRING) {
     ops = stringOps
   } else if (type === FieldType.NUMBER || type === FieldType.BIGINT) {
@@ -59,8 +61,10 @@ export const getValidOperatorsForType = (
     ops = numOps
   } else if (type === FieldType.FORMULA) {
     ops = stringOps.concat([Op.MoreThan, Op.LessThan])
-  } else if (type === FieldType.BB_REFERENCE) {
+  } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USER) {
     ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]
+  } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USERS) {
+    ops = [Op.Contains, Op.NotContains, Op.ContainsAny, Op.Empty, Op.NotEmpty]
   }
   // Only allow equal/not equal for _id in SQL tables
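
A hedged sketch of the new argument shape: the field's type and subtype now travel together, which is what lets "user" and "users" columns get different operators. The field name and datasource below are hypothetical.

import { dataFilters } from "@budibase/shared-core"
import { FieldType, FieldSubtype } from "@budibase/types"

// assumes a datasource object is in scope
const ops = dataFilters.getValidOperatorsForType(
  { type: FieldType.BB_REFERENCE, subtype: FieldSubtype.USERS },
  "assignees",
  datasource
)
// expected to include contains / not contains / contains any / empty / not empty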

View file

@@ -3,3 +3,4 @@ export * as dataFilters from "./filters"
 export * as helpers from "./helpers"
 export * as utils from "./utils"
 export * as sdk from "./sdk"
+export * from "./table"

View file

@@ -1,4 +1,10 @@
-import { ContextUser, User } from "@budibase/types"
+import {
+  ContextUser,
+  DocumentType,
+  SEPARATOR,
+  User,
+  InternalTable,
+} from "@budibase/types"
 import { getProdAppID } from "./applications"
 // checks if a user is specifically a builder, given an app ID
@@ -67,3 +73,21 @@ export function hasAdminPermissions(user?: User | ContextUser): boolean {
   }
   return !!user.admin?.global
 }
+export function getGlobalUserID(userId?: string): string | undefined {
+  if (typeof userId !== "string") {
+    return userId
+  }
+  const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
+  if (!userId.startsWith(prefix)) {
+    return userId
+  }
+  return userId.split(prefix)[1]
+}
+export function containsUserID(value: string | undefined): boolean {
+  if (typeof value !== "string") {
+    return false
+  }
+  return value.includes(`${DocumentType.USER}${SEPARATOR}`)
+}
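
A standalone sketch of what getGlobalUserID does once the constants are expanded (the prefix value is assumed from Budibase's document ID conventions and the ID is hypothetical).

// DocumentType.ROW + SEPARATOR + InternalTable.USER_METADATA + SEPARATOR
const prefix = "ro_ta_users_"

function toGlobalUserId(userId: string): string {
  // strips the per-app user metadata prefix, leaving the global user id
  return userId.startsWith(prefix) ? userId.split(prefix)[1] : userId
}

console.log(toGlobalUserId("ro_ta_users_us_1234")) // "us_1234"
console.log(toGlobalUserId("us_1234"))             // unchanged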

View file

@@ -0,0 +1,25 @@
+import { FieldType } from "@budibase/types"
+const allowDisplayColumnByType: Record<FieldType, boolean> = {
+  [FieldType.STRING]: true,
+  [FieldType.LONGFORM]: true,
+  [FieldType.OPTIONS]: true,
+  [FieldType.NUMBER]: true,
+  [FieldType.DATETIME]: true,
+  [FieldType.FORMULA]: true,
+  [FieldType.AUTO]: true,
+  [FieldType.INTERNAL]: true,
+  [FieldType.BARCODEQR]: true,
+  [FieldType.BIGINT]: true,
+  [FieldType.BOOLEAN]: false,
+  [FieldType.ARRAY]: false,
+  [FieldType.ATTACHMENT]: false,
+  [FieldType.LINK]: false,
+  [FieldType.JSON]: false,
+  [FieldType.BB_REFERENCE]: false,
+}
+export function canBeDisplayColumn(type: FieldType): boolean {
+  return !!allowDisplayColumnByType[type]
+}
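
A hypothetical usage of the new helper, filtering a table schema down to columns that are allowed as the display column (assumes a `table` object of the Table type is in scope).

import { canBeDisplayColumn } from "@budibase/shared-core"

const displayCandidates = Object.entries(table.schema)
  .filter(([, field]) => canBeDisplayColumn(field.type))
  .map(([name]) => name)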

View file

@@ -15,7 +15,8 @@
     "skipLibCheck": true,
     "paths": {
       "@budibase/types": ["../types/src"]
-    }
+    },
+    "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
   },
   "include": ["**/*.js", "**/*.ts"],
   "exclude": [

View file

@@ -3,8 +3,7 @@
   "compilerOptions": {
     "baseUrl": "..",
     "rootDir": "src",
-    "composite": true,
-    "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
+    "composite": true
   },
   "exclude": ["node_modules", "dist"]
 }

View file

@@ -29,7 +29,7 @@
     "dayjs": "^1.10.8",
     "handlebars": "^4.7.6",
     "lodash": "^4.17.20",
-    "vm2": "^3.9.15"
+    "vm2": "^3.9.19"
   },
   "devDependencies": {
     "@rollup/plugin-commonjs": "^17.1.0",

View file

@@ -1,4 +1,5 @@
 import {
+  Row,
   Table,
   TableRequest,
   TableSchema,
@@ -18,6 +19,17 @@ export interface TableResponse extends Table {
 export type FetchTablesResponse = TableResponse[]
-export interface SaveTableRequest extends TableRequest {}
+export interface SaveTableRequest extends TableRequest {
+  rows?: Row[]
+}
 export type SaveTableResponse = Table
+export interface BulkImportRequest {
+  rows: Row[]
+  identifierFields?: Array<string>
+}
+export interface BulkImportResponse {
+  message: string
+}
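
A hypothetical payload matching the new bulk import types (the field names are made up; Row and BulkImportRequest are assumed to be importable from @budibase/types).

const body: BulkImportRequest = {
  rows: [
    { name: "Alice", email: "alice@example.com" },
    { name: "Bob", email: "bob@example.com" },
  ],
  // fields presumably used to identify existing rows during import
  identifierFields: ["email"],
}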

View file

@@ -37,10 +37,12 @@ export interface Row extends Document {
 export enum FieldSubtype {
   USER = "user",
+  USERS = "users",
 }
 export const FieldTypeSubtypes = {
   BB_REFERENCE: {
     USER: FieldSubtype.USER,
+    USERS: FieldSubtype.USERS,
   },
 }

View file

@@ -7,3 +7,16 @@ export enum RelationshipType {
 export enum AutoReason {
   FOREIGN_KEY = "foreign_key",
 }
+export enum AutoFieldSubTypes {
+  CREATED_BY = "createdBy",
+  CREATED_AT = "createdAt",
+  UPDATED_BY = "updatedBy",
+  UPDATED_AT = "updatedAt",
+  AUTO_ID = "autoID",
+}
+export enum FormulaTypes {
+  STATIC = "static",
+  DYNAMIC = "dynamic",
+}

View file

@@ -1,7 +1,12 @@
 // all added by grid/table when defining the
 // column size, position and whether it can be viewed
-import { FieldType } from "../row"
-import { AutoReason, RelationshipType } from "./constants"
+import { FieldSubtype, FieldType } from "../row"
+import {
+  AutoFieldSubTypes,
+  AutoReason,
+  FormulaTypes,
+  RelationshipType,
+} from "./constants"
 export interface UIFieldMetadata {
   order?: number
@@ -10,28 +15,63 @@ export interface UIFieldMetadata {
   icon?: string
 }
-export interface RelationshipFieldMetadata {
+interface BaseRelationshipFieldMetadata
+  extends Omit<BaseFieldSchema, "subtype"> {
+  type: FieldType.LINK
   main?: boolean
-  fieldName?: string
-  tableId?: string
-  // below is used for SQL relationships, needed to define the foreign keys
-  // or the tables used for many-to-many relationships (through)
-  relationshipType?: RelationshipType
-  through?: string
-  foreignKey?: string
-  throughFrom?: string
-  throughTo?: string
+  fieldName: string
+  tableId: string
+  subtype?: AutoFieldSubTypes.CREATED_BY | AutoFieldSubTypes.UPDATED_BY
 }
-export interface AutoColumnFieldMetadata {
-  autocolumn?: boolean
-  subtype?: string
+// External tables use junction tables, internal tables don't require them
+type ManyToManyJunctionTableMetadata =
+  | {
+      through: string
+      throughFrom: string
+      throughTo: string
+    }
+  | {
+      through?: never
+      throughFrom?: never
+      throughTo?: never
+    }
+export type ManyToManyRelationshipFieldMetadata =
+  BaseRelationshipFieldMetadata & {
+    relationshipType: RelationshipType.MANY_TO_MANY
+  } & ManyToManyJunctionTableMetadata
+export interface OneToManyRelationshipFieldMetadata
+  extends BaseRelationshipFieldMetadata {
+  relationshipType: RelationshipType.ONE_TO_MANY
+  foreignKey?: string
+}
+export interface ManyToOneRelationshipFieldMetadata
+  extends BaseRelationshipFieldMetadata {
+  relationshipType: RelationshipType.MANY_TO_ONE
+  foreignKey?: string
+}
+export type RelationshipFieldMetadata =
+  | ManyToManyRelationshipFieldMetadata
+  | OneToManyRelationshipFieldMetadata
+  | ManyToOneRelationshipFieldMetadata
+export interface AutoColumnFieldMetadata
+  extends Omit<BaseFieldSchema, "subtype"> {
+  type: FieldType.AUTO
+  autocolumn: true
+  subtype?: AutoFieldSubTypes
   lastID?: number
   // if the column was turned to an auto-column for SQL, explains why (primary, foreign etc)
   autoReason?: AutoReason
 }
-export interface NumberFieldMetadata {
+export interface NumberFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
+  type: FieldType.NUMBER
+  subtype?: AutoFieldSubTypes.AUTO_ID
+  lastID?: number
+  autoReason?: AutoReason.FOREIGN_KEY
   // used specifically when Budibase generates external tables, this denotes if a number field
   // is a foreign key used for a many-to-many relationship
   meta?: {
@@ -40,18 +80,28 @@ export interface NumberFieldMetadata {
   }
 }
-export interface DateFieldMetadata {
+export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
+  type: FieldType.DATETIME
   ignoreTimezones?: boolean
   timeOnly?: boolean
+  subtype?: AutoFieldSubTypes.CREATED_AT | AutoFieldSubTypes.UPDATED_AT
 }
-export interface StringFieldMetadata {
+export interface LongFormFieldMetadata extends BaseFieldSchema {
+  type: FieldType.LONGFORM
   useRichText?: boolean | null
 }
-export interface FormulaFieldMetadata {
-  formula?: string
-  formulaType?: string
+export interface FormulaFieldMetadata extends BaseFieldSchema {
+  type: FieldType.FORMULA
+  formula: string
+  formulaType?: FormulaTypes
+}
+export interface BBReferenceFieldMetadata
+  extends Omit<BaseFieldSchema, "subtype"> {
+  type: FieldType.BB_REFERENCE
+  subtype: FieldSubtype.USER | FieldSubtype.USERS
 }
 export interface FieldConstraints {
@@ -77,22 +127,40 @@ export interface FieldConstraints {
   }
 }
-export interface FieldSchema
-  extends UIFieldMetadata,
-    DateFieldMetadata,
-    RelationshipFieldMetadata,
-    AutoColumnFieldMetadata,
-    StringFieldMetadata,
-    FormulaFieldMetadata,
-    NumberFieldMetadata {
+interface BaseFieldSchema extends UIFieldMetadata {
   type: FieldType
   name: string
   sortable?: boolean
   // only used by external databases, to denote the real type
   externalType?: string
   constraints?: FieldConstraints
+  autocolumn?: boolean
+  autoReason?: AutoReason.FOREIGN_KEY
+  subtype?: never
 }
+interface OtherFieldMetadata extends BaseFieldSchema {
+  type: Exclude<
+    FieldType,
+    | FieldType.DATETIME
+    | FieldType.LINK
+    | FieldType.AUTO
+    | FieldType.FORMULA
+    | FieldType.NUMBER
+    | FieldType.LONGFORM
+  >
+}
+export type FieldSchema =
+  | OtherFieldMetadata
+  | DateFieldMetadata
+  | RelationshipFieldMetadata
+  | AutoColumnFieldMetadata
+  | FormulaFieldMetadata
+  | NumberFieldMetadata
+  | LongFormFieldMetadata
+  | BBReferenceFieldMetadata
 export interface TableSchema {
   [key: string]: FieldSchema
 }
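
A short sketch of the narrowing the new discriminated union enables: checking `type` tells the compiler which extra properties are available. The helper below is hypothetical; the types are assumed to be importable from @budibase/types.

import { FieldSchema, FieldType } from "@budibase/types"

function describeColumn(field: FieldSchema): string {
  switch (field.type) {
    case FieldType.FORMULA:
      return `formula column: ${field.formula}` // formula is required on this branch
    case FieldType.LINK:
      return `link to ${field.tableId} (${field.relationshipType})`
    case FieldType.BB_REFERENCE:
      return `reference to ${field.subtype}` // "user" | "users"
    default:
      return `${field.name} (${field.type})`
  }
}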

View file

@@ -15,7 +15,6 @@ export interface Table extends Document {
   constrained?: string[]
   sql?: boolean
   indexes?: { [key: string]: any }
-  rows?: { [key: string]: any }
   created?: boolean
   rowHeight?: number
 }

View file

@@ -58,6 +58,10 @@ export const DocumentTypesToImport: DocumentType[] = [
   DocumentType.LAYOUT,
 ]
+export enum InternalTable {
+  USER_METADATA = "ta_users",
+}
 // these documents don't really exist, they are part of other
 // documents or enriched into existence as part of get requests
 export enum VirtualDocumentType {

View file

@@ -11,7 +11,8 @@
     "sourceMap": true,
     "declaration": true,
     "skipLibCheck": true,
-    "outDir": "dist"
+    "outDir": "dist",
+    "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
   },
   "include": ["src/**/*"],
   "exclude": ["node_modules", "dist", "**/*.spec.ts", "**/*.spec.js"]

View file

@@ -3,8 +3,7 @@
   "compilerOptions": {
     "baseUrl": ".",
    "rootDir": "./src",
-    "composite": true,
-    "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"
+    "composite": true
   },
   "exclude": ["node_modules", "dist"]
 }

View file

@@ -76,7 +76,7 @@
     "@swc/core": "1.3.71",
     "@swc/jest": "0.2.27",
     "@trendyol/jest-testcontainers": "2.1.1",
-    "@types/jest": "29.5.3",
+    "@types/jest": "29.5.5",
     "@types/jsonwebtoken": "8.5.1",
     "@types/koa": "2.13.4",
     "@types/koa__router": "8.0.8",

View file

@@ -4660,6 +4660,14 @@
     expect "^29.0.0"
     pretty-format "^29.0.0"
+"@types/jest@29.5.5":
+  version "29.5.5"
+  resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.5.tgz#727204e06228fe24373df9bae76b90f3e8236a2a"
+  integrity sha512-ebylz2hnsWR9mYvmBFbXJXr+33UPc4+ZdxyDXh5w0FlPBTfCVN3wPL+kuOiQt3xvrK419v7XWeAs+AeOksafXg==
+  dependencies:
+    expect "^29.0.0"
+    pretty-format "^29.0.0"
 "@types/json-schema@*", "@types/json-schema@^7.0.6", "@types/json-schema@^7.0.8":
   version "7.0.11"
   resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3"
@@ -21742,10 +21750,10 @@ vlq@^0.2.2:
   resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
   integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==
-vm2@3.9.17, vm2@^3.9.15, vm2@^3.9.8:
-  version "3.9.17"
-  resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.17.tgz#251b165ff8a0e034942b5181057305e39570aeab"
-  integrity sha512-AqwtCnZ/ERcX+AVj9vUsphY56YANXxRuqMb7GsDtAr0m0PcQX3u0Aj3KWiXM0YAHy7i6JEeHrwOnwXbGYgRpAw==
+vm2@^3.9.19:
+  version "3.9.19"
+  resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
+  integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==
   dependencies:
     acorn "^8.7.0"
     acorn-walk "^8.2.0"