diff --git a/.github/workflows/release-singleimage.yml b/.github/workflows/release-singleimage.yml
index bd01ed786a..78b5cdd577 100644
--- a/.github/workflows/release-singleimage.yml
+++ b/.github/workflows/release-singleimage.yml
@@ -18,7 +18,7 @@ jobs:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
- root-reserve-mb: 35000
+ root-reserve-mb: 30000
swap-size-mb: 1024
remove-android: 'true'
remove-dotnet: 'true'
diff --git a/lerna.json b/lerna.json
index e64bf3e185..95e10c444e 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.11.22",
+ "version": "2.11.30",
"npmClient": "yarn",
"packages": [
"packages/*"
diff --git a/package.json b/package.json
index e5b6554fca..c38ef76e17 100644
--- a/package.json
+++ b/package.json
@@ -74,7 +74,6 @@
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
- "build:docs": "lerna run --stream build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",
"env:multi:disable": "lerna run --stream env:multi:disable",
diff --git a/packages/builder/src/components/common/NavItem.svelte b/packages/builder/src/components/common/NavItem.svelte
index a65da55c8f..2c8a862535 100644
--- a/packages/builder/src/components/common/NavItem.svelte
+++ b/packages/builder/src/components/common/NavItem.svelte
@@ -102,7 +102,7 @@
{/if}
- {text}
+
{text}
{#if selectedBy}
{/if}
@@ -227,9 +227,6 @@
.text {
font-weight: 600;
font-size: 12px;
- white-space: nowrap;
- overflow: hidden;
- text-overflow: ellipsis;
flex: 1 1 auto;
color: var(--spectrum-global-color-gray-900);
order: 2;
@@ -238,6 +235,11 @@
align-items: center;
gap: 8px;
}
+ .text span {
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ }
.scrollable .text {
flex: 0 0 auto;
max-width: 160px;
diff --git a/packages/builder/src/helpers/duplicate.js b/packages/builder/src/helpers/duplicate.js
index e84a600138..1547fcd4d1 100644
--- a/packages/builder/src/helpers/duplicate.js
+++ b/packages/builder/src/helpers/duplicate.js
@@ -3,16 +3,17 @@
* e.g.
* name all names result
* ------ ----------- --------
- * ("foo") ["foo"] "foo (1)"
- * ("foo") ["foo", "foo (1)"] "foo (2)"
- * ("foo (1)") ["foo", "foo (1)"] "foo (2)"
- * ("foo") ["foo", "foo (2)"] "foo (1)"
+ * ("foo") ["foo"] "foo 1"
+ * ("foo") ["foo", "foo 1"] "foo 2"
+ * ("foo 1") ["foo", "foo 1"] "foo 2"
+ * ("foo") ["foo", "foo 2"] "foo 1"
*
* Repl
*/
export const duplicateName = (name, allNames) => {
- const baseName = name.split(" (")[0]
- const isDuplicate = new RegExp(`${baseName}\\s\\((\\d+)\\)$`)
+ const duplicatePattern = new RegExp(`\\s(\\d+)$`)
+ const baseName = name.split(duplicatePattern)[0]
+ const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)
// get the sequence from matched names
const sequence = []
@@ -28,7 +29,6 @@ export const duplicateName = (name, allNames) => {
return false
})
sequence.sort((a, b) => a - b)
-
// get the next number in the sequence
let number
if (sequence.length === 0) {
@@ -46,5 +46,5 @@ export const duplicateName = (name, allNames) => {
}
}
- return `${baseName} (${number})`
+ return `${baseName} ${number}`
}
diff --git a/packages/builder/src/helpers/tests/duplicate.test.js b/packages/builder/src/helpers/tests/duplicate.test.js
index a571054e0c..400abed0aa 100644
--- a/packages/builder/src/helpers/tests/duplicate.test.js
+++ b/packages/builder/src/helpers/tests/duplicate.test.js
@@ -9,34 +9,34 @@ describe("duplicate", () => {
const duplicate = duplicateName(name, names)
- expect(duplicate).toBe("foo (1)")
+ expect(duplicate).toBe("foo 1")
})
it("with multiple existing", async () => {
- const names = ["foo", "foo (1)", "foo (2)"]
+ const names = ["foo", "foo 1", "foo 2"]
const name = "foo"
const duplicate = duplicateName(name, names)
- expect(duplicate).toBe("foo (3)")
+ expect(duplicate).toBe("foo 3")
})
it("with mixed multiple existing", async () => {
- const names = ["foo", "foo (1)", "foo (2)", "bar", "bar (1)", "bar (2)"]
+ const names = ["foo", "foo 1", "foo 2", "bar", "bar 1", "bar 2"]
const name = "foo"
const duplicate = duplicateName(name, names)
- expect(duplicate).toBe("foo (3)")
+ expect(duplicate).toBe("foo 3")
})
it("with incomplete sequence", async () => {
- const names = ["foo", "foo (2)", "foo (3)"]
+ const names = ["foo", "foo 2", "foo 3"]
const name = "foo"
const duplicate = duplicateName(name, names)
- expect(duplicate).toBe("foo (1)")
+ expect(duplicate).toBe("foo 1")
})
})
})
diff --git a/packages/client/manifest.json b/packages/client/manifest.json
index d987344956..47b935b420 100644
--- a/packages/client/manifest.json
+++ b/packages/client/manifest.json
@@ -3419,6 +3419,17 @@
"value": "custom"
}
},
+ {
+ "type": "event",
+ "label": "On change",
+ "key": "onChange",
+ "context": [
+ {
+ "label": "Field Value",
+ "key": "value"
+ }
+ ]
+ },
{
"type": "validation/string",
"label": "Validation",
diff --git a/packages/client/src/components/app/forms/CodeScanner.svelte b/packages/client/src/components/app/forms/CodeScanner.svelte
index 9895413446..04d6919157 100644
--- a/packages/client/src/components/app/forms/CodeScanner.svelte
+++ b/packages/client/src/components/app/forms/CodeScanner.svelte
@@ -128,6 +128,7 @@
{
dispatch("change", value)
}}
diff --git a/packages/server/package.json b/packages/server/package.json
index 795886d7cc..f66e9346e1 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -20,7 +20,6 @@
"test:watch": "jest --watch",
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
"build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
- "build:docs": "node ./scripts/docs/generate.js open",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",
@@ -112,7 +111,7 @@
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"validate.js": "0.13.1",
- "vm2": "3.9.17",
+ "vm2": "^3.9.19",
"worker-farm": "1.7.0",
"xml2js": "0.5.0",
"yargs": "13.2.4"
diff --git a/packages/server/scripts/docs/README.md b/packages/server/scripts/docs/README.md
deleted file mode 100644
index f1075754d3..0000000000
--- a/packages/server/scripts/docs/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-### Documentation
-
-This directory contains the scripts required to generate the APIDoc based documentation.
-You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).
-
-In general most API endpoints will look like:
-```js
- /**
- * @api {post} /api/:param/url Give it a name
- * @apiName Give it a name
- * @apiGroup group
- * @apiPermission permission
- * @apiDescription Describe what the endpoint does, any special cases the user
- * should be aware of.
- *
- * @apiParam {string} param describe a URL parameter.
- *
- * @apiParam (Body) input describe a field on the body.
- *
- * @apiSuccess {object} output describe the output.
- */
-```
-
-There are a few key points to note when writing API docs:
-1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
-2. Make sure to always have an `@api` definition at the start, which must always have the
-HTTP verb, the endpoint URL and the name.
-3. There are three ways you can specify parameters used as inputs for your endpoint,
-`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
-for query string parameters.
-4. The `@apiGroup` should be the same for all API Doc comments in a route file.
\ No newline at end of file
diff --git a/packages/server/scripts/docs/generate.js b/packages/server/scripts/docs/generate.js
deleted file mode 100644
index 0d6d42fd32..0000000000
--- a/packages/server/scripts/docs/generate.js
+++ /dev/null
@@ -1,74 +0,0 @@
-const fs = require("fs")
-const { join } = require("path")
-const { createDoc } = require("apidoc")
-const packageJson = require("../../package.json")
-const toSwagger = require("./toSwagger")
-const open = require("open")
-
-const config = {
- name: "Budibase API",
- version: packageJson.version,
- description: "Documenting the Budibase backend API",
- title: "Budibase app service API",
-}
-
-const shouldOpen = process.argv[2]
-const disallowed = []
-
-function filter(parsedRouteFiles) {
- const tagToSearch = "url"
- for (let routeFile of parsedRouteFiles) {
- for (let route of routeFile) {
- let routeInfo = route["local"]
- if (disallowed.includes(routeInfo[tagToSearch])) {
- const idx = routeFile.indexOf(route)
- routeFile.splice(idx, 1)
- }
- }
- }
-}
-
-async function generate() {
- // start by writing a config file
- const configPath = join(__dirname, "config.json")
- fs.writeFileSync(configPath, JSON.stringify(config))
- const mainPath = join(__dirname, "..", "..")
- const srcPath = join(mainPath, "src", "api", "routes")
- const assetsPath = join(mainPath, "builder", "assets", "docs")
- if (!fs.existsSync(assetsPath)) {
- fs.mkdirSync(assetsPath, { recursive: true })
- }
- const options = {
- src: [srcPath],
- dest: assetsPath,
- filters: {
- main: {
- postFilter: filter,
- },
- },
- config: configPath,
- }
- const doc = createDoc(options)
- if (typeof doc !== "boolean") {
- const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
- fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
- fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
- fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
- console.log(
- `Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
- )
- } else {
- throw "Unable to generate docs."
- }
- // delete the temporary config file
- fs.unlinkSync(configPath)
- setTimeout(async () => {
- if (shouldOpen === "open") {
- await open(join(assetsPath, "index.html"), { wait: false })
- }
- }, 2000)
-}
-
-generate().catch(err => {
- console.error(err)
-})
diff --git a/packages/server/scripts/docs/toSwagger.js b/packages/server/scripts/docs/toSwagger.js
deleted file mode 100644
index 1532e25fa6..0000000000
--- a/packages/server/scripts/docs/toSwagger.js
+++ /dev/null
@@ -1,320 +0,0 @@
-let _ = require("lodash")
-let { pathToRegexp } = require("path-to-regexp")
-
-/********************************************************
- * Based on: https://github.com/fsbahman/apidoc-swagger *
- ********************************************************/
-
-let swagger = {
- swagger: "2.0",
- info: {},
- paths: {},
- definitions: {},
-}
-
-function toSwagger(apidocJson, projectJson) {
- swagger.info = addInfo(projectJson)
- swagger.paths = extractPaths(apidocJson)
- return swagger
-}
-
-let tagsRegex = /(<([^>]+)>)/gi
-// Removes <p> tags from text
-function removeTags(text) {
- return text ? text.replace(tagsRegex, "") : text
-}
-
-function addInfo(projectJson) {
- let info = {}
- info["title"] = projectJson.title || projectJson.name
- info["version"] = projectJson.version
- info["description"] = projectJson.description
- return info
-}
-
-/**
- * Extracts paths provided in json format
- * post, patch, put request parameters are extracted in body
- * get and delete are extracted to path parameters
- * @param apidocJson
- * @returns {{}}
- */
-function extractPaths(apidocJson) {
- let apiPaths = groupByUrl(apidocJson)
- let paths = {}
- for (let i = 0; i < apiPaths.length; i++) {
- let verbs = apiPaths[i].verbs
- let url = verbs[0].url
- let pattern = pathToRegexp(url, null)
- let matches = pattern.exec(url)
-
- // Surrounds URL parameters with curly brackets -> :email with {email}
- let pathKeys = []
- for (let j = 1; j < matches.length; j++) {
- let key = matches[j].slice(1)
- url = url.replace(matches[j], "{" + key + "}")
- pathKeys.push(key)
- }
-
- for (let j = 0; j < verbs.length; j++) {
- let verb = verbs[j]
- let type = verb.type
-
- let obj = (paths[url] = paths[url] || {})
-
- if (type === "post" || type === "patch" || type === "put") {
- _.extend(
- obj,
- createPostPushPutOutput(verb, swagger.definitions, pathKeys)
- )
- } else {
- _.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
- }
- }
- }
- return paths
-}
-
-function createPostPushPutOutput(verbs, definitions, pathKeys) {
- let pathItemObject = {}
- let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
-
- let params = []
- let pathParams = createPathParameters(verbs, pathKeys)
- pathParams = _.filter(pathParams, function (param) {
- let hasKey = pathKeys.indexOf(param.name) !== -1
- return !(param.in === "path" && !hasKey)
- })
-
- params = params.concat(pathParams)
- let required =
- verbs.parameter &&
- verbs.parameter.fields &&
- verbs.parameter.fields.Parameter &&
- verbs.parameter.fields.Parameter.length > 0
-
- params.push({
- in: "body",
- name: "body",
- description: removeTags(verbs.description),
- required: required,
- schema: {
- $ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
- },
- })
-
- pathItemObject[verbs.type] = {
- tags: [verbs.group],
- summary: removeTags(verbs.description),
- consumes: ["application/json"],
- produces: ["application/json"],
- parameters: params,
- }
-
- if (verbDefinitionResult.topLevelSuccessRef) {
- pathItemObject[verbs.type].responses = {
- 200: {
- description: "successful operation",
- schema: {
- type: verbDefinitionResult.topLevelSuccessRefType,
- items: {
- $ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
- },
- },
- },
- }
- }
-
- return pathItemObject
-}
-
-function createVerbDefinitions(verbs, definitions) {
- let result = {
- topLevelParametersRef: null,
- topLevelSuccessRef: null,
- topLevelSuccessRefType: null,
- }
- let defaultObjectName = verbs.name
-
- let fieldArrayResult = {}
- if (verbs && verbs.parameter && verbs.parameter.fields) {
- fieldArrayResult = createFieldArrayDefinitions(
- verbs.parameter.fields.Parameter,
- definitions,
- verbs.name,
- defaultObjectName
- )
- result.topLevelParametersRef = fieldArrayResult.topLevelRef
- }
-
- if (verbs && verbs.success && verbs.success.fields) {
- fieldArrayResult = createFieldArrayDefinitions(
- verbs.success.fields["Success 200"],
- definitions,
- verbs.name,
- defaultObjectName
- )
- result.topLevelSuccessRef = fieldArrayResult.topLevelRef
- result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
- }
-
- return result
-}
-
-function createFieldArrayDefinitions(
- fieldArray,
- definitions,
- topLevelRef,
- defaultObjectName
-) {
- let result = {
- topLevelRef: topLevelRef,
- topLevelRefType: null,
- }
-
- if (!fieldArray) {
- return result
- }
-
- for (let i = 0; i < fieldArray.length; i++) {
- let parameter = fieldArray[i]
-
- let nestedName = createNestedName(parameter.field)
- let objectName = nestedName.objectName
- if (!objectName) {
- objectName = defaultObjectName
- }
- let type = parameter.type
- if (i === 0) {
- result.topLevelRefType = type
- if (parameter.type === "Object") {
- objectName = nestedName.propertyName
- nestedName.propertyName = null
- } else if (parameter.type === "Array") {
- objectName = nestedName.propertyName
- nestedName.propertyName = null
- result.topLevelRefType = "array"
- }
- result.topLevelRef = objectName
- }
-
- definitions[objectName] = definitions[objectName] || {
- properties: {},
- required: [],
- }
-
- if (nestedName.propertyName) {
- let prop = {
- type: (parameter.type || "").toLowerCase(),
- description: removeTags(parameter.description),
- }
- if (parameter.type === "Object") {
- prop.$ref = "#/definitions/" + parameter.field
- }
-
- let typeIndex = type.indexOf("[]")
- if (typeIndex !== -1 && typeIndex === type.length - 2) {
- prop.type = "array"
- prop.items = {
- type: type.slice(0, type.length - 2),
- }
- }
-
- definitions[objectName]["properties"][nestedName.propertyName] = prop
- if (!parameter.optional) {
- let arr = definitions[objectName]["required"]
- if (arr.indexOf(nestedName.propertyName) === -1) {
- arr.push(nestedName.propertyName)
- }
- }
- }
- }
-
- return result
-}
-
-function createNestedName(field) {
- let propertyName = field
- let objectName
- let propertyNames = field.split(".")
- if (propertyNames && propertyNames.length > 1) {
- propertyName = propertyNames[propertyNames.length - 1]
- propertyNames.pop()
- objectName = propertyNames.join(".")
- }
-
- return {
- propertyName: propertyName,
- objectName: objectName,
- }
-}
-
-/**
- * Generate get, delete method output
- * @param verbs
- * @param definitions
- * @returns {{}}
- */
-function createGetDeleteOutput(verbs, definitions) {
- let pathItemObject = {}
- verbs.type = verbs.type === "del" ? "delete" : verbs.type
-
- let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
- pathItemObject[verbs.type] = {
- tags: [verbs.group],
- summary: removeTags(verbs.description),
- consumes: ["application/json"],
- produces: ["application/json"],
- parameters: createPathParameters(verbs),
- }
- if (verbDefinitionResult.topLevelSuccessRef) {
- pathItemObject[verbs.type].responses = {
- 200: {
- description: "successful operation",
- schema: {
- type: verbDefinitionResult.topLevelSuccessRefType,
- items: {
- $ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
- },
- },
- },
- }
- }
- return pathItemObject
-}
-
-/**
- * Iterate through all method parameters and create array of parameter objects which are stored as path parameters
- * @param verbs
- * @returns {Array}
- */
-function createPathParameters(verbs) {
- let pathItemObject = []
- if (verbs.parameter && verbs.parameter.fields.Parameter) {
- for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
- let param = verbs.parameter.fields.Parameter[i]
- let field = param.field
- let type = param.type
- pathItemObject.push({
- name: field,
- in: type === "file" ? "formData" : "path",
- required: !param.optional,
- type: param.type.toLowerCase(),
- description: removeTags(param.description),
- })
- }
- }
- return pathItemObject
-}
-
-function groupByUrl(apidocJson) {
- return _.chain(apidocJson)
- .groupBy("url")
- .toPairs()
- .map(function (element) {
- return _.zipObject(["url", "verbs"], element)
- })
- .value()
-}
-
-module.exports = toSwagger
diff --git a/packages/server/specs/openapi.json b/packages/server/specs/openapi.json
index 8baba62a62..a6900adea7 100644
--- a/packages/server/specs/openapi.json
+++ b/packages/server/specs/openapi.json
@@ -859,7 +859,8 @@
"json",
"internal",
"barcodeqr",
- "bigint"
+ "bigint",
+ "bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1064,7 +1065,8 @@
"json",
"internal",
"barcodeqr",
- "bigint"
+ "bigint",
+ "bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1280,7 +1282,8 @@
"json",
"internal",
"barcodeqr",
- "bigint"
+ "bigint",
+ "bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
diff --git a/packages/server/specs/openapi.yaml b/packages/server/specs/openapi.yaml
index 0673794fb0..ad02a3cd9c 100644
--- a/packages/server/specs/openapi.yaml
+++ b/packages/server/specs/openapi.yaml
@@ -782,6 +782,7 @@ components:
- internal
- barcodeqr
- bigint
+ - bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -946,6 +947,7 @@ components:
- internal
- barcodeqr
- bigint
+ - bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -1117,6 +1119,7 @@ components:
- internal
- barcodeqr
- bigint
+ - bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts
index 2128e12c9c..967176c2e4 100644
--- a/packages/server/src/api/controllers/table/external.ts
+++ b/packages/server/src/api/controllers/table/external.ts
@@ -16,7 +16,9 @@ import { context, events } from "@budibase/backend-core"
import { isRows, isSchema, parse } from "../../../utilities/schema"
import {
BulkImportRequest,
+ BulkImportResponse,
Datasource,
+ FieldSchema,
ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
@@ -385,7 +387,9 @@ export async function destroy(ctx: UserCtx) {
return tableToDelete
}
-export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows } = ctx.request.body
const schema = table.schema
diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts
index 19a8f40ddd..afb2a9d12d 100644
--- a/packages/server/src/api/controllers/table/index.ts
+++ b/packages/server/src/api/controllers/table/index.ts
@@ -9,6 +9,7 @@ import { isExternalTable, isSQL } from "../../../integrations/utils"
import { events } from "@budibase/backend-core"
import {
BulkImportRequest,
+ BulkImportResponse,
FetchTablesResponse,
SaveTableRequest,
SaveTableResponse,
@@ -20,7 +21,7 @@ import {
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
-import { cloneDeep } from "lodash"
+import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) {
@@ -99,9 +100,17 @@ export async function destroy(ctx: UserCtx) {
builderSocket?.emitTableDeletion(ctx, deletedTable)
}
-export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
const tableId = ctx.params.tableId
- await pickApi({ tableId }).bulkImport(ctx)
+ let tableBefore = await sdk.tables.getTable(tableId)
+ let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
+
+ if (!isEqual(tableBefore, tableAfter)) {
+ await sdk.tables.saveTable(tableAfter)
+ }
+
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to
// think about events for bulk items
diff --git a/packages/server/src/api/controllers/table/internal.ts b/packages/server/src/api/controllers/table/internal.ts
index 11be19a8a7..eeb4a9eb5f 100644
--- a/packages/server/src/api/controllers/table/internal.ts
+++ b/packages/server/src/api/controllers/table/internal.ts
@@ -11,6 +11,7 @@ import {
import { runStaticFormulaChecks } from "./bulkFormula"
import {
BulkImportRequest,
+ BulkImportResponse,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
@@ -207,7 +208,9 @@ export async function destroy(ctx: any) {
return tableToDelete
}
-export async function bulkImport(ctx: UserCtx<BulkImportRequest>) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body
await handleDataImport(ctx.user, table, rows, identifierFields)
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index 4b1bc65793..0a80253210 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -1580,6 +1580,9 @@ describe.each([
(row: Row) => ({
_id: row._id,
primaryDisplay: row.email,
+ email: row.email,
+ firstName: row.firstName,
+ lastName: row.lastName,
}),
],
])("links - %s", (__, relSchema, dataGenerator, resultMapper) => {
diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts
index 061e7a7217..ded54729b9 100644
--- a/packages/server/src/api/routes/tests/table.spec.ts
+++ b/packages/server/src/api/routes/tests/table.spec.ts
@@ -1,4 +1,3 @@
-import { generator } from "@budibase/backend-core/tests"
import { events, context } from "@budibase/backend-core"
import {
FieldType,
@@ -6,6 +5,7 @@ import {
RelationshipType,
Table,
ViewCalculation,
+ AutoFieldSubTypes,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
@@ -188,6 +188,36 @@ describe("/tables", () => {
1
)
})
+
+ it("should update Auto ID field after bulk import", async () => {
+ const table = await config.createTable({
+ name: "TestTable",
+ type: "table",
+ schema: {
+ autoId: {
+ name: "id",
+ type: FieldType.NUMBER,
+ subtype: AutoFieldSubTypes.AUTO_ID,
+ autocolumn: true,
+ constraints: {
+ type: "number",
+ presence: false,
+ },
+ },
+ },
+ })
+
+ let row = await config.api.row.save(table._id!, {})
+ expect(row.autoId).toEqual(1)
+
+ await config.api.row.bulkImport(table._id!, {
+ rows: [{ autoId: 2 }],
+ identifierFields: [],
+ })
+
+ row = await config.api.row.save(table._id!, {})
+ expect(row.autoId).toEqual(3)
+ })
})
describe("fetch", () => {
diff --git a/packages/server/src/definitions/openapi.ts b/packages/server/src/definitions/openapi.ts
index a813f0f135..34014ba626 100644
--- a/packages/server/src/definitions/openapi.ts
+++ b/packages/server/src/definitions/openapi.ts
@@ -279,7 +279,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
- | "bigint";
+ | "bigint"
+ | "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@@ -386,7 +387,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
- | "bigint";
+ | "bigint"
+ | "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@@ -495,7 +497,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
- | "bigint";
+ | "bigint"
+ | "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
diff --git a/packages/server/src/tests/utilities/api/row.ts b/packages/server/src/tests/utilities/api/row.ts
index adeb96a593..bb880bb7da 100644
--- a/packages/server/src/tests/utilities/api/row.ts
+++ b/packages/server/src/tests/utilities/api/row.ts
@@ -4,6 +4,8 @@ import {
Row,
ValidateResponse,
ExportRowsRequest,
+ BulkImportRequest,
+ BulkImportResponse,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
@@ -123,6 +125,19 @@ export class RowAPI extends TestAPI {
return request
}
+ bulkImport = async (
+ tableId: string,
+ body: BulkImportRequest,
+ { expectStatus } = { expectStatus: 200 }
+  ): Promise<BulkImportResponse> => {
+ let request = this.request
+ .post(`/api/tables/${tableId}/import`)
+ .send(body)
+ .set(this.config.defaultHeaders())
+ .expect(expectStatus)
+ return (await request).body
+ }
+
search = async (
sourceId: string,
{ expectStatus } = { expectStatus: 200 }
diff --git a/packages/server/src/utilities/rowProcessor/bbReferenceProcessor.ts b/packages/server/src/utilities/rowProcessor/bbReferenceProcessor.ts
index c126530b8d..c7b8998bad 100644
--- a/packages/server/src/utilities/rowProcessor/bbReferenceProcessor.ts
+++ b/packages/server/src/utilities/rowProcessor/bbReferenceProcessor.ts
@@ -82,6 +82,9 @@ export async function processOutputBBReferences(
return users.map(u => ({
_id: u._id,
primaryDisplay: u.email,
+ email: u.email,
+ firstName: u.firstName,
+ lastName: u.lastName,
}))
default:
diff --git a/packages/server/src/utilities/rowProcessor/tests/bbReferenceProcessor.spec.ts b/packages/server/src/utilities/rowProcessor/tests/bbReferenceProcessor.spec.ts
index 10d339f6b4..b6174861d4 100644
--- a/packages/server/src/utilities/rowProcessor/tests/bbReferenceProcessor.spec.ts
+++ b/packages/server/src/utilities/rowProcessor/tests/bbReferenceProcessor.spec.ts
@@ -180,6 +180,9 @@ describe("bbReferenceProcessor", () => {
{
_id: user._id,
primaryDisplay: user.email,
+ email: user.email,
+ firstName: user.firstName,
+ lastName: user.lastName,
},
])
expect(cacheGetUsersSpy).toBeCalledTimes(1)
@@ -204,6 +207,9 @@ describe("bbReferenceProcessor", () => {
[user1, user2].map(u => ({
_id: u._id,
primaryDisplay: u.email,
+ email: u.email,
+ firstName: u.firstName,
+ lastName: u.lastName,
}))
)
)
diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json
index 6780840ed3..d9f56442a0 100644
--- a/packages/string-templates/package.json
+++ b/packages/string-templates/package.json
@@ -29,7 +29,7 @@
"dayjs": "^1.10.8",
"handlebars": "^4.7.6",
"lodash": "^4.17.20",
- "vm2": "^3.9.15"
+ "vm2": "^3.9.19"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^17.1.0",
diff --git a/packages/types/src/api/web/app/table.ts b/packages/types/src/api/web/app/table.ts
index 8fb0297a9e..cb5faaa9ea 100644
--- a/packages/types/src/api/web/app/table.ts
+++ b/packages/types/src/api/web/app/table.ts
@@ -29,3 +29,7 @@ export interface BulkImportRequest {
rows: Row[]
   identifierFields?: Array<string>
}
+
+export interface BulkImportResponse {
+ message: string
+}
diff --git a/yarn.lock b/yarn.lock
index d8e1d41d56..81c2815663 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -21750,10 +21750,10 @@ vlq@^0.2.2:
resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==
-vm2@3.9.17, vm2@^3.9.15, vm2@^3.9.8:
- version "3.9.17"
- resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.17.tgz#251b165ff8a0e034942b5181057305e39570aeab"
- integrity sha512-AqwtCnZ/ERcX+AVj9vUsphY56YANXxRuqMb7GsDtAr0m0PcQX3u0Aj3KWiXM0YAHy7i6JEeHrwOnwXbGYgRpAw==
+vm2@^3.9.19:
+ version "3.9.19"
+ resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
+ integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==
dependencies:
acorn "^8.7.0"
acorn-walk "^8.2.0"