From 478e297e9e3c2944fb20b750463db89f2dea1b72 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 24 Nov 2023 18:11:53 +0000 Subject: [PATCH 01/54] Initial work towards aliasing queries for SQL. --- .../server/src/api/controllers/row/alias.ts | 101 ++++++++++++++++++ packages/server/src/integrations/base/sql.ts | 1 + 2 files changed, 102 insertions(+) create mode 100644 packages/server/src/api/controllers/row/alias.ts diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts new file mode 100644 index 0000000000..8111396ea9 --- /dev/null +++ b/packages/server/src/api/controllers/row/alias.ts @@ -0,0 +1,101 @@ +import { QueryJson, SearchFilters, Table, Row } from "@budibase/types" +import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" +import { cloneDeep } from "lodash" + +class AliasTables { + character: string + aliases: Record + tableAliases: Record + + constructor() { + this.character = "a" + this.aliases = {} + this.tableAliases = {} + } + + getAlias(tableName: string) { + if (this.aliases[tableName]) { + return this.aliases[tableName] + } + this.character = String.fromCharCode(this.character.charCodeAt(0) + 1) + this.aliases[tableName] = this.character + this.tableAliases[this.character] = tableName + return this.character + } + + aliasField(tableNames: string[], field: string) { + if (field.includes(".")) { + const [tableName, column] = field.split(".") + if (tableNames.includes(tableName)) { + return `${this.getAlias(tableName)}.${column}` + } + } + return field + } + + reverse(tableNames: string[], rows: T): T { + const process = (row: Row) => { + const final: Row = {} + for (let [key, value] of Object.entries(row)) { + if (!key.includes(".")) { + final[key] = value + } else { + const [alias, column] = key.split(".") + const tableName = this.tableAliases[alias] || alias + final[`${tableName}.${column}`] = value + } + } + return final + } + if (Array.isArray(rows)) { + return 
rows.map(row => process(row)) as T + } else { + return process(rows) as T + } + } + + async queryWithAliasing(tableNames: string[], json: QueryJson) { + json = cloneDeep(json) + const aliasField = (field: string) => this.aliasField(tableNames, field) + const aliasTable = (table: Table) => ({ + ...table, + name: this.getAlias(table.name), + }) + // run through the query json to update anywhere a table may be used + if (json.resource?.fields) { + json.resource.fields = json.resource.fields.map(field => + aliasField(field) + ) + } + if (json.filters) { + for (let [filterKey, filter] of Object.entries(json.filters)) { + if (typeof filter !== "object") { + continue + } + const aliasedFilters: typeof filter = {} + for (let key of Object.keys(filter)) { + aliasedFilters[aliasField(key)] = filter + } + json.filters[filterKey as keyof SearchFilters] = aliasedFilters + } + } + if (json.relationships) { + json.relationships = json.relationships.map(relationship => ({ + ...relationship, + tableName: this.getAlias(relationship.tableName), + })) + } + if (json.meta?.table) { + json.meta.table = aliasTable(json.meta.table) + } + if (json.meta?.tables) { + const aliasedTables: Record = {} + for (let [tableName, table] of Object.entries(json.meta.tables)) { + aliasedTables[this.getAlias(tableName)] = aliasTable(table) + } + json.meta.tables = aliasedTables + } + const response = await getDatasourceAndQuery(json) + return this.reverse(tableNames, response) + } +} diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 29c8416b34..630c962a15 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -437,6 +437,7 @@ class InternalBuilder { read(knex: Knex, json: QueryJson, limit: number): KnexQuery { let { endpoint, resource, filters, paginate, relationships } = json + const tableName = endpoint.entityId // select all if not specified if (!resource) { From 
c16ad8614240cbb70f21179aeb2e6239916fcce2 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 24 Nov 2023 18:12:35 +0000 Subject: [PATCH 02/54] Updating reverse function. --- packages/server/src/api/controllers/row/alias.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 8111396ea9..d4937186d9 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -33,7 +33,7 @@ class AliasTables { return field } - reverse(tableNames: string[], rows: T): T { + reverse(rows: T): T { const process = (row: Row) => { const final: Row = {} for (let [key, value] of Object.entries(row)) { @@ -96,6 +96,6 @@ class AliasTables { json.meta.tables = aliasedTables } const response = await getDatasourceAndQuery(json) - return this.reverse(tableNames, response) + return this.reverse(response) } } From cb7c1898f2d29ac52f6b943dc04dd716b9ba128e Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 27 Nov 2023 19:02:06 +0000 Subject: [PATCH 03/54] Getting basic aliasing working after some testing. 
--- .../api/controllers/row/ExternalRequest.ts | 19 +++++++------- .../server/src/api/controllers/row/alias.ts | 25 +++++++++++-------- packages/server/src/integrations/base/sql.ts | 11 +++++--- packages/server/src/sdk/app/rows/utils.ts | 4 +-- packages/types/src/sdk/search.ts | 2 ++ 5 files changed, 36 insertions(+), 25 deletions(-) diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 7c98fecb9b..29851e457f 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -32,6 +32,7 @@ import { processObjectSync } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" import { processDates, processFormulas } from "../../../utilities/rowProcessor" import { db as dbCore } from "@budibase/backend-core" +import AliasTables from "./alias" import sdk from "../../../sdk" export interface ManyRelationship { @@ -178,13 +179,13 @@ function generateIdForRow( function getEndpoint(tableId: string | undefined, operation: string) { if (!tableId) { - return {} + throw new Error("Cannot get endpoint information - no table ID specified") } const { datasourceId, tableName } = breakExternalTableId(tableId) return { - datasourceId, - entityId: tableName, - operation, + datasourceId: datasourceId!, + entityId: tableName!, + operation: operation as Operation, } } @@ -704,7 +705,7 @@ export class ExternalRequest { // safety check, if there are no filters on deletion bad things happen if (Object.keys(filters).length !== 0) { const op = isMany ? Operation.DELETE : Operation.UPDATE - const body = isMany ? null : { [colName]: null } + const body = isMany ? 
undefined : { [colName]: null } promises.push( getDatasourceAndQuery({ endpoint: getEndpoint(tableId, op), @@ -807,7 +808,7 @@ export class ExternalRequest { } let json = { endpoint: { - datasourceId, + datasourceId: datasourceId!, entityId: tableName, operation, }, @@ -829,9 +830,9 @@ export class ExternalRequest { }, } - // can't really use response right now - const response = await getDatasourceAndQuery(json) - // handle many to many relationships now if we know the ID (could be auto increment) + const aliasing = new AliasTables(Object.keys(this.tables)) + const response = await aliasing.queryWithAliasing(json) + // handle many-to-many relationships now if we know the ID (could be auto increment) if (operation !== Operation.READ) { await this.handleManyRelationships( table._id || "", diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index d4937186d9..19be8db654 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -2,12 +2,14 @@ import { QueryJson, SearchFilters, Table, Row } from "@budibase/types" import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" import { cloneDeep } from "lodash" -class AliasTables { +export default class AliasTables { character: string aliases: Record tableAliases: Record + tableNames: string[] - constructor() { + constructor(tableNames: string[]) { + this.tableNames = tableNames this.character = "a" this.aliases = {} this.tableAliases = {} @@ -17,13 +19,15 @@ class AliasTables { if (this.aliases[tableName]) { return this.aliases[tableName] } - this.character = String.fromCharCode(this.character.charCodeAt(0) + 1) - this.aliases[tableName] = this.character - this.tableAliases[this.character] = tableName - return this.character + const char = this.character + this.aliases[tableName] = char + this.tableAliases[char] = tableName + this.character = String.fromCharCode(char.charCodeAt(0) + 1) + return 
char } - aliasField(tableNames: string[], field: string) { + aliasField(field: string) { + const tableNames = this.tableNames if (field.includes(".")) { const [tableName, column] = field.split(".") if (tableNames.includes(tableName)) { @@ -54,9 +58,9 @@ class AliasTables { } } - async queryWithAliasing(tableNames: string[], json: QueryJson) { + async queryWithAliasing(json: QueryJson) { json = cloneDeep(json) - const aliasField = (field: string) => this.aliasField(tableNames, field) + const aliasField = (field: string) => this.aliasField(field) const aliasTable = (table: Table) => ({ ...table, name: this.getAlias(table.name), @@ -82,7 +86,7 @@ class AliasTables { if (json.relationships) { json.relationships = json.relationships.map(relationship => ({ ...relationship, - tableName: this.getAlias(relationship.tableName), + alias: this.getAlias(relationship.tableName), })) } if (json.meta?.table) { @@ -95,6 +99,7 @@ class AliasTables { } json.meta.tables = aliasedTables } + json.endpoint.alias = this.getAlias(json.endpoint.entityId) const response = await getDatasourceAndQuery(json) return this.reverse(response) } diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 630c962a15..3147e8c670 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -439,6 +439,9 @@ class InternalBuilder { let { endpoint, resource, filters, paginate, relationships } = json const tableName = endpoint.entityId + const alias = endpoint.alias + const aliased = alias ? alias : tableName + const tableAliased = alias ? 
`${tableName} as ${alias}` : tableName // select all if not specified if (!resource) { resource = { fields: [] } @@ -463,20 +466,20 @@ class InternalBuilder { foundLimit = paginate.limit } // start building the query - let query: KnexQuery = knex(tableName).limit(foundLimit) + let query: KnexQuery = knex(tableAliased).limit(foundLimit) if (endpoint.schema) { query = query.withSchema(endpoint.schema) } if (foundOffset) { query = query.offset(foundOffset) } - query = this.addFilters(query, filters, { tableName }) + query = this.addFilters(query, filters, { tableName: aliased }) // add sorting to pre-query query = this.addSorting(query, json) // @ts-ignore let preQuery: KnexQuery = knex({ // @ts-ignore - [tableName]: query, + [aliased]: query, }).select(selectStatement) // have to add after as well (this breaks MS-SQL) if (this.client !== SqlClient.MS_SQL) { @@ -485,7 +488,7 @@ class InternalBuilder { // handle joins query = this.addRelationships( preQuery, - tableName, + aliased, relationships, endpoint.schema ) diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index d0227c7c6b..c160aaba3f 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -1,13 +1,13 @@ import cloneDeep from "lodash/cloneDeep" import validateJs from "validate.js" -import { Row, Table, TableSchema } from "@budibase/types" +import { QueryJson, Row, Table, TableSchema } from "@budibase/types" import { FieldTypes } from "../../../constants" import { makeExternalQuery } from "../../../integrations/base/query" import { Format } from "../../../api/controllers/view/exporters" import sdk from "../.." 
import { isRelationshipColumn } from "../../../db/utils" -export async function getDatasourceAndQuery(json: any) { +export async function getDatasourceAndQuery(json: QueryJson) { const datasourceId = json.endpoint.datasourceId const datasource = await sdk.datasources.get(datasourceId) return makeExternalQuery(datasource, json) diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts index 35fd148c05..1f9aa6c375 100644 --- a/packages/types/src/sdk/search.ts +++ b/packages/types/src/sdk/search.ts @@ -67,6 +67,7 @@ export interface RelationshipsJson { fromPrimary?: string toPrimary?: string tableName: string + alias?: string column: string } @@ -74,6 +75,7 @@ export interface QueryJson { endpoint: { datasourceId: string entityId: string + alias?: string operation: Operation schema?: string } From 65cddae9dac4f511c70634e4885987b672989c13 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 28 Nov 2023 18:43:38 +0000 Subject: [PATCH 04/54] Getting relationship aliasing working. 
--- .../server/src/api/controllers/row/alias.ts | 16 +++++++- packages/server/src/integrations/base/sql.ts | 39 ++++++++++++++----- packages/types/src/sdk/search.ts | 2 +- 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 19be8db654..0c7a4bb8a0 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -58,6 +58,16 @@ export default class AliasTables { } } + aliasMap(tableNames: (string | undefined)[]) { + const map: Record = {} + for (let tableName of tableNames) { + if (tableName) { + map[tableName] = this.getAlias(tableName) + } + } + return map + } + async queryWithAliasing(json: QueryJson) { json = cloneDeep(json) const aliasField = (field: string) => this.aliasField(field) @@ -86,7 +96,11 @@ export default class AliasTables { if (json.relationships) { json.relationships = json.relationships.map(relationship => ({ ...relationship, - alias: this.getAlias(relationship.tableName), + aliases: this.aliasMap([ + relationship.through, + relationship.tableName, + json.endpoint.entityId, + ]), })) } if (json.meta?.table) { diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 3147e8c670..f3f574b1af 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -330,6 +330,17 @@ class InternalBuilder { return query } + tableNameWithSchema( + tableName: string, + opts?: { alias?: string; schema?: string } + ) { + let withSchema = opts?.schema ? 
`${opts.schema}.${tableName}` : tableName + if (opts?.alias) { + withSchema += ` as ${opts.alias}` + } + return withSchema + } + addRelationships( query: KnexQuery, fromTable: string, @@ -339,9 +350,12 @@ class InternalBuilder { if (!relationships) { return query } - const tableSets: Record = {} + const tableSets: Record = {} + // add up all aliases + let aliases: Record = {} // aggregate into table sets (all the same to tables) for (let relationship of relationships) { + aliases = { ...aliases, ...relationship.aliases } const keyObj: { toTable: string; throughTable: string | undefined } = { toTable: relationship.tableName, throughTable: undefined, @@ -358,10 +372,17 @@ class InternalBuilder { } for (let [key, relationships] of Object.entries(tableSets)) { const { toTable, throughTable } = JSON.parse(key) - const toTableWithSchema = schema ? `${schema}.${toTable}` : toTable - const throughTableWithSchema = schema - ? `${schema}.${throughTable}` - : throughTable + const toAlias = aliases[toTable], + throughAlias = aliases[throughTable], + fromAlias = aliases[fromTable] + let toTableWithSchema = this.tableNameWithSchema(toTable, { + alias: toAlias, + schema, + }) + let throughTableWithSchema = this.tableNameWithSchema(throughTable, { + alias: throughAlias, + schema, + }) if (!throughTable) { // @ts-ignore query = query.leftJoin(toTableWithSchema, function () { @@ -369,7 +390,7 @@ class InternalBuilder { const from = relationship.from, to = relationship.to // @ts-ignore - this.orOn(`${fromTable}.${from}`, "=", `${toTable}.${to}`) + this.orOn(`${fromTable}.${from}`, "=", `${toAlias}.${to}`) } }) } else { @@ -381,9 +402,9 @@ class InternalBuilder { const from = relationship.from // @ts-ignore this.orOn( - `${fromTable}.${fromPrimary}`, + `${fromAlias}.${fromPrimary}`, "=", - `${throughTable}.${from}` + `${throughAlias}.${from}` ) } }) @@ -392,7 +413,7 @@ class InternalBuilder { const toPrimary = relationship.toPrimary const to = relationship.to // @ts-ignore - 
this.orOn(`${toTable}.${toPrimary}`, `${throughTable}.${to}`) + this.orOn(`${toAlias}.${toPrimary}`, `${throughAlias}.${to}`) } }) } diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts index 1f9aa6c375..a4045c2558 100644 --- a/packages/types/src/sdk/search.ts +++ b/packages/types/src/sdk/search.ts @@ -67,7 +67,7 @@ export interface RelationshipsJson { fromPrimary?: string toPrimary?: string tableName: string - alias?: string + aliases?: Record column: string } From 649025ca124a4b9b7f0714d9621edcb3c4ae3424 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 28 Nov 2023 18:45:05 +0000 Subject: [PATCH 05/54] Fixing missed from. --- packages/server/src/integrations/base/sql.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index f3f574b1af..c419edc805 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -390,7 +390,7 @@ class InternalBuilder { const from = relationship.from, to = relationship.to // @ts-ignore - this.orOn(`${fromTable}.${from}`, "=", `${toAlias}.${to}`) + this.orOn(`${fromAlias}.${from}`, "=", `${toAlias}.${to}`) } }) } else { From 5c4dc0dc8351310f6ab9c022594386a4543c47f6 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 1 Dec 2023 14:14:44 +0000 Subject: [PATCH 06/54] Fixing issue with aliasing. 
--- packages/server/src/integrations/base/sql.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index c419edc805..57af95eabb 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -372,9 +372,9 @@ class InternalBuilder { } for (let [key, relationships] of Object.entries(tableSets)) { const { toTable, throughTable } = JSON.parse(key) - const toAlias = aliases[toTable], - throughAlias = aliases[throughTable], - fromAlias = aliases[fromTable] + const toAlias = aliases[toTable] || toTable, + throughAlias = aliases[throughTable] || throughTable, + fromAlias = aliases[fromTable] || fromTable let toTableWithSchema = this.tableNameWithSchema(toTable, { alias: toAlias, schema, From 7eccbb851dac11b36e936ae52a67500f99cc7a52 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 1 Dec 2023 15:27:49 +0000 Subject: [PATCH 07/54] Fixing issues with other SQL functions than just reading. 
--- .../server/src/api/controllers/row/alias.ts | 2 +- packages/server/src/integrations/base/sql.ts | 46 +++++++++---------- 2 files changed, 23 insertions(+), 25 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 0c7a4bb8a0..fc00b505c4 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -88,7 +88,7 @@ export default class AliasTables { } const aliasedFilters: typeof filter = {} for (let key of Object.keys(filter)) { - aliasedFilters[aliasField(key)] = filter + aliasedFilters[aliasField(key)] = filter[key] } json.filters[filterKey as keyof SearchFilters] = aliasedFilters } diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 57af95eabb..14bcb532cc 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -421,12 +421,24 @@ class InternalBuilder { return query.limit(BASE_LIMIT) } - create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { - const { endpoint, body } = json - let query: KnexQuery = knex(endpoint.entityId) + knexWithAlias( + knex: Knex, + endpoint: { entityId: string; alias?: string; schema?: string } + ): { query: KnexQuery; name: string } { + const tableName = endpoint.entityId + const alias = endpoint.alias + const aliased = alias ? alias : tableName + const tableAliased = alias ? 
`${tableName} as ${alias}` : tableName + let query = knex(tableAliased) if (endpoint.schema) { query = query.withSchema(endpoint.schema) } + return { query, name: aliased } + } + + create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { + const { endpoint, body } = json + let { query } = this.knexWithAlias(knex, endpoint) const parsedBody = parseBody(body) // make sure no null values in body for creation for (let [key, value] of Object.entries(parsedBody)) { @@ -445,10 +457,7 @@ class InternalBuilder { bulkCreate(knex: Knex, json: QueryJson): KnexQuery { const { endpoint, body } = json - let query: KnexQuery = knex(endpoint.entityId) - if (endpoint.schema) { - query = query.withSchema(endpoint.schema) - } + let { query } = this.knexWithAlias(knex, endpoint) if (!Array.isArray(body)) { return query } @@ -459,10 +468,6 @@ class InternalBuilder { read(knex: Knex, json: QueryJson, limit: number): KnexQuery { let { endpoint, resource, filters, paginate, relationships } = json - const tableName = endpoint.entityId - const alias = endpoint.alias - const aliased = alias ? alias : tableName - const tableAliased = alias ? 
`${tableName} as ${alias}` : tableName // select all if not specified if (!resource) { resource = { fields: [] } @@ -487,10 +492,9 @@ class InternalBuilder { foundLimit = paginate.limit } // start building the query - let query: KnexQuery = knex(tableAliased).limit(foundLimit) - if (endpoint.schema) { - query = query.withSchema(endpoint.schema) - } + + let { query, name: aliased } = this.knexWithAlias(knex, endpoint) + query = query.limit(foundLimit) if (foundOffset) { query = query.offset(foundOffset) } @@ -518,10 +522,7 @@ class InternalBuilder { update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { const { endpoint, body, filters } = json - let query: KnexQuery = knex(endpoint.entityId) - if (endpoint.schema) { - query = query.withSchema(endpoint.schema) - } + let { query } = this.knexWithAlias(knex, endpoint) const parsedBody = parseBody(body) query = this.addFilters(query, filters, { tableName: endpoint.entityId }) // mysql can't use returning @@ -534,11 +535,8 @@ class InternalBuilder { delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { const { endpoint, filters } = json - let query: KnexQuery = knex(endpoint.entityId) - if (endpoint.schema) { - query = query.withSchema(endpoint.schema) - } - query = this.addFilters(query, filters, { tableName: endpoint.entityId }) + let { query, name: aliased } = this.knexWithAlias(knex, endpoint) + query = this.addFilters(query, filters, { tableName: aliased }) // mysql can't use returning if (opts.disableReturning) { return query.delete() From 3ce00c42a2e9751bcfb17d906b8b8a8c85f04752 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 18 Jan 2024 18:13:11 +0000 Subject: [PATCH 08/54] Adding SQL logging capabilities. 
--- packages/server/src/environment.ts | 1 + packages/server/src/integrations/base/sql.ts | 12 + .../src/integrations/microsoftSqlServer.ts | 1 + packages/server/src/integrations/mysql.ts | 1 + packages/server/src/integrations/oracle.ts | 1 + packages/server/src/integrations/postgres.ts | 4 +- yarn.lock | 660 +----------------- 7 files changed, 55 insertions(+), 625 deletions(-) diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts index f692a8b6cf..f46abe5b16 100644 --- a/packages/server/src/environment.ts +++ b/packages/server/src/environment.ts @@ -67,6 +67,7 @@ const environment = { DISABLE_RATE_LIMITING: process.env.DISABLE_RATE_LIMITING, MULTI_TENANCY: process.env.MULTI_TENANCY, ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS, + ENABLE_SQL_LOGGING: process.env.ENABLE_SQL_LOGGING, SELF_HOSTED: process.env.SELF_HOSTED, HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT, FORKED_PROCESS_NAME: process.env.FORKED_PROCESS_NAME || "main", diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 14bcb532cc..3375e175e6 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -671,6 +671,18 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { } return results.length ? 
results : [{ [operation.toLowerCase()]: true }] } + + log(query: string, values?: any[]) { + if (!environment.ENABLE_SQL_LOGGING) { + return + } + const sqlClient = this.getSqlClient() + let string = `[SQL] [${sqlClient.toUpperCase()}] query="${query}"` + if (values) { + string += ` values="${values.join(", ")}"` + } + console.log(string) + } } export default SqlQueryBuilder diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index d0a06d4476..e063933503 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -329,6 +329,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { operation === Operation.CREATE ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;` : query.sql + this.log(sql, query.bindings) return await request.query(sql) } catch (err: any) { let readableMessage = getReadableErrorMessage( diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index 8ec73307f4..6eebda8df5 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -261,6 +261,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { const bindings = opts?.disableCoercion ? 
baseBindings : bindingTypeCoerce(baseBindings) + this.log(query.sql, bindings) // Node MySQL is callback based, so we must wrap our call in a promise const response = await this.client!.query(query.sql, bindings) return response[0] diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index e9a2dc7998..1a1e440410 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -368,6 +368,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { const options: ExecuteOptions = { autoCommit: true } const bindings: BindParameters = query.bindings || [] + this.log(query.sql, bindings) return await connection.execute(query.sql, bindings, options) } finally { if (connection) { diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index 78955c06dc..f8cd2b62fc 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -262,7 +262,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus { } } try { - return await client.query(query.sql, query.bindings || []) + const bindings = query.bindings || [] + this.log(query.sql, bindings) + return await client.query(query.sql, bindings) } catch (err: any) { await this.closeConnection() let readableMessage = getReadableErrorMessage( diff --git a/yarn.lock b/yarn.lock index 91697cd151..fa746b9d72 100644 --- a/yarn.lock +++ b/yarn.lock @@ -625,13 +625,6 @@ dependencies: tslib "^2.5.0" -"@aws/dynamodb-auto-marshaller@^0.7.1": - version "0.7.1" - resolved "https://registry.yarnpkg.com/@aws/dynamodb-auto-marshaller/-/dynamodb-auto-marshaller-0.7.1.tgz#70676c056e4ecb798c08ec2e398a3d93e703858d" - integrity sha512-LeURlf6/avrfFo9+4Yht9J3CUTJ72yoBpm1FOUmlexuHNW4Ka61tG30w3ZDCXXXmCO2rG0k3ywAgNJEo3WPbyw== - dependencies: - tslib "^1.8.1" - "@azure/abort-controller@^1.0.0", "@azure/abort-controller@^1.0.4": version "1.1.0" resolved 
"https://registry.yarnpkg.com/@azure/abort-controller/-/abort-controller-1.1.0.tgz#788ee78457a55af8a1ad342acb182383d2119249" @@ -1980,7 +1973,7 @@ resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== -"@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.15.4", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": +"@babel/runtime@^7.12.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": version "7.23.8" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.8.tgz#8ee6fe1ac47add7122902f257b8ddf55c898f650" integrity sha512-Y7KbAP984rn1VGMbGqKmBLio9V7y5Je9GvU4rQPCPinCyNfUcToxIXl06d59URp/F3LwinvODxab5N/G6qggkw== @@ -2638,14 +2631,6 @@ teeny-request "^8.0.0" uuid "^8.0.0" -"@grpc/grpc-js@1.9.7": - version "1.9.7" - resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.9.7.tgz#7d0e29bc162287bee2523901c9bc9320d8402397" - integrity sha512-yMaA/cIsRhGzW3ymCNpdlPcInXcovztlgu/rirThj2b87u3RzWUszliOqZ/pldy7yhmJPS8uwog+kZSTa4A0PQ== - dependencies: - "@grpc/proto-loader" "^0.7.8" - "@types/node" ">=12.12.47" - "@grpc/grpc-js@~1.8.0": version "1.8.21" resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.8.21.tgz#d282b122c71227859bf6c5866f4c40f4a2696513" @@ -2654,7 +2639,7 @@ "@grpc/proto-loader" "^0.7.0" "@types/node" ">=12.12.47" -"@grpc/proto-loader@0.7.10", "@grpc/proto-loader@^0.7.0", "@grpc/proto-loader@^0.7.8": +"@grpc/proto-loader@^0.7.0": version "0.7.10" resolved "https://registry.yarnpkg.com/@grpc/proto-loader/-/proto-loader-0.7.10.tgz#6bf26742b1b54d0a473067743da5d3189d06d720" integrity sha512-CAqDfoaQ8ykFd9zqBDn4k6iWT9loLAlc2ETmDFS9JCD70gDcnA4L3AFEo2iV7KyAtAAHFW9ftq1Fz+Vsgq80RQ== @@ -2676,20 +2661,6 @@ dependencies: "@hapi/hoek" "^9.0.0" -"@hubspot/api-client@7.1.2": - 
version "7.1.2" - resolved "https://registry.yarnpkg.com/@hubspot/api-client/-/api-client-7.1.2.tgz#a405b0a18b8caa27f129fd510b2555e5a5cc2708" - integrity sha512-JVQqh0fdHf97ePk0Hg/7BJsiXNlS9HQRPiM/CLgvVWt5CIviSLQ/kHLZXREmZqTWu7BisjCgHxnSx/d7gRdr2g== - dependencies: - bluebird "^3.7.2" - bottleneck "^2.19.5" - btoa "^1.2.1" - es6-promise "^4.2.4" - form-data "^2.5.0" - lodash "^4.17.21" - node-fetch "^2.6.0" - url-parse "^1.4.3" - "@humanwhocodes/config-array@^0.11.13": version "0.11.13" resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.13.tgz#075dc9684f40a531d9b26b0822153c1e832ee297" @@ -3341,13 +3312,6 @@ dependencies: lodash "^4.17.21" -"@koa/cors@^3.1.0": - version "3.4.3" - resolved "https://registry.yarnpkg.com/@koa/cors/-/cors-3.4.3.tgz#d669ee6e8d6e4f0ec4a7a7b0a17e7a3ed3752ebb" - integrity sha512-WPXQUaAeAMVaLTEFpoq3T2O1C+FstkjJnDQqy95Ck1UdILajsRhu6mhJ8H2f4NFPRBoCNN+qywTJfq/gGki5mw== - dependencies: - vary "^1.1.2" - "@koa/router@8.0.8": version "8.0.8" resolved "https://registry.yarnpkg.com/@koa/router/-/router-8.0.8.tgz#95f32d11373d03d89dcb63fabe9ac6f471095236" @@ -3956,14 +3920,6 @@ is-module "^1.0.0" resolve "^1.19.0" -"@rollup/plugin-replace@^2.4.2": - version "2.4.2" - resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" - integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - magic-string "^0.25.7" - "@rollup/plugin-replace@^5.0.2", "@rollup/plugin-replace@^5.0.3": version "5.0.5" resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-5.0.5.tgz#33d5653dce6d03cb24ef98bef7f6d25b57faefdf" @@ -4006,23 +3962,6 @@ estree-walker "^2.0.2" picomatch "^2.3.1" -"@roxi/routify@2.18.0": - version "2.18.0" - resolved "https://registry.yarnpkg.com/@roxi/routify/-/routify-2.18.0.tgz#8f88bedd936312d0dbe44cbc11ab179b1f938ec2" - 
integrity sha512-MVB50HN+VQWLzfjLplcBjsSBvwOiExKOmht2DuWR3WQ60JxQi9pSejkB06tFVkFKNXz2X5iYtKDqKBTdae/gRg== - dependencies: - "@roxi/ssr" "^0.2.1" - "@types/node" ">=4.2.0 < 13" - chalk "^4.0.0" - cheap-watch "^1.0.2" - commander "^7.1.0" - configent "^2.1.4" - esm "^3.2.25" - fs-extra "^9.0.1" - log-symbols "^3.0.0" - picomatch "^2.2.2" - rollup-pluginutils "^2.8.2" - "@roxi/routify@2.18.12": version "2.18.12" resolved "https://registry.yarnpkg.com/@roxi/routify/-/routify-2.18.12.tgz#901ca95b96f274ddddaefbf18424557ee1ae3fae" @@ -4104,11 +4043,6 @@ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.7.0.tgz#9a06f4f137ee84d7df0460c1fdb1135ffa6c50fd" integrity sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow== -"@sindresorhus/is@^4.0.0": - version "4.6.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" - integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== - "@sinonjs/commons@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-2.0.0.tgz#fd4ca5b063554307e8327b4564bd56d3b73924a3" @@ -4932,13 +4866,6 @@ dependencies: defer-to-connect "^1.0.1" -"@szmarczak/http-timer@^4.0.5": - version "4.0.6" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" - integrity sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w== - dependencies: - defer-to-connect "^2.0.0" - "@techpass/passport-openidconnect@0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@techpass/passport-openidconnect/-/passport-openidconnect-0.3.2.tgz#f8fd5d97256286665dbf26dac92431f977ab1e63" @@ -4950,17 +4877,6 @@ request "^2.88.0" webfinger "^0.4.2" -"@techpass/passport-openidconnect@^0.3.0": - version "0.3.3" - resolved 
"https://registry.yarnpkg.com/@techpass/passport-openidconnect/-/passport-openidconnect-0.3.3.tgz#6c01c78bd8da0ca8917378dfbe18024702620352" - integrity sha512-i2X/CofjnGBqpTmw6b+Ex3Co/NrR2xjnIHvnOJk62XIlJJHNSTwmhJ1PkXoA5RGKlxZWchADFGjLTJnebvRj7A== - dependencies: - base64url "^3.0.1" - oauth "^0.9.15" - passport-strategy "^1.0.0" - request "^2.88.0" - webfinger "^0.4.2" - "@techteamer/ocsp@1.0.0": version "1.0.0" resolved "https://registry.yarnpkg.com/@techteamer/ocsp/-/ocsp-1.0.0.tgz#7b82b02093fbe351e915bb37685ac1ac5a1233d3" @@ -5133,16 +5049,6 @@ "@types/connect" "*" "@types/node" "*" -"@types/cacheable-request@^6.0.1": - version "6.0.3" - resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.3.tgz#a430b3260466ca7b5ca5bfd735693b36e7a9d183" - integrity sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw== - dependencies: - "@types/http-cache-semantics" "*" - "@types/keyv" "^3.1.4" - "@types/node" "*" - "@types/responselike" "^1.0.0" - "@types/caseless@*": version "0.12.2" resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.2.tgz#f65d3d6389e01eeb458bd54dc8f52b95a9463bc8" @@ -5307,11 +5213,6 @@ resolved "https://registry.yarnpkg.com/@types/http-assert/-/http-assert-1.5.3.tgz#ef8e3d1a8d46c387f04ab0f2e8ab8cb0c5078661" integrity sha512-FyAOrDuQmBi8/or3ns4rwPno7/9tJTijVW6aQQjK02+kOQ8zmoNg2XJtAuQhvQcy1ASJq38wirX5//9J1EqoUA== -"@types/http-cache-semantics@*": - version "4.0.4" - resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz#b979ebad3919799c979b17c72621c0bc0a31c6c4" - integrity sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA== - "@types/http-errors@*": version "2.0.1" resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" @@ -5373,13 +5274,6 @@ resolved 
"https://registry.yarnpkg.com/@types/keygrip/-/keygrip-1.0.2.tgz#513abfd256d7ad0bf1ee1873606317b33b1b2a72" integrity sha512-GJhpTepz2udxGexqos8wgaBx4I/zWIDPh/KOGEwAqtuGDkOUJu5eFvwmdBX4AmB8Odsr+9pHCQqiAqDL/yKMKw== -"@types/keyv@^3.1.4": - version "3.1.4" - resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-3.1.4.tgz#3ccdb1c6751b0c7e52300bcdacd5bcbf8faa75b6" - integrity sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg== - dependencies: - "@types/node" "*" - "@types/koa-compose@*": version "3.2.5" resolved "https://registry.yarnpkg.com/@types/koa-compose/-/koa-compose-3.2.5.tgz#85eb2e80ac50be95f37ccf8c407c09bbe3468e9d" @@ -5387,29 +5281,13 @@ dependencies: "@types/koa" "*" -"@types/koa-passport@^4.0.3": - version "4.0.3" - resolved "https://registry.yarnpkg.com/@types/koa-passport/-/koa-passport-4.0.3.tgz#063ec6310edee76cf854aadaa717b97f04b104fb" - integrity sha512-tNMYd/bcv0Zw7fc0CzEBYM9uUzVtn4XWzdUYfkTgSkEljP6nap7eI4E5x43ukrUQvztgXSYFkz3Uk+ujFeUzTg== - dependencies: - "@types/koa" "*" - "@types/passport" "*" - -"@types/koa-send@*", "@types/koa-send@^4.1.6": +"@types/koa-send@^4.1.6": version "4.1.6" resolved "https://registry.yarnpkg.com/@types/koa-send/-/koa-send-4.1.6.tgz#15d90e95e3ccce669a15b6a3c56c3a650a167cea" integrity sha512-vgnNGoOJkx7FrF0Jl6rbK1f8bBecqAchKpXtKuXzqIEdXTDO6dsSTjr+eZ5m7ltSjH4K/E7auNJEQCAd0McUPA== dependencies: "@types/koa" "*" -"@types/koa-static@^4.0.2": - version "4.0.4" - resolved "https://registry.yarnpkg.com/@types/koa-static/-/koa-static-4.0.4.tgz#ce6f2a5d14cc7ef19f9bf6ee8e4f3eadfcc77323" - integrity sha512-j1AUzzl7eJYEk9g01hNTlhmipFh8RFbOQmaMNLvLcNNAkPw0bdTs3XTa3V045XFlrWN0QYnblbDJv2RzawTn6A== - dependencies: - "@types/koa" "*" - "@types/koa-send" "*" - "@types/koa@*": version "2.13.5" resolved "https://registry.yarnpkg.com/@types/koa/-/koa-2.13.5.tgz#64b3ca4d54e08c0062e89ec666c9f45443b21a61" @@ -5438,13 +5316,6 @@ "@types/koa-compose" "*" "@types/node" "*" -"@types/koa__cors@^3.1.1": 
- version "3.3.1" - resolved "https://registry.yarnpkg.com/@types/koa__cors/-/koa__cors-3.3.1.tgz#0ec7543c4c620fd23451bfdd3e21b9a6aadedccd" - integrity sha512-aFGYhTFW7651KhmZZ05VG0QZJre7QxBxDj2LF1lf6GA/wSXEfKVAJxiQQWzRV4ZoMzQIO8vJBXKsUcRuvYK9qw== - dependencies: - "@types/koa" "*" - "@types/koa__router@8.0.8": version "8.0.8" resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.8.tgz#b1e0e9a512498777d3366bbdf0e853df27ec831c" @@ -5546,42 +5417,21 @@ dependencies: undici-types "~5.26.4" -"@types/node@>=4.2.0 < 13", "@types/node@^12.20.52": - version "12.20.55" - resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.55.tgz#c329cbd434c42164f846b909bd6f85b5537f6240" - integrity sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ== - "@types/node@>=8.0.0 <15": version "14.18.37" resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.37.tgz#0bfcd173e8e1e328337473a8317e37b3b14fd30d" integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg== -"@types/node@>=8.1.0": - version "20.11.2" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.2.tgz#39cea3fe02fbbc2f80ed283e94e1d24f2d3856fb" - integrity sha512-cZShBaVa+UO1LjWWBPmWRR4+/eY/JR/UIEcDlVsw3okjWEu+rB7/mH6X3B/L+qJVHDLjk9QW/y2upp9wp1yDXA== - dependencies: - undici-types "~5.26.4" - -"@types/nodemailer@^6.4.4": - version "6.4.14" - resolved "https://registry.yarnpkg.com/@types/nodemailer/-/nodemailer-6.4.14.tgz#5c81a5e856db7f8ede80013e6dbad7c5fb2283e2" - integrity sha512-fUWthHO9k9DSdPCSPRqcu6TWhYyxTBg382vlNIttSe9M7XfsT06y0f24KHXtbnijPGGRIcVvdKHTNikOI6qiHA== - dependencies: - "@types/node" "*" +"@types/node@^12.20.52": + version "12.20.55" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.55.tgz#c329cbd434c42164f846b909bd6f85b5537f6240" + integrity sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ== 
"@types/normalize-package-data@^2.4.0": version "2.4.1" resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== -"@types/oauth@*": - version "0.9.4" - resolved "https://registry.yarnpkg.com/@types/oauth/-/oauth-0.9.4.tgz#dcbab5efa2f34f312b915f80685760ccc8111e0a" - integrity sha512-qk9orhti499fq5XxKCCEbd0OzdPZuancneyse3KtR+vgMiHRbh+mn8M4G6t64ob/Fg+GZGpa565MF/2dKWY32A== - dependencies: - "@types/node" "*" - "@types/oracledb@5.2.2": version "5.2.2" resolved "https://registry.yarnpkg.com/@types/oracledb/-/oracledb-5.2.2.tgz#ae7ba795969e3bbd8d57ab141873a1aa012b86cd" @@ -5590,37 +5440,6 @@ "@types/node" "*" dotenv "^8.2.0" -"@types/passport-google-oauth@^1.0.42": - version "1.0.45" - resolved "https://registry.yarnpkg.com/@types/passport-google-oauth/-/passport-google-oauth-1.0.45.tgz#c986c787ec9706b4a596d2bae43342b50b54973d" - integrity sha512-O3Y3DDKnf9lR8+DSaUOCEGF6aFjVYdI8TLhQYtySZ3Sq75c5tGYJ0KJRDZw0GsyLD/Que0nqFkP/GnDVwZZL9w== - dependencies: - "@types/express" "*" - "@types/passport" "*" - -"@types/passport-microsoft@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@types/passport-microsoft/-/passport-microsoft-1.0.0.tgz#a2ddc2200843570d38c35c53f6388e33df915b58" - integrity sha512-vD9ajSUc9Sz/8gdCj0ODUbPYQDxcI/imIDdgMPh//c5yMK/PgV6SNUXFLBzJo89Y30LU6bYAfXKn40WJqtMBiA== - dependencies: - "@types/passport-oauth2" "*" - -"@types/passport-oauth2@*": - version "1.4.15" - resolved "https://registry.yarnpkg.com/@types/passport-oauth2/-/passport-oauth2-1.4.15.tgz#34f2684f53aad36e664cd01ca9879224229f47e7" - integrity sha512-9cUTP/HStNSZmhxXGuRrBJfEWzIEJRub2eyJu3CvkA+8HAMc9W3aKdFhVq+Qz1hi42qn+GvSAnz3zwacDSYWpw== - dependencies: - "@types/express" "*" - "@types/oauth" "*" - "@types/passport" "*" - -"@types/passport@*": - version "1.0.16" - resolved 
"https://registry.yarnpkg.com/@types/passport/-/passport-1.0.16.tgz#5a2918b180a16924c4d75c31254c31cdca5ce6cf" - integrity sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A== - dependencies: - "@types/express" "*" - "@types/pg@8.6.6": version "8.6.6" resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.6.6.tgz#21cdf873a3e345a6e78f394677e3b3b1b543cb80" @@ -5825,13 +5644,6 @@ dependencies: "@types/node" "*" -"@types/responselike@^1.0.0": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.3.tgz#cc29706f0a397cfe6df89debfe4bf5cea159db50" - integrity sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw== - dependencies: - "@types/node" "*" - "@types/rimraf@^3.0.2": version "3.0.2" resolved "https://registry.yarnpkg.com/@types/rimraf/-/rimraf-3.0.2.tgz#a63d175b331748e5220ad48c901d7bbf1f44eef8" @@ -5860,13 +5672,6 @@ dependencies: "@types/node" "*" -"@types/server-destroy@^1.0.1": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@types/server-destroy/-/server-destroy-1.0.3.tgz#2460932ea3a02a70ec99669c8f40ff089a5b8a2b" - integrity sha512-Qq0fn70C7TLDG1W9FCblKufNWW1OckQ41dVKV2Dku5KdZF7bexezG4e2WBaBKhdwL3HZ+cYCEIKwg2BRgzrWmA== - dependencies: - "@types/node" "*" - "@types/stack-utils@^2.0.0": version "2.0.1" resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" @@ -5948,7 +5753,7 @@ dependencies: "@types/node" "*" -"@types/uuid@8.3.4", "@types/uuid@^8.3.4": +"@types/uuid@8.3.4": version "8.3.4" resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-8.3.4.tgz#bd86a43617df0594787d38b735f55c805becf1bc" integrity sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw== @@ -6290,6 +6095,11 @@ js-yaml "^3.10.0" tslib "^2.4.0" +"@zerodevx/svelte-json-view@^1.0.7": + version "1.0.7" + resolved 
"https://registry.yarnpkg.com/@zerodevx/svelte-json-view/-/svelte-json-view-1.0.7.tgz#abf3efa71dedcb3e9d16bc9cc61d5ea98c8d00b1" + integrity sha512-yW0MV+9BCKOwzt3h86y3xDqYdI5st+Rxk+L5pa0Utq7nlPD+VvxyhL7R1gJoLxQvWwjyAvY/fyUCFTdwDyI14w== + "@zkochan/js-yaml@0.0.6": version "0.0.6" resolved "https://registry.yarnpkg.com/@zkochan/js-yaml/-/js-yaml-0.0.6.tgz#975f0b306e705e28b8068a07737fa46d3fc04826" @@ -6332,7 +6142,7 @@ abortcontroller-polyfill@^1.4.0: resolved "https://registry.yarnpkg.com/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.5.tgz#6738495f4e901fbb57b6c0611d0c75f76c485bed" integrity sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ== -abstract-leveldown@^6.2.1, abstract-leveldown@^6.3.0: +abstract-leveldown@^6.2.1: version "6.3.0" resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.3.0.tgz#d25221d1e6612f820c35963ba4bd739928f6026a" integrity sha512-TU5nlYgta8YrBMNpc9FwQzRbiXsj49gsALsXadbGHt9CROPzX5fB0rWDR5mtdpOOKa5XqRFpbj1QroPAoPzVjQ== @@ -6854,13 +6664,6 @@ async-retry@^1.3.3: dependencies: retry "0.13.1" -async@^2.6.3: - version "2.6.4" - resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221" - integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA== - dependencies: - lodash "^4.17.14" - async@^3.2.1, async@^3.2.3: version "3.2.4" resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" @@ -7234,11 +7037,6 @@ bootstrap@3.4.1: resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-3.4.1.tgz#c3a347d419e289ad11f4033e3c4132b87c081d72" integrity sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA== -bottleneck@^2.19.5: - version "2.19.5" - resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91" - integrity 
sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw== - bowser@^2.11.0: version "2.11.0" resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" @@ -7387,11 +7185,6 @@ bson@^5.4.0: resolved "https://registry.yarnpkg.com/bson/-/bson-5.4.0.tgz#0eea77276d490953ad8616b483298dbff07384c6" integrity sha512-WRZ5SQI5GfUuKnPTNmAYPiKIof3ORXAF4IRU5UcgmivNIon01rWQlw5RUH954dpu8yGL8T59YShVddIPaU/gFA== -btoa@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/btoa/-/btoa-1.2.1.tgz#01a9909f8b2c93f6bf680ba26131eb30f7fa3d73" - integrity sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g== - buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" @@ -7405,7 +7198,7 @@ buffer-alloc@^1.2.0: buffer-alloc-unsafe "^1.1.0" buffer-fill "^1.0.0" -buffer-crc32@^0.2.13, buffer-crc32@~0.2.3: +buffer-crc32@~0.2.3: version "0.2.13" resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== @@ -7590,11 +7383,6 @@ cache-content-type@^1.0.0: mime-types "^2.1.18" ylru "^1.2.0" -cacheable-lookup@^5.0.3: - version "5.0.4" - resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" - integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== - cacheable-request@^2.1.1: version "2.1.4" resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-2.1.4.tgz#0d808801b6342ad33c91df9d0b44dc09b91e5c3d" @@ -7621,19 +7409,6 @@ cacheable-request@^6.0.0: normalize-url "^4.1.0" responselike "^1.0.2" -cacheable-request@^7.0.2: - version "7.0.4" - resolved 
"https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.4.tgz#7a33ebf08613178b403635be7b899d3e69bbe817" - integrity sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^4.0.0" - lowercase-keys "^2.0.0" - normalize-url "^6.0.1" - responselike "^2.0.0" - call-bind@^1.0.0, call-bind@^1.0.2, call-bind@^1.0.4, call-bind@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" @@ -7672,52 +7447,6 @@ camelcase@^6.2.0: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== -camunda-8-credentials-from-env@^1.1.1, camunda-8-credentials-from-env@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/camunda-8-credentials-from-env/-/camunda-8-credentials-from-env-1.2.2.tgz#abe5d216e7e4cfc970e0463e9aa5e802487b1062" - integrity sha512-uj2PY5/IoAgu0cHmeEUp+qmSXCtpQafStzGJ8ORYvyupBN/gVpdP9X+A+UlQRCGmApcaIuPUw8/9FsXig5NWXg== - dependencies: - neon-env "^0.1.1" - -camunda-8-sdk@^0.15.0: - version "0.15.0" - resolved "https://registry.yarnpkg.com/camunda-8-sdk/-/camunda-8-sdk-0.15.0.tgz#13754dca499d16802675b6f2790e2d06bd8034d6" - integrity sha512-felyQU+rD8uupPjBArmyy0E/k9mrmeZvfFliF3y/pxYkGBoaC5kjDHDsx+hNpbnIwShET0RLjklit7f+98yIBw== - dependencies: - camunda-console-client "^0.9.1" - camunda-tasklist-client "0.9.5" - operate-api-client "1.2.3" - optimize-api-client "^1.0.3" - zeebe-node "^8.2.5" - -camunda-console-client@^0.9.1: - version "0.9.2" - resolved "https://registry.yarnpkg.com/camunda-console-client/-/camunda-console-client-0.9.2.tgz#137dbd2e61bb5bbfff38aebe5d53e775653aabb8" - integrity 
sha512-ni+7lSc5oG0FevCagrBV6juZzwcQ4ciATBZxyOMFQK0yVTmZxOUz5efN9XWP4E36PGpuqALQXsViUDlGZcfZBA== - dependencies: - camunda-8-credentials-from-env "^1.2.2" - camunda-saas-oauth "^1.2.4" - debug "^4.3.4" - dotenv "^16.3.1" - got "^11.8.6" - -camunda-saas-oauth@^1.2.0, camunda-saas-oauth@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/camunda-saas-oauth/-/camunda-saas-oauth-1.2.4.tgz#348a8422f266dafed98cf2a73046aa62c89d03f2" - integrity sha512-AO/kcnZXcsodwM3qgMZj/5wn8SBoKmSDpuFYUpPS+HqQhG9GvWY8noBx/4pvX3gYPKiPTYi9/e9ApAe02NARzA== - dependencies: - camunda-8-credentials-from-env "^1.2.2" - got "^11.8.5" - -camunda-tasklist-client@0.9.5: - version "0.9.5" - resolved "https://registry.yarnpkg.com/camunda-tasklist-client/-/camunda-tasklist-client-0.9.5.tgz#c0f2685ef7fb7fdb198a37e5b35a911e3b233b28" - integrity sha512-gipH8ON/ttTgLfleWecQith1g9SpC5Q8CoLXFq2yw3cVJ1JVrcn0ArtgCxA1QCgtZBlV7EuGt9QWGc9UCfbbGw== - dependencies: - camunda-8-credentials-from-env "^1.1.1" - camunda-saas-oauth "^1.2.0" - gotql "^2.1.0-alpha1" - caniuse-api@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" @@ -7811,7 +7540,7 @@ charenc@0.0.2: resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667" integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA== -cheap-watch@^1.0.2, cheap-watch@^1.0.4: +cheap-watch@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/cheap-watch/-/cheap-watch-1.0.4.tgz#0bcb4a3a8fbd9d5327936493f6b56baa668d8fef" integrity sha512-QR/9FrtRL5fjfUJBhAKCdi0lSRQ3rVRRum3GF9wDKp2TJbEIMGhUEr2yU8lORzm9Isdjx7/k9S0DFDx+z5VGtw== @@ -8138,7 +7867,7 @@ commander@^5.1.0: resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" integrity 
sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== -commander@^7.0.0, commander@^7.1.0, commander@^7.2.0: +commander@^7.0.0, commander@^7.2.0: version "7.2.0" resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== @@ -8247,7 +7976,7 @@ config-chain@^1.1.13: ini "^1.3.4" proto-list "~1.2.1" -configent@^2.1.4, configent@^2.2.0: +configent@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/configent/-/configent-2.2.0.tgz#2de230fc43f22c47cfd99016aa6962d6f9546994" integrity sha512-yIN6zfOWk2nycNJ2JFNiWEai0oiqAhISIht8+pbEBP8bdcpwoQ74AhCZPbUv9aRVJwo7wh1MbCBDUV44UJa7Kw== @@ -8271,14 +8000,6 @@ console-control-strings@^1.0.0, console-control-strings@^1.1.0: resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ== -console-stamp@^3.0.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/console-stamp/-/console-stamp-3.1.2.tgz#35dac393e16069a4d9d37b71ca6d5d13d7f3f8fd" - integrity sha512-ab66x3NxOTxPuq71dI6gXEiw2X6ql4Le5gZz0bm7FW3FSCB00eztra/oQUuCoCGlsyKOxtULnHwphzMrRtzMBg== - dependencies: - chalk "^4.1.2" - dateformat "^4.6.3" - consolidate@^0.16.0: version "0.16.0" resolved "https://registry.yarnpkg.com/consolidate/-/consolidate-0.16.0.tgz#a11864768930f2f19431660a65906668f5fbdc16" @@ -8771,7 +8492,7 @@ dateformat@^4.6.3: resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5" integrity sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA== -dayjs@^1.10.8, dayjs@^1.8.15: +dayjs@^1.10.8: version "1.11.10" resolved 
"https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.10.tgz#68acea85317a6e164457d6d6947564029a6a16a0" integrity sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ== @@ -8821,7 +8542,7 @@ dd-trace@5.0.0: semver "^7.5.4" tlhunter-sorted-set "^0.1.0" -debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@~4.3.1, debug@~4.3.2: +debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@~4.3.1, debug@~4.3.2: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -8870,13 +8591,6 @@ decompress-response@^3.3.0: dependencies: mimic-response "^1.0.0" -decompress-response@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" - integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== - dependencies: - mimic-response "^3.1.0" - decompress-tar@^4.0.0, decompress-tar@^4.1.0, decompress-tar@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/decompress-tar/-/decompress-tar-4.1.1.tgz#718cbd3fcb16209716e70a26b84e7ba4592e5af1" @@ -9002,11 +8716,6 @@ defer-to-connect@^1.0.1: resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== -defer-to-connect@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" - integrity 
sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== - deferred-leveldown@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-0.2.0.tgz#2cef1f111e1c57870d8bbb8af2650e587cd2f5b4" @@ -9050,11 +8759,6 @@ defined@^1.0.0: resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.1.tgz#c0b9db27bfaffd95d6f61399419b893df0f91ebf" integrity sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q== -defined@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/defined/-/defined-0.0.0.tgz#f35eea7d705e933baf13b2f03b3f83d921403b3e" - integrity sha512-zpqiCT8bODLu3QSmLLic8xJnYWBFjOSu/fBCm189oAiTtPq/PSanNACKZDS7kgSyCJY7P+IcODzlIogBK/9RBg== - delay@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/delay/-/delay-5.0.0.tgz#137045ef1b96e5071060dd5be60bf9334436bd1d" @@ -9350,7 +9054,7 @@ docker-compose@0.24.0: dependencies: yaml "^1.10.2" -docker-compose@^0.23.5, docker-compose@^0.23.6: +docker-compose@^0.23.5: version "0.23.19" resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.23.19.tgz#9947726e2fe67bdfa9e8efe1ff15aa0de2e10eb8" integrity sha512-v5vNLIdUqwj4my80wxFDkNH+4S85zsRuH29SO7dCWVWPCMt/ohZBsGN6g6KXWifT0pzQ7uOxqEKCYCDPJ8Vz4g== @@ -9491,11 +9195,6 @@ dotenv@8.6.0, dotenv@^8.2.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== -dotenv@^16.3.1: - version "16.3.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" - integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== - dotenv@~10.0.0: version "10.0.0" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" @@ -9548,24 +9247,6 @@ 
duplexify@^4.0.0, duplexify@^4.1.2: readable-stream "^3.1.1" stream-shift "^1.0.0" -dynalite@^3.2.1: - version "3.2.2" - resolved "https://registry.yarnpkg.com/dynalite/-/dynalite-3.2.2.tgz#34b4f4dd69638f17c0f7551a867959972c892441" - integrity sha512-sx9ZjTgMs/D4gHnba4rnBkw29648dHwHmywJet132KAbiq1ZyWx9W1fMd/eP9cPwTKDXyCBuTYOChE0qMDjaXQ== - dependencies: - async "^2.6.3" - big.js "^5.2.2" - buffer-crc32 "^0.2.13" - lazy "^1.0.11" - levelup "^4.4.0" - lock "^1.1.0" - memdown "^5.1.0" - minimist "^1.2.5" - once "^1.4.0" - subleveldown "^5.0.1" - optionalDependencies: - leveldown "^5.6.0" - eastasianwidth@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" @@ -9672,7 +9353,7 @@ encodeurl@^1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -encoding-down@^6.2.0, encoding-down@^6.3.0: +encoding-down@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-6.3.0.tgz#b1c4eb0e1728c146ecaef8e32963c549e76d082b" integrity sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw== @@ -9780,11 +9461,6 @@ envinfo@7.8.1, envinfo@^7.7.3: resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.8.1.tgz#06377e3e5f4d379fea7ac592d5ad8927e0c4d475" integrity sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw== -err-code@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/err-code/-/err-code-1.1.2.tgz#06e0116d3028f6aef4806849eb0ea6a748ae6960" - integrity sha512-CJAN+O0/yA1CKfRn9SXOGctSpEM7DCon/r/5r2eXFMY2zCCJBasFhcM5I+1kh3Ap11FsQCX+vGHceNPvpWKhoA== - err-code@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" 
@@ -9922,11 +9598,6 @@ es6-error@^4.0.1, es6-error@^4.1.1: resolved "https://registry.yarnpkg.com/es6-error/-/es6-error-4.1.1.tgz#9e3af407459deed47e9a91f9b885a84eb05c561d" integrity sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg== -es6-promise@^4.2.4: - version "4.2.8" - resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" - integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== - esbuild-loader@^2.16.0: version "2.21.0" resolved "https://registry.yarnpkg.com/esbuild-loader/-/esbuild-loader-2.21.0.tgz#2698a3e565b0db2bb19a3dd91c2b6c9aad526c80" @@ -10525,13 +10196,6 @@ fast-xml-parser@4.2.5: dependencies: strnum "^1.0.5" -fast-xml-parser@^4.1.3: - version "4.3.3" - resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.3.tgz#aeaf5778392329f17168c40c51bcbfec8ff965be" - integrity sha512-coV/D1MhrShMvU6D0I+VAK3umz6hUaxxhL0yp/9RjfiYUfAv14rDhGQL+PLForhMdr0wq3PiV07WtkkNjJjNHg== - dependencies: - strnum "^1.0.5" - fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: version "4.3.2" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.2.tgz#761e641260706d6e13251c4ef8e3f5694d4b0d79" @@ -10872,11 +10536,6 @@ formidable@^2.1.2: once "^1.4.0" qs "^6.11.0" -fp-ts@^2.5.1: - version "2.16.2" - resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-2.16.2.tgz#7faa90f6fc2e8cf84c711d2c4e606afe2be9e342" - integrity sha512-CkqAjnIKFqvo3sCyoBTqgJvF+bHrSik584S9nhTjtBESLx26cbtVMR/T9a6ApChOcSDAaM3JydDmWDUn4EEXng== - fresh@^0.5.2, fresh@~0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" @@ -10918,7 +10577,7 @@ fs-extra@^11.1.0, fs-extra@^11.1.1: jsonfile "^6.0.1" universalify "^2.0.0" -fs-extra@^9.0.0, fs-extra@^9.0.1, fs-extra@^9.1.0: +fs-extra@^9.0.0, fs-extra@^9.1.0: version "9.1.0" resolved 
"https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== @@ -10967,7 +10626,7 @@ function.prototype.name@^1.1.6: es-abstract "^1.22.1" functions-have-names "^1.2.3" -functional-red-black-tree@^1.0.1, functional-red-black-tree@~1.0.1: +functional-red-black-tree@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= @@ -11561,23 +11220,6 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -got@^11.5.1, got@^11.8.5, got@^11.8.6: - version "11.8.6" - resolved "https://registry.yarnpkg.com/got/-/got-11.8.6.tgz#276e827ead8772eddbcfc97170590b841823233a" - integrity sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g== - dependencies: - "@sindresorhus/is" "^4.0.0" - "@szmarczak/http-timer" "^4.0.5" - "@types/cacheable-request" "^6.0.1" - "@types/responselike" "^1.0.0" - cacheable-lookup "^5.0.3" - cacheable-request "^7.0.2" - decompress-response "^6.0.0" - http2-wrapper "^1.0.0-beta.5.2" - lowercase-keys "^2.0.0" - p-cancelable "^2.0.0" - responselike "^2.0.0" - got@^8.3.1: version "8.3.2" resolved "https://registry.yarnpkg.com/got/-/got-8.3.2.tgz#1d23f64390e97f776cac52e5b936e5f514d2e937" @@ -11618,15 +11260,6 @@ got@^9.6.0: to-readable-stream "^1.0.0" url-parse-lax "^3.0.0" -gotql@^2.1.0-alpha1: - version "2.1.0-alpha1" - resolved "https://registry.yarnpkg.com/gotql/-/gotql-2.1.0-alpha1.tgz#b04e9adb0d1751a0c2de05bd4399f5c57aec79ba" - integrity sha512-4xG1AczSpK+tdKUDM4kB1ah/2LoNlmFU5IhGNktuYNBLgyWB5iDs4OE36NE7k59iTKYi2B7vudQz2Itw1ZXrRg== - dependencies: - debug "^4.1.1" - got "^11.5.1" - prepend-http "^3.0.1" - graceful-fs@4.2.11, graceful-fs@^4.1.10, graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, 
graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" @@ -11991,14 +11624,6 @@ http-signature@~1.2.0: jsprim "^1.2.2" sshpk "^1.7.0" -http2-wrapper@^1.0.0-beta.5.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d" - integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg== - dependencies: - quick-lru "^5.1.1" - resolve-alpn "^1.0.0" - https-proxy-agent@^5.0.0, https-proxy-agent@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" @@ -12118,11 +11743,6 @@ immediate@~3.0.5: resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" integrity sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ== -immediate@~3.2.3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.2.3.tgz#d140fa8f614659bd6541233097ddaac25cdd991c" - integrity sha512-RrGCXRm/fRVqMIhqXrGEX9rRADavPiDFSoMb/k64i9XMk8uH4r/Omi5Ctierj6XzNecwDbO4WuFbDD1zmpl3Tg== - import-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-3.0.0.tgz#20845547718015126ea9b3676b7592fb8bd4cf92" @@ -13048,15 +12668,6 @@ jest-docblock@^29.7.0: dependencies: detect-newline "^3.0.0" -jest-dynalite@^3.6.1: - version "3.6.1" - resolved "https://registry.yarnpkg.com/jest-dynalite/-/jest-dynalite-3.6.1.tgz#8bae305a3c33d9a8036f563827b173b54a323ca5" - integrity sha512-MERtTt8Pj39vFmbItMC3YuIaqLf1kh/pJIE0DRcjeP/2Fa8Nni9IxwN6XWIMgXNbFKtlOM6ppH+Bsy0rWIdPiw== - dependencies: - "@aws/dynamodb-auto-marshaller" "^0.7.1" - dynalite "^3.2.1" - 
setimmediate "^1.0.5" - jest-each@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.7.0.tgz#162a9b3f2328bdd991beaabffbb74745e56577d1" @@ -13576,11 +13187,6 @@ json-buffer@3.0.0: resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== -json-buffer@3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" - integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== - json-parse-better-errors@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" @@ -13749,13 +13355,6 @@ keyv@^3.0.0: dependencies: json-buffer "3.0.0" -keyv@^4.0.0: - version "4.5.4" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" - integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== - dependencies: - json-buffer "3.0.1" - kill-port@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/kill-port/-/kill-port-1.6.1.tgz#560fe79484583bdf3a5e908557dae614447618aa" @@ -13871,7 +13470,7 @@ koa-mount@^4.0.0: debug "^4.0.1" koa-compose "^4.1.0" -koa-passport@4.1.4, koa-passport@^4.1.4: +koa-passport@4.1.4: version "4.1.4" resolved "https://registry.yarnpkg.com/koa-passport/-/koa-passport-4.1.4.tgz#5f1665c1c2a37ace79af9f970b770885ca30ccfa" integrity sha512-dJBCkl4X+zdYxbI2V2OtoGy0PUenpvp2ZLLWObc8UJhsId0iQpTFT8RVcuA0709AL2txGwRHnSPoT1bYNGa6Kg== @@ -13905,7 +13504,7 @@ koa-send@5.0.1, koa-send@^5.0.0: http-errors "^1.7.3" resolve-path "^1.4.0" -koa-session@5.13.1, koa-session@^5.12.0: +koa-session@5.13.1: version "5.13.1" resolved 
"https://registry.yarnpkg.com/koa-session/-/koa-session-5.13.1.tgz#a47e39015a4b464e21e3e1e2deeca48eb83916ee" integrity sha512-TfYiun6xiFosyfIJKnEw0aoG5XmLIwM+K3OVWfkz84qY0NP2gbk0F/olRn0/Hrxq0f14s8amHVXeWyKYH3Cx3Q== @@ -13923,7 +13522,7 @@ koa-static@5.0.0, koa-static@^5.0.0: debug "^3.1.0" koa-send "^5.0.0" -koa-useragent@4.1.0, koa-useragent@^4.1.0: +koa-useragent@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/koa-useragent/-/koa-useragent-4.1.0.tgz#d3f128b552c6da3e5e9e9e9c887b2922b16e4468" integrity sha512-x/HUDZ1zAmNNh5hA9hHbPm9p3UVg2prlpHzxCXQCzbibrNS0kmj7MkCResCbAbG7ZT6FVxNSMjR94ZGamdMwxA== @@ -14023,11 +13622,6 @@ latest-version@^5.1.0: dependencies: package-json "^6.3.0" -lazy@^1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/lazy/-/lazy-1.0.11.tgz#daa068206282542c088288e975c297c1ae77b690" - integrity sha512-Y+CjUfLmIpoUCCRl0ub4smrYtGGr5AOa2AKOaWelGHOGz33X/Y/KizefGqbkwfz44+cnq/+9habclf8vOmu2LA== - lcid@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" @@ -14213,13 +13807,6 @@ level-js@^5.0.0: inherits "^2.0.3" ltgt "^2.1.2" -level-option-wrap@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/level-option-wrap/-/level-option-wrap-1.1.0.tgz#ad20e68d9f3c22c8897531cc6aa7af596b1ed129" - integrity sha512-gQouC22iCqHuBLNl4BHxEZUxLvUKALAtT/Q0c6ziOxZQ8c02G/gyxHWNbLbxUzRNfMrRnbt6TZT3gNe8VBqQeg== - dependencies: - defined "~0.0.0" - level-packager@^5.1.0: version "5.1.1" resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-5.1.1.tgz#323ec842d6babe7336f70299c14df2e329c18939" @@ -14268,7 +13855,7 @@ level@6.0.1: level-packager "^5.1.0" leveldown "^5.4.0" -leveldown@5.6.0, leveldown@^5.4.0, leveldown@^5.6.0: +leveldown@5.6.0, leveldown@^5.4.0: version "5.6.0" resolved "https://registry.yarnpkg.com/leveldown/-/leveldown-5.6.0.tgz#16ba937bb2991c6094e13ac5a6898ee66d3eee98" integrity 
sha512-iB8O/7Db9lPaITU1aA2txU/cBEXAt4vWwKQRrrWuS6XDgbP4QZGj9BL2aNbwb002atoQ/lIotJkfyzz+ygQnUQ== @@ -14277,7 +13864,7 @@ leveldown@5.6.0, leveldown@^5.4.0, leveldown@^5.6.0: napi-macros "~2.0.0" node-gyp-build "~4.1.0" -levelup@4.4.0, levelup@^4.3.2, levelup@^4.4.0: +levelup@4.4.0, levelup@^4.3.2: version "4.4.0" resolved "https://registry.yarnpkg.com/levelup/-/levelup-4.4.0.tgz#f89da3a228c38deb49c48f88a70fb71f01cafed6" integrity sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ== @@ -14466,11 +14053,6 @@ locate-path@^6.0.0: dependencies: p-locate "^5.0.0" -lock@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/lock/-/lock-1.1.0.tgz#53157499d1653b136ca66451071fca615703fa55" - integrity sha512-NZQIJJL5Rb9lMJ0Yl1JoVr9GSdo4HTPsUEWsSFzB8dE8DSoiLCVavWZPi7Rnlv/o73u6I24S/XYc/NmG4l8EKA== - lodash-es@^4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" @@ -14611,7 +14193,7 @@ lodash.xor@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.xor/-/lodash.xor-4.5.0.tgz#4d48ed7e98095b0632582ba714d3ff8ae8fb1db6" integrity sha512-sVN2zimthq7aZ5sPGXnSz32rZPuqcparVW50chJQe+mzTYV+IsxSsl/2gnkWWE2Of7K3myBQBqtLKOUEHJKRsQ== -lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.7.0: +lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.7.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -14965,18 +14547,6 @@ memdown@1.4.1: ltgt "~2.2.0" safe-buffer "~5.1.1" -memdown@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/memdown/-/memdown-5.1.0.tgz#608e91a9f10f37f5b5fe767667a8674129a833cb" - integrity 
sha512-B3J+UizMRAlEArDjWHTMmadet+UKwHd3UjMgGBkZcKAxAYVPS9o0Yeiha4qvz7iGiL2Sb3igUft6p7nbFWctpw== - dependencies: - abstract-leveldown "~6.2.1" - functional-red-black-tree "~1.0.1" - immediate "~3.2.3" - inherits "~2.0.1" - ltgt "~2.2.0" - safe-buffer "~5.2.0" - memory-pager@^1.0.2: version "1.5.0" resolved "https://registry.yarnpkg.com/memory-pager/-/memory-pager-1.5.0.tgz#d8751655d22d384682741c972f2c3d6dfa3e66b5" @@ -15085,11 +14655,6 @@ mimic-response@^1.0.0, mimic-response@^1.0.1: resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== -mimic-response@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" - integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== - min-document@^2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685" @@ -15533,11 +15098,6 @@ neo-async@^2.6.0, neo-async@^2.6.2: resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== -neon-env@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/neon-env/-/neon-env-0.1.3.tgz#071e86fde3c698e9314f057d209e0b79ddab16e9" - integrity sha512-Zo+L6Nm19gJrjyfhxn/ZDm8eIIDzr75o64ZhijBau4LNuhLzjEAteRg3gchIvgaN8XTo5BxN6iTNP5clZQ0agA== - nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -16255,23 +15815,6 @@ opentracing@>=0.12.1: resolved 
"https://registry.yarnpkg.com/opentracing/-/opentracing-0.14.7.tgz#25d472bd0296dc0b64d7b94cbc995219031428f5" integrity sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q== -operate-api-client@1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/operate-api-client/-/operate-api-client-1.2.3.tgz#c884ab09fe07360ac5ce5b58ae470ba1e91db879" - integrity sha512-8FWfDsHVxgYIBe4p4fB6e7SSiYdW/PPTCCLFcGnbqdUxlhcUq9ncYu6ZMMm6E3A3WKxagdInYQbxOhtTeVGhVQ== - dependencies: - camunda-saas-oauth "^1.2.0" - got "^11.8.5" - -optimize-api-client@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/optimize-api-client/-/optimize-api-client-1.0.4.tgz#a2e653780fd1e9e54a38912418b0ea27bd0484ef" - integrity sha512-2XBW+sv6eENOCHMc5v0XmH2DaaSETAb/qH5BsfpTDD8Pmeu10ZR61W7Pc/rBqauy96vPP/MfgmMphx5CjHb2xg== - dependencies: - camunda-8-credentials-from-env "^1.1.1" - camunda-saas-oauth "^1.2.4" - got "^11.8.5" - optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" @@ -16345,11 +15888,6 @@ p-cancelable@^1.0.0: resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== -p-cancelable@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" - integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== - p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" @@ -16662,7 +16200,7 @@ passport-google-oauth20@2.x.x: dependencies: passport-oauth2 "1.x.x" -passport-google-oauth@2.0.0, passport-google-oauth@^2.0.0: +passport-google-oauth@2.0.0: version "2.0.0" 
resolved "https://registry.yarnpkg.com/passport-google-oauth/-/passport-google-oauth-2.0.0.tgz#f6eb4bc96dd6c16ec0ecfdf4e05ec48ca54d4dae" integrity sha512-JKxZpBx6wBQXX1/a1s7VmdBgwOugohH+IxCy84aPTZNq/iIPX6u7Mqov1zY7MKRz3niFPol0KJz8zPLBoHKtYA== @@ -16677,14 +16215,6 @@ passport-local@1.0.0: dependencies: passport-strategy "1.x.x" -passport-microsoft@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/passport-microsoft/-/passport-microsoft-1.0.0.tgz#78954cf3201fdce61beeb6587a3b158f8e9db86c" - integrity sha512-L1JHeCbSObSZZXiG7jU2KoKie6nzZLwGt38HXz1GasKrsCQdOnf5kH8ltV4BWNUfBL2Pt1csWn1iuBSerprrcg== - dependencies: - passport-oauth2 "1.6.1" - pkginfo "0.4.x" - passport-oauth1@1.x.x: version "1.3.0" resolved "https://registry.yarnpkg.com/passport-oauth1/-/passport-oauth1-1.3.0.tgz#5d57f1415c8e28e46b461a12ec1b492934f7c354" @@ -16699,17 +16229,6 @@ passport-oauth2-refresh@^2.1.0: resolved "https://registry.yarnpkg.com/passport-oauth2-refresh/-/passport-oauth2-refresh-2.1.0.tgz#c31cd133826383f5539d16ad8ab4f35ca73ce4a4" integrity sha512-4ML7ooCESCqiTgdDBzNUFTBcPR8zQq9iM6eppEUGMMvLdsjqRL93jKwWm4Az3OJcI+Q2eIVyI8sVRcPFvxcF/A== -passport-oauth2@1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.1.tgz#c5aee8f849ce8bd436c7f81d904a3cd1666f181b" - integrity sha512-ZbV43Hq9d/SBSYQ22GOiglFsjsD1YY/qdiptA+8ej+9C1dL1TVB+mBE5kDH/D4AJo50+2i8f4bx0vg4/yDDZCQ== - dependencies: - base64url "3.x.x" - oauth "0.9.x" - passport-strategy "1.x.x" - uid2 "0.0.x" - utils-merge "1.x.x" - passport-oauth2@1.x.x: version "1.7.0" resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.7.0.tgz#5c4766c8531ac45ffe9ec2c09de9809e2c841fc4" @@ -17086,11 +16605,6 @@ pkg-types@^1.0.3: mlly "^1.2.0" pathe "^1.1.0" -pkginfo@0.4.x: - version "0.4.1" - resolved "https://registry.yarnpkg.com/pkginfo/-/pkginfo-0.4.1.tgz#b5418ef0439de5425fc4995042dced14fb2a84ff" - integrity 
sha512-8xCNE/aT/EXKenuMDZ+xTVwkT8gsoHN2z/Q29l80u0ppGEXVvsKRzNMbtKhg8LS8k1tJLAHHylf6p4VFmP6XUQ== - pluralize@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1" @@ -17420,13 +16934,6 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" -posthog-js@^1.13.4: - version "1.100.0" - resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.100.0.tgz#687b9a6e4ed226aa6572f4040b418ea0c8b3d353" - integrity sha512-r2XZEiHQ9mBK7D1G9k57I8uYZ2kZTAJ0OCX6K/OOdCWN8jKPhw3h5F9No5weilP6eVAn+hrsy7NvPV7SCX7gMg== - dependencies: - fflate "^0.4.1" - posthog-js@^1.36.0: version "1.96.1" resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.96.1.tgz#4f9719a24e4e14037b0e72d430194d7cdb576447" @@ -17723,11 +17230,6 @@ prepend-http@^2.0.0: resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== -prepend-http@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-3.0.1.tgz#3e724d58fd5867465b300bb9615009fa2f8ee3b6" - integrity sha512-BLxfZh+m6UiAiCPZFJ4+vYoL7NrRs5XgCTRrjseATAggXhdZKKxn+JUNmuVYWY23bDHgaEHodxw8mnmtVEDtHw== - prettier-plugin-svelte@^2.3.0: version "2.6.0" resolved "https://registry.yarnpkg.com/prettier-plugin-svelte/-/prettier-plugin-svelte-2.6.0.tgz#0e845b560b55cd1d951d6c50431b4949f8591746" @@ -17827,14 +17329,6 @@ promise-inflight@^1.0.1: resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" integrity sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g== -promise-retry@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/promise-retry/-/promise-retry-1.1.1.tgz#6739e968e3051da20ce6497fb2b50f6911df3d6d" - integrity 
sha512-StEy2osPr28o17bIW776GtwO6+Q+M9zPiZkYfosciUUMYqjhU/ffwRAH0zN2+uvGyUsn8/YICIHRzLbPacpZGw== - dependencies: - err-code "^1.0.0" - retry "^0.10.0" - promise-retry@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/promise-retry/-/promise-retry-2.0.1.tgz#ff747a13620ab57ba688f5fc67855410c370da22" @@ -18026,7 +17520,7 @@ q@^1.1.2: resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== -qs@^6.10.3, qs@^6.11.0, qs@^6.4.0: +qs@^6.11.0, qs@^6.4.0: version "6.11.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.2.tgz#64bea51f12c1f5da1bc01496f48ffcff7c69d7d9" integrity sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA== @@ -18082,11 +17576,6 @@ quick-lru@^4.0.1: resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== -quick-lru@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" - integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== - quote-unquote@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/quote-unquote/-/quote-unquote-1.0.0.tgz#67a9a77148effeaf81a4d428404a710baaac8a0b" @@ -18139,11 +17628,6 @@ rc@1.2.8, rc@^1.2.7, rc@^1.2.8: minimist "^1.2.0" strip-json-comments "~2.0.1" -reachdown@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/reachdown/-/reachdown-1.1.0.tgz#c3b85b459dbd0fe2c79782233a0a38e66a9b5454" - integrity sha512-6LsdRe4cZyOjw4NnvbhUd/rGG7WQ9HMopPr+kyL018Uci4kijtxcGR5kVb5Ln13k4PEE+fEFQbjfOvNw7cnXmA== - react-is@^17.0.1: version "17.0.2" resolved 
"https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" @@ -18541,11 +18025,6 @@ requizzle@^0.2.3: dependencies: lodash "^4.17.21" -resolve-alpn@^1.0.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" - integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== - resolve-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" @@ -18605,13 +18084,6 @@ responselike@1.0.2, responselike@^1.0.2: dependencies: lowercase-keys "^1.0.0" -responselike@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.1.tgz#9a0bc8fdc252f3fb1cca68b016591059ba1422bc" - integrity sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw== - dependencies: - lowercase-keys "^2.0.0" - restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -18633,11 +18105,6 @@ retry@0.13.1, retry@^0.13.1: resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" integrity "sha1-GFsVh6z2eRnWOzVzSeA1N7JIRlg= sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==" -retry@^0.10.0: - version "0.10.1" - resolved "https://registry.yarnpkg.com/retry/-/retry-0.10.1.tgz#e76388d217992c252750241d3d3956fed98d8ff4" - integrity sha512-ZXUSQYTHdl3uS7IuCehYfMzKyIDBNoAuUblvy5oGO5UJSUTmStUUVPXbA9Qxd173Bgre53yCQczQuHgRWAdvJQ== - retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" @@ -19094,7 +18561,7 @@ serialize-javascript@^6.0.1: dependencies: randombytes "^2.1.0" -server-destroy@1.0.1, 
server-destroy@^1.0.1: +server-destroy@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/server-destroy/-/server-destroy-1.0.1.tgz#f13bf928e42b9c3e79383e61cc3998b5d14e6cdd" integrity sha512-rb+9B5YBIEzYcD6x2VKidaa+cqYBJQKnU4oe4E3ANwRRN56yk/ua1YCJT1n21NTS8w6CcOclAKNP3PhdCXKYtQ== @@ -19123,11 +18590,6 @@ set-function-name@^2.0.0, set-function-name@^2.0.1: functions-have-names "^1.2.3" has-property-descriptors "^1.0.0" -setimmediate@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" - integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== - setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" @@ -19603,7 +19065,7 @@ stable@^0.1.8: resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== -stack-trace@0.0.10, stack-trace@0.0.x: +stack-trace@0.0.x: version "0.0.10" resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" integrity sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg== @@ -19885,14 +19347,6 @@ strip-outer@^1.0.0: dependencies: escape-string-regexp "^1.0.2" -stripe@9.16.0: - version "9.16.0" - resolved "https://registry.yarnpkg.com/stripe/-/stripe-9.16.0.tgz#94c24549c91fced457b9e3259e8a1a1bdb6dbd0e" - integrity sha512-Dn8K+jSoQcXjxCobRI4HXUdHjOXsiF/KszK49fJnkbeCFjZ3EZxLG2JiM/CX+Hcq27NBDtv/Sxhvy+HhTmvyaQ== - dependencies: - "@types/node" ">=8.1.0" - qs "^6.10.3" - striptags@^3.1.1: version "3.2.0" resolved "https://registry.yarnpkg.com/striptags/-/striptags-3.2.0.tgz#cc74a137db2de8b0b9a370006334161f7dd67052" @@ -19966,18 +19420,6 @@ 
sublevel-pouchdb@7.2.2: ltgt "2.2.1" readable-stream "1.1.14" -subleveldown@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/subleveldown/-/subleveldown-5.0.1.tgz#aa2b4e4698a48d9a86856b2c4df1b6bce2d2ce53" - integrity sha512-cVqd/URpp7si1HWu5YqQ3vqQkjuolAwHypY1B4itPlS71/lsf6TQPZ2Y0ijT22EYVkvH5ove9JFJf4u7VGPuZw== - dependencies: - abstract-leveldown "^6.3.0" - encoding-down "^6.2.0" - inherits "^2.0.3" - level-option-wrap "^1.1.0" - levelup "^4.4.0" - reachdown "^1.1.0" - superagent@^8.0.5: version "8.1.2" resolved "https://registry.yarnpkg.com/superagent/-/superagent-8.1.2.tgz#03cb7da3ec8b32472c9d20f6c2a57c7f3765f30b" @@ -20844,11 +20286,6 @@ typed-array-length@^1.0.4: for-each "^0.3.3" is-typed-array "^1.1.9" -typed-duration@^1.0.12: - version "1.0.13" - resolved "https://registry.yarnpkg.com/typed-duration/-/typed-duration-1.0.13.tgz#a40f9ba563b6e20674cac491e15ecbf6811d85a7" - integrity sha512-HLwA+hNq/2eXe03isJSfa7YJt6NikplBGdNKvlhyuR6WL5iZi2uXJIZv1SSOMEIukCZbeQ8QwIcQ801S0/Qulw== - typedarray-to-buffer@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" @@ -21062,7 +20499,7 @@ update-browserslist-db@^1.0.10: escalade "^3.1.1" picocolors "^1.0.0" -update-dotenv@1.1.1, update-dotenv@^1.1.1: +update-dotenv@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/update-dotenv/-/update-dotenv-1.1.1.tgz#17146f302f216c3c92419d5a327a45be910050ca" integrity sha512-3cIC18In/t0X/yH793c00qqxcKD8jVCgNOPif/fGQkFpYMGecM9YAc+kaAKXuZsM2dE9I9wFI7KvAuNX22SGMQ== @@ -21106,7 +20543,7 @@ url-parse-lax@^3.0.0: dependencies: prepend-http "^2.0.0" -url-parse@^1.4.3, url-parse@^1.5.3: +url-parse@^1.5.3: version "1.5.10" resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== @@ -21176,11 +20613,6 @@ 
uuid@^3.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^7.0.3: - version "7.0.3" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-7.0.3.tgz#c5c9f2c8cf25dc0a372c4df1441c41f5bd0c680b" - integrity sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg== - uuid@^9.0.0, uuid@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" @@ -22079,23 +21511,3 @@ z-schema@^5.0.1: validator "^13.7.0" optionalDependencies: commander "^9.4.1" - -zeebe-node@^8.2.5: - version "8.3.1" - resolved "https://registry.yarnpkg.com/zeebe-node/-/zeebe-node-8.3.1.tgz#e100bf3708464e305305b4efa1ffde53f9786c45" - integrity sha512-68ascWO3g7g+9WwDzvfa3I9TkLKHku5auEgSINP+k5ktNfsfGW68ELDmEJA+XHZgzvGsdGILZqGRzVd5SC8aaQ== - dependencies: - "@grpc/grpc-js" "1.9.7" - "@grpc/proto-loader" "0.7.10" - chalk "^2.4.2" - console-stamp "^3.0.2" - dayjs "^1.8.15" - debug "^4.2.0" - fast-xml-parser "^4.1.3" - fp-ts "^2.5.1" - got "^11.8.5" - long "^4.0.0" - promise-retry "^1.1.1" - stack-trace "0.0.10" - typed-duration "^1.0.12" - uuid "^7.0.3" From 6964e2d146de1a51eb55ffbf8a40052bce090575 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 29 Jan 2024 13:43:51 +0000 Subject: [PATCH 09/54] Fixing update aliasing. 
--- .../server/scripts/integrations/postgres/reset.sh | 1 + packages/server/src/integrations/base/sql.ts | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/reset.sh b/packages/server/scripts/integrations/postgres/reset.sh index 32778bd11f..29a5db0181 100755 --- a/packages/server/scripts/integrations/postgres/reset.sh +++ b/packages/server/scripts/integrations/postgres/reset.sh @@ -1,3 +1,4 @@ #!/bin/bash docker-compose down docker volume prune -f +docker volume rm postgres_pg_data diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 3375e175e6..d33c077ee5 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -424,7 +424,7 @@ class InternalBuilder { knexWithAlias( knex: Knex, endpoint: { entityId: string; alias?: string; schema?: string } - ): { query: KnexQuery; name: string } { + ): { query: KnexQuery; aliased: string } { const tableName = endpoint.entityId const alias = endpoint.alias const aliased = alias ? 
alias : tableName @@ -433,7 +433,7 @@ class InternalBuilder { if (endpoint.schema) { query = query.withSchema(endpoint.schema) } - return { query, name: aliased } + return { query, aliased } } create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { @@ -493,7 +493,7 @@ class InternalBuilder { } // start building the query - let { query, name: aliased } = this.knexWithAlias(knex, endpoint) + let { query, aliased } = this.knexWithAlias(knex, endpoint) query = query.limit(foundLimit) if (foundOffset) { query = query.offset(foundOffset) @@ -522,9 +522,9 @@ class InternalBuilder { update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { const { endpoint, body, filters } = json - let { query } = this.knexWithAlias(knex, endpoint) + let { query, aliased } = this.knexWithAlias(knex, endpoint) const parsedBody = parseBody(body) - query = this.addFilters(query, filters, { tableName: endpoint.entityId }) + query = this.addFilters(query, filters, { tableName: aliased }) // mysql can't use returning if (opts.disableReturning) { return query.update(parsedBody) @@ -535,7 +535,7 @@ class InternalBuilder { delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { const { endpoint, filters } = json - let { query, name: aliased } = this.knexWithAlias(knex, endpoint) + let { query, aliased } = this.knexWithAlias(knex, endpoint) query = this.addFilters(query, filters, { tableName: aliased }) // mysql can't use returning if (opts.disableReturning) { From 5d2ba68fae4b39086686c41999b903c05c352ba4 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 30 Jan 2024 13:35:45 +0000 Subject: [PATCH 10/54] Adding test case based on capture of real failing query. 
--- .../server/src/integrations/tests/sql.spec.ts | 107 ++++++++++++++++++ 1 file changed, 107 insertions(+) diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index 5cc4849d03..580a8117cb 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -683,3 +683,110 @@ describe("SQL query builder", () => { }) }) }) + +describe("Captures of real examples", () => { + const limit = 5000 + + it("should handle filtering by relationship", () => { + const queryJson = { + endpoint: { + datasourceId: "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + entityId: "products", + operation: "READ", + alias: "a", + }, + resource: { + fields: [ + "a.productname", + "a.productid", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid", + ], + }, + filters: { + equal: { + "1:tasks.taskname": "assembling", + }, + onEmptyFilter: "all", + }, + sort: { + productname: { + direction: "ASCENDING", + }, + }, + paginate: { + limit: 100, + page: 1, + }, + relationships: [ + { + tableName: "tasks", + column: "tasks", + through: "products_tasks", + from: "productid", + to: "taskid", + fromPrimary: "productid", + toPrimary: "taskid", + aliases: { + products_tasks: "c", + tasks: "b", + products: "a", + }, + }, + ], + meta: { + table: { + type: "table", + _id: "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products", + primary: ["productid"], + name: "a", + schema: { + productname: { + type: "string", + externalType: "character varying", + autocolumn: false, + name: "productname", + constraints: { + presence: false, + }, + }, + productid: { + type: "number", + externalType: "integer", + autocolumn: true, + name: "productid", + constraints: { + presence: false, + }, + }, + tasks: { + tableId: + "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + name: "tasks", + relationshipType: "many-to-many", + fieldName: "taskid", + through: + 
"datasource_plus_8066e56456784eb2a00129d31be5c3e7__products_tasks", + throughFrom: "taskid", + throughTo: "productid", + type: "link", + main: true, + _id: "ca6862d9ba09146dd8a68e3b5b7055a09", + }, + }, + sourceId: "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + sourceType: "external", + primaryDisplay: "productname", + }, + }, + } + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [100, "assembling", limit], + sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 order by "a"."productname" asc limit $3`, + }) + }) +}) From 09a0d00aa7df535454cb3eafe49714dcd1adf3e9 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 30 Jan 2024 13:50:36 +0000 Subject: [PATCH 11/54] Fixing some test cases. 
--- packages/server/src/integrations/tests/sql.spec.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index 580a8117cb..bca0cf5422 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -502,7 +502,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson({ schema: "production" })) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, }) }) @@ -510,7 +510,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson()) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", 
"brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, }) }) @@ -520,7 +520,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" on "products"."product_id" = "stocks"."product_id" limit $2`, + sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`, }) }) From bb0b776684e29a529ac5198451472e8981cadd1f Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 30 Jan 2024 17:57:10 +0000 Subject: [PATCH 12/54] Updating how aliasing is handled. 
--- .../server/src/api/controllers/row/alias.ts | 2 +- packages/server/src/integrations/base/sql.ts | 86 ++++++++------ .../server/src/integrations/tests/sql.spec.ts | 110 ++---------------- .../sqlQueryJson/filterByRelationship.json | 94 +++++++++++++++ packages/types/src/sdk/search.ts | 3 +- 5 files changed, 158 insertions(+), 137 deletions(-) create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index fc00b505c4..589431cc1a 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -113,7 +113,7 @@ export default class AliasTables { } json.meta.tables = aliasedTables } - json.endpoint.alias = this.getAlias(json.endpoint.entityId) + json.tableAliases = this.tableAliases const response = await getDatasourceAndQuery(json) return this.reverse(response) } diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index d33c077ee5..c9be2e1ae6 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -129,8 +129,13 @@ class InternalBuilder { addFilters( query: KnexQuery, filters: SearchFilters | undefined, - opts: { relationship?: boolean; tableName?: string } + tableName: string, + opts: { aliases?: Record; relationship?: boolean } ): KnexQuery { + function getTableName(name: string) { + const alias = opts.aliases?.[name] + return alias || name + } function iterate( structure: { [key: string]: any }, fn: (key: string, value: any) => void @@ -139,10 +144,11 @@ class InternalBuilder { const updatedKey = dbCore.removeKeyNumbering(key) const isRelationshipField = updatedKey.includes(".") if (!opts.relationship && !isRelationshipField) { - fn(`${opts.tableName}.${updatedKey}`, value) + fn(`${getTableName(tableName)}.${updatedKey}`, value) } if (opts.relationship 
&& isRelationshipField) { - fn(updatedKey, value) + const [filterTableName, property] = updatedKey.split(".") + fn(`${getTableName(filterTableName)}.${property}`, value) } } } @@ -345,17 +351,15 @@ class InternalBuilder { query: KnexQuery, fromTable: string, relationships: RelationshipsJson[] | undefined, - schema: string | undefined + schema: string | undefined, + aliases?: Record ): KnexQuery { if (!relationships) { return query } const tableSets: Record = {} - // add up all aliases - let aliases: Record = {} // aggregate into table sets (all the same to tables) for (let relationship of relationships) { - aliases = { ...aliases, ...relationship.aliases } const keyObj: { toTable: string; throughTable: string | undefined } = { toTable: relationship.tableName, throughTable: undefined, @@ -372,9 +376,9 @@ class InternalBuilder { } for (let [key, relationships] of Object.entries(tableSets)) { const { toTable, throughTable } = JSON.parse(key) - const toAlias = aliases[toTable] || toTable, - throughAlias = aliases[throughTable] || throughTable, - fromAlias = aliases[fromTable] || fromTable + const toAlias = aliases?.[toTable] || toTable, + throughAlias = aliases?.[throughTable] || throughTable, + fromAlias = aliases?.[fromTable] || fromTable let toTableWithSchema = this.tableNameWithSchema(toTable, { alias: toAlias, schema, @@ -423,22 +427,23 @@ class InternalBuilder { knexWithAlias( knex: Knex, - endpoint: { entityId: string; alias?: string; schema?: string } - ): { query: KnexQuery; aliased: string } { + endpoint: QueryJson["endpoint"], + aliases?: QueryJson["tableAliases"] + ): KnexQuery { const tableName = endpoint.entityId - const alias = endpoint.alias - const aliased = alias ? alias : tableName - const tableAliased = alias ? `${tableName} as ${alias}` : tableName + const tableAliased = aliases?.[tableName] + ? 
`${tableName} as ${aliases?.[tableName]}` + : tableName let query = knex(tableAliased) if (endpoint.schema) { query = query.withSchema(endpoint.schema) } - return { query, aliased } + return query } create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { const { endpoint, body } = json - let { query } = this.knexWithAlias(knex, endpoint) + let query = this.knexWithAlias(knex, endpoint) const parsedBody = parseBody(body) // make sure no null values in body for creation for (let [key, value] of Object.entries(parsedBody)) { @@ -457,7 +462,7 @@ class InternalBuilder { bulkCreate(knex: Knex, json: QueryJson): KnexQuery { const { endpoint, body } = json - let { query } = this.knexWithAlias(knex, endpoint) + let query = this.knexWithAlias(knex, endpoint) if (!Array.isArray(body)) { return query } @@ -466,8 +471,10 @@ class InternalBuilder { } read(knex: Knex, json: QueryJson, limit: number): KnexQuery { - let { endpoint, resource, filters, paginate, relationships } = json + let { endpoint, resource, filters, paginate, relationships, tableAliases } = + json + const tableName = endpoint.entityId // select all if not specified if (!resource) { resource = { fields: [] } @@ -493,19 +500,20 @@ class InternalBuilder { } // start building the query - let { query, aliased } = this.knexWithAlias(knex, endpoint) + let query = this.knexWithAlias(knex, endpoint, tableAliases) query = query.limit(foundLimit) if (foundOffset) { query = query.offset(foundOffset) } - query = this.addFilters(query, filters, { tableName: aliased }) + query = this.addFilters(query, filters, tableName, { + aliases: tableAliases, + }) // add sorting to pre-query query = this.addSorting(query, json) - // @ts-ignore - let preQuery: KnexQuery = knex({ - // @ts-ignore - [aliased]: query, - }).select(selectStatement) + const alias = tableAliases?.[tableName] || tableName + let preQuery = knex({ + [alias]: query, + } as any).select(selectStatement) as any // have to add after as well (this breaks 
MS-SQL) if (this.client !== SqlClient.MS_SQL) { preQuery = this.addSorting(preQuery, json) @@ -513,18 +521,24 @@ class InternalBuilder { // handle joins query = this.addRelationships( preQuery, - aliased, + tableName, relationships, - endpoint.schema + endpoint.schema, + tableAliases ) - return this.addFilters(query, filters, { relationship: true }) + return this.addFilters(query, filters, tableName, { + relationship: true, + aliases: tableAliases, + }) } update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { - const { endpoint, body, filters } = json - let { query, aliased } = this.knexWithAlias(knex, endpoint) + const { endpoint, body, filters, tableAliases } = json + let query = this.knexWithAlias(knex, endpoint, tableAliases) const parsedBody = parseBody(body) - query = this.addFilters(query, filters, { tableName: aliased }) + query = this.addFilters(query, filters, endpoint.entityId, { + aliases: tableAliases, + }) // mysql can't use returning if (opts.disableReturning) { return query.update(parsedBody) @@ -534,9 +548,11 @@ class InternalBuilder { } delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { - const { endpoint, filters } = json - let { query, aliased } = this.knexWithAlias(knex, endpoint) - query = this.addFilters(query, filters, { tableName: aliased }) + const { endpoint, filters, tableAliases } = json + let query = this.knexWithAlias(knex, endpoint, tableAliases) + query = this.addFilters(query, filters, endpoint.entityId, { + aliases: tableAliases, + }) // mysql can't use returning if (opts.disableReturning) { return query.delete() diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index bca0cf5422..0e7257242c 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -1,5 +1,7 @@ -const Sql = require("../base/sql").default -const { SqlClient } = require("../utils") +import { SqlClient } 
from "../utils" +import Sql from "../base/sql" +import { QueryJson } from "@budibase/types" +import { join } from "path" const TABLE_NAME = "test" @@ -17,7 +19,7 @@ function generateReadJson({ filters, sort, paginate, -}: any = {}) { +}: any = {}): QueryJson { return { endpoint: endpoint(table || TABLE_NAME, "READ"), resource: { @@ -30,7 +32,7 @@ function generateReadJson({ table: { name: table || TABLE_NAME, primary: ["id"], - }, + } as any, }, } } @@ -687,102 +689,12 @@ describe("SQL query builder", () => { describe("Captures of real examples", () => { const limit = 5000 + function getJson(name: string): QueryJson { + return require(join(__dirname, "sqlQueryJson", name)) as QueryJson + } + it("should handle filtering by relationship", () => { - const queryJson = { - endpoint: { - datasourceId: "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - entityId: "products", - operation: "READ", - alias: "a", - }, - resource: { - fields: [ - "a.productname", - "a.productid", - "b.executorid", - "b.taskname", - "b.taskid", - "b.completed", - "b.qaid", - ], - }, - filters: { - equal: { - "1:tasks.taskname": "assembling", - }, - onEmptyFilter: "all", - }, - sort: { - productname: { - direction: "ASCENDING", - }, - }, - paginate: { - limit: 100, - page: 1, - }, - relationships: [ - { - tableName: "tasks", - column: "tasks", - through: "products_tasks", - from: "productid", - to: "taskid", - fromPrimary: "productid", - toPrimary: "taskid", - aliases: { - products_tasks: "c", - tasks: "b", - products: "a", - }, - }, - ], - meta: { - table: { - type: "table", - _id: "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products", - primary: ["productid"], - name: "a", - schema: { - productname: { - type: "string", - externalType: "character varying", - autocolumn: false, - name: "productname", - constraints: { - presence: false, - }, - }, - productid: { - type: "number", - externalType: "integer", - autocolumn: true, - name: "productid", - constraints: { - presence: false, - }, - 
}, - tasks: { - tableId: - "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - name: "tasks", - relationshipType: "many-to-many", - fieldName: "taskid", - through: - "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products_tasks", - throughFrom: "taskid", - throughTo: "productid", - type: "link", - main: true, - _id: "ca6862d9ba09146dd8a68e3b5b7055a09", - }, - }, - sourceId: "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - sourceType: "external", - primaryDisplay: "productname", - }, - }, - } + const queryJson = getJson(`filterByRelationship.json`) let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [100, "assembling", limit], diff --git a/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json new file mode 100644 index 0000000000..eb1025f382 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json @@ -0,0 +1,94 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "products", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.productname", + "a.productid", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid" + ] + }, + "filters": { + "equal": { + "1:tasks.taskname": "assembling" + }, + "onEmptyFilter": "all" + }, + "sort": { + "productname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "tasks", + "column": "tasks", + "through": "products_tasks", + "from": "productid", + "to": "taskid", + "fromPrimary": "productid", + "toPrimary": "taskid" + } + ], + "tableAliases": { + "products_tasks": "c", + "tasks": "b", + "products": "a" + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products", + "primary": [ + "productid" + ], + "name": "a", + 
"schema": { + "productname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "productname", + "constraints": { + "presence": false + } + }, + "productid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "productid", + "constraints": { + "presence": false + } + }, + "tasks": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "tasks", + "relationshipType": "many-to-many", + "fieldName": "taskid", + "through": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products_tasks", + "throughFrom": "taskid", + "throughTo": "productid", + "type": "link", + "main": true, + "_id": "ca6862d9ba09146dd8a68e3b5b7055a09" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "productname" + } + } +} \ No newline at end of file diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts index a4045c2558..67c344d845 100644 --- a/packages/types/src/sdk/search.ts +++ b/packages/types/src/sdk/search.ts @@ -67,7 +67,6 @@ export interface RelationshipsJson { fromPrimary?: string toPrimary?: string tableName: string - aliases?: Record column: string } @@ -75,7 +74,6 @@ export interface QueryJson { endpoint: { datasourceId: string entityId: string - alias?: string operation: Operation schema?: string } @@ -96,6 +94,7 @@ export interface QueryJson { idFilter?: SearchFilters } relationships?: RelationshipsJson[] + tableAliases?: Record } export interface SqlQuery { From c4f4a46d7002e3b4321bb6cd7c22b0a608d481bf Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 5 Feb 2024 12:45:19 +0000 Subject: [PATCH 13/54] Quick fix based on testing. 
--- .../server/src/api/controllers/row/alias.ts | 7 +++++- packages/server/src/integrations/base/sql.ts | 1 - .../server/src/integrations/tests/sql.spec.ts | 17 ------------- .../src/integrations/tests/sqlAlias.spec.ts | 25 +++++++++++++++++++ 4 files changed, 31 insertions(+), 19 deletions(-) create mode 100644 packages/server/src/integrations/tests/sqlAlias.spec.ts diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 589431cc1a..6533e51728 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -113,7 +113,12 @@ export default class AliasTables { } json.meta.tables = aliasedTables } - json.tableAliases = this.tableAliases + // invert and return + const invertedTableAliases: Record = {} + for (let [key, value] of Object.entries(this.tableAliases)) { + invertedTableAliases[value] = key + } + json.tableAliases = invertedTableAliases const response = await getDatasourceAndQuery(json) return this.reverse(response) } diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index c9be2e1ae6..cc2e1d94a8 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -499,7 +499,6 @@ class InternalBuilder { foundLimit = paginate.limit } // start building the query - let query = this.knexWithAlias(knex, endpoint, tableAliases) query = query.limit(foundLimit) if (foundOffset) { diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index 0e7257242c..cf22064cb7 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -685,20 +685,3 @@ describe("SQL query builder", () => { }) }) }) - -describe("Captures of real examples", () => { - const limit = 5000 - - function getJson(name: string): QueryJson { - return 
require(join(__dirname, "sqlQueryJson", name)) as QueryJson - } - - it("should handle filtering by relationship", () => { - const queryJson = getJson(`filterByRelationship.json`) - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [100, "assembling", limit], - sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 order by "a"."productname" asc limit $3`, - }) - }) -}) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts new file mode 100644 index 0000000000..cfd82cfd01 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -0,0 +1,25 @@ +import { QueryJson } from "@budibase/types" +import { join } from "path" +import Sql from "../base/sql" +import { SqlClient } from "../utils" + +describe("Captures of real examples", () => { + const limit = 5000 + + function getJson(name: string): QueryJson { + return require(join(__dirname, "sqlQueryJson", name)) as QueryJson + } + + it("should handle basic retrieval", () => { + const queryJson = getJson("") + }) + + it("should handle filtering by relationship", () => { + const queryJson = getJson("filterByRelationship.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [100, "assembling", limit], + sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as 
"b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 order by "a"."productname" asc limit $3`, + }) + }) +}) From e8e7eea47a234241dd2c79196b7e101b183c56ca Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 5 Feb 2024 15:23:24 +0000 Subject: [PATCH 14/54] Adding some test cases for aliasing. --- .../src/integrations/tests/sqlAlias.spec.ts | 65 ++++++- .../basicFetchWithRelationships.json | 183 ++++++++++++++++++ .../sqlQueryJson/createWithRelationships.json | 173 +++++++++++++++++ .../tests/sqlQueryJson/deleteSimple.json | 75 +++++++ .../sqlQueryJson/updateRelationship.json | 181 +++++++++++++++++ .../tests/sqlQueryJson/updateSimple.json | 181 +++++++++++++++++ 6 files changed, 850 insertions(+), 8 deletions(-) create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index cfd82cfd01..c91d988849 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -10,16 +10,65 @@ describe("Captures of real examples", () => { return require(join(__dirname, "sqlQueryJson", name)) as QueryJson } - it("should handle basic retrieval", () => { - const queryJson = getJson("") + describe("create", () => { + it("should create a row with 
relationships", () => { + const queryJson = getJson("createWithRelationships.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: ["A Street", 34, "London", "A", "B", "designer", 1990], + sql: `insert into "persons" ("address", "age", "city", "firstname", "lastname", "type", "year") values ($1, $2, $3, $4, $5, $6, $7) returning *`, + }) + }) }) - it("should handle filtering by relationship", () => { - const queryJson = getJson("filterByRelationship.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [100, "assembling", limit], - sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 order by "a"."productname" asc limit $3`, + describe("read", () => { + it("should handle basic retrieval with relationships", () => { + const queryJson = getJson("basicFetchWithRelationships.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [100, limit], + sql: `select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from 
(select * from "persons" as "a" order by "a"."firstname" asc limit $1) as "a" left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" order by "a"."firstname" asc limit $2`, + }) + }) + + it("should handle filtering by relationship", () => { + const queryJson = getJson("filterByRelationship.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [100, "assembling", limit], + sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 order by "a"."productname" asc limit $3`, + }) + }) + }) + + describe("update", () => { + it("should handle performing a simple update", () => { + const queryJson = getJson("updateSimple.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], + sql: `update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`, + }) + }) + + it("should handle performing an update of relationships", () => { + const queryJson = getJson("updateRelationship.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], + sql: `update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`, + }) + }) + }) + + describe("delete", () 
=> { + it("should handle deleting with relationships", () => { + const queryJson = getJson("deleteSimple.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: ["ddd", ""], + sql: `delete from "compositetable" as "a" where "a"."keypartone" = $1 and "a"."keyparttwo" = $2 returning "a"."keyparttwo" as "a.keyparttwo", "a"."keypartone" as "a.keypartone", "a"."name" as "a.name"`, + }) }) }) }) diff --git a/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json b/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json new file mode 100644 index 0000000000..3445f5fe67 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json @@ -0,0 +1,183 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid" + ] + }, + "filters": {}, + "sort": { + "firstname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "tasks", + "column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "a", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + 
"name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": "USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": "executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", + "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, 
+ "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json b/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json new file mode 100644 index 0000000000..20331b949a --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json @@ -0,0 +1,173 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "CREATE" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname" + ] + }, + "filters": {}, + "relationships": [ + { + "tableName": "tasks", + "column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "body": { + "year": 1990, + "firstname": "A", + "address": "A Street", + "age": 34, + "type": "designer", + "city": "London", + "lastname": "B" + }, + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "a", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + 
"name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": "USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": "executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", + "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, + "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json b/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json new file mode 100644 index 0000000000..2266b8c8be --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json @@ -0,0 +1,75 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + 
"entityId": "compositetable", + "operation": "DELETE" + }, + "resource": { + "fields": [ + "a.keyparttwo", + "a.keypartone", + "a.name" + ] + }, + "filters": { + "equal": { + "keypartone": "ddd", + "keyparttwo": "" + } + }, + "relationships": [], + "extra": { + "idFilter": { + "equal": { + "keypartone": "ddd", + "keyparttwo": "" + } + } + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__compositetable", + "primary": [ + "keypartone", + "keyparttwo" + ], + "name": "a", + "schema": { + "keyparttwo": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "keyparttwo", + "constraints": { + "presence": true + } + }, + "keypartone": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "keypartone", + "constraints": { + "presence": true + } + }, + "name": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "name", + "constraints": { + "presence": false + } + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "keypartone" + } + }, + "tableAliases": { + "compositetable": "a" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json new file mode 100644 index 0000000000..01e795bd6c --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json @@ -0,0 +1,181 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "UPDATE" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname" + ] + }, + "filters": { + "equal": { + "personid": 5 + } + }, + "relationships": [ + { + "tableName": "tasks", + 
"column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "body": { + "year": 1990, + "firstname": "C", + "address": "A Street", + "age": 34, + "type": "designer", + "city": "London", + "lastname": "B" + }, + "extra": { + "idFilter": { + "equal": { + "personid": 5 + } + } + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "a", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": "USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": 
false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": "executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", + "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, + "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json b/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json new file mode 100644 index 0000000000..01e795bd6c --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json @@ -0,0 +1,181 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "UPDATE" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname" + ] + }, + "filters": { + "equal": { + "personid": 5 + } + }, + "relationships": [ + { + "tableName": "tasks", + "column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "body": { + "year": 1990, + "firstname": "C", + "address": "A Street", + "age": 34, + "type": "designer", + "city": "London", + "lastname": "B" + }, + "extra": { + "idFilter": { + 
"equal": { + "personid": 5 + } + } + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "a", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": "USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": 
"executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", + "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, + "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file From 9a8c31a2a42bc616d096b2d76e0e015c3ac18983 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 5 Feb 2024 18:57:16 +0000 Subject: [PATCH 15/54] Handling deletion of rows that violate constraints, this has been an issue in Budibase for some time and causes some confusion, attempting to resolve this when deleting rows. --- .../api/controllers/row/ExternalRequest.ts | 91 ++++++++++++++++--- 1 file changed, 76 insertions(+), 15 deletions(-) diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 2f3c1ad557..4f755845dc 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -7,6 +7,7 @@ import { FilterType, IncludeRelationship, ManyToManyRelationshipFieldMetadata, + ManyToOneRelationshipFieldMetadata, OneToManyRelationshipFieldMetadata, Operation, PaginationJson, @@ -102,6 +103,26 @@ function buildFilters( } } +function removeRelationships( + rowId: string, + table: Table, + isManyToMany: boolean, + colName?: string +) { + const tableId = table._id! + const filters = buildFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + const op = isManyToMany ? Operation.DELETE : Operation.UPDATE + const body = colName && !isManyToMany ? 
{ [colName]: null } : undefined + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, op), + body, + filters, + }) + } +} + /** * This function checks the incoming parameters to make sure all the inputs are * valid based on on the table schema. The main thing this is looking for is when a @@ -305,6 +326,18 @@ export class ExternalRequest { } } + async getRow(table: Table, rowId: string): Promise { + const response = await getDatasourceAndQuery({ + endpoint: getEndpoint(table._id!, Operation.READ), + filters: buildFilters(rowId, {}, table), + }) + if (response.length > 0) { + return response[0] + } else { + throw new Error(`Cannot fetch row by ID "${rowId}"`) + } + } + inputProcessing(row: Row | undefined, table: Table) { if (!row) { return { row, manyRelationships: [] } @@ -572,7 +605,9 @@ export class ExternalRequest { * information. */ async lookupRelations(tableId: string, row: Row) { - const related: { [key: string]: any } = {} + const related: { + [key: string]: { rows: Row[]; isMany: boolean; tableId: string } + } = {} const { tableName } = breakExternalTableId(tableId) if (!tableName) { return related @@ -591,7 +626,7 @@ export class ExternalRequest { continue } const isMany = field.relationshipType === RelationshipType.MANY_TO_MANY - const tableId = isMany ? field.through : field.tableId + const tableId = isMany ? field.through! : field.tableId! const { tableName: relatedTableName } = breakExternalTableId(tableId) // @ts-ignore const linkPrimaryKey = this.tables[relatedTableName].primary[0] @@ -610,7 +645,7 @@ export class ExternalRequest { }, }) // this is the response from knex if no rows found - const rows = !response[0].read ? response : [] + const rows: Row[] = !response[0].read ? response : [] const storeTo = isMany ? 
field.throughFrom || linkPrimaryKey : fieldName related[storeTo] = { rows, isMany, tableId } } @@ -698,24 +733,46 @@ export class ExternalRequest { continue } for (let row of rows) { - const filters = buildFilters(generateIdForRow(row, table), {}, table) - // safety check, if there are no filters on deletion bad things happen - if (Object.keys(filters).length !== 0) { - const op = isMany ? Operation.DELETE : Operation.UPDATE - const body = isMany ? undefined : { [colName]: null } - promises.push( - getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, op), - body, - filters, - }) - ) + const promise = removeRelationships( + generateIdForRow(row, table), + table, + isMany, + colName + ) + if (promise) { + promises.push(promise) } } } await Promise.all(promises) } + async removeRelationshipsToRow(table: Table, rowId: string) { + const row = await this.getRow(table, rowId) + const related = await this.lookupRelations(table._id!, row) + for (let column of Object.values(table.schema)) { + if ( + column.type !== FieldType.LINK || + column.relationshipType === RelationshipType.ONE_TO_MANY + ) { + continue + } + const relationshipColumn = column as ManyToOneRelationshipFieldMetadata + const { rows, isMany, tableId } = related[relationshipColumn.fieldName] + const table = this.getTable(tableId)! + await Promise.all( + rows.map(row => + removeRelationships( + generateIdForRow(row, table), + table, + isMany, + relationshipColumn.fieldName + ) + ) + ) + } + } + /** * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which * you have column overlap in relationships, e.g. 
we join a few different tables and they all have the @@ -828,6 +885,10 @@ export class ExternalRequest { } const aliasing = new AliasTables(Object.keys(this.tables)) + // remove any relationships that could block deletion + if (operation === Operation.DELETE && id) { + await this.removeRelationshipsToRow(table, generateRowIdField(id)) + } const response = await aliasing.queryWithAliasing(json) // handle many-to-many relationships now if we know the ID (could be auto increment) if (operation !== Operation.READ) { From 5f76f143bf1d8a561821317b3145256ac0a51fd3 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 23 Feb 2024 17:31:45 +0000 Subject: [PATCH 16/54] Adding the ability to disable SQL aliasing if necessary. --- .../src/api/controllers/row/ExternalRequest.ts | 13 +++++++++++-- packages/server/src/environment.ts | 6 ++++-- packages/server/src/integrations/base/sql.ts | 2 +- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 4f755845dc..0070e0bf24 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -35,6 +35,7 @@ import { processDates, processFormulas } from "../../../utilities/rowProcessor" import { db as dbCore } from "@budibase/backend-core" import AliasTables from "./alias" import sdk from "../../../sdk" +import env from "../../../environment" export interface ManyRelationship { tableId?: string @@ -884,12 +885,20 @@ export class ExternalRequest { }, } - const aliasing = new AliasTables(Object.keys(this.tables)) // remove any relationships that could block deletion if (operation === Operation.DELETE && id) { await this.removeRelationshipsToRow(table, generateRowIdField(id)) } - const response = await aliasing.queryWithAliasing(json) + + // aliasing can be disabled fully if desired + let response + if (!env.SQL_ALIASING_DISABLE) { + 
const aliasing = new AliasTables(Object.keys(this.tables)) + response = await aliasing.queryWithAliasing(json) + } else { + response = await getDatasourceAndQuery(json) + } + // handle many-to-many relationships now if we know the ID (could be auto increment) if (operation !== Operation.READ) { await this.handleManyRelationships( diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts index 1d07eff1ce..a7c6df29ea 100644 --- a/packages/server/src/environment.ts +++ b/packages/server/src/environment.ts @@ -76,13 +76,16 @@ const environment = { DEFAULTS.AUTOMATION_THREAD_TIMEOUT > QUERY_THREAD_TIMEOUT ? DEFAULTS.AUTOMATION_THREAD_TIMEOUT : QUERY_THREAD_TIMEOUT, - SQL_MAX_ROWS: process.env.SQL_MAX_ROWS, BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL, BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD, PLUGINS_DIR: process.env.PLUGINS_DIR || DEFAULTS.PLUGINS_DIR, OPENAI_API_KEY: process.env.OPENAI_API_KEY, MAX_IMPORT_SIZE_MB: process.env.MAX_IMPORT_SIZE_MB, SESSION_EXPIRY_SECONDS: process.env.SESSION_EXPIRY_SECONDS, + // SQL + SQL_MAX_ROWS: process.env.SQL_MAX_ROWS, + SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE, + SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE, // flags ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS, DISABLE_THREADING: process.env.DISABLE_THREADING, @@ -90,7 +93,6 @@ const environment = { DISABLE_RATE_LIMITING: process.env.DISABLE_RATE_LIMITING, MULTI_TENANCY: process.env.MULTI_TENANCY, ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS, - ENABLE_SQL_LOGGING: process.env.ENABLE_SQL_LOGGING, SELF_HOSTED: process.env.SELF_HOSTED, HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT, FORKED_PROCESS_NAME: diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index cc2e1d94a8..172a10ea7f 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -688,7 +688,7 @@ class SqlQueryBuilder extends 
SqlTableQueryBuilder { } log(query: string, values?: any[]) { - if (!environment.ENABLE_SQL_LOGGING) { + if (!environment.SQL_LOGGING_ENABLE) { return } const sqlClient = this.getSqlClient() From 45d2e6790539bd83bbb40da37e8709e2891ad76e Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 26 Feb 2024 13:50:15 +0000 Subject: [PATCH 17/54] Adding some new test cases based on finishing off testing. --- .../src/integrations/tests/sqlAlias.spec.ts | 45 +++- .../sqlQueryJson/enrichRelationship.json | 123 +++++++++++ .../tests/sqlQueryJson/fetchManyToMany.json | 109 ++++++++++ .../sqlQueryJson/manyRelationshipFilters.json | 202 ++++++++++++++++++ 4 files changed, 477 insertions(+), 2 deletions(-) create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json create mode 100644 packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index c91d988849..c7c544be3c 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -5,6 +5,7 @@ import { SqlClient } from "../utils" describe("Captures of real examples", () => { const limit = 5000 + const relationshipLimit = 100 function getJson(name: string): QueryJson { return require(join(__dirname, "sqlQueryJson", name)) as QueryJson @@ -26,7 +27,7 @@ describe("Captures of real examples", () => { const queryJson = getJson("basicFetchWithRelationships.json") let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ - bindings: [100, limit], + bindings: [relationshipLimit, limit], sql: `select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", 
"a"."city" as "a.city", "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "persons" as "a" order by "a"."firstname" asc limit $1) as "a" left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" order by "a"."firstname" asc limit $2`, }) }) @@ -35,10 +36,50 @@ describe("Captures of real examples", () => { const queryJson = getJson("filterByRelationship.json") let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ - bindings: [100, "assembling", limit], + bindings: [relationshipLimit, "assembling", limit], sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 order by "a"."productname" asc limit $3`, }) }) + + it("should handle fetching many to many relationships", () => { + const queryJson = getJson("fetchManyToMany.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [relationshipLimit, limit], + sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left 
join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" order by "a"."productname" asc limit $2`, + }) + }) + + it("should handle enrichment of rows", () => { + const queryJson = getJson("enrichRelationship.json") + const filters = queryJson.filters?.oneOf?.taskid as number[] + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [...filters, limit, limit], + sql: `select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", "b"."productid" as "b.productid" from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`, + }) + }) + + it("should manage query with many relationship filters", () => { + const queryJson = getJson("manyRelationshipFilters.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + const filters = queryJson.filters + const notEqualsValue = Object.values(filters?.notEqual!)[0] + const rangeValue = Object.values(filters?.range!)[0] + const equalValue = Object.values(filters?.equal!)[0] + + expect(query).toEqual({ + bindings: [ + notEqualsValue, + relationshipLimit, + rangeValue.low, + rangeValue.high, + equalValue, + limit, + ], + sql: `select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", "c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", 
"c"."firstname" as "c.firstname", "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", "c"."city" as "c.city", "c"."lastname" as "c.lastname" from (select * from "tasks" as "a" where not "a"."completed" = $1 order by "a"."taskname" asc limit $2) as "a" left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid" left join "products" as "b" on "b"."productid" = "d"."productid" left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid" where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc limit $6`, + }) + }) }) describe("update", () => { diff --git a/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json new file mode 100644 index 0000000000..ee658aed18 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json @@ -0,0 +1,123 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "entityId": "tasks", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.executorid", + "a.taskname", + "a.taskid", + "a.completed", + "a.qaid", + "b.productname", + "b.productid" + ] + }, + "filters": { + "oneOf": { + "taskid": [ + 1, + 2 + ] + } + }, + "relationships": [ + { + "tableName": "products", + "column": "products", + "through": "products_tasks", + "from": "taskid", + "to": "productid", + "fromPrimary": "taskid", + "toPrimary": "productid", + "aliases": { + "products_tasks": "c", + "products": "b", + "tasks": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", + "primary": [ + "taskid" + ], + "name": "a", + "schema": { + "executorid": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "executorid", + "constraints": { + 
"presence": false + } + }, + "taskname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "taskname", + "constraints": { + "presence": false + } + }, + "taskid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "taskid", + "constraints": { + "presence": false + } + }, + "completed": { + "type": "boolean", + "externalType": "boolean", + "autocolumn": false, + "name": "completed", + "constraints": { + "presence": false + } + }, + "qaid": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "qaid", + "constraints": { + "presence": false + } + }, + "products": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", + "name": "products", + "relationshipType": "many-to-many", + "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", + "type": "link", + "_id": "c3b91d00cd36c4cc1a347794725b9adbd", + "fieldName": "productid", + "throughFrom": "productid", + "throughTo": "taskid" + } + }, + "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "sourceType": "external", + "primaryDisplay": "taskname", + "sql": true, + "views": {} + } + }, + "tableAliases": { + "tasks": "a", + "products": "b", + "products_tasks": "c" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json b/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json new file mode 100644 index 0000000000..682ebaab2d --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json @@ -0,0 +1,109 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "entityId": "products", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.productname", + "a.productid", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid" + ] + }, + "filters": { + "string": {}, + "fuzzy": {}, + "range": {}, 
+ "equal": {}, + "notEqual": {}, + "empty": {}, + "notEmpty": {}, + "contains": {}, + "notContains": {}, + "oneOf": {}, + "containsAny": {} + }, + "sort": { + "productname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "tasks", + "column": "tasks", + "through": "products_tasks", + "from": "productid", + "to": "taskid", + "fromPrimary": "productid", + "toPrimary": "taskid", + "aliases": { + "products_tasks": "c", + "tasks": "b", + "products": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", + "primary": [ + "productid" + ], + "name": "a", + "schema": { + "productname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "productname", + "constraints": { + "presence": false + } + }, + "productid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "productid", + "constraints": { + "presence": false + } + }, + "tasks": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", + "name": "tasks", + "relationshipType": "many-to-many", + "fieldName": "taskid", + "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", + "throughFrom": "taskid", + "throughTo": "productid", + "type": "link", + "main": true, + "_id": "c3b91d00cd36c4cc1a347794725b9adbd" + } + }, + "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "sourceType": "external", + "primaryDisplay": "productname" + } + }, + "tableAliases": { + "products": "a", + "tasks": "b", + "products_tasks": "c" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json b/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json new file mode 100644 index 0000000000..afa0889450 --- /dev/null +++ 
b/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json @@ -0,0 +1,202 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "entityId": "tasks", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.executorid", + "a.taskname", + "a.taskid", + "a.completed", + "a.qaid", + "b.productname", + "b.productid", + "c.year", + "c.firstname", + "c.personid", + "c.address", + "c.age", + "c.type", + "c.city", + "c.lastname", + "c.year", + "c.firstname", + "c.personid", + "c.address", + "c.age", + "c.type", + "c.city", + "c.lastname" + ] + }, + "filters": { + "string": {}, + "fuzzy": {}, + "range": { + "1:persons.year": { + "low": 1990, + "high": 2147483647 + } + }, + "equal": { + "2:products.productname": "Computers" + }, + "notEqual": { + "3:completed": true + }, + "empty": {}, + "notEmpty": {}, + "contains": {}, + "notContains": {}, + "oneOf": {}, + "containsAny": {}, + "onEmptyFilter": "all" + }, + "sort": { + "taskname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "products", + "column": "products", + "through": "products_tasks", + "from": "taskid", + "to": "productid", + "fromPrimary": "taskid", + "toPrimary": "productid", + "aliases": { + "products_tasks": "d", + "products": "b", + "tasks": "a" + } + }, + { + "tableName": "persons", + "column": "tasksToExecute", + "from": "executorid", + "to": "personid", + "aliases": { + "persons": "c", + "tasks": "a" + } + }, + { + "tableName": "persons", + "column": "tasksToQA", + "from": "qaid", + "to": "personid", + "aliases": { + "persons": "c", + "tasks": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", + "primary": [ + "taskid" + ], + "name": "a", + "schema": { + "executorid": { + "type": "number", + "externalType": "integer", + "name": "executorid", + 
"constraints": { + "presence": false + }, + "autocolumn": true, + "autoReason": "foreign_key" + }, + "taskname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "taskname", + "constraints": { + "presence": false + } + }, + "taskid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "taskid", + "constraints": { + "presence": false + } + }, + "completed": { + "type": "boolean", + "externalType": "boolean", + "autocolumn": false, + "name": "completed", + "constraints": { + "presence": false + } + }, + "qaid": { + "type": "number", + "externalType": "integer", + "name": "qaid", + "constraints": { + "presence": false + } + }, + "products": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", + "name": "products", + "relationshipType": "many-to-many", + "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", + "type": "link", + "_id": "c3b91d00cd36c4cc1a347794725b9adbd", + "fieldName": "productid", + "throughFrom": "productid", + "throughTo": "taskid" + }, + "tasksToExecute": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons", + "name": "tasksToExecute", + "relationshipType": "one-to-many", + "type": "link", + "_id": "c0f440590bda04f28846242156c1dd60b", + "foreignKey": "executorid", + "fieldName": "personid" + }, + "tasksToQA": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons", + "name": "tasksToQA", + "relationshipType": "one-to-many", + "type": "link", + "_id": "c5fdf453a0ba743d58e29491d174c974b", + "foreignKey": "qaid", + "fieldName": "personid" + } + }, + "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "sourceType": "external", + "primaryDisplay": "taskname", + "sql": true, + "views": {} + } + }, + "tableAliases": { + "tasks": "a", + "products": "b", + "persons": "c", + "products_tasks": "d" + } +} \ No newline at end of file From cb19e1f24c0ab72989cf7058525492f1fe2a84c1 Mon 
Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 26 Feb 2024 17:56:28 +0000 Subject: [PATCH 18/54] Fixing response types of DS+ query function. --- .../api/controllers/row/ExternalRequest.ts | 9 ++++---- .../server/src/api/controllers/row/alias.ts | 21 +++++++++++++------ .../server/src/integrations/base/query.ts | 8 +++++-- .../server/src/integrations/googlesheets.ts | 7 ++++--- .../src/integrations/microsoftSqlServer.ts | 3 ++- packages/server/src/integrations/mysql.ts | 4 ++-- packages/server/src/integrations/oracle.ts | 8 ++++--- packages/server/src/integrations/postgres.ts | 3 ++- packages/server/src/sdk/app/rows/utils.ts | 13 ++++++++++-- packages/types/src/sdk/datasources.ts | 16 +++++++++++++- 10 files changed, 67 insertions(+), 25 deletions(-) diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 0070e0bf24..0dd17a86e6 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -332,7 +332,7 @@ export class ExternalRequest { endpoint: getEndpoint(table._id!, Operation.READ), filters: buildFilters(rowId, {}, table), }) - if (response.length > 0) { + if (Array.isArray(response)) { return response[0] } else { throw new Error(`Cannot fetch row by ID "${rowId}"`) @@ -646,7 +646,7 @@ export class ExternalRequest { }, }) // this is the response from knex if no rows found - const rows: Row[] = !response[0].read ? response : [] + const rows: Row[] = response?.[0].read ? [] : (response as Row[]) const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName related[storeTo] = { rows, isMany, tableId } } @@ -899,15 +899,16 @@ export class ExternalRequest { response = await getDatasourceAndQuery(json) } + const responseRows = Array.isArray(response) ? 
response : [] // handle many-to-many relationships now if we know the ID (could be auto increment) if (operation !== Operation.READ) { await this.handleManyRelationships( table._id || "", - response[0], + responseRows[0], processed.manyRelationships ) } - const output = this.outputProcessing(response, table, relationships) + const output = this.outputProcessing(responseRows, table, relationships) // if reading it'll just be an array of rows, return whole thing if (operation === Operation.READ) { return ( diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 6533e51728..ca144ee518 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -1,4 +1,10 @@ -import { QueryJson, SearchFilters, Table, Row } from "@budibase/types" +import { + QueryJson, + SearchFilters, + Table, + Row, + DatasourcePlusQueryResponse, +} from "@budibase/types" import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" import { cloneDeep } from "lodash" @@ -68,9 +74,8 @@ export default class AliasTables { return map } - async queryWithAliasing(json: QueryJson) { + async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse { json = cloneDeep(json) - const aliasField = (field: string) => this.aliasField(field) const aliasTable = (table: Table) => ({ ...table, name: this.getAlias(table.name), @@ -78,7 +83,7 @@ export default class AliasTables { // run through the query json to update anywhere a table may be used if (json.resource?.fields) { json.resource.fields = json.resource.fields.map(field => - aliasField(field) + this.aliasField(field) ) } if (json.filters) { @@ -88,7 +93,7 @@ export default class AliasTables { } const aliasedFilters: typeof filter = {} for (let key of Object.keys(filter)) { - aliasedFilters[aliasField(key)] = filter[key] + aliasedFilters[this.aliasField(key)] = filter[key] } json.filters[filterKey as keyof SearchFilters] = 
aliasedFilters } @@ -120,6 +125,10 @@ export default class AliasTables { } json.tableAliases = invertedTableAliases const response = await getDatasourceAndQuery(json) - return this.reverse(response) + if (Array.isArray(response)) { + return this.reverse(response) + } else { + return response + } } } diff --git a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts index 4f31e37744..b906ecbb1b 100644 --- a/packages/server/src/integrations/base/query.ts +++ b/packages/server/src/integrations/base/query.ts @@ -1,11 +1,15 @@ -import { QueryJson, Datasource } from "@budibase/types" +import { + QueryJson, + Datasource, + DatasourcePlusQueryResponse, +} from "@budibase/types" import { getIntegration } from "../index" import sdk from "../../sdk" export async function makeExternalQuery( datasource: Datasource, json: QueryJson -) { +): DatasourcePlusQueryResponse { datasource = await sdk.datasources.enrich(datasource) const Integration = await getIntegration(datasource.source) // query is the opinionated function diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 58c867ea0b..32398bde41 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -16,6 +16,7 @@ import { Table, TableRequest, TableSourceType, + DatasourcePlusQueryResponse, } from "@budibase/types" import { OAuth2Client } from "google-auth-library" import { @@ -334,7 +335,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { return { tables: externalTables, errors } } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { const sheet = json.endpoint.entityId switch (json.endpoint.operation) { case Operation.CREATE: @@ -384,7 +385,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { } try { await this.connect() - return await this.client.addSheet({ title: name, headerValues: [name] }) + 
await this.client.addSheet({ title: name, headerValues: [name] }) } catch (err) { console.error("Error creating new table in google sheets", err) throw err @@ -450,7 +451,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { try { await this.connect() const sheetToDelete = this.client.sheetsByTitle[sheet] - return await sheetToDelete.delete() + await sheetToDelete.delete() } catch (err) { console.error("Error deleting table in google sheets", err) throw err diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index e063933503..f87e248ac0 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -13,6 +13,7 @@ import { SourceName, Schema, TableSourceType, + DatasourcePlusQueryResponse, } from "@budibase/types" import { getSqlQuery, @@ -493,7 +494,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { return response.recordset || [{ deleted: true }] } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { const schema = this.config.schema await this.connect() if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) { diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index 6eebda8df5..f629381807 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -12,7 +12,7 @@ import { SourceName, Schema, TableSourceType, - FieldType, + DatasourcePlusQueryResponse, } from "@budibase/types" import { getSqlQuery, @@ -381,7 +381,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { return results.length ? 
results : [{ deleted: true }] } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { await this.connect() try { const queryFn = (query: any) => diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index cdf37a9c83..86c4bf519d 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -12,6 +12,8 @@ import { ConnectionInfo, Schema, TableSourceType, + Row, + DatasourcePlusQueryResponse, } from "@budibase/types" import { buildExternalTableId, @@ -420,7 +422,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { : [{ deleted: true }] } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { const operation = this._operation(json) const input = this._query(json, { disableReturning: true }) if (Array.isArray(input)) { @@ -444,7 +446,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { if (deletedRows?.rows?.length) { return deletedRows.rows } else if (response.rows?.length) { - return response.rows + return response.rows as Row[] } else { // get the last row that was updated if ( @@ -455,7 +457,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { const lastRow = await this.internalQuery({ sql: `SELECT * FROM \"${json.endpoint.entityId}\" WHERE ROWID = '${response.lastRowid}'`, }) - return lastRow.rows + return lastRow.rows as Row[] } else { return [{ [operation.toLowerCase()]: true }] } diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index 7fb75f5d9f..6e87f296e1 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -12,6 +12,7 @@ import { SourceName, Schema, TableSourceType, + DatasourcePlusQueryResponse, } from "@budibase/types" import { getSqlQuery, @@ -419,7 +420,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus { 
return response.rows.length ? response.rows : [{ deleted: true }] } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { const operation = this._operation(json).toLowerCase() const input = this._query(json) if (Array.isArray(input)) { diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index 2320820b3e..75f980d9f9 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -1,12 +1,21 @@ import cloneDeep from "lodash/cloneDeep" import validateJs from "validate.js" -import { FieldType, QueryJson, Row, Table, TableSchema } from "@budibase/types" +import { + FieldType, + QueryJson, + Row, + Table, + TableSchema, + DatasourcePlusQueryResponse, +} from "@budibase/types" import { makeExternalQuery } from "../../../integrations/base/query" import { Format } from "../../../api/controllers/view/exporters" import sdk from "../.." import { isRelationshipColumn } from "../../../db/utils" -export async function getDatasourceAndQuery(json: QueryJson) { +export async function getDatasourceAndQuery( + json: QueryJson +): DatasourcePlusQueryResponse { const datasourceId = json.endpoint.datasourceId const datasource = await sdk.datasources.get(datasourceId) return makeExternalQuery(datasource, json) diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts index 7a335eb3b9..4cddb0c09e 100644 --- a/packages/types/src/sdk/datasources.ts +++ b/packages/types/src/sdk/datasources.ts @@ -1,4 +1,5 @@ -import { Table } from "../documents" +import { Table, Row } from "../documents" +import { QueryJson } from "./search" export const PASSWORD_REPLACEMENT = "--secret-value--" @@ -180,11 +181,24 @@ export interface Schema { errors: Record } +// return these when an operation occurred but we got no response +enum DSPlusOperation { + CREATE = "create", + READ = "read", + UPDATE = "update", + DELETE = "delete", +} + +export type 
DatasourcePlusQueryResponse = Promise< + Row[] | Record[] | void +> + export interface DatasourcePlus extends IntegrationBase { // if the datasource supports the use of bindings directly (to protect against SQL injection) // this returns the format of the identifier getBindingIdentifier(): string getStringConcat(parts: string[]): string + query(json: QueryJson): DatasourcePlusQueryResponse buildSchema( datasourceId: string, entities: Record From 1bb375a500557c73de17f8d457abf0b692dc2444 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 26 Feb 2024 18:16:42 +0000 Subject: [PATCH 19/54] Further typing. --- .../scripts/integrations/postgres/reset.sh | 3 +- .../api/controllers/row/ExternalRequest.ts | 69 +++++++++++-------- 2 files changed, 41 insertions(+), 31 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/reset.sh b/packages/server/scripts/integrations/postgres/reset.sh index 29a5db0181..8deb01cdf8 100755 --- a/packages/server/scripts/integrations/postgres/reset.sh +++ b/packages/server/scripts/integrations/postgres/reset.sh @@ -1,4 +1,3 @@ #!/bin/bash -docker-compose down +docker-compose down -v docker volume prune -f -docker volume rm postgres_pg_data diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 0dd17a86e6..dceadb3cd4 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -19,6 +19,7 @@ import { SortJson, SortType, Table, + isManyToOne, } from "@budibase/types" import { breakExternalTableId, @@ -104,23 +105,36 @@ function buildFilters( } } -function removeRelationships( +async function removeManyToManyRelationships( rowId: string, table: Table, - isManyToMany: boolean, - colName?: string + colName: string ) { const tableId = table._id! 
const filters = buildFilters(rowId, {}, table) // safety check, if there are no filters on deletion bad things happen if (Object.keys(filters).length !== 0) { - const op = isManyToMany ? Operation.DELETE : Operation.UPDATE - const body = colName && !isManyToMany ? { [colName]: null } : undefined return getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, op), - body, + endpoint: getEndpoint(tableId, Operation.DELETE), + body: { [colName]: null }, filters, }) + } else { + return [] + } +} + +async function removeOneToManyRelationships(rowId: string, table: Table) { + const tableId = table._id! + const filters = buildFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.UPDATE), + filters, + }) + } else { + return [] } } @@ -734,12 +748,10 @@ export class ExternalRequest { continue } for (let row of rows) { - const promise = removeRelationships( - generateIdForRow(row, table), - table, - isMany, - colName - ) + const rowId = generateIdForRow(row, table) + const promise: Promise = isMany + ? removeManyToManyRelationships(rowId, table, colName) + : removeOneToManyRelationships(rowId, table) if (promise) { promises.push(promise) } @@ -752,24 +764,23 @@ export class ExternalRequest { const row = await this.getRow(table, rowId) const related = await this.lookupRelations(table._id!, row) for (let column of Object.values(table.schema)) { - if ( - column.type !== FieldType.LINK || - column.relationshipType === RelationshipType.ONE_TO_MANY - ) { + const relationshipColumn = column as RelationshipFieldMetadata + if (!isManyToOne(relationshipColumn)) { continue } - const relationshipColumn = column as ManyToOneRelationshipFieldMetadata const { rows, isMany, tableId } = related[relationshipColumn.fieldName] const table = this.getTable(tableId)! 
await Promise.all( - rows.map(row => - removeRelationships( - generateIdForRow(row, table), - table, - isMany, - relationshipColumn.fieldName - ) - ) + rows.map(row => { + const rowId = generateIdForRow(row, table) + return isMany + ? removeManyToManyRelationships( + rowId, + table, + relationshipColumn.fieldName + ) + : removeOneToManyRelationships(rowId, table) + }) ) } } @@ -892,11 +903,11 @@ export class ExternalRequest { // aliasing can be disabled fully if desired let response - if (!env.SQL_ALIASING_DISABLE) { + if (env.SQL_ALIASING_DISABLE) { + response = await getDatasourceAndQuery(json) + } else { const aliasing = new AliasTables(Object.keys(this.tables)) response = await aliasing.queryWithAliasing(json) - } else { - response = await getDatasourceAndQuery(json) } const responseRows = Array.isArray(response) ? response : [] From 80dc0beeeda53446decf82528d2882e8b141bf4e Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 26 Feb 2024 18:22:19 +0000 Subject: [PATCH 20/54] Multiline SQL tests. 
--- .../src/integrations/tests/sqlAlias.spec.ts | 63 ++++++++++++++++--- 1 file changed, 54 insertions(+), 9 deletions(-) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index c7c544be3c..da88127b16 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -3,6 +3,10 @@ import { join } from "path" import Sql from "../base/sql" import { SqlClient } from "../utils" +function multiline(sql: string) { + return sql.replace(/\n/g, "").replace(/ +/g, " ") +} + describe("Captures of real examples", () => { const limit = 5000 const relationshipLimit = 100 @@ -17,7 +21,8 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: ["A Street", 34, "London", "A", "B", "designer", 1990], - sql: `insert into "persons" ("address", "age", "city", "firstname", "lastname", "type", "year") values ($1, $2, $3, $4, $5, $6, $7) returning *`, + sql: multiline(`insert into "persons" ("address", "age", "city", "firstname", "lastname", "type", "year") + values ($1, $2, $3, $4, $5, $6, $7) returning *`), }) }) }) @@ -28,7 +33,15 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, limit], - sql: `select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * 
from "persons" as "a" order by "a"."firstname" asc limit $1) as "a" left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" order by "a"."firstname" asc limit $2`, + sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", + "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", + "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", + "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", + "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" + from (select * from "persons" as "a" order by "a"."firstname" asc limit $1) as "a" + left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" + order by "a"."firstname" asc limit $2`), }) }) @@ -37,7 +50,13 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, "assembling", limit], - sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 order by "a"."productname" asc limit $3`, + sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", + "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" + from (select * from 
"products" as "a" order by "a"."productname" asc limit $1) as "a" + left join "products_tasks" as "c" on "a"."productid" = "c"."productid" + left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 + order by "a"."productname" asc limit $3`), }) }) @@ -46,7 +65,13 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, limit], - sql: `select "a"."productname" as "a.productname", "a"."productid" as "a.productid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" left join "products_tasks" as "c" on "a"."productid" = "c"."productid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" order by "a"."productname" asc limit $2`, + sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", + "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" + from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" + left join "products_tasks" as "c" on "a"."productid" = "c"."productid" + left join "tasks" as "b" on "b"."taskid" = "c"."taskid" + order by "a"."productname" asc limit $2`), }) }) @@ -56,7 +81,12 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [...filters, limit, limit], - sql: `select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", "b"."productid" as "b.productid" from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" 
left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`, + sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", + "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", + "b"."productname" as "b.productname", "b"."productid" as "b.productid" + from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" + left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" + left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`), }) }) @@ -77,7 +107,19 @@ describe("Captures of real examples", () => { equalValue, limit, ], - sql: `select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", "c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", "c"."city" as "c.city", "c"."lastname" as "c.lastname" from (select * from "tasks" as "a" where not "a"."completed" = $1 order by "a"."taskname" asc limit $2) as "a" left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid" left join "products" as "b" on "b"."productid" = "d"."productid" left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid" where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc limit $6`, + sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", + "a"."completed" as "a.completed", 
"a"."qaid" as "a.qaid", "b"."productname" as "b.productname", + "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", + "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", + "c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", + "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", + "c"."city" as "c.city", "c"."lastname" as "c.lastname" + from (select * from "tasks" as "a" where not "a"."completed" = $1 + order by "a"."taskname" asc limit $2) as "a" + left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid" + left join "products" as "b" on "b"."productid" = "d"."productid" + left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid" + where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc limit $6`), }) }) }) @@ -88,7 +130,8 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], - sql: `update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`, + sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, + "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`), }) }) @@ -97,7 +140,8 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], - sql: `update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning 
*`, + sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, + "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`), }) }) }) @@ -108,7 +152,8 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: ["ddd", ""], - sql: `delete from "compositetable" as "a" where "a"."keypartone" = $1 and "a"."keyparttwo" = $2 returning "a"."keyparttwo" as "a.keyparttwo", "a"."keypartone" as "a.keypartone", "a"."name" as "a.name"`, + sql: multiline(`delete from "compositetable" as "a" where "a"."keypartone" = $1 and "a"."keyparttwo" = $2 + returning "a"."keyparttwo" as "a.keyparttwo", "a"."keypartone" as "a.keypartone", "a"."name" as "a.name"`), }) }) }) From 59ab557a937a2d91ce3375a84aa23e76f7a1ff00 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 26 Feb 2024 18:29:57 +0000 Subject: [PATCH 21/54] Looping characters used. --- packages/server/src/api/controllers/row/alias.ts | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index ca144ee518..05a261af42 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -28,7 +28,13 @@ export default class AliasTables { const char = this.character this.aliases[tableName] = char this.tableAliases[char] = tableName - this.character = String.fromCharCode(char.charCodeAt(0) + 1) + this.character = + char.substring(0, char.length - 1) + + String.fromCharCode(char.charCodeAt(char.length - 1) + 1) + // reached end of characters, extend number of characters used + if (this.character === "z") { + this.character = new Array(this.character.length + 1).fill("a").join("") + } return char } From d68fcbf8f7c7e6b3b1edc16497357b0a624dc2a4 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 26 Feb 2024 
18:36:34 +0000 Subject: [PATCH 22/54] Loop aliasing. --- .../server/src/api/controllers/row/alias.ts | 2 +- .../src/integrations/tests/sqlAlias.spec.ts | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 05a261af42..747166a7ba 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -32,7 +32,7 @@ export default class AliasTables { char.substring(0, char.length - 1) + String.fromCharCode(char.charCodeAt(char.length - 1) + 1) // reached end of characters, extend number of characters used - if (this.character === "z") { + if (this.character.charAt(this.character.length - 1) === "z") { this.character = new Array(this.character.length + 1).fill("a").join("") } return char diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index da88127b16..c35c2b45b6 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -2,6 +2,8 @@ import { QueryJson } from "@budibase/types" import { join } from "path" import Sql from "../base/sql" import { SqlClient } from "../utils" +import AliasTables from "../../api/controllers/row/alias" +import { generator } from "@budibase/backend-core/tests" function multiline(sql: string) { return sql.replace(/\n/g, "").replace(/ +/g, " ") @@ -157,4 +159,19 @@ describe("Captures of real examples", () => { }) }) }) + + describe("check max character aliasing", () => { + it("should handle over 'z' max character alias", () => { + const tableNames = [] + for (let i = 0; i < 100; i++) { + tableNames.push(generator.word()) + } + const aliasing = new AliasTables(tableNames) + let alias: string = "" + for (let table of tableNames) { + alias = aliasing.getAlias(table) + } + expect(alias).toEqual("aaay") + }) + }) }) 
From c11527d5399fb63f62ed7f45869c42d37efbb856 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 27 Feb 2024 09:56:56 +0000 Subject: [PATCH 23/54] Adding in char sequence. --- .../server/src/api/controllers/row/alias.ts | 38 ++++++++++++++----- .../src/integrations/tests/sqlAlias.spec.ts | 2 +- 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 747166a7ba..280d50de9b 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -8,33 +8,51 @@ import { import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" import { cloneDeep } from "lodash" +class CharSequence { + static alphabet = "abcdefghijklmnopqrstuvwxyz" + counters: number[] + + constructor() { + this.counters = [0] + } + + get character() { + return this.counters.map(i => CharSequence.alphabet[i]).join("") + } + + next() { + for (let i = this.counters.length - 1; i >= 0; i--) { + if (this.counters[i] < CharSequence.alphabet.length - 1) { + this.counters[i]++ + return + } + this.counters[i] = 0 + } + this.counters.unshift(0) + } +} + export default class AliasTables { - character: string aliases: Record tableAliases: Record tableNames: string[] + charSeq: CharSequence constructor(tableNames: string[]) { this.tableNames = tableNames - this.character = "a" this.aliases = {} this.tableAliases = {} + this.charSeq = new CharSequence() } getAlias(tableName: string) { if (this.aliases[tableName]) { return this.aliases[tableName] } - const char = this.character + const char = this.charSeq.character + this.charSeq.next() this.aliases[tableName] = char this.tableAliases[char] = tableName - this.character = - char.substring(0, char.length - 1) + - String.fromCharCode(char.charCodeAt(char.length - 1) + 1) - // reached end of characters, extend number of characters used - if (this.character.charAt(this.character.length - 1) === "z") 
{ - this.character = new Array(this.character.length + 1).fill("a").join("") - } return char } diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index c35c2b45b6..6e8bd0ea72 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -171,7 +171,7 @@ describe("Captures of real examples", () => { for (let table of tableNames) { alias = aliasing.getAlias(table) } - expect(alias).toEqual("aaay") + expect(alias).toEqual("cu") }) }) }) From c0bb03e9382ac16c14c0fd29537d443e8126dd94 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 27 Feb 2024 09:58:06 +0000 Subject: [PATCH 24/54] Adding length check back for get row. --- packages/server/src/api/controllers/row/ExternalRequest.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index dceadb3cd4..165a1847b5 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -346,7 +346,7 @@ export class ExternalRequest { endpoint: getEndpoint(table._id!, Operation.READ), filters: buildFilters(rowId, {}, table), }) - if (Array.isArray(response)) { + if (Array.isArray(response) && response.length > 0) { return response[0] } else { throw new Error(`Cannot fetch row by ID "${rowId}"`) From fac9f18bc2214f6f62249a372c88803ea19a4a7e Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 27 Feb 2024 13:40:31 +0000 Subject: [PATCH 25/54] PR comments. 
--- .../api/controllers/row/ExternalRequest.ts | 31 ++++++++++++++----- .../server/src/api/controllers/row/alias.ts | 13 +++----- .../src/integrations/tests/sqlAlias.spec.ts | 4 +-- 3 files changed, 30 insertions(+), 18 deletions(-) diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 165a1847b5..685af4e98e 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -640,14 +640,26 @@ export class ExternalRequest { ) { continue } - const isMany = field.relationshipType === RelationshipType.MANY_TO_MANY - const tableId = isMany ? field.through! : field.tableId! + let tableId: string | undefined, + lookupField: string | undefined, + fieldName: string | undefined + if (isManyToMany(field)) { + tableId = field.through + lookupField = primaryKey + fieldName = field.throughTo || primaryKey + } else if (isManyToOne(field)) { + tableId = field.tableId + lookupField = field.foreignKey + fieldName = field.fieldName + } + if (!tableId || !lookupField || !fieldName) { + throw new Error( + "Unable to lookup relationships - undefined column properties." + ) + } const { tableName: relatedTableName } = breakExternalTableId(tableId) // @ts-ignore const linkPrimaryKey = this.tables[relatedTableName].primary[0] - - const lookupField = isMany ? primaryKey : field.foreignKey - const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName if (!lookupField || !row[lookupField]) { continue } @@ -660,9 +672,12 @@ export class ExternalRequest { }, }) // this is the response from knex if no rows found - const rows: Row[] = response?.[0].read ? [] : (response as Row[]) - const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName - related[storeTo] = { rows, isMany, tableId } + const rows: Row[] = + !Array.isArray(response) || response?.[0].read ? [] : response + const storeTo = isManyToMany(field) + ? 
field.throughFrom || linkPrimaryKey + : fieldName + related[storeTo] = { rows, isMany: isManyToMany(field), tableId } } return related } diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 280d50de9b..9d54bbff8e 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -16,19 +16,17 @@ class CharSequence { this.counters = [0] } - get character() { - return this.counters.map(i => CharSequence.alphabet[i]).join("") - } - - next() { + getCharacter(): string { + const char = this.counters.map(i => CharSequence.alphabet[i]).join("") for (let i = this.counters.length - 1; i >= 0; i--) { if (this.counters[i] < CharSequence.alphabet.length - 1) { this.counters[i]++ - return + return char } this.counters[i] = 0 } this.counters.unshift(0) + return char } } @@ -49,8 +47,7 @@ export default class AliasTables { if (this.aliases[tableName]) { return this.aliases[tableName] } - const char = this.charSeq.character - this.charSeq.next() + const char = this.charSeq.getCharacter() this.aliases[tableName] = char this.tableAliases[char] = tableName return char diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 6e8bd0ea72..61dbc11840 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -164,14 +164,14 @@ describe("Captures of real examples", () => { it("should handle over 'z' max character alias", () => { const tableNames = [] for (let i = 0; i < 100; i++) { - tableNames.push(generator.word()) + tableNames.push(generator.guid()) } const aliasing = new AliasTables(tableNames) let alias: string = "" for (let table of tableNames) { alias = aliasing.getAlias(table) } - expect(alias).toEqual("cu") + expect(alias).toEqual("cv") }) }) }) From 35c1f5bbce2f8d99bebcf56e7f815df0d4e227a8 Mon Sep 17 00:00:00 2001 
From: mike12345567 Date: Tue, 27 Feb 2024 17:15:20 +0000 Subject: [PATCH 26/54] Adding handling for columns with dots in them. --- packages/server/scripts/integrations/postgres/init.sql | 4 ++++ packages/server/src/api/controllers/row/alias.ts | 9 ++++----- packages/server/src/integrations/tests/sqlAlias.spec.ts | 9 +++++++++ 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index b7ce1b7d5b..2d51a2b5cb 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -2,6 +2,10 @@ SELECT 'CREATE DATABASE main' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec CREATE SCHEMA "test-1"; CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer', 'support'); +CREATE TABLE "Bad.Table" ( + BadID SERIAL PRIMARY KEY, + "Bad.Column" text +); CREATE TABLE Persons ( PersonID SERIAL PRIMARY KEY, LastName varchar(255), diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 9d54bbff8e..cb4d4f1453 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -55,11 +55,10 @@ export default class AliasTables { aliasField(field: string) { const tableNames = this.tableNames - if (field.includes(".")) { - const [tableName, column] = field.split(".") - if (tableNames.includes(tableName)) { - return `${this.getAlias(tableName)}.${column}` - } + const foundTable = tableNames.find(name => field.includes(name)) + if (foundTable) { + const aliasedTable = this.getAlias(foundTable) + return field.replace(foundTable, aliasedTable) } return field } diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 61dbc11840..acf0f9898f 100644 --- 
a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -174,4 +174,13 @@ describe("Captures of real examples", () => { expect(alias).toEqual("cv") }) }) + + describe("check some edge cases", () => { + it("should handle table names/columns with dots in them", () => { + const tableNames = ["hello.world", "foo.bar.baz"] + const aliasing = new AliasTables(tableNames) + const aliased = aliasing.aliasField("hello.world.field") + expect(aliased).toEqual("a.field") + }) + }) }) From 4068df602564f889da33df8f409bbad3ef9a9878 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 27 Feb 2024 17:46:02 +0000 Subject: [PATCH 27/54] Adding error handling for table names or columns which contain dots (invalid). --- .../scripts/integrations/postgres/init.sql | 4 --- .../server/src/sdk/app/datasources/plus.ts | 36 +++++++++++++++---- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index 2d51a2b5cb..b7ce1b7d5b 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -2,10 +2,6 @@ SELECT 'CREATE DATABASE main' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec CREATE SCHEMA "test-1"; CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer', 'support'); -CREATE TABLE "Bad.Table" ( - BadID SERIAL PRIMARY KEY, - "Bad.Column" text -); CREATE TABLE Persons ( PersonID SERIAL PRIMARY KEY, LastName varchar(255), diff --git a/packages/server/src/sdk/app/datasources/plus.ts b/packages/server/src/sdk/app/datasources/plus.ts index 04cd508863..31ec51c728 100644 --- a/packages/server/src/sdk/app/datasources/plus.ts +++ b/packages/server/src/sdk/app/datasources/plus.ts @@ -3,12 +3,33 @@ import { DatasourcePlus, IntegrationBase, Schema, + Table, } from "@budibase/types" import * as datasources from 
"./datasources" import tableSdk from "../tables" import { getIntegration } from "../../../integrations" import { context } from "@budibase/backend-core" +function checkForSchemaErrors(schema: Record) { + const errors: Record = {} + for (let [tableName, table] of Object.entries(schema)) { + if (tableName.includes(".")) { + errors[tableName] = "Table names containing dots are not supported." + } else { + const columnNames = Object.keys(table.schema) + const invalidColumnName = columnNames.find(columnName => + columnName.includes(".") + ) + if (invalidColumnName) { + errors[ + tableName + ] = `Column '${invalidColumnName}' is not supported as it contains a dot.` + } + } + } + return errors +} + export async function buildFilteredSchema( datasource: Datasource, filter?: string[] @@ -30,16 +51,19 @@ export async function buildFilteredSchema( filteredSchema.errors[key] = schema.errors[key] } } - return filteredSchema + + return { + ...filteredSchema, + errors: { + ...filteredSchema.errors, + ...checkForSchemaErrors(filteredSchema.tables), + }, + } } async function buildSchemaHelper(datasource: Datasource): Promise { const connector = (await getConnector(datasource)) as DatasourcePlus - const externalSchema = await connector.buildSchema( - datasource._id!, - datasource.entities! - ) - return externalSchema + return await connector.buildSchema(datasource._id!, datasource.entities!) } export async function getConnector( From 7ac24492016328e3c42e87484947e6f60eb24216 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 10:08:42 +0000 Subject: [PATCH 28/54] Working on typing TestConfiguration.ts. 
--- .../src/api/routes/tests/application.spec.ts | 6 + packages/server/src/app.ts | 2 +- .../src/tests/utilities/TestConfiguration.ts | 224 ++++++++---------- 3 files changed, 107 insertions(+), 125 deletions(-) diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index dbe4eb51ae..78f021ac5d 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -248,4 +248,10 @@ describe("/applications", () => { expect(devLogs.data.length).toBe(0) }) }) + + describe("permissions", () => { + it("should return the list of apps the user has access to", async () => { + const user = config.user + }) + }) }) diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts index 4e84422dec..aa96a30b00 100644 --- a/packages/server/src/app.ts +++ b/packages/server/src/app.ts @@ -29,6 +29,6 @@ start().catch(err => { throw err }) -export function getServer() { +export function getServer(): Server { return server } diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 22bb66b130..5333f1ebf2 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -49,6 +49,7 @@ import { AuthToken, Automation, CreateViewRequest, + Ctx, Datasource, FieldType, INTERNAL_TABLE_SOURCE_ID, @@ -68,6 +69,8 @@ import { import API from "./api" import { cloneDeep } from "lodash" import jwt, { Secret } from "jsonwebtoken" +import { Server } from "http" +import { userDetailListType } from "aws-sdk/clients/iam" mocks.licenses.init(pro) @@ -82,16 +85,16 @@ export interface TableToBuild extends Omit { } export default class TestConfiguration { - server: any - request: supertest.SuperTest | undefined + server?: Server + request?: supertest.SuperTest started: boolean - appId: string | null + appId?: string 
allApps: any[] app?: App - prodApp: any - prodAppId: any - user: any - userMetadataId: any + prodApp?: App + prodAppId?: string + user?: User + userMetadataId?: string table?: Table automation: any datasource?: Datasource @@ -99,10 +102,6 @@ export default class TestConfiguration { api: API csrfToken?: string - private get globalUserId() { - return this.user._id - } - constructor(openServer = true) { if (openServer) { // use a random port because it doesn't matter @@ -114,7 +113,7 @@ export default class TestConfiguration { } else { this.started = false } - this.appId = null + this.appId = undefined this.allApps = [] this.api = new API(this) @@ -134,37 +133,49 @@ export default class TestConfiguration { getAppId() { if (!this.appId) { - throw "appId has not been initialised properly" + throw new Error("appId has not been initialised properly") } - return this.appId } getProdAppId() { + if (!this.prodAppId) { + throw new Error( + "prodAppId has not been initialised, call config.init() first" + ) + } return this.prodAppId } + getUser(): User { + if (!this.user) { + throw new Error("User has not been initialised, call config.init() first") + } + return this.user + } + getUserDetails() { + const user = this.getUser() return { - globalId: this.globalUserId, - email: this.user.email, - firstName: this.user.firstName, - lastName: this.user.lastName, + globalId: user._id!, + email: user.email, + firstName: user.firstName, + lastName: user.lastName, } } async doInContext( - appId: string | null, + appId: string | undefined, task: () => Promise ): Promise { - if (!appId) { - appId = this.appId - } - const tenant = this.getTenantId() return tenancy.doInTenant(tenant, () => { + if (!appId) { + appId = this.appId + } + // check if already in a context - if (context.getAppId() == null && appId !== null) { + if (context.getAppId() == null && appId) { return context.doInAppContext(appId, async () => { return task() }) @@ -259,7 +270,11 @@ export default class TestConfiguration { 
// UTILS - _req(body: any, params: any, controlFunc: any) { + _req, Res, Context extends Ctx>( + handler: (ctx: Context) => Promise, + body?: Req, + params?: Record + ) { // create a fake request ctx const request: any = {} const appId = this.appId @@ -278,29 +293,19 @@ export default class TestConfiguration { throw new Error(`Error ${status} - ${message}`) } return this.doInContext(appId, async () => { - await controlFunc(request) + await handler(request) return request.body }) } // USER / AUTH - async globalUser( - config: { - id?: string - firstName?: string - lastName?: string - builder?: boolean - admin?: boolean - email?: string - roles?: any - } = {} - ): Promise { + async globalUser(config: Partial = {}): Promise { const { - id = `us_${newid()}`, + _id = `us_${newid()}`, firstName = generator.first(), lastName = generator.last(), - builder = true, - admin = false, + builder = { global: true }, + admin = { global: false }, email = generator.email(), roles, } = config @@ -308,72 +313,30 @@ export default class TestConfiguration { const db = tenancy.getTenantDB(this.getTenantId()) let existing try { - existing = await db.get(id) + existing = await db.get(_id) } catch (err) { existing = { email } } const user: User = { - _id: id, + _id: _id, ...existing, roles: roles || {}, tenantId: this.getTenantId(), firstName, lastName, } - await sessions.createASession(id, { + await sessions.createASession(_id, { sessionId: "sessionid", tenantId: this.getTenantId(), csrfToken: this.csrfToken, }) - if (builder) { - user.builder = { global: true } - } else { - user.builder = { global: false } - } - if (admin) { - user.admin = { global: true } - } else { - user.admin = { global: false } - } const resp = await db.put(user) - return { - _rev: resp.rev, - ...user, - } + return { _rev: resp.rev, ...user } } - async createUser( - user: { - id?: string - firstName?: string - lastName?: string - email?: string - builder?: boolean - admin?: boolean - roles?: UserRoles - } = {} - ): 
Promise { - const { - id, - firstName = generator.first(), - lastName = generator.last(), - email = generator.email(), - builder = true, - admin, - roles, - } = user - - const globalId = !id ? `us_${Math.random()}` : `us_${id}` - const resp = await this.globalUser({ - id: globalId, - firstName, - lastName, - email, - builder, - admin, - roles: roles || {}, - }) - await cache.user.invalidateUser(globalId) + async createUser(user: Partial = {}): Promise { + const resp = await this.globalUser(user) + await cache.user.invalidateUser(resp._id!) return resp } @@ -381,7 +344,7 @@ export default class TestConfiguration { return context.doInTenant(this.tenantId!, async () => { const baseGroup = structures.userGroups.userGroup() baseGroup.roles = { - [this.prodAppId]: roleId, + [this.getProdAppId()]: roleId, } const { id, rev } = await pro.sdk.groups.save(baseGroup) return { @@ -404,8 +367,18 @@ export default class TestConfiguration { }) } - async login({ roleId, userId, builder, prodApp = false }: any = {}) { - const appId = prodApp ? this.prodAppId : this.appId + async login({ + roleId, + userId, + builder, + prodApp, + }: { + roleId: string + userId: string + builder: boolean + prodApp: boolean + }) { + const appId = prodApp ? this.getProdAppId() : this.getAppId() return context.doInAppContext(appId, async () => { userId = !userId ? 
`us_uuid1` : userId if (!this.request) { @@ -414,9 +387,9 @@ export default class TestConfiguration { // make sure the user exists in the global DB if (roleId !== roles.BUILTIN_ROLE_IDS.PUBLIC) { await this.globalUser({ - id: userId, - builder, - roles: { [this.prodAppId]: roleId }, + _id: userId, + builder: { global: builder }, + roles: { [appId]: roleId }, }) } await sessions.createASession(userId, { @@ -445,8 +418,9 @@ export default class TestConfiguration { defaultHeaders(extras = {}, prodApp = false) { const tenantId = this.getTenantId() + const user = this.getUser() const authObj: AuthToken = { - userId: this.globalUserId, + userId: user._id!, sessionId: "sessionid", tenantId, } @@ -498,7 +472,7 @@ export default class TestConfiguration { builder = false, prodApp = true, } = {}) { - return this.login({ email, roleId, builder, prodApp }) + return this.login({ userId: email, roleId, builder, prodApp }) } // TENANCY @@ -521,7 +495,7 @@ export default class TestConfiguration { this.tenantId = structures.tenant.id() this.user = await this.globalUser() - this.userMetadataId = generateUserMetadataID(this.user._id) + this.userMetadataId = generateUserMetadataID(this.user._id!) return this.createApp(appName) } @@ -532,7 +506,11 @@ export default class TestConfiguration { // API - async generateApiKey(userId = this.user._id) { + async generateApiKey(userId?: string) { + const user = this.getUser() + if (!userId) { + userId = user._id! 
+ } const db = tenancy.getTenantDB(this.getTenantId()) const id = dbCore.generateDevInfoID(userId) let devInfo: any @@ -552,13 +530,15 @@ export default class TestConfiguration { async createApp(appName: string): Promise { // create dev app // clear any old app - this.appId = null + this.appId = undefined this.app = await context.doInTenant(this.tenantId!, async () => { - const app = await this._req({ name: appName }, null, appController.create) + const app = (await this._req(appController.create, { + name: appName, + })) as App this.appId = app.appId! return app }) - return await context.doInAppContext(this.getAppId(), async () => { + return await context.doInAppContext(this.app.appId!, async () => { // create production app this.prodApp = await this.publish() @@ -570,7 +550,7 @@ export default class TestConfiguration { } async publish() { - await this._req(null, null, deployController.publishApp) + await this._req(deployController.publishApp) // @ts-ignore const prodAppId = this.getAppId().replace("_dev", "") this.prodAppId = prodAppId @@ -582,13 +562,11 @@ export default class TestConfiguration { } async unpublish() { - const response = await this._req( - null, - { appId: this.appId }, - appController.unpublish - ) - this.prodAppId = null - this.prodApp = null + const response = await this._req(appController.unpublish, { + appId: this.appId, + }) + this.prodAppId = undefined + this.prodApp = undefined return response } @@ -716,8 +694,7 @@ export default class TestConfiguration { // ROLE async createRole(config?: any) { - config = config || basicRole() - return this._req(config, null, roleController.save) + return this._req(roleController.save, config || basicRole()) } // VIEW @@ -730,7 +707,7 @@ export default class TestConfiguration { tableId: this.table!._id, name: generator.guid(), } - return this._req(view, null, viewController.v1.save) + return this._req(viewController.v1.save, view) } async createView( @@ -760,13 +737,13 @@ export default class 
TestConfiguration { delete config._rev } this.automation = ( - await this._req(config, null, automationController.create) + await this._req(automationController.create, config) ).automation return this.automation } async getAllAutomations() { - return this._req(null, null, automationController.fetch) + return this._req(automationController.fetch) } async deleteAutomation(automation?: any) { @@ -774,11 +751,10 @@ export default class TestConfiguration { if (!automation) { return } - return this._req( - null, - { id: automation._id, rev: automation._rev }, - automationController.destroy - ) + return this._req(automationController.destroy, { + id: automation._id, + rev: automation._rev, + }) } async createWebhook(config?: any) { @@ -787,7 +763,7 @@ export default class TestConfiguration { } config = config || basicWebhook(this.automation._id) - return (await this._req(config, null, webhookController.save)).webhook + return (await this._req(webhookController.save, config)).webhook } // DATASOURCE @@ -871,21 +847,21 @@ export default class TestConfiguration { throw "No datasource created for query." } config = config || basicQuery(this.datasource!._id!) - return this._req(config, null, queryController.save) + return this._req(queryController.save, config) } // SCREEN async createScreen(config?: any) { config = config || basicScreen() - return this._req(config, null, screenController.save) + return this._req(screenController.save, config) } // LAYOUT async createLayout(config?: any) { config = config || basicLayout() - return await this._req(config, null, layoutController.save) + return await this._req(layoutController.save, config) } } From 0c51a60e9ab16765ca1c5def7d5b1584331124c7 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 28 Feb 2024 10:16:21 +0000 Subject: [PATCH 29/54] Updating test cases as per PR comments. 
--- .../src/integrations/tests/sqlAlias.spec.ts | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index acf0f9898f..0672b421f2 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -176,11 +176,18 @@ describe("Captures of real examples", () => { }) describe("check some edge cases", () => { + const dotTableNames = ["hello.world", "foo.bar.baz"] + it("should handle table names/columns with dots in them", () => { - const tableNames = ["hello.world", "foo.bar.baz"] - const aliasing = new AliasTables(tableNames) - const aliased = aliasing.aliasField("hello.world.field") - expect(aliased).toEqual("a.field") + const aliasing = new AliasTables(dotTableNames) + const aliased = aliasing.aliasField(`"hello.world"."field"`) + expect(aliased).toEqual(`"a"."field"`) + }) + + it("should confirm table with dots in them works with grave accents", () => { + const aliasing = new AliasTables(dotTableNames) + const aliased = aliasing.aliasField("`hello.world`.`field`") + expect(aliased).toEqual("`a`.`field`") }) }) }) From a2df1deb8542a602572c93647245f06c891461ae Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 28 Feb 2024 11:08:35 +0000 Subject: [PATCH 30/54] Fixing issue found by test cases, column names containing table names. 
--- packages/server/src/api/controllers/row/alias.ts | 5 +++-- packages/server/src/integrations/tests/sqlAlias.spec.ts | 7 +++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index cb4d4f1453..ec96bb4603 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -55,10 +55,11 @@ export default class AliasTables { aliasField(field: string) { const tableNames = this.tableNames - const foundTable = tableNames.find(name => field.includes(name)) + const possibleTableName = field.substring(0, field.lastIndexOf(".")) + const foundTable = tableNames.find(name => possibleTableName.includes(name)) if (foundTable) { const aliasedTable = this.getAlias(foundTable) - return field.replace(foundTable, aliasedTable) + field = field.replace(foundTable, aliasedTable) } return field } diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 0672b421f2..6812d358b7 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -189,5 +189,12 @@ describe("Captures of real examples", () => { const aliased = aliasing.aliasField("`hello.world`.`field`") expect(aliased).toEqual("`a`.`field`") }) + + it("should handle if a table name is used in a column", () => { + const tableNames = ["hello", "world"] + const aliasing = new AliasTables(tableNames) + const aliased = aliasing.aliasField(`"hello"."world_relation"`) + expect(aliased).toEqual(`"a"."world_relation"`) + }) }) }) From c81ca66aa4aa45b445cda8c7d66e26b31efe4d5a Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 11:16:26 +0000 Subject: [PATCH 31/54] Get tests passing again. 
--- .../server/src/api/routes/tests/user.spec.ts | 12 ++++--- .../server/src/migrations/tests/index.spec.ts | 14 ++++---- .../sdk/app/applications/tests/sync.spec.ts | 14 ++++---- .../server/src/sdk/users/tests/utils.spec.ts | 34 ++++++++++++++----- .../src/tests/utilities/TestConfiguration.ts | 24 ++++++++----- 5 files changed, 63 insertions(+), 35 deletions(-) diff --git a/packages/server/src/api/routes/tests/user.spec.ts b/packages/server/src/api/routes/tests/user.spec.ts index e6349099d7..076ee064dc 100644 --- a/packages/server/src/api/routes/tests/user.spec.ts +++ b/packages/server/src/api/routes/tests/user.spec.ts @@ -27,15 +27,17 @@ describe("/users", () => { describe("fetch", () => { it("returns a list of users from an instance db", async () => { - await config.createUser({ id: "uuidx" }) - await config.createUser({ id: "uuidy" }) + const id1 = `us_${utils.newid()}` + const id2 = `us_${utils.newid()}` + await config.createUser({ _id: id1 }) + await config.createUser({ _id: id2 }) const res = await config.api.user.fetch() expect(res.length).toBe(3) const ids = res.map(u => u._id) - expect(ids).toContain(`ro_ta_users_us_uuidx`) - expect(ids).toContain(`ro_ta_users_us_uuidy`) + expect(ids).toContain(`ro_ta_users_${id1}`) + expect(ids).toContain(`ro_ta_users_${id2}`) }) it("should apply authorization to endpoint", async () => { @@ -54,7 +56,7 @@ describe("/users", () => { describe("update", () => { it("should be able to update the user", async () => { const user: UserMetadata = await config.createUser({ - id: `us_update${utils.newid()}`, + _id: `us_update${utils.newid()}`, }) user.roleId = roles.BUILTIN_ROLE_IDS.BASIC delete user._rev diff --git a/packages/server/src/migrations/tests/index.spec.ts b/packages/server/src/migrations/tests/index.spec.ts index c01040593a..236776cd3f 100644 --- a/packages/server/src/migrations/tests/index.spec.ts +++ b/packages/server/src/migrations/tests/index.spec.ts @@ -40,7 +40,7 @@ describe("migrations", () => { 
describe("backfill", () => { it("runs app db migration", async () => { - await config.doInContext(null, async () => { + await config.doInContext(undefined, async () => { await clearMigrations() await config.createAutomation() await config.createAutomation(structures.newAutomation()) @@ -93,18 +93,18 @@ describe("migrations", () => { }) it("runs global db migration", async () => { - await config.doInContext(null, async () => { + await config.doInContext(undefined, async () => { await clearMigrations() - const appId = config.prodAppId + const appId = config.getProdAppId() const roles = { [appId]: "role_12345" } await config.createUser({ - builder: false, - admin: true, + builder: { global: false }, + admin: { global: true }, roles, }) // admin only await config.createUser({ - builder: false, - admin: false, + builder: { global: false }, + admin: { global: false }, roles, }) // non admin non builder await config.createTable() diff --git a/packages/server/src/sdk/app/applications/tests/sync.spec.ts b/packages/server/src/sdk/app/applications/tests/sync.spec.ts index 1d28ed977c..a53bdb0bd7 100644 --- a/packages/server/src/sdk/app/applications/tests/sync.spec.ts +++ b/packages/server/src/sdk/app/applications/tests/sync.spec.ts @@ -43,8 +43,8 @@ async function createUser(email: string, roles: UserRoles, builder?: boolean) { const user = await config.createUser({ email, roles, - builder: builder || false, - admin: false, + builder: { global: builder || false }, + admin: { global: false }, }) await context.doInContext(config.appId!, async () => { await events.user.created(user) @@ -55,10 +55,10 @@ async function createUser(email: string, roles: UserRoles, builder?: boolean) { async function removeUserRole(user: User) { const final = await config.globalUser({ ...user, - id: user._id, + _id: user._id, roles: {}, - builder: false, - admin: false, + builder: { global: false }, + admin: { global: false }, }) await context.doInContext(config.appId!, async () => { await 
events.user.updated(final) @@ -69,8 +69,8 @@ async function createGroupAndUser(email: string) { groupUser = await config.createUser({ email, roles: {}, - builder: false, - admin: false, + builder: { global: false }, + admin: { global: false }, }) group = await config.createGroup() await config.addUserToGroup(group._id!, groupUser._id!) diff --git a/packages/server/src/sdk/users/tests/utils.spec.ts b/packages/server/src/sdk/users/tests/utils.spec.ts index efe790d49b..6f1c5afd3d 100644 --- a/packages/server/src/sdk/users/tests/utils.spec.ts +++ b/packages/server/src/sdk/users/tests/utils.spec.ts @@ -22,15 +22,18 @@ describe("syncGlobalUsers", () => { expect(metadata).toHaveLength(1) expect(metadata).toEqual([ expect.objectContaining({ - _id: db.generateUserMetadataID(config.user._id), + _id: db.generateUserMetadataID(config.getUser()._id!), }), ]) }) }) it("admin and builders users are synced", async () => { - const user1 = await config.createUser({ admin: true }) - const user2 = await config.createUser({ admin: false, builder: true }) + const user1 = await config.createUser({ admin: { global: true } }) + const user2 = await config.createUser({ + admin: { global: false }, + builder: { global: true }, + }) await config.doInContext(config.appId, async () => { expect(await rawUserMetadata()).toHaveLength(1) await syncGlobalUsers() @@ -51,7 +54,10 @@ describe("syncGlobalUsers", () => { }) it("app users are not synced if not specified", async () => { - const user = await config.createUser({ admin: false, builder: false }) + const user = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) await config.doInContext(config.appId, async () => { await syncGlobalUsers() @@ -68,8 +74,14 @@ describe("syncGlobalUsers", () => { it("app users are added when group is assigned to app", async () => { await config.doInTenant(async () => { const group = await proSdk.groups.save(structures.userGroups.userGroup()) - const user1 = await 
config.createUser({ admin: false, builder: false }) - const user2 = await config.createUser({ admin: false, builder: false }) + const user1 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) + const user2 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) await proSdk.groups.addUsers(group.id, [user1._id!, user2._id!]) await config.doInContext(config.appId, async () => { @@ -103,8 +115,14 @@ describe("syncGlobalUsers", () => { it("app users are removed when app is removed from user group", async () => { await config.doInTenant(async () => { const group = await proSdk.groups.save(structures.userGroups.userGroup()) - const user1 = await config.createUser({ admin: false, builder: false }) - const user2 = await config.createUser({ admin: false, builder: false }) + const user1 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) + const user2 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) await proSdk.groups.updateGroupApps(group.id, { appsToAdd: [ { appId: config.prodAppId!, roleId: roles.BUILTIN_ROLE_IDS.BASIC }, diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 5333f1ebf2..f6f0992585 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -307,23 +307,28 @@ export default class TestConfiguration { builder = { global: true }, admin = { global: false }, email = generator.email(), - roles, + tenantId = this.getTenantId(), + roles = {}, } = config const db = tenancy.getTenantDB(this.getTenantId()) - let existing + let existing: Partial = {} try { existing = await db.get(_id) } catch (err) { - existing = { email } + // ignore } const user: User = { - _id: _id, + _id, ...existing, - roles: roles || {}, - tenantId: this.getTenantId(), + ...config, + 
email, + roles, + tenantId, firstName, lastName, + builder, + admin, } await sessions.createASession(_id, { sessionId: "sessionid", @@ -331,7 +336,10 @@ export default class TestConfiguration { csrfToken: this.csrfToken, }) const resp = await db.put(user) - return { _rev: resp.rev, ...user } + return { + _rev: resp.rev, + ...user, + } } async createUser(user: Partial = {}): Promise { @@ -751,7 +759,7 @@ export default class TestConfiguration { if (!automation) { return } - return this._req(automationController.destroy, { + return this._req(automationController.destroy, undefined, { id: automation._id, rev: automation._rev, }) From fde5825589f49a8e025499de997c2734990aebdf Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 11:20:42 +0000 Subject: [PATCH 32/54] Fix type checks. --- packages/server/src/api/routes/tests/row.spec.ts | 2 +- .../functions/usageQuotas/tests/syncApps.spec.ts | 2 +- .../functions/usageQuotas/tests/syncCreators.spec.ts | 4 ++-- .../functions/usageQuotas/tests/syncRows.spec.ts | 2 +- .../functions/usageQuotas/tests/syncUsers.spec.ts | 2 +- .../server/src/sdk/app/rows/tests/internal.spec.ts | 10 +++++----- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 239da36351..726e493b2d 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -110,7 +110,7 @@ describe.each([ config.api.row.get(tbl_Id, id, { expectStatus: status }) const getRowUsage = async () => { - const { total } = await config.doInContext(null, () => + const { total } = await config.doInContext(undefined, () => quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS) ) return total diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts index d0d50395b2..1d4d4d0f71 
100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts @@ -13,7 +13,7 @@ describe("syncApps", () => { afterAll(config.end) it("runs successfully", async () => { - return config.doInContext(null, async () => { + return config.doInContext(undefined, async () => { // create the usage quota doc and mock usages await quotas.getQuotaUsage() await quotas.setUsage(3, StaticQuotaName.APPS, QuotaUsageType.STATIC) diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts index 75fa9f217e..93b7d4949b 100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts @@ -12,8 +12,8 @@ describe("syncCreators", () => { afterAll(config.end) it("syncs creators", async () => { - return config.doInContext(null, async () => { - await config.createUser({ admin: true }) + return config.doInContext(undefined, async () => { + await config.createUser({ admin: { global: true } }) await syncCreators.run() diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts index e644d605b6..730278683c 100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts @@ -14,7 +14,7 @@ describe("syncRows", () => { afterAll(config.end) it("runs successfully", async () => { - return config.doInContext(null, async () => { + return config.doInContext(undefined, async () => { // create the usage quota doc and mock usages await quotas.getQuotaUsage() await quotas.setUsage(300, StaticQuotaName.ROWS, QuotaUsageType.STATIC) diff --git 
a/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts index f7500c8b4d..2731cc041d 100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts @@ -12,7 +12,7 @@ describe("syncUsers", () => { afterAll(config.end) it("syncs users", async () => { - return config.doInContext(null, async () => { + return config.doInContext(undefined, async () => { await config.createUser() await syncUsers.run() diff --git a/packages/server/src/sdk/app/rows/tests/internal.spec.ts b/packages/server/src/sdk/app/rows/tests/internal.spec.ts index dda41d5720..3908ef83ed 100644 --- a/packages/server/src/sdk/app/rows/tests/internal.spec.ts +++ b/packages/server/src/sdk/app/rows/tests/internal.spec.ts @@ -81,7 +81,7 @@ describe("sdk >> rows >> internal", () => { const response = await internalSdk.save( table._id!, row, - config.user._id + config.getUser()._id ) expect(response).toEqual({ @@ -129,7 +129,7 @@ describe("sdk >> rows >> internal", () => { const response = await internalSdk.save( table._id!, row, - config.user._id + config.getUser()._id ) expect(response).toEqual({ @@ -190,15 +190,15 @@ describe("sdk >> rows >> internal", () => { await config.doInContext(config.appId, async () => { for (const row of makeRows(5)) { - await internalSdk.save(table._id!, row, config.user._id) + await internalSdk.save(table._id!, row, config.getUser()._id) } await Promise.all( makeRows(10).map(row => - internalSdk.save(table._id!, row, config.user._id) + internalSdk.save(table._id!, row, config.getUser()._id) ) ) for (const row of makeRows(5)) { - await internalSdk.save(table._id!, row, config.user._id) + await internalSdk.save(table._id!, row, config.getUser()._id) } }) From bfb0064289a24561db92d46a73f9702997ebe27e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 11:46:58 +0000 
Subject: [PATCH 33/54] More types. --- .../server/src/api/controllers/automation.ts | 4 +- .../src/api/routes/tests/automation.spec.ts | 4 +- .../src/api/routes/tests/backup.spec.ts | 2 +- .../src/api/routes/tests/webhook.spec.ts | 4 +- .../src/tests/utilities/TestConfiguration.ts | 60 ++++++++++++------- 5 files changed, 47 insertions(+), 27 deletions(-) diff --git a/packages/server/src/api/controllers/automation.ts b/packages/server/src/api/controllers/automation.ts index 186b68f3b7..b7c29efa6f 100644 --- a/packages/server/src/api/controllers/automation.ts +++ b/packages/server/src/api/controllers/automation.ts @@ -72,7 +72,9 @@ function cleanAutomationInputs(automation: Automation) { return automation } -export async function create(ctx: UserCtx) { +export async function create( + ctx: UserCtx +) { const db = context.getAppDB() let automation = ctx.request.body automation.appId = ctx.appId diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts index 178189555d..ee8fc7d544 100644 --- a/packages/server/src/api/routes/tests/automation.spec.ts +++ b/packages/server/src/api/routes/tests/automation.spec.ts @@ -394,7 +394,7 @@ describe("/automations", () => { it("deletes a automation by its ID", async () => { const automation = await config.createAutomation() const res = await request - .delete(`/api/automations/${automation.id}/${automation.rev}`) + .delete(`/api/automations/${automation._id}/${automation._rev}`) .set(config.defaultHeaders()) .expect("Content-Type", /json/) .expect(200) @@ -408,7 +408,7 @@ describe("/automations", () => { await checkBuilderEndpoint({ config, method: "DELETE", - url: `/api/automations/${automation.id}/${automation._rev}`, + url: `/api/automations/${automation._id}/${automation._rev}`, }) }) }) diff --git a/packages/server/src/api/routes/tests/backup.spec.ts b/packages/server/src/api/routes/tests/backup.spec.ts index acfac783db..becbeb5480 100644 --- 
a/packages/server/src/api/routes/tests/backup.spec.ts +++ b/packages/server/src/api/routes/tests/backup.spec.ts @@ -44,7 +44,7 @@ describe("/backups", () => { expect(headers["content-disposition"]).toEqual( `attachment; filename="${ - config.getApp()!.name + config.getApp().name }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"` ) }) diff --git a/packages/server/src/api/routes/tests/webhook.spec.ts b/packages/server/src/api/routes/tests/webhook.spec.ts index 38f84852b4..48a6da38bf 100644 --- a/packages/server/src/api/routes/tests/webhook.spec.ts +++ b/packages/server/src/api/routes/tests/webhook.spec.ts @@ -36,7 +36,7 @@ describe("/webhooks", () => { const automation = await config.createAutomation() const res = await request .put(`/api/webhooks`) - .send(basicWebhook(automation._id)) + .send(basicWebhook(automation._id!)) .set(config.defaultHeaders()) .expect("Content-Type", /json/) .expect(200) @@ -145,7 +145,7 @@ describe("/webhooks", () => { let automation = collectAutomation() let newAutomation = await config.createAutomation(automation) let syncWebhook = await config.createWebhook( - basicWebhook(newAutomation._id) + basicWebhook(newAutomation._id!) 
) // replicate changes before checking webhook diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index f6f0992585..599675bd4e 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -61,7 +61,7 @@ import { Table, TableSourceType, User, - UserRoles, + UserCtx, View, WithRequired, } from "@budibase/types" @@ -70,7 +70,6 @@ import API from "./api" import { cloneDeep } from "lodash" import jwt, { Secret } from "jsonwebtoken" import { Server } from "http" -import { userDetailListType } from "aws-sdk/clients/iam" mocks.licenses.init(pro) @@ -89,14 +88,14 @@ export default class TestConfiguration { request?: supertest.SuperTest started: boolean appId?: string - allApps: any[] + allApps: App[] app?: App prodApp?: App prodAppId?: string user?: User userMetadataId?: string table?: Table - automation: any + automation?: Automation datasource?: Datasource tenantId?: string api: API @@ -124,16 +123,26 @@ export default class TestConfiguration { } getApp() { + if (!this.app) { + throw new Error("app has not been initialised, call config.init() first") + } return this.app } getProdApp() { + if (!this.prodApp) { + throw new Error( + "prodApp has not been initialised, call config.init() first" + ) + } return this.prodApp } getAppId() { if (!this.appId) { - throw new Error("appId has not been initialised properly") + throw new Error( + "appId has not been initialised, call config.init() first" + ) } return this.appId } @@ -164,6 +173,15 @@ export default class TestConfiguration { } } + getAutomation() { + if (!this.automation) { + throw new Error( + "automation has not been initialised, call config.init() first" + ) + } + return this.automation + } + async doInContext( appId: string | undefined, task: () => Promise @@ -270,11 +288,11 @@ export default class TestConfiguration { // UTILS - _req, Res, Context extends Ctx>( - 
handler: (ctx: Context) => Promise, + _req, Res>( + handler: (ctx: UserCtx) => Promise, body?: Req, params?: Record - ) { + ): Promise { // create a fake request ctx const request: any = {} const appId = this.appId @@ -539,19 +557,20 @@ export default class TestConfiguration { // create dev app // clear any old app this.appId = undefined - this.app = await context.doInTenant(this.tenantId!, async () => { - const app = (await this._req(appController.create, { - name: appName, - })) as App - this.appId = app.appId! - return app - }) + this.app = await context.doInTenant( + this.tenantId!, + async () => + (await this._req(appController.create, { + name: appName, + })) as App + ) + this.appId = this.app.appId return await context.doInAppContext(this.app.appId!, async () => { // create production app this.prodApp = await this.publish() this.allApps.push(this.prodApp) - this.allApps.push(this.app) + this.allApps.push(this.app!) return this.app! }) @@ -739,14 +758,13 @@ export default class TestConfiguration { // AUTOMATION - async createAutomation(config?: any) { + async createAutomation(config?: Automation) { config = config || basicAutomation() if (config._rev) { delete config._rev } - this.automation = ( - await this._req(automationController.create, config) - ).automation + const res = await this._req(automationController.create, config) + this.automation = res.automation return this.automation } @@ -769,7 +787,7 @@ export default class TestConfiguration { if (!this.automation) { throw "Must create an automation before creating webhook." } - config = config || basicWebhook(this.automation._id) + config = config || basicWebhook(this.automation._id!) return (await this._req(webhookController.save, config)).webhook } From a9392b2176dc9847d4d08bad832364cffe310600 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 12:13:13 +0000 Subject: [PATCH 34/54] More types. 
--- .../server/src/api/controllers/automation.ts | 3 +- packages/server/src/api/controllers/layout.ts | 4 +- .../server/src/api/controllers/query/index.ts | 2 +- packages/server/src/api/controllers/screen.ts | 10 ++++- .../src/api/routes/tests/datasource.spec.ts | 4 +- .../routes/tests/queries/query.seq.spec.ts | 9 ++-- .../routes/tests/utilities/TestFunctions.ts | 12 ++++-- packages/server/src/constants/layouts.ts | 4 +- packages/server/src/constants/screens.ts | 5 +-- .../src/tests/utilities/TestConfiguration.ts | 42 ++++++++++++------- .../server/src/tests/utilities/structures.ts | 6 ++- packages/types/src/documents/app/layout.ts | 5 +++ packages/types/src/documents/app/screen.ts | 1 + 13 files changed, 72 insertions(+), 35 deletions(-) diff --git a/packages/server/src/api/controllers/automation.ts b/packages/server/src/api/controllers/automation.ts index b7c29efa6f..d1bd580331 100644 --- a/packages/server/src/api/controllers/automation.ts +++ b/packages/server/src/api/controllers/automation.ts @@ -25,6 +25,7 @@ import { getActionDefinitions as actionDefs } from "../../automations/actions" import sdk from "../../sdk" import { builderSocket } from "../../websockets" import env from "../../environment" +import { DocumentDestroyResponse } from "@budibase/nano" async function getActionDefinitions() { return removeDeprecated(await actionDefs()) @@ -209,7 +210,7 @@ export async function find(ctx: UserCtx) { ctx.body = await db.get(ctx.params.id) } -export async function destroy(ctx: UserCtx) { +export async function destroy(ctx: UserCtx) { const db = context.getAppDB() const automationId = ctx.params.id const oldAutomation = await db.get(automationId) diff --git a/packages/server/src/api/controllers/layout.ts b/packages/server/src/api/controllers/layout.ts index 69e4ad91ed..1a15432b88 100644 --- a/packages/server/src/api/controllers/layout.ts +++ b/packages/server/src/api/controllers/layout.ts @@ -1,9 +1,9 @@ import { EMPTY_LAYOUT } from "../../constants/layouts" 
import { generateLayoutID, getScreenParams } from "../../db/utils" import { events, context } from "@budibase/backend-core" -import { BBContext, Layout } from "@budibase/types" +import { BBContext, Layout, UserCtx } from "@budibase/types" -export async function save(ctx: BBContext) { +export async function save(ctx: UserCtx) { const db = context.getAppDB() let layout = ctx.request.body diff --git a/packages/server/src/api/controllers/query/index.ts b/packages/server/src/api/controllers/query/index.ts index 768c921150..973718ba48 100644 --- a/packages/server/src/api/controllers/query/index.ts +++ b/packages/server/src/api/controllers/query/index.ts @@ -73,7 +73,7 @@ const _import = async (ctx: UserCtx) => { } export { _import as import } -export async function save(ctx: UserCtx) { +export async function save(ctx: UserCtx) { const db = context.getAppDB() const query: Query = ctx.request.body diff --git a/packages/server/src/api/controllers/screen.ts b/packages/server/src/api/controllers/screen.ts index 446fe2e5fa..ee8e0ff892 100644 --- a/packages/server/src/api/controllers/screen.ts +++ b/packages/server/src/api/controllers/screen.ts @@ -7,7 +7,13 @@ import { roles, } from "@budibase/backend-core" import { updateAppPackage } from "./application" -import { Plugin, ScreenProps, BBContext, Screen } from "@budibase/types" +import { + Plugin, + ScreenProps, + BBContext, + Screen, + UserCtx, +} from "@budibase/types" import { builderSocket } from "../../websockets" export async function fetch(ctx: BBContext) { @@ -31,7 +37,7 @@ export async function fetch(ctx: BBContext) { ) } -export async function save(ctx: BBContext) { +export async function save(ctx: UserCtx) { const db = context.getAppDB() let screen = ctx.request.body diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index 41229b0a2a..032da71b80 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ 
b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -86,7 +86,7 @@ describe("/datasources", () => { }) // check variables in cache let contents = await checkCacheForDynamicVariable( - query._id, + query._id!, "variable3" ) expect(contents.rows.length).toEqual(1) @@ -102,7 +102,7 @@ describe("/datasources", () => { expect(res.body.errors).toBeUndefined() // check variables no longer in cache - contents = await checkCacheForDynamicVariable(query._id, "variable3") + contents = await checkCacheForDynamicVariable(query._id!, "variable3") expect(contents).toBe(null) }) }) diff --git a/packages/server/src/api/routes/tests/queries/query.seq.spec.ts b/packages/server/src/api/routes/tests/queries/query.seq.spec.ts index 52d35fa782..2bbc8366ea 100644 --- a/packages/server/src/api/routes/tests/queries/query.seq.spec.ts +++ b/packages/server/src/api/routes/tests/queries/query.seq.spec.ts @@ -467,7 +467,10 @@ describe("/queries", () => { queryString: "test={{ variable3 }}", }) // check its in cache - const contents = await checkCacheForDynamicVariable(base._id, "variable3") + const contents = await checkCacheForDynamicVariable( + base._id!, + "variable3" + ) expect(contents.rows.length).toEqual(1) const responseBody = await preview(datasource, { path: "www.failonce.com", @@ -490,7 +493,7 @@ describe("/queries", () => { queryString: "test={{ variable3 }}", }) // check its in cache - let contents = await checkCacheForDynamicVariable(base._id, "variable3") + let contents = await checkCacheForDynamicVariable(base._id!, "variable3") expect(contents.rows.length).toEqual(1) // delete the query @@ -500,7 +503,7 @@ describe("/queries", () => { .expect(200) // check variables no longer in cache - contents = await checkCacheForDynamicVariable(base._id, "variable3") + contents = await checkCacheForDynamicVariable(base._id!, "variable3") expect(contents).toBe(null) }) }) diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts 
b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts index 53e90396aa..0576b1e748 100644 --- a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts +++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts @@ -4,6 +4,7 @@ import { AppStatus } from "../../../../db/utils" import { roles, tenancy, context, db } from "@budibase/backend-core" import env from "../../../../environment" import Nano from "@budibase/nano" +import TestConfiguration from "src/tests/utilities/TestConfiguration" class Request { appId: any @@ -52,10 +53,10 @@ export const clearAllApps = async ( }) } -export const clearAllAutomations = async (config: any) => { +export const clearAllAutomations = async (config: TestConfiguration) => { const automations = await config.getAllAutomations() for (let auto of automations) { - await context.doInAppContext(config.appId, async () => { + await context.doInAppContext(config.getAppId(), async () => { await config.deleteAutomation(auto) }) } @@ -101,7 +102,12 @@ export const checkBuilderEndpoint = async ({ method, url, body, -}: any) => { +}: { + config: TestConfiguration + method: string + url: string + body: any +}) => { const headers = await config.login({ userId: "us_fail", builder: false, diff --git a/packages/server/src/constants/layouts.ts b/packages/server/src/constants/layouts.ts index 835a5d2e15..f4eb337c2d 100644 --- a/packages/server/src/constants/layouts.ts +++ b/packages/server/src/constants/layouts.ts @@ -1,9 +1,11 @@ +import { Layout } from "@budibase/types" + export const BASE_LAYOUT_PROP_IDS = { PRIVATE: "layout_private_master", PUBLIC: "layout_public_master", } -export const EMPTY_LAYOUT = { +export const EMPTY_LAYOUT: Layout = { componentLibraries: ["@budibase/standard-components"], title: "{{ name }}", favicon: "./_shared/favicon.png", diff --git a/packages/server/src/constants/screens.ts b/packages/server/src/constants/screens.ts index 6c88b0f957..1107289ea0 100644 --- 
a/packages/server/src/constants/screens.ts +++ b/packages/server/src/constants/screens.ts @@ -1,5 +1,6 @@ import { roles } from "@budibase/backend-core" import { BASE_LAYOUT_PROP_IDS } from "./layouts" +import { Screen } from "@budibase/types" export function createHomeScreen( config: { @@ -9,10 +10,8 @@ export function createHomeScreen( roleId: roles.BUILTIN_ROLE_IDS.BASIC, route: "/", } -) { +): Screen { return { - description: "", - url: "", layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE, props: { _id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59", diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 599675bd4e..70794934cc 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -53,9 +53,12 @@ import { Datasource, FieldType, INTERNAL_TABLE_SOURCE_ID, + Layout, + Query, RelationshipFieldMetadata, RelationshipType, Row, + Screen, SearchParams, SourceName, Table, @@ -63,6 +66,7 @@ import { User, UserCtx, View, + Webhook, WithRequired, } from "@budibase/types" @@ -182,6 +186,15 @@ export default class TestConfiguration { return this.automation } + getDatasource() { + if (!this.datasource) { + throw new Error( + "datasource has not been initialised, call config.init() first" + ) + } + return this.datasource + } + async doInContext( appId: string | undefined, task: () => Promise @@ -288,10 +301,10 @@ export default class TestConfiguration { // UTILS - _req, Res>( + _req | void, Res>( handler: (ctx: UserCtx) => Promise, body?: Req, - params?: Record + params?: Record ): Promise { // create a fake request ctx const request: any = {} @@ -399,7 +412,7 @@ export default class TestConfiguration { builder, prodApp, }: { - roleId: string + roleId?: string userId: string builder: boolean prodApp: boolean @@ -415,7 +428,7 @@ export default class TestConfiguration { await this.globalUser({ _id: userId, builder: { global: builder }, - 
roles: { [appId]: roleId }, + roles: { [appId]: roleId || roles.BUILTIN_ROLE_IDS.BASIC }, }) } await sessions.createASession(userId, { @@ -772,7 +785,7 @@ export default class TestConfiguration { return this._req(automationController.fetch) } - async deleteAutomation(automation?: any) { + async deleteAutomation(automation?: Automation) { automation = automation || this.automation if (!automation) { return @@ -783,7 +796,7 @@ export default class TestConfiguration { }) } - async createWebhook(config?: any) { + async createWebhook(config?: Webhook) { if (!this.automation) { throw "Must create an automation before creating webhook." } @@ -811,7 +824,7 @@ export default class TestConfiguration { return { ...this.datasource, _id: this.datasource!._id! } } - async restDatasource(cfg?: any) { + async restDatasource(cfg?: Record) { return this.createDatasource({ datasource: { ...basicDatasource().datasource, @@ -868,24 +881,23 @@ export default class TestConfiguration { // QUERY - async createQuery(config?: any) { - if (!this.datasource && !config) { - throw "No datasource created for query." - } - config = config || basicQuery(this.datasource!._id!) - return this._req(queryController.save, config) + async createQuery(config?: Query) { + return this._req( + queryController.save, + config || basicQuery(this.getDatasource()._id!) 
+ ) } // SCREEN - async createScreen(config?: any) { + async createScreen(config?: Screen) { config = config || basicScreen() return this._req(screenController.save, config) } // LAYOUT - async createLayout(config?: any) { + async createLayout(config?: Layout) { config = config || basicLayout() return await this._req(layoutController.save, config) } diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts index 2fecf15fd6..5b50bd1175 100644 --- a/packages/server/src/tests/utilities/structures.ts +++ b/packages/server/src/tests/utilities/structures.ts @@ -22,6 +22,8 @@ import { INTERNAL_TABLE_SOURCE_ID, TableSourceType, Query, + Webhook, + WebhookActionType, } from "@budibase/types" import { LoopInput, LoopStepType } from "../../definitions/automations" @@ -407,12 +409,12 @@ export function basicLayout() { return cloneDeep(EMPTY_LAYOUT) } -export function basicWebhook(automationId: string) { +export function basicWebhook(automationId: string): Webhook { return { live: true, name: "webhook", action: { - type: "automation", + type: WebhookActionType.AUTOMATION, target: automationId, }, } diff --git a/packages/types/src/documents/app/layout.ts b/packages/types/src/documents/app/layout.ts index 06542f680d..51ce511712 100644 --- a/packages/types/src/documents/app/layout.ts +++ b/packages/types/src/documents/app/layout.ts @@ -1,6 +1,11 @@ import { Document } from "../document" export interface Layout extends Document { + componentLibraries: string[] + title: string + favicon: string + stylesheets: string[] props: any layoutId?: string + name?: string } diff --git a/packages/types/src/documents/app/screen.ts b/packages/types/src/documents/app/screen.ts index 58c00ef3d6..4977c79b0b 100644 --- a/packages/types/src/documents/app/screen.ts +++ b/packages/types/src/documents/app/screen.ts @@ -22,4 +22,5 @@ export interface Screen extends Document { routing: ScreenRouting props: ScreenProps name?: string + pluginAdded?: 
boolean } From 07b3d83ebb9df55f4567905169a342cbaa6ae2a7 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 12:14:03 +0000 Subject: [PATCH 35/54] Remove test skeleton. --- packages/server/src/api/routes/tests/application.spec.ts | 6 ------ 1 file changed, 6 deletions(-) diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 78f021ac5d..dbe4eb51ae 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -248,10 +248,4 @@ describe("/applications", () => { expect(devLogs.data.length).toBe(0) }) }) - - describe("permissions", () => { - it("should return the list of apps the user has access to", async () => { - const user = config.user - }) - }) }) From 237634386c153291d3f8627e8af95a02c4fe4866 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 12:19:08 +0000 Subject: [PATCH 36/54] More typing fixes. --- packages/backend-core/src/docIds/ids.ts | 4 ++-- .../server/src/api/routes/tests/utilities/TestFunctions.ts | 2 +- packages/server/src/tests/utilities/TestConfiguration.ts | 2 +- packages/worker/src/tests/TestConfiguration.ts | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/backend-core/src/docIds/ids.ts b/packages/backend-core/src/docIds/ids.ts index 02176109da..9627b2b94c 100644 --- a/packages/backend-core/src/docIds/ids.ts +++ b/packages/backend-core/src/docIds/ids.ts @@ -74,7 +74,7 @@ export function getGlobalIDFromUserMetadataID(id: string) { * Generates a template ID. * @param ownerId The owner/user of the template, this could be global or a workspace level. 
*/ -export function generateTemplateID(ownerId: any) { +export function generateTemplateID(ownerId: string) { return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}` } @@ -105,7 +105,7 @@ export function prefixRoleID(name: string) { * Generates a new dev info document ID - this is scoped to a user. * @returns The new dev info ID which info for dev (like api key) can be stored under. */ -export const generateDevInfoID = (userId: any) => { +export const generateDevInfoID = (userId: string) => { return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}` } diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts index 0576b1e748..8a843551ac 100644 --- a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts +++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts @@ -106,7 +106,7 @@ export const checkBuilderEndpoint = async ({ config: TestConfiguration method: string url: string - body: any + body?: any }) => { const headers = await config.login({ userId: "us_fail", diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 70794934cc..21605b7a5e 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -539,7 +539,7 @@ export default class TestConfiguration { return this.createApp(appName) } - doInTenant(task: any) { + doInTenant(task: () => T) { return context.doInTenant(this.getTenantId(), task) } diff --git a/packages/worker/src/tests/TestConfiguration.ts b/packages/worker/src/tests/TestConfiguration.ts index df6726eed1..3ebfb5f020 100644 --- a/packages/worker/src/tests/TestConfiguration.ts +++ b/packages/worker/src/tests/TestConfiguration.ts @@ -280,7 +280,7 @@ class TestConfiguration { const db = context.getGlobalDB() - const id = dbCore.generateDevInfoID(this.user!._id) + const id = 
dbCore.generateDevInfoID(this.user!._id!) // TODO: dry this.apiKey = encryption.encrypt( `${this.tenantId}${dbCore.SEPARATOR}${utils.newid()}` From cd81a83b949589f3413e589032247f8f99755335 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 28 Feb 2024 13:37:11 +0000 Subject: [PATCH 37/54] Reverting changes to handle full stops. --- .../server/src/api/controllers/row/alias.ts | 15 ++++++++----- .../src/integrations/tests/sqlAlias.spec.ts | 21 +++++++------------ 2 files changed, 17 insertions(+), 19 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index ec96bb4603..d2d41e085f 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -55,11 +55,16 @@ export default class AliasTables { aliasField(field: string) { const tableNames = this.tableNames - const possibleTableName = field.substring(0, field.lastIndexOf(".")) - const foundTable = tableNames.find(name => possibleTableName.includes(name)) - if (foundTable) { - const aliasedTable = this.getAlias(foundTable) - field = field.replace(foundTable, aliasedTable) + if (field.includes(".")) { + const [tableName, column] = field.split(".") + const foundTableName = tableNames.find(name => tableName.includes(name)) + if (foundTableName) { + const aliasedTableName = tableName.replace( + foundTableName, + this.getAlias(foundTableName) + ) + return `${aliasedTableName}.${column}` + } } return field } diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 6812d358b7..3fb90a3b4d 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -176,25 +176,18 @@ describe("Captures of real examples", () => { }) describe("check some edge cases", () => { - const dotTableNames = ["hello.world", "foo.bar.baz"] + const tableNames = ["hello", "world"] 
- it("should handle table names/columns with dots in them", () => { - const aliasing = new AliasTables(dotTableNames) - const aliased = aliasing.aliasField(`"hello.world"."field"`) + it("should quoted table names", () => { + const aliasing = new AliasTables(tableNames) + const aliased = aliasing.aliasField(`"hello"."field"`) expect(aliased).toEqual(`"a"."field"`) }) - it("should confirm table with dots in them works with grave accents", () => { - const aliasing = new AliasTables(dotTableNames) - const aliased = aliasing.aliasField("`hello.world`.`field`") - expect(aliased).toEqual("`a`.`field`") - }) - - it("should handle if a table name is used in a column", () => { - const tableNames = ["hello", "world"] + it("should handle quoted table names with graves", () => { const aliasing = new AliasTables(tableNames) - const aliased = aliasing.aliasField(`"hello"."world_relation"`) - expect(aliased).toEqual(`"a"."world_relation"`) + const aliased = aliasing.aliasField("`hello`.`world`") + expect(aliased).toEqual("`a`.`world`") }) }) }) From aa6fa7661824186674096ca8cfc7dc019c547bea Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 28 Feb 2024 13:37:47 +0000 Subject: [PATCH 38/54] Missing word in test case. 
--- packages/server/src/integrations/tests/sqlAlias.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 3fb90a3b4d..497b981f68 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -178,7 +178,7 @@ describe("Captures of real examples", () => { describe("check some edge cases", () => { const tableNames = ["hello", "world"] - it("should quoted table names", () => { + it("should handle quoted table names", () => { const aliasing = new AliasTables(tableNames) const aliased = aliasing.aliasField(`"hello"."field"`) expect(aliased).toEqual(`"a"."field"`) From 58e42714402c4f90c10820f9ff5b80c23e5b7e64 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 28 Feb 2024 14:38:43 +0000 Subject: [PATCH 39/54] Fixing issue with quotes/graves. --- packages/server/src/api/controllers/row/alias.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index d2d41e085f..c27bed6a4b 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -57,13 +57,15 @@ export default class AliasTables { const tableNames = this.tableNames if (field.includes(".")) { const [tableName, column] = field.split(".") - const foundTableName = tableNames.find(name => tableName.includes(name)) + const foundTableName = tableNames.find( + name => tableName.includes(name) && tableName.indexOf(name) <= 1 + ) if (foundTableName) { const aliasedTableName = tableName.replace( foundTableName, this.getAlias(foundTableName) ) - return `${aliasedTableName}.${column}` + field = `${aliasedTableName}.${column}` } } return field From 56b1855f6ad3174ea804d0698ded893a015c4f61 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 28 
Feb 2024 15:18:07 +0000 Subject: [PATCH 40/54] Adding test cases for table names in table names. --- .../server/src/api/controllers/row/alias.ts | 10 +- .../server/src/api/routes/tests/row.spec.ts | 3939 +++++++++-------- packages/server/src/integrations/base/sql.ts | 25 +- .../src/integrations/tests/sqlAlias.spec.ts | 11 + 4 files changed, 2008 insertions(+), 1977 deletions(-) diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index c27bed6a4b..9658a0d638 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -57,9 +57,13 @@ export default class AliasTables { const tableNames = this.tableNames if (field.includes(".")) { const [tableName, column] = field.split(".") - const foundTableName = tableNames.find( - name => tableName.includes(name) && tableName.indexOf(name) <= 1 - ) + const foundTableName = tableNames.find(name => { + const idx = tableName.indexOf(name) + if (idx === -1 || idx > 1) { + return + } + return Math.abs(tableName.length - name.length) <= 2 + }) if (foundTableName) { const aliasedTableName = tableName.replace( foundTableName, diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 239da36351..05c6b92bb0 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -39,907 +39,41 @@ tk.freeze(timestamp) const { basicRow } = setup.structures -describe.each([ - ["internal", undefined], - ["postgres", databaseTestProviders.postgres], -])("/rows (%s)", (__, dsProvider) => { - const isInternal = !dsProvider +describe.each([["postgres", databaseTestProviders.postgres]])( + "/rows (%s)", + (__, dsProvider) => { + const isInternal = !dsProvider - const request = setup.getRequest() - const config = setup.getConfig() - let table: Table - let tableId: string + const request = setup.getRequest() + const config = 
setup.getConfig() + let table: Table + let tableId: string - afterAll(async () => { - if (dsProvider) { - await dsProvider.stop() - } - setup.afterAll() - }) - - beforeAll(async () => { - await config.init() - - if (dsProvider) { - await config.createDatasource({ - datasource: await dsProvider.datasource(), - }) - } - }) - - const generateTableConfig: () => SaveTableRequest = () => { - return { - name: uuid.v4(), - type: "table", - primary: ["id"], - primaryDisplay: "name", - sourceType: TableSourceType.INTERNAL, - sourceId: INTERNAL_TABLE_SOURCE_ID, - schema: { - id: { - type: FieldType.AUTO, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, - }, - name: { - type: FieldType.STRING, - name: "name", - constraints: { - type: "string", - }, - }, - description: { - type: FieldType.STRING, - name: "description", - constraints: { - type: "string", - }, - }, - }, - } - } - - beforeEach(async () => { - mocks.licenses.useCloudFree() - }) - - const loadRow = (id: string, tbl_Id: string, status = 200) => - config.api.row.get(tbl_Id, id, { expectStatus: status }) - - const getRowUsage = async () => { - const { total } = await config.doInContext(null, () => - quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS) - ) - return total - } - - const assertRowUsage = async (expected: number) => { - const usage = await getRowUsage() - expect(usage).toBe(expected) - } - - const defaultRowFields = isInternal - ? 
{ - type: "row", - createdAt: timestamp, - updatedAt: timestamp, + afterAll(async () => { + if (dsProvider) { + await dsProvider.stop() } - : undefined - - async function createTable( - cfg: Omit, - opts?: { skipReassigning: boolean } - ) { - let table - if (dsProvider) { - table = await config.createExternalTable(cfg, opts) - } else { - table = await config.createTable(cfg, opts) - } - return table - } - - beforeAll(async () => { - const tableConfig = generateTableConfig() - let table = await createTable(tableConfig) - tableId = table._id! - }) - - describe("save, load, update", () => { - it("returns a success message when the row is created", async () => { - const rowUsage = await getRowUsage() - - const res = await request - .post(`/api/${tableId}/rows`) - .send(basicRow(tableId)) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect((res as any).res.statusMessage).toEqual( - `${config.table!.name} saved successfully` - ) - expect(res.body.name).toEqual("Test Contact") - expect(res.body._rev).toBeDefined() - await assertRowUsage(rowUsage + 1) + setup.afterAll() }) - it("Increment row autoId per create row request", async () => { - const rowUsage = await getRowUsage() + beforeAll(async () => { + await config.init() - const tableConfig = generateTableConfig() - const newTable = await createTable( - { - ...tableConfig, - name: "TestTableAuto", - schema: { - ...tableConfig.schema, - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, - constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, - }, - }, - }, - }, - { skipReassigning: true } - ) - - const ids = [1, 2, 3] - - // Performing several create row requests should increment the autoID fields accordingly - const createRow = async (id: number) => { - const res = await config.api.row.save(newTable._id!, { - name: "row_" + 
id, + if (dsProvider) { + await config.createDatasource({ + datasource: await dsProvider.datasource(), }) - expect(res.name).toEqual("row_" + id) - expect(res._rev).toBeDefined() - expect(res["Row ID"]).toEqual(id) } - - for (let i = 0; i < ids.length; i++) { - await createRow(ids[i]) - } - - await assertRowUsage(rowUsage + ids.length) }) - it("updates a row successfully", async () => { - const existing = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.row.save(tableId, { - _id: existing._id, - _rev: existing._rev, - tableId, - name: "Updated Name", - }) - - expect(res.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - it("should load a row", async () => { - const existing = await config.createRow() - - const res = await config.api.row.get(tableId, existing._id!) - - expect(res.body).toEqual({ - ...existing, - ...defaultRowFields, - }) - }) - - it("should list all rows for given tableId", async () => { - const table = await createTable(generateTableConfig(), { - skipReassigning: true, - }) - const tableId = table._id! 
- const newRow = { - tableId, - name: "Second Contact", - description: "new", - } - const firstRow = await config.createRow({ tableId }) - await config.createRow(newRow) - - const res = await config.api.row.fetch(tableId) - - expect(res.length).toBe(2) - expect(res.find((r: Row) => r.name === newRow.name)).toBeDefined() - expect(res.find((r: Row) => r.name === firstRow.name)).toBeDefined() - }) - - it("load should return 404 when row does not exist", async () => { - await config.createRow() - - await config.api.row.get(tableId, "1234567", { - expectStatus: 404, - }) - }) - - isInternal && - it("row values are coerced", async () => { - const str: FieldSchema = { - type: FieldType.STRING, - name: "str", - constraints: { type: "string", presence: false }, - } - const attachment: FieldSchema = { - type: FieldType.ATTACHMENT, - name: "attachment", - constraints: { type: "array", presence: false }, - } - const bool: FieldSchema = { - type: FieldType.BOOLEAN, - name: "boolean", - constraints: { type: "boolean", presence: false }, - } - const number: FieldSchema = { - type: FieldType.NUMBER, - name: "str", - constraints: { type: "number", presence: false }, - } - const datetime: FieldSchema = { - type: FieldType.DATETIME, - name: "datetime", - constraints: { - type: "string", - presence: false, - datetime: { earliest: "", latest: "" }, - }, - } - const arrayField: FieldSchema = { - type: FieldType.ARRAY, - constraints: { - type: "array", - presence: false, - inclusion: ["One", "Two", "Three"], - }, - name: "Sample Tags", - sortable: false, - } - const optsField: FieldSchema = { - name: "Sample Opts", - type: FieldType.OPTIONS, - constraints: { - type: "string", - presence: false, - inclusion: ["Alpha", "Beta", "Gamma"], - }, - } - const table = await createTable({ - name: "TestTable2", - type: "table", - schema: { - name: str, - stringUndefined: str, - stringNull: str, - stringString: str, - numberEmptyString: number, - numberNull: number, - numberUndefined: number, - 
numberString: number, - numberNumber: number, - datetimeEmptyString: datetime, - datetimeNull: datetime, - datetimeUndefined: datetime, - datetimeString: datetime, - datetimeDate: datetime, - boolNull: bool, - boolEmpty: bool, - boolUndefined: bool, - boolString: bool, - boolBool: bool, - attachmentNull: attachment, - attachmentUndefined: attachment, - attachmentEmpty: attachment, - attachmentEmptyArrayStr: attachment, - arrayFieldEmptyArrayStr: arrayField, - arrayFieldArrayStrKnown: arrayField, - arrayFieldNull: arrayField, - arrayFieldUndefined: arrayField, - optsFieldEmptyStr: optsField, - optsFieldUndefined: optsField, - optsFieldNull: optsField, - optsFieldStrKnown: optsField, - }, - }) - - const row = { - name: "Test Row", - stringUndefined: undefined, - stringNull: null, - stringString: "i am a string", - numberEmptyString: "", - numberNull: null, - numberUndefined: undefined, - numberString: "123", - numberNumber: 123, - datetimeEmptyString: "", - datetimeNull: null, - datetimeUndefined: undefined, - datetimeString: "1984-04-20T00:00:00.000Z", - datetimeDate: new Date("1984-04-20"), - boolNull: null, - boolEmpty: "", - boolUndefined: undefined, - boolString: "true", - boolBool: true, - tableId: table._id, - attachmentNull: null, - attachmentUndefined: undefined, - attachmentEmpty: "", - attachmentEmptyArrayStr: "[]", - arrayFieldEmptyArrayStr: "[]", - arrayFieldUndefined: undefined, - arrayFieldNull: null, - arrayFieldArrayStrKnown: "['One']", - optsFieldEmptyStr: "", - optsFieldUndefined: undefined, - optsFieldNull: null, - optsFieldStrKnown: "Alpha", - } - - const createdRow = await config.createRow(row) - const id = createdRow._id! 
- - const saved = (await loadRow(id, table._id!)).body - - expect(saved.stringUndefined).toBe(undefined) - expect(saved.stringNull).toBe(null) - expect(saved.stringString).toBe("i am a string") - expect(saved.numberEmptyString).toBe(null) - expect(saved.numberNull).toBe(null) - expect(saved.numberUndefined).toBe(undefined) - expect(saved.numberString).toBe(123) - expect(saved.numberNumber).toBe(123) - expect(saved.datetimeEmptyString).toBe(null) - expect(saved.datetimeNull).toBe(null) - expect(saved.datetimeUndefined).toBe(undefined) - expect(saved.datetimeString).toBe( - new Date(row.datetimeString).toISOString() - ) - expect(saved.datetimeDate).toBe(row.datetimeDate.toISOString()) - expect(saved.boolNull).toBe(null) - expect(saved.boolEmpty).toBe(null) - expect(saved.boolUndefined).toBe(undefined) - expect(saved.boolString).toBe(true) - expect(saved.boolBool).toBe(true) - expect(saved.attachmentNull).toEqual([]) - expect(saved.attachmentUndefined).toBe(undefined) - expect(saved.attachmentEmpty).toEqual([]) - expect(saved.attachmentEmptyArrayStr).toEqual([]) - expect(saved.arrayFieldEmptyArrayStr).toEqual([]) - expect(saved.arrayFieldNull).toEqual([]) - expect(saved.arrayFieldUndefined).toEqual(undefined) - expect(saved.optsFieldEmptyStr).toEqual(null) - expect(saved.optsFieldUndefined).toEqual(undefined) - expect(saved.optsFieldNull).toEqual(null) - expect(saved.arrayFieldArrayStrKnown).toEqual(["One"]) - expect(saved.optsFieldStrKnown).toEqual("Alpha") - }) - }) - - describe("view save", () => { - it("views have extra data trimmed", async () => { - const table = await createTable({ - type: "table", - name: "orders", - primary: ["OrderID"], - schema: { - Country: { - type: FieldType.STRING, - name: "Country", - }, - OrderID: { - type: FieldType.NUMBER, - name: "OrderID", - }, - Story: { - type: FieldType.STRING, - name: "Story", - }, - }, - }) - - const createViewResponse = await config.createView({ - name: uuid.v4(), - schema: { - Country: { - visible: true, - 
}, - OrderID: { - visible: true, - }, - }, - }) - - const createRowResponse = await config.api.row.save( - createViewResponse.id, - { - OrderID: "1111", - Country: "Aussy", - Story: "aaaaa", - } - ) - - const row = await config.api.row.get(table._id!, createRowResponse._id!) - expect(row.body.Story).toBeUndefined() - expect(row.body).toEqual({ - ...defaultRowFields, - OrderID: 1111, - Country: "Aussy", - _id: createRowResponse._id, - _rev: createRowResponse._rev, - tableId: table._id, - }) - }) - }) - - describe("patch", () => { - let otherTable: Table - - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - const otherTableConfig = generateTableConfig() - // need a short name of table here - for relationship tests - otherTableConfig.name = "a" - otherTableConfig.schema.relationship = { - name: "relationship", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: table._id!, - fieldName: "relationship", - } - otherTable = await createTable(otherTableConfig) - // need to set the config back to the original table - config.table = table - }) - - it("should update only the fields that are supplied", async () => { - const existing = await config.createRow() - - const rowUsage = await getRowUsage() - - const row = await config.api.row.patch(table._id!, { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: "Updated Name", - }) - - expect(row.name).toEqual("Updated Name") - expect(row.description).toEqual(existing.description) - - const savedRow = await loadRow(row._id!, table._id!) 
- - expect(savedRow.body.description).toEqual(existing.description) - expect(savedRow.body.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - it("should throw an error when given improper types", async () => { - const existing = await config.createRow() - const rowUsage = await getRowUsage() - - await config.api.row.patch( - table._id!, - { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: 1, - }, - { expectStatus: 400 } - ) - - await assertRowUsage(rowUsage) - }) - - it("should not overwrite links if those links are not set", async () => { - let linkField: FieldSchema = { - type: FieldType.LINK, - name: "", - fieldName: "", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: InternalTable.USER_METADATA, - } - - let table = await config.api.table.save({ - name: "TestTable", - type: "table", - sourceType: TableSourceType.INTERNAL, - sourceId: INTERNAL_TABLE_SOURCE_ID, - schema: { - user1: { ...linkField, name: "user1", fieldName: "user1" }, - user2: { ...linkField, name: "user2", fieldName: "user2" }, - }, - }) - - let user1 = await config.createUser() - let user2 = await config.createUser() - - let row = await config.api.row.save(table._id!, { - user1: [{ _id: user1._id }], - user2: [{ _id: user2._id }], - }) - - let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.body.user1[0]._id).toEqual(user1._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) - - let patchResp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: table._id!, - user1: [{ _id: user2._id }], - }) - expect(patchResp.user1[0]._id).toEqual(user2._id) - expect(patchResp.user2[0]._id).toEqual(user2._id) - - getResp = await config.api.row.get(table._id!, row._id!) 
- expect(getResp.body.user1[0]._id).toEqual(user2._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) - }) - - it("should be able to update relationships when both columns are same name", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - let row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - relationship: [row._id], - }) - row = (await config.api.row.get(table._id!, row._id!)).body - expect(row.relationship.length).toBe(1) - const resp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: row.tableId!, - name: "test2", - relationship: [row2._id], - }) - expect(resp.relationship.length).toBe(1) - }) - }) - - describe("destroy", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should be able to delete a row", async () => { - const createdRow = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, [createdRow]) - expect(res.body[0]._id).toEqual(createdRow._id) - await assertRowUsage(rowUsage - 1) - }) - }) - - describe("validate", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should return no errors on valid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: "ivan" }) - - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - await assertRowUsage(rowUsage) - }) - - it("should errors on invalid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: 1 }) - - if (isInternal) { - expect(res.valid).toBe(false) - expect(Object.keys(res.errors)).toEqual(["name"]) - } else { - // Validation for external is not implemented, so it will 
always return valid - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - } - await assertRowUsage(rowUsage) - }) - }) - - describe("bulkDelete", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should be able to delete a bulk set of rows", async () => { - const row1 = await config.createRow() - const row2 = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, [row1, row2]) - - expect(res.body.length).toEqual(2) - await loadRow(row1._id!, table._id!, 404) - await assertRowUsage(rowUsage - 2) - }) - - it("should be able to delete a variety of row set types", async () => { - const [row1, row2, row3] = await Promise.all([ - config.createRow(), - config.createRow(), - config.createRow(), - ]) - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, [ - row1, - row2._id, - { _id: row3._id }, - ]) - - expect(res.body.length).toEqual(3) - await loadRow(row1._id!, table._id!, 404) - await assertRowUsage(rowUsage - 3) - }) - - it("should accept a valid row object and delete the row", async () => { - const row1 = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, row1) - - expect(res.body.id).toEqual(row1._id) - await loadRow(row1._id!, table._id!, 404) - await assertRowUsage(rowUsage - 1) - }) - - it("Should ignore malformed/invalid delete requests", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete( - table._id!, - { not: "valid" }, - { expectStatus: 400 } - ) - expect(res.body.message).toEqual("Invalid delete rows request") - - const res2 = await config.api.row.delete( - table._id!, - { rows: 123 }, - { expectStatus: 400 } - ) - expect(res2.body.message).toEqual("Invalid delete rows request") - - const res3 = await config.api.row.delete(table._id!, 
"invalid", { - expectStatus: 400, - }) - expect(res3.body.message).toEqual("Invalid delete rows request") - - await assertRowUsage(rowUsage) - }) - }) - - // Legacy views are not available for external - isInternal && - describe("fetchView", () => { - beforeEach(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should be able to fetch tables contents via 'view'", async () => { - const row = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.legacyView.get(table._id!) - expect(res.body.length).toEqual(1) - expect(res.body[0]._id).toEqual(row._id) - await assertRowUsage(rowUsage) - }) - - it("should throw an error if view doesn't exist", async () => { - const rowUsage = await getRowUsage() - - await config.api.legacyView.get("derp", { expectStatus: 404 }) - - await assertRowUsage(rowUsage) - }) - - it("should be able to run on a view", async () => { - const view = await config.createLegacyView({ - tableId: table._id!, - name: "ViewTest", - filters: [], - schema: {}, - }) - const row = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.legacyView.get(view.name) - expect(res.body.length).toEqual(1) - expect(res.body[0]._id).toEqual(row._id) - - await assertRowUsage(rowUsage) - }) - }) - - describe("fetchEnrichedRows", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should allow enriching some linked rows", async () => { - const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( - config.getTenantId(), - async () => { - const linkedTable = await config.createLinkedTable( - RelationshipType.ONE_TO_MANY, - ["link"], - { - // Making sure that the combined table name + column name is within postgres limits - name: uuid.v4().replace(/-/g, "").substring(0, 16), - type: "table", - primary: ["id"], - primaryDisplay: "id", - 
schema: { - id: { - type: FieldType.AUTO, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, - }, - }, - } - ) - const firstRow = await config.createRow({ - name: "Test Contact", - description: "original description", - tableId: table._id, - }) - const secondRow = await config.createRow({ - name: "Test 2", - description: "og desc", - link: [{ _id: firstRow._id }], - tableId: linkedTable._id, - }) - return { linkedTable, firstRow, secondRow } - } - ) - const rowUsage = await getRowUsage() - - // test basic enrichment - const resBasic = await config.api.row.get( - linkedTable._id!, - secondRow._id! - ) - expect(resBasic.body.link.length).toBe(1) - expect(resBasic.body.link[0]).toEqual({ - _id: firstRow._id, - primaryDisplay: firstRow.name, - }) - - // test full enrichment - const resEnriched = await config.api.row.getEnriched( - linkedTable._id!, - secondRow._id! - ) - expect(resEnriched.body.link.length).toBe(1) - expect(resEnriched.body.link[0]._id).toBe(firstRow._id) - expect(resEnriched.body.link[0].name).toBe("Test Contact") - expect(resEnriched.body.link[0].description).toBe("original description") - await assertRowUsage(rowUsage) - }) - }) - - isInternal && - describe("attachments", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should allow enriching attachment rows", async () => { - const table = await config.createAttachmentTable() - const attachmentId = `${structures.uuid()}.csv` - const row = await config.createRow({ - name: "test", - description: "test", - attachment: [ - { - key: `${config.getAppId()}/attachments/${attachmentId}`, - }, - ], - tableId: table._id, - }) - // the environment needs configured for this - await setup.switchToSelfHosted(async () => { - return context.doInAppContext(config.getAppId(), async () => { - const enriched = await outputProcessing(table, [row]) - expect((enriched as Row[])[0].attachment[0].url).toBe( - 
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` - ) - }) - }) - }) - }) - - describe("exportData", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should allow exporting all columns", async () => { - const existing = await config.createRow() - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - }) - const results = JSON.parse(res.text) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure all original columns were exported - expect(Object.keys(row).length).toBeGreaterThanOrEqual( - Object.keys(existing).length - ) - Object.keys(existing).forEach(key => { - expect(row[key]).toEqual(existing[key]) - }) - }) - - it("should allow exporting only certain columns", async () => { - const existing = await config.createRow() - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - columns: ["_id"], - }) - const results = JSON.parse(res.text) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure only the _id column was exported - expect(Object.keys(row).length).toEqual(1) - expect(row._id).toEqual(existing._id) - }) - }) - - describe("view 2.0", () => { - async function userTable(): Promise { + const generateTableConfig: () => SaveTableRequest = () => { return { - name: `users_${uuid.v4()}`, - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, + name: uuid.v4(), type: "table", primary: ["id"], + primaryDisplay: "name", + sourceType: TableSourceType.INTERNAL, + sourceId: INTERNAL_TABLE_SOURCE_ID, schema: { id: { type: FieldType.AUTO, @@ -952,167 +86,857 @@ describe.each([ name: { type: FieldType.STRING, name: "name", + constraints: { + type: "string", + }, }, - surname: { + description: { type: FieldType.STRING, - name: "surname", - }, - age: { - type: FieldType.NUMBER, - name: "age", - }, - address: { - type: 
FieldType.STRING, - name: "address", - }, - jobTitle: { - type: FieldType.STRING, - name: "jobTitle", + name: "description", + constraints: { + type: "string", + }, }, }, } } - const randomRowData = () => ({ - name: generator.first(), - surname: generator.last(), - age: generator.age(), - address: generator.address(), - jobTitle: generator.word(), + beforeEach(async () => { + mocks.licenses.useCloudFree() }) - describe("create", () => { - it("should persist a new row with only the provided view fields", async () => { - const table = await createTable(await userTable()) - const view = await config.createView({ - schema: { - name: { visible: true }, - surname: { visible: true }, - address: { visible: true }, - }, - }) + const loadRow = (id: string, tbl_Id: string, status = 200) => + config.api.row.get(tbl_Id, id, { expectStatus: status }) - const data = randomRowData() - const newRow = await config.api.row.save(view.id, { - tableId: table!._id, - _viewId: view.id, - ...data, - }) + const getRowUsage = async () => { + const { total } = await config.doInContext(null, () => + quotas.getCurrentUsageValues( + QuotaUsageType.STATIC, + StaticQuotaName.ROWS + ) + ) + return total + } - const row = await config.api.row.get(table._id!, newRow._id!) - expect(row.body).toEqual({ - name: data.name, - surname: data.surname, - address: data.address, - tableId: table!._id, - _id: newRow._id, - _rev: newRow._rev, - id: newRow.id, - ...defaultRowFields, - }) - expect(row.body._viewId).toBeUndefined() - expect(row.body.age).toBeUndefined() - expect(row.body.jobTitle).toBeUndefined() - }) + const assertRowUsage = async (expected: number) => { + const usage = await getRowUsage() + expect(usage).toBe(expected) + } + + const defaultRowFields = isInternal + ? 
{ + type: "row", + createdAt: timestamp, + updatedAt: timestamp, + } + : undefined + + async function createTable( + cfg: Omit, + opts?: { skipReassigning: boolean } + ) { + let table + if (dsProvider) { + table = await config.createExternalTable(cfg, opts) + } else { + table = await config.createTable(cfg, opts) + } + return table + } + + beforeAll(async () => { + const tableConfig = generateTableConfig() + let table = await createTable(tableConfig) + tableId = table._id! }) - describe("patch", () => { - it("should update only the view fields for a row", async () => { - const table = await createTable(await userTable()) - const tableId = table._id! - const view = await config.createView({ - schema: { - name: { visible: true }, - address: { visible: true }, - }, - }) - - const newRow = await config.api.row.save(view.id, { - tableId, - _viewId: view.id, - ...randomRowData(), - }) - const newData = randomRowData() - await config.api.row.patch(view.id, { - tableId, - _viewId: view.id, - _id: newRow._id!, - _rev: newRow._rev!, - ...newData, - }) - - const row = await config.api.row.get(tableId, newRow._id!) - expect(row.body).toEqual({ - ...newRow, - name: newData.name, - address: newData.address, - _id: newRow._id, - _rev: expect.any(String), - id: newRow.id, - ...defaultRowFields, - }) - expect(row.body._viewId).toBeUndefined() - expect(row.body.age).toBeUndefined() - expect(row.body.jobTitle).toBeUndefined() - }) - }) - - describe("destroy", () => { - it("should be able to delete a row", async () => { - const table = await createTable(await userTable()) - const tableId = table._id! 
- const view = await config.createView({ - schema: { - name: { visible: true }, - address: { visible: true }, - }, - }) - - const createdRow = await config.createRow() + describe("save, load, update", () => { + it("returns a success message when the row is created", async () => { const rowUsage = await getRowUsage() - await config.api.row.delete(view.id, [createdRow]) + const res = await request + .post(`/api/${tableId}/rows`) + .send(basicRow(tableId)) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + expect((res as any).res.statusMessage).toEqual( + `${config.table!.name} saved successfully` + ) + expect(res.body.name).toEqual("Test Contact") + expect(res.body._rev).toBeDefined() + await assertRowUsage(rowUsage + 1) + }) - await assertRowUsage(rowUsage - 1) + it("Increment row autoId per create row request", async () => { + const rowUsage = await getRowUsage() - await config.api.row.get(tableId, createdRow._id!, { + const tableConfig = generateTableConfig() + const newTable = await createTable( + { + ...tableConfig, + name: "TestTableAuto", + schema: { + ...tableConfig.schema, + "Row ID": { + name: "Row ID", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, + }, + }, + }, + { skipReassigning: true } + ) + + const ids = [1, 2, 3] + + // Performing several create row requests should increment the autoID fields accordingly + const createRow = async (id: number) => { + const res = await config.api.row.save(newTable._id!, { + name: "row_" + id, + }) + expect(res.name).toEqual("row_" + id) + expect(res._rev).toBeDefined() + expect(res["Row ID"]).toEqual(id) + } + + for (let i = 0; i < ids.length; i++) { + await createRow(ids[i]) + } + + await assertRowUsage(rowUsage + ids.length) + }) + + it("updates a row successfully", async () => { + const 
existing = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.save(tableId, { + _id: existing._id, + _rev: existing._rev, + tableId, + name: "Updated Name", + }) + + expect(res.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + it("should load a row", async () => { + const existing = await config.createRow() + + const res = await config.api.row.get(tableId, existing._id!) + + expect(res.body).toEqual({ + ...existing, + ...defaultRowFields, + }) + }) + + it("should list all rows for given tableId", async () => { + const table = await createTable(generateTableConfig(), { + skipReassigning: true, + }) + const tableId = table._id! + const newRow = { + tableId, + name: "Second Contact", + description: "new", + } + const firstRow = await config.createRow({ tableId }) + await config.createRow(newRow) + + const res = await config.api.row.fetch(tableId) + + expect(res.length).toBe(2) + expect(res.find((r: Row) => r.name === newRow.name)).toBeDefined() + expect(res.find((r: Row) => r.name === firstRow.name)).toBeDefined() + }) + + it("load should return 404 when row does not exist", async () => { + await config.createRow() + + await config.api.row.get(tableId, "1234567", { expectStatus: 404, }) }) - it("should be able to delete multiple rows", async () => { - const table = await createTable(await userTable()) - const tableId = table._id! 
- const view = await config.createView({ + isInternal && + it("row values are coerced", async () => { + const str: FieldSchema = { + type: FieldType.STRING, + name: "str", + constraints: { type: "string", presence: false }, + } + const attachment: FieldSchema = { + type: FieldType.ATTACHMENT, + name: "attachment", + constraints: { type: "array", presence: false }, + } + const bool: FieldSchema = { + type: FieldType.BOOLEAN, + name: "boolean", + constraints: { type: "boolean", presence: false }, + } + const number: FieldSchema = { + type: FieldType.NUMBER, + name: "str", + constraints: { type: "number", presence: false }, + } + const datetime: FieldSchema = { + type: FieldType.DATETIME, + name: "datetime", + constraints: { + type: "string", + presence: false, + datetime: { earliest: "", latest: "" }, + }, + } + const arrayField: FieldSchema = { + type: FieldType.ARRAY, + constraints: { + type: "array", + presence: false, + inclusion: ["One", "Two", "Three"], + }, + name: "Sample Tags", + sortable: false, + } + const optsField: FieldSchema = { + name: "Sample Opts", + type: FieldType.OPTIONS, + constraints: { + type: "string", + presence: false, + inclusion: ["Alpha", "Beta", "Gamma"], + }, + } + const table = await createTable({ + name: "TestTable2", + type: "table", + schema: { + name: str, + stringUndefined: str, + stringNull: str, + stringString: str, + numberEmptyString: number, + numberNull: number, + numberUndefined: number, + numberString: number, + numberNumber: number, + datetimeEmptyString: datetime, + datetimeNull: datetime, + datetimeUndefined: datetime, + datetimeString: datetime, + datetimeDate: datetime, + boolNull: bool, + boolEmpty: bool, + boolUndefined: bool, + boolString: bool, + boolBool: bool, + attachmentNull: attachment, + attachmentUndefined: attachment, + attachmentEmpty: attachment, + attachmentEmptyArrayStr: attachment, + arrayFieldEmptyArrayStr: arrayField, + arrayFieldArrayStrKnown: arrayField, + arrayFieldNull: arrayField, + 
arrayFieldUndefined: arrayField, + optsFieldEmptyStr: optsField, + optsFieldUndefined: optsField, + optsFieldNull: optsField, + optsFieldStrKnown: optsField, + }, + }) + + const row = { + name: "Test Row", + stringUndefined: undefined, + stringNull: null, + stringString: "i am a string", + numberEmptyString: "", + numberNull: null, + numberUndefined: undefined, + numberString: "123", + numberNumber: 123, + datetimeEmptyString: "", + datetimeNull: null, + datetimeUndefined: undefined, + datetimeString: "1984-04-20T00:00:00.000Z", + datetimeDate: new Date("1984-04-20"), + boolNull: null, + boolEmpty: "", + boolUndefined: undefined, + boolString: "true", + boolBool: true, + tableId: table._id, + attachmentNull: null, + attachmentUndefined: undefined, + attachmentEmpty: "", + attachmentEmptyArrayStr: "[]", + arrayFieldEmptyArrayStr: "[]", + arrayFieldUndefined: undefined, + arrayFieldNull: null, + arrayFieldArrayStrKnown: "['One']", + optsFieldEmptyStr: "", + optsFieldUndefined: undefined, + optsFieldNull: null, + optsFieldStrKnown: "Alpha", + } + + const createdRow = await config.createRow(row) + const id = createdRow._id! 
+ + const saved = (await loadRow(id, table._id!)).body + + expect(saved.stringUndefined).toBe(undefined) + expect(saved.stringNull).toBe(null) + expect(saved.stringString).toBe("i am a string") + expect(saved.numberEmptyString).toBe(null) + expect(saved.numberNull).toBe(null) + expect(saved.numberUndefined).toBe(undefined) + expect(saved.numberString).toBe(123) + expect(saved.numberNumber).toBe(123) + expect(saved.datetimeEmptyString).toBe(null) + expect(saved.datetimeNull).toBe(null) + expect(saved.datetimeUndefined).toBe(undefined) + expect(saved.datetimeString).toBe( + new Date(row.datetimeString).toISOString() + ) + expect(saved.datetimeDate).toBe(row.datetimeDate.toISOString()) + expect(saved.boolNull).toBe(null) + expect(saved.boolEmpty).toBe(null) + expect(saved.boolUndefined).toBe(undefined) + expect(saved.boolString).toBe(true) + expect(saved.boolBool).toBe(true) + expect(saved.attachmentNull).toEqual([]) + expect(saved.attachmentUndefined).toBe(undefined) + expect(saved.attachmentEmpty).toEqual([]) + expect(saved.attachmentEmptyArrayStr).toEqual([]) + expect(saved.arrayFieldEmptyArrayStr).toEqual([]) + expect(saved.arrayFieldNull).toEqual([]) + expect(saved.arrayFieldUndefined).toEqual(undefined) + expect(saved.optsFieldEmptyStr).toEqual(null) + expect(saved.optsFieldUndefined).toEqual(undefined) + expect(saved.optsFieldNull).toEqual(null) + expect(saved.arrayFieldArrayStrKnown).toEqual(["One"]) + expect(saved.optsFieldStrKnown).toEqual("Alpha") + }) + }) + + describe("view save", () => { + it("views have extra data trimmed", async () => { + const table = await createTable({ + type: "table", + name: "orders", + primary: ["OrderID"], schema: { - name: { visible: true }, - address: { visible: true }, + Country: { + type: FieldType.STRING, + name: "Country", + }, + OrderID: { + type: FieldType.NUMBER, + name: "OrderID", + }, + Story: { + type: FieldType.STRING, + name: "Story", + }, }, }) - const rows = await Promise.all([ + const createViewResponse = await 
config.createView({ + name: uuid.v4(), + schema: { + Country: { + visible: true, + }, + OrderID: { + visible: true, + }, + }, + }) + + const createRowResponse = await config.api.row.save( + createViewResponse.id, + { + OrderID: "1111", + Country: "Aussy", + Story: "aaaaa", + } + ) + + const row = await config.api.row.get(table._id!, createRowResponse._id!) + expect(row.body.Story).toBeUndefined() + expect(row.body).toEqual({ + ...defaultRowFields, + OrderID: 1111, + Country: "Aussy", + _id: createRowResponse._id, + _rev: createRowResponse._rev, + tableId: table._id, + }) + }) + }) + + describe("patch", () => { + let otherTable: Table + + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + const otherTableConfig = generateTableConfig() + // need a short name of table here - for relationship tests + otherTableConfig.name = "a" + otherTableConfig.schema.relationship = { + name: "relationship", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: table._id!, + fieldName: "relationship", + } + otherTable = await createTable(otherTableConfig) + // need to set the config back to the original table + config.table = table + }) + + it("should update only the fields that are supplied", async () => { + const existing = await config.createRow() + + const rowUsage = await getRowUsage() + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: "Updated Name", + }) + + expect(row.name).toEqual("Updated Name") + expect(row.description).toEqual(existing.description) + + const savedRow = await loadRow(row._id!, table._id!) 
+ + expect(savedRow.body.description).toEqual(existing.description) + expect(savedRow.body.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + it("should throw an error when given improper types", async () => { + const existing = await config.createRow() + const rowUsage = await getRowUsage() + + await config.api.row.patch( + table._id!, + { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: 1, + }, + { expectStatus: 400 } + ) + + await assertRowUsage(rowUsage) + }) + + it("should not overwrite links if those links are not set", async () => { + let linkField: FieldSchema = { + type: FieldType.LINK, + name: "", + fieldName: "", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: InternalTable.USER_METADATA, + } + + let table = await config.api.table.save({ + name: "TestTable", + type: "table", + sourceType: TableSourceType.INTERNAL, + sourceId: INTERNAL_TABLE_SOURCE_ID, + schema: { + user1: { ...linkField, name: "user1", fieldName: "user1" }, + user2: { ...linkField, name: "user2", fieldName: "user2" }, + }, + }) + + let user1 = await config.createUser() + let user2 = await config.createUser() + + let row = await config.api.row.save(table._id!, { + user1: [{ _id: user1._id }], + user2: [{ _id: user2._id }], + }) + + let getResp = await config.api.row.get(table._id!, row._id!) + expect(getResp.body.user1[0]._id).toEqual(user1._id) + expect(getResp.body.user2[0]._id).toEqual(user2._id) + + let patchResp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: table._id!, + user1: [{ _id: user2._id }], + }) + expect(patchResp.user1[0]._id).toEqual(user2._id) + expect(patchResp.user2[0]._id).toEqual(user2._id) + + getResp = await config.api.row.get(table._id!, row._id!) 
+ expect(getResp.body.user1[0]._id).toEqual(user2._id) + expect(getResp.body.user2[0]._id).toEqual(user2._id) + }) + + it("should be able to update relationships when both columns are same name", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + let row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + relationship: [row._id], + }) + row = (await config.api.row.get(table._id!, row._id!)).body + expect(row.relationship.length).toBe(1) + const resp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: row.tableId!, + name: "test2", + relationship: [row2._id], + }) + expect(resp.relationship.length).toBe(1) + }) + }) + + describe("destroy", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should be able to delete a row", async () => { + const createdRow = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, [createdRow]) + expect(res.body[0]._id).toEqual(createdRow._id) + await assertRowUsage(rowUsage - 1) + }) + }) + + describe("validate", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should return no errors on valid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { name: "ivan" }) + + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + await assertRowUsage(rowUsage) + }) + + it("should errors on invalid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { name: 1 }) + + if (isInternal) { + expect(res.valid).toBe(false) + expect(Object.keys(res.errors)).toEqual(["name"]) + } else { + // Validation for external is not implemented, so it will 
always return valid + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + } + await assertRowUsage(rowUsage) + }) + }) + + describe("bulkDelete", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should be able to delete a bulk set of rows", async () => { + const row1 = await config.createRow() + const row2 = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, [row1, row2]) + + expect(res.body.length).toEqual(2) + await loadRow(row1._id!, table._id!, 404) + await assertRowUsage(rowUsage - 2) + }) + + it("should be able to delete a variety of row set types", async () => { + const [row1, row2, row3] = await Promise.all([ config.createRow(), config.createRow(), config.createRow(), ]) const rowUsage = await getRowUsage() - await config.api.row.delete(view.id, [rows[0], rows[2]]) + const res = await config.api.row.delete(table._id!, [ + row1, + row2._id, + { _id: row3._id }, + ]) - await assertRowUsage(rowUsage - 2) + expect(res.body.length).toEqual(3) + await loadRow(row1._id!, table._id!, 404) + await assertRowUsage(rowUsage - 3) + }) - await config.api.row.get(tableId, rows[0]._id!, { - expectStatus: 404, + it("should accept a valid row object and delete the row", async () => { + const row1 = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, row1) + + expect(res.body.id).toEqual(row1._id) + await loadRow(row1._id!, table._id!, 404) + await assertRowUsage(rowUsage - 1) + }) + + it("Should ignore malformed/invalid delete requests", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete( + table._id!, + { not: "valid" }, + { expectStatus: 400 } + ) + expect(res.body.message).toEqual("Invalid delete rows request") + + const res2 = await config.api.row.delete( + table._id!, + { rows: 123 }, 
+ { expectStatus: 400 } + ) + expect(res2.body.message).toEqual("Invalid delete rows request") + + const res3 = await config.api.row.delete(table._id!, "invalid", { + expectStatus: 400, }) - await config.api.row.get(tableId, rows[2]._id!, { - expectStatus: 404, - }) - await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) + expect(res3.body.message).toEqual("Invalid delete rows request") + + await assertRowUsage(rowUsage) }) }) - describe("view search", () => { - const viewSchema = { age: { visible: true }, name: { visible: true } } + // Legacy views are not available for external + isInternal && + describe("fetchView", () => { + beforeEach(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should be able to fetch tables contents via 'view'", async () => { + const row = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.legacyView.get(table._id!) + expect(res.body.length).toEqual(1) + expect(res.body[0]._id).toEqual(row._id) + await assertRowUsage(rowUsage) + }) + + it("should throw an error if view doesn't exist", async () => { + const rowUsage = await getRowUsage() + + await config.api.legacyView.get("derp", { expectStatus: 404 }) + + await assertRowUsage(rowUsage) + }) + + it("should be able to run on a view", async () => { + const view = await config.createLegacyView({ + tableId: table._id!, + name: "ViewTest", + filters: [], + schema: {}, + }) + const row = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.legacyView.get(view.name) + expect(res.body.length).toEqual(1) + expect(res.body[0]._id).toEqual(row._id) + + await assertRowUsage(rowUsage) + }) + }) + + describe("fetchEnrichedRows", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should allow enriching some linked rows", async () => { + const { 
linkedTable, firstRow, secondRow } = await tenancy.doInTenant( + config.getTenantId(), + async () => { + const linkedTable = await config.createLinkedTable( + RelationshipType.ONE_TO_MANY, + ["link"], + { + // Making sure that the combined table name + column name is within postgres limits + name: uuid.v4().replace(/-/g, "").substring(0, 16), + type: "table", + primary: ["id"], + primaryDisplay: "id", + schema: { + id: { + type: FieldType.AUTO, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, + }, + }, + } + ) + const firstRow = await config.createRow({ + name: "Test Contact", + description: "original description", + tableId: table._id, + }) + const secondRow = await config.createRow({ + name: "Test 2", + description: "og desc", + link: [{ _id: firstRow._id }], + tableId: linkedTable._id, + }) + return { linkedTable, firstRow, secondRow } + } + ) + const rowUsage = await getRowUsage() + + // test basic enrichment + const resBasic = await config.api.row.get( + linkedTable._id!, + secondRow._id! + ) + expect(resBasic.body.link.length).toBe(1) + expect(resBasic.body.link[0]).toEqual({ + _id: firstRow._id, + primaryDisplay: firstRow.name, + }) + + // test full enrichment + const resEnriched = await config.api.row.getEnriched( + linkedTable._id!, + secondRow._id! 
+ ) + expect(resEnriched.body.link.length).toBe(1) + expect(resEnriched.body.link[0]._id).toBe(firstRow._id) + expect(resEnriched.body.link[0].name).toBe("Test Contact") + expect(resEnriched.body.link[0].description).toBe( + "original description" + ) + await assertRowUsage(rowUsage) + }) + }) + + isInternal && + describe("attachments", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should allow enriching attachment rows", async () => { + const table = await config.createAttachmentTable() + const attachmentId = `${structures.uuid()}.csv` + const row = await config.createRow({ + name: "test", + description: "test", + attachment: [ + { + key: `${config.getAppId()}/attachments/${attachmentId}`, + }, + ], + tableId: table._id, + }) + // the environment needs configured for this + await setup.switchToSelfHosted(async () => { + return context.doInAppContext(config.getAppId(), async () => { + const enriched = await outputProcessing(table, [row]) + expect((enriched as Row[])[0].attachment[0].url).toBe( + `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` + ) + }) + }) + }) + }) + + describe("exportData", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should allow exporting all columns", async () => { + const existing = await config.createRow() + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res.text) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure all original columns were exported + expect(Object.keys(row).length).toBeGreaterThanOrEqual( + Object.keys(existing).length + ) + Object.keys(existing).forEach(key => { + expect(row[key]).toEqual(existing[key]) + }) + }) + + it("should allow exporting only certain columns", async () => { + const existing = await 
config.createRow() + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + columns: ["_id"], + }) + const results = JSON.parse(res.text) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure only the _id column was exported + expect(Object.keys(row).length).toEqual(1) + expect(row._id).toEqual(existing._id) + }) + }) + + describe("view 2.0", () => { async function userTable(): Promise
{ return { name: `users_${uuid.v4()}`, @@ -1132,949 +956,1098 @@ describe.each([ name: { type: FieldType.STRING, name: "name", - constraints: { type: "string" }, + }, + surname: { + type: FieldType.STRING, + name: "surname", }, age: { type: FieldType.NUMBER, name: "age", - constraints: {}, + }, + address: { + type: FieldType.STRING, + name: "address", + }, + jobTitle: { + type: FieldType.STRING, + name: "jobTitle", }, }, } } - it("returns empty rows from view when no schema is passed", async () => { - const table = await createTable(await userTable()) - const rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { tableId: table._id }) - ) - ) - - const createViewResponse = await config.createView() - const response = await config.api.viewV2.search(createViewResponse.id) - - expect(response.body.rows).toHaveLength(10) - expect(response.body).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - _viewId: createViewResponse.id, - tableId: table._id, - _id: r._id, - _rev: r._rev, - ...defaultRowFields, - })) - ), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - bookmark: null, - }), - }) + const randomRowData = () => ({ + name: generator.first(), + surname: generator.last(), + age: generator.age(), + address: generator.address(), + jobTitle: generator.word(), }) - it("searching respects the view filters", async () => { - const table = await createTable(await userTable()) - - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { - tableId: table._id, - name: generator.name(), - age: generator.integer({ min: 10, max: 30 }), - }) - ) - ) - - const expectedRows = await Promise.all( - Array.from({ length: 5 }, () => - config.api.row.save(table._id!, { - tableId: table._id, - name: generator.name(), - age: 40, - }) - ) - ) - - const createViewResponse = await config.createView({ - query: [ - { operator: SearchQueryOperators.EQUAL, field: "age", value: 40 }, - ], - schema: viewSchema, - }) - - const response = await config.api.viewV2.search(createViewResponse.id) - - expect(response.body.rows).toHaveLength(5) - expect(response.body).toEqual({ - rows: expect.arrayContaining( - expectedRows.map(r => ({ - _viewId: createViewResponse.id, - tableId: table._id, - name: r.name, - age: r.age, - _id: r._id, - _rev: r._rev, - ...defaultRowFields, - })) - ), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - bookmark: null, - }), - }) - }) - - const sortTestOptions: [ - { - field: string - order?: SortOrder - type?: SortType - }, - string[] - ][] = [ - [ - { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - type: SortType.STRING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - type: SortType.number, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - type: SortType.number, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - ] - - describe("sorting", () => { - beforeAll(async () => { + describe("create", () => { + it("should persist a new row with only the provided view fields", async () => { const table = await createTable(await userTable()) - const users = [ - { name: "Alice", age: 25 }, - { name: "Bob", age: 30 }, - { name: "Charly", age: 27 }, - { name: "Danny", age: 15 }, - ] + const view = await config.createView({ + schema: { + name: { visible: true }, + surname: { visible: true }, + address: { visible: true }, + }, + }) + + const data = randomRowData() + const newRow = await config.api.row.save(view.id, { + tableId: table!._id, + _viewId: view.id, + ...data, + }) + + const row = await config.api.row.get(table._id!, newRow._id!) 
+ expect(row.body).toEqual({ + name: data.name, + surname: data.surname, + address: data.address, + tableId: table!._id, + _id: newRow._id, + _rev: newRow._rev, + id: newRow.id, + ...defaultRowFields, + }) + expect(row.body._viewId).toBeUndefined() + expect(row.body.age).toBeUndefined() + expect(row.body.jobTitle).toBeUndefined() + }) + }) + + describe("patch", () => { + it("should update only the view fields for a row", async () => { + const table = await createTable(await userTable()) + const tableId = table._id! + const view = await config.createView({ + schema: { + name: { visible: true }, + address: { visible: true }, + }, + }) + + const newRow = await config.api.row.save(view.id, { + tableId, + _viewId: view.id, + ...randomRowData(), + }) + const newData = randomRowData() + await config.api.row.patch(view.id, { + tableId, + _viewId: view.id, + _id: newRow._id!, + _rev: newRow._rev!, + ...newData, + }) + + const row = await config.api.row.get(tableId, newRow._id!) + expect(row.body).toEqual({ + ...newRow, + name: newData.name, + address: newData.address, + _id: newRow._id, + _rev: expect.any(String), + id: newRow.id, + ...defaultRowFields, + }) + expect(row.body._viewId).toBeUndefined() + expect(row.body.age).toBeUndefined() + expect(row.body.jobTitle).toBeUndefined() + }) + }) + + describe("destroy", () => { + it("should be able to delete a row", async () => { + const table = await createTable(await userTable()) + const tableId = table._id! 
+ const view = await config.createView({ + schema: { + name: { visible: true }, + address: { visible: true }, + }, + }) + + const createdRow = await config.createRow() + const rowUsage = await getRowUsage() + + await config.api.row.delete(view.id, [createdRow]) + + await assertRowUsage(rowUsage - 1) + + await config.api.row.get(tableId, createdRow._id!, { + expectStatus: 404, + }) + }) + + it("should be able to delete multiple rows", async () => { + const table = await createTable(await userTable()) + const tableId = table._id! + const view = await config.createView({ + schema: { + name: { visible: true }, + address: { visible: true }, + }, + }) + + const rows = await Promise.all([ + config.createRow(), + config.createRow(), + config.createRow(), + ]) + const rowUsage = await getRowUsage() + + await config.api.row.delete(view.id, [rows[0], rows[2]]) + + await assertRowUsage(rowUsage - 2) + + await config.api.row.get(tableId, rows[0]._id!, { + expectStatus: 404, + }) + await config.api.row.get(tableId, rows[2]._id!, { + expectStatus: 404, + }) + await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) + }) + }) + + describe("view search", () => { + const viewSchema = { age: { visible: true }, name: { visible: true } } + async function userTable(): Promise
{ + return { + name: `users_${uuid.v4()}`, + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + type: "table", + primary: ["id"], + schema: { + id: { + type: FieldType.AUTO, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, + }, + name: { + type: FieldType.STRING, + name: "name", + constraints: { type: "string" }, + }, + age: { + type: FieldType.NUMBER, + name: "age", + constraints: {}, + }, + }, + } + } + + it("returns empty rows from view when no schema is passed", async () => { + const table = await createTable(await userTable()) + const rows = await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, { tableId: table._id }) + ) + ) + + const createViewResponse = await config.createView() + const response = await config.api.viewV2.search(createViewResponse.id) + + expect(response.body.rows).toHaveLength(10) + expect(response.body).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + _viewId: createViewResponse.id, + tableId: table._id, + _id: r._id, + _rev: r._rev, + ...defaultRowFields, + })) + ), + ...(isInternal + ? 
{} + : { + hasNextPage: false, + bookmark: null, + }), + }) + }) + + it("searching respects the view filters", async () => { + const table = await createTable(await userTable()) + await Promise.all( - users.map(u => + Array.from({ length: 10 }, () => config.api.row.save(table._id!, { tableId: table._id, - ...u, + name: generator.name(), + age: generator.integer({ min: 10, max: 30 }), }) ) ) - }) - it.each(sortTestOptions)( - "allow sorting (%s)", - async (sortParams, expected) => { - const createViewResponse = await config.createView({ - sort: sortParams, - schema: viewSchema, - }) - - const response = await config.api.viewV2.search( - createViewResponse.id + const expectedRows = await Promise.all( + Array.from({ length: 5 }, () => + config.api.row.save(table._id!, { + tableId: table._id, + name: generator.name(), + age: 40, + }) ) - - expect(response.body.rows).toHaveLength(4) - expect(response.body.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) - } - ) - - it.each(sortTestOptions)( - "allow override the default view sorting (%s)", - async (sortParams, expected) => { - const createViewResponse = await config.createView({ - sort: { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, - }, - schema: viewSchema, - }) - - const response = await config.api.viewV2.search( - createViewResponse.id, - { - sort: sortParams.field, - sortOrder: sortParams.order, - sortType: sortParams.type, - query: {}, - } - ) - - expect(response.body.rows).toHaveLength(4) - expect(response.body.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) - } - ) - }) - - it("when schema is defined, defined columns and row attributes are returned", async () => { - const table = await createTable(await userTable()) - const rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { - tableId: table._id, - name: generator.name(), - age: generator.age(), - }) ) - ) - const view = await 
config.createView({ - schema: { name: { visible: true } }, - }) - const response = await config.api.viewV2.search(view.id) + const createViewResponse = await config.createView({ + query: [ + { operator: SearchQueryOperators.EQUAL, field: "age", value: 40 }, + ], + schema: viewSchema, + }) - expect(response.body.rows).toHaveLength(10) - expect(response.body.rows).toEqual( - expect.arrayContaining( - rows.map(r => ({ - ...(isInternal - ? expectAnyInternalColsAttributes - : expectAnyExternalColsAttributes), - _viewId: view.id, - name: r.name, - })) - ) - ) - }) + const response = await config.api.viewV2.search(createViewResponse.id) - it("views without data can be returned", async () => { - const table = await createTable(await userTable()) - - const createViewResponse = await config.createView() - const response = await config.api.viewV2.search(createViewResponse.id) - - expect(response.body.rows).toHaveLength(0) - }) - - it("respects the limit parameter", async () => { - await createTable(await userTable()) - await Promise.all(Array.from({ length: 10 }, () => config.createRow())) - - const limit = generator.integer({ min: 1, max: 8 }) - - const createViewResponse = await config.createView() - const response = await config.api.viewV2.search(createViewResponse.id, { - limit, - query: {}, + expect(response.body.rows).toHaveLength(5) + expect(response.body).toEqual({ + rows: expect.arrayContaining( + expectedRows.map(r => ({ + _viewId: createViewResponse.id, + tableId: table._id, + name: r.name, + age: r.age, + _id: r._id, + _rev: r._rev, + ...defaultRowFields, + })) + ), + ...(isInternal + ? 
{} + : { + hasNextPage: false, + bookmark: null, + }), + }) }) - expect(response.body.rows).toHaveLength(limit) - }) - - it("can handle pagination", async () => { - await createTable(await userTable()) - await Promise.all(Array.from({ length: 10 }, () => config.createRow())) - - const createViewResponse = await config.createView() - const allRows = (await config.api.viewV2.search(createViewResponse.id)) - .body.rows - - const firstPageResponse = await config.api.viewV2.search( - createViewResponse.id, + const sortTestOptions: [ { - paginate: true, - limit: 4, - query: {}, - } - ) - expect(firstPageResponse.body).toEqual({ - rows: expect.arrayContaining(allRows.slice(0, 4)), - totalRows: isInternal ? 10 : undefined, - hasNextPage: true, - bookmark: expect.anything(), - }) - - const secondPageResponse = await config.api.viewV2.search( - createViewResponse.id, - { - paginate: true, - limit: 4, - bookmark: firstPageResponse.body.bookmark, - - query: {}, - } - ) - expect(secondPageResponse.body).toEqual({ - rows: expect.arrayContaining(allRows.slice(4, 8)), - totalRows: isInternal ? 10 : undefined, - hasNextPage: true, - bookmark: expect.anything(), - }) - - const lastPageResponse = await config.api.viewV2.search( - createViewResponse.id, - { - paginate: true, - limit: 4, - bookmark: secondPageResponse.body.bookmark, - query: {}, - } - ) - expect(lastPageResponse.body).toEqual({ - rows: expect.arrayContaining(allRows.slice(8)), - totalRows: isInternal ? 
10 : undefined, - hasNextPage: false, - bookmark: expect.anything(), - }) - }) - - isInternal && - it("doesn't allow creating in user table", async () => { - const userTableId = InternalTable.USER_METADATA - const response = await config.api.row.save( - userTableId, + field: string + order?: SortOrder + type?: SortType + }, + string[] + ][] = [ + [ { - tableId: userTableId, - firstName: "Joe", - lastName: "Joe", - email: "joe@joe.com", - roles: {}, + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, }, - { expectStatus: 400 } + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + }, + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + type: SortType.number, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + type: SortType.number, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + ] + + describe("sorting", () => { + beforeAll(async () => { + const table = await createTable(await userTable()) + const users = [ + { name: "Alice", age: 25 }, + { name: "Bob", age: 30 }, + { name: "Charly", age: 27 }, + { name: "Danny", age: 15 }, + ] + await Promise.all( + users.map(u => + config.api.row.save(table._id!, { + tableId: table._id, + ...u, + }) + ) + ) + }) + + it.each(sortTestOptions)( + "allow sorting (%s)", + async (sortParams, expected) => { + const createViewResponse = await config.createView({ + sort: sortParams, + schema: viewSchema, + }) + + const response = await config.api.viewV2.search( + 
createViewResponse.id + ) + + expect(response.body.rows).toHaveLength(4) + expect(response.body.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) + } + ) + + it.each(sortTestOptions)( + "allow override the default view sorting (%s)", + async (sortParams, expected) => { + const createViewResponse = await config.createView({ + sort: { + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, + }, + schema: viewSchema, + }) + + const response = await config.api.viewV2.search( + createViewResponse.id, + { + sort: sortParams.field, + sortOrder: sortParams.order, + sortType: sortParams.type, + query: {}, + } + ) + + expect(response.body.rows).toHaveLength(4) + expect(response.body.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) + } ) - expect(response.message).toBe("Cannot create new user entry.") }) - describe("permissions", () => { - let viewId: string - let tableId: string + it("when schema is defined, defined columns and row attributes are returned", async () => { + const table = await createTable(await userTable()) + const rows = await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, { + tableId: table._id, + name: generator.name(), + age: generator.age(), + }) + ) + ) - beforeAll(async () => { + const view = await config.createView({ + schema: { name: { visible: true } }, + }) + const response = await config.api.viewV2.search(view.id) + + expect(response.body.rows).toHaveLength(10) + expect(response.body.rows).toEqual( + expect.arrayContaining( + rows.map(r => ({ + ...(isInternal + ? 
expectAnyInternalColsAttributes + : expectAnyExternalColsAttributes), + _viewId: view.id, + name: r.name, + })) + ) + ) + }) + + it("views without data can be returned", async () => { + const table = await createTable(await userTable()) + + const createViewResponse = await config.createView() + const response = await config.api.viewV2.search(createViewResponse.id) + + expect(response.body.rows).toHaveLength(0) + }) + + it("respects the limit parameter", async () => { + await createTable(await userTable()) + await Promise.all( + Array.from({ length: 10 }, () => config.createRow()) + ) + + const limit = generator.integer({ min: 1, max: 8 }) + + const createViewResponse = await config.createView() + const response = await config.api.viewV2.search( + createViewResponse.id, + { + limit, + query: {}, + } + ) + + expect(response.body.rows).toHaveLength(limit) + }) + + it("can handle pagination", async () => { await createTable(await userTable()) await Promise.all( Array.from({ length: 10 }, () => config.createRow()) ) const createViewResponse = await config.createView() + const allRows = ( + await config.api.viewV2.search(createViewResponse.id) + ).body.rows - tableId = table._id! - viewId = createViewResponse.id - }) + const firstPageResponse = await config.api.viewV2.search( + createViewResponse.id, + { + paginate: true, + limit: 4, + query: {}, + } + ) + expect(firstPageResponse.body).toEqual({ + rows: expect.arrayContaining(allRows.slice(0, 4)), + totalRows: isInternal ? 
10 : undefined, + hasNextPage: true, + bookmark: expect.anything(), + }) - beforeEach(() => { - mocks.licenses.useViewPermissions() - }) + const secondPageResponse = await config.api.viewV2.search( + createViewResponse.id, + { + paginate: true, + limit: 4, + bookmark: firstPageResponse.body.bookmark, - it("does not allow public users to fetch by default", async () => { - await config.publish() - await config.api.viewV2.search(viewId, undefined, { - expectStatus: 403, - usePublicUser: true, + query: {}, + } + ) + expect(secondPageResponse.body).toEqual({ + rows: expect.arrayContaining(allRows.slice(4, 8)), + totalRows: isInternal ? 10 : undefined, + hasNextPage: true, + bookmark: expect.anything(), + }) + + const lastPageResponse = await config.api.viewV2.search( + createViewResponse.id, + { + paginate: true, + limit: 4, + bookmark: secondPageResponse.body.bookmark, + query: {}, + } + ) + expect(lastPageResponse.body).toEqual({ + rows: expect.arrayContaining(allRows.slice(8)), + totalRows: isInternal ? 
10 : undefined, + hasNextPage: false, + bookmark: expect.anything(), }) }) - it("allow public users to fetch when permissions are explicit", async () => { - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: viewId, - }) - await config.publish() - - const response = await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, + isInternal && + it("doesn't allow creating in user table", async () => { + const userTableId = InternalTable.USER_METADATA + const response = await config.api.row.save( + userTableId, + { + tableId: userTableId, + firstName: "Joe", + lastName: "Joe", + email: "joe@joe.com", + roles: {}, + }, + { expectStatus: 400 } + ) + expect(response.message).toBe("Cannot create new user entry.") }) - expect(response.body.rows).toHaveLength(10) - }) + describe("permissions", () => { + let viewId: string + let tableId: string - it("allow public users to fetch when permissions are inherited", async () => { - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: tableId, - }) - await config.publish() + beforeAll(async () => { + await createTable(await userTable()) + await Promise.all( + Array.from({ length: 10 }, () => config.createRow()) + ) - const response = await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, + const createViewResponse = await config.createView() + + tableId = table._id! 
+ viewId = createViewResponse.id }) - expect(response.body.rows).toHaveLength(10) - }) - - it("respects inherited permissions, not allowing not public views from public tables", async () => { - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: tableId, + beforeEach(() => { + mocks.licenses.useViewPermissions() }) - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.POWER, - level: PermissionLevel.READ, - resourceId: viewId, - }) - await config.publish() - await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, - expectStatus: 403, + it("does not allow public users to fetch by default", async () => { + await config.publish() + await config.api.viewV2.search(viewId, undefined, { + expectStatus: 403, + usePublicUser: true, + }) + }) + + it("allow public users to fetch when permissions are explicit", async () => { + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: viewId, + }) + await config.publish() + + const response = await config.api.viewV2.search(viewId, undefined, { + usePublicUser: true, + }) + + expect(response.body.rows).toHaveLength(10) + }) + + it("allow public users to fetch when permissions are inherited", async () => { + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: tableId, + }) + await config.publish() + + const response = await config.api.viewV2.search(viewId, undefined, { + usePublicUser: true, + }) + + expect(response.body.rows).toHaveLength(10) + }) + + it("respects inherited permissions, not allowing not public views from public tables", async () => { + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: tableId, + }) + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.POWER, + level: PermissionLevel.READ, + 
resourceId: viewId, + }) + await config.publish() + + await config.api.viewV2.search(viewId, undefined, { + usePublicUser: true, + expectStatus: 403, + }) }) }) }) }) - }) - let o2mTable: Table - let m2mTable: Table - beforeAll(async () => { - o2mTable = await createTable( - { ...generateTableConfig(), name: "o2m" }, - { - skipReassigning: true, - } - ) - m2mTable = await createTable( - { ...generateTableConfig(), name: "m2m" }, - { - skipReassigning: true, - } - ) - }) + let o2mTable: Table + let m2mTable: Table + beforeAll(async () => { + o2mTable = await createTable( + { ...generateTableConfig(), name: "o2m" }, + { + skipReassigning: true, + } + ) + m2mTable = await createTable( + { ...generateTableConfig(), name: "m2m" }, + { + skipReassigning: true, + } + ) + }) - describe.each([ - [ - "relationship fields", - (): Record => ({ - user: { - name: "user", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: o2mTable._id!, - fieldName: "fk_o2m", - }, - users: { - name: "users", - relationshipType: RelationshipType.MANY_TO_MANY, - type: FieldType.LINK, - tableId: m2mTable._id!, - fieldName: "fk_m2m", - }, - }), - (tableId: string) => - config.api.row.save(tableId, { - name: uuid.v4(), + describe.each([ + [ + "relationship fields", + (): Record => ({ + user: { + name: "user", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: o2mTable._id!, + fieldName: "fk_o2m", + }, + users: { + name: "users", + relationshipType: RelationshipType.MANY_TO_MANY, + type: FieldType.LINK, + tableId: m2mTable._id!, + fieldName: "fk_m2m", + }, + }), + (tableId: string) => + config.api.row.save(tableId, { + name: uuid.v4(), + description: generator.paragraph(), + tableId, + }), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.name, + }), + ], + [ + "bb reference fields", + (): Record => ({ + user: { + name: "user", + type: FieldType.BB_REFERENCE, + subtype: FieldTypeSubtypes.BB_REFERENCE.USER, + }, + users: { + 
name: "users", + type: FieldType.BB_REFERENCE, + subtype: FieldTypeSubtypes.BB_REFERENCE.USERS, + }, + }), + () => config.createUser(), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.email, + email: row.email, + firstName: row.firstName, + lastName: row.lastName, + }), + ], + ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { + let tableId: string + let o2mData: Row[] + let m2mData: Row[] + + beforeAll(async () => { + const tableConfig = generateTableConfig() + + if (config.datasource) { + tableConfig.sourceId = config.datasource._id! + if (config.datasource.plus) { + tableConfig.sourceType = TableSourceType.EXTERNAL + } + } + const table = await config.api.table.save({ + ...tableConfig, + schema: { + ...tableConfig.schema, + ...relSchema(), + }, + }) + tableId = table._id! + + o2mData = [ + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + ] + + m2mData = [ + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + ] + }) + + it("can save a row when relationship fields are empty", async () => { + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + } + const row = await config.api.row.save(tableId, rowData) + + expect(row).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("can save a row with a single relationship field", async () => { + const user = _.sample(o2mData)! 
+ const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + user: [user], + } + const row = await config.api.row.save(tableId, rowData) + + expect(row).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: [user].map(u => resultMapper(u)), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can save a row with a multiple relationship field", async () => { + const selectedUsers = _.sampleSize(m2mData, 2) + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: selectedUsers, + } + const row = await config.api.row.save(tableId, rowData) + + expect(row).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + users: expect.arrayContaining( + selectedUsers.map(u => resultMapper(u)) + ), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("can retrieve rows with no populated relationships", async () => { + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + } + const row = await config.api.row.save(tableId, rowData) + + const { body: retrieved } = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: undefined, + users: undefined, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + ...defaultRowFields, + }) + }) + + it("can retrieve rows with populated relationships", async () => { + const user1 = _.sample(o2mData)! 
+ const [user2, user3] = _.sampleSize(m2mData, 2) + + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [user2, user3], + user: [user1], + } + const row = await config.api.row.save(tableId, rowData) + + const { body: retrieved } = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: expect.arrayContaining([user1].map(u => resultMapper(u))), + users: expect.arrayContaining( + [user2, user3].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, + ...defaultRowFields, + }) + }) + + it("can update an existing populated row", async () => { + const user = _.sample(o2mData)! + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users1, users2], + } + const row = await config.api.row.save(tableId, rowData) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: [user], + users: [users3, users1], + }) + expect(updatedRow).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: expect.arrayContaining([user].map(u => resultMapper(u))), + users: expect.arrayContaining( + [users3, users1].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? 
undefined : user.id, + }) + }) + + it("can wipe an existing populated relationships in row", async () => { + const [user1, user2] = _.sampleSize(m2mData, 2) + + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [user1, user2], + } + const row = await config.api.row.save(tableId, rowData) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: null, + users: null, + }) + expect(updatedRow).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("fetch all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows: { + name: string + description: string + user?: Row[] + users?: Row[] + tableId: string + }[] = [ + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.fetch(tableId) + + expect(res).toEqual( + expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? 
undefined : r.user[0].id, + ...defaultRowFields, + })) + ) + ) + }) + + it("search all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows: { + name: string + description: string + user?: Row[] + users?: Row[] + tableId: string + }[] = [ + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.search(tableId) + + expect(res).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ), + ...(isInternal + ? {} + : { + hasNextPage: false, + bookmark: null, + }), + }) + }) + }) + + describe("Formula fields", () => { + let relationshipTable: Table, tableId: string, relatedRow: Row + + beforeAll(async () => { + const otherTableId = config.table!._id! 
+ const cfg = generateTableConfig() + relationshipTable = await config.createLinkedTable( + RelationshipType.ONE_TO_MANY, + ["links"], + { + ...cfg, + // needs to be a short name + name: "b", + schema: { + ...cfg.schema, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: "{{ links.0.name }}", + formulaType: FormulaType.DYNAMIC, + }, + }, + } + ) + + tableId = relationshipTable._id! + + relatedRow = await config.api.row.save(otherTableId, { + name: generator.word(), + description: generator.paragraph(), + }) + await config.api.row.save(tableId, { + name: generator.word(), description: generator.paragraph(), tableId, - }), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.name, - }), - ], - [ - "bb reference fields", - (): Record => ({ - user: { - name: "user", - type: FieldType.BB_REFERENCE, - subtype: FieldTypeSubtypes.BB_REFERENCE.USER, - }, - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: FieldTypeSubtypes.BB_REFERENCE.USERS, - }, - }), - () => config.createUser(), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.email, - email: row.email, - firstName: row.firstName, - lastName: row.lastName, - }), - ], - ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { - let tableId: string - let o2mData: Row[] - let m2mData: Row[] - - beforeAll(async () => { - const tableConfig = generateTableConfig() - - if (config.datasource) { - tableConfig.sourceId = config.datasource._id! - if (config.datasource.plus) { - tableConfig.sourceType = TableSourceType.EXTERNAL - } - } - const table = await config.api.table.save({ - ...tableConfig, - schema: { - ...tableConfig.schema, - ...relSchema(), - }, - }) - tableId = table._id! 
- - o2mData = [ - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - ] - - m2mData = [ - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - ] - }) - - it("can save a row when relationship fields are empty", async () => { - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - } - const row = await config.api.row.save(tableId, rowData) - - expect(row).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("can save a row with a single relationship field", async () => { - const user = _.sample(o2mData)! - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - user: [user], - } - const row = await config.api.row.save(tableId, rowData) - - expect(row).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: [user].map(u => resultMapper(u)), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? 
undefined : user.id, - }) - }) - - it("can save a row with a multiple relationship field", async () => { - const selectedUsers = _.sampleSize(m2mData, 2) - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: selectedUsers, - } - const row = await config.api.row.save(tableId, rowData) - - expect(row).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - users: expect.arrayContaining(selectedUsers.map(u => resultMapper(u))), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("can retrieve rows with no populated relationships", async () => { - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - } - const row = await config.api.row.save(tableId, rowData) - - const { body: retrieved } = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: undefined, - users: undefined, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - ...defaultRowFields, - }) - }) - - it("can retrieve rows with populated relationships", async () => { - const user1 = _.sample(o2mData)! - const [user2, user3] = _.sampleSize(m2mData, 2) - - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [user2, user3], - user: [user1], - } - const row = await config.api.row.save(tableId, rowData) - - const { body: retrieved } = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: expect.arrayContaining([user1].map(u => resultMapper(u))), - users: expect.arrayContaining([user2, user3].map(u => resultMapper(u))), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? 
undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, - ...defaultRowFields, - }) - }) - - it("can update an existing populated row", async () => { - const user = _.sample(o2mData)! - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users1, users2], - } - const row = await config.api.row.save(tableId, rowData) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: [user], - users: [users3, users1], - }) - expect(updatedRow).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: expect.arrayContaining([user].map(u => resultMapper(u))), - users: expect.arrayContaining( - [users3, users1].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, - }) - }) - - it("can wipe an existing populated relationships in row", async () => { - const [user1, user2] = _.sampleSize(m2mData, 2) - - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [user1, user2], - } - const row = await config.api.row.save(tableId, rowData) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: null, - users: null, - }) - expect(updatedRow).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? 
"row" : undefined, - }) - }) - - it("fetch all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows: { - name: string - description: string - user?: Row[] - users?: Row[] - tableId: string - }[] = [ - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.fetch(tableId) - - expect(res).toEqual( - expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? 
undefined : r.user[0].id, - ...defaultRowFields, - })) - ) - ) - }) - - it("search all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows: { - name: string - description: string - user?: Row[] - users?: Row[] - tableId: string - }[] = [ - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.search(tableId) - - expect(res).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? undefined : r.user[0].id, - ...defaultRowFields, - })) - ), - ...(isInternal - ? {} - : { - hasNextPage: false, - bookmark: null, - }), - }) - }) - }) - - describe("Formula fields", () => { - let relationshipTable: Table, tableId: string, relatedRow: Row - - beforeAll(async () => { - const otherTableId = config.table!._id! 
- const cfg = generateTableConfig() - relationshipTable = await config.createLinkedTable( - RelationshipType.ONE_TO_MANY, - ["links"], - { - ...cfg, - // needs to be a short name - name: "b", - schema: { - ...cfg.schema, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: "{{ links.0.name }}", - formulaType: FormulaType.DYNAMIC, - }, - }, - } - ) - - tableId = relationshipTable._id! - - relatedRow = await config.api.row.save(otherTableId, { - name: generator.word(), - description: generator.paragraph(), - }) - await config.api.row.save(tableId, { - name: generator.word(), - description: generator.paragraph(), - tableId, - links: [relatedRow._id], - }) - }) - - it("should be able to search for rows containing formulas", async () => { - const { rows } = await config.api.row.search(tableId) - expect(rows.length).toBe(1) - expect(rows[0].links.length).toBe(1) - const row = rows[0] - expect(row.formula).toBe(relatedRow.name) - }) - }) - - describe("Formula JS protection", () => { - it("should time out JS execution if a single cell takes too long", async () => { - await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => { - const js = Buffer.from( - ` - let i = 0; - while (true) { - i++; - } - return i; - ` - ).toString("base64") - - const table = await config.createTable({ - name: "table", - type: "table", - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: `{{ js "${js}"}}`, - formulaType: FormulaType.DYNAMIC, - }, - }, + links: [relatedRow._id], }) + }) - await config.api.row.save(table._id!, { text: "foo" }) - const { rows } = await config.api.row.search(table._id!) 
- expect(rows).toHaveLength(1) + it("should be able to search for rows containing formulas", async () => { + const { rows } = await config.api.row.search(tableId) + expect(rows.length).toBe(1) + expect(rows[0].links.length).toBe(1) const row = rows[0] - expect(row.text).toBe("foo") - expect(row.formula).toBe("Timed out while executing JS") + expect(row.formula).toBe(relatedRow.name) }) }) - it("should time out JS execution if a multiple cells take too long", async () => { - await config.withEnv( - { - JS_PER_INVOCATION_TIMEOUT_MS: 20, - JS_PER_REQUEST_TIMEOUT_MS: 40, - }, - async () => { + describe("Formula JS protection", () => { + it("should time out JS execution if a single cell takes too long", async () => { + await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => { const js = Buffer.from( ` let i = 0; @@ -2102,81 +2075,125 @@ describe.each([ }, }) - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: "foo" }) - } - - // Run this test 3 times to make sure that there's no cross-request - // pollution of the execution time tracking. - for (let reqs = 0; reqs < 3; reqs++) { - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) - - let i = 0 - for (; i < 10; i++) { - const row = rows[i] - if (row.formula !== "Timed out while executing JS") { - break - } - } - - // Given the execution times are not deterministic, we can't be sure - // of the exact number of rows that were executed before the timeout - // but it should absolutely be at least 1. 
- expect(i).toBeGreaterThan(0) - expect(i).toBeLessThan(5) - - for (; i < 10; i++) { - const row = rows[i] - expect(row.text).toBe("foo") - expect(row.formula).toBe("Request JS execution limit hit") - } - } - } - ) - }) - - it("should not carry over context between formulas", async () => { - const js = Buffer.from(`return $("[text]");`).toString("base64") - const table = await config.createTable({ - name: "table", - type: "table", - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: `{{ js "${js}"}}`, - formulaType: FormulaType.DYNAMIC, - }, - }, + await config.api.row.save(table._id!, { text: "foo" }) + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(1) + const row = rows[0] + expect(row.text).toBe("foo") + expect(row.formula).toBe("Timed out while executing JS") + }) }) - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: `foo${i}` }) - } + it("should time out JS execution if a multiple cells take too long", async () => { + await config.withEnv( + { + JS_PER_INVOCATION_TIMEOUT_MS: 20, + JS_PER_REQUEST_TIMEOUT_MS: 40, + }, + async () => { + const js = Buffer.from( + ` + let i = 0; + while (true) { + i++; + } + return i; + ` + ).toString("base64") - const { rows } = await config.api.row.search(table._id!) 
- expect(rows).toHaveLength(10) + const table = await config.createTable({ + name: "table", + type: "table", + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: `{{ js "${js}"}}`, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) - const formulaValues = rows.map(r => r.formula) - expect(formulaValues).toEqual( - expect.arrayContaining([ - "foo0", - "foo1", - "foo2", - "foo3", - "foo4", - "foo5", - "foo6", - "foo7", - "foo8", - "foo9", - ]) - ) + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: "foo" }) + } + + // Run this test 3 times to make sure that there's no cross-request + // pollution of the execution time tracking. + for (let reqs = 0; reqs < 3; reqs++) { + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) + + let i = 0 + for (; i < 10; i++) { + const row = rows[i] + if (row.formula !== "Timed out while executing JS") { + break + } + } + + // Given the execution times are not deterministic, we can't be sure + // of the exact number of rows that were executed before the timeout + // but it should absolutely be at least 1. 
+ expect(i).toBeGreaterThan(0) + expect(i).toBeLessThan(5) + + for (; i < 10; i++) { + const row = rows[i] + expect(row.text).toBe("foo") + expect(row.formula).toBe("Request JS execution limit hit") + } + } + } + ) + }) + + it("should not carry over context between formulas", async () => { + const js = Buffer.from(`return $("[text]");`).toString("base64") + const table = await config.createTable({ + name: "table", + type: "table", + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: `{{ js "${js}"}}`, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: `foo${i}` }) + } + + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) + + const formulaValues = rows.map(r => r.formula) + expect(formulaValues).toEqual( + expect.arrayContaining([ + "foo0", + "foo1", + "foo2", + "foo3", + "foo4", + "foo5", + "foo6", + "foo7", + "foo8", + "foo9", + ]) + ) + }) }) - }) -}) + } +) diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index fdca32189c..6605052598 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -17,7 +17,6 @@ const envLimit = environment.SQL_MAX_ROWS : null const BASE_LIMIT = envLimit || 5000 -type KnexQuery = Knex.QueryBuilder // these are invalid dates sent by the client, need to convert them to a real max date const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z" const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z" @@ -127,11 +126,11 @@ class InternalBuilder { // right now we only do filters on the specific table being queried addFilters( - query: KnexQuery, + query: Knex.QueryBuilder, filters: SearchFilters | undefined, tableName: string, opts: { aliases?: Record; relationship?: boolean } - ): KnexQuery { + ): Knex.QueryBuilder { function getTableName(name: 
string) { const alias = opts.aliases?.[name] return alias || name @@ -320,7 +319,7 @@ class InternalBuilder { return query } - addSorting(query: KnexQuery, json: QueryJson): KnexQuery { + addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { let { sort, paginate } = json const table = json.meta?.table if (sort && Object.keys(sort || {}).length > 0) { @@ -348,12 +347,12 @@ class InternalBuilder { } addRelationships( - query: KnexQuery, + query: Knex.QueryBuilder, fromTable: string, relationships: RelationshipsJson[] | undefined, schema: string | undefined, aliases?: Record - ): KnexQuery { + ): Knex.QueryBuilder { if (!relationships) { return query } @@ -429,7 +428,7 @@ class InternalBuilder { knex: Knex, endpoint: QueryJson["endpoint"], aliases?: QueryJson["tableAliases"] - ): KnexQuery { + ): Knex.QueryBuilder { const tableName = endpoint.entityId const tableAliased = aliases?.[tableName] ? `${tableName} as ${aliases?.[tableName]}` @@ -441,7 +440,7 @@ class InternalBuilder { return query } - create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { + create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { const { endpoint, body } = json let query = this.knexWithAlias(knex, endpoint) const parsedBody = parseBody(body) @@ -460,7 +459,7 @@ class InternalBuilder { } } - bulkCreate(knex: Knex, json: QueryJson): KnexQuery { + bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder { const { endpoint, body } = json let query = this.knexWithAlias(knex, endpoint) if (!Array.isArray(body)) { @@ -470,7 +469,7 @@ class InternalBuilder { return query.insert(parsedBody) } - read(knex: Knex, json: QueryJson, limit: number): KnexQuery { + read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder { let { endpoint, resource, filters, paginate, relationships, tableAliases } = json @@ -531,7 +530,7 @@ class InternalBuilder { }) } - update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { + update(knex: 
Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { const { endpoint, body, filters, tableAliases } = json let query = this.knexWithAlias(knex, endpoint, tableAliases) const parsedBody = parseBody(body) @@ -546,7 +545,7 @@ class InternalBuilder { } } - delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { + delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { const { endpoint, filters, tableAliases } = json let query = this.knexWithAlias(knex, endpoint, tableAliases) query = this.addFilters(query, filters, endpoint.entityId, { @@ -578,7 +577,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { _query(json: QueryJson, opts: QueryOptions = {}): Knex.SqlNative | Knex.Sql { const sqlClient = this.getSqlClient() const client = knex({ client: sqlClient }) - let query: KnexQuery + let query: Knex.QueryBuilder const builder = new InternalBuilder(sqlClient) switch (this._operation(json)) { case Operation.CREATE: diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 497b981f68..9b3f6a1b38 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -189,5 +189,16 @@ describe("Captures of real examples", () => { const aliased = aliasing.aliasField("`hello`.`world`") expect(aliased).toEqual("`a`.`world`") }) + + it("should handle table names in table names correctly", () => { + const tableNames = ["he", "hell", "hello"] + const aliasing = new AliasTables(tableNames) + const aliased1 = aliasing.aliasField("`he`.`world`") + const aliased2 = aliasing.aliasField("`hell`.`world`") + const aliased3 = aliasing.aliasField("`hello`.`world`") + expect(aliased1).toEqual("`a`.`world`") + expect(aliased2).toEqual("`b`.`world`") + expect(aliased3).toEqual("`c`.`world`") + }) }) }) From d471a2bf0f2760d5bea11ddf32b58eb8db92961b Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 28 Feb 
2024 15:19:55 +0000 Subject: [PATCH 41/54] Undoing change. --- .../server/src/api/routes/tests/row.spec.ts | 3961 ++++++++--------- 1 file changed, 1972 insertions(+), 1989 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 05c6b92bb0..239da36351 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -39,41 +39,907 @@ tk.freeze(timestamp) const { basicRow } = setup.structures -describe.each([["postgres", databaseTestProviders.postgres]])( - "/rows (%s)", - (__, dsProvider) => { - const isInternal = !dsProvider +describe.each([ + ["internal", undefined], + ["postgres", databaseTestProviders.postgres], +])("/rows (%s)", (__, dsProvider) => { + const isInternal = !dsProvider - const request = setup.getRequest() - const config = setup.getConfig() - let table: Table - let tableId: string + const request = setup.getRequest() + const config = setup.getConfig() + let table: Table + let tableId: string - afterAll(async () => { - if (dsProvider) { - await dsProvider.stop() + afterAll(async () => { + if (dsProvider) { + await dsProvider.stop() + } + setup.afterAll() + }) + + beforeAll(async () => { + await config.init() + + if (dsProvider) { + await config.createDatasource({ + datasource: await dsProvider.datasource(), + }) + } + }) + + const generateTableConfig: () => SaveTableRequest = () => { + return { + name: uuid.v4(), + type: "table", + primary: ["id"], + primaryDisplay: "name", + sourceType: TableSourceType.INTERNAL, + sourceId: INTERNAL_TABLE_SOURCE_ID, + schema: { + id: { + type: FieldType.AUTO, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, + }, + name: { + type: FieldType.STRING, + name: "name", + constraints: { + type: "string", + }, + }, + description: { + type: FieldType.STRING, + name: "description", + constraints: { + type: "string", + }, + }, + }, + } + } + + beforeEach(async () => { + 
mocks.licenses.useCloudFree() + }) + + const loadRow = (id: string, tbl_Id: string, status = 200) => + config.api.row.get(tbl_Id, id, { expectStatus: status }) + + const getRowUsage = async () => { + const { total } = await config.doInContext(null, () => + quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS) + ) + return total + } + + const assertRowUsage = async (expected: number) => { + const usage = await getRowUsage() + expect(usage).toBe(expected) + } + + const defaultRowFields = isInternal + ? { + type: "row", + createdAt: timestamp, + updatedAt: timestamp, } - setup.afterAll() + : undefined + + async function createTable( + cfg: Omit, + opts?: { skipReassigning: boolean } + ) { + let table + if (dsProvider) { + table = await config.createExternalTable(cfg, opts) + } else { + table = await config.createTable(cfg, opts) + } + return table + } + + beforeAll(async () => { + const tableConfig = generateTableConfig() + let table = await createTable(tableConfig) + tableId = table._id! 
+ }) + + describe("save, load, update", () => { + it("returns a success message when the row is created", async () => { + const rowUsage = await getRowUsage() + + const res = await request + .post(`/api/${tableId}/rows`) + .send(basicRow(tableId)) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + expect((res as any).res.statusMessage).toEqual( + `${config.table!.name} saved successfully` + ) + expect(res.body.name).toEqual("Test Contact") + expect(res.body._rev).toBeDefined() + await assertRowUsage(rowUsage + 1) }) + it("Increment row autoId per create row request", async () => { + const rowUsage = await getRowUsage() + + const tableConfig = generateTableConfig() + const newTable = await createTable( + { + ...tableConfig, + name: "TestTableAuto", + schema: { + ...tableConfig.schema, + "Row ID": { + name: "Row ID", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, + }, + }, + }, + { skipReassigning: true } + ) + + const ids = [1, 2, 3] + + // Performing several create row requests should increment the autoID fields accordingly + const createRow = async (id: number) => { + const res = await config.api.row.save(newTable._id!, { + name: "row_" + id, + }) + expect(res.name).toEqual("row_" + id) + expect(res._rev).toBeDefined() + expect(res["Row ID"]).toEqual(id) + } + + for (let i = 0; i < ids.length; i++) { + await createRow(ids[i]) + } + + await assertRowUsage(rowUsage + ids.length) + }) + + it("updates a row successfully", async () => { + const existing = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.save(tableId, { + _id: existing._id, + _rev: existing._rev, + tableId, + name: "Updated Name", + }) + + expect(res.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + 
it("should load a row", async () => { + const existing = await config.createRow() + + const res = await config.api.row.get(tableId, existing._id!) + + expect(res.body).toEqual({ + ...existing, + ...defaultRowFields, + }) + }) + + it("should list all rows for given tableId", async () => { + const table = await createTable(generateTableConfig(), { + skipReassigning: true, + }) + const tableId = table._id! + const newRow = { + tableId, + name: "Second Contact", + description: "new", + } + const firstRow = await config.createRow({ tableId }) + await config.createRow(newRow) + + const res = await config.api.row.fetch(tableId) + + expect(res.length).toBe(2) + expect(res.find((r: Row) => r.name === newRow.name)).toBeDefined() + expect(res.find((r: Row) => r.name === firstRow.name)).toBeDefined() + }) + + it("load should return 404 when row does not exist", async () => { + await config.createRow() + + await config.api.row.get(tableId, "1234567", { + expectStatus: 404, + }) + }) + + isInternal && + it("row values are coerced", async () => { + const str: FieldSchema = { + type: FieldType.STRING, + name: "str", + constraints: { type: "string", presence: false }, + } + const attachment: FieldSchema = { + type: FieldType.ATTACHMENT, + name: "attachment", + constraints: { type: "array", presence: false }, + } + const bool: FieldSchema = { + type: FieldType.BOOLEAN, + name: "boolean", + constraints: { type: "boolean", presence: false }, + } + const number: FieldSchema = { + type: FieldType.NUMBER, + name: "str", + constraints: { type: "number", presence: false }, + } + const datetime: FieldSchema = { + type: FieldType.DATETIME, + name: "datetime", + constraints: { + type: "string", + presence: false, + datetime: { earliest: "", latest: "" }, + }, + } + const arrayField: FieldSchema = { + type: FieldType.ARRAY, + constraints: { + type: "array", + presence: false, + inclusion: ["One", "Two", "Three"], + }, + name: "Sample Tags", + sortable: false, + } + const optsField: FieldSchema 
= { + name: "Sample Opts", + type: FieldType.OPTIONS, + constraints: { + type: "string", + presence: false, + inclusion: ["Alpha", "Beta", "Gamma"], + }, + } + const table = await createTable({ + name: "TestTable2", + type: "table", + schema: { + name: str, + stringUndefined: str, + stringNull: str, + stringString: str, + numberEmptyString: number, + numberNull: number, + numberUndefined: number, + numberString: number, + numberNumber: number, + datetimeEmptyString: datetime, + datetimeNull: datetime, + datetimeUndefined: datetime, + datetimeString: datetime, + datetimeDate: datetime, + boolNull: bool, + boolEmpty: bool, + boolUndefined: bool, + boolString: bool, + boolBool: bool, + attachmentNull: attachment, + attachmentUndefined: attachment, + attachmentEmpty: attachment, + attachmentEmptyArrayStr: attachment, + arrayFieldEmptyArrayStr: arrayField, + arrayFieldArrayStrKnown: arrayField, + arrayFieldNull: arrayField, + arrayFieldUndefined: arrayField, + optsFieldEmptyStr: optsField, + optsFieldUndefined: optsField, + optsFieldNull: optsField, + optsFieldStrKnown: optsField, + }, + }) + + const row = { + name: "Test Row", + stringUndefined: undefined, + stringNull: null, + stringString: "i am a string", + numberEmptyString: "", + numberNull: null, + numberUndefined: undefined, + numberString: "123", + numberNumber: 123, + datetimeEmptyString: "", + datetimeNull: null, + datetimeUndefined: undefined, + datetimeString: "1984-04-20T00:00:00.000Z", + datetimeDate: new Date("1984-04-20"), + boolNull: null, + boolEmpty: "", + boolUndefined: undefined, + boolString: "true", + boolBool: true, + tableId: table._id, + attachmentNull: null, + attachmentUndefined: undefined, + attachmentEmpty: "", + attachmentEmptyArrayStr: "[]", + arrayFieldEmptyArrayStr: "[]", + arrayFieldUndefined: undefined, + arrayFieldNull: null, + arrayFieldArrayStrKnown: "['One']", + optsFieldEmptyStr: "", + optsFieldUndefined: undefined, + optsFieldNull: null, + optsFieldStrKnown: "Alpha", + } + + 
const createdRow = await config.createRow(row) + const id = createdRow._id! + + const saved = (await loadRow(id, table._id!)).body + + expect(saved.stringUndefined).toBe(undefined) + expect(saved.stringNull).toBe(null) + expect(saved.stringString).toBe("i am a string") + expect(saved.numberEmptyString).toBe(null) + expect(saved.numberNull).toBe(null) + expect(saved.numberUndefined).toBe(undefined) + expect(saved.numberString).toBe(123) + expect(saved.numberNumber).toBe(123) + expect(saved.datetimeEmptyString).toBe(null) + expect(saved.datetimeNull).toBe(null) + expect(saved.datetimeUndefined).toBe(undefined) + expect(saved.datetimeString).toBe( + new Date(row.datetimeString).toISOString() + ) + expect(saved.datetimeDate).toBe(row.datetimeDate.toISOString()) + expect(saved.boolNull).toBe(null) + expect(saved.boolEmpty).toBe(null) + expect(saved.boolUndefined).toBe(undefined) + expect(saved.boolString).toBe(true) + expect(saved.boolBool).toBe(true) + expect(saved.attachmentNull).toEqual([]) + expect(saved.attachmentUndefined).toBe(undefined) + expect(saved.attachmentEmpty).toEqual([]) + expect(saved.attachmentEmptyArrayStr).toEqual([]) + expect(saved.arrayFieldEmptyArrayStr).toEqual([]) + expect(saved.arrayFieldNull).toEqual([]) + expect(saved.arrayFieldUndefined).toEqual(undefined) + expect(saved.optsFieldEmptyStr).toEqual(null) + expect(saved.optsFieldUndefined).toEqual(undefined) + expect(saved.optsFieldNull).toEqual(null) + expect(saved.arrayFieldArrayStrKnown).toEqual(["One"]) + expect(saved.optsFieldStrKnown).toEqual("Alpha") + }) + }) + + describe("view save", () => { + it("views have extra data trimmed", async () => { + const table = await createTable({ + type: "table", + name: "orders", + primary: ["OrderID"], + schema: { + Country: { + type: FieldType.STRING, + name: "Country", + }, + OrderID: { + type: FieldType.NUMBER, + name: "OrderID", + }, + Story: { + type: FieldType.STRING, + name: "Story", + }, + }, + }) + + const createViewResponse = await 
config.createView({ + name: uuid.v4(), + schema: { + Country: { + visible: true, + }, + OrderID: { + visible: true, + }, + }, + }) + + const createRowResponse = await config.api.row.save( + createViewResponse.id, + { + OrderID: "1111", + Country: "Aussy", + Story: "aaaaa", + } + ) + + const row = await config.api.row.get(table._id!, createRowResponse._id!) + expect(row.body.Story).toBeUndefined() + expect(row.body).toEqual({ + ...defaultRowFields, + OrderID: 1111, + Country: "Aussy", + _id: createRowResponse._id, + _rev: createRowResponse._rev, + tableId: table._id, + }) + }) + }) + + describe("patch", () => { + let otherTable: Table + beforeAll(async () => { - await config.init() - - if (dsProvider) { - await config.createDatasource({ - datasource: await dsProvider.datasource(), - }) + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + const otherTableConfig = generateTableConfig() + // need a short name of table here - for relationship tests + otherTableConfig.name = "a" + otherTableConfig.schema.relationship = { + name: "relationship", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: table._id!, + fieldName: "relationship", } + otherTable = await createTable(otherTableConfig) + // need to set the config back to the original table + config.table = table }) - const generateTableConfig: () => SaveTableRequest = () => { - return { - name: uuid.v4(), + it("should update only the fields that are supplied", async () => { + const existing = await config.createRow() + + const rowUsage = await getRowUsage() + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: "Updated Name", + }) + + expect(row.name).toEqual("Updated Name") + expect(row.description).toEqual(existing.description) + + const savedRow = await loadRow(row._id!, table._id!) 
+ + expect(savedRow.body.description).toEqual(existing.description) + expect(savedRow.body.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + it("should throw an error when given improper types", async () => { + const existing = await config.createRow() + const rowUsage = await getRowUsage() + + await config.api.row.patch( + table._id!, + { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: 1, + }, + { expectStatus: 400 } + ) + + await assertRowUsage(rowUsage) + }) + + it("should not overwrite links if those links are not set", async () => { + let linkField: FieldSchema = { + type: FieldType.LINK, + name: "", + fieldName: "", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: InternalTable.USER_METADATA, + } + + let table = await config.api.table.save({ + name: "TestTable", type: "table", - primary: ["id"], - primaryDisplay: "name", sourceType: TableSourceType.INTERNAL, sourceId: INTERNAL_TABLE_SOURCE_ID, + schema: { + user1: { ...linkField, name: "user1", fieldName: "user1" }, + user2: { ...linkField, name: "user2", fieldName: "user2" }, + }, + }) + + let user1 = await config.createUser() + let user2 = await config.createUser() + + let row = await config.api.row.save(table._id!, { + user1: [{ _id: user1._id }], + user2: [{ _id: user2._id }], + }) + + let getResp = await config.api.row.get(table._id!, row._id!) + expect(getResp.body.user1[0]._id).toEqual(user1._id) + expect(getResp.body.user2[0]._id).toEqual(user2._id) + + let patchResp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: table._id!, + user1: [{ _id: user2._id }], + }) + expect(patchResp.user1[0]._id).toEqual(user2._id) + expect(patchResp.user2[0]._id).toEqual(user2._id) + + getResp = await config.api.row.get(table._id!, row._id!) 
+ expect(getResp.body.user1[0]._id).toEqual(user2._id) + expect(getResp.body.user2[0]._id).toEqual(user2._id) + }) + + it("should be able to update relationships when both columns are same name", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + let row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + relationship: [row._id], + }) + row = (await config.api.row.get(table._id!, row._id!)).body + expect(row.relationship.length).toBe(1) + const resp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: row.tableId!, + name: "test2", + relationship: [row2._id], + }) + expect(resp.relationship.length).toBe(1) + }) + }) + + describe("destroy", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should be able to delete a row", async () => { + const createdRow = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, [createdRow]) + expect(res.body[0]._id).toEqual(createdRow._id) + await assertRowUsage(rowUsage - 1) + }) + }) + + describe("validate", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should return no errors on valid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { name: "ivan" }) + + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + await assertRowUsage(rowUsage) + }) + + it("should errors on invalid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { name: 1 }) + + if (isInternal) { + expect(res.valid).toBe(false) + expect(Object.keys(res.errors)).toEqual(["name"]) + } else { + // Validation for external is not implemented, so it will 
always return valid + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + } + await assertRowUsage(rowUsage) + }) + }) + + describe("bulkDelete", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should be able to delete a bulk set of rows", async () => { + const row1 = await config.createRow() + const row2 = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, [row1, row2]) + + expect(res.body.length).toEqual(2) + await loadRow(row1._id!, table._id!, 404) + await assertRowUsage(rowUsage - 2) + }) + + it("should be able to delete a variety of row set types", async () => { + const [row1, row2, row3] = await Promise.all([ + config.createRow(), + config.createRow(), + config.createRow(), + ]) + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, [ + row1, + row2._id, + { _id: row3._id }, + ]) + + expect(res.body.length).toEqual(3) + await loadRow(row1._id!, table._id!, 404) + await assertRowUsage(rowUsage - 3) + }) + + it("should accept a valid row object and delete the row", async () => { + const row1 = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, row1) + + expect(res.body.id).toEqual(row1._id) + await loadRow(row1._id!, table._id!, 404) + await assertRowUsage(rowUsage - 1) + }) + + it("Should ignore malformed/invalid delete requests", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete( + table._id!, + { not: "valid" }, + { expectStatus: 400 } + ) + expect(res.body.message).toEqual("Invalid delete rows request") + + const res2 = await config.api.row.delete( + table._id!, + { rows: 123 }, + { expectStatus: 400 } + ) + expect(res2.body.message).toEqual("Invalid delete rows request") + + const res3 = await config.api.row.delete(table._id!, 
"invalid", { + expectStatus: 400, + }) + expect(res3.body.message).toEqual("Invalid delete rows request") + + await assertRowUsage(rowUsage) + }) + }) + + // Legacy views are not available for external + isInternal && + describe("fetchView", () => { + beforeEach(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should be able to fetch tables contents via 'view'", async () => { + const row = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.legacyView.get(table._id!) + expect(res.body.length).toEqual(1) + expect(res.body[0]._id).toEqual(row._id) + await assertRowUsage(rowUsage) + }) + + it("should throw an error if view doesn't exist", async () => { + const rowUsage = await getRowUsage() + + await config.api.legacyView.get("derp", { expectStatus: 404 }) + + await assertRowUsage(rowUsage) + }) + + it("should be able to run on a view", async () => { + const view = await config.createLegacyView({ + tableId: table._id!, + name: "ViewTest", + filters: [], + schema: {}, + }) + const row = await config.createRow() + const rowUsage = await getRowUsage() + + const res = await config.api.legacyView.get(view.name) + expect(res.body.length).toEqual(1) + expect(res.body[0]._id).toEqual(row._id) + + await assertRowUsage(rowUsage) + }) + }) + + describe("fetchEnrichedRows", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should allow enriching some linked rows", async () => { + const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( + config.getTenantId(), + async () => { + const linkedTable = await config.createLinkedTable( + RelationshipType.ONE_TO_MANY, + ["link"], + { + // Making sure that the combined table name + column name is within postgres limits + name: uuid.v4().replace(/-/g, "").substring(0, 16), + type: "table", + primary: ["id"], + primaryDisplay: "id", + 
schema: { + id: { + type: FieldType.AUTO, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, + }, + }, + } + ) + const firstRow = await config.createRow({ + name: "Test Contact", + description: "original description", + tableId: table._id, + }) + const secondRow = await config.createRow({ + name: "Test 2", + description: "og desc", + link: [{ _id: firstRow._id }], + tableId: linkedTable._id, + }) + return { linkedTable, firstRow, secondRow } + } + ) + const rowUsage = await getRowUsage() + + // test basic enrichment + const resBasic = await config.api.row.get( + linkedTable._id!, + secondRow._id! + ) + expect(resBasic.body.link.length).toBe(1) + expect(resBasic.body.link[0]).toEqual({ + _id: firstRow._id, + primaryDisplay: firstRow.name, + }) + + // test full enrichment + const resEnriched = await config.api.row.getEnriched( + linkedTable._id!, + secondRow._id! + ) + expect(resEnriched.body.link.length).toBe(1) + expect(resEnriched.body.link[0]._id).toBe(firstRow._id) + expect(resEnriched.body.link[0].name).toBe("Test Contact") + expect(resEnriched.body.link[0].description).toBe("original description") + await assertRowUsage(rowUsage) + }) + }) + + isInternal && + describe("attachments", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should allow enriching attachment rows", async () => { + const table = await config.createAttachmentTable() + const attachmentId = `${structures.uuid()}.csv` + const row = await config.createRow({ + name: "test", + description: "test", + attachment: [ + { + key: `${config.getAppId()}/attachments/${attachmentId}`, + }, + ], + tableId: table._id, + }) + // the environment needs configured for this + await setup.switchToSelfHosted(async () => { + return context.doInAppContext(config.getAppId(), async () => { + const enriched = await outputProcessing(table, [row]) + expect((enriched as Row[])[0].attachment[0].url).toBe( + 
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` + ) + }) + }) + }) + }) + + describe("exportData", () => { + beforeAll(async () => { + const tableConfig = generateTableConfig() + table = await createTable(tableConfig) + }) + + it("should allow exporting all columns", async () => { + const existing = await config.createRow() + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res.text) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure all original columns were exported + expect(Object.keys(row).length).toBeGreaterThanOrEqual( + Object.keys(existing).length + ) + Object.keys(existing).forEach(key => { + expect(row[key]).toEqual(existing[key]) + }) + }) + + it("should allow exporting only certain columns", async () => { + const existing = await config.createRow() + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + columns: ["_id"], + }) + const results = JSON.parse(res.text) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure only the _id column was exported + expect(Object.keys(row).length).toEqual(1) + expect(row._id).toEqual(existing._id) + }) + }) + + describe("view 2.0", () => { + async function userTable(): Promise
{ + return { + name: `users_${uuid.v4()}`, + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + type: "table", + primary: ["id"], schema: { id: { type: FieldType.AUTO, @@ -86,857 +952,167 @@ describe.each([["postgres", databaseTestProviders.postgres]])( name: { type: FieldType.STRING, name: "name", - constraints: { - type: "string", - }, }, - description: { + surname: { type: FieldType.STRING, - name: "description", - constraints: { - type: "string", - }, + name: "surname", + }, + age: { + type: FieldType.NUMBER, + name: "age", + }, + address: { + type: FieldType.STRING, + name: "address", + }, + jobTitle: { + type: FieldType.STRING, + name: "jobTitle", }, }, } } - beforeEach(async () => { - mocks.licenses.useCloudFree() + const randomRowData = () => ({ + name: generator.first(), + surname: generator.last(), + age: generator.age(), + address: generator.address(), + jobTitle: generator.word(), }) - const loadRow = (id: string, tbl_Id: string, status = 200) => - config.api.row.get(tbl_Id, id, { expectStatus: status }) - - const getRowUsage = async () => { - const { total } = await config.doInContext(null, () => - quotas.getCurrentUsageValues( - QuotaUsageType.STATIC, - StaticQuotaName.ROWS - ) - ) - return total - } - - const assertRowUsage = async (expected: number) => { - const usage = await getRowUsage() - expect(usage).toBe(expected) - } - - const defaultRowFields = isInternal - ? { - type: "row", - createdAt: timestamp, - updatedAt: timestamp, - } - : undefined - - async function createTable( - cfg: Omit, - opts?: { skipReassigning: boolean } - ) { - let table - if (dsProvider) { - table = await config.createExternalTable(cfg, opts) - } else { - table = await config.createTable(cfg, opts) - } - return table - } - - beforeAll(async () => { - const tableConfig = generateTableConfig() - let table = await createTable(tableConfig) - tableId = table._id! 
- }) - - describe("save, load, update", () => { - it("returns a success message when the row is created", async () => { - const rowUsage = await getRowUsage() - - const res = await request - .post(`/api/${tableId}/rows`) - .send(basicRow(tableId)) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect((res as any).res.statusMessage).toEqual( - `${config.table!.name} saved successfully` - ) - expect(res.body.name).toEqual("Test Contact") - expect(res.body._rev).toBeDefined() - await assertRowUsage(rowUsage + 1) - }) - - it("Increment row autoId per create row request", async () => { - const rowUsage = await getRowUsage() - - const tableConfig = generateTableConfig() - const newTable = await createTable( - { - ...tableConfig, - name: "TestTableAuto", - schema: { - ...tableConfig.schema, - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, - constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, - }, - }, - }, - }, - { skipReassigning: true } - ) - - const ids = [1, 2, 3] - - // Performing several create row requests should increment the autoID fields accordingly - const createRow = async (id: number) => { - const res = await config.api.row.save(newTable._id!, { - name: "row_" + id, - }) - expect(res.name).toEqual("row_" + id) - expect(res._rev).toBeDefined() - expect(res["Row ID"]).toEqual(id) - } - - for (let i = 0; i < ids.length; i++) { - await createRow(ids[i]) - } - - await assertRowUsage(rowUsage + ids.length) - }) - - it("updates a row successfully", async () => { - const existing = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.row.save(tableId, { - _id: existing._id, - _rev: existing._rev, - tableId, - name: "Updated Name", - }) - - expect(res.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - 
it("should load a row", async () => { - const existing = await config.createRow() - - const res = await config.api.row.get(tableId, existing._id!) - - expect(res.body).toEqual({ - ...existing, - ...defaultRowFields, - }) - }) - - it("should list all rows for given tableId", async () => { - const table = await createTable(generateTableConfig(), { - skipReassigning: true, - }) - const tableId = table._id! - const newRow = { - tableId, - name: "Second Contact", - description: "new", - } - const firstRow = await config.createRow({ tableId }) - await config.createRow(newRow) - - const res = await config.api.row.fetch(tableId) - - expect(res.length).toBe(2) - expect(res.find((r: Row) => r.name === newRow.name)).toBeDefined() - expect(res.find((r: Row) => r.name === firstRow.name)).toBeDefined() - }) - - it("load should return 404 when row does not exist", async () => { - await config.createRow() - - await config.api.row.get(tableId, "1234567", { - expectStatus: 404, - }) - }) - - isInternal && - it("row values are coerced", async () => { - const str: FieldSchema = { - type: FieldType.STRING, - name: "str", - constraints: { type: "string", presence: false }, - } - const attachment: FieldSchema = { - type: FieldType.ATTACHMENT, - name: "attachment", - constraints: { type: "array", presence: false }, - } - const bool: FieldSchema = { - type: FieldType.BOOLEAN, - name: "boolean", - constraints: { type: "boolean", presence: false }, - } - const number: FieldSchema = { - type: FieldType.NUMBER, - name: "str", - constraints: { type: "number", presence: false }, - } - const datetime: FieldSchema = { - type: FieldType.DATETIME, - name: "datetime", - constraints: { - type: "string", - presence: false, - datetime: { earliest: "", latest: "" }, - }, - } - const arrayField: FieldSchema = { - type: FieldType.ARRAY, - constraints: { - type: "array", - presence: false, - inclusion: ["One", "Two", "Three"], - }, - name: "Sample Tags", - sortable: false, - } - const optsField: FieldSchema 
= { - name: "Sample Opts", - type: FieldType.OPTIONS, - constraints: { - type: "string", - presence: false, - inclusion: ["Alpha", "Beta", "Gamma"], - }, - } - const table = await createTable({ - name: "TestTable2", - type: "table", - schema: { - name: str, - stringUndefined: str, - stringNull: str, - stringString: str, - numberEmptyString: number, - numberNull: number, - numberUndefined: number, - numberString: number, - numberNumber: number, - datetimeEmptyString: datetime, - datetimeNull: datetime, - datetimeUndefined: datetime, - datetimeString: datetime, - datetimeDate: datetime, - boolNull: bool, - boolEmpty: bool, - boolUndefined: bool, - boolString: bool, - boolBool: bool, - attachmentNull: attachment, - attachmentUndefined: attachment, - attachmentEmpty: attachment, - attachmentEmptyArrayStr: attachment, - arrayFieldEmptyArrayStr: arrayField, - arrayFieldArrayStrKnown: arrayField, - arrayFieldNull: arrayField, - arrayFieldUndefined: arrayField, - optsFieldEmptyStr: optsField, - optsFieldUndefined: optsField, - optsFieldNull: optsField, - optsFieldStrKnown: optsField, - }, - }) - - const row = { - name: "Test Row", - stringUndefined: undefined, - stringNull: null, - stringString: "i am a string", - numberEmptyString: "", - numberNull: null, - numberUndefined: undefined, - numberString: "123", - numberNumber: 123, - datetimeEmptyString: "", - datetimeNull: null, - datetimeUndefined: undefined, - datetimeString: "1984-04-20T00:00:00.000Z", - datetimeDate: new Date("1984-04-20"), - boolNull: null, - boolEmpty: "", - boolUndefined: undefined, - boolString: "true", - boolBool: true, - tableId: table._id, - attachmentNull: null, - attachmentUndefined: undefined, - attachmentEmpty: "", - attachmentEmptyArrayStr: "[]", - arrayFieldEmptyArrayStr: "[]", - arrayFieldUndefined: undefined, - arrayFieldNull: null, - arrayFieldArrayStrKnown: "['One']", - optsFieldEmptyStr: "", - optsFieldUndefined: undefined, - optsFieldNull: null, - optsFieldStrKnown: "Alpha", - } - - 
const createdRow = await config.createRow(row) - const id = createdRow._id! - - const saved = (await loadRow(id, table._id!)).body - - expect(saved.stringUndefined).toBe(undefined) - expect(saved.stringNull).toBe(null) - expect(saved.stringString).toBe("i am a string") - expect(saved.numberEmptyString).toBe(null) - expect(saved.numberNull).toBe(null) - expect(saved.numberUndefined).toBe(undefined) - expect(saved.numberString).toBe(123) - expect(saved.numberNumber).toBe(123) - expect(saved.datetimeEmptyString).toBe(null) - expect(saved.datetimeNull).toBe(null) - expect(saved.datetimeUndefined).toBe(undefined) - expect(saved.datetimeString).toBe( - new Date(row.datetimeString).toISOString() - ) - expect(saved.datetimeDate).toBe(row.datetimeDate.toISOString()) - expect(saved.boolNull).toBe(null) - expect(saved.boolEmpty).toBe(null) - expect(saved.boolUndefined).toBe(undefined) - expect(saved.boolString).toBe(true) - expect(saved.boolBool).toBe(true) - expect(saved.attachmentNull).toEqual([]) - expect(saved.attachmentUndefined).toBe(undefined) - expect(saved.attachmentEmpty).toEqual([]) - expect(saved.attachmentEmptyArrayStr).toEqual([]) - expect(saved.arrayFieldEmptyArrayStr).toEqual([]) - expect(saved.arrayFieldNull).toEqual([]) - expect(saved.arrayFieldUndefined).toEqual(undefined) - expect(saved.optsFieldEmptyStr).toEqual(null) - expect(saved.optsFieldUndefined).toEqual(undefined) - expect(saved.optsFieldNull).toEqual(null) - expect(saved.arrayFieldArrayStrKnown).toEqual(["One"]) - expect(saved.optsFieldStrKnown).toEqual("Alpha") - }) - }) - - describe("view save", () => { - it("views have extra data trimmed", async () => { - const table = await createTable({ - type: "table", - name: "orders", - primary: ["OrderID"], + describe("create", () => { + it("should persist a new row with only the provided view fields", async () => { + const table = await createTable(await userTable()) + const view = await config.createView({ schema: { - Country: { - type: 
FieldType.STRING, - name: "Country", - }, - OrderID: { - type: FieldType.NUMBER, - name: "OrderID", - }, - Story: { - type: FieldType.STRING, - name: "Story", - }, + name: { visible: true }, + surname: { visible: true }, + address: { visible: true }, }, }) - const createViewResponse = await config.createView({ - name: uuid.v4(), - schema: { - Country: { - visible: true, - }, - OrderID: { - visible: true, - }, - }, + const data = randomRowData() + const newRow = await config.api.row.save(view.id, { + tableId: table!._id, + _viewId: view.id, + ...data, }) - const createRowResponse = await config.api.row.save( - createViewResponse.id, - { - OrderID: "1111", - Country: "Aussy", - Story: "aaaaa", - } - ) - - const row = await config.api.row.get(table._id!, createRowResponse._id!) - expect(row.body.Story).toBeUndefined() + const row = await config.api.row.get(table._id!, newRow._id!) expect(row.body).toEqual({ + name: data.name, + surname: data.surname, + address: data.address, + tableId: table!._id, + _id: newRow._id, + _rev: newRow._rev, + id: newRow.id, ...defaultRowFields, - OrderID: 1111, - Country: "Aussy", - _id: createRowResponse._id, - _rev: createRowResponse._rev, - tableId: table._id, }) + expect(row.body._viewId).toBeUndefined() + expect(row.body.age).toBeUndefined() + expect(row.body.jobTitle).toBeUndefined() }) }) describe("patch", () => { - let otherTable: Table - - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - const otherTableConfig = generateTableConfig() - // need a short name of table here - for relationship tests - otherTableConfig.name = "a" - otherTableConfig.schema.relationship = { - name: "relationship", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: table._id!, - fieldName: "relationship", - } - otherTable = await createTable(otherTableConfig) - // need to set the config back to the original table - config.table = table - }) - - it("should 
update only the fields that are supplied", async () => { - const existing = await config.createRow() - - const rowUsage = await getRowUsage() - - const row = await config.api.row.patch(table._id!, { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: "Updated Name", - }) - - expect(row.name).toEqual("Updated Name") - expect(row.description).toEqual(existing.description) - - const savedRow = await loadRow(row._id!, table._id!) - - expect(savedRow.body.description).toEqual(existing.description) - expect(savedRow.body.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - it("should throw an error when given improper types", async () => { - const existing = await config.createRow() - const rowUsage = await getRowUsage() - - await config.api.row.patch( - table._id!, - { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: 1, - }, - { expectStatus: 400 } - ) - - await assertRowUsage(rowUsage) - }) - - it("should not overwrite links if those links are not set", async () => { - let linkField: FieldSchema = { - type: FieldType.LINK, - name: "", - fieldName: "", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: InternalTable.USER_METADATA, - } - - let table = await config.api.table.save({ - name: "TestTable", - type: "table", - sourceType: TableSourceType.INTERNAL, - sourceId: INTERNAL_TABLE_SOURCE_ID, + it("should update only the view fields for a row", async () => { + const table = await createTable(await userTable()) + const tableId = table._id! 
+ const view = await config.createView({ schema: { - user1: { ...linkField, name: "user1", fieldName: "user1" }, - user2: { ...linkField, name: "user2", fieldName: "user2" }, + name: { visible: true }, + address: { visible: true }, }, }) - let user1 = await config.createUser() - let user2 = await config.createUser() - - let row = await config.api.row.save(table._id!, { - user1: [{ _id: user1._id }], - user2: [{ _id: user2._id }], + const newRow = await config.api.row.save(view.id, { + tableId, + _viewId: view.id, + ...randomRowData(), + }) + const newData = randomRowData() + await config.api.row.patch(view.id, { + tableId, + _viewId: view.id, + _id: newRow._id!, + _rev: newRow._rev!, + ...newData, }) - let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.body.user1[0]._id).toEqual(user1._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) - - let patchResp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: table._id!, - user1: [{ _id: user2._id }], + const row = await config.api.row.get(tableId, newRow._id!) + expect(row.body).toEqual({ + ...newRow, + name: newData.name, + address: newData.address, + _id: newRow._id, + _rev: expect.any(String), + id: newRow.id, + ...defaultRowFields, }) - expect(patchResp.user1[0]._id).toEqual(user2._id) - expect(patchResp.user2[0]._id).toEqual(user2._id) - - getResp = await config.api.row.get(table._id!, row._id!) 
- expect(getResp.body.user1[0]._id).toEqual(user2._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) - }) - - it("should be able to update relationships when both columns are same name", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - let row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - relationship: [row._id], - }) - row = (await config.api.row.get(table._id!, row._id!)).body - expect(row.relationship.length).toBe(1) - const resp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: row.tableId!, - name: "test2", - relationship: [row2._id], - }) - expect(resp.relationship.length).toBe(1) + expect(row.body._viewId).toBeUndefined() + expect(row.body.age).toBeUndefined() + expect(row.body.jobTitle).toBeUndefined() }) }) describe("destroy", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - it("should be able to delete a row", async () => { + const table = await createTable(await userTable()) + const tableId = table._id! 
+ const view = await config.createView({ + schema: { + name: { visible: true }, + address: { visible: true }, + }, + }) + const createdRow = await config.createRow() const rowUsage = await getRowUsage() - const res = await config.api.row.delete(table._id!, [createdRow]) - expect(res.body[0]._id).toEqual(createdRow._id) + await config.api.row.delete(view.id, [createdRow]) + await assertRowUsage(rowUsage - 1) - }) - }) - describe("validate", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) + await config.api.row.get(tableId, createdRow._id!, { + expectStatus: 404, + }) }) - it("should return no errors on valid row", async () => { + it("should be able to delete multiple rows", async () => { + const table = await createTable(await userTable()) + const tableId = table._id! + const view = await config.createView({ + schema: { + name: { visible: true }, + address: { visible: true }, + }, + }) + + const rows = await Promise.all([ + config.createRow(), + config.createRow(), + config.createRow(), + ]) const rowUsage = await getRowUsage() - const res = await config.api.row.validate(table._id!, { name: "ivan" }) + await config.api.row.delete(view.id, [rows[0], rows[2]]) - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - await assertRowUsage(rowUsage) - }) - - it("should errors on invalid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: 1 }) - - if (isInternal) { - expect(res.valid).toBe(false) - expect(Object.keys(res.errors)).toEqual(["name"]) - } else { - // Validation for external is not implemented, so it will always return valid - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - } - await assertRowUsage(rowUsage) - }) - }) - - describe("bulkDelete", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - 
- it("should be able to delete a bulk set of rows", async () => { - const row1 = await config.createRow() - const row2 = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, [row1, row2]) - - expect(res.body.length).toEqual(2) - await loadRow(row1._id!, table._id!, 404) await assertRowUsage(rowUsage - 2) - }) - it("should be able to delete a variety of row set types", async () => { - const [row1, row2, row3] = await Promise.all([ - config.createRow(), - config.createRow(), - config.createRow(), - ]) - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, [ - row1, - row2._id, - { _id: row3._id }, - ]) - - expect(res.body.length).toEqual(3) - await loadRow(row1._id!, table._id!, 404) - await assertRowUsage(rowUsage - 3) - }) - - it("should accept a valid row object and delete the row", async () => { - const row1 = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, row1) - - expect(res.body.id).toEqual(row1._id) - await loadRow(row1._id!, table._id!, 404) - await assertRowUsage(rowUsage - 1) - }) - - it("Should ignore malformed/invalid delete requests", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete( - table._id!, - { not: "valid" }, - { expectStatus: 400 } - ) - expect(res.body.message).toEqual("Invalid delete rows request") - - const res2 = await config.api.row.delete( - table._id!, - { rows: 123 }, - { expectStatus: 400 } - ) - expect(res2.body.message).toEqual("Invalid delete rows request") - - const res3 = await config.api.row.delete(table._id!, "invalid", { - expectStatus: 400, + await config.api.row.get(tableId, rows[0]._id!, { + expectStatus: 404, }) - expect(res3.body.message).toEqual("Invalid delete rows request") - - await assertRowUsage(rowUsage) + await config.api.row.get(tableId, rows[2]._id!, { + expectStatus: 404, + }) + await 
config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) }) }) - // Legacy views are not available for external - isInternal && - describe("fetchView", () => { - beforeEach(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should be able to fetch tables contents via 'view'", async () => { - const row = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.legacyView.get(table._id!) - expect(res.body.length).toEqual(1) - expect(res.body[0]._id).toEqual(row._id) - await assertRowUsage(rowUsage) - }) - - it("should throw an error if view doesn't exist", async () => { - const rowUsage = await getRowUsage() - - await config.api.legacyView.get("derp", { expectStatus: 404 }) - - await assertRowUsage(rowUsage) - }) - - it("should be able to run on a view", async () => { - const view = await config.createLegacyView({ - tableId: table._id!, - name: "ViewTest", - filters: [], - schema: {}, - }) - const row = await config.createRow() - const rowUsage = await getRowUsage() - - const res = await config.api.legacyView.get(view.name) - expect(res.body.length).toEqual(1) - expect(res.body[0]._id).toEqual(row._id) - - await assertRowUsage(rowUsage) - }) - }) - - describe("fetchEnrichedRows", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should allow enriching some linked rows", async () => { - const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( - config.getTenantId(), - async () => { - const linkedTable = await config.createLinkedTable( - RelationshipType.ONE_TO_MANY, - ["link"], - { - // Making sure that the combined table name + column name is within postgres limits - name: uuid.v4().replace(/-/g, "").substring(0, 16), - type: "table", - primary: ["id"], - primaryDisplay: "id", - schema: { - id: { - type: FieldType.AUTO, - name: "id", - autocolumn: true, - 
constraints: { - presence: true, - }, - }, - }, - } - ) - const firstRow = await config.createRow({ - name: "Test Contact", - description: "original description", - tableId: table._id, - }) - const secondRow = await config.createRow({ - name: "Test 2", - description: "og desc", - link: [{ _id: firstRow._id }], - tableId: linkedTable._id, - }) - return { linkedTable, firstRow, secondRow } - } - ) - const rowUsage = await getRowUsage() - - // test basic enrichment - const resBasic = await config.api.row.get( - linkedTable._id!, - secondRow._id! - ) - expect(resBasic.body.link.length).toBe(1) - expect(resBasic.body.link[0]).toEqual({ - _id: firstRow._id, - primaryDisplay: firstRow.name, - }) - - // test full enrichment - const resEnriched = await config.api.row.getEnriched( - linkedTable._id!, - secondRow._id! - ) - expect(resEnriched.body.link.length).toBe(1) - expect(resEnriched.body.link[0]._id).toBe(firstRow._id) - expect(resEnriched.body.link[0].name).toBe("Test Contact") - expect(resEnriched.body.link[0].description).toBe( - "original description" - ) - await assertRowUsage(rowUsage) - }) - }) - - isInternal && - describe("attachments", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should allow enriching attachment rows", async () => { - const table = await config.createAttachmentTable() - const attachmentId = `${structures.uuid()}.csv` - const row = await config.createRow({ - name: "test", - description: "test", - attachment: [ - { - key: `${config.getAppId()}/attachments/${attachmentId}`, - }, - ], - tableId: table._id, - }) - // the environment needs configured for this - await setup.switchToSelfHosted(async () => { - return context.doInAppContext(config.getAppId(), async () => { - const enriched = await outputProcessing(table, [row]) - expect((enriched as Row[])[0].attachment[0].url).toBe( - 
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` - ) - }) - }) - }) - }) - - describe("exportData", () => { - beforeAll(async () => { - const tableConfig = generateTableConfig() - table = await createTable(tableConfig) - }) - - it("should allow exporting all columns", async () => { - const existing = await config.createRow() - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - }) - const results = JSON.parse(res.text) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure all original columns were exported - expect(Object.keys(row).length).toBeGreaterThanOrEqual( - Object.keys(existing).length - ) - Object.keys(existing).forEach(key => { - expect(row[key]).toEqual(existing[key]) - }) - }) - - it("should allow exporting only certain columns", async () => { - const existing = await config.createRow() - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - columns: ["_id"], - }) - const results = JSON.parse(res.text) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure only the _id column was exported - expect(Object.keys(row).length).toEqual(1) - expect(row._id).toEqual(existing._id) - }) - }) - - describe("view 2.0", () => { + describe("view search", () => { + const viewSchema = { age: { visible: true }, name: { visible: true } } async function userTable(): Promise
{ return { name: `users_${uuid.v4()}`, @@ -956,1034 +1132,36 @@ describe.each([["postgres", databaseTestProviders.postgres]])( name: { type: FieldType.STRING, name: "name", - }, - surname: { - type: FieldType.STRING, - name: "surname", + constraints: { type: "string" }, }, age: { type: FieldType.NUMBER, name: "age", - }, - address: { - type: FieldType.STRING, - name: "address", - }, - jobTitle: { - type: FieldType.STRING, - name: "jobTitle", + constraints: {}, }, }, } } - const randomRowData = () => ({ - name: generator.first(), - surname: generator.last(), - age: generator.age(), - address: generator.address(), - jobTitle: generator.word(), - }) - - describe("create", () => { - it("should persist a new row with only the provided view fields", async () => { - const table = await createTable(await userTable()) - const view = await config.createView({ - schema: { - name: { visible: true }, - surname: { visible: true }, - address: { visible: true }, - }, - }) - - const data = randomRowData() - const newRow = await config.api.row.save(view.id, { - tableId: table!._id, - _viewId: view.id, - ...data, - }) - - const row = await config.api.row.get(table._id!, newRow._id!) - expect(row.body).toEqual({ - name: data.name, - surname: data.surname, - address: data.address, - tableId: table!._id, - _id: newRow._id, - _rev: newRow._rev, - id: newRow.id, - ...defaultRowFields, - }) - expect(row.body._viewId).toBeUndefined() - expect(row.body.age).toBeUndefined() - expect(row.body.jobTitle).toBeUndefined() - }) - }) - - describe("patch", () => { - it("should update only the view fields for a row", async () => { - const table = await createTable(await userTable()) - const tableId = table._id! 
- const view = await config.createView({ - schema: { - name: { visible: true }, - address: { visible: true }, - }, - }) - - const newRow = await config.api.row.save(view.id, { - tableId, - _viewId: view.id, - ...randomRowData(), - }) - const newData = randomRowData() - await config.api.row.patch(view.id, { - tableId, - _viewId: view.id, - _id: newRow._id!, - _rev: newRow._rev!, - ...newData, - }) - - const row = await config.api.row.get(tableId, newRow._id!) - expect(row.body).toEqual({ - ...newRow, - name: newData.name, - address: newData.address, - _id: newRow._id, - _rev: expect.any(String), - id: newRow.id, - ...defaultRowFields, - }) - expect(row.body._viewId).toBeUndefined() - expect(row.body.age).toBeUndefined() - expect(row.body.jobTitle).toBeUndefined() - }) - }) - - describe("destroy", () => { - it("should be able to delete a row", async () => { - const table = await createTable(await userTable()) - const tableId = table._id! - const view = await config.createView({ - schema: { - name: { visible: true }, - address: { visible: true }, - }, - }) - - const createdRow = await config.createRow() - const rowUsage = await getRowUsage() - - await config.api.row.delete(view.id, [createdRow]) - - await assertRowUsage(rowUsage - 1) - - await config.api.row.get(tableId, createdRow._id!, { - expectStatus: 404, - }) - }) - - it("should be able to delete multiple rows", async () => { - const table = await createTable(await userTable()) - const tableId = table._id! 
- const view = await config.createView({ - schema: { - name: { visible: true }, - address: { visible: true }, - }, - }) - - const rows = await Promise.all([ - config.createRow(), - config.createRow(), - config.createRow(), - ]) - const rowUsage = await getRowUsage() - - await config.api.row.delete(view.id, [rows[0], rows[2]]) - - await assertRowUsage(rowUsage - 2) - - await config.api.row.get(tableId, rows[0]._id!, { - expectStatus: 404, - }) - await config.api.row.get(tableId, rows[2]._id!, { - expectStatus: 404, - }) - await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) - }) - }) - - describe("view search", () => { - const viewSchema = { age: { visible: true }, name: { visible: true } } - async function userTable(): Promise
{ - return { - name: `users_${uuid.v4()}`, - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - type: "table", - primary: ["id"], - schema: { - id: { - type: FieldType.AUTO, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, - }, - name: { - type: FieldType.STRING, - name: "name", - constraints: { type: "string" }, - }, - age: { - type: FieldType.NUMBER, - name: "age", - constraints: {}, - }, - }, - } - } - - it("returns empty rows from view when no schema is passed", async () => { - const table = await createTable(await userTable()) - const rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { tableId: table._id }) - ) - ) - - const createViewResponse = await config.createView() - const response = await config.api.viewV2.search(createViewResponse.id) - - expect(response.body.rows).toHaveLength(10) - expect(response.body).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - _viewId: createViewResponse.id, - tableId: table._id, - _id: r._id, - _rev: r._rev, - ...defaultRowFields, - })) - ), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - bookmark: null, - }), - }) - }) - - it("searching respects the view filters", async () => { - const table = await createTable(await userTable()) - - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { - tableId: table._id, - name: generator.name(), - age: generator.integer({ min: 10, max: 30 }), - }) - ) - ) - - const expectedRows = await Promise.all( - Array.from({ length: 5 }, () => - config.api.row.save(table._id!, { - tableId: table._id, - name: generator.name(), - age: 40, - }) - ) - ) - - const createViewResponse = await config.createView({ - query: [ - { operator: SearchQueryOperators.EQUAL, field: "age", value: 40 }, - ], - schema: viewSchema, - }) - - const response = await config.api.viewV2.search(createViewResponse.id) - - expect(response.body.rows).toHaveLength(5) - expect(response.body).toEqual({ - rows: expect.arrayContaining( - expectedRows.map(r => ({ - _viewId: createViewResponse.id, - tableId: table._id, - name: r.name, - age: r.age, - _id: r._id, - _rev: r._rev, - ...defaultRowFields, - })) - ), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - bookmark: null, - }), - }) - }) - - const sortTestOptions: [ - { - field: string - order?: SortOrder - type?: SortType - }, - string[] - ][] = [ - [ - { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - type: SortType.STRING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - type: SortType.number, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - type: SortType.number, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - ] - - describe("sorting", () => { - beforeAll(async () => { - const table = await createTable(await userTable()) - const users = [ - { name: "Alice", age: 25 }, - { name: "Bob", age: 30 }, - { name: "Charly", age: 27 }, - { name: "Danny", age: 15 }, - ] - await Promise.all( - users.map(u => - config.api.row.save(table._id!, { - tableId: table._id, - ...u, - }) - ) - ) - }) - - it.each(sortTestOptions)( - "allow sorting (%s)", - async (sortParams, expected) => { - const createViewResponse = await config.createView({ - sort: sortParams, - schema: viewSchema, - }) - - const response = await config.api.viewV2.search( - createViewResponse.id - ) - - expect(response.body.rows).toHaveLength(4) - expect(response.body.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) - } - ) - - it.each(sortTestOptions)( - "allow override the default view sorting (%s)", - async (sortParams, expected) => 
{ - const createViewResponse = await config.createView({ - sort: { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, - }, - schema: viewSchema, - }) - - const response = await config.api.viewV2.search( - createViewResponse.id, - { - sort: sortParams.field, - sortOrder: sortParams.order, - sortType: sortParams.type, - query: {}, - } - ) - - expect(response.body.rows).toHaveLength(4) - expect(response.body.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) - } - ) - }) - - it("when schema is defined, defined columns and row attributes are returned", async () => { - const table = await createTable(await userTable()) - const rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { - tableId: table._id, - name: generator.name(), - age: generator.age(), - }) - ) - ) - - const view = await config.createView({ - schema: { name: { visible: true } }, - }) - const response = await config.api.viewV2.search(view.id) - - expect(response.body.rows).toHaveLength(10) - expect(response.body.rows).toEqual( - expect.arrayContaining( - rows.map(r => ({ - ...(isInternal - ? 
expectAnyInternalColsAttributes - : expectAnyExternalColsAttributes), - _viewId: view.id, - name: r.name, - })) - ) - ) - }) - - it("views without data can be returned", async () => { - const table = await createTable(await userTable()) - - const createViewResponse = await config.createView() - const response = await config.api.viewV2.search(createViewResponse.id) - - expect(response.body.rows).toHaveLength(0) - }) - - it("respects the limit parameter", async () => { - await createTable(await userTable()) - await Promise.all( - Array.from({ length: 10 }, () => config.createRow()) - ) - - const limit = generator.integer({ min: 1, max: 8 }) - - const createViewResponse = await config.createView() - const response = await config.api.viewV2.search( - createViewResponse.id, - { - limit, - query: {}, - } - ) - - expect(response.body.rows).toHaveLength(limit) - }) - - it("can handle pagination", async () => { - await createTable(await userTable()) - await Promise.all( - Array.from({ length: 10 }, () => config.createRow()) - ) - - const createViewResponse = await config.createView() - const allRows = ( - await config.api.viewV2.search(createViewResponse.id) - ).body.rows - - const firstPageResponse = await config.api.viewV2.search( - createViewResponse.id, - { - paginate: true, - limit: 4, - query: {}, - } - ) - expect(firstPageResponse.body).toEqual({ - rows: expect.arrayContaining(allRows.slice(0, 4)), - totalRows: isInternal ? 10 : undefined, - hasNextPage: true, - bookmark: expect.anything(), - }) - - const secondPageResponse = await config.api.viewV2.search( - createViewResponse.id, - { - paginate: true, - limit: 4, - bookmark: firstPageResponse.body.bookmark, - - query: {}, - } - ) - expect(secondPageResponse.body).toEqual({ - rows: expect.arrayContaining(allRows.slice(4, 8)), - totalRows: isInternal ? 
10 : undefined, - hasNextPage: true, - bookmark: expect.anything(), - }) - - const lastPageResponse = await config.api.viewV2.search( - createViewResponse.id, - { - paginate: true, - limit: 4, - bookmark: secondPageResponse.body.bookmark, - query: {}, - } - ) - expect(lastPageResponse.body).toEqual({ - rows: expect.arrayContaining(allRows.slice(8)), - totalRows: isInternal ? 10 : undefined, - hasNextPage: false, - bookmark: expect.anything(), - }) - }) - - isInternal && - it("doesn't allow creating in user table", async () => { - const userTableId = InternalTable.USER_METADATA - const response = await config.api.row.save( - userTableId, - { - tableId: userTableId, - firstName: "Joe", - lastName: "Joe", - email: "joe@joe.com", - roles: {}, - }, - { expectStatus: 400 } - ) - expect(response.message).toBe("Cannot create new user entry.") - }) - - describe("permissions", () => { - let viewId: string - let tableId: string - - beforeAll(async () => { - await createTable(await userTable()) - await Promise.all( - Array.from({ length: 10 }, () => config.createRow()) - ) - - const createViewResponse = await config.createView() - - tableId = table._id! 
- viewId = createViewResponse.id - }) - - beforeEach(() => { - mocks.licenses.useViewPermissions() - }) - - it("does not allow public users to fetch by default", async () => { - await config.publish() - await config.api.viewV2.search(viewId, undefined, { - expectStatus: 403, - usePublicUser: true, - }) - }) - - it("allow public users to fetch when permissions are explicit", async () => { - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: viewId, - }) - await config.publish() - - const response = await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, - }) - - expect(response.body.rows).toHaveLength(10) - }) - - it("allow public users to fetch when permissions are inherited", async () => { - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: tableId, - }) - await config.publish() - - const response = await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, - }) - - expect(response.body.rows).toHaveLength(10) - }) - - it("respects inherited permissions, not allowing not public views from public tables", async () => { - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: tableId, - }) - await config.api.permission.set({ - roleId: roles.BUILTIN_ROLE_IDS.POWER, - level: PermissionLevel.READ, - resourceId: viewId, - }) - await config.publish() - - await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, - expectStatus: 403, - }) - }) - }) - }) - }) - - let o2mTable: Table - let m2mTable: Table - beforeAll(async () => { - o2mTable = await createTable( - { ...generateTableConfig(), name: "o2m" }, - { - skipReassigning: true, - } - ) - m2mTable = await createTable( - { ...generateTableConfig(), name: "m2m" }, - { - skipReassigning: true, - } - ) - }) - - describe.each([ - [ - "relationship fields", - (): Record => 
({ - user: { - name: "user", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: o2mTable._id!, - fieldName: "fk_o2m", - }, - users: { - name: "users", - relationshipType: RelationshipType.MANY_TO_MANY, - type: FieldType.LINK, - tableId: m2mTable._id!, - fieldName: "fk_m2m", - }, - }), - (tableId: string) => - config.api.row.save(tableId, { - name: uuid.v4(), - description: generator.paragraph(), - tableId, - }), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.name, - }), - ], - [ - "bb reference fields", - (): Record => ({ - user: { - name: "user", - type: FieldType.BB_REFERENCE, - subtype: FieldTypeSubtypes.BB_REFERENCE.USER, - }, - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: FieldTypeSubtypes.BB_REFERENCE.USERS, - }, - }), - () => config.createUser(), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.email, - email: row.email, - firstName: row.firstName, - lastName: row.lastName, - }), - ], - ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { - let tableId: string - let o2mData: Row[] - let m2mData: Row[] - - beforeAll(async () => { - const tableConfig = generateTableConfig() - - if (config.datasource) { - tableConfig.sourceId = config.datasource._id! - if (config.datasource.plus) { - tableConfig.sourceType = TableSourceType.EXTERNAL - } - } - const table = await config.api.table.save({ - ...tableConfig, - schema: { - ...tableConfig.schema, - ...relSchema(), - }, - }) - tableId = table._id! 
- - o2mData = [ - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - ] - - m2mData = [ - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - ] - }) - - it("can save a row when relationship fields are empty", async () => { - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - } - const row = await config.api.row.save(tableId, rowData) - - expect(row).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("can save a row with a single relationship field", async () => { - const user = _.sample(o2mData)! - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - user: [user], - } - const row = await config.api.row.save(tableId, rowData) - - expect(row).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: [user].map(u => resultMapper(u)), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? 
undefined : user.id, - }) - }) - - it("can save a row with a multiple relationship field", async () => { - const selectedUsers = _.sampleSize(m2mData, 2) - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: selectedUsers, - } - const row = await config.api.row.save(tableId, rowData) - - expect(row).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - users: expect.arrayContaining( - selectedUsers.map(u => resultMapper(u)) - ), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("can retrieve rows with no populated relationships", async () => { - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - } - const row = await config.api.row.save(tableId, rowData) - - const { body: retrieved } = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: undefined, - users: undefined, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - ...defaultRowFields, - }) - }) - - it("can retrieve rows with populated relationships", async () => { - const user1 = _.sample(o2mData)! - const [user2, user3] = _.sampleSize(m2mData, 2) - - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [user2, user3], - user: [user1], - } - const row = await config.api.row.save(tableId, rowData) - - const { body: retrieved } = await config.api.row.get(tableId, row._id!) 
- expect(retrieved).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: expect.arrayContaining([user1].map(u => resultMapper(u))), - users: expect.arrayContaining( - [user2, user3].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, - ...defaultRowFields, - }) - }) - - it("can update an existing populated row", async () => { - const user = _.sample(o2mData)! - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users1, users2], - } - const row = await config.api.row.save(tableId, rowData) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: [user], - users: [users3, users1], - }) - expect(updatedRow).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - user: expect.arrayContaining([user].map(u => resultMapper(u))), - users: expect.arrayContaining( - [users3, users1].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, - }) - }) - - it("can wipe an existing populated relationships in row", async () => { - const [user1, user2] = _.sampleSize(m2mData, 2) - - const rowData = { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [user1, user2], - } - const row = await config.api.row.save(tableId, rowData) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: null, - users: null, - }) - expect(updatedRow).toEqual({ - name: rowData.name, - description: rowData.description, - tableId, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? 
undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("fetch all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows: { - name: string - description: string - user?: Row[] - users?: Row[] - tableId: string - }[] = [ - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.fetch(tableId) - - expect(res).toEqual( - expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? 
undefined : r.user[0].id, - ...defaultRowFields, - })) + it("returns empty rows from view when no schema is passed", async () => { + const table = await createTable(await userTable()) + const rows = await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, { tableId: table._id }) ) ) - }) - it("search all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) + const createViewResponse = await config.createView() + const response = await config.api.viewV2.search(createViewResponse.id) - const rows: { - name: string - description: string - user?: Row[] - users?: Row[] - tableId: string - }[] = [ - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - ...basicRow(tableId), - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.search(tableId) - - expect(res).toEqual({ + expect(response.body.rows).toHaveLength(10) + expect(response.body).toEqual({ rows: expect.arrayContaining( rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? 
undefined : r.user[0].id, + _viewId: createViewResponse.id, + tableId: table._id, + _id: r._id, + _rev: r._rev, ...defaultRowFields, })) ), @@ -1995,59 +1173,908 @@ describe.each([["postgres", databaseTestProviders.postgres]])( }), }) }) - }) - describe("Formula fields", () => { - let relationshipTable: Table, tableId: string, relatedRow: Row + it("searching respects the view filters", async () => { + const table = await createTable(await userTable()) - beforeAll(async () => { - const otherTableId = config.table!._id! - const cfg = generateTableConfig() - relationshipTable = await config.createLinkedTable( - RelationshipType.ONE_TO_MANY, - ["links"], + await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, { + tableId: table._id, + name: generator.name(), + age: generator.integer({ min: 10, max: 30 }), + }) + ) + ) + + const expectedRows = await Promise.all( + Array.from({ length: 5 }, () => + config.api.row.save(table._id!, { + tableId: table._id, + name: generator.name(), + age: 40, + }) + ) + ) + + const createViewResponse = await config.createView({ + query: [ + { operator: SearchQueryOperators.EQUAL, field: "age", value: 40 }, + ], + schema: viewSchema, + }) + + const response = await config.api.viewV2.search(createViewResponse.id) + + expect(response.body.rows).toHaveLength(5) + expect(response.body).toEqual({ + rows: expect.arrayContaining( + expectedRows.map(r => ({ + _viewId: createViewResponse.id, + tableId: table._id, + name: r.name, + age: r.age, + _id: r._id, + _rev: r._rev, + ...defaultRowFields, + })) + ), + ...(isInternal + ? 
{} + : { + hasNextPage: false, + bookmark: null, + }), + }) + }) + + const sortTestOptions: [ + { + field: string + order?: SortOrder + type?: SortType + }, + string[] + ][] = [ + [ { - ...cfg, - // needs to be a short name - name: "b", - schema: { - ...cfg.schema, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: "{{ links.0.name }}", - formulaType: FormulaType.DYNAMIC, - }, - }, + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, + }, + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + }, + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + type: SortType.number, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + type: SortType.number, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + ] + + describe("sorting", () => { + beforeAll(async () => { + const table = await createTable(await userTable()) + const users = [ + { name: "Alice", age: 25 }, + { name: "Bob", age: 30 }, + { name: "Charly", age: 27 }, + { name: "Danny", age: 15 }, + ] + await Promise.all( + users.map(u => + config.api.row.save(table._id!, { + tableId: table._id, + ...u, + }) + ) + ) + }) + + it.each(sortTestOptions)( + "allow sorting (%s)", + async (sortParams, expected) => { + const createViewResponse = await config.createView({ + sort: sortParams, + schema: viewSchema, + }) + + const response = await config.api.viewV2.search( + createViewResponse.id + ) + + expect(response.body.rows).toHaveLength(4) + 
expect(response.body.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) } ) - tableId = relationshipTable._id! + it.each(sortTestOptions)( + "allow override the default view sorting (%s)", + async (sortParams, expected) => { + const createViewResponse = await config.createView({ + sort: { + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, + }, + schema: viewSchema, + }) - relatedRow = await config.api.row.save(otherTableId, { - name: generator.word(), - description: generator.paragraph(), + const response = await config.api.viewV2.search( + createViewResponse.id, + { + sort: sortParams.field, + sortOrder: sortParams.order, + sortType: sortParams.type, + query: {}, + } + ) + + expect(response.body.rows).toHaveLength(4) + expect(response.body.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) + } + ) + }) + + it("when schema is defined, defined columns and row attributes are returned", async () => { + const table = await createTable(await userTable()) + const rows = await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, { + tableId: table._id, + name: generator.name(), + age: generator.age(), + }) + ) + ) + + const view = await config.createView({ + schema: { name: { visible: true } }, }) - await config.api.row.save(tableId, { - name: generator.word(), - description: generator.paragraph(), - tableId, - links: [relatedRow._id], + const response = await config.api.viewV2.search(view.id) + + expect(response.body.rows).toHaveLength(10) + expect(response.body.rows).toEqual( + expect.arrayContaining( + rows.map(r => ({ + ...(isInternal + ? 
expectAnyInternalColsAttributes + : expectAnyExternalColsAttributes), + _viewId: view.id, + name: r.name, + })) + ) + ) + }) + + it("views without data can be returned", async () => { + const table = await createTable(await userTable()) + + const createViewResponse = await config.createView() + const response = await config.api.viewV2.search(createViewResponse.id) + + expect(response.body.rows).toHaveLength(0) + }) + + it("respects the limit parameter", async () => { + await createTable(await userTable()) + await Promise.all(Array.from({ length: 10 }, () => config.createRow())) + + const limit = generator.integer({ min: 1, max: 8 }) + + const createViewResponse = await config.createView() + const response = await config.api.viewV2.search(createViewResponse.id, { + limit, + query: {}, + }) + + expect(response.body.rows).toHaveLength(limit) + }) + + it("can handle pagination", async () => { + await createTable(await userTable()) + await Promise.all(Array.from({ length: 10 }, () => config.createRow())) + + const createViewResponse = await config.createView() + const allRows = (await config.api.viewV2.search(createViewResponse.id)) + .body.rows + + const firstPageResponse = await config.api.viewV2.search( + createViewResponse.id, + { + paginate: true, + limit: 4, + query: {}, + } + ) + expect(firstPageResponse.body).toEqual({ + rows: expect.arrayContaining(allRows.slice(0, 4)), + totalRows: isInternal ? 10 : undefined, + hasNextPage: true, + bookmark: expect.anything(), + }) + + const secondPageResponse = await config.api.viewV2.search( + createViewResponse.id, + { + paginate: true, + limit: 4, + bookmark: firstPageResponse.body.bookmark, + + query: {}, + } + ) + expect(secondPageResponse.body).toEqual({ + rows: expect.arrayContaining(allRows.slice(4, 8)), + totalRows: isInternal ? 
10 : undefined, + hasNextPage: true, + bookmark: expect.anything(), + }) + + const lastPageResponse = await config.api.viewV2.search( + createViewResponse.id, + { + paginate: true, + limit: 4, + bookmark: secondPageResponse.body.bookmark, + query: {}, + } + ) + expect(lastPageResponse.body).toEqual({ + rows: expect.arrayContaining(allRows.slice(8)), + totalRows: isInternal ? 10 : undefined, + hasNextPage: false, + bookmark: expect.anything(), }) }) - it("should be able to search for rows containing formulas", async () => { - const { rows } = await config.api.row.search(tableId) - expect(rows.length).toBe(1) - expect(rows[0].links.length).toBe(1) - const row = rows[0] - expect(row.formula).toBe(relatedRow.name) + isInternal && + it("doesn't allow creating in user table", async () => { + const userTableId = InternalTable.USER_METADATA + const response = await config.api.row.save( + userTableId, + { + tableId: userTableId, + firstName: "Joe", + lastName: "Joe", + email: "joe@joe.com", + roles: {}, + }, + { expectStatus: 400 } + ) + expect(response.message).toBe("Cannot create new user entry.") + }) + + describe("permissions", () => { + let viewId: string + let tableId: string + + beforeAll(async () => { + await createTable(await userTable()) + await Promise.all( + Array.from({ length: 10 }, () => config.createRow()) + ) + + const createViewResponse = await config.createView() + + tableId = table._id! 
+ viewId = createViewResponse.id + }) + + beforeEach(() => { + mocks.licenses.useViewPermissions() + }) + + it("does not allow public users to fetch by default", async () => { + await config.publish() + await config.api.viewV2.search(viewId, undefined, { + expectStatus: 403, + usePublicUser: true, + }) + }) + + it("allow public users to fetch when permissions are explicit", async () => { + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: viewId, + }) + await config.publish() + + const response = await config.api.viewV2.search(viewId, undefined, { + usePublicUser: true, + }) + + expect(response.body.rows).toHaveLength(10) + }) + + it("allow public users to fetch when permissions are inherited", async () => { + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: tableId, + }) + await config.publish() + + const response = await config.api.viewV2.search(viewId, undefined, { + usePublicUser: true, + }) + + expect(response.body.rows).toHaveLength(10) + }) + + it("respects inherited permissions, not allowing not public views from public tables", async () => { + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: tableId, + }) + await config.api.permission.set({ + roleId: roles.BUILTIN_ROLE_IDS.POWER, + level: PermissionLevel.READ, + resourceId: viewId, + }) + await config.publish() + + await config.api.viewV2.search(viewId, undefined, { + usePublicUser: true, + expectStatus: 403, + }) + }) + }) + }) + }) + + let o2mTable: Table + let m2mTable: Table + beforeAll(async () => { + o2mTable = await createTable( + { ...generateTableConfig(), name: "o2m" }, + { + skipReassigning: true, + } + ) + m2mTable = await createTable( + { ...generateTableConfig(), name: "m2m" }, + { + skipReassigning: true, + } + ) + }) + + describe.each([ + [ + "relationship fields", + (): Record => 
({ + user: { + name: "user", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: o2mTable._id!, + fieldName: "fk_o2m", + }, + users: { + name: "users", + relationshipType: RelationshipType.MANY_TO_MANY, + type: FieldType.LINK, + tableId: m2mTable._id!, + fieldName: "fk_m2m", + }, + }), + (tableId: string) => + config.api.row.save(tableId, { + name: uuid.v4(), + description: generator.paragraph(), + tableId, + }), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.name, + }), + ], + [ + "bb reference fields", + (): Record => ({ + user: { + name: "user", + type: FieldType.BB_REFERENCE, + subtype: FieldTypeSubtypes.BB_REFERENCE.USER, + }, + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: FieldTypeSubtypes.BB_REFERENCE.USERS, + }, + }), + () => config.createUser(), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.email, + email: row.email, + firstName: row.firstName, + lastName: row.lastName, + }), + ], + ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { + let tableId: string + let o2mData: Row[] + let m2mData: Row[] + + beforeAll(async () => { + const tableConfig = generateTableConfig() + + if (config.datasource) { + tableConfig.sourceId = config.datasource._id! + if (config.datasource.plus) { + tableConfig.sourceType = TableSourceType.EXTERNAL + } + } + const table = await config.api.table.save({ + ...tableConfig, + schema: { + ...tableConfig.schema, + ...relSchema(), + }, + }) + tableId = table._id! 
+ + o2mData = [ + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + ] + + m2mData = [ + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + ] + }) + + it("can save a row when relationship fields are empty", async () => { + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + } + const row = await config.api.row.save(tableId, rowData) + + expect(row).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, }) }) - describe("Formula JS protection", () => { - it("should time out JS execution if a single cell takes too long", async () => { - await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => { + it("can save a row with a single relationship field", async () => { + const user = _.sample(o2mData)! + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + user: [user], + } + const row = await config.api.row.save(tableId, rowData) + + expect(row).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: [user].map(u => resultMapper(u)), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? 
undefined : user.id, + }) + }) + + it("can save a row with a multiple relationship field", async () => { + const selectedUsers = _.sampleSize(m2mData, 2) + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: selectedUsers, + } + const row = await config.api.row.save(tableId, rowData) + + expect(row).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + users: expect.arrayContaining(selectedUsers.map(u => resultMapper(u))), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("can retrieve rows with no populated relationships", async () => { + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + } + const row = await config.api.row.save(tableId, rowData) + + const { body: retrieved } = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: undefined, + users: undefined, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + ...defaultRowFields, + }) + }) + + it("can retrieve rows with populated relationships", async () => { + const user1 = _.sample(o2mData)! + const [user2, user3] = _.sampleSize(m2mData, 2) + + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [user2, user3], + user: [user1], + } + const row = await config.api.row.save(tableId, rowData) + + const { body: retrieved } = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: expect.arrayContaining([user1].map(u => resultMapper(u))), + users: expect.arrayContaining([user2, user3].map(u => resultMapper(u))), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? 
undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, + ...defaultRowFields, + }) + }) + + it("can update an existing populated row", async () => { + const user = _.sample(o2mData)! + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users1, users2], + } + const row = await config.api.row.save(tableId, rowData) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: [user], + users: [users3, users1], + }) + expect(updatedRow).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + user: expect.arrayContaining([user].map(u => resultMapper(u))), + users: expect.arrayContaining( + [users3, users1].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can wipe an existing populated relationships in row", async () => { + const [user1, user2] = _.sampleSize(m2mData, 2) + + const rowData = { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [user1, user2], + } + const row = await config.api.row.save(tableId, rowData) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: null, + users: null, + }) + expect(updatedRow).toEqual({ + name: rowData.name, + description: rowData.description, + tableId, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? 
"row" : undefined, + }) + }) + + it("fetch all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows: { + name: string + description: string + user?: Row[] + users?: Row[] + tableId: string + }[] = [ + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.fetch(tableId) + + expect(res).toEqual( + expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? 
undefined : r.user[0].id, + ...defaultRowFields, + })) + ) + ) + }) + + it("search all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows: { + name: string + description: string + user?: Row[] + users?: Row[] + tableId: string + }[] = [ + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + ...basicRow(tableId), + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.search(tableId) + + expect(res).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ), + ...(isInternal + ? {} + : { + hasNextPage: false, + bookmark: null, + }), + }) + }) + }) + + describe("Formula fields", () => { + let relationshipTable: Table, tableId: string, relatedRow: Row + + beforeAll(async () => { + const otherTableId = config.table!._id! 
+ const cfg = generateTableConfig() + relationshipTable = await config.createLinkedTable( + RelationshipType.ONE_TO_MANY, + ["links"], + { + ...cfg, + // needs to be a short name + name: "b", + schema: { + ...cfg.schema, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: "{{ links.0.name }}", + formulaType: FormulaType.DYNAMIC, + }, + }, + } + ) + + tableId = relationshipTable._id! + + relatedRow = await config.api.row.save(otherTableId, { + name: generator.word(), + description: generator.paragraph(), + }) + await config.api.row.save(tableId, { + name: generator.word(), + description: generator.paragraph(), + tableId, + links: [relatedRow._id], + }) + }) + + it("should be able to search for rows containing formulas", async () => { + const { rows } = await config.api.row.search(tableId) + expect(rows.length).toBe(1) + expect(rows[0].links.length).toBe(1) + const row = rows[0] + expect(row.formula).toBe(relatedRow.name) + }) + }) + + describe("Formula JS protection", () => { + it("should time out JS execution if a single cell takes too long", async () => { + await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => { + const js = Buffer.from( + ` + let i = 0; + while (true) { + i++; + } + return i; + ` + ).toString("base64") + + const table = await config.createTable({ + name: "table", + type: "table", + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: `{{ js "${js}"}}`, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + + await config.api.row.save(table._id!, { text: "foo" }) + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows).toHaveLength(1) + const row = rows[0] + expect(row.text).toBe("foo") + expect(row.formula).toBe("Timed out while executing JS") + }) + }) + + it("should time out JS execution if a multiple cells take too long", async () => { + await config.withEnv( + { + JS_PER_INVOCATION_TIMEOUT_MS: 20, + JS_PER_REQUEST_TIMEOUT_MS: 40, + }, + async () => { const js = Buffer.from( ` let i = 0; @@ -2075,125 +2102,81 @@ describe.each([["postgres", databaseTestProviders.postgres]])( }, }) - await config.api.row.save(table._id!, { text: "foo" }) - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(1) - const row = rows[0] - expect(row.text).toBe("foo") - expect(row.formula).toBe("Timed out while executing JS") - }) - }) + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: "foo" }) + } - it("should time out JS execution if a multiple cells take too long", async () => { - await config.withEnv( - { - JS_PER_INVOCATION_TIMEOUT_MS: 20, - JS_PER_REQUEST_TIMEOUT_MS: 40, - }, - async () => { - const js = Buffer.from( - ` - let i = 0; - while (true) { - i++; + // Run this test 3 times to make sure that there's no cross-request + // pollution of the execution time tracking. + for (let reqs = 0; reqs < 3; reqs++) { + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows).toHaveLength(10) + + let i = 0 + for (; i < 10; i++) { + const row = rows[i] + if (row.formula !== "Timed out while executing JS") { + break } - return i; - ` - ).toString("base64") - - const table = await config.createTable({ - name: "table", - type: "table", - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: `{{ js "${js}"}}`, - formulaType: FormulaType.DYNAMIC, - }, - }, - }) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: "foo" }) } - // Run this test 3 times to make sure that there's no cross-request - // pollution of the execution time tracking. - for (let reqs = 0; reqs < 3; reqs++) { - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) + // Given the execution times are not deterministic, we can't be sure + // of the exact number of rows that were executed before the timeout + // but it should absolutely be at least 1. + expect(i).toBeGreaterThan(0) + expect(i).toBeLessThan(5) - let i = 0 - for (; i < 10; i++) { - const row = rows[i] - if (row.formula !== "Timed out while executing JS") { - break - } - } - - // Given the execution times are not deterministic, we can't be sure - // of the exact number of rows that were executed before the timeout - // but it should absolutely be at least 1. 
- expect(i).toBeGreaterThan(0) - expect(i).toBeLessThan(5) - - for (; i < 10; i++) { - const row = rows[i] - expect(row.text).toBe("foo") - expect(row.formula).toBe("Request JS execution limit hit") - } + for (; i < 10; i++) { + const row = rows[i] + expect(row.text).toBe("foo") + expect(row.formula).toBe("Request JS execution limit hit") } } - ) - }) - - it("should not carry over context between formulas", async () => { - const js = Buffer.from(`return $("[text]");`).toString("base64") - const table = await config.createTable({ - name: "table", - type: "table", - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: `{{ js "${js}"}}`, - formulaType: FormulaType.DYNAMIC, - }, - }, - }) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: `foo${i}` }) } - - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) - - const formulaValues = rows.map(r => r.formula) - expect(formulaValues).toEqual( - expect.arrayContaining([ - "foo0", - "foo1", - "foo2", - "foo3", - "foo4", - "foo5", - "foo6", - "foo7", - "foo8", - "foo9", - ]) - ) - }) + ) }) - } -) + + it("should not carry over context between formulas", async () => { + const js = Buffer.from(`return $("[text]");`).toString("base64") + const table = await config.createTable({ + name: "table", + type: "table", + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: `{{ js "${js}"}}`, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: `foo${i}` }) + } + + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows).toHaveLength(10) + + const formulaValues = rows.map(r => r.formula) + expect(formulaValues).toEqual( + expect.arrayContaining([ + "foo0", + "foo1", + "foo2", + "foo3", + "foo4", + "foo5", + "foo6", + "foo7", + "foo8", + "foo9", + ]) + ) + }) + }) +}) From 223a268483f968e851450fe3c5ef5e2c8a7ea218 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 28 Feb 2024 16:35:15 +0000 Subject: [PATCH 42/54] Respond to PR feedback. --- packages/server/src/api/controllers/automation.ts | 4 ++-- packages/server/src/api/controllers/layout.ts | 12 ++++++++++-- packages/types/src/api/web/automation.ts | 3 +++ packages/types/src/api/web/index.ts | 2 ++ packages/types/src/api/web/layout.ts | 5 +++++ 5 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 packages/types/src/api/web/automation.ts create mode 100644 packages/types/src/api/web/layout.ts diff --git a/packages/server/src/api/controllers/automation.ts b/packages/server/src/api/controllers/automation.ts index d1bd580331..b986b5232b 100644 --- a/packages/server/src/api/controllers/automation.ts +++ b/packages/server/src/api/controllers/automation.ts @@ -20,12 +20,12 @@ import { AutomationActionStepId, AutomationResults, UserCtx, + DeleteAutomationResponse, } from "@budibase/types" import { getActionDefinitions as actionDefs } from "../../automations/actions" import sdk from "../../sdk" import { builderSocket } from "../../websockets" import env from "../../environment" -import { DocumentDestroyResponse } from "@budibase/nano" async function getActionDefinitions() { return removeDeprecated(await actionDefs()) @@ -210,7 +210,7 @@ export async function find(ctx: UserCtx) { ctx.body = await db.get(ctx.params.id) } -export async function destroy(ctx: UserCtx) { +export async function destroy(ctx: UserCtx) { const db = context.getAppDB() const automationId = ctx.params.id const oldAutomation = await db.get(automationId) diff --git a/packages/server/src/api/controllers/layout.ts 
b/packages/server/src/api/controllers/layout.ts index 1a15432b88..c0406f50ac 100644 --- a/packages/server/src/api/controllers/layout.ts +++ b/packages/server/src/api/controllers/layout.ts @@ -1,9 +1,17 @@ import { EMPTY_LAYOUT } from "../../constants/layouts" import { generateLayoutID, getScreenParams } from "../../db/utils" import { events, context } from "@budibase/backend-core" -import { BBContext, Layout, UserCtx } from "@budibase/types" +import { + BBContext, + Layout, + SaveLayoutRequest, + SaveLayoutResponse, + UserCtx, +} from "@budibase/types" -export async function save(ctx: UserCtx) { +export async function save( + ctx: UserCtx +) { const db = context.getAppDB() let layout = ctx.request.body diff --git a/packages/types/src/api/web/automation.ts b/packages/types/src/api/web/automation.ts new file mode 100644 index 0000000000..c1f3d01b2f --- /dev/null +++ b/packages/types/src/api/web/automation.ts @@ -0,0 +1,3 @@ +import { DocumentDestroyResponse } from "@budibase/nano" + +export interface DeleteAutomationResponse extends DocumentDestroyResponse {} diff --git a/packages/types/src/api/web/index.ts b/packages/types/src/api/web/index.ts index ab18add208..62d8ce8280 100644 --- a/packages/types/src/api/web/index.ts +++ b/packages/types/src/api/web/index.ts @@ -11,3 +11,5 @@ export * from "./global" export * from "./pagination" export * from "./searchFilter" export * from "./cookies" +export * from "./automation" +export * from "./layout" diff --git a/packages/types/src/api/web/layout.ts b/packages/types/src/api/web/layout.ts new file mode 100644 index 0000000000..50512777ef --- /dev/null +++ b/packages/types/src/api/web/layout.ts @@ -0,0 +1,5 @@ +import { Layout } from "../../documents" + +export interface SaveLayoutRequest extends Layout {} + +export interface SaveLayoutResponse extends Layout {} From 0205db104d3340d3e587e75eb295172306b52a94 Mon Sep 17 00:00:00 2001 From: melohagan <101575380+melohagan@users.noreply.github.com> Date: Thu, 29 Feb 2024 09:15:01 
+0000 Subject: [PATCH 43/54] FIX: clicking on design tab while data tab loads does not navigate correctly (#13152) * If still loading, try navigation later * Set active tab even if still loading * Refactor - timeout not needed! --- .../pages/builder/app/[application]/_layout.svelte | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/builder/src/pages/builder/app/[application]/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/_layout.svelte index c7f8c98e73..dd66f5bc34 100644 --- a/packages/builder/src/pages/builder/app/[application]/_layout.svelte +++ b/packages/builder/src/pages/builder/app/[application]/_layout.svelte @@ -69,11 +69,12 @@ // brought back to the same screen. const topItemNavigate = path => () => { const activeTopNav = $layout.children.find(c => $isActive(c.path)) - if (!activeTopNav) return - builderStore.setPreviousTopNavPath( - activeTopNav.path, - window.location.pathname - ) + if (activeTopNav) { + builderStore.setPreviousTopNavPath( + activeTopNav.path, + window.location.pathname + ) + } $goto($builderStore.previousTopNavPath[path] || path) } From 8a109ffe6a4e1bf8064f609272918885dab8fb28 Mon Sep 17 00:00:00 2001 From: Budibase Staging Release Bot <> Date: Thu, 29 Feb 2024 09:38:13 +0000 Subject: [PATCH 44/54] Bump version to 2.20.13 --- lerna.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lerna.json b/lerna.json index e1a469adf1..1b559f217d 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.20.12", + "version": "2.20.13", "npmClient": "yarn", "packages": [ "packages/*", From de3968ffe3dadb17629f6b44e265324930508220 Mon Sep 17 00:00:00 2001 From: Michael Drury Date: Thu, 29 Feb 2024 12:32:15 +0000 Subject: [PATCH 45/54] Revert "SQL Query aliasing" --- .../scripts/integrations/postgres/reset.sh | 2 +- .../api/controllers/row/ExternalRequest.ts | 160 ++------ .../server/src/api/controllers/row/alias.ts | 166 -------- 
packages/server/src/environment.ts | 5 +- .../server/src/integrations/base/query.ts | 8 +- packages/server/src/integrations/base/sql.ts | 155 +++---- .../server/src/integrations/googlesheets.ts | 7 +- .../src/integrations/microsoftSqlServer.ts | 4 +- packages/server/src/integrations/mysql.ts | 4 +- packages/server/src/integrations/oracle.ts | 9 +- packages/server/src/integrations/postgres.ts | 7 +- .../server/src/integrations/tests/sql.spec.ts | 15 +- .../src/integrations/tests/sqlAlias.spec.ts | 204 ---------- .../basicFetchWithRelationships.json | 183 --------- .../sqlQueryJson/createWithRelationships.json | 173 -------- .../tests/sqlQueryJson/deleteSimple.json | 75 ---- .../sqlQueryJson/enrichRelationship.json | 123 ------ .../tests/sqlQueryJson/fetchManyToMany.json | 109 ----- .../sqlQueryJson/filterByRelationship.json | 94 ----- .../sqlQueryJson/manyRelationshipFilters.json | 202 --------- .../sqlQueryJson/updateRelationship.json | 181 --------- .../tests/sqlQueryJson/updateSimple.json | 181 --------- .../server/src/sdk/app/datasources/plus.ts | 36 +- packages/server/src/sdk/app/rows/utils.ts | 13 +- packages/types/src/sdk/datasources.ts | 16 +- packages/types/src/sdk/search.ts | 1 - yarn.lock | 384 ++++++++---------- 27 files changed, 284 insertions(+), 2233 deletions(-) delete mode 100644 packages/server/src/api/controllers/row/alias.ts delete mode 100644 packages/server/src/integrations/tests/sqlAlias.spec.ts delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json delete mode 100644 
packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json delete mode 100644 packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json diff --git a/packages/server/scripts/integrations/postgres/reset.sh b/packages/server/scripts/integrations/postgres/reset.sh index 8deb01cdf8..32778bd11f 100755 --- a/packages/server/scripts/integrations/postgres/reset.sh +++ b/packages/server/scripts/integrations/postgres/reset.sh @@ -1,3 +1,3 @@ #!/bin/bash -docker-compose down -v +docker-compose down docker volume prune -f diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 685af4e98e..b7dc02c0db 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -7,7 +7,6 @@ import { FilterType, IncludeRelationship, ManyToManyRelationshipFieldMetadata, - ManyToOneRelationshipFieldMetadata, OneToManyRelationshipFieldMetadata, Operation, PaginationJson, @@ -19,7 +18,6 @@ import { SortJson, SortType, Table, - isManyToOne, } from "@budibase/types" import { breakExternalTableId, @@ -34,9 +32,7 @@ import { processObjectSync } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" import { processDates, processFormulas } from "../../../utilities/rowProcessor" import { db as dbCore } from "@budibase/backend-core" -import AliasTables from "./alias" import sdk from "../../../sdk" -import env from "../../../environment" export interface ManyRelationship { tableId?: string @@ -105,39 +101,6 @@ function buildFilters( } } -async function removeManyToManyRelationships( - rowId: string, - table: Table, - colName: string -) { - const tableId = table._id! 
- const filters = buildFilters(rowId, {}, table) - // safety check, if there are no filters on deletion bad things happen - if (Object.keys(filters).length !== 0) { - return getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, Operation.DELETE), - body: { [colName]: null }, - filters, - }) - } else { - return [] - } -} - -async function removeOneToManyRelationships(rowId: string, table: Table) { - const tableId = table._id! - const filters = buildFilters(rowId, {}, table) - // safety check, if there are no filters on deletion bad things happen - if (Object.keys(filters).length !== 0) { - return getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, Operation.UPDATE), - filters, - }) - } else { - return [] - } -} - /** * This function checks the incoming parameters to make sure all the inputs are * valid based on on the table schema. The main thing this is looking for is when a @@ -215,13 +178,13 @@ function generateIdForRow( function getEndpoint(tableId: string | undefined, operation: string) { if (!tableId) { - throw new Error("Cannot get endpoint information - no table ID specified") + return {} } const { datasourceId, tableName } = breakExternalTableId(tableId) return { - datasourceId: datasourceId!, - entityId: tableName!, - operation: operation as Operation, + datasourceId, + entityId: tableName, + operation, } } @@ -341,18 +304,6 @@ export class ExternalRequest { } } - async getRow(table: Table, rowId: string): Promise { - const response = await getDatasourceAndQuery({ - endpoint: getEndpoint(table._id!, Operation.READ), - filters: buildFilters(rowId, {}, table), - }) - if (Array.isArray(response) && response.length > 0) { - return response[0] - } else { - throw new Error(`Cannot fetch row by ID "${rowId}"`) - } - } - inputProcessing(row: Row | undefined, table: Table) { if (!row) { return { row, manyRelationships: [] } @@ -620,9 +571,7 @@ export class ExternalRequest { * information. 
*/ async lookupRelations(tableId: string, row: Row) { - const related: { - [key: string]: { rows: Row[]; isMany: boolean; tableId: string } - } = {} + const related: { [key: string]: any } = {} const { tableName } = breakExternalTableId(tableId) if (!tableName) { return related @@ -640,26 +589,14 @@ export class ExternalRequest { ) { continue } - let tableId: string | undefined, - lookupField: string | undefined, - fieldName: string | undefined - if (isManyToMany(field)) { - tableId = field.through - lookupField = primaryKey - fieldName = field.throughTo || primaryKey - } else if (isManyToOne(field)) { - tableId = field.tableId - lookupField = field.foreignKey - fieldName = field.fieldName - } - if (!tableId || !lookupField || !fieldName) { - throw new Error( - "Unable to lookup relationships - undefined column properties." - ) - } + const isMany = field.relationshipType === RelationshipType.MANY_TO_MANY + const tableId = isMany ? field.through : field.tableId const { tableName: relatedTableName } = breakExternalTableId(tableId) // @ts-ignore const linkPrimaryKey = this.tables[relatedTableName].primary[0] + + const lookupField = isMany ? primaryKey : field.foreignKey + const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName if (!lookupField || !row[lookupField]) { continue } @@ -672,12 +609,9 @@ export class ExternalRequest { }, }) // this is the response from knex if no rows found - const rows: Row[] = - !Array.isArray(response) || response?.[0].read ? [] : response - const storeTo = isManyToMany(field) - ? field.throughFrom || linkPrimaryKey - : fieldName - related[storeTo] = { rows, isMany: isManyToMany(field), tableId } + const rows = !response[0].read ? response : [] + const storeTo = isMany ? 
field.throughFrom || linkPrimaryKey : fieldName + related[storeTo] = { rows, isMany, tableId } } return related } @@ -763,43 +697,24 @@ export class ExternalRequest { continue } for (let row of rows) { - const rowId = generateIdForRow(row, table) - const promise: Promise = isMany - ? removeManyToManyRelationships(rowId, table, colName) - : removeOneToManyRelationships(rowId, table) - if (promise) { - promises.push(promise) + const filters = buildFilters(generateIdForRow(row, table), {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + const op = isMany ? Operation.DELETE : Operation.UPDATE + const body = isMany ? null : { [colName]: null } + promises.push( + getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, op), + body, + filters, + }) + ) } } } await Promise.all(promises) } - async removeRelationshipsToRow(table: Table, rowId: string) { - const row = await this.getRow(table, rowId) - const related = await this.lookupRelations(table._id!, row) - for (let column of Object.values(table.schema)) { - const relationshipColumn = column as RelationshipFieldMetadata - if (!isManyToOne(relationshipColumn)) { - continue - } - const { rows, isMany, tableId } = related[relationshipColumn.fieldName] - const table = this.getTable(tableId)! - await Promise.all( - rows.map(row => { - const rowId = generateIdForRow(row, table) - return isMany - ? removeManyToManyRelationships( - rowId, - table, - relationshipColumn.fieldName - ) - : removeOneToManyRelationships(rowId, table) - }) - ) - } - } - /** * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which * you have column overlap in relationships, e.g. 
we join a few different tables and they all have the @@ -889,7 +804,7 @@ export class ExternalRequest { } let json = { endpoint: { - datasourceId: datasourceId!, + datasourceId, entityId: tableName, operation, }, @@ -911,30 +826,17 @@ export class ExternalRequest { }, } - // remove any relationships that could block deletion - if (operation === Operation.DELETE && id) { - await this.removeRelationshipsToRow(table, generateRowIdField(id)) - } - - // aliasing can be disabled fully if desired - let response - if (env.SQL_ALIASING_DISABLE) { - response = await getDatasourceAndQuery(json) - } else { - const aliasing = new AliasTables(Object.keys(this.tables)) - response = await aliasing.queryWithAliasing(json) - } - - const responseRows = Array.isArray(response) ? response : [] - // handle many-to-many relationships now if we know the ID (could be auto increment) + // can't really use response right now + const response = await getDatasourceAndQuery(json) + // handle many to many relationships now if we know the ID (could be auto increment) if (operation !== Operation.READ) { await this.handleManyRelationships( table._id || "", - responseRows[0], + response[0], processed.manyRelationships ) } - const output = this.outputProcessing(responseRows, table, relationships) + const output = this.outputProcessing(response, table, relationships) // if reading it'll just be an array of rows, return whole thing if (operation === Operation.READ) { return ( diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts deleted file mode 100644 index 9658a0d638..0000000000 --- a/packages/server/src/api/controllers/row/alias.ts +++ /dev/null @@ -1,166 +0,0 @@ -import { - QueryJson, - SearchFilters, - Table, - Row, - DatasourcePlusQueryResponse, -} from "@budibase/types" -import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" -import { cloneDeep } from "lodash" - -class CharSequence { - static alphabet = 
"abcdefghijklmnopqrstuvwxyz" - counters: number[] - - constructor() { - this.counters = [0] - } - - getCharacter(): string { - const char = this.counters.map(i => CharSequence.alphabet[i]).join("") - for (let i = this.counters.length - 1; i >= 0; i--) { - if (this.counters[i] < CharSequence.alphabet.length - 1) { - this.counters[i]++ - return char - } - this.counters[i] = 0 - } - this.counters.unshift(0) - return char - } -} - -export default class AliasTables { - aliases: Record - tableAliases: Record - tableNames: string[] - charSeq: CharSequence - - constructor(tableNames: string[]) { - this.tableNames = tableNames - this.aliases = {} - this.tableAliases = {} - this.charSeq = new CharSequence() - } - - getAlias(tableName: string) { - if (this.aliases[tableName]) { - return this.aliases[tableName] - } - const char = this.charSeq.getCharacter() - this.aliases[tableName] = char - this.tableAliases[char] = tableName - return char - } - - aliasField(field: string) { - const tableNames = this.tableNames - if (field.includes(".")) { - const [tableName, column] = field.split(".") - const foundTableName = tableNames.find(name => { - const idx = tableName.indexOf(name) - if (idx === -1 || idx > 1) { - return - } - return Math.abs(tableName.length - name.length) <= 2 - }) - if (foundTableName) { - const aliasedTableName = tableName.replace( - foundTableName, - this.getAlias(foundTableName) - ) - field = `${aliasedTableName}.${column}` - } - } - return field - } - - reverse(rows: T): T { - const process = (row: Row) => { - const final: Row = {} - for (let [key, value] of Object.entries(row)) { - if (!key.includes(".")) { - final[key] = value - } else { - const [alias, column] = key.split(".") - const tableName = this.tableAliases[alias] || alias - final[`${tableName}.${column}`] = value - } - } - return final - } - if (Array.isArray(rows)) { - return rows.map(row => process(row)) as T - } else { - return process(rows) as T - } - } - - aliasMap(tableNames: (string | 
undefined)[]) { - const map: Record = {} - for (let tableName of tableNames) { - if (tableName) { - map[tableName] = this.getAlias(tableName) - } - } - return map - } - - async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse { - json = cloneDeep(json) - const aliasTable = (table: Table) => ({ - ...table, - name: this.getAlias(table.name), - }) - // run through the query json to update anywhere a table may be used - if (json.resource?.fields) { - json.resource.fields = json.resource.fields.map(field => - this.aliasField(field) - ) - } - if (json.filters) { - for (let [filterKey, filter] of Object.entries(json.filters)) { - if (typeof filter !== "object") { - continue - } - const aliasedFilters: typeof filter = {} - for (let key of Object.keys(filter)) { - aliasedFilters[this.aliasField(key)] = filter[key] - } - json.filters[filterKey as keyof SearchFilters] = aliasedFilters - } - } - if (json.relationships) { - json.relationships = json.relationships.map(relationship => ({ - ...relationship, - aliases: this.aliasMap([ - relationship.through, - relationship.tableName, - json.endpoint.entityId, - ]), - })) - } - if (json.meta?.table) { - json.meta.table = aliasTable(json.meta.table) - } - if (json.meta?.tables) { - const aliasedTables: Record = {} - for (let [tableName, table] of Object.entries(json.meta.tables)) { - aliasedTables[this.getAlias(tableName)] = aliasTable(table) - } - json.meta.tables = aliasedTables - } - // invert and return - const invertedTableAliases: Record = {} - for (let [key, value] of Object.entries(this.tableAliases)) { - invertedTableAliases[value] = key - } - json.tableAliases = invertedTableAliases - const response = await getDatasourceAndQuery(json) - if (Array.isArray(response)) { - return this.reverse(response) - } else { - return response - } - } -} diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts index a7c6df29ea..d0b7e91401 100644 --- a/packages/server/src/environment.ts +++ 
b/packages/server/src/environment.ts @@ -76,16 +76,13 @@ const environment = { DEFAULTS.AUTOMATION_THREAD_TIMEOUT > QUERY_THREAD_TIMEOUT ? DEFAULTS.AUTOMATION_THREAD_TIMEOUT : QUERY_THREAD_TIMEOUT, + SQL_MAX_ROWS: process.env.SQL_MAX_ROWS, BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL, BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD, PLUGINS_DIR: process.env.PLUGINS_DIR || DEFAULTS.PLUGINS_DIR, OPENAI_API_KEY: process.env.OPENAI_API_KEY, MAX_IMPORT_SIZE_MB: process.env.MAX_IMPORT_SIZE_MB, SESSION_EXPIRY_SECONDS: process.env.SESSION_EXPIRY_SECONDS, - // SQL - SQL_MAX_ROWS: process.env.SQL_MAX_ROWS, - SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE, - SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE, // flags ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS, DISABLE_THREADING: process.env.DISABLE_THREADING, diff --git a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts index b906ecbb1b..4f31e37744 100644 --- a/packages/server/src/integrations/base/query.ts +++ b/packages/server/src/integrations/base/query.ts @@ -1,15 +1,11 @@ -import { - QueryJson, - Datasource, - DatasourcePlusQueryResponse, -} from "@budibase/types" +import { QueryJson, Datasource } from "@budibase/types" import { getIntegration } from "../index" import sdk from "../../sdk" export async function makeExternalQuery( datasource: Datasource, json: QueryJson -): DatasourcePlusQueryResponse { +) { datasource = await sdk.datasources.enrich(datasource) const Integration = await getIntegration(datasource.source) // query is the opinionated function diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 6605052598..e52e9dd2ae 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -17,6 +17,7 @@ const envLimit = environment.SQL_MAX_ROWS : null const BASE_LIMIT = envLimit || 5000 +type KnexQuery = Knex.QueryBuilder | 
Knex // these are invalid dates sent by the client, need to convert them to a real max date const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z" const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z" @@ -126,15 +127,10 @@ class InternalBuilder { // right now we only do filters on the specific table being queried addFilters( - query: Knex.QueryBuilder, + query: KnexQuery, filters: SearchFilters | undefined, - tableName: string, - opts: { aliases?: Record; relationship?: boolean } - ): Knex.QueryBuilder { - function getTableName(name: string) { - const alias = opts.aliases?.[name] - return alias || name - } + opts: { relationship?: boolean; tableName?: string } + ): KnexQuery { function iterate( structure: { [key: string]: any }, fn: (key: string, value: any) => void @@ -143,11 +139,10 @@ class InternalBuilder { const updatedKey = dbCore.removeKeyNumbering(key) const isRelationshipField = updatedKey.includes(".") if (!opts.relationship && !isRelationshipField) { - fn(`${getTableName(tableName)}.${updatedKey}`, value) + fn(`${opts.tableName}.${updatedKey}`, value) } if (opts.relationship && isRelationshipField) { - const [filterTableName, property] = updatedKey.split(".") - fn(`${getTableName(filterTableName)}.${property}`, value) + fn(updatedKey, value) } } } @@ -319,7 +314,7 @@ class InternalBuilder { return query } - addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { + addSorting(query: KnexQuery, json: QueryJson): KnexQuery { let { sort, paginate } = json const table = json.meta?.table if (sort && Object.keys(sort || {}).length > 0) { @@ -335,28 +330,16 @@ class InternalBuilder { return query } - tableNameWithSchema( - tableName: string, - opts?: { alias?: string; schema?: string } - ) { - let withSchema = opts?.schema ? 
`${opts.schema}.${tableName}` : tableName - if (opts?.alias) { - withSchema += ` as ${opts.alias}` - } - return withSchema - } - addRelationships( - query: Knex.QueryBuilder, + query: KnexQuery, fromTable: string, relationships: RelationshipsJson[] | undefined, - schema: string | undefined, - aliases?: Record - ): Knex.QueryBuilder { + schema: string | undefined + ): KnexQuery { if (!relationships) { return query } - const tableSets: Record = {} + const tableSets: Record = {} // aggregate into table sets (all the same to tables) for (let relationship of relationships) { const keyObj: { toTable: string; throughTable: string | undefined } = { @@ -375,17 +358,10 @@ class InternalBuilder { } for (let [key, relationships] of Object.entries(tableSets)) { const { toTable, throughTable } = JSON.parse(key) - const toAlias = aliases?.[toTable] || toTable, - throughAlias = aliases?.[throughTable] || throughTable, - fromAlias = aliases?.[fromTable] || fromTable - let toTableWithSchema = this.tableNameWithSchema(toTable, { - alias: toAlias, - schema, - }) - let throughTableWithSchema = this.tableNameWithSchema(throughTable, { - alias: throughAlias, - schema, - }) + const toTableWithSchema = schema ? `${schema}.${toTable}` : toTable + const throughTableWithSchema = schema + ? 
`${schema}.${throughTable}` + : throughTable if (!throughTable) { // @ts-ignore query = query.leftJoin(toTableWithSchema, function () { @@ -393,7 +369,7 @@ class InternalBuilder { const from = relationship.from, to = relationship.to // @ts-ignore - this.orOn(`${fromAlias}.${from}`, "=", `${toAlias}.${to}`) + this.orOn(`${fromTable}.${from}`, "=", `${toTable}.${to}`) } }) } else { @@ -405,9 +381,9 @@ class InternalBuilder { const from = relationship.from // @ts-ignore this.orOn( - `${fromAlias}.${fromPrimary}`, + `${fromTable}.${fromPrimary}`, "=", - `${throughAlias}.${from}` + `${throughTable}.${from}` ) } }) @@ -416,7 +392,7 @@ class InternalBuilder { const toPrimary = relationship.toPrimary const to = relationship.to // @ts-ignore - this.orOn(`${toAlias}.${toPrimary}`, `${throughAlias}.${to}`) + this.orOn(`${toTable}.${toPrimary}`, `${throughTable}.${to}`) } }) } @@ -424,25 +400,12 @@ class InternalBuilder { return query.limit(BASE_LIMIT) } - knexWithAlias( - knex: Knex, - endpoint: QueryJson["endpoint"], - aliases?: QueryJson["tableAliases"] - ): Knex.QueryBuilder { - const tableName = endpoint.entityId - const tableAliased = aliases?.[tableName] - ? 
`${tableName} as ${aliases?.[tableName]}` - : tableName - let query = knex(tableAliased) + create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { + const { endpoint, body } = json + let query: KnexQuery = knex(endpoint.entityId) if (endpoint.schema) { query = query.withSchema(endpoint.schema) } - return query - } - - create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { - const { endpoint, body } = json - let query = this.knexWithAlias(knex, endpoint) const parsedBody = parseBody(body) // make sure no null values in body for creation for (let [key, value] of Object.entries(parsedBody)) { @@ -461,7 +424,10 @@ class InternalBuilder { bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder { const { endpoint, body } = json - let query = this.knexWithAlias(knex, endpoint) + let query: KnexQuery = knex(endpoint.entityId) + if (endpoint.schema) { + query = query.withSchema(endpoint.schema) + } if (!Array.isArray(body)) { return query } @@ -469,10 +435,8 @@ class InternalBuilder { return query.insert(parsedBody) } - read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder { - let { endpoint, resource, filters, paginate, relationships, tableAliases } = - json - + read(knex: Knex, json: QueryJson, limit: number): KnexQuery { + let { endpoint, resource, filters, paginate, relationships } = json const tableName = endpoint.entityId // select all if not specified if (!resource) { @@ -498,20 +462,21 @@ class InternalBuilder { foundLimit = paginate.limit } // start building the query - let query = this.knexWithAlias(knex, endpoint, tableAliases) - query = query.limit(foundLimit) + let query: KnexQuery = knex(tableName).limit(foundLimit) + if (endpoint.schema) { + query = query.withSchema(endpoint.schema) + } if (foundOffset) { query = query.offset(foundOffset) } - query = this.addFilters(query, filters, tableName, { - aliases: tableAliases, - }) + query = this.addFilters(query, filters, { tableName }) // add sorting to 
pre-query query = this.addSorting(query, json) - const alias = tableAliases?.[tableName] || tableName - let preQuery = knex({ - [alias]: query, - } as any).select(selectStatement) as any + // @ts-ignore + let preQuery: KnexQuery = knex({ + // @ts-ignore + [tableName]: query, + }).select(selectStatement) // have to add after as well (this breaks MS-SQL) if (this.client !== SqlClient.MS_SQL) { preQuery = this.addSorting(preQuery, json) @@ -521,22 +486,19 @@ class InternalBuilder { preQuery, tableName, relationships, - endpoint.schema, - tableAliases + endpoint.schema ) - return this.addFilters(query, filters, tableName, { - relationship: true, - aliases: tableAliases, - }) + return this.addFilters(query, filters, { relationship: true }) } update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { - const { endpoint, body, filters, tableAliases } = json - let query = this.knexWithAlias(knex, endpoint, tableAliases) + const { endpoint, body, filters } = json + let query: KnexQuery = knex(endpoint.entityId) + if (endpoint.schema) { + query = query.withSchema(endpoint.schema) + } const parsedBody = parseBody(body) - query = this.addFilters(query, filters, endpoint.entityId, { - aliases: tableAliases, - }) + query = this.addFilters(query, filters, { tableName: endpoint.entityId }) // mysql can't use returning if (opts.disableReturning) { return query.update(parsedBody) @@ -546,11 +508,12 @@ class InternalBuilder { } delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { - const { endpoint, filters, tableAliases } = json - let query = this.knexWithAlias(knex, endpoint, tableAliases) - query = this.addFilters(query, filters, endpoint.entityId, { - aliases: tableAliases, - }) + const { endpoint, filters } = json + let query: KnexQuery = knex(endpoint.entityId) + if (endpoint.schema) { + query = query.withSchema(endpoint.schema) + } + query = this.addFilters(query, filters, { tableName: endpoint.entityId }) // mysql can't use returning 
if (opts.disableReturning) { return query.delete() @@ -584,7 +547,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { query = builder.create(client, json, opts) break case Operation.READ: - query = builder.read(client, json, this.limit) + query = builder.read(client, json, this.limit) as Knex.QueryBuilder break case Operation.UPDATE: query = builder.update(client, json, opts) @@ -683,18 +646,6 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { } return results.length ? results : [{ [operation.toLowerCase()]: true }] } - - log(query: string, values?: any[]) { - if (!environment.SQL_LOGGING_ENABLE) { - return - } - const sqlClient = this.getSqlClient() - let string = `[SQL] [${sqlClient.toUpperCase()}] query="${query}"` - if (values) { - string += ` values="${values.join(", ")}"` - } - console.log(string) - } } export default SqlQueryBuilder diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 32398bde41..58c867ea0b 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -16,7 +16,6 @@ import { Table, TableRequest, TableSourceType, - DatasourcePlusQueryResponse, } from "@budibase/types" import { OAuth2Client } from "google-auth-library" import { @@ -335,7 +334,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { return { tables: externalTables, errors } } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson) { const sheet = json.endpoint.entityId switch (json.endpoint.operation) { case Operation.CREATE: @@ -385,7 +384,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { } try { await this.connect() - await this.client.addSheet({ title: name, headerValues: [name] }) + return await this.client.addSheet({ title: name, headerValues: [name] }) } catch (err) { console.error("Error creating new table in google sheets", err) throw err @@ -451,7 +450,7 @@ class 
GoogleSheetsIntegration implements DatasourcePlus { try { await this.connect() const sheetToDelete = this.client.sheetsByTitle[sheet] - await sheetToDelete.delete() + return await sheetToDelete.delete() } catch (err) { console.error("Error deleting table in google sheets", err) throw err diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index f87e248ac0..d0a06d4476 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -13,7 +13,6 @@ import { SourceName, Schema, TableSourceType, - DatasourcePlusQueryResponse, } from "@budibase/types" import { getSqlQuery, @@ -330,7 +329,6 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { operation === Operation.CREATE ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;` : query.sql - this.log(sql, query.bindings) return await request.query(sql) } catch (err: any) { let readableMessage = getReadableErrorMessage( @@ -494,7 +492,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { return response.recordset || [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson) { const schema = this.config.schema await this.connect() if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) { diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index f629381807..5a206e1a7f 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -12,7 +12,6 @@ import { SourceName, Schema, TableSourceType, - DatasourcePlusQueryResponse, } from "@budibase/types" import { getSqlQuery, @@ -261,7 +260,6 @@ class MySQLIntegration extends Sql implements DatasourcePlus { const bindings = opts?.disableCoercion ? 
baseBindings : bindingTypeCoerce(baseBindings) - this.log(query.sql, bindings) // Node MySQL is callback based, so we must wrap our call in a promise const response = await this.client!.query(query.sql, bindings) return response[0] @@ -381,7 +379,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { return results.length ? results : [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson) { await this.connect() try { const queryFn = (query: any) => diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 08f3058d63..b3aefc578c 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -12,8 +12,6 @@ import { ConnectionInfo, Schema, TableSourceType, - Row, - DatasourcePlusQueryResponse, } from "@budibase/types" import { buildExternalTableId, @@ -370,7 +368,6 @@ class OracleIntegration extends Sql implements DatasourcePlus { const options: ExecuteOptions = { autoCommit: true } const bindings: BindParameters = query.bindings || [] - this.log(query.sql, bindings) return await connection.execute(query.sql, bindings, options) } finally { if (connection) { @@ -422,7 +419,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { : [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson) { const operation = this._operation(json) const input = this._query(json, { disableReturning: true }) as SqlQuery if (Array.isArray(input)) { @@ -446,7 +443,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { if (deletedRows?.rows?.length) { return deletedRows.rows } else if (response.rows?.length) { - return response.rows as Row[] + return response.rows } else { // get the last row that was updated if ( @@ -457,7 +454,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { const lastRow = await this.internalQuery({ sql: 
`SELECT * FROM \"${json.endpoint.entityId}\" WHERE ROWID = '${response.lastRowid}'`, }) - return lastRow.rows as Row[] + return lastRow.rows } else { return [{ [operation.toLowerCase()]: true }] } diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index 635d834761..bea31d4031 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -12,7 +12,6 @@ import { SourceName, Schema, TableSourceType, - DatasourcePlusQueryResponse, } from "@budibase/types" import { getSqlQuery, @@ -269,9 +268,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus { } } try { - const bindings = query.bindings || [] - this.log(query.sql, bindings) - return await client.query(query.sql, bindings) + return await client.query(query.sql, query.bindings || []) } catch (err: any) { await this.closeConnection() let readableMessage = getReadableErrorMessage( @@ -420,7 +417,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus { return response.rows.length ? 
response.rows : [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson) { const operation = this._operation(json).toLowerCase() const input = this._query(json) as SqlQuery if (Array.isArray(input)) { diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index f4eaf2859c..fd705fc27c 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -1,5 +1,3 @@ -import { SqlClient } from "../utils" -import Sql from "../base/sql" import { Operation, QueryJson, @@ -8,6 +6,9 @@ import { FieldType, } from "@budibase/types" +const Sql = require("../base/sql").default +const { SqlClient } = require("../utils") + const TABLE_NAME = "test" function endpoint(table: any, operation: any) { @@ -41,7 +42,7 @@ function generateReadJson({ schema: {}, name: table || TABLE_NAME, primary: ["id"], - } as any, + }, }, } } @@ -518,7 +519,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson({ schema: "production" })) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" on "brands"."brand_id" = "products"."brand_id" limit 
$2`, }) }) @@ -526,7 +527,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson()) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, }) }) @@ -536,7 +537,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`, + sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" on "products"."product_id" = "stocks"."product_id" limit $2`, }) }) @@ -732,7 +733,7 @@ 
describe("SQL query builder", () => { }, meta: { table: oldTable, - tables: { [oldTable.name]: oldTable }, + tables: [oldTable], renamed: { old: "name", updated: "first_name", diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts deleted file mode 100644 index 9b3f6a1b38..0000000000 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ /dev/null @@ -1,204 +0,0 @@ -import { QueryJson } from "@budibase/types" -import { join } from "path" -import Sql from "../base/sql" -import { SqlClient } from "../utils" -import AliasTables from "../../api/controllers/row/alias" -import { generator } from "@budibase/backend-core/tests" - -function multiline(sql: string) { - return sql.replace(/\n/g, "").replace(/ +/g, " ") -} - -describe("Captures of real examples", () => { - const limit = 5000 - const relationshipLimit = 100 - - function getJson(name: string): QueryJson { - return require(join(__dirname, "sqlQueryJson", name)) as QueryJson - } - - describe("create", () => { - it("should create a row with relationships", () => { - const queryJson = getJson("createWithRelationships.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: ["A Street", 34, "London", "A", "B", "designer", 1990], - sql: multiline(`insert into "persons" ("address", "age", "city", "firstname", "lastname", "type", "year") - values ($1, $2, $3, $4, $5, $6, $7) returning *`), - }) - }) - }) - - describe("read", () => { - it("should handle basic retrieval with relationships", () => { - const queryJson = getJson("basicFetchWithRelationships.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [relationshipLimit, limit], - sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", - "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", 
"a"."city" as "a.city", - "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", - "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", - "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "persons" as "a" order by "a"."firstname" asc limit $1) as "a" - left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" - order by "a"."firstname" asc limit $2`), - }) - }) - - it("should handle filtering by relationship", () => { - const queryJson = getJson("filterByRelationship.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [relationshipLimit, "assembling", limit], - sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", - "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" - left join "products_tasks" as "c" on "a"."productid" = "c"."productid" - left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 - order by "a"."productname" asc limit $3`), - }) - }) - - it("should handle fetching many to many relationships", () => { - const queryJson = getJson("fetchManyToMany.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [relationshipLimit, limit], - sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", - "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "products" as "a" order by "a"."productname" asc 
limit $1) as "a" - left join "products_tasks" as "c" on "a"."productid" = "c"."productid" - left join "tasks" as "b" on "b"."taskid" = "c"."taskid" - order by "a"."productname" asc limit $2`), - }) - }) - - it("should handle enrichment of rows", () => { - const queryJson = getJson("enrichRelationship.json") - const filters = queryJson.filters?.oneOf?.taskid as number[] - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [...filters, limit, limit], - sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", - "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", - "b"."productname" as "b.productname", "b"."productid" as "b.productid" - from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" - left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" - left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`), - }) - }) - - it("should manage query with many relationship filters", () => { - const queryJson = getJson("manyRelationshipFilters.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - const filters = queryJson.filters - const notEqualsValue = Object.values(filters?.notEqual!)[0] - const rangeValue = Object.values(filters?.range!)[0] - const equalValue = Object.values(filters?.equal!)[0] - - expect(query).toEqual({ - bindings: [ - notEqualsValue, - relationshipLimit, - rangeValue.low, - rangeValue.high, - equalValue, - limit, - ], - sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", - "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", - "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", - "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", - "c"."city" as "c.city", 
"c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", - "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", - "c"."city" as "c.city", "c"."lastname" as "c.lastname" - from (select * from "tasks" as "a" where not "a"."completed" = $1 - order by "a"."taskname" asc limit $2) as "a" - left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid" - left join "products" as "b" on "b"."productid" = "d"."productid" - left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid" - where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc limit $6`), - }) - }) - }) - - describe("update", () => { - it("should handle performing a simple update", () => { - const queryJson = getJson("updateSimple.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], - sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, - "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`), - }) - }) - - it("should handle performing an update of relationships", () => { - const queryJson = getJson("updateRelationship.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], - sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, - "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`), - }) - }) - }) - - describe("delete", () => { - it("should handle deleting with relationships", () => { - const queryJson = getJson("deleteSimple.json") - let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) - expect(query).toEqual({ - bindings: ["ddd", ""], - sql: multiline(`delete 
from "compositetable" as "a" where "a"."keypartone" = $1 and "a"."keyparttwo" = $2 - returning "a"."keyparttwo" as "a.keyparttwo", "a"."keypartone" as "a.keypartone", "a"."name" as "a.name"`), - }) - }) - }) - - describe("check max character aliasing", () => { - it("should handle over 'z' max character alias", () => { - const tableNames = [] - for (let i = 0; i < 100; i++) { - tableNames.push(generator.guid()) - } - const aliasing = new AliasTables(tableNames) - let alias: string = "" - for (let table of tableNames) { - alias = aliasing.getAlias(table) - } - expect(alias).toEqual("cv") - }) - }) - - describe("check some edge cases", () => { - const tableNames = ["hello", "world"] - - it("should handle quoted table names", () => { - const aliasing = new AliasTables(tableNames) - const aliased = aliasing.aliasField(`"hello"."field"`) - expect(aliased).toEqual(`"a"."field"`) - }) - - it("should handle quoted table names with graves", () => { - const aliasing = new AliasTables(tableNames) - const aliased = aliasing.aliasField("`hello`.`world`") - expect(aliased).toEqual("`a`.`world`") - }) - - it("should handle table names in table names correctly", () => { - const tableNames = ["he", "hell", "hello"] - const aliasing = new AliasTables(tableNames) - const aliased1 = aliasing.aliasField("`he`.`world`") - const aliased2 = aliasing.aliasField("`hell`.`world`") - const aliased3 = aliasing.aliasField("`hello`.`world`") - expect(aliased1).toEqual("`a`.`world`") - expect(aliased2).toEqual("`b`.`world`") - expect(aliased3).toEqual("`c`.`world`") - }) - }) -}) diff --git a/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json b/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json deleted file mode 100644 index 3445f5fe67..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json +++ /dev/null @@ -1,183 +0,0 @@ -{ - "endpoint": { - "datasourceId": 
"datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "entityId": "persons", - "operation": "READ" - }, - "resource": { - "fields": [ - "a.year", - "a.firstname", - "a.personid", - "a.address", - "a.age", - "a.type", - "a.city", - "a.lastname", - "b.executorid", - "b.taskname", - "b.taskid", - "b.completed", - "b.qaid", - "b.executorid", - "b.taskname", - "b.taskid", - "b.completed", - "b.qaid" - ] - }, - "filters": {}, - "sort": { - "firstname": { - "direction": "ASCENDING" - } - }, - "paginate": { - "limit": 100, - "page": 1 - }, - "relationships": [ - { - "tableName": "tasks", - "column": "QA", - "from": "personid", - "to": "qaid", - "aliases": { - "tasks": "b", - "persons": "a" - } - }, - { - "tableName": "tasks", - "column": "executor", - "from": "personid", - "to": "executorid", - "aliases": { - "tasks": "b", - "persons": "a" - } - } - ], - "extra": { - "idFilter": {} - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", - "primary": [ - "personid" - ], - "name": "a", - "schema": { - "year": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "year", - "constraints": { - "presence": false - } - }, - "firstname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "firstname", - "constraints": { - "presence": false - } - }, - "personid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "personid", - "constraints": { - "presence": false - } - }, - "address": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "address", - "constraints": { - "presence": false - } - }, - "age": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "age", - "constraints": { - "presence": false - } - }, - "type": { - "type": "options", - "externalType": "USER-DEFINED", - "autocolumn": false, - "name": "type", - "constraints": { - "presence": 
false, - "inclusion": [ - "support", - "designer", - "programmer", - "qa" - ] - } - }, - "city": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "city", - "constraints": { - "presence": false - } - }, - "lastname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "lastname", - "constraints": { - "presence": false - } - }, - "QA": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "QA", - "relationshipType": "many-to-one", - "fieldName": "qaid", - "type": "link", - "main": true, - "_id": "ccb68481c80c34217a4540a2c6c27fe46", - "foreignKey": "personid" - }, - "executor": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "executor", - "relationshipType": "many-to-one", - "fieldName": "executorid", - "type": "link", - "main": true, - "_id": "c89530b9770d94bec851e062b5cff3001", - "foreignKey": "personid", - "tableName": "persons" - } - }, - "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "sourceType": "external", - "primaryDisplay": "firstname", - "views": {} - } - }, - "tableAliases": { - "persons": "a", - "tasks": "b" - } -} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json b/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json deleted file mode 100644 index 20331b949a..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json +++ /dev/null @@ -1,173 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "entityId": "persons", - "operation": "CREATE" - }, - "resource": { - "fields": [ - "a.year", - "a.firstname", - "a.personid", - "a.address", - "a.age", - "a.type", - "a.city", - "a.lastname" - ] - }, - "filters": {}, - "relationships": [ - { - "tableName": "tasks", - "column": "QA", - "from": "personid", - "to": 
"qaid", - "aliases": { - "tasks": "b", - "persons": "a" - } - }, - { - "tableName": "tasks", - "column": "executor", - "from": "personid", - "to": "executorid", - "aliases": { - "tasks": "b", - "persons": "a" - } - } - ], - "body": { - "year": 1990, - "firstname": "A", - "address": "A Street", - "age": 34, - "type": "designer", - "city": "London", - "lastname": "B" - }, - "extra": { - "idFilter": {} - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", - "primary": [ - "personid" - ], - "name": "a", - "schema": { - "year": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "year", - "constraints": { - "presence": false - } - }, - "firstname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "firstname", - "constraints": { - "presence": false - } - }, - "personid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "personid", - "constraints": { - "presence": false - } - }, - "address": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "address", - "constraints": { - "presence": false - } - }, - "age": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "age", - "constraints": { - "presence": false - } - }, - "type": { - "type": "options", - "externalType": "USER-DEFINED", - "autocolumn": false, - "name": "type", - "constraints": { - "presence": false, - "inclusion": [ - "support", - "designer", - "programmer", - "qa" - ] - } - }, - "city": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "city", - "constraints": { - "presence": false - } - }, - "lastname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "lastname", - "constraints": { - "presence": false - } - }, - "QA": { - "tableId": 
"datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "QA", - "relationshipType": "many-to-one", - "fieldName": "qaid", - "type": "link", - "main": true, - "_id": "ccb68481c80c34217a4540a2c6c27fe46", - "foreignKey": "personid" - }, - "executor": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "executor", - "relationshipType": "many-to-one", - "fieldName": "executorid", - "type": "link", - "main": true, - "_id": "c89530b9770d94bec851e062b5cff3001", - "foreignKey": "personid", - "tableName": "persons" - } - }, - "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "sourceType": "external", - "primaryDisplay": "firstname", - "views": {} - } - }, - "tableAliases": { - "persons": "a", - "tasks": "b" - } -} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json b/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json deleted file mode 100644 index 2266b8c8be..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "entityId": "compositetable", - "operation": "DELETE" - }, - "resource": { - "fields": [ - "a.keyparttwo", - "a.keypartone", - "a.name" - ] - }, - "filters": { - "equal": { - "keypartone": "ddd", - "keyparttwo": "" - } - }, - "relationships": [], - "extra": { - "idFilter": { - "equal": { - "keypartone": "ddd", - "keyparttwo": "" - } - } - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__compositetable", - "primary": [ - "keypartone", - "keyparttwo" - ], - "name": "a", - "schema": { - "keyparttwo": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "keyparttwo", - "constraints": { - "presence": true - } - }, - "keypartone": { - "type": "string", - "externalType": "character 
varying", - "autocolumn": false, - "name": "keypartone", - "constraints": { - "presence": true - } - }, - "name": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "name", - "constraints": { - "presence": false - } - } - }, - "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "sourceType": "external", - "primaryDisplay": "keypartone" - } - }, - "tableAliases": { - "compositetable": "a" - } -} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json deleted file mode 100644 index ee658aed18..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json +++ /dev/null @@ -1,123 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", - "entityId": "tasks", - "operation": "READ" - }, - "resource": { - "fields": [ - "a.executorid", - "a.taskname", - "a.taskid", - "a.completed", - "a.qaid", - "b.productname", - "b.productid" - ] - }, - "filters": { - "oneOf": { - "taskid": [ - 1, - 2 - ] - } - }, - "relationships": [ - { - "tableName": "products", - "column": "products", - "through": "products_tasks", - "from": "taskid", - "to": "productid", - "fromPrimary": "taskid", - "toPrimary": "productid", - "aliases": { - "products_tasks": "c", - "products": "b", - "tasks": "a" - } - } - ], - "extra": { - "idFilter": {} - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", - "primary": [ - "taskid" - ], - "name": "a", - "schema": { - "executorid": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "executorid", - "constraints": { - "presence": false - } - }, - "taskname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "taskname", - "constraints": { - "presence": false - } - }, 
- "taskid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "taskid", - "constraints": { - "presence": false - } - }, - "completed": { - "type": "boolean", - "externalType": "boolean", - "autocolumn": false, - "name": "completed", - "constraints": { - "presence": false - } - }, - "qaid": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "qaid", - "constraints": { - "presence": false - } - }, - "products": { - "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", - "name": "products", - "relationshipType": "many-to-many", - "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", - "type": "link", - "_id": "c3b91d00cd36c4cc1a347794725b9adbd", - "fieldName": "productid", - "throughFrom": "productid", - "throughTo": "taskid" - } - }, - "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", - "sourceType": "external", - "primaryDisplay": "taskname", - "sql": true, - "views": {} - } - }, - "tableAliases": { - "tasks": "a", - "products": "b", - "products_tasks": "c" - } -} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json b/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json deleted file mode 100644 index 682ebaab2d..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json +++ /dev/null @@ -1,109 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", - "entityId": "products", - "operation": "READ" - }, - "resource": { - "fields": [ - "a.productname", - "a.productid", - "b.executorid", - "b.taskname", - "b.taskid", - "b.completed", - "b.qaid" - ] - }, - "filters": { - "string": {}, - "fuzzy": {}, - "range": {}, - "equal": {}, - "notEqual": {}, - "empty": {}, - "notEmpty": {}, - "contains": {}, - "notContains": {}, - "oneOf": {}, - "containsAny": {} - }, - "sort": { - "productname": { - "direction": 
"ASCENDING" - } - }, - "paginate": { - "limit": 100, - "page": 1 - }, - "relationships": [ - { - "tableName": "tasks", - "column": "tasks", - "through": "products_tasks", - "from": "productid", - "to": "taskid", - "fromPrimary": "productid", - "toPrimary": "taskid", - "aliases": { - "products_tasks": "c", - "tasks": "b", - "products": "a" - } - } - ], - "extra": { - "idFilter": {} - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", - "primary": [ - "productid" - ], - "name": "a", - "schema": { - "productname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "productname", - "constraints": { - "presence": false - } - }, - "productid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "productid", - "constraints": { - "presence": false - } - }, - "tasks": { - "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", - "name": "tasks", - "relationshipType": "many-to-many", - "fieldName": "taskid", - "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", - "throughFrom": "taskid", - "throughTo": "productid", - "type": "link", - "main": true, - "_id": "c3b91d00cd36c4cc1a347794725b9adbd" - } - }, - "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", - "sourceType": "external", - "primaryDisplay": "productname" - } - }, - "tableAliases": { - "products": "a", - "tasks": "b", - "products_tasks": "c" - } -} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json deleted file mode 100644 index eb1025f382..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "entityId": "products", - 
"operation": "READ" - }, - "resource": { - "fields": [ - "a.productname", - "a.productid", - "b.executorid", - "b.taskname", - "b.taskid", - "b.completed", - "b.qaid" - ] - }, - "filters": { - "equal": { - "1:tasks.taskname": "assembling" - }, - "onEmptyFilter": "all" - }, - "sort": { - "productname": { - "direction": "ASCENDING" - } - }, - "paginate": { - "limit": 100, - "page": 1 - }, - "relationships": [ - { - "tableName": "tasks", - "column": "tasks", - "through": "products_tasks", - "from": "productid", - "to": "taskid", - "fromPrimary": "productid", - "toPrimary": "taskid" - } - ], - "tableAliases": { - "products_tasks": "c", - "tasks": "b", - "products": "a" - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products", - "primary": [ - "productid" - ], - "name": "a", - "schema": { - "productname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "productname", - "constraints": { - "presence": false - } - }, - "productid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "productid", - "constraints": { - "presence": false - } - }, - "tasks": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "tasks", - "relationshipType": "many-to-many", - "fieldName": "taskid", - "through": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products_tasks", - "throughFrom": "taskid", - "throughTo": "productid", - "type": "link", - "main": true, - "_id": "ca6862d9ba09146dd8a68e3b5b7055a09" - } - }, - "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "sourceType": "external", - "primaryDisplay": "productname" - } - } -} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json b/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json deleted file mode 100644 index afa0889450..0000000000 --- 
a/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json +++ /dev/null @@ -1,202 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", - "entityId": "tasks", - "operation": "READ" - }, - "resource": { - "fields": [ - "a.executorid", - "a.taskname", - "a.taskid", - "a.completed", - "a.qaid", - "b.productname", - "b.productid", - "c.year", - "c.firstname", - "c.personid", - "c.address", - "c.age", - "c.type", - "c.city", - "c.lastname", - "c.year", - "c.firstname", - "c.personid", - "c.address", - "c.age", - "c.type", - "c.city", - "c.lastname" - ] - }, - "filters": { - "string": {}, - "fuzzy": {}, - "range": { - "1:persons.year": { - "low": 1990, - "high": 2147483647 - } - }, - "equal": { - "2:products.productname": "Computers" - }, - "notEqual": { - "3:completed": true - }, - "empty": {}, - "notEmpty": {}, - "contains": {}, - "notContains": {}, - "oneOf": {}, - "containsAny": {}, - "onEmptyFilter": "all" - }, - "sort": { - "taskname": { - "direction": "ASCENDING" - } - }, - "paginate": { - "limit": 100, - "page": 1 - }, - "relationships": [ - { - "tableName": "products", - "column": "products", - "through": "products_tasks", - "from": "taskid", - "to": "productid", - "fromPrimary": "taskid", - "toPrimary": "productid", - "aliases": { - "products_tasks": "d", - "products": "b", - "tasks": "a" - } - }, - { - "tableName": "persons", - "column": "tasksToExecute", - "from": "executorid", - "to": "personid", - "aliases": { - "persons": "c", - "tasks": "a" - } - }, - { - "tableName": "persons", - "column": "tasksToQA", - "from": "qaid", - "to": "personid", - "aliases": { - "persons": "c", - "tasks": "a" - } - } - ], - "extra": { - "idFilter": {} - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", - "primary": [ - "taskid" - ], - "name": "a", - "schema": { - "executorid": { - "type": "number", - "externalType": "integer", - "name": 
"executorid", - "constraints": { - "presence": false - }, - "autocolumn": true, - "autoReason": "foreign_key" - }, - "taskname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "taskname", - "constraints": { - "presence": false - } - }, - "taskid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "taskid", - "constraints": { - "presence": false - } - }, - "completed": { - "type": "boolean", - "externalType": "boolean", - "autocolumn": false, - "name": "completed", - "constraints": { - "presence": false - } - }, - "qaid": { - "type": "number", - "externalType": "integer", - "name": "qaid", - "constraints": { - "presence": false - } - }, - "products": { - "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", - "name": "products", - "relationshipType": "many-to-many", - "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", - "type": "link", - "_id": "c3b91d00cd36c4cc1a347794725b9adbd", - "fieldName": "productid", - "throughFrom": "productid", - "throughTo": "taskid" - }, - "tasksToExecute": { - "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons", - "name": "tasksToExecute", - "relationshipType": "one-to-many", - "type": "link", - "_id": "c0f440590bda04f28846242156c1dd60b", - "foreignKey": "executorid", - "fieldName": "personid" - }, - "tasksToQA": { - "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons", - "name": "tasksToQA", - "relationshipType": "one-to-many", - "type": "link", - "_id": "c5fdf453a0ba743d58e29491d174c974b", - "foreignKey": "qaid", - "fieldName": "personid" - } - }, - "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", - "sourceType": "external", - "primaryDisplay": "taskname", - "sql": true, - "views": {} - } - }, - "tableAliases": { - "tasks": "a", - "products": "b", - "persons": "c", - "products_tasks": "d" - } -} \ No newline at end of file diff --git 
a/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json deleted file mode 100644 index 01e795bd6c..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json +++ /dev/null @@ -1,181 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "entityId": "persons", - "operation": "UPDATE" - }, - "resource": { - "fields": [ - "a.year", - "a.firstname", - "a.personid", - "a.address", - "a.age", - "a.type", - "a.city", - "a.lastname" - ] - }, - "filters": { - "equal": { - "personid": 5 - } - }, - "relationships": [ - { - "tableName": "tasks", - "column": "QA", - "from": "personid", - "to": "qaid", - "aliases": { - "tasks": "b", - "persons": "a" - } - }, - { - "tableName": "tasks", - "column": "executor", - "from": "personid", - "to": "executorid", - "aliases": { - "tasks": "b", - "persons": "a" - } - } - ], - "body": { - "year": 1990, - "firstname": "C", - "address": "A Street", - "age": 34, - "type": "designer", - "city": "London", - "lastname": "B" - }, - "extra": { - "idFilter": { - "equal": { - "personid": 5 - } - } - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", - "primary": [ - "personid" - ], - "name": "a", - "schema": { - "year": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "year", - "constraints": { - "presence": false - } - }, - "firstname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "firstname", - "constraints": { - "presence": false - } - }, - "personid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "personid", - "constraints": { - "presence": false - } - }, - "address": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "address", - "constraints": { 
- "presence": false - } - }, - "age": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "age", - "constraints": { - "presence": false - } - }, - "type": { - "type": "options", - "externalType": "USER-DEFINED", - "autocolumn": false, - "name": "type", - "constraints": { - "presence": false, - "inclusion": [ - "support", - "designer", - "programmer", - "qa" - ] - } - }, - "city": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "city", - "constraints": { - "presence": false - } - }, - "lastname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "lastname", - "constraints": { - "presence": false - } - }, - "QA": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "QA", - "relationshipType": "many-to-one", - "fieldName": "qaid", - "type": "link", - "main": true, - "_id": "ccb68481c80c34217a4540a2c6c27fe46", - "foreignKey": "personid" - }, - "executor": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "executor", - "relationshipType": "many-to-one", - "fieldName": "executorid", - "type": "link", - "main": true, - "_id": "c89530b9770d94bec851e062b5cff3001", - "foreignKey": "personid", - "tableName": "persons" - } - }, - "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "sourceType": "external", - "primaryDisplay": "firstname", - "views": {} - } - }, - "tableAliases": { - "persons": "a", - "tasks": "b" - } -} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json b/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json deleted file mode 100644 index 01e795bd6c..0000000000 --- a/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json +++ /dev/null @@ -1,181 +0,0 @@ -{ - "endpoint": { - "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "entityId": "persons", - 
"operation": "UPDATE" - }, - "resource": { - "fields": [ - "a.year", - "a.firstname", - "a.personid", - "a.address", - "a.age", - "a.type", - "a.city", - "a.lastname" - ] - }, - "filters": { - "equal": { - "personid": 5 - } - }, - "relationships": [ - { - "tableName": "tasks", - "column": "QA", - "from": "personid", - "to": "qaid", - "aliases": { - "tasks": "b", - "persons": "a" - } - }, - { - "tableName": "tasks", - "column": "executor", - "from": "personid", - "to": "executorid", - "aliases": { - "tasks": "b", - "persons": "a" - } - } - ], - "body": { - "year": 1990, - "firstname": "C", - "address": "A Street", - "age": 34, - "type": "designer", - "city": "London", - "lastname": "B" - }, - "extra": { - "idFilter": { - "equal": { - "personid": 5 - } - } - }, - "meta": { - "table": { - "type": "table", - "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", - "primary": [ - "personid" - ], - "name": "a", - "schema": { - "year": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "year", - "constraints": { - "presence": false - } - }, - "firstname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "firstname", - "constraints": { - "presence": false - } - }, - "personid": { - "type": "number", - "externalType": "integer", - "autocolumn": true, - "name": "personid", - "constraints": { - "presence": false - } - }, - "address": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "address", - "constraints": { - "presence": false - } - }, - "age": { - "type": "number", - "externalType": "integer", - "autocolumn": false, - "name": "age", - "constraints": { - "presence": false - } - }, - "type": { - "type": "options", - "externalType": "USER-DEFINED", - "autocolumn": false, - "name": "type", - "constraints": { - "presence": false, - "inclusion": [ - "support", - "designer", - "programmer", - "qa" - ] - } - }, - "city": { - "type": "string", - 
"externalType": "character varying", - "autocolumn": false, - "name": "city", - "constraints": { - "presence": false - } - }, - "lastname": { - "type": "string", - "externalType": "character varying", - "autocolumn": false, - "name": "lastname", - "constraints": { - "presence": false - } - }, - "QA": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "QA", - "relationshipType": "many-to-one", - "fieldName": "qaid", - "type": "link", - "main": true, - "_id": "ccb68481c80c34217a4540a2c6c27fe46", - "foreignKey": "personid" - }, - "executor": { - "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", - "name": "executor", - "relationshipType": "many-to-one", - "fieldName": "executorid", - "type": "link", - "main": true, - "_id": "c89530b9770d94bec851e062b5cff3001", - "foreignKey": "personid", - "tableName": "persons" - } - }, - "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", - "sourceType": "external", - "primaryDisplay": "firstname", - "views": {} - } - }, - "tableAliases": { - "persons": "a", - "tasks": "b" - } -} \ No newline at end of file diff --git a/packages/server/src/sdk/app/datasources/plus.ts b/packages/server/src/sdk/app/datasources/plus.ts index 31ec51c728..04cd508863 100644 --- a/packages/server/src/sdk/app/datasources/plus.ts +++ b/packages/server/src/sdk/app/datasources/plus.ts @@ -3,33 +3,12 @@ import { DatasourcePlus, IntegrationBase, Schema, - Table, } from "@budibase/types" import * as datasources from "./datasources" import tableSdk from "../tables" import { getIntegration } from "../../../integrations" import { context } from "@budibase/backend-core" -function checkForSchemaErrors(schema: Record) { - const errors: Record = {} - for (let [tableName, table] of Object.entries(schema)) { - if (tableName.includes(".")) { - errors[tableName] = "Table names containing dots are not supported." 
- } else { - const columnNames = Object.keys(table.schema) - const invalidColumnName = columnNames.find(columnName => - columnName.includes(".") - ) - if (invalidColumnName) { - errors[ - tableName - ] = `Column '${invalidColumnName}' is not supported as it contains a dot.` - } - } - } - return errors -} - export async function buildFilteredSchema( datasource: Datasource, filter?: string[] @@ -51,19 +30,16 @@ export async function buildFilteredSchema( filteredSchema.errors[key] = schema.errors[key] } } - - return { - ...filteredSchema, - errors: { - ...filteredSchema.errors, - ...checkForSchemaErrors(filteredSchema.tables), - }, - } + return filteredSchema } async function buildSchemaHelper(datasource: Datasource): Promise { const connector = (await getConnector(datasource)) as DatasourcePlus - return await connector.buildSchema(datasource._id!, datasource.entities!) + const externalSchema = await connector.buildSchema( + datasource._id!, + datasource.entities! + ) + return externalSchema } export async function getConnector( diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index a8052462a9..0ff85f40ac 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -1,21 +1,12 @@ import cloneDeep from "lodash/cloneDeep" import validateJs from "validate.js" -import { - FieldType, - QueryJson, - Row, - Table, - TableSchema, - DatasourcePlusQueryResponse, -} from "@budibase/types" +import { FieldType, Row, Table, TableSchema } from "@budibase/types" import { makeExternalQuery } from "../../../integrations/base/query" import { Format } from "../../../api/controllers/view/exporters" import sdk from "../.." 
import { isRelationshipColumn } from "../../../db/utils" -export async function getDatasourceAndQuery( - json: QueryJson -): DatasourcePlusQueryResponse { +export async function getDatasourceAndQuery(json: any) { const datasourceId = json.endpoint.datasourceId const datasource = await sdk.datasources.get(datasourceId) return makeExternalQuery(datasource, json) diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts index 4cddb0c09e..7a335eb3b9 100644 --- a/packages/types/src/sdk/datasources.ts +++ b/packages/types/src/sdk/datasources.ts @@ -1,5 +1,4 @@ -import { Table, Row } from "../documents" -import { QueryJson } from "./search" +import { Table } from "../documents" export const PASSWORD_REPLACEMENT = "--secret-value--" @@ -181,24 +180,11 @@ export interface Schema { errors: Record } -// return these when an operation occurred but we got no response -enum DSPlusOperation { - CREATE = "create", - READ = "read", - UPDATE = "update", - DELETE = "delete", -} - -export type DatasourcePlusQueryResponse = Promise< - Row[] | Record[] | void -> - export interface DatasourcePlus extends IntegrationBase { // if the datasource supports the use of bindings directly (to protect against SQL injection) // this returns the format of the identifier getBindingIdentifier(): string getStringConcat(parts: string[]): string - query(json: QueryJson): DatasourcePlusQueryResponse buildSchema( datasourceId: string, entities: Record diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts index 67c344d845..35fd148c05 100644 --- a/packages/types/src/sdk/search.ts +++ b/packages/types/src/sdk/search.ts @@ -94,7 +94,6 @@ export interface QueryJson { idFilter?: SearchFilters } relationships?: RelationshipsJson[] - tableAliases?: Record } export interface SqlQuery { diff --git a/yarn.lock b/yarn.lock index 260ae3870a..2c367e469b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1097,7 +1097,7 @@ "@babel/highlight@^7.23.4": version "7.23.4" 
resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" - integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A== + integrity "sha1-7arfTYIy4alhQy23hQkSB+rQYhs= sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==" dependencies: "@babel/helper-validator-identifier" "^7.22.20" chalk "^2.4.2" @@ -1988,14 +1988,14 @@ resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== -"@babel/runtime@^7.10.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.21.0": +"@babel/runtime@^7.10.5": version "7.23.9" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.9.tgz#47791a15e4603bb5f905bc0753801cf21d6345f7" integrity sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw== dependencies: regenerator-runtime "^0.14.0" -"@babel/runtime@^7.12.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": +"@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.15.4", "@babel/runtime@^7.21.0", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": version "7.23.8" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.8.tgz#8ee6fe1ac47add7122902f257b8ddf55c898f650" integrity sha512-Y7KbAP984rn1VGMbGqKmBLio9V7y5Je9GvU4rQPCPinCyNfUcToxIXl06d59URp/F3LwinvODxab5N/G6qggkw== @@ -3419,9 +3419,9 @@ tar "^6.1.11" "@mongodb-js/saslprep@^1.1.0": - version "1.1.4" - resolved "https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.1.4.tgz#24ec1c4915a65f5c506bb88c081731450d91bb1c" - integrity sha512-8zJ8N1x51xo9hwPh6AWnKdLGEC5N3lDa6kms1YHmFBoRhTpJR6HG8wWk0td1MVCu9cD4YBrvjZEtd5Obw0Fbnw== + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.1.1.tgz#9a6c2516bc9188672c4d953ec99760ba49970da7" + integrity sha512-t7c5K033joZZMspnHg/gWPE4kandgc2OxE74aYOtGKfgB9VPuVJPix0H6fhmm2erj5PBJ21mqcx34lpIGtUCsQ== dependencies: sparse-bitfield "^3.0.3" @@ -4012,70 +4012,70 @@ estree-walker "^2.0.2" picomatch "^2.3.1" -"@rollup/rollup-android-arm-eabi@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.12.0.tgz#38c3abd1955a3c21d492af6b1a1dca4bb1d894d6" - integrity sha512-+ac02NL/2TCKRrJu2wffk1kZ+RyqxVUlbjSagNgPm94frxtr+XDL12E5Ll1enWskLrtrZ2r8L3wED1orIibV/w== +"@rollup/rollup-android-arm-eabi@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.10.0.tgz#786eaf6372be2fc209cc957c14aa9d3ff8fefe6a" + integrity sha512-/MeDQmcD96nVoRumKUljsYOLqfv1YFJps+0pTrb2Z9Nl/w5qNUysMaWQsrd1mvAlNT4yza1iVyIu4Q4AgF6V3A== -"@rollup/rollup-android-arm64@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.12.0.tgz#3822e929f415627609e53b11cec9a4be806de0e2" - integrity sha512-OBqcX2BMe6nvjQ0Nyp7cC90cnumt8PXmO7Dp3gfAju/6YwG0Tj74z1vKrfRz7qAv23nBcYM8BCbhrsWqO7PzQQ== +"@rollup/rollup-android-arm64@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.10.0.tgz#0114a042fd6396f4f3233e6171fd5b61a36ed539" + integrity sha512-lvu0jK97mZDJdpZKDnZI93I0Om8lSDaiPx3OiCk0RXn3E8CMPJNS/wxjAvSJJzhhZpfjXsjLWL8LnS6qET4VNQ== -"@rollup/rollup-darwin-arm64@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.12.0.tgz#6c082de71f481f57df6cfa3701ab2a7afde96f69" - integrity sha512-X64tZd8dRE/QTrBIEs63kaOBG0b5GVEd3ccoLtyf6IdXtHdh8h+I56C2yC3PtC9Ucnv0CpNFJLqKFVgCYe0lOQ== +"@rollup/rollup-darwin-arm64@4.10.0": + version "4.10.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.10.0.tgz#944d007c1dc71a8c9174d11671c0c34bd74a2c81" + integrity sha512-uFpayx8I8tyOvDkD7X6n0PriDRWxcqEjqgtlxnUA/G9oS93ur9aZ8c8BEpzFmsed1TH5WZNG5IONB8IiW90TQg== -"@rollup/rollup-darwin-x64@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.12.0.tgz#c34ca0d31f3c46a22c9afa0e944403eea0edcfd8" - integrity sha512-cc71KUZoVbUJmGP2cOuiZ9HSOP14AzBAThn3OU+9LcA1+IUqswJyR1cAJj3Mg55HbjZP6OLAIscbQsQLrpgTOg== +"@rollup/rollup-darwin-x64@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.10.0.tgz#1d08cb4521a058d7736ab1c7fe988daf034a2598" + integrity sha512-nIdCX03qFKoR/MwQegQBK+qZoSpO3LESurVAC6s6jazLA1Mpmgzo3Nj3H1vydXp/JM29bkCiuF7tDuToj4+U9Q== -"@rollup/rollup-linux-arm-gnueabihf@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.12.0.tgz#48e899c1e438629c072889b824a98787a7c2362d" - integrity sha512-a6w/Y3hyyO6GlpKL2xJ4IOh/7d+APaqLYdMf86xnczU3nurFTaVN9s9jOXQg97BE4nYm/7Ga51rjec5nfRdrvA== +"@rollup/rollup-linux-arm-gnueabihf@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.10.0.tgz#4763eec1591bf0e99a54ad3d1ef39cb268ed7b19" + integrity sha512-Fz7a+y5sYhYZMQFRkOyCs4PLhICAnxRX/GnWYReaAoruUzuRtcf+Qnw+T0CoAWbHCuz2gBUwmWnUgQ67fb3FYw== -"@rollup/rollup-linux-arm64-gnu@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.12.0.tgz#788c2698a119dc229062d40da6ada8a090a73a68" - integrity sha512-0fZBq27b+D7Ar5CQMofVN8sggOVhEtzFUwOwPppQt0k+VR+7UHMZZY4y+64WJ06XOhBTKXtQB/Sv0NwQMXyNAA== +"@rollup/rollup-linux-arm64-gnu@4.10.0": + version "4.10.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.10.0.tgz#e6dae70c53ace836973526c41803b877cffc6f7b" + integrity sha512-yPtF9jIix88orwfTi0lJiqINnlWo6p93MtZEoaehZnmCzEmLL0eqjA3eGVeyQhMtxdV+Mlsgfwhh0+M/k1/V7Q== -"@rollup/rollup-linux-arm64-musl@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.12.0.tgz#3882a4e3a564af9e55804beeb67076857b035ab7" - integrity sha512-eTvzUS3hhhlgeAv6bfigekzWZjaEX9xP9HhxB0Dvrdbkk5w/b+1Sxct2ZuDxNJKzsRStSq1EaEkVSEe7A7ipgQ== +"@rollup/rollup-linux-arm64-musl@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.10.0.tgz#5692e1a0feba0cc4a933864961afc3211177d242" + integrity sha512-9GW9yA30ib+vfFiwjX+N7PnjTnCMiUffhWj4vkG4ukYv1kJ4T9gHNg8zw+ChsOccM27G9yXrEtMScf1LaCuoWQ== -"@rollup/rollup-linux-riscv64-gnu@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.12.0.tgz#0c6ad792e1195c12bfae634425a3d2aa0fe93ab7" - integrity sha512-ix+qAB9qmrCRiaO71VFfY8rkiAZJL8zQRXveS27HS+pKdjwUfEhqo2+YF2oI+H/22Xsiski+qqwIBxVewLK7sw== +"@rollup/rollup-linux-riscv64-gnu@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.10.0.tgz#fbe3d80f7a7ac54a8847f5bddd1bc6f7b9ccb65f" + integrity sha512-X1ES+V4bMq2ws5fF4zHornxebNxMXye0ZZjUrzOrf7UMx1d6wMQtfcchZ8SqUnQPPHdOyOLW6fTcUiFgHFadRA== -"@rollup/rollup-linux-x64-gnu@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.12.0.tgz#9d62485ea0f18d8674033b57aa14fb758f6ec6e3" - integrity sha512-TenQhZVOtw/3qKOPa7d+QgkeM6xY0LtwzR8OplmyL5LrgTWIXpTQg2Q2ycBf8jm+SFW2Wt/DTn1gf7nFp3ssVA== +"@rollup/rollup-linux-x64-gnu@4.10.0": + version "4.10.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.10.0.tgz#3f06b55ccf173446d390d0306643dff62ec99807" + integrity sha512-w/5OpT2EnI/Xvypw4FIhV34jmNqU5PZjZue2l2Y3ty1Ootm3SqhI+AmfhlUYGBTd9JnpneZCDnt3uNOiOBkMyw== -"@rollup/rollup-linux-x64-musl@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.12.0.tgz#50e8167e28b33c977c1f813def2b2074d1435e05" - integrity sha512-LfFdRhNnW0zdMvdCb5FNuWlls2WbbSridJvxOvYWgSBOYZtgBfW9UGNJG//rwMqTX1xQE9BAodvMH9tAusKDUw== +"@rollup/rollup-linux-x64-musl@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.10.0.tgz#e4ac9b27041c83d7faab6205f62763103eb317ba" + integrity sha512-q/meftEe3QlwQiGYxD9rWwB21DoKQ9Q8wA40of/of6yGHhZuGfZO0c3WYkN9dNlopHlNT3mf5BPsUSxoPuVQaw== -"@rollup/rollup-win32-arm64-msvc@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.12.0.tgz#68d233272a2004429124494121a42c4aebdc5b8e" - integrity sha512-JPDxovheWNp6d7AHCgsUlkuCKvtu3RB55iNEkaQcf0ttsDU/JZF+iQnYcQJSk/7PtT4mjjVG8N1kpwnI9SLYaw== +"@rollup/rollup-win32-arm64-msvc@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.10.0.tgz#6ad0d4fb0066f240778ee3f61eecf7aa0357f883" + integrity sha512-NrR6667wlUfP0BHaEIKgYM/2va+Oj+RjZSASbBMnszM9k+1AmliRjHc3lJIiOehtSSjqYiO7R6KLNrWOX+YNSQ== -"@rollup/rollup-win32-ia32-msvc@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.12.0.tgz#366ca62221d1689e3b55a03f4ae12ae9ba595d40" - integrity sha512-fjtuvMWRGJn1oZacG8IPnzIV6GF2/XG+h71FKn76OYFqySXInJtseAqdprVTDTyqPxQOG9Exak5/E9Z3+EJ8ZA== +"@rollup/rollup-win32-ia32-msvc@4.10.0": + version "4.10.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.10.0.tgz#29d50292381311cc8d3623e73b427b7e2e40a653" + integrity sha512-FV0Tpt84LPYDduIDcXvEC7HKtyXxdvhdAOvOeWMWbQNulxViH2O07QXkT/FffX4FqEI02jEbCJbr+YcuKdyyMg== -"@rollup/rollup-win32-x64-msvc@4.12.0": - version "4.12.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.12.0.tgz#9ffdf9ed133a7464f4ae187eb9e1294413fab235" - integrity sha512-ZYmr5mS2wd4Dew/JjT0Fqi2NPB/ZhZ2VvPp7SmvPZb4Y1CG/LRcS6tcRo2cYU7zLK5A7cdbhWnnWmUjoI4qapg== +"@rollup/rollup-win32-x64-msvc@4.10.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.10.0.tgz#4eedd01af3a82c1acb0fe6d837ebf339c4cbf839" + integrity sha512-OZoJd+o5TaTSQeFFQ6WjFCiltiYVjIdsXxwu/XZ8qRpsvMQr4UsVrE5UyT9RIvsnuF47DqkJKhhVZ2Q9YW9IpQ== "@roxi/routify@2.18.0": version "2.18.0" @@ -5219,16 +5219,16 @@ integrity sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w== "@types/chai-subset@^1.3.3": - version "1.3.3" - resolved "https://registry.yarnpkg.com/@types/chai-subset/-/chai-subset-1.3.3.tgz#97893814e92abd2c534de422cb377e0e0bdaac94" - integrity sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw== + version "1.3.5" + resolved "https://registry.yarnpkg.com/@types/chai-subset/-/chai-subset-1.3.5.tgz#3fc044451f26985f45625230a7f22284808b0a9a" + integrity sha512-c2mPnw+xHtXDoHmdtcCXGwyLMiauiAyxWMzhGpqHC4nqI/Y5G2XhTampslK2rb59kpcuHon03UH8W6iYUzw88A== dependencies: "@types/chai" "*" "@types/chai@*", "@types/chai@^4.3.4": - version "4.3.9" - resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.9.tgz#144d762491967db8c6dea38e03d2206c2623feec" - integrity sha512-69TtiDzu0bcmKQv3yg1Zx409/Kd7r0b5F1PfpYJfSHzLGtB53547V4u+9iqKYsTu/O2ai6KTb0TInNpvuQ3qmg== + version "4.3.11" + resolved 
"https://registry.yarnpkg.com/@types/chai/-/chai-4.3.11.tgz#e95050bf79a932cb7305dd130254ccdf9bde671c" + integrity sha512-qQR1dr2rGIHYlJulmr8Ioq3De0Le9E4MJ5AiaeAETJJpndT1uUNHsGFK3L/UIu+rbkQSdj8J/w2bCsBZc/Y5fQ== "@types/chance@1.1.3": version "1.1.3" @@ -5623,10 +5623,10 @@ "@types/node" "*" form-data "^3.0.0" -"@types/node@*", "@types/node@>=10.0.0", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0": - version "20.10.7" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.10.7.tgz#40fe8faf25418a75de9fe68a8775546732a3a901" - integrity sha512-fRbIKb8C/Y2lXxB5eVMj4IU7xpdox0Lh8bUPEdtLysaylsml1hOOx1+STloRs/B9nf7C6kPRmmg/V7aQW7usNg== +"@types/node@*", "@types/node@>=10.0.0", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0", "@types/node@>=8.1.0": + version "20.11.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.2.tgz#39cea3fe02fbbc2f80ed283e94e1d24f2d3856fb" + integrity sha512-cZShBaVa+UO1LjWWBPmWRR4+/eY/JR/UIEcDlVsw3okjWEu+rB7/mH6X3B/L+qJVHDLjk9QW/y2upp9wp1yDXA== dependencies: undici-types "~5.26.4" @@ -5652,17 +5652,10 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.37.tgz#0bfcd173e8e1e328337473a8317e37b3b14fd30d" integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg== -"@types/node@>=8.1.0": - version "20.11.10" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.10.tgz#6c3de8974d65c362f82ee29db6b5adf4205462f9" - integrity sha512-rZEfe/hJSGYmdfX9tvcPMYeYPW2sNl50nsw4jZmRcaG0HIAb0WYEpsB05GOb53vjqpyE9GUhlDQ4jLSoB5q9kg== - dependencies: - undici-types "~5.26.4" - "@types/node@^18.11.18": - version "18.19.10" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.10.tgz#4de314ab66faf6bc8ba691021a091ddcdf13a158" - integrity sha512-IZD8kAM02AW1HRDTPOlz3npFava678pr8Ie9Vp8uRhBROXAv8MXT2pCnGZZAKYdromsNQLHQcfWQ6EOatVLtqA== + version "18.19.13" + resolved 
"https://registry.yarnpkg.com/@types/node/-/node-18.19.13.tgz#c3e989ca967b862a1f6c8c4148fe31865eedaf1a" + integrity sha512-kgnbRDj8ioDyGxoiaXsiu1Ybm/K14ajCgMOkwiqpHrnF7d7QiYRoRqHIpglMMs3DwXinlK4qJ8TZGlj4hfleJg== dependencies: undici-types "~5.26.4" @@ -6082,9 +6075,9 @@ integrity sha512-xTE1E+YF4aWPJJeUzaZI5DRntlkY3+BCVJi0axFptnjGmAoWxkyREIh/XMrfxVLejwQxMCfDXdICo0VLxThrog== "@types/whatwg-url@^11.0.2": - version "11.0.4" - resolved "https://registry.yarnpkg.com/@types/whatwg-url/-/whatwg-url-11.0.4.tgz#ffed0dc8d89d91f62e3f368fcbda222a487c4f63" - integrity sha512-lXCmTWSHJvf0TRSO58nm978b8HJ/EdsSsEKLd3ODHFjo+3VGAyyTp4v50nWvwtzBxSMQrVOK7tcuN0zGPLICMw== + version "11.0.3" + resolved "https://registry.yarnpkg.com/@types/whatwg-url/-/whatwg-url-11.0.3.tgz#9f584c9a9421f0971029ee504dd62a831cb8f3aa" + integrity sha512-z1ELvMijRL1QmU7QuzDkeYXSF2+dXI0ITKoQsIoVKcNBOiK5RMmWy+pYYxJTHFt8vkpZe7UsvRErQwcxZkjoUw== dependencies: "@types/webidl-conversions" "*" @@ -6534,11 +6527,16 @@ acorn-walk@^7.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== -acorn-walk@^8.0.2, acorn-walk@^8.1.1, acorn-walk@^8.2.0: +acorn-walk@^8.0.2, acorn-walk@^8.1.1: version "8.2.0" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== +acorn-walk@^8.2.0: + version "8.3.2" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa" + integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A== + acorn@^5.2.1, acorn@^5.7.3: version "5.7.4" resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e" @@ -6549,10 +6547,10 @@ 
acorn@^7.1.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== -acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0: - version "8.11.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b" - integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w== +acorn@^8.1.0, acorn@^8.10.0, acorn@^8.11.3, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0: + version "8.11.3" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" + integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== add-stream@^1.0.0: version "1.0.0" @@ -6994,7 +6992,7 @@ asn1.js@^5.0.0, asn1.js@^5.2.0, asn1.js@^5.4.1: minimalistic-assert "^1.0.0" safer-buffer "^2.1.0" -asn1@^0.2.4, asn1@^0.2.6, asn1@~0.2.3: +asn1@^0.2.6, asn1@~0.2.3: version "0.2.6" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== @@ -7045,12 +7043,7 @@ async@^2.6.3: dependencies: lodash "^4.17.14" -async@^3.2.1, async@^3.2.3: - version "3.2.4" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" - integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== - -async@^3.2.4: +async@^3.2.1, async@^3.2.3, async@^3.2.4: version "3.2.5" resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" integrity 
sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== @@ -7653,11 +7646,6 @@ bufferutil@^4.0.1: dependencies: node-gyp-build "^4.3.0" -buildcheck@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.3.tgz#70451897a95d80f7807e68fc412eb2e7e35ff4d5" - integrity sha512-pziaA+p/wdVImfcbsZLNF32EiWyujlQLwolMqUQE8xpKNOH7KmZQaY8sXN7DGOEzPAElo9QTaeNRfGnf3iOJbA== - buildcheck@~0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.6.tgz#89aa6e417cfd1e2196e3f8fe915eb709d2fe4238" @@ -7922,9 +7910,9 @@ catharsis@^0.9.0: lodash "^4.17.15" chai@^4.3.7: - version "4.3.10" - resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.10.tgz#d784cec635e3b7e2ffb66446a63b4e33bd390384" - integrity sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g== + version "4.4.1" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.4.1.tgz#3603fa6eba35425b0f2ac91a009fe924106e50d1" + integrity sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g== dependencies: assertion-error "^1.1.0" check-error "^1.0.3" @@ -8666,14 +8654,6 @@ cosmiconfig@^8.2.0: parse-json "^5.0.0" path-type "^4.0.0" -cpu-features@~0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.4.tgz#0023475bb4f4c525869c162e4108099e35bf19d8" - integrity sha512-fKiZ/zp1mUwQbnzb9IghXtHtDoTMtNeb8oYGx6kX2SYfhnG0HNdBEBIzB9b5KlXu5DQPhfy3mInbBxFcgwAr3A== - dependencies: - buildcheck "0.0.3" - nan "^2.15.0" - cpu-features@~0.0.9: version "0.0.9" resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.9.tgz#5226b92f0f1c63122b0a3eb84cb8335a4de499fc" @@ -9573,9 +9553,9 @@ diff@^4.0.1: integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== diff@^5.1.0: - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" - integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== + version "5.2.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.2.0.tgz#26ded047cd1179b78b9537d5ef725503ce1ae531" + integrity sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A== diffie-hellman@^5.0.0: version "5.0.3" @@ -9631,16 +9611,7 @@ docker-modem@^3.0.0: split-ca "^1.0.1" ssh2 "^1.11.0" -dockerode@^3.2.1: - version "3.3.4" - resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.4.tgz#875de614a1be797279caa9fe27e5637cf0e40548" - integrity sha512-3EUwuXnCU+RUlQEheDjmBE0B7q66PV9Rw5NiH1sXwINq0M9c5ERP9fxgkw36ZHOtzf4AGEEYySnkx/sACC9EgQ== - dependencies: - "@balena/dockerignore" "^1.0.2" - docker-modem "^3.0.0" - tar-fs "~2.0.1" - -dockerode@^3.3.5: +dockerode@^3.2.1, dockerode@^3.3.5: version "3.3.5" resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.5.tgz#7ae3f40f2bec53ae5e9a741ce655fff459745629" integrity sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA== @@ -9770,9 +9741,9 @@ dotenv@8.6.0, dotenv@^8.2.0: integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== dotenv@^16.3.1: - version "16.4.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.1.tgz#1d9931f1d3e5d2959350d1250efab299561f7f11" - integrity sha512-CjA3y+Dr3FyFDOAMnxZEGtnW9KBR2M0JvvUtXNW+dYJL5ROWxP9DUHCwgFqpMk0OXCc0ljhaNTr2w/kutYIcHQ== + version "16.3.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" + integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== dotenv@~10.0.0: version "10.0.0" @@ -10820,20 +10791,13 @@ fast-xml-parser@4.2.5: dependencies: strnum "^1.0.5" -fast-xml-parser@^4.1.3: 
+fast-xml-parser@^4.1.3, fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: version "4.3.3" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.3.tgz#aeaf5778392329f17168c40c51bcbfec8ff965be" integrity sha512-coV/D1MhrShMvU6D0I+VAK3umz6hUaxxhL0yp/9RjfiYUfAv14rDhGQL+PLForhMdr0wq3PiV07WtkkNjJjNHg== dependencies: strnum "^1.0.5" -fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: - version "4.3.2" - resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.2.tgz#761e641260706d6e13251c4ef8e3f5694d4b0d79" - integrity "sha1-dh5kEmBwbW4TJRxO+OP1aU1LDXk= sha512-rmrXUXwbJedoXkStenj1kkljNF7ugn5ZjR9FJcwmCfcCbtOMDghPajbc+Tck6vE6F5XsDmx+Pr2le9fw8+pXBg==" - dependencies: - strnum "^1.0.5" - fastest-levenshtein@^1.0.12: version "1.0.16" resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz#210e61b6ff181de91ea9b3d1b84fdedd47e034e5" @@ -10893,7 +10857,7 @@ fetch-cookie@0.11.0: dependencies: tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0" -fflate@^0.4.1, fflate@^0.4.8: +fflate@^0.4.1: version "0.4.8" resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae" integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA== @@ -15642,7 +15606,17 @@ mkdirp@^1.0.3, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mlly@^1.1.0, mlly@^1.2.0: +mlly@^1.1.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.6.0.tgz#0ecfbddc706857f5e170ccd28c6b0b9c81d3f548" + integrity sha512-YOvg9hfYQmnaB56Yb+KrJE2u0Yzz5zR+sLejEvF4fzwzV1Al6hkf2vyHTwqCRyv0hCi9rVCqVoXpyYevQIRwLQ== + dependencies: + acorn "^8.11.3" + pathe "^1.1.2" + pkg-types "^1.0.3" + ufo "^1.3.2" + +mlly@^1.2.0: version "1.4.2" resolved 
"https://registry.yarnpkg.com/mlly/-/mlly-1.4.2.tgz#7cf406aa319ff6563d25da6b36610a93f2a8007e" integrity sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg== @@ -15835,11 +15809,6 @@ named-placeholders@^1.1.3: dependencies: lru-cache "^7.14.1" -nan@^2.15.0, nan@^2.16.0: - version "2.17.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb" - integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ== - nan@^2.17.0, nan@^2.18.0: version "2.18.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.18.0.tgz#26a6faae7ffbeb293a39660e88a76b82e30b7554" @@ -17202,6 +17171,11 @@ pathe@^1.1.0, pathe@^1.1.1: resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.1.tgz#1dd31d382b974ba69809adc9a7a347e65d84829a" integrity sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q== +pathe@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.2.tgz#6c4cb47a945692e48a1ddd6e4094d170516437ec" + integrity sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ== + pathval@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d" @@ -17826,18 +17800,10 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" -posthog-js@^1.13.4: - version "1.103.1" - resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.103.1.tgz#f846c413c28aca204dc1527f49d39f651348f3c4" - integrity sha512-cFXFU4Z4kl/+RUUV4ju1DlfM7dwCGi6H9xWsfhljIhGcBbT8UfS4JGgZGXl9ABQDdgDPb9xciqnysFSsUQshTA== - dependencies: - fflate "^0.4.8" - preact "^10.19.3" - -posthog-js@^1.36.0: - version "1.96.1" - resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.96.1.tgz#4f9719a24e4e14037b0e72d430194d7cdb576447" - integrity 
sha512-kv1vQqYMt2BV3YHS+wxsbGuP+tz+M3y1AzNhz8TfkpY1HT8W/ONT0i0eQpeRr9Y+d4x/fZ6M4cXG5GMvi9lRCA== +posthog-js@^1.13.4, posthog-js@^1.36.0: + version "1.100.0" + resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.100.0.tgz#687b9a6e4ed226aa6572f4040b418ea0c8b3d353" + integrity sha512-r2XZEiHQ9mBK7D1G9k57I8uYZ2kZTAJ0OCX6K/OOdCWN8jKPhw3h5F9No5weilP6eVAn+hrsy7NvPV7SCX7gMg== dependencies: fflate "^0.4.1" @@ -18078,11 +18044,6 @@ pprof-format@^2.0.7: resolved "https://registry.yarnpkg.com/pprof-format/-/pprof-format-2.0.7.tgz#526e4361f8b37d16b2ec4bb0696b5292de5046a4" integrity sha512-1qWaGAzwMpaXJP9opRa23nPnt2Egi7RMNoNBptEE/XwHbcn4fC2b/4U4bKc5arkGkIh2ZabpF2bEb+c5GNHEKA== -preact@^10.19.3: - version "10.19.3" - resolved "https://registry.yarnpkg.com/preact/-/preact-10.19.3.tgz#7a7107ed2598a60676c943709ea3efb8aaafa899" - integrity sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ== - prebuild-install@^7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.1.tgz#de97d5b34a70a0c81334fd24641f2a1702352e45" @@ -19298,25 +19259,25 @@ rollup@^3.27.1: fsevents "~2.3.2" rollup@^4.9.6: - version "4.12.0" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.12.0.tgz#0b6d1e5f3d46bbcf244deec41a7421dc54cc45b5" - integrity sha512-wz66wn4t1OHIJw3+XU7mJJQV/2NAfw5OAk6G6Hoo3zcvz/XOfQ52Vgi+AN4Uxoxi0KBBwk2g8zPrTDA4btSB/Q== + version "4.10.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.10.0.tgz#244c2cb54a8de004a949fe6036a0801be9060456" + integrity sha512-t2v9G2AKxcQ8yrG+WGxctBes1AomT0M4ND7jTFBCVPXQ/WFTvNSefIrNSmLKhIKBrvN8SG+CZslimJcT3W2u2g== dependencies: "@types/estree" "1.0.5" optionalDependencies: - "@rollup/rollup-android-arm-eabi" "4.12.0" - "@rollup/rollup-android-arm64" "4.12.0" - "@rollup/rollup-darwin-arm64" "4.12.0" - "@rollup/rollup-darwin-x64" "4.12.0" - "@rollup/rollup-linux-arm-gnueabihf" "4.12.0" - "@rollup/rollup-linux-arm64-gnu" "4.12.0" - 
"@rollup/rollup-linux-arm64-musl" "4.12.0" - "@rollup/rollup-linux-riscv64-gnu" "4.12.0" - "@rollup/rollup-linux-x64-gnu" "4.12.0" - "@rollup/rollup-linux-x64-musl" "4.12.0" - "@rollup/rollup-win32-arm64-msvc" "4.12.0" - "@rollup/rollup-win32-ia32-msvc" "4.12.0" - "@rollup/rollup-win32-x64-msvc" "4.12.0" + "@rollup/rollup-android-arm-eabi" "4.10.0" + "@rollup/rollup-android-arm64" "4.10.0" + "@rollup/rollup-darwin-arm64" "4.10.0" + "@rollup/rollup-darwin-x64" "4.10.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.10.0" + "@rollup/rollup-linux-arm64-gnu" "4.10.0" + "@rollup/rollup-linux-arm64-musl" "4.10.0" + "@rollup/rollup-linux-riscv64-gnu" "4.10.0" + "@rollup/rollup-linux-x64-gnu" "4.10.0" + "@rollup/rollup-linux-x64-musl" "4.10.0" + "@rollup/rollup-win32-arm64-msvc" "4.10.0" + "@rollup/rollup-win32-ia32-msvc" "4.10.0" + "@rollup/rollup-win32-x64-msvc" "4.10.0" fsevents "~2.3.2" rotating-file-stream@3.1.0: @@ -19348,14 +19309,7 @@ rxjs@^6.6.6: dependencies: tslib "^1.9.0" -rxjs@^7.5.5: - version "7.8.0" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.0.tgz#90a938862a82888ff4c7359811a595e14e1e09a4" - integrity sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg== - dependencies: - tslib "^2.1.0" - -rxjs@^7.8.1: +rxjs@^7.5.5, rxjs@^7.8.1: version "7.8.1" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== @@ -20049,18 +20003,7 @@ ssh-remote-port-forward@^1.0.4: "@types/ssh2" "^0.5.48" ssh2 "^1.4.0" -ssh2@^1.11.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.11.0.tgz#ce60186216971e12f6deb553dcf82322498fe2e4" - integrity sha512-nfg0wZWGSsfUe/IBJkXVll3PEZ//YH2guww+mP88gTpuSU4FtZN7zu9JoeTGOyCNx2dTDtT9fOpWwlzyj4uOOw== - dependencies: - asn1 "^0.2.4" - bcrypt-pbkdf "^1.0.2" - optionalDependencies: - cpu-features "~0.0.4" - nan "^2.16.0" - 
-ssh2@^1.4.0: +ssh2@^1.11.0, ssh2@^1.4.0: version "1.15.0" resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.15.0.tgz#2f998455036a7f89e0df5847efb5421748d9871b" integrity sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw== @@ -20138,9 +20081,9 @@ statuses@2.0.1, statuses@^2.0.0: integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== std-env@^3.3.1: - version "3.4.3" - resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.4.3.tgz#326f11db518db751c83fd58574f449b7c3060910" - integrity sha512-f9aPhy8fYBuMN+sNfakZV18U39PbalgjXG3lLB9WkaYTxijru61wb57V9wxxNthXM5Sd88ETBWi29qLAsHO52Q== + version "3.7.0" + resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.7.0.tgz#c9f7386ced6ecf13360b6c6c55b8aaa4ef7481d2" + integrity sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg== step@0.0.x: version "0.0.6" @@ -20603,9 +20546,9 @@ svelte-spa-router@^4.0.1: regexparam "2.0.2" svelte@^4.2.10: - version "4.2.12" - resolved "https://registry.yarnpkg.com/svelte/-/svelte-4.2.12.tgz#13d98d2274d24d3ad216c8fdc801511171c70bb1" - integrity sha512-d8+wsh5TfPwqVzbm4/HCXC783/KPHV60NvwitJnyTA5lWn1elhXMNWhXGCJ7PwPa8qFUnyJNIyuIRt2mT0WMug== + version "4.2.10" + resolved "https://registry.yarnpkg.com/svelte/-/svelte-4.2.10.tgz#3bef8d79ca75eb53cc4d03f9fac1546e60393f77" + integrity sha512-Ep06yCaCdgG1Mafb/Rx8sJ1QS3RW2I2BxGp2Ui9LBHSZ2/tO/aGLc5WqPjgiAP6KAnLJGaIr/zzwQlOo1b8MxA== dependencies: "@ampproject/remapping" "^2.2.1" "@jridgewell/sourcemap-codec" "^1.4.15" @@ -21015,9 +20958,9 @@ tiny-queue@^0.2.0: integrity sha512-EijGsv7kzd9I9g0ByCl6h42BWNGUZrlCSejfrb3AKeHC33SGbASu1VDf5O3rRiiUOhAC9CHdZxFPbZu0HmR70A== tinybench@^2.3.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/tinybench/-/tinybench-2.5.1.tgz#3408f6552125e53a5a48adee31261686fd71587e" - integrity 
sha512-65NKvSuAVDP/n4CqH+a9w2kTlLReS9vhsAP06MWx+/89nMinJyB2icyl58RIcqCmIggpojIGeuJGhjU1aGMBSg== + version "2.6.0" + resolved "https://registry.yarnpkg.com/tinybench/-/tinybench-2.6.0.tgz#1423284ee22de07c91b3752c048d2764714b341b" + integrity sha512-N8hW3PG/3aOoZAN5V/NSAEDz0ZixDSSt5b/a05iqtpgfLWMSVuCo7w0k2vVvEjdrIoeGqZzweX2WlyioNIHchA== tinycolor2@^1.6.0: version "1.6.0" @@ -21457,6 +21400,11 @@ ufo@^1.3.0: resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.3.1.tgz#e085842f4627c41d4c1b60ebea1f75cdab4ce86b" integrity sha512-uY/99gMLIOlJPwATcMVYfqDSxUR9//AUcgZMzwfSTJPDKzA1S8mX4VLqa+fiAtveraQUBCz4FFcwVZBGbwBXIw== +ufo@^1.3.2: + version "1.4.0" + resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.4.0.tgz#39845b31be81b4f319ab1d99fd20c56cac528d32" + integrity sha512-Hhy+BhRBleFjpJ2vchUNN40qgkh0366FWJGqVLYBHev0vpHTrXSA0ryT+74UiW6KWsldNurQMKGqCm1M2zBciQ== + uglify-js@^3.1.4, uglify-js@^3.7.7: version "3.17.4" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" @@ -21501,9 +21449,9 @@ underscore@~1.13.2: integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A== undici-types@^6.0.1: - version "6.6.2" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.6.2.tgz#48c65d30bfcae492c3c89b1d147fed9d43a16b79" - integrity sha512-acoBcoBobgsg3YUEO/Oht8JJCuFYpzWLFKbqEbcEZcXdkQrTzkF/yWj9JoLaFDa6ArI31dFEmNZkCjQZ7mlf7w== + version "6.0.1" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.0.1.tgz#62e2af9fcd3ce359634175658de39df8d0f37197" + integrity sha512-i9dNdkCziyqGpFxhatR9LITcInbFWh+ExlWkrZQpZHje8FfCcJKgps0IbmMd7D1o8c8syG4pIOV+aKIoC9JEyA== undici-types@~5.26.4: version "5.26.5" @@ -21516,9 +21464,9 @@ undici@^4.14.1: integrity sha512-tkZSECUYi+/T1i4u+4+lwZmQgLXd4BLGlrc7KZPcLIW7Jpq99+Xpc30ONv7nS6F5UNOxp/HBZSSL9MafUrvJbw== undici@^6.0.1: - version "6.6.2" - resolved 
"https://registry.yarnpkg.com/undici/-/undici-6.6.2.tgz#8dce5ae54e8a3bc7140c2b2a0972b5fde9a88efb" - integrity sha512-vSqvUE5skSxQJ5sztTZ/CdeJb1Wq0Hf44hlYMciqHghvz+K88U0l7D6u1VsndoFgskDcnU+nG3gYmMzJVzd9Qg== + version "6.0.1" + resolved "https://registry.yarnpkg.com/undici/-/undici-6.0.1.tgz#385572addca36d1c2b280629cb694b726170027e" + integrity sha512-eZFYQLeS9BiXpsU0cuFhCwfeda2MnC48EVmmOz/eCjsTgmyTdaHdVsPSC/kwC2GtW2e0uH0HIPbadf3/bRWSxw== dependencies: "@fastify/busboy" "^2.0.0" @@ -21842,7 +21790,18 @@ vite-plugin-static-copy@^0.17.0: fs-extra "^11.1.0" picocolors "^1.0.0" -"vite@^3.0.0 || ^4.0.0", vite@^4.5.0: +"vite@^3.0.0 || ^4.0.0": + version "4.5.2" + resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.2.tgz#d6ea8610e099851dad8c7371599969e0f8b97e82" + integrity sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w== + dependencies: + esbuild "^0.18.10" + postcss "^8.4.27" + rollup "^3.27.1" + optionalDependencies: + fsevents "~2.3.2" + +vite@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.0.tgz#ec406295b4167ac3bc23e26f9c8ff559287cff26" integrity sha512-ulr8rNLA6rkyFAlVWw2q5YJ91v098AFQ2R0PRFwPzREXOUJQPtFUG0t+/ZikhaOCDqFoDhN6/v8Sq0o4araFAw== @@ -22476,12 +22435,7 @@ yaml@^1.10.2: resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== -yaml@^2.1.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.2.tgz#f522db4313c671a0ca963a75670f1c12ea909144" - integrity sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg== - -yaml@^2.2.2: +yaml@^2.1.1, yaml@^2.2.2: version "2.3.4" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA== 
From 5ec4e90c6d3c2e5b75623a31a834410da82e03b6 Mon Sep 17 00:00:00 2001 From: Gerard Burns Date: Thu, 29 Feb 2024 12:51:56 +0000 Subject: [PATCH 46/54] Revert "App Preview Skeleton (#12898)" (#13159) This reverts commit 9a9b74579529cd061735c371b73ec862ea1856cc. --- .../[screenId]/_components/AppPreview.svelte | 43 +-- .../builder/portal/apps/[appId]/index.svelte | 51 +--- .../client/src/components/ClientApp.svelte | 256 ++++++++---------- .../client/src/components/FreeFooter.svelte | 1 - .../src/components/ClientAppSkeleton.svelte | 244 ----------------- .../frontend-core/src/components/index.js | 1 - .../frontend-core/src/themes/midnight.css | 3 - packages/frontend-core/src/themes/nord.css | 3 - packages/server/package.json | 1 - .../src/api/controllers/static/index.ts | 79 +----- .../static/templates/BudibaseApp.svelte | 7 - .../api/controllers/static/templates/app.hbs | 8 +- packages/server/src/api/routes/static.ts | 2 +- scripts/build.js | 14 +- 14 files changed, 151 insertions(+), 562 deletions(-) delete mode 100644 packages/frontend-core/src/components/ClientAppSkeleton.svelte diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte index 4bd62c0049..fa126bbc99 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte @@ -12,11 +12,17 @@ hoverStore, } from "stores/builder" import ConfirmDialog from "components/common/ConfirmDialog.svelte" - import { Layout, Heading, Body, Icon, notifications } from "@budibase/bbui" + import { + ProgressCircle, + Layout, + Heading, + Body, + Icon, + notifications, + } from "@budibase/bbui" import ErrorSVG from "@budibase/frontend-core/assets/error.svg?raw" import { findComponent, findComponentPath } from 
"helpers/components" import { isActive, goto } from "@roxi/routify" - import { ClientAppSkeleton } from "@budibase/frontend-core" let iframe let layout @@ -234,16 +240,8 @@
{#if loading} -
- +
+
{:else if error}
@@ -260,6 +258,8 @@ bind:this={iframe} src="/app/preview" class:hidden={loading || error} + class:tablet={$previewStore.previewDevice === "tablet"} + class:mobile={$previewStore.previewDevice === "mobile"} />
diff --git a/packages/frontend-core/src/components/index.js b/packages/frontend-core/src/components/index.js index f71420b12b..f724e1e4d9 100644 --- a/packages/frontend-core/src/components/index.js +++ b/packages/frontend-core/src/components/index.js @@ -5,4 +5,3 @@ export { default as UserAvatar } from "./UserAvatar.svelte" export { default as UserAvatars } from "./UserAvatars.svelte" export { default as Updating } from "./Updating.svelte" export { Grid } from "./grid" -export { default as ClientAppSkeleton } from "./ClientAppSkeleton.svelte" diff --git a/packages/frontend-core/src/themes/midnight.css b/packages/frontend-core/src/themes/midnight.css index cf6a4fbd13..e311452262 100644 --- a/packages/frontend-core/src/themes/midnight.css +++ b/packages/frontend-core/src/themes/midnight.css @@ -17,8 +17,5 @@ --modal-background: var(--spectrum-global-color-gray-50); --drop-shadow: rgba(0, 0, 0, 0.25) !important; --spectrum-global-color-blue-100: rgba(35, 40, 50) !important; - - --spectrum-alias-background-color-secondary: var(--spectrum-global-color-gray-75); - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-100); } diff --git a/packages/frontend-core/src/themes/nord.css b/packages/frontend-core/src/themes/nord.css index bc142db0fd..d47dbe8aa8 100644 --- a/packages/frontend-core/src/themes/nord.css +++ b/packages/frontend-core/src/themes/nord.css @@ -50,7 +50,4 @@ --modal-background: var(--spectrum-global-color-gray-50); --drop-shadow: rgba(0, 0, 0, 0.15) !important; --spectrum-global-color-blue-100: rgb(56, 65, 84) !important; - - --spectrum-alias-background-color-secondary: var(--spectrum-global-color-gray-75); - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-100); } diff --git a/packages/server/package.json b/packages/server/package.json index 4f1a9fb3cc..45980a4be6 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -52,7 +52,6 @@ "@budibase/pro": "0.0.0", 
"@budibase/shared-core": "0.0.0", "@budibase/string-templates": "0.0.0", - "@budibase/frontend-core": "0.0.0", "@budibase/types": "0.0.0", "@bull-board/api": "5.10.2", "@bull-board/koa": "5.10.2", diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts index 367934445a..5a3803e6d5 100644 --- a/packages/server/src/api/controllers/static/index.ts +++ b/packages/server/src/api/controllers/static/index.ts @@ -1,5 +1,7 @@ import { InvalidFileExtensions } from "@budibase/shared-core" + import AppComponent from "./templates/BudibaseApp.svelte" + import { join } from "../../../utilities/centralPath" import * as uuid from "uuid" import { ObjectStoreBuckets } from "../../../constants" @@ -22,13 +24,7 @@ import AWS from "aws-sdk" import fs from "fs" import sdk from "../../../sdk" import * as pro from "@budibase/pro" -import { - UserCtx, - App, - Ctx, - ProcessAttachmentResponse, - Feature, -} from "@budibase/types" +import { App, Ctx, ProcessAttachmentResponse } from "@budibase/types" import { getAppMigrationVersion, getLatestMigrationId, @@ -36,61 +32,6 @@ import { import send from "koa-send" -const getThemeVariables = (theme: string) => { - if (theme === "spectrum--lightest") { - return ` - --spectrum-global-color-gray-50: rgb(255, 255, 255); - --spectrum-global-color-gray-200: rgb(244, 244, 244); - --spectrum-global-color-gray-300: rgb(234, 234, 234); - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-50); - ` - } - if (theme === "spectrum--light") { - return ` - --spectrum-global-color-gray-50: rgb(255, 255, 255); - --spectrum-global-color-gray-200: rgb(234, 234, 234); - --spectrum-global-color-gray-300: rgb(225, 225, 225); - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-50); - - ` - } - if (theme === "spectrum--dark") { - return ` - --spectrum-global-color-gray-100: rgb(50, 50, 50); - --spectrum-global-color-gray-200: rgb(62, 62, 62); - 
--spectrum-global-color-gray-300: rgb(74, 74, 74); - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-100); - ` - } - if (theme === "spectrum--darkest") { - return ` - --spectrum-global-color-gray-100: rgb(30, 30, 30); - --spectrum-global-color-gray-200: rgb(44, 44, 44); - --spectrum-global-color-gray-300: rgb(57, 57, 57); - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-100); - ` - } - if (theme === "spectrum--nord") { - return ` - --spectrum-global-color-gray-100: #3b4252; - - --spectrum-global-color-gray-200: #424a5c; - --spectrum-global-color-gray-300: #4c566a; - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-100); - ` - } - if (theme === "spectrum--midnight") { - return ` - --hue: 220; - --sat: 10%; - --spectrum-global-color-gray-100: hsl(var(--hue), var(--sat), 17%); - --spectrum-global-color-gray-200: hsl(var(--hue), var(--sat), 20%); - --spectrum-global-color-gray-300: hsl(var(--hue), var(--sat), 24%); - --spectrum-alias-background-color-primary: var(--spectrum-global-color-gray-100); - ` - } -} - export const toggleBetaUiFeature = async function (ctx: Ctx) { const cookieName = `beta:${ctx.params.feature}` @@ -205,7 +146,7 @@ const requiresMigration = async (ctx: Ctx) => { return requiresMigrations } -export const serveApp = async function (ctx: UserCtx) { +export const serveApp = async function (ctx: Ctx) { const needMigrations = await requiresMigration(ctx) const bbHeaderEmbed = @@ -226,19 +167,9 @@ export const serveApp = async function (ctx: UserCtx) { const appInfo = await db.get(DocumentType.APP_METADATA) let appId = context.getAppId() - const hideDevTools = !!ctx.params.appUrl - const sideNav = appInfo.navigation.navigation === "Left" - const hideFooter = - ctx?.user?.license?.features?.includes(Feature.BRANDING) || false - const themeVariables = getThemeVariables(appInfo?.theme) - if (!env.isJest()) { const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins) 
- const { head, html, css } = AppComponent.render({ - hideDevTools, - sideNav, - hideFooter, metaImage: branding?.metaImageUrl || "https://res.cloudinary.com/daog6scxm/image/upload/v1698759482/meta-images/plain-branded-meta-image-coral_ocxmgu.png", @@ -263,7 +194,7 @@ export const serveApp = async function (ctx: UserCtx) { ctx.body = await processString(appHbs, { head, body: html, - css: `:root{${themeVariables}} ${css.code}`, + style: css.code, appId, embedded: bbHeaderEmbed, }) diff --git a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte index 63b293b4ca..7819368fc0 100644 --- a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte +++ b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte @@ -1,6 +1,4 @@ @@ -102,7 +96,6 @@ -
{#if clientLibPath}

There was an error loading your app

diff --git a/packages/server/src/api/controllers/static/templates/app.hbs b/packages/server/src/api/controllers/static/templates/app.hbs index b01b723c3e..8c445158a0 100644 --- a/packages/server/src/api/controllers/static/templates/app.hbs +++ b/packages/server/src/api/controllers/static/templates/app.hbs @@ -1,12 +1,8 @@ - + {{{head}}} - + -a11y-click-events-have-key-events
{displayValue} From 50bbbb2e0621422cc8b59d2fe77d633257ef3f87 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 29 Feb 2024 16:28:00 +0000 Subject: [PATCH 48/54] Updating datasource controller types, this branched out a little bit to removing as many anys as possible, and allowing RowValue to correctly be returned from the allDocs function of the DB. --- .../backend-core/src/db/couch/DatabaseImpl.ts | 3 +- .../backend-core/src/db/instrumentation.ts | 4 +- .../server/src/api/controllers/datasource.ts | 40 +++++++++++++------ .../server/src/api/controllers/integration.ts | 5 ++- .../db/defaultData/datasource_bb_default.ts | 8 ++-- packages/server/src/db/utils.ts | 14 ++++--- packages/server/src/integrations/index.ts | 2 + .../server/src/jsRunner/vm/isolated-vm.ts | 9 ++++- .../server/src/sdk/app/applications/import.ts | 4 +- .../src/sdk/app/datasources/datasources.ts | 2 +- packages/types/src/api/web/app/datasource.ts | 4 +- .../types/src/documents/app/datasource.ts | 17 ++++---- packages/types/src/documents/pouch.ts | 4 +- packages/types/src/sdk/datasources.ts | 1 + packages/types/src/sdk/db.ts | 10 ++++- 15 files changed, 84 insertions(+), 43 deletions(-) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 0e2b4173b0..7e7c997cbe 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -11,6 +11,7 @@ import { Document, isDocument, RowResponse, + RowValue, } from "@budibase/types" import { getCouchInfo } from "./connections" import { directCouchUrlCall } from "./utils" @@ -221,7 +222,7 @@ export class DatabaseImpl implements Database { }) } - async allDocs( + async allDocs( params: DatabaseQueryOpts ): Promise> { return this.performCall(db => { diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index aa2ac424ae..03010d4c92 100644 --- 
a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -1,5 +1,4 @@ import { - DocumentScope, DocumentDestroyResponse, DocumentInsertResponse, DocumentBulkResponse, @@ -13,6 +12,7 @@ import { DatabasePutOpts, DatabaseQueryOpts, Document, + RowValue, } from "@budibase/types" import tracer from "dd-trace" import { Writable } from "stream" @@ -79,7 +79,7 @@ export class DDInstrumentedDatabase implements Database { }) } - allDocs( + allDocs( params: DatabaseQueryOpts ): Promise> { return tracer.trace("db.allDocs", span => { diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts index d70c13f800..17217ea018 100644 --- a/packages/server/src/api/controllers/datasource.ts +++ b/packages/server/src/api/controllers/datasource.ts @@ -15,10 +15,14 @@ import { FieldType, RelationshipFieldMetadata, SourceName, + UpdateDatasourceRequest, UpdateDatasourceResponse, UserCtx, VerifyDatasourceRequest, VerifyDatasourceResponse, + Table, + RowValue, + DynamicVariable, } from "@budibase/types" import sdk from "../../sdk" import { builderSocket } from "../../websockets" @@ -90,8 +94,10 @@ async function invalidateVariables( existingDatasource: Datasource, updatedDatasource: Datasource ) { - const existingVariables: any = existingDatasource.config?.dynamicVariables - const updatedVariables: any = updatedDatasource.config?.dynamicVariables + const existingVariables: DynamicVariable[] = + existingDatasource.config?.dynamicVariables + const updatedVariables: DynamicVariable[] = + updatedDatasource.config?.dynamicVariables const toInvalidate = [] if (!existingVariables) { @@ -103,9 +109,9 @@ async function invalidateVariables( toInvalidate.push(...existingVariables) } else { // invaldate changed / removed - existingVariables.forEach((existing: any) => { + existingVariables.forEach(existing => { const unchanged = updatedVariables.find( - (updated: any) => + updated => existing.name 
=== updated.name && existing.queryId === updated.queryId && existing.value === updated.value @@ -118,24 +124,32 @@ async function invalidateVariables( await invalidateDynamicVariables(toInvalidate) } -export async function update(ctx: UserCtx) { +export async function update( + ctx: UserCtx +) { const db = context.getAppDB() const datasourceId = ctx.params.datasourceId const baseDatasource = await sdk.datasources.get(datasourceId) - const auth = baseDatasource.config?.auth await invalidateVariables(baseDatasource, ctx.request.body) const isBudibaseSource = baseDatasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE - const dataSourceBody = isBudibaseSource - ? { name: ctx.request.body?.name } + const dataSourceBody: Datasource = isBudibaseSource + ? { + name: ctx.request.body?.name, + type: dbCore.BUDIBASE_DATASOURCE_TYPE, + source: SourceName.BUDIBASE, + } : ctx.request.body let datasource: Datasource = { ...baseDatasource, ...sdk.datasources.mergeConfigs(dataSourceBody, baseDatasource), } + + // this block is specific to GSheets, if no auth set, set it back + const auth = baseDatasource.config?.auth if (auth && !ctx.request.body.auth) { // don't strip auth config from DB datasource.config!.auth = auth @@ -204,7 +218,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) { const db = context.getAppDB() // Get all internal tables - const internalTables = await db.allDocs( + const internalTables = await db.allDocs
( getTableParams(null, { include_docs: true, }) @@ -212,8 +226,8 @@ async function destroyInternalTablesBySourceId(datasourceId: string) { // Filter by datasource and return the docs. const datasourceTableDocs = internalTables.rows.reduce( - (acc: any, table: any) => { - if (table.doc.sourceId == datasourceId) { + (acc: Table[], table) => { + if (table.doc?.sourceId == datasourceId) { acc.push(table.doc) } return acc @@ -254,9 +268,9 @@ export async function destroy(ctx: UserCtx) { if (datasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE) { await destroyInternalTablesBySourceId(datasourceId) } else { - const queries = await db.allDocs(getQueryParams(datasourceId)) + const queries = await db.allDocs(getQueryParams(datasourceId)) await db.bulkDocs( - queries.rows.map((row: any) => ({ + queries.rows.map(row => ({ _id: row.id, _rev: row.value.rev, _deleted: true, diff --git a/packages/server/src/api/controllers/integration.ts b/packages/server/src/api/controllers/integration.ts index 9cfde31e4c..57038f8401 100644 --- a/packages/server/src/api/controllers/integration.ts +++ b/packages/server/src/api/controllers/integration.ts @@ -1,7 +1,10 @@ import { getDefinition, getDefinitions } from "../../integrations" import { SourceName, UserCtx } from "@budibase/types" -const DISABLED_EXTERNAL_INTEGRATIONS = [SourceName.AIRTABLE] +const DISABLED_EXTERNAL_INTEGRATIONS = [ + SourceName.AIRTABLE, + SourceName.BUDIBASE, +] export async function fetch(ctx: UserCtx) { const definitions = await getDefinitions() diff --git a/packages/server/src/db/defaultData/datasource_bb_default.ts b/packages/server/src/db/defaultData/datasource_bb_default.ts index ac540cd8fc..03aed3c118 100644 --- a/packages/server/src/db/defaultData/datasource_bb_default.ts +++ b/packages/server/src/db/defaultData/datasource_bb_default.ts @@ -1,8 +1,8 @@ import { DEFAULT_BB_DATASOURCE_ID, - DEFAULT_INVENTORY_TABLE_ID, DEFAULT_EMPLOYEE_TABLE_ID, DEFAULT_EXPENSES_TABLE_ID, + DEFAULT_INVENTORY_TABLE_ID, 
DEFAULT_JOBS_TABLE_ID, } from "../../constants" import { importToRows } from "../../api/controllers/table/utils" @@ -15,19 +15,21 @@ import { expensesImport } from "./expensesImport" import { db as dbCore } from "@budibase/backend-core" import { AutoFieldSubType, + Datasource, FieldType, RelationshipType, Row, + SourceName, Table, TableSchema, TableSourceType, } from "@budibase/types" -const defaultDatasource = { +const defaultDatasource: Datasource = { _id: DEFAULT_BB_DATASOURCE_ID, type: dbCore.BUDIBASE_DATASOURCE_TYPE, name: "Sample Data", - source: "BUDIBASE", + source: SourceName.BUDIBASE, config: {}, } diff --git a/packages/server/src/db/utils.ts b/packages/server/src/db/utils.ts index 35d9b69e96..983cbf423c 100644 --- a/packages/server/src/db/utils.ts +++ b/packages/server/src/db/utils.ts @@ -1,13 +1,15 @@ import newid from "./newid" import { db as dbCore } from "@budibase/backend-core" import { - FieldType, + DatabaseQueryOpts, + Datasource, DocumentType, FieldSchema, - RelationshipFieldMetadata, - VirtualDocumentType, + FieldType, INTERNAL_TABLE_SOURCE_ID, - DatabaseQueryOpts, + RelationshipFieldMetadata, + SourceName, + VirtualDocumentType, } from "@budibase/types" export { DocumentType, VirtualDocumentType } from "@budibase/types" @@ -20,11 +22,11 @@ export const enum AppStatus { DEPLOYED = "published", } -export const BudibaseInternalDB = { +export const BudibaseInternalDB: Datasource = { _id: INTERNAL_TABLE_SOURCE_ID, type: dbCore.BUDIBASE_DATASOURCE_TYPE, name: "Budibase DB", - source: "BUDIBASE", + source: SourceName.BUDIBASE, config: {}, } diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts index 49761bac85..ee2bb23f23 100644 --- a/packages/server/src/integrations/index.ts +++ b/packages/server/src/integrations/index.ts @@ -37,6 +37,7 @@ const DEFINITIONS: Record = { [SourceName.REDIS]: redis.schema, [SourceName.SNOWFLAKE]: snowflake.schema, [SourceName.ORACLE]: undefined, + [SourceName.BUDIBASE]: 
undefined, } const INTEGRATIONS: Record = { @@ -56,6 +57,7 @@ const INTEGRATIONS: Record = { [SourceName.REDIS]: redis.integration, [SourceName.SNOWFLAKE]: snowflake.integration, [SourceName.ORACLE]: undefined, + [SourceName.BUDIBASE]: undefined, } // optionally add oracle integration if the oracle binary can be installed diff --git a/packages/server/src/jsRunner/vm/isolated-vm.ts b/packages/server/src/jsRunner/vm/isolated-vm.ts index b0692f0fd1..928d1b8afa 100644 --- a/packages/server/src/jsRunner/vm/isolated-vm.ts +++ b/packages/server/src/jsRunner/vm/isolated-vm.ts @@ -99,7 +99,14 @@ export class IsolatedVM implements VM { } withContext(context: Record, executeWithContext: () => T) { - this.addToContext(context) + this.addToContext({ + ...context, + Snippets: { + specialFunction: function (special: string) { + return "hello world! " + special + }, + }, + }) try { return executeWithContext() diff --git a/packages/server/src/sdk/app/applications/import.ts b/packages/server/src/sdk/app/applications/import.ts index c3415bdb36..f712548fcb 100644 --- a/packages/server/src/sdk/app/applications/import.ts +++ b/packages/server/src/sdk/app/applications/import.ts @@ -85,7 +85,9 @@ async function getImportableDocuments(db: Database) { const docPromises = [] for (let docType of DocumentTypesToImport) { docPromises.push( - db.allDocs(dbCore.getDocParams(docType, null, { include_docs: true })) + db.allDocs( + dbCore.getDocParams(docType, null, { include_docs: true }) + ) ) } // map the responses to the document itself diff --git a/packages/server/src/sdk/app/datasources/datasources.ts b/packages/server/src/sdk/app/datasources/datasources.ts index c71c3f1b31..fd0d291d91 100644 --- a/packages/server/src/sdk/app/datasources/datasources.ts +++ b/packages/server/src/sdk/app/datasources/datasources.ts @@ -229,7 +229,7 @@ export async function removeSecretSingle(datasource: Datasource) { } export function mergeConfigs(update: Datasource, old: Datasource) { - if (!update.config) { + 
if (!update.config || !old.config) { return update } // specific to REST datasources, fix the auth configs again if required diff --git a/packages/types/src/api/web/app/datasource.ts b/packages/types/src/api/web/app/datasource.ts index 4a3d07a952..f931665917 100644 --- a/packages/types/src/api/web/app/datasource.ts +++ b/packages/types/src/api/web/app/datasource.ts @@ -32,9 +32,7 @@ export interface FetchDatasourceInfoResponse { tableNames: string[] } -export interface UpdateDatasourceRequest extends Datasource { - datasource: Datasource -} +export interface UpdateDatasourceRequest extends Datasource {} export interface BuildSchemaFromSourceRequest { tablesFilter?: string[] diff --git a/packages/types/src/documents/app/datasource.ts b/packages/types/src/documents/app/datasource.ts index 67035a2e72..8976e1cae3 100644 --- a/packages/types/src/documents/app/datasource.ts +++ b/packages/types/src/documents/app/datasource.ts @@ -6,6 +6,9 @@ export interface Datasource extends Document { type: string name?: string source: SourceName + // this is a googlesheets specific property which + // can be found in the GSheets schema - pertains to SSO creds + auth?: { type: string } // the config is defined by the schema config?: Record plus?: boolean @@ -36,6 +39,12 @@ export interface RestAuthConfig { config: RestBasicAuthConfig | RestBearerAuthConfig } +export interface DynamicVariable { + name: string + queryId: string + value: string +} + export interface RestConfig { url: string rejectUnauthorized: boolean @@ -47,11 +56,5 @@ export interface RestConfig { staticVariables: { [key: string]: string } - dynamicVariables: [ - { - name: string - queryId: string - value: string - } - ] + dynamicVariables: DynamicVariable[] } diff --git a/packages/types/src/documents/pouch.ts b/packages/types/src/documents/pouch.ts index 11efc502be..e1de89649c 100644 --- a/packages/types/src/documents/pouch.ts +++ b/packages/types/src/documents/pouch.ts @@ -5,7 +5,7 @@ export interface RowValue { 
deleted: boolean } -export interface RowResponse { +export interface RowResponse { id: string key: string error: string @@ -13,7 +13,7 @@ export interface RowResponse { doc?: T } -export interface AllDocsResponse { +export interface AllDocsResponse { offset: number total_rows: number rows: RowResponse[] diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts index 7a335eb3b9..9c0f3a4165 100644 --- a/packages/types/src/sdk/datasources.ts +++ b/packages/types/src/sdk/datasources.ts @@ -56,6 +56,7 @@ export enum SourceName { FIRESTORE = "FIRESTORE", REDIS = "REDIS", SNOWFLAKE = "SNOWFLAKE", + BUDIBASE = "BUDIBASE", } export enum IncludeRelationship { diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index 9e44a4827f..c4e4a4f02f 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -1,5 +1,11 @@ import type Nano from "@budibase/nano" -import { AllDocsResponse, AnyDocument, Document, ViewTemplateOpts } from "../" +import { + AllDocsResponse, + AnyDocument, + Document, + RowValue, + ViewTemplateOpts, +} from "../" import { Writable } from "stream" export enum SearchIndex { @@ -135,7 +141,7 @@ export interface Database { opts?: DatabasePutOpts ): Promise bulkDocs(documents: AnyDocument[]): Promise - allDocs( + allDocs( params: DatabaseQueryOpts ): Promise> query( From 92ac417fdf8beb167fa4e29cc38630df48be415f Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 29 Feb 2024 16:31:50 +0000 Subject: [PATCH 49/54] Removing accidentally commited stuff. 
--- packages/server/src/jsRunner/vm/isolated-vm.ts | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/packages/server/src/jsRunner/vm/isolated-vm.ts b/packages/server/src/jsRunner/vm/isolated-vm.ts index 928d1b8afa..b0692f0fd1 100644 --- a/packages/server/src/jsRunner/vm/isolated-vm.ts +++ b/packages/server/src/jsRunner/vm/isolated-vm.ts @@ -99,14 +99,7 @@ export class IsolatedVM implements VM { } withContext(context: Record, executeWithContext: () => T) { - this.addToContext({ - ...context, - Snippets: { - specialFunction: function (special: string) { - return "hello world! " + special - }, - }, - }) + this.addToContext(context) try { return executeWithContext() From b4906b0db3c26bbc35f32afa603deed77a5b5784 Mon Sep 17 00:00:00 2001 From: melohagan <101575380+melohagan@users.noreply.github.com> Date: Thu, 29 Feb 2024 16:45:40 +0000 Subject: [PATCH 50/54] Fetch attachment URL in public API retrieve row (#13167) --- packages/server/src/api/controllers/row/internal.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts index 3ee08fff2e..cc903bd74a 100644 --- a/packages/server/src/api/controllers/row/internal.ts +++ b/packages/server/src/api/controllers/row/internal.ts @@ -189,11 +189,12 @@ export async function fetchEnrichedRow(ctx: UserCtx) { const tableId = utils.getTableId(ctx) const rowId = ctx.params.rowId as string // need table to work out where links go in row, as well as the link docs - const [table, row, links] = await Promise.all([ + const [table, links] = await Promise.all([ sdk.tables.getTable(tableId), - utils.findRow(ctx, tableId, rowId), linkRows.getLinkDocuments({ tableId, rowId, fieldName }), ]) + let row = await utils.findRow(ctx, tableId, rowId) + row = await outputProcessing(table, row) const linkVals = links as LinkDocumentValue[] // look up the actual rows based on the ids From 
a91ecb8c1443c9d2d91d4c0e97418bf73de281cd Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 29 Feb 2024 17:03:04 +0000 Subject: [PATCH 51/54] Addressing PR comments. --- packages/server/src/api/controllers/datasource.ts | 4 ++-- packages/types/src/documents/pouch.ts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts index 17217ea018..0f17c5a2f5 100644 --- a/packages/server/src/api/controllers/datasource.ts +++ b/packages/server/src/api/controllers/datasource.ts @@ -95,9 +95,9 @@ async function invalidateVariables( updatedDatasource: Datasource ) { const existingVariables: DynamicVariable[] = - existingDatasource.config?.dynamicVariables + existingDatasource.config?.dynamicVariables || [] const updatedVariables: DynamicVariable[] = - updatedDatasource.config?.dynamicVariables + updatedDatasource.config?.dynamicVariables || [] const toInvalidate = [] if (!existingVariables) { diff --git a/packages/types/src/documents/pouch.ts b/packages/types/src/documents/pouch.ts index e1de89649c..6ff851a515 100644 --- a/packages/types/src/documents/pouch.ts +++ b/packages/types/src/documents/pouch.ts @@ -9,7 +9,7 @@ export interface RowResponse { id: string key: string error: string - value: T | RowValue + value: T doc?: T } From e79e7c7cec603cfe6f7978de2bf6a5c0eb07002b Mon Sep 17 00:00:00 2001 From: Budibase Staging Release Bot <> Date: Thu, 29 Feb 2024 18:11:02 +0000 Subject: [PATCH 52/54] Bump version to 2.20.14 --- lerna.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lerna.json b/lerna.json index 1b559f217d..4807a80646 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.20.13", + "version": "2.20.14", "npmClient": "yarn", "packages": [ "packages/*", From 5b09a130cc674c2802a6a367760d20c5ff7a9474 Mon Sep 17 00:00:00 2001 From: melohagan <101575380+melohagan@users.noreply.github.com> Date: Thu, 29 Feb 2024 
18:33:57 +0000 Subject: [PATCH 53/54] Make use of 405 when appropriate (#13168) --- packages/server/src/startup.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/server/src/startup.ts b/packages/server/src/startup.ts index f9b5974eb2..abe931f503 100644 --- a/packages/server/src/startup.ts +++ b/packages/server/src/startup.ts @@ -38,6 +38,7 @@ async function initRoutes(app: Koa) { // api routes app.use(api.router.routes()) + app.use(api.router.allowedMethods()) } async function initPro() { From 873a8396e6c504671e68efff1599e6d522365adc Mon Sep 17 00:00:00 2001 From: cyd5538 Date: Fri, 1 Mar 2024 03:55:21 +0900 Subject: [PATCH 54/54] docs: README Korean version added (#13144) Co-authored-by: melohagan <101575380+melohagan@users.noreply.github.com> --- i18n/README.kr.md | 221 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 221 insertions(+) create mode 100644 i18n/README.kr.md diff --git a/i18n/README.kr.md b/i18n/README.kr.md new file mode 100644 index 0000000000..09fc83569b --- /dev/null +++ b/i18n/README.kr.md @@ -0,0 +1,221 @@ +

+ + Budibase + +

+

+ Budibase +

+

+ 자체 인프라에서 몇 분 만에 맞춤형 비즈니스 도구를 구축하세요. +

+

+ Budibase는 개발자와 IT 전문가가 몇 분 만에 맞춤형 애플리케이션을 구축하고 자동화할 수 있는 오픈 소스 로우코드 플랫폼입니다. +

+ +

+ 🤖 🎨 🚀 +

+ +

+ Budibase design ui +

+ +

+ + GitHub all releases + + + GitHub release (latest by date) + + + Follow @budibase + + Code of conduct + + + +

+ +

+ 소개 + · + 문서 + · + 기능 요청 + · + 버그 보고 + · + 지원: 토론 +

+ +

+## ✨ 특징 + +### "실제" 소프트웨어를 구축할 수 있습니다. +Budibase를 사용하면 고성능 단일 페이지 애플리케이션을 구축할 수 있습니다. 또한 반응형 디자인으로 제작하여 사용자에게 멋진 경험을 제공할 수 있습니다. +

+ +### 오픈 소스 및 확장성 +Budibase는 오픈소스이며, GPL v3 라이선스에 따라 공개되어 있습니다. 이는 Budibase가 항상 당신 곁에 있다는 안도감을 줄 것입니다. 그리고 우리는 개발자 친화적인 환경을 제공하고 있기 때문에, 당신은 원하는 만큼 소스 코드를 포크하여 수정하거나 Budibase에 직접 기여할 수 있습니다. +



### 기존 데이터 또는 처음부터 시작
Budibase를 사용하면 다음과 같은 여러 소스에서 데이터를 가져올 수 있습니다: MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB 또는 REST API.

또는 원하는 경우 외부 도구 없이도 Budibase를 사용하여 처음부터 시작하여 자체 애플리케이션을 구축할 수 있습니다. [데이터 소스 제안](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).


+ Budibase data +

+



### 강력한 내장 구성 요소로 애플리케이션을 설계하고 구축할 수 있습니다.

Budibase에는 아름답게 디자인된 강력한 컴포넌트들이 제공되며, 이를 사용하여 UI를 쉽게 구축할 수 있습니다. 또한, CSS를 통한 스타일링 옵션도 풍부하게 제공되어 보다 창의적인 표현도 가능합니다.
 [Request new component](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).


+ Budibase design +

+

+ +### 프로세스를 자동화하고, 다른 도구와 연동하고, 웹훅으로 연결하세요! +워크플로우와 수동 프로세스를 자동화하여 시간을 절약하세요. 웹훅 이벤트 연결부터 이메일 자동화까지, Budibase에 수행할 작업을 지시하기만 하면 자동으로 처리됩니다. [새로운 자동화 만들기](https://github.com/Budibase/automations)또는[새로운 자동화를 요청할 수 있습니다](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas). + +

+ Budibase automations +

+

+ +### 선호하는 도구 +Budibase는 사용자의 선호도에 따라 애플리케이션을 구축할 수 있는 다양한 도구를 통합하고 있습니다. + +

+ Budibase integrations +

+



### 관리자의 천국
Budibase는 어떤 규모의 프로젝트에도 유연하게 대응할 수 있으며, Budibase를 사용하면 개인 또는 조직의 서버에서 자체 호스팅하고 사용자, 온보딩, SMTP, 앱, 그룹, 테마 등을 한꺼번에 관리할 수 있습니다. 또한, 사용자나 그룹에 앱 포털을 제공하고 그룹 관리자에게 사용자 관리를 맡길 수도 있습니다.
- 프로모션 비디오: https://youtu.be/xoljVpty_Kw



+ +## 🏁 시작 + +Docker, Kubernetes 또는 Digital Ocean을 사용하여 자체 인프라에서 Budibase를 호스팅하거나, 걱정 없이 빠르게 애플리케이션을 구축하려는 경우 클라우드에서 Budibase를 사용할 수 있습니다. + +### [Budibase 셀프 호스팅으로 시작하기](https://docs.budibase.com/docs/hosting-methods) + +- [Docker - single ARM compatible image](https://docs.budibase.com/docs/docker) +- [Docker Compose](https://docs.budibase.com/docs/docker-compose) +- [Kubernetes](https://docs.budibase.com/docs/kubernetes-k8s) +- [Digital Ocean](https://docs.budibase.com/docs/digitalocean) +- [Portainer](https://docs.budibase.com/docs/portainer) + + +### [클라우드에서 Budibase 시작하기](https://budibase.com) + +



## 🎓 Budibase 알아보기

Budibase 사용법은 [Budibase 문서](https://docs.budibase.com/docs)에서 확인할 수 있습니다.
+ + +

+ +## 💬 커뮤니티 + +질문하고, 다른 사람을 돕고, 다른 Budibase 사용자와 즐거운 대화를 나눌 수 있는 Budibase 커뮤니티에 여러분을 초대합니다. +[깃허브 토론](https://github.com/Budibase/budibase/discussions) +


+ + +## ❗ 행동강령 + +Budibase 는 모든 계층의 사람들을 환영하고 상호 존중하는 환경을 제공하는 데 특별한 주의를 기울이고 있습니다. 저희는 커뮤니티에도 같은 기대를 가지고 있습니다. +[**행동 강령**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md). +
+ +



## 🙌 Budibase에 기여하기

버그 신고부터 코드의 버그 수정에 이르기까지 모든 기여를 감사하고 환영합니다. 새로운 기능을 구현하거나 API를 변경할 계획이 있다면 [여기에 새 이슈를 등록해 주세요](https://github.com/Budibase/budibase/issues).
이렇게 하면 여러분의 노력이 헛되지 않도록 보장할 수 있습니다.

기여를 위한 Budibase 환경 설정 방법에 대한 지침은 [여기를 클릭하세요](https://github.com/Budibase/budibase/tree/HEAD/docs/CONTRIBUTING.md).

### 어디서부터 시작해야 할지 혼란스러우신가요?
이곳은 기여를 시작하기에 최적의 장소입니다! [First time issues project](https://github.com/Budibase/budibase/projects/22).

### 리포지토리 구성

Budibase는 Lerna에서 관리하는 단일 리포지토리입니다. Lerna는 변경 사항이 있을 때마다 이를 동기화하여 Budibase 패키지를 빌드하고 게시합니다. 크게 보면 이러한 패키지가 Budibase를 구성하는 패키지입니다:

- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - budibase builder 클라이언트 측의 svelte 애플리케이션 코드가 포함되어 있습니다.

- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - 브라우저에서 실행되는 모듈로, JSON 정의를 읽어 실제 동작하는 웹 앱으로 만들어 주는 런타임 라이브러리가 포함되어 있습니다.

- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - Budibase의 서버 부분입니다. 이 Koa 애플리케이션은 빌더에게 Budibase 애플리케이션을 생성하는 데 필요한 것을 제공하는 역할을 합니다. 또한 데이터베이스 및 파일 저장소와 상호 작용할 수 있는 API를 제공합니다.

자세한 내용은 다음 문서를 참조하세요. [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md)


+ + +## 📝 라이선스 + +Budibase는 오픈 소스이며, 라이선스는 다음과 같습니다 [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html). 클라이언트 및 컴포넌트 라이브러리는 다음과 같이 라이선스가 부여됩니다. [MPL](https://directory.fsf.org/wiki/License:MPL-2.0) - 이렇게 하면 빌드한 애플리케이션에 원하는 대로 라이선스를 부여할 수 있습니다. + +

+ +## ⭐ 스타 수의 역사 + +[![Stargazers over time](https://starchart.cc/Budibase/budibase.svg)](https://starchart.cc/Budibase/budibase) + +빌더 업데이트 중 문제가 발생하는 경우 [여기](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md#troubleshooting) 를 참고하여 환경을 정리해 주세요. + +

+ +## Contributors ✨ + +훌륭한 여러분께 감사할 따름입니다. ([emoji key](https://allcontributors.org/docs/en/emoji-key)): + + + + +
+ + + + + + + + + + + + + + + + + + + + +

Martin McKeaveney

💻 📖 ⚠️ 🚇

Michael Drury

📖 💻 ⚠️ 🚇

Andrew Kingston

📖 💻 ⚠️ 🎨

Michael Shanks

📖 💻 ⚠️

Kevin Åberg Kultalahti

📖 💻 ⚠️

Joe

📖 💻 🖋 🎨

Rory Powell

💻 📖 ⚠️

Peter Clement

💻 📖 ⚠️

Conor_Mack

💻 ⚠️

pngwn

💻 ⚠️

HugoLd

💻

victoriasloan

💻

yashank09

💻

SOVLOOKUP

💻

seoulaja

🌍

Maurits Lourens

⚠️ 💻
+ + + + + + +이 프로젝트는 다음 사양을 따릅니다. [all-contributors](https://github.com/all-contributors/all-contributors). +모든 종류의 기여를 환영합니다!