From c7c2cb48e8180a5e9cdd678e504de1cc2456616f Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 23 Jul 2024 14:41:34 +0100 Subject: [PATCH 01/94] wip --- .../src/api/routes/tests/search.spec.ts | 17 ++--- .../src/integrations/tests/utils/index.ts | 6 ++ .../src/integrations/tests/utils/oracle.ts | 62 +++++++++++++++++++ 3 files changed, 77 insertions(+), 8 deletions(-) create mode 100644 packages/server/src/integrations/tests/utils/oracle.ts diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index e774158c23..6e685b13ab 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -40,13 +40,14 @@ import { structures } from "@budibase/backend-core/tests" import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default" describe.each([ - ["in-memory", undefined], - ["lucene", undefined], - ["sqs", undefined], - [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + //["in-memory", undefined], + //["lucene", undefined], + //["sqs", undefined], + //[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + //[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + //[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], + //[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" const isLucene = name === "lucene" @@ -291,7 +292,7 @@ describe.each([ }) describe("equal", () => { - it("successfully finds true row", async () => { + it.only("successfully finds true row", async () => { await expectQuery({ equal: { isTrue: true } }).toMatchExactly([ { isTrue: true 
}, ]) diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts index b888f1adc1..da7d0d8666 100644 --- a/packages/server/src/integrations/tests/utils/index.ts +++ b/packages/server/src/integrations/tests/utils/index.ts @@ -4,6 +4,7 @@ import * as mongodb from "./mongodb" import * as mysql from "./mysql" import * as mssql from "./mssql" import * as mariadb from "./mariadb" +import * as oracle from "./oracle" import { GenericContainer, StartedTestContainer } from "testcontainers" import { testContainerUtils } from "@budibase/backend-core/tests" import cloneDeep from "lodash/cloneDeep" @@ -16,6 +17,7 @@ export enum DatabaseName { MYSQL = "mysql", SQL_SERVER = "mssql", MARIADB = "mariadb", + ORACLE = "oracle", } const providers: Record = { @@ -24,6 +26,7 @@ const providers: Record = { [DatabaseName.MYSQL]: mysql.getDatasource, [DatabaseName.SQL_SERVER]: mssql.getDatasource, [DatabaseName.MARIADB]: mariadb.getDatasource, + [DatabaseName.ORACLE]: oracle.getDatasource, } export function getDatasourceProviders( @@ -59,6 +62,9 @@ export async function knexClient(ds: Datasource) { case SourceName.SQL_SERVER: { return mssql.knexClient(ds) } + case SourceName.ORACLE: { + return oracle.knexClient(ds) + } default: { throw new Error(`Unsupported source: ${ds.source}`) } diff --git a/packages/server/src/integrations/tests/utils/oracle.ts b/packages/server/src/integrations/tests/utils/oracle.ts new file mode 100644 index 0000000000..a4f294d7ba --- /dev/null +++ b/packages/server/src/integrations/tests/utils/oracle.ts @@ -0,0 +1,62 @@ +import { Datasource, SourceName } from "@budibase/types" +import { GenericContainer, Wait } from "testcontainers" +import { generator, testContainerUtils } from "@budibase/backend-core/tests" +import { startContainer } from "." 
+import knex from "knex" + +let ports: Promise + +export async function getDatasource(): Promise { + if (!ports) { + let image = "oracle/database:19.3.0.0-ee" + if (process.arch.startsWith("arm")) { + image = "samhuang78/oracle-database:19.3.0-ee-slim-faststart" + } + + ports = startContainer( + new GenericContainer(image) + .withExposedPorts(1521) + .withEnvironment({ ORACLE_PASSWORD: "password" }) + .withWaitStrategy(Wait.forHealthCheck().withStartupTimeout(10000)) + ) + } + + const port = (await ports).find(x => x.container === 1521)?.host + if (!port) { + throw new Error("Oracle port not found") + } + + const datasource: Datasource = { + type: "datasource_plus", + source: SourceName.ORACLE, + plus: true, + config: { + host: "127.0.0.1", + port, + database: "postgres", + user: "SYS", + password: "password", + }, + } + + const database = generator.guid().replaceAll("-", "") + const client = await knexClient(datasource) + await client.raw(`CREATE DATABASE "${database}"`) + datasource.config!.database = database + + return datasource +} + +export async function knexClient(ds: Datasource) { + if (!ds.config) { + throw new Error("Datasource config is missing") + } + if (ds.source !== SourceName.ORACLE) { + throw new Error("Datasource source is not Oracle") + } + + return knex({ + client: "oracledb", + connection: ds.config, + }) +} From 50d1972127facb73c0c26c2e5b36b07b2d7bdb86 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 29 Jul 2024 09:57:24 +0100 Subject: [PATCH 02/94] wip --- packages/backend-core/src/sql/sql.ts | 45 ++++++++++++-- packages/server/__mocks__/oracledb.ts | 21 ------- .../src/api/routes/tests/search.spec.ts | 61 ++++++++++--------- packages/server/src/integrations/oracle.ts | 13 +++- .../src/integrations/tests/utils/oracle.ts | 39 +++++++----- .../src/utilities/rowProcessor/index.ts | 7 +++ 6 files changed, 114 insertions(+), 72 deletions(-) delete mode 100644 packages/server/__mocks__/oracledb.ts diff --git 
a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index a67da7bc10..621afe7f3e 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -109,6 +109,26 @@ function parseFilters(filters: SearchFilters | undefined): SearchFilters { return filters } +// OracleDB can't use character-large-objects (CLOBs) in WHERE clauses, +// so when we use them we need to wrap them in to_char(). This function +// converts a field name to the appropriate identifier. +function convertClobs(client: SqlClient, table: Table, field: string): string { + const parts = field.split(".") + const col = parts.pop()! + const schema = table.schema[col] + let identifier = quotedIdentifier(client, field) + if ( + schema.type === FieldType.STRING || + schema.type === FieldType.LONGFORM || + schema.type === FieldType.BB_REFERENCE_SINGLE || + schema.type === FieldType.OPTIONS || + schema.type === FieldType.BARCODEQR + ) { + identifier = `to_char(${identifier})` + } + return identifier +} + function generateSelectStatement( json: QueryJson, knex: Knex @@ -372,7 +392,15 @@ class InternalBuilder { iterate( filters.oneOf, (key: string, array) => { - query = query[fnc](key, Array.isArray(array) ? array : [array]) + if (this.client === SqlClient.ORACLE) { + key = convertClobs(this.client, table, key) + query = query.whereRaw( + `${key} IN (?)`, + Array.isArray(array) ? array : [array] + ) + } else { + query = query[fnc](key, Array.isArray(array) ? array : [array]) + } }, (key: string[], array) => { query = query[fnc](key, Array.isArray(array) ? 
array : [array]) @@ -436,8 +464,9 @@ class InternalBuilder { [value] ) } else if (this.client === SqlClient.ORACLE) { + const identifier = convertClobs(this.client, table, key) query = query[fnc]( - `COALESCE(${quotedIdentifier(this.client, key)}, -1) = ?`, + `(${identifier} IS NOT NULL AND ${identifier} = ?)`, [value] ) } else { @@ -460,8 +489,9 @@ class InternalBuilder { [value] ) } else if (this.client === SqlClient.ORACLE) { + const identifier = convertClobs(this.client, table, key) query = query[fnc]( - `COALESCE(${quotedIdentifier(this.client, key)}, -1) != ?`, + `(${identifier} IS NOT NULL AND ${identifier} != ?)`, [value] ) } else { @@ -707,8 +737,11 @@ class InternalBuilder { } const ret = query.insert(parsedBody).onConflict(primary).merge() return ret - } else if (this.client === SqlClient.MS_SQL) { - // No upsert or onConflict support in MSSQL yet, see: + } else if ( + this.client === SqlClient.MS_SQL || + this.client === SqlClient.ORACLE + ) { + // No upsert or onConflict support in MSSQL/Oracle yet, see: // https://github.com/knex/knex/pull/6050 return query.insert(parsedBody) } @@ -867,7 +900,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { const config: Knex.Config = { client: sqlClient, } - if (sqlClient === SqlClient.SQL_LITE) { + if (sqlClient === SqlClient.SQL_LITE || sqlClient === SqlClient.ORACLE) { config.useNullAsDefault = true } diff --git a/packages/server/__mocks__/oracledb.ts b/packages/server/__mocks__/oracledb.ts deleted file mode 100644 index 0172ace0e6..0000000000 --- a/packages/server/__mocks__/oracledb.ts +++ /dev/null @@ -1,21 +0,0 @@ -const executeMock = jest.fn(() => ({ - rows: [ - { - a: "string", - b: 1, - }, - ], -})) - -const closeMock = jest.fn() - -class Connection { - execute = executeMock - close = closeMock -} - -module.exports = { - getConnection: jest.fn(() => new Connection()), - executeMock, - closeMock, -} diff --git a/packages/server/src/api/routes/tests/search.spec.ts 
b/packages/server/src/api/routes/tests/search.spec.ts index 6e685b13ab..57afdb4853 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -40,13 +40,13 @@ import { structures } from "@budibase/backend-core/tests" import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default" describe.each([ - //["in-memory", undefined], - //["lucene", undefined], - //["sqs", undefined], - //[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - //[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - //[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - //[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + // ["in-memory", undefined], + // ["lucene", undefined], + // ["sqs", undefined], + // [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + // [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + // [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], + // [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" @@ -292,7 +292,7 @@ describe.each([ }) describe("equal", () => { - it.only("successfully finds true row", async () => { + it("successfully finds true row", async () => { await expectQuery({ equal: { isTrue: true } }).toMatchExactly([ { isTrue: true }, ]) @@ -1577,12 +1577,15 @@ describe.each([ }) }) - describe("bigints", () => { + describe.only("bigints", () => { const SMALL = "1" const MEDIUM = "10000000" // Our bigints are int64s in most datasources. 
- const BIG = "9223372036854775807" + let BIG = "9223372036854775807" + if (name === DatabaseName.ORACLE) { + // BIG = "9223372036854775808" + } beforeAll(async () => { table = await createTable({ @@ -2415,25 +2418,25 @@ describe.each([ describe.each([ "名前", // Japanese for "name" - "Benutzer-ID", // German for "user ID", includes a hyphen - "numéro", // French for "number", includes an accent - "år", // Swedish for "year", includes a ring above - "naïve", // English word borrowed from French, includes an umlaut - "الاسم", // Arabic for "name" - "оплата", // Russian for "payment" - "पता", // Hindi for "address" - "用戶名", // Chinese for "username" - "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla - "preço", // Portuguese for "price", includes a cedilla - "사용자명", // Korean for "username" - "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" - "файл", // Bulgarian for "file" - "δεδομένα", // Greek for "data" - "geändert_am", // German for "modified on", includes an umlaut - "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore - "São_Paulo", // Portuguese, includes an underscore and a tilde - "età", // Italian for "age", includes an accent - "ชื่อผู้ใช้", // Thai for "username" + // "Benutzer-ID", // German for "user ID", includes a hyphen + // "numéro", // French for "number", includes an accent + // "år", // Swedish for "year", includes a ring above + // "naïve", // English word borrowed from French, includes an umlaut + // "الاسم", // Arabic for "name" + // "оплата", // Russian for "payment" + // "पता", // Hindi for "address" + // "用戶名", // Chinese for "username" + // "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla + // "preço", // Portuguese for "price", includes a cedilla + // "사용자명", // Korean for "username" + // "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" + // "файл", // Bulgarian for "file" + // "δεδομένα", // Greek for "data" + // "geändert_am", 
// German for "modified on", includes an umlaut + // "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore + // "São_Paulo", // Portuguese, includes an underscore and a tilde + // "età", // Italian for "age", includes an accent + // "ชื่อผู้ใช้", // Thai for "username" ])("non-ascii column name: %s", name => { beforeAll(async () => { table = await createTable({ diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 9f40372546..b36501525b 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -360,11 +360,20 @@ class OracleIntegration extends Sql implements DatasourcePlus { this.index = 1 connection = await this.getConnection() - const options: ExecuteOptions = { autoCommit: true } + const options: ExecuteOptions = { + autoCommit: true, + fetchTypeHandler: function (metaData) { + if (metaData.dbType === oracledb.CLOB) { + return { type: oracledb.STRING } + } + return undefined + }, + } const bindings: BindParameters = query.bindings || [] this.log(query.sql, bindings) - return await connection.execute(query.sql, bindings, options) + const result = await connection.execute(query.sql, bindings, options) + return result as Result } finally { if (connection) { try { diff --git a/packages/server/src/integrations/tests/utils/oracle.ts b/packages/server/src/integrations/tests/utils/oracle.ts index a4f294d7ba..9b75a52da7 100644 --- a/packages/server/src/integrations/tests/utils/oracle.ts +++ b/packages/server/src/integrations/tests/utils/oracle.ts @@ -8,7 +8,7 @@ let ports: Promise export async function getDatasource(): Promise { if (!ports) { - let image = "oracle/database:19.3.0.0-ee" + let image = "oracle/database:19.3.0.0-ee-slim-faststart" if (process.arch.startsWith("arm")) { image = "samhuang78/oracle-database:19.3.0-ee-slim-faststart" } @@ -17,7 +17,7 @@ export async function getDatasource(): Promise { new GenericContainer(image) 
.withExposedPorts(1521) .withEnvironment({ ORACLE_PASSWORD: "password" }) - .withWaitStrategy(Wait.forHealthCheck().withStartupTimeout(10000)) + .withWaitStrategy(Wait.forHealthCheck().withStartupTimeout(60000)) ) } @@ -26,23 +26,25 @@ export async function getDatasource(): Promise { throw new Error("Oracle port not found") } + const host = "127.0.0.1" + const user = "SYSTEM" + const password = "password" + const datasource: Datasource = { type: "datasource_plus", source: SourceName.ORACLE, plus: true, - config: { - host: "127.0.0.1", - port, - database: "postgres", - user: "SYS", - password: "password", - }, + config: { host, port, user, password, database: "FREEPDB1" }, } - const database = generator.guid().replaceAll("-", "") + const newUser = "a" + generator.guid().replaceAll("-", "") const client = await knexClient(datasource) - await client.raw(`CREATE DATABASE "${database}"`) - datasource.config!.database = database + await client.raw(`CREATE USER ${newUser} IDENTIFIED BY password`) + await client.raw( + `GRANT CONNECT, RESOURCE, CREATE VIEW, CREATE SESSION TO ${newUser}` + ) + await client.raw(`GRANT UNLIMITED TABLESPACE TO ${newUser}`) + datasource.config!.user = newUser return datasource } @@ -55,8 +57,17 @@ export async function knexClient(ds: Datasource) { throw new Error("Datasource source is not Oracle") } - return knex({ + const db = ds.config.database || "FREEPDB1" + const connectString = `${ds.config.host}:${ds.config.port}/${db}` + + const c = knex({ client: "oracledb", - connection: ds.config, + connection: { + connectString, + user: ds.config.user, + password: ds.config.password, + }, }) + + return c } diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index 71de056814..a367c6da1e 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ b/packages/server/src/utilities/rowProcessor/index.ts @@ -315,6 +315,13 @@ export async function outputProcessing( column.subtype ) 
} + } else if (column.type === FieldType.BIGINT) { + for (const row of enriched) { + if (row[property] == null) { + continue + } + row[property] = row[property].toString() + } } } From a4b66e00e46c97c46614961746bb047a6608bf16 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 29 Jul 2024 14:32:28 +0100 Subject: [PATCH 03/94] Fix bigints. --- packages/server/src/api/routes/tests/search.spec.ts | 2 +- packages/server/src/integrations/oracle.ts | 12 ++++++++++++ packages/server/src/utilities/rowProcessor/index.ts | 7 ------- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 57afdb4853..4c7410eb76 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -1577,7 +1577,7 @@ describe.each([ }) }) - describe.only("bigints", () => { + describe("bigints", () => { const SMALL = "1" const MEDIUM = "10000000" diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index b36501525b..6b86fba00d 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -365,6 +365,18 @@ class OracleIntegration extends Sql implements DatasourcePlus { fetchTypeHandler: function (metaData) { if (metaData.dbType === oracledb.CLOB) { return { type: oracledb.STRING } + } else if ( + // When we create a new table in OracleDB from Budibase, bigints get + // created as NUMBER(20,0). Budibase expects bigints to be returned + // as strings, which is what we're doing here. However, this is + // likely to be brittle if we connect to externally created + // databases that have used different precisions and scales. + // We should find a way to do better. 
+ metaData.dbType === oracledb.NUMBER && + metaData.precision === 20 && + metaData.scale === 0 + ) { + return { type: oracledb.STRING } } return undefined }, diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index a367c6da1e..71de056814 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ b/packages/server/src/utilities/rowProcessor/index.ts @@ -315,13 +315,6 @@ export async function outputProcessing( column.subtype ) } - } else if (column.type === FieldType.BIGINT) { - for (const row of enriched) { - if (row[property] == null) { - continue - } - row[property] = row[property].toString() - } } } From 5cb294f33e5b1b0fd137ce8ae513271deb28b3a6 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 29 Jul 2024 14:54:58 +0100 Subject: [PATCH 04/94] Fix binding mismatch problem in oneOf queries. --- packages/backend-core/src/sql/sql.ts | 7 +++---- packages/backend-core/src/sql/utils.ts | 1 + 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 621afe7f3e..797e4b646a 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -394,10 +394,9 @@ class InternalBuilder { (key: string, array) => { if (this.client === SqlClient.ORACLE) { key = convertClobs(this.client, table, key) - query = query.whereRaw( - `${key} IN (?)`, - Array.isArray(array) ? array : [array] - ) + array = Array.isArray(array) ? array : [array] + const binding = new Array(array.length).fill("?").join(",") + query = query.whereRaw(`${key} IN (${binding})`, array) } else { query = query[fnc](key, Array.isArray(array) ? 
array : [array]) } diff --git a/packages/backend-core/src/sql/utils.ts b/packages/backend-core/src/sql/utils.ts index 67b5d2081b..1b32cc6da7 100644 --- a/packages/backend-core/src/sql/utils.ts +++ b/packages/backend-core/src/sql/utils.ts @@ -22,6 +22,7 @@ export function getNativeSql( query: Knex.SchemaBuilder | Knex.QueryBuilder ): SqlQuery | SqlQuery[] { let sql = query.toSQL() + if (Array.isArray(sql)) { return sql as SqlQuery[] } From ad414b982e1cf58075ccce98932c495329523cc6 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 29 Jul 2024 16:54:59 +0100 Subject: [PATCH 05/94] Gone some way toward getting time-only fields to work. Still test failures though. --- packages/backend-core/src/sql/sql.ts | 398 +++++++++--------- .../src/api/routes/tests/search.spec.ts | 2 +- packages/server/src/integrations/oracle.ts | 1 - .../src/utilities/rowProcessor/index.ts | 9 + 4 files changed, 206 insertions(+), 204 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 797e4b646a..5d0a251900 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -42,176 +42,6 @@ const envLimit = environment.SQL_MAX_ROWS : null const BASE_LIMIT = envLimit || 5000 -// Takes a string like foo and returns a quoted string like [foo] for SQL Server -// and "foo" for Postgres. -function quote(client: SqlClient, str: string): string { - switch (client) { - case SqlClient.SQL_LITE: - case SqlClient.ORACLE: - case SqlClient.POSTGRES: - return `"${str}"` - case SqlClient.MS_SQL: - return `[${str}]` - case SqlClient.MY_SQL: - return `\`${str}\`` - } -} - -// Takes a string like a.b.c and returns a quoted identifier like [a].[b].[c] -// for SQL Server and `a`.`b`.`c` for MySQL. 
-function quotedIdentifier(client: SqlClient, key: string): string { - return key - .split(".") - .map(part => quote(client, part)) - .join(".") -} - -function parse(input: any) { - if (Array.isArray(input)) { - return JSON.stringify(input) - } - if (input == undefined) { - return null - } - if (typeof input !== "string") { - return input - } - if (isInvalidISODateString(input)) { - return null - } - if (isValidISODateString(input)) { - return new Date(input.trim()) - } - return input -} - -function parseBody(body: any) { - for (let [key, value] of Object.entries(body)) { - body[key] = parse(value) - } - return body -} - -function parseFilters(filters: SearchFilters | undefined): SearchFilters { - if (!filters) { - return {} - } - for (let [key, value] of Object.entries(filters)) { - let parsed - if (typeof value === "object") { - parsed = parseFilters(value) - } else { - parsed = parse(value) - } - // @ts-ignore - filters[key] = parsed - } - return filters -} - -// OracleDB can't use character-large-objects (CLOBs) in WHERE clauses, -// so when we use them we need to wrap them in to_char(). This function -// converts a field name to the appropriate identifier. -function convertClobs(client: SqlClient, table: Table, field: string): string { - const parts = field.split(".") - const col = parts.pop()! 
- const schema = table.schema[col] - let identifier = quotedIdentifier(client, field) - if ( - schema.type === FieldType.STRING || - schema.type === FieldType.LONGFORM || - schema.type === FieldType.BB_REFERENCE_SINGLE || - schema.type === FieldType.OPTIONS || - schema.type === FieldType.BARCODEQR - ) { - identifier = `to_char(${identifier})` - } - return identifier -} - -function generateSelectStatement( - json: QueryJson, - knex: Knex -): (string | Knex.Raw)[] | "*" { - const { resource, meta } = json - const client = knex.client.config.client as SqlClient - - if (!resource || !resource.fields || resource.fields.length === 0) { - return "*" - } - - const schema = meta.table.schema - return resource.fields.map(field => { - const parts = field.split(/\./g) - let table: string | undefined = undefined - let column: string | undefined = undefined - - // Just a column name, e.g.: "column" - if (parts.length === 1) { - column = parts[0] - } - - // A table name and a column name, e.g.: "table.column" - if (parts.length === 2) { - table = parts[0] - column = parts[1] - } - - // A link doc, e.g.: "table.doc1.fieldName" - if (parts.length > 2) { - table = parts[0] - column = parts.slice(1).join(".") - } - - if (!column) { - throw new Error(`Invalid field name: ${field}`) - } - - const columnSchema = schema[column] - - if ( - client === SqlClient.POSTGRES && - columnSchema?.externalType?.includes("money") - ) { - return knex.raw( - `${quotedIdentifier( - client, - [table, column].join(".") - )}::money::numeric as ${quote(client, field)}` - ) - } - - if ( - client === SqlClient.MS_SQL && - columnSchema?.type === FieldType.DATETIME && - columnSchema.timeOnly - ) { - // Time gets returned as timestamp from mssql, not matching the expected - // HH:mm format - return knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`) - } - - // There's at least two edge cases being handled in the expression below. - // 1. 
The column name could start/end with a space, and in that case we - // want to preseve that space. - // 2. Almost all column names are specified in the form table.column, except - // in the case of relationships, where it's table.doc1.column. In that - // case, we want to split it into `table`.`doc1.column` for reasons that - // aren't actually clear to me, but `table`.`doc1` breaks things with the - // sample data tests. - if (table) { - return knex.raw( - `${quote(client, table)}.${quote(client, column)} as ${quote( - client, - field - )}` - ) - } else { - return knex.raw(`${quote(client, field)} as ${quote(client, field)}`) - } - }) -} - function getTableName(table?: Table): string | undefined { // SQS uses the table ID rather than the table name if ( @@ -247,6 +77,181 @@ class InternalBuilder { this.client = client } + // Takes a string like foo and returns a quoted string like [foo] for SQL Server + // and "foo" for Postgres. + private quote(str: string): string { + switch (this.client) { + case SqlClient.SQL_LITE: + case SqlClient.ORACLE: + case SqlClient.POSTGRES: + return `"${str}"` + case SqlClient.MS_SQL: + return `[${str}]` + case SqlClient.MY_SQL: + return `\`${str}\`` + } + } + + // Takes a string like a.b.c and returns a quoted identifier like [a].[b].[c] + // for SQL Server and `a`.`b`.`c` for MySQL. 
+ private quotedIdentifier(key: string): string { + return key + .split(".") + .map(part => this.quote(part)) + .join(".") + } + + private generateSelectStatement( + json: QueryJson, + knex: Knex + ): (string | Knex.Raw)[] | "*" { + const { resource, meta } = json + const client = knex.client.config.client as SqlClient + + if (!resource || !resource.fields || resource.fields.length === 0) { + return "*" + } + + const schema = meta.table.schema + return resource.fields.map(field => { + const parts = field.split(/\./g) + let table: string | undefined = undefined + let column: string | undefined = undefined + + // Just a column name, e.g.: "column" + if (parts.length === 1) { + column = parts[0] + } + + // A table name and a column name, e.g.: "table.column" + if (parts.length === 2) { + table = parts[0] + column = parts[1] + } + + // A link doc, e.g.: "table.doc1.fieldName" + if (parts.length > 2) { + table = parts[0] + column = parts.slice(1).join(".") + } + + if (!column) { + throw new Error(`Invalid field name: ${field}`) + } + + const columnSchema = schema[column] + + if ( + client === SqlClient.POSTGRES && + columnSchema?.externalType?.includes("money") + ) { + return knex.raw( + `${this.quotedIdentifier( + [table, column].join(".") + )}::money::numeric as ${this.quote(field)}` + ) + } + + if ( + client === SqlClient.MS_SQL && + columnSchema?.type === FieldType.DATETIME && + columnSchema.timeOnly + ) { + // Time gets returned as timestamp from mssql, not matching the expected + // HH:mm format + return knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`) + } + + // There's at least two edge cases being handled in the expression below. + // 1. The column name could start/end with a space, and in that case we + // want to preserve that space. + // 2. Almost all column names are specified in the form table.column, except + // in the case of relationships, where it's table.doc1.column. 
In that + // case, we want to split it into `table`.`doc1.column` for reasons that + // aren't actually clear to me, but `table`.`doc1` breaks things with the + // sample data tests. + if (table) { + return knex.raw( + `${this.quote(table)}.${this.quote(column)} as ${this.quote(field)}` + ) + } else { + return knex.raw(`${this.quote(field)} as ${this.quote(field)}`) + } + }) + } + + // OracleDB can't use character-large-objects (CLOBs) in WHERE clauses, + // so when we use them we need to wrap them in to_char(). This function + // converts a field name to the appropriate identifier. + private convertClobs(table: Table, field: string): string { + const parts = field.split(".") + const col = parts.pop()! + const schema = table.schema[col] + let identifier = this.quotedIdentifier(field) + if ( + schema.type === FieldType.STRING || + schema.type === FieldType.LONGFORM || + schema.type === FieldType.BB_REFERENCE_SINGLE || + schema.type === FieldType.OPTIONS || + schema.type === FieldType.BARCODEQR + ) { + identifier = `to_char(${identifier})` + } + return identifier + } + + private parse(input: any, schema: FieldSchema) { + if (schema.type === FieldType.DATETIME && schema.timeOnly) { + if (this.client === SqlClient.ORACLE) { + return new Date(`1970-01-01 ${input}`) + } + } + + if (Array.isArray(input)) { + return JSON.stringify(input) + } + if (input == undefined) { + return null + } + if (typeof input !== "string") { + return input + } + if (isInvalidISODateString(input)) { + return null + } + if (isValidISODateString(input)) { + return new Date(input.trim()) + } + return input + } + + private parseBody(body: any, table: Table) { + for (let [key, value] of Object.entries(body)) { + body[key] = this.parse(value, table.schema[key]) + } + return body + } + + private parseFilters( + filters: SearchFilters | undefined, + table: Table + ): SearchFilters { + if (!filters) { + return {} + } + for (let [key, value] of Object.entries(filters)) { + let parsed + if (typeof value 
=== "object") { + parsed = this.parseFilters(value, table) + } else { + parsed = this.parse(value, table.schema[key]) + } + // @ts-ignore + filters[key] = parsed + } + return filters + } + // right now we only do filters on the specific table being queried addFilters( query: Knex.QueryBuilder, @@ -261,7 +266,7 @@ class InternalBuilder { if (!filters) { return query } - filters = parseFilters(filters) + filters = this.parseFilters(filters, table) // if all or specified in filters, then everything is an or const allOr = filters.allOr const sqlStatements = new SqlStatements(this.client, table, { @@ -318,10 +323,9 @@ class InternalBuilder { } else { const rawFnc = `${fnc}Raw` // @ts-ignore - query = query[rawFnc]( - `LOWER(${quotedIdentifier(this.client, key)}) LIKE ?`, - [`%${value.toLowerCase()}%`] - ) + query = query[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [ + `%${value.toLowerCase()}%`, + ]) } } @@ -371,10 +375,7 @@ class InternalBuilder { } statement += (statement ? andOr : "") + - `COALESCE(LOWER(${quotedIdentifier( - this.client, - key - )}), '') LIKE ?` + `COALESCE(LOWER(${this.quotedIdentifier(key)}), '') LIKE ?` } if (statement === "") { @@ -393,7 +394,7 @@ class InternalBuilder { filters.oneOf, (key: string, array) => { if (this.client === SqlClient.ORACLE) { - key = convertClobs(this.client, table, key) + key = this.convertClobs(table, key) array = Array.isArray(array) ? array : [array] const binding = new Array(array.length).fill("?").join(",") query = query.whereRaw(`${key} IN (${binding})`, array) @@ -415,10 +416,9 @@ class InternalBuilder { } else { const rawFnc = `${fnc}Raw` // @ts-ignore - query = query[rawFnc]( - `LOWER(${quotedIdentifier(this.client, key)}) LIKE ?`, - [`${value.toLowerCase()}%`] - ) + query = query[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [ + `${value.toLowerCase()}%`, + ]) } }) } @@ -456,21 +456,18 @@ class InternalBuilder { const fnc = allOr ? 
"orWhereRaw" : "whereRaw" if (this.client === SqlClient.MS_SQL) { query = query[fnc]( - `CASE WHEN ${quotedIdentifier( - this.client, - key - )} = ? THEN 1 ELSE 0 END = 1`, + `CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 1`, [value] ) } else if (this.client === SqlClient.ORACLE) { - const identifier = convertClobs(this.client, table, key) + const identifier = this.convertClobs(table, key) query = query[fnc]( `(${identifier} IS NOT NULL AND ${identifier} = ?)`, [value] ) } else { query = query[fnc]( - `COALESCE(${quotedIdentifier(this.client, key)} = ?, FALSE)`, + `COALESCE(${this.quotedIdentifier(key)} = ?, FALSE)`, [value] ) } @@ -481,21 +478,18 @@ class InternalBuilder { const fnc = allOr ? "orWhereRaw" : "whereRaw" if (this.client === SqlClient.MS_SQL) { query = query[fnc]( - `CASE WHEN ${quotedIdentifier( - this.client, - key - )} = ? THEN 1 ELSE 0 END = 0`, + `CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 0`, [value] ) } else if (this.client === SqlClient.ORACLE) { - const identifier = convertClobs(this.client, table, key) + const identifier = this.convertClobs(table, key) query = query[fnc]( `(${identifier} IS NOT NULL AND ${identifier} != ?)`, [value] ) } else { query = query[fnc]( - `COALESCE(${quotedIdentifier(this.client, key)} != ?, TRUE)`, + `COALESCE(${this.quotedIdentifier(key)} != ?, TRUE)`, [value] ) } @@ -692,7 +686,7 @@ class InternalBuilder { create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { const { endpoint, body } = json let query = this.knexWithAlias(knex, endpoint) - const parsedBody = parseBody(body) + const parsedBody = this.parseBody(body, json.meta.table) // make sure no null values in body for creation for (let [key, value] of Object.entries(parsedBody)) { if (value == null) { @@ -714,7 +708,7 @@ class InternalBuilder { if (!Array.isArray(body)) { return query } - const parsedBody = body.map(row => parseBody(row)) + const parsedBody = body.map(row => this.parseBody(row, 
json.meta.table)) return query.insert(parsedBody) } @@ -724,7 +718,7 @@ class InternalBuilder { if (!Array.isArray(body)) { return query } - const parsedBody = body.map(row => parseBody(row)) + const parsedBody = body.map(row => this.parseBody(row, json.meta.table)) if ( this.client === SqlClient.POSTGRES || this.client === SqlClient.SQL_LITE || @@ -806,7 +800,7 @@ class InternalBuilder { }) // if counting, use distinct count, else select preQuery = !counting - ? preQuery.select(generateSelectStatement(json, knex)) + ? preQuery.select(this.generateSelectStatement(json, knex)) : this.addDistinctCount(preQuery, json) // have to add after as well (this breaks MS-SQL) if (this.client !== SqlClient.MS_SQL && !counting) { @@ -837,7 +831,7 @@ class InternalBuilder { update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { const { endpoint, body, filters, tableAliases } = json let query = this.knexWithAlias(knex, endpoint, tableAliases) - const parsedBody = parseBody(body) + const parsedBody = this.parseBody(body, json.meta.table) query = this.addFilters(query, filters, json.meta.table, { columnPrefix: json.meta.columnPrefix, aliases: tableAliases, @@ -861,7 +855,7 @@ class InternalBuilder { if (opts.disableReturning) { return query.delete() } else { - return query.delete().returning(generateSelectStatement(json, knex)) + return query.delete().returning(this.generateSelectStatement(json, knex)) } } } diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 4c7410eb76..c6c5786e53 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -1318,7 +1318,7 @@ describe.each([ }) !isInternal && - describe("datetime - time only", () => { + describe.only("datetime - time only", () => { const T_1000 = "10:00:00" const T_1045 = "10:45:00" const T_1200 = "12:00:00" diff --git a/packages/server/src/integrations/oracle.ts 
b/packages/server/src/integrations/oracle.ts index 6b86fba00d..b8fcd63e7f 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -398,7 +398,6 @@ class OracleIntegration extends Sql implements DatasourcePlus { } private getConnection = async (): Promise => { - //connectString : "(DESCRIPTION =(ADDRESS = (PROTOCOL = TCP)(HOST = localhost)(PORT = 1521))(CONNECT_DATA =(SID= ORCL)))" const connectString = `${this.config.host}:${this.config.port || 1521}/${ this.config.database }` diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index 71de056814..139f3a5b8d 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ b/packages/server/src/utilities/rowProcessor/index.ts @@ -315,6 +315,15 @@ export async function outputProcessing( column.subtype ) } + } else if (column.type === FieldType.DATETIME && column.timeOnly) { + for (let row of enriched) { + if (row[property] instanceof Date) { + const hours = row[property].getHours().toString().padStart(2, "0") + const minutes = row[property].getMinutes().toString().padStart(2, "0") + const seconds = row[property].getSeconds().toString().padStart(2, "0") + row[property] = `${hours}:${minutes}:${seconds}` + } + } } } From e1ef66bf56df1fc8bd95832743a62dbc8329035c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 29 Jul 2024 18:11:05 +0100 Subject: [PATCH 06/94] Refactor InternalBuilder to give me more access to query state. 
--- packages/backend-core/src/sql/sql.ts | 253 +++++++++--------- .../src/api/routes/tests/search.spec.ts | 64 ++--- .../src/sdk/app/rows/search/external.ts | 3 +- 3 files changed, 158 insertions(+), 162 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 5d0a251900..fd24c8e12c 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -34,6 +34,8 @@ import { } from "@budibase/types" import environment from "../environment" import { helpers } from "@budibase/shared-core" +import { isPlainObject } from "lodash" +import { ColumnSplitter } from "@budibase/shared-core/src/filters" type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any @@ -72,9 +74,15 @@ function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] { class InternalBuilder { private readonly client: SqlClient + private readonly query: QueryJson - constructor(client: SqlClient) { + constructor(client: SqlClient, query: QueryJson) { this.client = client + this.query = query + } + + get table(): Table { + return this.query.meta.table } // Takes a string like foo and returns a quoted string like [foo] for SQL Server @@ -101,11 +109,8 @@ class InternalBuilder { .join(".") } - private generateSelectStatement( - json: QueryJson, - knex: Knex - ): (string | Knex.Raw)[] | "*" { - const { resource, meta } = json + private generateSelectStatement(knex: Knex): (string | Knex.Raw)[] | "*" { + const { resource, meta } = this.query const client = knex.client.config.client as SqlClient if (!resource || !resource.fields || resource.fields.length === 0) { @@ -183,10 +188,10 @@ class InternalBuilder { // OracleDB can't use character-large-objects (CLOBs) in WHERE clauses, // so when we use them we need to wrap them in to_char(). This function // converts a field name to the appropriate identifier. 
- private convertClobs(table: Table, field: string): string { + private convertClobs(field: string): string { const parts = field.split(".") const col = parts.pop()! - const schema = table.schema[col] + const schema = this.table.schema[col] let identifier = this.quotedIdentifier(field) if ( schema.type === FieldType.STRING || @@ -201,54 +206,60 @@ class InternalBuilder { } private parse(input: any, schema: FieldSchema) { + if (input == undefined) { + return null + } + + if (isPlainObject(input)) { + for (const [key, value] of Object.entries(input)) { + input[key] = this.parse(value, schema) + } + return input + } + if (schema.type === FieldType.DATETIME && schema.timeOnly) { if (this.client === SqlClient.ORACLE) { return new Date(`1970-01-01 ${input}`) } } - if (Array.isArray(input)) { - return JSON.stringify(input) - } - if (input == undefined) { - return null - } - if (typeof input !== "string") { - return input - } - if (isInvalidISODateString(input)) { - return null - } - if (isValidISODateString(input)) { - return new Date(input.trim()) + if (typeof input === "string") { + if (isInvalidISODateString(input)) { + return null + } + if (isValidISODateString(input)) { + return new Date(input.trim()) + } } + return input } - private parseBody(body: any, table: Table) { + private parseBody(body: any) { for (let [key, value] of Object.entries(body)) { - body[key] = this.parse(value, table.schema[key]) + body[key] = this.parse(value, this.table.schema[key]) } return body } - private parseFilters( - filters: SearchFilters | undefined, - table: Table - ): SearchFilters { + private parseFilters(filters: SearchFilters | undefined): SearchFilters { if (!filters) { return {} } - for (let [key, value] of Object.entries(filters)) { - let parsed - if (typeof value === "object") { - parsed = this.parseFilters(value, table) - } else { - parsed = this.parse(value, table.schema[key]) + + for (const [_, filter] of Object.entries(filters)) { + for (const [key, value] of 
Object.entries(filter)) { + const { column } = new ColumnSplitter([this.table]).run(key) + const schema = this.table.schema[column] + if (!schema) { + throw new Error( + `Column ${key} does not exist in table ${this.table._id}` + ) + } + filter[key] = this.parse(value, schema) } - // @ts-ignore - filters[key] = parsed } + return filters } @@ -256,28 +267,26 @@ class InternalBuilder { addFilters( query: Knex.QueryBuilder, filters: SearchFilters | undefined, - table: Table, - opts: { - aliases?: Record + opts?: { relationship?: boolean - columnPrefix?: string } ): Knex.QueryBuilder { if (!filters) { return query } - filters = this.parseFilters(filters, table) + filters = this.parseFilters(filters) + const aliases = this.query.tableAliases // if all or specified in filters, then everything is an or const allOr = filters.allOr - const sqlStatements = new SqlStatements(this.client, table, { + const sqlStatements = new SqlStatements(this.client, this.table, { allOr, - columnPrefix: opts.columnPrefix, + columnPrefix: this.query.meta.columnPrefix, }) const tableName = - this.client === SqlClient.SQL_LITE ? table._id! : table.name + this.client === SqlClient.SQL_LITE ? this.table._id! : this.table.name function getTableAlias(name: string) { - const alias = opts.aliases?.[name] + const alias = aliases?.[name] return alias || name } function iterate( @@ -303,10 +312,10 @@ class InternalBuilder { ), castedTypeValue.values ) - } else if (!opts.relationship && !isRelationshipField) { + } else if (!opts?.relationship && !isRelationshipField) { const alias = getTableAlias(tableName) fn(alias ? `${alias}.${updatedKey}` : updatedKey, value) - } else if (opts.relationship && isRelationshipField) { + } else if (opts?.relationship && isRelationshipField) { const [filterTableName, property] = updatedKey.split(".") const alias = getTableAlias(filterTableName) fn(alias ? 
`${alias}.${property}` : property, value) @@ -394,7 +403,7 @@ class InternalBuilder { filters.oneOf, (key: string, array) => { if (this.client === SqlClient.ORACLE) { - key = this.convertClobs(table, key) + key = this.convertClobs(key) array = Array.isArray(array) ? array : [array] const binding = new Array(array.length).fill("?").join(",") query = query.whereRaw(`${key} IN (${binding})`, array) @@ -460,7 +469,7 @@ class InternalBuilder { [value] ) } else if (this.client === SqlClient.ORACLE) { - const identifier = this.convertClobs(table, key) + const identifier = this.convertClobs(key) query = query[fnc]( `(${identifier} IS NOT NULL AND ${identifier} = ?)`, [value] @@ -482,7 +491,7 @@ class InternalBuilder { [value] ) } else if (this.client === SqlClient.ORACLE) { - const identifier = this.convertClobs(table, key) + const identifier = this.convertClobs(key) query = query[fnc]( `(${identifier} IS NOT NULL AND ${identifier} != ?)`, [value] @@ -517,9 +526,9 @@ class InternalBuilder { contains(filters.containsAny, true) } - const tableRef = opts?.aliases?.[table._id!] || table._id + const tableRef = aliases?.[this.table._id!] || this.table._id // when searching internal tables make sure long looking for rows - if (filters.documentType && !isExternalTable(table) && tableRef) { + if (filters.documentType && !isExternalTable(this.table) && tableRef) { // has to be its own option, must always be AND onto the search query.andWhereLike( `${tableRef}._id`, @@ -530,29 +539,26 @@ class InternalBuilder { return query } - addDistinctCount( - query: Knex.QueryBuilder, - json: QueryJson - ): Knex.QueryBuilder { - const table = json.meta.table - const primary = table.primary - const aliases = json.tableAliases + addDistinctCount(query: Knex.QueryBuilder): Knex.QueryBuilder { + const primary = this.table.primary + const aliases = this.query.tableAliases const aliased = - table.name && aliases?.[table.name] ? 
aliases[table.name] : table.name + this.table.name && aliases?.[this.table.name] + ? aliases[this.table.name] + : this.table.name if (!primary) { throw new Error("SQL counting requires primary key to be supplied") } return query.countDistinct(`${aliased}.${primary[0]} as total`) } - addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { - let { sort } = json - const table = json.meta.table - const primaryKey = table.primary - const tableName = getTableName(table) - const aliases = json.tableAliases + addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder { + let { sort } = this.query + const primaryKey = this.table.primary + const tableName = getTableName(this.table) + const aliases = this.query.tableAliases const aliased = - tableName && aliases?.[tableName] ? aliases[tableName] : table?.name + tableName && aliases?.[tableName] ? aliases[tableName] : this.table?.name if (!Array.isArray(primaryKey)) { throw new Error("Sorting requires primary key to be specified for table") } @@ -667,26 +673,28 @@ class InternalBuilder { return query } - knexWithAlias( + qualifiedKnex( knex: Knex, - endpoint: QueryJson["endpoint"], - aliases?: QueryJson["tableAliases"] + opts?: { alias?: string | boolean } ): Knex.QueryBuilder { - const tableName = endpoint.entityId - const tableAlias = aliases?.[tableName] - + let alias = this.query.tableAliases?.[this.query.endpoint.entityId] + if (opts?.alias === false) { + alias = undefined + } else if (typeof opts?.alias === "string") { + alias = opts.alias + } return knex( - this.tableNameWithSchema(tableName, { - alias: tableAlias, - schema: endpoint.schema, + this.tableNameWithSchema(this.query.endpoint.entityId, { + alias, + schema: this.query.endpoint.schema, }) ) } - create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { - const { endpoint, body } = json - let query = this.knexWithAlias(knex, endpoint) - const parsedBody = this.parseBody(body, json.meta.table) + create(knex: Knex, opts: 
QueryOptions): Knex.QueryBuilder { + const { body } = this.query + let query = this.qualifiedKnex(knex, { alias: false }) + const parsedBody = this.parseBody(body) // make sure no null values in body for creation for (let [key, value] of Object.entries(parsedBody)) { if (value == null) { @@ -702,29 +710,29 @@ class InternalBuilder { } } - bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder { - const { endpoint, body } = json - let query = this.knexWithAlias(knex, endpoint) + bulkCreate(knex: Knex): Knex.QueryBuilder { + const { body } = this.query + let query = this.qualifiedKnex(knex, { alias: false }) if (!Array.isArray(body)) { return query } - const parsedBody = body.map(row => this.parseBody(row, json.meta.table)) + const parsedBody = body.map(row => this.parseBody(row)) return query.insert(parsedBody) } - bulkUpsert(knex: Knex, json: QueryJson): Knex.QueryBuilder { - const { endpoint, body } = json - let query = this.knexWithAlias(knex, endpoint) + bulkUpsert(knex: Knex): Knex.QueryBuilder { + const { body } = this.query + let query = this.qualifiedKnex(knex, { alias: false }) if (!Array.isArray(body)) { return query } - const parsedBody = body.map(row => this.parseBody(row, json.meta.table)) + const parsedBody = body.map(row => this.parseBody(row)) if ( this.client === SqlClient.POSTGRES || this.client === SqlClient.SQL_LITE || this.client === SqlClient.MY_SQL ) { - const primary = json.meta.table.primary + const primary = this.table.primary if (!primary) { throw new Error("Primary key is required for upsert") } @@ -743,18 +751,18 @@ class InternalBuilder { read( knex: Knex, - json: QueryJson, opts: { limits?: { base: number; query: number } } = {} ): Knex.QueryBuilder { - let { endpoint, filters, paginate, relationships, tableAliases } = json + let { endpoint, filters, paginate, relationships, tableAliases } = + this.query const { limits } = opts const counting = endpoint.operation === Operation.COUNT const tableName = endpoint.entityId // start 
building the query - let query = this.knexWithAlias(knex, endpoint, tableAliases) + let query = this.qualifiedKnex(knex) // handle pagination let foundOffset: number | null = null let foundLimit = limits?.query || limits?.base @@ -782,13 +790,10 @@ class InternalBuilder { } // add sorting to pre-query // no point in sorting when counting - query = this.addSorting(query, json) + query = this.addSorting(query) } // add filters to the query (where) - query = this.addFilters(query, filters, json.meta.table, { - columnPrefix: json.meta.columnPrefix, - aliases: tableAliases, - }) + query = this.addFilters(query, filters) const alias = tableAliases?.[tableName] || tableName let preQuery: Knex.QueryBuilder = knex({ @@ -800,11 +805,11 @@ class InternalBuilder { }) // if counting, use distinct count, else select preQuery = !counting - ? preQuery.select(this.generateSelectStatement(json, knex)) - : this.addDistinctCount(preQuery, json) + ? preQuery.select(this.generateSelectStatement(knex)) + : this.addDistinctCount(preQuery) // have to add after as well (this breaks MS-SQL) if (this.client !== SqlClient.MS_SQL && !counting) { - preQuery = this.addSorting(preQuery, json) + preQuery = this.addSorting(preQuery) } // handle joins query = this.addRelationships( @@ -821,21 +826,14 @@ class InternalBuilder { query = query.limit(limits.base) } - return this.addFilters(query, filters, json.meta.table, { - columnPrefix: json.meta.columnPrefix, - relationship: true, - aliases: tableAliases, - }) + return this.addFilters(query, filters, { relationship: true }) } - update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { - const { endpoint, body, filters, tableAliases } = json - let query = this.knexWithAlias(knex, endpoint, tableAliases) - const parsedBody = this.parseBody(body, json.meta.table) - query = this.addFilters(query, filters, json.meta.table, { - columnPrefix: json.meta.columnPrefix, - aliases: tableAliases, - }) + update(knex: Knex, opts: QueryOptions): 
Knex.QueryBuilder { + const { body, filters } = this.query + let query = this.qualifiedKnex(knex) + const parsedBody = this.parseBody(body) + query = this.addFilters(query, filters) // mysql can't use returning if (opts.disableReturning) { return query.update(parsedBody) @@ -844,18 +842,15 @@ class InternalBuilder { } } - delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { - const { endpoint, filters, tableAliases } = json - let query = this.knexWithAlias(knex, endpoint, tableAliases) - query = this.addFilters(query, filters, json.meta.table, { - columnPrefix: json.meta.columnPrefix, - aliases: tableAliases, - }) + delete(knex: Knex, opts: QueryOptions): Knex.QueryBuilder { + const { filters } = this.query + let query = this.qualifiedKnex(knex) + query = this.addFilters(query, filters) // mysql can't use returning if (opts.disableReturning) { return query.delete() } else { - return query.delete().returning(this.generateSelectStatement(json, knex)) + return query.delete().returning(this.generateSelectStatement(knex)) } } } @@ -899,13 +894,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { const client = knex(config) let query: Knex.QueryBuilder - const builder = new InternalBuilder(sqlClient) + const builder = new InternalBuilder(sqlClient, json) switch (this._operation(json)) { case Operation.CREATE: - query = builder.create(client, json, opts) + query = builder.create(client, opts) break case Operation.READ: - query = builder.read(client, json, { + query = builder.read(client, { limits: { query: this.limit, base: BASE_LIMIT, @@ -914,19 +909,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { break case Operation.COUNT: // read without any limits to count - query = builder.read(client, json) + query = builder.read(client) break case Operation.UPDATE: - query = builder.update(client, json, opts) + query = builder.update(client, opts) break case Operation.DELETE: - query = builder.delete(client, json, opts) + query = 
builder.delete(client, opts) break case Operation.BULK_CREATE: - query = builder.bulkCreate(client, json) + query = builder.bulkCreate(client) break case Operation.BULK_UPSERT: - query = builder.bulkUpsert(client, json) + query = builder.bulkUpsert(client) break case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index c6c5786e53..a08fac7396 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -40,14 +40,14 @@ import { structures } from "@budibase/backend-core/tests" import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default" describe.each([ - // ["in-memory", undefined], - // ["lucene", undefined], - // ["sqs", undefined], - // [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - // [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - // [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - // [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], - [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], + ["in-memory", undefined], + ["lucene", undefined], + ["sqs", undefined], + [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], + [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + // [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" const isLucene = name === "lucene" @@ -1318,7 +1318,7 @@ describe.each([ }) !isInternal && - describe.only("datetime - time only", () => { + describe("datetime - time only", () => { const T_1000 = "10:00:00" const T_1045 = "10:45:00" const T_1200 = "12:00:00" @@ -2389,9 +2389,9 @@ describe.each([ describe.each([ { low: 
"2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" }, - { low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" }, - { low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, - { low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, + // { low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" }, + // { low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, + // { low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, ])("date special cases", ({ low, high }) => { const earlyDate = "2024-07-03T10:00:00.000Z", laterDate = "2024-07-03T11:00:00.000Z" @@ -2405,7 +2405,7 @@ describe.each([ await createRows([{ date: earlyDate }, { date: laterDate }]) }) - it("should be able to handle a date search", async () => { + it.only("should be able to handle a date search", async () => { await expectSearch({ query: { range: { @@ -2418,25 +2418,25 @@ describe.each([ describe.each([ "名前", // Japanese for "name" - // "Benutzer-ID", // German for "user ID", includes a hyphen - // "numéro", // French for "number", includes an accent - // "år", // Swedish for "year", includes a ring above - // "naïve", // English word borrowed from French, includes an umlaut - // "الاسم", // Arabic for "name" - // "оплата", // Russian for "payment" - // "पता", // Hindi for "address" - // "用戶名", // Chinese for "username" - // "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla - // "preço", // Portuguese for "price", includes a cedilla - // "사용자명", // Korean for "username" - // "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" - // "файл", // Bulgarian for "file" - // "δεδομένα", // Greek for "data" - // "geändert_am", // German for "modified on", includes an umlaut - // "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore - // "São_Paulo", // Portuguese, includes an underscore and a tilde - // "età", // Italian for "age", includes an accent - // 
"ชื่อผู้ใช้", // Thai for "username" + "Benutzer-ID", // German for "user ID", includes a hyphen + "numéro", // French for "number", includes an accent + "år", // Swedish for "year", includes a ring above + "naïve", // English word borrowed from French, includes an umlaut + "الاسم", // Arabic for "name" + "оплата", // Russian for "payment" + "पता", // Hindi for "address" + "用戶名", // Chinese for "username" + "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla + "preço", // Portuguese for "price", includes a cedilla + "사용자명", // Korean for "username" + "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" + "файл", // Bulgarian for "file" + "δεδομένα", // Greek for "data" + "geändert_am", // German for "modified on", includes an umlaut + "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore + "São_Paulo", // Portuguese, includes an underscore and a tilde + "età", // Italian for "age", includes an accent + "ชื่อผู้ใช้", // Thai for "username" ])("non-ascii column name: %s", name => { beforeAll(async () => { table = await createTable({ diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index cd0650e4c4..815094eeeb 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -123,7 +123,8 @@ export async function search( } catch (err: any) { if (err.message && err.message.includes("does not exist")) { throw new Error( - `Table updated externally, please re-fetch - ${err.message}` + `Table updated externally, please re-fetch - ${err.message}`, + { cause: err } ) } else { throw err From 25ab2e26894bbecfbd4ff9aa60c1b1ec2db31768 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 29 Jul 2024 18:20:49 +0100 Subject: [PATCH 07/94] Revert schema plumbing, need to revisit. 
--- packages/backend-core/src/sql/sql.ts | 109 ++++++++++++------ .../src/api/routes/tests/search.spec.ts | 2 +- 2 files changed, 77 insertions(+), 34 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index fd24c8e12c..917182f7d2 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -205,39 +205,28 @@ class InternalBuilder { return identifier } - private parse(input: any, schema: FieldSchema) { + private parse(input: any) { + if (Array.isArray(input)) { + return JSON.stringify(input) + } if (input == undefined) { return null } - - if (isPlainObject(input)) { - for (const [key, value] of Object.entries(input)) { - input[key] = this.parse(value, schema) - } + if (typeof input !== "string") { return input } - - if (schema.type === FieldType.DATETIME && schema.timeOnly) { - if (this.client === SqlClient.ORACLE) { - return new Date(`1970-01-01 ${input}`) - } + if (isInvalidISODateString(input)) { + return null } - - if (typeof input === "string") { - if (isInvalidISODateString(input)) { - return null - } - if (isValidISODateString(input)) { - return new Date(input.trim()) - } + if (isValidISODateString(input)) { + return new Date(input.trim()) } - return input } private parseBody(body: any) { for (let [key, value] of Object.entries(body)) { - body[key] = this.parse(value, this.table.schema[key]) + body[key] = this.parse(value) } return body } @@ -246,23 +235,77 @@ class InternalBuilder { if (!filters) { return {} } - - for (const [_, filter] of Object.entries(filters)) { - for (const [key, value] of Object.entries(filter)) { - const { column } = new ColumnSplitter([this.table]).run(key) - const schema = this.table.schema[column] - if (!schema) { - throw new Error( - `Column ${key} does not exist in table ${this.table._id}` - ) - } - filter[key] = this.parse(value, schema) + for (let [key, value] of Object.entries(filters)) { + let parsed + if (typeof value === "object") { + 
parsed = this.parseFilters(value) + } else { + parsed = this.parse(value) } + // @ts-ignore + filters[key] = parsed } - return filters } + // private parse(input: any, schema: FieldSchema) { + // if (input == undefined) { + // return null + // } + + // if (isPlainObject(input)) { + // for (const [key, value] of Object.entries(input)) { + // input[key] = this.parse(value, schema) + // } + // return input + // } + + // if (schema.type === FieldType.DATETIME && schema.timeOnly) { + // if (this.client === SqlClient.ORACLE) { + // return new Date(`1970-01-01 ${input}`) + // } + // } + + // if (typeof input === "string") { + // if (isInvalidISODateString(input)) { + // return null + // } + // if (isValidISODateString(input)) { + // return new Date(input.trim()) + // } + // } + + // return input + // } + + // private parseBody(body: any) { + // for (let [key, value] of Object.entries(body)) { + // body[key] = this.parse(value, this.table.schema[key]) + // } + // return body + // } + + // private parseFilters(filters: SearchFilters | undefined): SearchFilters { + // if (!filters) { + // return {} + // } + + // for (const [_, filter] of Object.entries(filters)) { + // for (const [key, value] of Object.entries(filter)) { + // const { column } = new ColumnSplitter([this.table]).run(key) + // const schema = this.table.schema[column] + // if (!schema) { + // throw new Error( + // `Column ${key} does not exist in table ${this.table._id}` + // ) + // } + // filter[key] = this.parse(value, schema) + // } + // } + + // return filters + // } + // right now we only do filters on the specific table being queried addFilters( query: Knex.QueryBuilder, diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index a08fac7396..110a9ae699 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -2405,7 +2405,7 @@ describe.each([ await createRows([{ date: 
earlyDate }, { date: laterDate }]) }) - it.only("should be able to handle a date search", async () => { + it("should be able to handle a date search", async () => { await expectSearch({ query: { range: { From fe36b76fe9a16f610a7763c0d7a34d01a59bb971 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 29 Jul 2024 18:56:15 +0100 Subject: [PATCH 08/94] Getting something working - using a different version and a pre-built image. --- .../scripts/integrations/oracle/docker-compose.yml | 6 +++--- .../server/src/integrations/tests/utils/oracle.ts | 11 +++++++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/server/scripts/integrations/oracle/docker-compose.yml b/packages/server/scripts/integrations/oracle/docker-compose.yml index 586f0b683d..07992b6544 100644 --- a/packages/server/scripts/integrations/oracle/docker-compose.yml +++ b/packages/server/scripts/integrations/oracle/docker-compose.yml @@ -6,9 +6,9 @@ services: db: restart: unless-stopped platform: linux/x86_64 - image: container-registry.oracle.com/database/express:18.4.0-xe + image: gvenzl/oracle-free:23.2-slim-faststart environment: - ORACLE_PWD: oracle + ORACLE_PWD: Password1 ports: - 1521:1521 - 5500:5500 @@ -16,4 +16,4 @@ services: - oracle_data:/opt/oracle/oradata volumes: - oracle_data: \ No newline at end of file + oracle_data: diff --git a/packages/server/src/integrations/tests/utils/oracle.ts b/packages/server/src/integrations/tests/utils/oracle.ts index 9b75a52da7..c904c094e1 100644 --- a/packages/server/src/integrations/tests/utils/oracle.ts +++ b/packages/server/src/integrations/tests/utils/oracle.ts @@ -7,8 +7,10 @@ import knex from "knex" let ports: Promise export async function getDatasource(): Promise { + // password needs to conform to Oracle standards + const password = "password" if (!ports) { - let image = "oracle/database:19.3.0.0-ee-slim-faststart" + let image = "gvenzl/oracle-free:23.2-slim-faststart" if (process.arch.startsWith("arm")) { image = 
"samhuang78/oracle-database:19.3.0-ee-slim-faststart" } @@ -16,8 +18,10 @@ export async function getDatasource(): Promise { ports = startContainer( new GenericContainer(image) .withExposedPorts(1521) - .withEnvironment({ ORACLE_PASSWORD: "password" }) - .withWaitStrategy(Wait.forHealthCheck().withStartupTimeout(60000)) + .withEnvironment({ + ORACLE_PASSWORD: password, + }) + .withWaitStrategy(Wait.forLogMessage("DATABASE IS READY TO USE!")) ) } @@ -28,7 +32,6 @@ export async function getDatasource(): Promise { const host = "127.0.0.1" const user = "SYSTEM" - const password = "password" const datasource: Datasource = { type: "datasource_plus", From d448f469f14ae1ed528f019d5bbf7b85214d442c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 29 Jul 2024 19:00:25 +0100 Subject: [PATCH 09/94] Updating image names. --- packages/server/src/integrations/tests/utils/oracle.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/server/src/integrations/tests/utils/oracle.ts b/packages/server/src/integrations/tests/utils/oracle.ts index c904c094e1..5c788fd130 100644 --- a/packages/server/src/integrations/tests/utils/oracle.ts +++ b/packages/server/src/integrations/tests/utils/oracle.ts @@ -10,9 +10,11 @@ export async function getDatasource(): Promise { // password needs to conform to Oracle standards const password = "password" if (!ports) { - let image = "gvenzl/oracle-free:23.2-slim-faststart" + // couldn't build 19.3.0 for X64 + let image = "budibase/oracle-database:23.2-slim-faststart" if (process.arch.startsWith("arm")) { - image = "samhuang78/oracle-database:19.3.0-ee-slim-faststart" + // there isn't an ARM compatible 23.2 build + image = "budibase/oracle-database:19.3.0-ee-slim-faststart" } ports = startContainer( From 5bce8e595d109678090a7fdaa08baf7b71e91611 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 11:03:54 +0100 Subject: [PATCH 10/94] Plumb FieldSchema into parse. 
--- packages/backend-core/src/sql/sql.ts | 161 +++++++++--------- .../server/src/sdk/app/rows/search/sqs.ts | 10 +- .../server/src/sdk/app/tables/internal/sqs.ts | 22 +-- packages/shared-core/src/filters.ts | 43 ++++- packages/shared-core/src/helpers/schema.ts | 19 +++ 5 files changed, 143 insertions(+), 112 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 917182f7d2..26d6545868 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -12,6 +12,8 @@ import { SqlStatements } from "./sqlStatements" import SqlTableQueryBuilder from "./sqlTable" import { AnySearchFilter, + ArrayOperator, + BasicOperator, BBReferenceFieldMetadata, FieldSchema, FieldType, @@ -23,6 +25,7 @@ import { prefixed, QueryJson, QueryOptions, + RangeOperator, RelationshipsJson, SearchFilters, SortOrder, @@ -33,9 +36,7 @@ import { TableSourceType, } from "@budibase/types" import environment from "../environment" -import { helpers } from "@budibase/shared-core" -import { isPlainObject } from "lodash" -import { ColumnSplitter } from "@budibase/shared-core/src/filters" +import { dataFilters, helpers } from "@budibase/shared-core" type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any @@ -75,10 +76,16 @@ function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] { class InternalBuilder { private readonly client: SqlClient private readonly query: QueryJson + private readonly splitter: dataFilters.ColumnSplitter constructor(client: SqlClient, query: QueryJson) { this.client = client this.query = query + + this.splitter = new dataFilters.ColumnSplitter([this.table], { + aliases: this.query.tableAliases, + columnPrefix: this.query.meta.columnPrefix, + }) } get table(): Table { @@ -205,107 +212,95 @@ class InternalBuilder { return identifier } - private parse(input: any) { + private parse(input: any, schema: FieldSchema) { if (Array.isArray(input)) { return 
JSON.stringify(input) } if (input == undefined) { return null } - if (typeof input !== "string") { - return input - } - if (isInvalidISODateString(input)) { - return null - } - if (isValidISODateString(input)) { - return new Date(input.trim()) + if (typeof input === "string") { + if (isInvalidISODateString(input)) { + return null + } + if (isValidISODateString(input)) { + return new Date(input.trim()) + } } return input } private parseBody(body: any) { for (let [key, value] of Object.entries(body)) { - body[key] = this.parse(value) + const { column } = this.splitter.run(key) + const schema = this.table.schema[column] + if (!schema) { + continue + } + body[key] = this.parse(value, schema) } return body } - private parseFilters(filters: SearchFilters | undefined): SearchFilters { - if (!filters) { - return {} - } - for (let [key, value] of Object.entries(filters)) { - let parsed - if (typeof value === "object") { - parsed = this.parseFilters(value) - } else { - parsed = this.parse(value) + private parseFilters(filters: SearchFilters): SearchFilters { + for (const op of Object.values(BasicOperator)) { + const filter = filters[op] + if (!filter) { + continue + } + for (const key of Object.keys(filter)) { + if (Array.isArray(filter[key])) { + filter[key] = JSON.stringify(filter[key]) + continue + } + const { column } = this.splitter.run(key) + const schema = this.table.schema[column] + if (!schema) { + continue + } + filter[key] = this.parse(filter[key], schema) } - // @ts-ignore - filters[key] = parsed } + + for (const op of Object.values(ArrayOperator)) { + const filter = filters[op] + if (!filter) { + continue + } + for (const key of Object.keys(filter)) { + const { column } = this.splitter.run(key) + const schema = this.table.schema[column] + if (!schema) { + continue + } + filter[key] = filter[key].map(v => this.parse(v, schema)) + } + } + + for (const op of Object.values(RangeOperator)) { + const filter = filters[op] + if (!filter) { + continue + } + for (const 
key of Object.keys(filter)) { + const { column } = this.splitter.run(key) + const schema = this.table.schema[column] + if (!schema) { + continue + } + const value = filter[key] + if ("low" in value) { + value.low = this.parse(value.low, schema) + } + if ("high" in value) { + value.high = this.parse(value.high, schema) + } + } + } + return filters } - // private parse(input: any, schema: FieldSchema) { - // if (input == undefined) { - // return null - // } - - // if (isPlainObject(input)) { - // for (const [key, value] of Object.entries(input)) { - // input[key] = this.parse(value, schema) - // } - // return input - // } - - // if (schema.type === FieldType.DATETIME && schema.timeOnly) { - // if (this.client === SqlClient.ORACLE) { - // return new Date(`1970-01-01 ${input}`) - // } - // } - - // if (typeof input === "string") { - // if (isInvalidISODateString(input)) { - // return null - // } - // if (isValidISODateString(input)) { - // return new Date(input.trim()) - // } - // } - - // return input - // } - - // private parseBody(body: any) { - // for (let [key, value] of Object.entries(body)) { - // body[key] = this.parse(value, this.table.schema[key]) - // } - // return body - // } - - // private parseFilters(filters: SearchFilters | undefined): SearchFilters { - // if (!filters) { - // return {} - // } - - // for (const [_, filter] of Object.entries(filters)) { - // for (const [key, value] of Object.entries(filter)) { - // const { column } = new ColumnSplitter([this.table]).run(key) - // const schema = this.table.schema[column] - // if (!schema) { - // throw new Error( - // `Column ${key} does not exist in table ${this.table._id}` - // ) - // } - // filter[key] = this.parse(value, schema) - // } - // } - - // return filters - // } - // right now we only do filters on the specific table being queried addFilters( query: Knex.QueryBuilder, diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 
44fd718871..650321a9a7 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -19,11 +19,7 @@ import { buildInternalRelationships, sqlOutputProcessing, } from "../../../../api/controllers/row/utils" -import { - decodeNonAscii, - mapToUserColumn, - USER_COLUMN_PREFIX, -} from "../../tables/internal/sqs" +import { mapToUserColumn, USER_COLUMN_PREFIX } from "../../tables/internal/sqs" import sdk from "../../../index" import { context, @@ -44,7 +40,7 @@ import { getRelationshipColumns, getTableIDList, } from "./filters" -import { dataFilters } from "@budibase/shared-core" +import { dataFilters, helpers } from "@budibase/shared-core" const builder = new sql.Sql(SqlClient.SQL_LITE) const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`) @@ -164,7 +160,7 @@ function reverseUserColumnMapping(rows: Row[]) { if (index !== -1) { // cut out the prefix const newKey = key.slice(0, index) + key.slice(index + prefixLength) - const decoded = decodeNonAscii(newKey) + const decoded = helpers.schema.decodeNonAscii(newKey) finalRow[decoded] = row[key] } else { finalRow[key] = row[key] diff --git a/packages/server/src/sdk/app/tables/internal/sqs.ts b/packages/server/src/sdk/app/tables/internal/sqs.ts index 3c14e2fc67..6199adcfba 100644 --- a/packages/server/src/sdk/app/tables/internal/sqs.ts +++ b/packages/server/src/sdk/app/tables/internal/sqs.ts @@ -16,6 +16,7 @@ import { } from "../../../../db/utils" import { isEqual } from "lodash" import { DEFAULT_TABLES } from "../../../../db/defaultData/datasource_bb_default" +import { helpers } from "@budibase/shared-core" const FieldTypeMap: Record = { [FieldType.BOOLEAN]: SQLiteType.NUMERIC, @@ -65,29 +66,10 @@ function buildRelationshipDefinitions( export const USER_COLUMN_PREFIX = "data_" -// SQS does not support non-ASCII characters in column names, so we need to -// replace them with unicode escape sequences. 
-function encodeNonAscii(str: string): string { - return str - .split("") - .map(char => { - return char.charCodeAt(0) > 127 - ? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0") - : char - }) - .join("") -} - -export function decodeNonAscii(str: string): string { - return str.replace(/\\u([0-9a-fA-F]{4})/g, (match, p1) => - String.fromCharCode(parseInt(p1, 16)) - ) -} - // utility function to denote that columns in SQLite are mapped to avoid overlap issues // the overlaps can occur due to case insensitivity and some of the columns which Budibase requires export function mapToUserColumn(key: string) { - return `${USER_COLUMN_PREFIX}${encodeNonAscii(key)}` + return `${USER_COLUMN_PREFIX}${helpers.schema.encodeNonAscii(key)}` } // this can generate relationship tables as part of the mapping diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 6db89dd2f3..1c45cb4338 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -22,6 +22,7 @@ import dayjs from "dayjs" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { deepGet, schema } from "./helpers" import { isPlainObject, isEmpty } from "lodash" +import { decodeNonAscii } from "./helpers/schema" const HBS_REGEX = /{{([^{].*?)}}/g @@ -181,8 +182,16 @@ export class ColumnSplitter { tableIds: string[] relationshipColumnNames: string[] relationships: string[] + aliases?: Record + columnPrefix?: string - constructor(tables: Table[]) { + constructor( + tables: Table[], + opts?: { + aliases?: Record + columnPrefix?: string + } + ) { this.tableNames = tables.map(table => table.name) this.tableIds = tables.map(table => table._id!) 
this.relationshipColumnNames = tables.flatMap(table => @@ -195,16 +204,38 @@ export class ColumnSplitter { .concat(this.relationshipColumnNames) // sort by length - makes sure there's no mis-matches due to similarities (sub column names) .sort((a, b) => b.length - a.length) + + if (opts?.aliases) { + this.aliases = {} + for (const [key, value] of Object.entries(opts.aliases)) { + this.aliases[value] = key + } + } + + this.columnPrefix = opts?.columnPrefix } run(key: string): { numberPrefix?: string relationshipPrefix?: string + tableName?: string column: string } { let { prefix, key: splitKey } = getKeyNumbering(key) + + let tableName: string | undefined = undefined + if (this.aliases) { + for (const possibleAlias of Object.keys(this.aliases || {})) { + const withDot = `${possibleAlias}.` + if (splitKey.startsWith(withDot)) { + tableName = this.aliases[possibleAlias]! + splitKey = splitKey.slice(withDot.length) + } + } + } + let relationship: string | undefined - for (let possibleRelationship of this.relationships) { + for (const possibleRelationship of this.relationships) { const withDot = `${possibleRelationship}.` if (splitKey.startsWith(withDot)) { const finalKeyParts = splitKey.split(withDot) @@ -214,7 +245,15 @@ export class ColumnSplitter { break } } + + if (this.columnPrefix) { + if (splitKey.startsWith(this.columnPrefix)) { + splitKey = decodeNonAscii(splitKey.slice(this.columnPrefix.length)) + } + } + return { + tableName, numberPrefix: prefix, relationshipPrefix: relationship, column: splitKey, diff --git a/packages/shared-core/src/helpers/schema.ts b/packages/shared-core/src/helpers/schema.ts index caf562a8cb..d0035cc305 100644 --- a/packages/shared-core/src/helpers/schema.ts +++ b/packages/shared-core/src/helpers/schema.ts @@ -26,3 +26,22 @@ export function isRequired(constraints: FieldConstraints | undefined) { constraints.presence === true) return isRequired } + +// SQS does not support non-ASCII characters in column names, so we need to +// replace 
them with unicode escape sequences. +export function encodeNonAscii(str: string): string { + return str + .split("") + .map(char => { + return char.charCodeAt(0) > 127 + ? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0") + : char + }) + .join("") +} + +export function decodeNonAscii(str: string): string { + return str.replace(/\\u([0-9a-fA-F]{4})/g, (match, p1) => + String.fromCharCode(parseInt(p1, 16)) + ) +} From 05992579352fa25b50617593ea041681b49d272c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 11:26:16 +0100 Subject: [PATCH 11/94] Fix time-only columns. --- packages/backend-core/src/sql/sql.ts | 27 ++++++++++++++++--- .../src/api/routes/tests/search.spec.ts | 18 ++++++------- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 26d6545868..7a76e09d3f 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -219,6 +219,23 @@ class InternalBuilder { if (input == undefined) { return null } + + if ( + this.client === SqlClient.ORACLE && + schema.type === FieldType.DATETIME && + schema.timeOnly + ) { + if (input instanceof Date) { + const hours = input.getHours().toString().padStart(2, "0") + const minutes = input.getMinutes().toString().padStart(2, "0") + const seconds = input.getSeconds().toString().padStart(2, "0") + return `${hours}:${minutes}:${seconds}` + } + if (typeof input === "string") { + return new Date(`1970-01-01 ${input}`) + } + } + if (typeof input === "string") { if (isInvalidISODateString(input)) { return null @@ -531,7 +548,7 @@ class InternalBuilder { } else if (this.client === SqlClient.ORACLE) { const identifier = this.convertClobs(key) query = query[fnc]( - `(${identifier} IS NOT NULL AND ${identifier} != ?)`, + `(${identifier} IS NOT NULL AND ${identifier} != ?) 
OR ${identifier} IS NULL`, [value] ) } else { @@ -605,8 +622,12 @@ class InternalBuilder { const direction = value.direction === SortOrder.ASCENDING ? "asc" : "desc" let nulls - if (this.client === SqlClient.POSTGRES) { - // All other clients already sort this as expected by default, and adding this to the rest of the clients is causing issues + if ( + this.client === SqlClient.POSTGRES || + this.client === SqlClient.ORACLE + ) { + // All other clients already sort this as expected by default, and + // adding this to the rest of the clients is causing issues nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" } diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 110a9ae699..d1fc361993 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -40,14 +40,14 @@ import { structures } from "@budibase/backend-core/tests" import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default" describe.each([ - ["in-memory", undefined], - ["lucene", undefined], - ["sqs", undefined], - [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], - // [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], + // ["in-memory", undefined], + // ["lucene", undefined], + // ["sqs", undefined], + // [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + // [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + // [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], + // [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" const isLucene = name === 
"lucene" @@ -958,7 +958,7 @@ describe.each([ }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) }) - it("sorts descending", async () => { + it.only("sorts descending", async () => { await expectSearch({ query: {}, sort: "name", From bc7501f72bca2ed8bc3057df4209fc96d793758e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 11:54:46 +0100 Subject: [PATCH 12/94] Fix range queries. --- packages/backend-core/src/sql/sql.ts | 150 +++++++++++------- .../backend-core/src/sql/sqlStatements.ts | 87 ---------- .../src/api/routes/tests/search.spec.ts | 2 +- 3 files changed, 96 insertions(+), 143 deletions(-) delete mode 100644 packages/backend-core/src/sql/sqlStatements.ts diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 7a76e09d3f..9803f8588b 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -8,7 +8,6 @@ import { sqlLog, isInvalidISODateString, } from "./utils" -import { SqlStatements } from "./sqlStatements" import SqlTableQueryBuilder from "./sqlTable" import { AnySearchFilter, @@ -77,10 +76,12 @@ class InternalBuilder { private readonly client: SqlClient private readonly query: QueryJson private readonly splitter: dataFilters.ColumnSplitter + private readonly knex: Knex - constructor(client: SqlClient, query: QueryJson) { + constructor(client: SqlClient, knex: Knex, query: QueryJson) { this.client = client this.query = query + this.knex = knex this.splitter = new dataFilters.ColumnSplitter([this.table], { aliases: this.query.tableAliases, @@ -92,6 +93,11 @@ class InternalBuilder { return this.query.meta.table } + getFieldSchema(key: string): FieldSchema | undefined { + const { column } = this.splitter.run(key) + return this.table.schema[column] + } + // Takes a string like foo and returns a quoted string like [foo] for SQL Server // and "foo" for Postgres. 
private quote(str: string): string { @@ -116,9 +122,8 @@ class InternalBuilder { .join(".") } - private generateSelectStatement(knex: Knex): (string | Knex.Raw)[] | "*" { + private generateSelectStatement(): (string | Knex.Raw)[] | "*" { const { resource, meta } = this.query - const client = knex.client.config.client as SqlClient if (!resource || !resource.fields || resource.fields.length === 0) { return "*" @@ -154,10 +159,10 @@ class InternalBuilder { const columnSchema = schema[column] if ( - client === SqlClient.POSTGRES && + this.client === SqlClient.POSTGRES && columnSchema?.externalType?.includes("money") ) { - return knex.raw( + return this.knex.raw( `${this.quotedIdentifier( [table, column].join(".") )}::money::numeric as ${this.quote(field)}` @@ -165,13 +170,13 @@ class InternalBuilder { } if ( - client === SqlClient.MS_SQL && + this.client === SqlClient.MS_SQL && columnSchema?.type === FieldType.DATETIME && columnSchema.timeOnly ) { // Time gets returned as timestamp from mssql, not matching the expected // HH:mm format - return knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`) + return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`) } // There's at least two edge cases being handled in the expression below. @@ -183,11 +188,11 @@ class InternalBuilder { // aren't actually clear to me, but `table`.`doc1` breaks things with the // sample data tests. 
if (table) { - return knex.raw( + return this.knex.raw( `${this.quote(table)}.${this.quote(column)} as ${this.quote(field)}` ) } else { - return knex.raw(`${this.quote(field)} as ${this.quote(field)}`) + return this.knex.raw(`${this.quote(field)} as ${this.quote(field)}`) } }) } @@ -333,10 +338,6 @@ class InternalBuilder { const aliases = this.query.tableAliases // if all or specified in filters, then everything is an or const allOr = filters.allOr - const sqlStatements = new SqlStatements(this.client, this.table, { - allOr, - columnPrefix: this.query.meta.columnPrefix, - }) const tableName = this.client === SqlClient.SQL_LITE ? this.table._id! : this.table.name @@ -506,12 +507,53 @@ class InternalBuilder { } const lowValid = isValidFilter(value.low), highValid = isValidFilter(value.high) + + const schema = this.getFieldSchema(key) + + if (this.client === SqlClient.ORACLE) { + // @ts-ignore + key = this.knex.raw(this.convertClobs(key)) + } + if (lowValid && highValid) { - query = sqlStatements.between(query, key, value.low, value.high) + if ( + schema?.type === FieldType.BIGINT && + this.client === SqlClient.SQL_LITE + ) { + query = query.whereRaw( + `CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`, + [value.low, value.high] + ) + } else { + const fnc = allOr ? "orWhereBetween" : "whereBetween" + query = query[fnc](key, [value.low, value.high]) + } } else if (lowValid) { - query = sqlStatements.lte(query, key, value.low) + if ( + schema?.type === FieldType.BIGINT && + this.client === SqlClient.SQL_LITE + ) { + query = query.whereRaw( + `CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, + [value.low] + ) + } else { + const fnc = allOr ? "orWhere" : "where" + query = query[fnc](key, ">=", value.low) + } } else if (highValid) { - query = sqlStatements.gte(query, key, value.high) + if ( + schema?.type === FieldType.BIGINT && + this.client === SqlClient.SQL_LITE + ) { + query = query.whereRaw( + `CAST(${key} AS INTEGER) <= CAST(? 
AS INTEGER)`, + [value.high] + ) + } else { + const fnc = allOr ? "orWhere" : "where" + query = query[fnc](key, "<=", value.high) + } } }) } @@ -621,17 +663,19 @@ class InternalBuilder { for (let [key, value] of Object.entries(sort)) { const direction = value.direction === SortOrder.ASCENDING ? "asc" : "desc" - let nulls - if ( - this.client === SqlClient.POSTGRES || - this.client === SqlClient.ORACLE - ) { - // All other clients already sort this as expected by default, and - // adding this to the rest of the clients is causing issues - nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" - } + const nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" - query = query.orderBy(`${aliased}.${key}`, direction, nulls) + let composite = `${aliased}.${key}` + if (this.client === SqlClient.ORACLE) { + query = query.orderBy( + // @ts-ignore + this.knex.raw(this.convertClobs(composite)), + direction, + nulls + ) + } else { + query = query.orderBy(composite, direction, nulls) + } } } @@ -732,17 +776,14 @@ class InternalBuilder { return query } - qualifiedKnex( - knex: Knex, - opts?: { alias?: string | boolean } - ): Knex.QueryBuilder { + qualifiedKnex(opts?: { alias?: string | boolean }): Knex.QueryBuilder { let alias = this.query.tableAliases?.[this.query.endpoint.entityId] if (opts?.alias === false) { alias = undefined } else if (typeof opts?.alias === "string") { alias = opts.alias } - return knex( + return this.knex( this.tableNameWithSchema(this.query.endpoint.entityId, { alias, schema: this.query.endpoint.schema, @@ -750,9 +791,9 @@ class InternalBuilder { ) } - create(knex: Knex, opts: QueryOptions): Knex.QueryBuilder { + create(opts: QueryOptions): Knex.QueryBuilder { const { body } = this.query - let query = this.qualifiedKnex(knex, { alias: false }) + let query = this.qualifiedKnex({ alias: false }) const parsedBody = this.parseBody(body) // make sure no null values in body for creation for (let [key, value] of 
Object.entries(parsedBody)) { @@ -769,9 +810,9 @@ class InternalBuilder { } } - bulkCreate(knex: Knex): Knex.QueryBuilder { + bulkCreate(): Knex.QueryBuilder { const { body } = this.query - let query = this.qualifiedKnex(knex, { alias: false }) + let query = this.qualifiedKnex({ alias: false }) if (!Array.isArray(body)) { return query } @@ -779,9 +820,9 @@ class InternalBuilder { return query.insert(parsedBody) } - bulkUpsert(knex: Knex): Knex.QueryBuilder { + bulkUpsert(): Knex.QueryBuilder { const { body } = this.query - let query = this.qualifiedKnex(knex, { alias: false }) + let query = this.qualifiedKnex({ alias: false }) if (!Array.isArray(body)) { return query } @@ -809,7 +850,6 @@ class InternalBuilder { } read( - knex: Knex, opts: { limits?: { base: number; query: number } } = {} @@ -821,7 +861,7 @@ class InternalBuilder { const tableName = endpoint.entityId // start building the query - let query = this.qualifiedKnex(knex) + let query = this.qualifiedKnex() // handle pagination let foundOffset: number | null = null let foundLimit = limits?.query || limits?.base @@ -855,7 +895,7 @@ class InternalBuilder { query = this.addFilters(query, filters) const alias = tableAliases?.[tableName] || tableName - let preQuery: Knex.QueryBuilder = knex({ + let preQuery: Knex.QueryBuilder = this.knex({ // the typescript definition for the knex constructor doesn't support this // syntax, but it is the only way to alias a pre-query result as part of // a query - there is an alias dictionary type, but it assumes it can only @@ -864,7 +904,7 @@ class InternalBuilder { }) // if counting, use distinct count, else select preQuery = !counting - ? preQuery.select(this.generateSelectStatement(knex)) + ? 
preQuery.select(this.generateSelectStatement()) : this.addDistinctCount(preQuery) // have to add after as well (this breaks MS-SQL) if (this.client !== SqlClient.MS_SQL && !counting) { @@ -888,9 +928,9 @@ class InternalBuilder { return this.addFilters(query, filters, { relationship: true }) } - update(knex: Knex, opts: QueryOptions): Knex.QueryBuilder { + update(opts: QueryOptions): Knex.QueryBuilder { const { body, filters } = this.query - let query = this.qualifiedKnex(knex) + let query = this.qualifiedKnex() const parsedBody = this.parseBody(body) query = this.addFilters(query, filters) // mysql can't use returning @@ -901,15 +941,15 @@ class InternalBuilder { } } - delete(knex: Knex, opts: QueryOptions): Knex.QueryBuilder { + delete(opts: QueryOptions): Knex.QueryBuilder { const { filters } = this.query - let query = this.qualifiedKnex(knex) + let query = this.qualifiedKnex() query = this.addFilters(query, filters) // mysql can't use returning if (opts.disableReturning) { return query.delete() } else { - return query.delete().returning(this.generateSelectStatement(knex)) + return query.delete().returning(this.generateSelectStatement()) } } } @@ -953,13 +993,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { const client = knex(config) let query: Knex.QueryBuilder - const builder = new InternalBuilder(sqlClient, json) + const builder = new InternalBuilder(sqlClient, client, json) switch (this._operation(json)) { case Operation.CREATE: - query = builder.create(client, opts) + query = builder.create(opts) break case Operation.READ: - query = builder.read(client, { + query = builder.read({ limits: { query: this.limit, base: BASE_LIMIT, @@ -968,19 +1008,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { break case Operation.COUNT: // read without any limits to count - query = builder.read(client) + query = builder.read() break case Operation.UPDATE: - query = builder.update(client, opts) + query = builder.update(opts) break case Operation.DELETE: - 
query = builder.delete(client, opts) + query = builder.delete(opts) break case Operation.BULK_CREATE: - query = builder.bulkCreate(client) + query = builder.bulkCreate() break case Operation.BULK_UPSERT: - query = builder.bulkUpsert(client) + query = builder.bulkUpsert() break case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: diff --git a/packages/backend-core/src/sql/sqlStatements.ts b/packages/backend-core/src/sql/sqlStatements.ts deleted file mode 100644 index 311f7c7d49..0000000000 --- a/packages/backend-core/src/sql/sqlStatements.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { FieldType, Table, FieldSchema, SqlClient } from "@budibase/types" -import { Knex } from "knex" - -export class SqlStatements { - client: string - table: Table - allOr: boolean | undefined - columnPrefix: string | undefined - - constructor( - client: string, - table: Table, - { allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {} - ) { - this.client = client - this.table = table - this.allOr = allOr - this.columnPrefix = columnPrefix - } - - getField(key: string): FieldSchema | undefined { - const fieldName = key.split(".")[1] - let found = this.table.schema[fieldName] - if (!found && this.columnPrefix) { - const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "") - found = this.table.schema[prefixRemovedFieldName] - } - return found - } - - between( - query: Knex.QueryBuilder, - key: string, - low: number | string, - high: number | string - ) { - // Use a between operator if we have 2 valid range values - const field = this.getField(key) - if ( - field?.type === FieldType.BIGINT && - this.client === SqlClient.SQL_LITE - ) { - query = query.whereRaw( - `CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`, - [low, high] - ) - } else { - const fnc = this.allOr ? 
"orWhereBetween" : "whereBetween" - query = query[fnc](key, [low, high]) - } - return query - } - - lte(query: Knex.QueryBuilder, key: string, low: number | string) { - // Use just a single greater than operator if we only have a low - const field = this.getField(key) - if ( - field?.type === FieldType.BIGINT && - this.client === SqlClient.SQL_LITE - ) { - query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [ - low, - ]) - } else { - const fnc = this.allOr ? "orWhere" : "where" - query = query[fnc](key, ">=", low) - } - return query - } - - gte(query: Knex.QueryBuilder, key: string, high: number | string) { - const field = this.getField(key) - // Use just a single less than operator if we only have a high - if ( - field?.type === FieldType.BIGINT && - this.client === SqlClient.SQL_LITE - ) { - query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [ - high, - ]) - } else { - const fnc = this.allOr ? "orWhere" : "where" - query = query[fnc](key, "<=", high) - } - return query - } -} diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index d1fc361993..00badcbad5 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -958,7 +958,7 @@ describe.each([ }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) }) - it.only("sorts descending", async () => { + it("sorts descending", async () => { await expectSearch({ query: {}, sort: "name", From b6560d1d7b4f58f8b644db2b655cf385a1d3b5fb Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 11:58:28 +0100 Subject: [PATCH 13/94] Fix sorting. 
--- packages/backend-core/src/sql/sql.ts | 9 ++++++++- .../server/src/api/routes/tests/search.spec.ts | 16 ++++++++-------- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 9803f8588b..df70ea6fea 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -663,7 +663,14 @@ class InternalBuilder { for (let [key, value] of Object.entries(sort)) { const direction = value.direction === SortOrder.ASCENDING ? "asc" : "desc" - const nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" + + let nulls: "first" | "last" | undefined = undefined + if ( + this.client === SqlClient.ORACLE || + this.client === SqlClient.POSTGRES + ) { + nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" + } let composite = `${aliased}.${key}` if (this.client === SqlClient.ORACLE) { diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 00badcbad5..110a9ae699 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -40,14 +40,14 @@ import { structures } from "@budibase/backend-core/tests" import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default" describe.each([ - // ["in-memory", undefined], - // ["lucene", undefined], - // ["sqs", undefined], - // [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - // [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - // [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - // [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], - [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], + ["in-memory", undefined], + ["lucene", undefined], + ["sqs", undefined], + [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + 
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], + [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + // [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" const isLucene = name === "lucene" From ff2802873ec24cd13b055d92bbfd93dae8f16aae Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 30 Jul 2024 12:29:16 +0100 Subject: [PATCH 14/94] Fixing an issue with to_char testing in sql.spec.ts. --- .../server/src/integrations/tests/sql.spec.ts | 39 ++++++++++++++----- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index fe7ab761ca..47fc4228e9 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -1,12 +1,16 @@ import { FieldType, Operation, + PaginationJson, QueryJson, + SearchFilters, + SortJson, + SqlClient, Table, TableSourceType, - SqlClient, } from "@budibase/types" import { sql } from "@budibase/backend-core" +import { merge } from "lodash" const Sql = sql.Sql @@ -25,7 +29,16 @@ const TABLE: Table = { primary: ["id"], } -function endpoint(table: any, operation: any) { +const ORACLE_TABLE: Partial = { + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, +} + +function endpoint(table: string, operation: Operation) { return { datasourceId: "Postgres", operation: operation, @@ -39,19 +52,25 @@ function generateReadJson({ filters, sort, paginate, -}: any = {}): QueryJson { - const tableObj = { ...TABLE } +}: { + table?: Partial
+ fields?: string[] + filters?: SearchFilters + sort?: SortJson + paginate?: PaginationJson +} = {}): QueryJson { + let tableObj: Table = { ...TABLE } if (table) { - tableObj.name = table + tableObj = merge(TABLE, table) } return { - endpoint: endpoint(table || TABLE_NAME, "READ"), + endpoint: endpoint(tableObj.name || TABLE_NAME, Operation.READ), resource: { fields: fields || [], }, filters: filters || {}, sort: sort || {}, - paginate: paginate || {}, + paginate: paginate || undefined, meta: { table: tableObj, }, @@ -212,6 +231,7 @@ describe("SQL query builder", () => { it("should use an oracle compatible coalesce query for oracle when using the equals filter", () => { let query = new Sql(SqlClient.ORACLE, limit)._query( generateReadJson({ + table: ORACLE_TABLE, filters: { equal: { name: "John", @@ -222,13 +242,14 @@ describe("SQL query builder", () => { expect(query).toEqual({ bindings: ["John", limit, 5000], - sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) = :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, + sql: `select * from (select * from (select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, }) }) it("should use an oracle compatible coalesce query for oracle when using the not equals filter", () => { let query = new Sql(SqlClient.ORACLE, limit)._query( generateReadJson({ + table: ORACLE_TABLE, filters: { notEqual: { name: "John", @@ -239,7 +260,7 @@ describe("SQL query builder", () => { expect(query).toEqual({ bindings: ["John", limit, 5000], - sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) != :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, + sql: `select * from (select * from 
(select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, }) }) }) From f4afa3270ebb06b5baa1ea7d2b7f0ccaebf08117 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 30 Jul 2024 12:44:31 +0100 Subject: [PATCH 15/94] Removing invalid test. --- .../src/integrations/tests/oracle.spec.ts | 100 ------------------ 1 file changed, 100 deletions(-) delete mode 100644 packages/server/src/integrations/tests/oracle.spec.ts diff --git a/packages/server/src/integrations/tests/oracle.spec.ts b/packages/server/src/integrations/tests/oracle.spec.ts deleted file mode 100644 index 7b620d68ad..0000000000 --- a/packages/server/src/integrations/tests/oracle.spec.ts +++ /dev/null @@ -1,100 +0,0 @@ -const oracledb = require("oracledb") - -import { default as OracleIntegration } from "../oracle" - -jest.mock("oracledb") - -class TestConfiguration { - integration: any - - constructor(config: any = {}) { - this.integration = new OracleIntegration.integration(config) - } -} - -const options = { autoCommit: true } - -describe("Oracle Integration", () => { - let config: any - - beforeEach(() => { - jest.clearAllMocks() - config = new TestConfiguration() - }) - - it("calls the create method with the correct params", async () => { - const sql = "insert into users (name, age) values ('Joe', 123);" - await config.integration.create({ - sql, - }) - expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) - expect(oracledb.executeMock).toHaveBeenCalledTimes(1) - expect(oracledb.closeMock).toHaveBeenCalledTimes(1) - }) - - it("calls the read method with the correct params", async () => { - const sql = "select * from users;" - await config.integration.read({ - sql, - }) - expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) - expect(oracledb.executeMock).toHaveBeenCalledTimes(1) - 
expect(oracledb.closeMock).toHaveBeenCalledTimes(1) - }) - - it("calls the update method with the correct params", async () => { - const sql = "update table users set name = 'test';" - await config.integration.update({ - sql, - }) - expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) - expect(oracledb.executeMock).toHaveBeenCalledTimes(1) - expect(oracledb.closeMock).toHaveBeenCalledTimes(1) - }) - - it("calls the delete method with the correct params", async () => { - const sql = "delete from users where name = 'todelete';" - await config.integration.delete({ - sql, - }) - expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options) - expect(oracledb.executeMock).toHaveBeenCalledTimes(1) - expect(oracledb.closeMock).toHaveBeenCalledTimes(1) - }) - - describe("no rows returned", () => { - beforeEach(() => { - oracledb.executeMock.mockImplementation(() => ({ rows: [] })) - }) - - it("returns the correct response when the create response has no rows", async () => { - const sql = "insert into users (name, age) values ('Joe', 123);" - const response = await config.integration.create({ - sql, - }) - expect(response).toEqual([{ created: true }]) - expect(oracledb.executeMock).toHaveBeenCalledTimes(1) - expect(oracledb.closeMock).toHaveBeenCalledTimes(1) - }) - - it("returns the correct response when the update response has no rows", async () => { - const sql = "update table users set name = 'test';" - const response = await config.integration.update({ - sql, - }) - expect(response).toEqual([{ updated: true }]) - expect(oracledb.executeMock).toHaveBeenCalledTimes(1) - expect(oracledb.closeMock).toHaveBeenCalledTimes(1) - }) - - it("returns the correct response when the delete response has no rows", async () => { - const sql = "delete from users where name = 'todelete';" - const response = await config.integration.delete({ - sql, - }) - expect(response).toEqual([{ deleted: true }]) - expect(oracledb.executeMock).toHaveBeenCalledTimes(1) - 
expect(oracledb.closeMock).toHaveBeenCalledTimes(1) - }) - }) -}) From c6ec710abebbb71f6489e76ab48ba337cf348259 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 16:03:03 +0100 Subject: [PATCH 16/94] Solve _some_ date problems. --- packages/backend-core/src/sql/sql.ts | 2 +- .../src/api/routes/tests/search.spec.ts | 22 +++++++++---------- packages/server/src/integrations/oracle.ts | 4 +++- 3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index df70ea6fea..a06af6e318 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -237,7 +237,7 @@ class InternalBuilder { return `${hours}:${minutes}:${seconds}` } if (typeof input === "string") { - return new Date(`1970-01-01 ${input}`) + return new Date(`1970-01-01T${input}Z`) } } diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 32e4735f3a..e2df279603 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -40,14 +40,14 @@ import { structures } from "@budibase/backend-core/tests" import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default" describe.each([ - ["in-memory", undefined], - ["lucene", undefined], - ["sqs", undefined], - [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], - // [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], + // ["in-memory", undefined], + // ["lucene", undefined], + // ["sqs", undefined], + // [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + // [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + // [DatabaseName.SQL_SERVER, 
getDatasource(DatabaseName.SQL_SERVER)], + // [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" const isLucene = name === "lucene" @@ -2389,9 +2389,9 @@ describe.each([ describe.each([ { low: "2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" }, - // { low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" }, - // { low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, - // { low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, + { low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" }, + { low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, + { low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, ])("date special cases", ({ low, high }) => { const earlyDate = "2024-07-03T10:00:00.000Z", laterDate = "2024-07-03T11:00:00.000Z" diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index b8fcd63e7f..a9ce05302c 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -406,7 +406,9 @@ class OracleIntegration extends Sql implements DatasourcePlus { password: this.config.password, connectString, } - return oracledb.getConnection(attributes) + const connection = await oracledb.getConnection(attributes) + await connection.execute(`ALTER SESSION SET TIME_ZONE='UTC'`) + return connection } async create(query: SqlQuery | string): Promise { From aa7894604ff9111c012308ccf61c9ef5f49ee77b Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 16:56:59 +0100 Subject: [PATCH 17/94] Fix dates and times for good? maybe? 
--- packages/backend-core/src/sql/sql.ts | 11 ++++------- packages/server/src/integrations/oracle.ts | 4 +--- packages/server/src/utilities/rowProcessor/index.ts | 12 +++++++++--- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index a06af6e318..e41d744812 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -666,19 +666,16 @@ class InternalBuilder { let nulls: "first" | "last" | undefined = undefined if ( - this.client === SqlClient.ORACLE || - this.client === SqlClient.POSTGRES + this.client === SqlClient.POSTGRES || + this.client === SqlClient.ORACLE ) { nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" } let composite = `${aliased}.${key}` if (this.client === SqlClient.ORACLE) { - query = query.orderBy( - // @ts-ignore - this.knex.raw(this.convertClobs(composite)), - direction, - nulls + query = query.orderByRaw( + `${this.convertClobs(composite)} ${direction} nulls ${nulls}` ) } else { query = query.orderBy(composite, direction, nulls) diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index a9ce05302c..41762576dd 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -406,9 +406,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { password: this.config.password, connectString, } - const connection = await oracledb.getConnection(attributes) - await connection.execute(`ALTER SESSION SET TIME_ZONE='UTC'`) - return connection + return await oracledb.getConnection(attributes) } async create(query: SqlQuery | string): Promise { diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index 139f3a5b8d..62a3b2dd74 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ b/packages/server/src/utilities/rowProcessor/index.ts @@ 
-318,9 +318,15 @@ export async function outputProcessing( } else if (column.type === FieldType.DATETIME && column.timeOnly) { for (let row of enriched) { if (row[property] instanceof Date) { - const hours = row[property].getHours().toString().padStart(2, "0") - const minutes = row[property].getMinutes().toString().padStart(2, "0") - const seconds = row[property].getSeconds().toString().padStart(2, "0") + const hours = row[property].getUTCHours().toString().padStart(2, "0") + const minutes = row[property] + .getUTCMinutes() + .toString() + .padStart(2, "0") + const seconds = row[property] + .getUTCSeconds() + .toString() + .padStart(2, "0") row[property] = `${hours}:${minutes}:${seconds}` } } From 383132d06c5301596f287f93d49a52b269651de8 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 17:26:00 +0100 Subject: [PATCH 18/94] Actually fix time zone problems this time. --- packages/server/src/integrations/oracle.ts | 5 ++++- packages/server/src/utilities/rowProcessor/index.ts | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 41762576dd..7895692076 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -406,7 +406,10 @@ class OracleIntegration extends Sql implements DatasourcePlus { password: this.config.password, connectString, } - return await oracledb.getConnection(attributes) + const tz = Intl.DateTimeFormat().resolvedOptions().timeZone + const connection = await oracledb.getConnection(attributes) + await connection.execute(`ALTER SESSION SET TIME_ZONE = '${tz}'`) + return connection } async create(query: SqlQuery | string): Promise { diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index 62a3b2dd74..82676442dc 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ 
b/packages/server/src/utilities/rowProcessor/index.ts @@ -28,6 +28,7 @@ import { import { isExternalTableID } from "../../integrations/utils" import { helpers } from "@budibase/shared-core" import { processString } from "@budibase/string-templates" +import { DateTime } from "mssql" export * from "./utils" export * from "./attachments" From d7199c9def6099571d0cc542aeab07047800320d Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 17:41:39 +0100 Subject: [PATCH 19/94] Fix a few more clob problems. --- packages/backend-core/src/sql/sql.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index e41d744812..fc712cb3c3 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -209,6 +209,7 @@ class InternalBuilder { schema.type === FieldType.STRING || schema.type === FieldType.LONGFORM || schema.type === FieldType.BB_REFERENCE_SINGLE || + schema.type === FieldType.BB_REFERENCE || schema.type === FieldType.OPTIONS || schema.type === FieldType.BARCODEQR ) { From f0bdbd5b4dbbe459f9b44a61dfe408e5ab540104 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 30 Jul 2024 17:53:53 +0100 Subject: [PATCH 20/94] Fixing not contains with oracle. --- packages/backend-core/src/sql/sql.ts | 12 +- yarn.lock | 192 ++++++++++++++++++++++++--- 2 files changed, 182 insertions(+), 22 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index e41d744812..d499cf765d 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -438,9 +438,13 @@ class InternalBuilder { } else { value[i] = `%${value[i]}%` } - statement += - (statement ? andOr : "") + - `COALESCE(LOWER(${this.quotedIdentifier(key)}), '') LIKE ?` + const identifier = this.quotedIdentifier(key) + statement += statement ? andOr : "" + if (not) { + statement += `(NOT COALESCE(LOWER(${identifier}), '') LIKE ? 
OR ${identifier} IS NULL)` + } else { + statement += `COALESCE(LOWER(${identifier}), '') LIKE ?` + } } if (statement === "") { @@ -448,7 +452,7 @@ class InternalBuilder { } // @ts-ignore - query = query[rawFnc](`${not}(${statement})`, value) + query = query[rawFnc](statement, value) }) } } diff --git a/yarn.lock b/yarn.lock index 2d69b37cc6..607db0b7bb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6707,22 +6707,39 @@ acorn-import-assertions@^1.9.0: resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== -acorn-jsx@^5.3.2: +acorn-jsx-walk@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/acorn-jsx-walk/-/acorn-jsx-walk-2.0.0.tgz#a5ed648264e68282d7c2aead80216bfdf232573a" + integrity sha512-uuo6iJj4D4ygkdzd6jPtcxs8vZgDX9YFIkqczGImoypX2fQ4dVImmu3UzA4ynixCIMTrEOWW+95M2HuBaCEOVA== + +acorn-jsx@5.3.2, acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== +acorn-loose@8.4.0: + version "8.4.0" + resolved "https://registry.yarnpkg.com/acorn-loose/-/acorn-loose-8.4.0.tgz#26d3e219756d1e180d006f5bcc8d261a28530f55" + integrity sha512-M0EUka6rb+QC4l9Z3T0nJEzNOO7JcoJlYMrBlyBCiFSXRyxjLKayd4TbQs2FDRWQU1h9FR7QVNHt+PEaoNL5rQ== + dependencies: + acorn "^8.11.0" + +acorn-walk@8.3.3, acorn-walk@^8.0.2, acorn-walk@^8.1.1, acorn-walk@^8.2.0, acorn-walk@^8.3.2: + version "8.3.3" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e" + integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw== + dependencies: + acorn "^8.11.0" + acorn-walk@^7.1.1: version "7.2.0" resolved 
"https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== -acorn-walk@^8.0.2, acorn-walk@^8.1.1, acorn-walk@^8.2.0, acorn-walk@^8.3.2: - version "8.3.3" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e" - integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw== - dependencies: - acorn "^8.11.0" +acorn@8.12.1: + version "8.12.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== acorn@^5.2.1, acorn@^5.7.3: version "5.7.4" @@ -6791,6 +6808,16 @@ ajv-formats@^2.0.2: dependencies: ajv "^8.0.0" +ajv@8.17.1: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6" + integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g== + dependencies: + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + ajv@^6.12.3, ajv@^6.12.4: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" @@ -8484,6 +8511,11 @@ combos@^0.2.0: resolved "https://registry.yarnpkg.com/combos/-/combos-0.2.0.tgz#dc31c5a899b42293d55fe19c064d3e6e207ba4f7" integrity sha512-Z6YfvgiTCERWJTj3wQiXamFhssdvz1n4ok447rS330lw3uL72WAx8IvrLU7xiE71uyb5WF8JEP+BWB5KhOoGeg== +commander@12.1.0: + version "12.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3" + integrity sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA== + commander@6.2.0: version "6.2.0" 
resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.0.tgz#b990bfb8ac030aedc6d11bc04d1488ffef56db75" @@ -9551,6 +9583,34 @@ depd@^1.1.0, depd@~1.1.2: resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== +dependency-cruiser@^16.3.7: + version "16.3.10" + resolved "https://registry.yarnpkg.com/dependency-cruiser/-/dependency-cruiser-16.3.10.tgz#fe26a50d5e10a4496bc2b70d027fca6ded48814f" + integrity sha512-WkCnibHBfvaiaQ+S46LZ6h4AR6oj42Vsf5/0Vgtrwdwn7ZekMJdZ/ALoTwNp/RaGlKW+MbV/fhSZOvmhAWVWzQ== + dependencies: + acorn "8.12.1" + acorn-jsx "5.3.2" + acorn-jsx-walk "2.0.0" + acorn-loose "8.4.0" + acorn-walk "8.3.3" + ajv "8.17.1" + commander "12.1.0" + enhanced-resolve "5.17.1" + ignore "5.3.1" + interpret "^3.1.1" + is-installed-globally "1.0.0" + json5 "2.2.3" + memoize "10.0.0" + picocolors "1.0.1" + picomatch "4.0.2" + prompts "2.4.2" + rechoir "^0.8.0" + safe-regex "2.1.1" + semver "^7.6.3" + teamcity-service-messages "0.1.14" + tsconfig-paths-webpack-plugin "4.1.0" + watskeburt "4.1.0" + dependency-tree@^9.0.0: version "9.0.0" resolved "https://registry.yarnpkg.com/dependency-tree/-/dependency-tree-9.0.0.tgz#9288dd6daf35f6510c1ea30d9894b75369aa50a2" @@ -10221,6 +10281,14 @@ engine.io@~6.5.2: engine.io-parser "~5.2.1" ws "~8.17.1" +enhanced-resolve@5.17.1, enhanced-resolve@^5.7.0: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + enhanced-resolve@^5.8.3: version "5.14.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.14.1.tgz#de684b6803724477a4af5d74ccae5de52c25f6b3" @@ -11016,6 +11084,11 @@ 
fast-text-encoding@^1.0.0: resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz#0aa25f7f638222e3396d72bf936afcf1d42d6867" integrity sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w== +fast-uri@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.1.tgz#cddd2eecfc83a71c1be2cc2ef2061331be8a7134" + integrity sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw== + fast-url-parser@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d" @@ -11877,6 +11950,13 @@ global-agent@3.0.0: semver "^7.3.2" serialize-error "^7.0.1" +global-directory@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/global-directory/-/global-directory-4.0.1.tgz#4d7ac7cfd2cb73f304c53b8810891748df5e361e" + integrity sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q== + dependencies: + ini "4.1.1" + global-dirs@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.1.tgz#0c488971f066baceda21447aecb1a8b911d22485" @@ -12541,6 +12621,11 @@ ignore-walk@^6.0.0: dependencies: minimatch "^7.4.2" +ignore@5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef" + integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw== + ignore@^5.0.4, ignore@^5.2.0, ignore@^5.2.4: version "5.3.0" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78" @@ -12666,6 +12751,11 @@ ini@2.0.0: resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5" integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== +ini@4.1.1: + 
version "4.1.1" + resolved "https://registry.yarnpkg.com/ini/-/ini-4.1.1.tgz#d95b3d843b1e906e56d6747d5447904ff50ce7a1" + integrity sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g== + ini@^1.3.2, ini@^1.3.4, ini@^1.3.8, ini@~1.3.0: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" @@ -12743,6 +12833,11 @@ interpret@^2.2.0: resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9" integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw== +interpret@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-3.1.1.tgz#5be0ceed67ca79c6c4bc5cf0d7ee843dcea110c4" + integrity sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ== + into-stream@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-3.1.0.tgz#96fb0a936c12babd6ff1752a17d05616abd094c6" @@ -12973,6 +13068,14 @@ is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: dependencies: is-extglob "^2.1.1" +is-installed-globally@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-1.0.0.tgz#08952c43758c33d815692392f7f8437b9e436d5a" + integrity sha512-K55T22lfpQ63N4KEN57jZUAaAYqYHEe8veb/TycJRk9DdSCLLcovXz/mL6mOnhQaZsQGwPhuFopdQIlqGSEjiQ== + dependencies: + global-directory "^4.0.1" + is-path-inside "^4.0.0" + is-installed-globally@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.4.0.tgz#9a0fd407949c30f86eb6959ef1b7994ed0b7b520" @@ -13060,6 +13163,11 @@ is-path-inside@^3.0.2, is-path-inside@^3.0.3: resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" integrity 
sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== +is-path-inside@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-4.0.0.tgz#805aeb62c47c1b12fc3fd13bfb3ed1e7430071db" + integrity sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA== + is-plain-obj@^1.0.0, is-plain-obj@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" @@ -14084,6 +14192,11 @@ json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1: resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== +json5@2.2.3, json5@^2.2.1, json5@^2.2.2, json5@^2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + json5@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" @@ -14091,11 +14204,6 @@ json5@^1.0.2: dependencies: minimist "^1.2.0" -json5@^2.2.1, json5@^2.2.2, json5@^2.2.3: - version "2.2.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" - integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== - jsonc-parser@3.2.0, jsonc-parser@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" @@ -15441,6 +15549,13 @@ memdown@^5.1.0: ltgt "~2.2.0" safe-buffer "~5.2.0" +memoize@10.0.0: + version "10.0.0" + resolved 
"https://registry.yarnpkg.com/memoize/-/memoize-10.0.0.tgz#43fa66b2022363c7c50cf5dfab732a808a3d7147" + integrity sha512-H6cBLgsi6vMWOcCpvVCdFFnl3kerEXbrYh9q+lY6VXvQSmM6CkmV08VOwT+WE2tzIEqRPFfAq3fm4v/UIW6mSA== + dependencies: + mimic-function "^5.0.0" + memory-pager@^1.0.2: version "1.5.0" resolved "https://registry.yarnpkg.com/memory-pager/-/memory-pager-1.5.0.tgz#d8751655d22d384682741c972f2c3d6dfa3e66b5" @@ -15549,6 +15664,11 @@ mimic-fn@^4.0.0: resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-4.0.0.tgz#60a90550d5cb0b239cca65d893b1a53b29871ecc" integrity sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw== +mimic-function@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/mimic-function/-/mimic-function-5.0.1.tgz#acbe2b3349f99b9deaca7fb70e48b83e94e67076" + integrity sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA== + mimic-response@^1.0.0, mimic-response@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" @@ -17412,15 +17532,20 @@ phin@^2.9.1: resolved "https://registry.yarnpkg.com/phin/-/phin-2.9.3.tgz#f9b6ac10a035636fb65dfc576aaaa17b8743125c" integrity sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA== +picocolors@1.0.1, picocolors@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1" + integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== + picocolors@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -picocolors@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1" - integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== +picomatch@4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab" + integrity sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg== picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" @@ -18385,7 +18510,7 @@ promise.series@^0.2.0: resolved "https://registry.yarnpkg.com/promise.series/-/promise.series-0.2.0.tgz#2cc7ebe959fc3a6619c04ab4dbdc9e452d864bbd" integrity sha512-VWQJyU2bcDTgZw8kpfBpB/ejZASlCrzwz5f2hjb/zlujOEB4oeiAhHygAWq8ubsX2GVkD4kCU5V2dwOTaCY5EQ== -prompts@^2.0.1: +prompts@2.4.2, prompts@^2.0.1: version "2.4.2" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== @@ -18952,6 +19077,11 @@ regenerator-transform@^0.15.1: dependencies: "@babel/runtime" "^7.8.4" +regexp-tree@~0.1.1: + version "0.1.27" + resolved "https://registry.yarnpkg.com/regexp-tree/-/regexp-tree-0.1.27.tgz#2198f0ef54518ffa743fe74d983b56ffd631b6cd" + integrity sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA== + regexp.prototype.flags@^1.4.3, regexp.prototype.flags@^1.5.2: version "1.5.2" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz#138f644a3350f981a858c44f6bb1a61ff59be334" @@ -19492,6 +19622,13 @@ safe-regex-test@^1.0.3: es-errors "^1.3.0" is-regex "^1.1.4" +safe-regex@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-2.1.1.tgz#f7128f00d056e2fe5c11e81a1324dd974aadced2" + integrity 
sha512-rx+x8AMzKb5Q5lQ95Zoi6ZbJqwCLkqi3XuJXp5P3rT8OEc6sZCJG5AE5dU3lsgRr/F4Bs31jSlVN+j5KrsGu9A== + dependencies: + regexp-tree "~0.1.1" + safe-stable-stringify@^2.1.0, safe-stable-stringify@^2.3.1: version "2.4.3" resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz#138c84b6f6edb3db5f8ef3ef7115b8f55ccbf886" @@ -19603,7 +19740,7 @@ semver-diff@^3.1.1: dependencies: semver "^6.3.0" -"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@~2.3.1: +"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3, semver@~2.3.1: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== @@ -20867,6 +21004,11 @@ tarn@^3.0.1, tarn@^3.0.2: resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.2.tgz#73b6140fbb881b71559c4f8bfde3d9a4b3d27693" integrity sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ== +teamcity-service-messages@0.1.14: + version "0.1.14" + resolved "https://registry.yarnpkg.com/teamcity-service-messages/-/teamcity-service-messages-0.1.14.tgz#193d420a5e4aef8e5e50b8c39e7865e08fbb5d8a" + integrity sha512-29aQwaHqm8RMX74u2o/h1KbMLP89FjNiMxD9wbF2BbWOnbM+q+d1sCEC+MqCc4QW3NJykn77OMpTFw/xTHIc0w== + tedious@^16.4.0: version "16.7.1" resolved 
"https://registry.yarnpkg.com/tedious/-/tedious-16.7.1.tgz#1190f30fd99a413f1dc9250dee4835cf0788b650" @@ -21258,6 +21400,15 @@ ts-node@10.8.1: v8-compile-cache-lib "^3.0.1" yn "3.1.1" +tsconfig-paths-webpack-plugin@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths-webpack-plugin/-/tsconfig-paths-webpack-plugin-4.1.0.tgz#3c6892c5e7319c146eee1e7302ed9e6f2be4f763" + integrity sha512-xWFISjviPydmtmgeUAuXp4N1fky+VCtfhOkDUFIv5ea7p4wuTomI4QTrXvFBX2S4jZsmyTSrStQl+E+4w+RzxA== + dependencies: + chalk "^4.1.0" + enhanced-resolve "^5.7.0" + tsconfig-paths "^4.1.2" + tsconfig-paths@4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.0.0.tgz#1082f5d99fd127b72397eef4809e4dd06d229b64" @@ -22037,6 +22188,11 @@ walker@^1.0.8: dependencies: makeerror "1.0.12" +watskeburt@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/watskeburt/-/watskeburt-4.1.0.tgz#3c0227669be646a97424b631164b1afe3d4d5344" + integrity sha512-KkY5H51ajqy9HYYI+u9SIURcWnqeVVhdH0I+ab6aXPGHfZYxgRCwnR6Lm3+TYB6jJVt5jFqw4GAKmwf1zHmGQw== + wcwidth@^1.0.0, wcwidth@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" From cc1e466db91082d04f5d7e24b23b41876fb024aa Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 30 Jul 2024 17:57:21 +0100 Subject: [PATCH 21/94] Add Oracle to table.spec.ts --- .../server/src/api/routes/tests/table.spec.ts | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index 20c83549d2..a8bf9447e8 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -9,6 +9,7 @@ import { RelationshipType, Row, SaveTableRequest, + SourceName, Table, TableSourceType, User, @@ -33,7 +34,8 @@ describe.each([ [DatabaseName.MYSQL, 
getDatasource(DatabaseName.MYSQL)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], -])("/tables (%s)", (_, dsProvider) => { + [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], +])("/tables (%s)", (name, dsProvider) => { const isInternal: boolean = !dsProvider let datasource: Datasource | undefined let config = setup.getConfig() @@ -52,15 +54,20 @@ describe.each([ jest.clearAllMocks() }) - it.each([ + let names = [ "alphanum", "with spaces", "with-dashes", "with_underscores", - 'with "double quotes"', - "with 'single quotes'", "with `backticks`", - ])("creates a table with name: %s", async name => { + ] + + if (name !== DatabaseName.ORACLE) { + names.push(`with "double quotes"`) + names.push(`with 'single quotes'`) + } + + it.each(names)("creates a table with name: %s", async name => { const table = await config.api.table.save( tableForDatasource(datasource, { name }) ) From 7cc000a838d92daa94a065806eecbf7420fe3367 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 30 Jul 2024 18:22:20 +0100 Subject: [PATCH 22/94] Fixes for not contains in oracle. --- packages/backend-core/src/sql/sql.ts | 27 ++++++++++--------- .../src/api/routes/tests/search.spec.ts | 14 +++++----- 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 8900a979c7..69d32fc5b9 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -3,10 +3,10 @@ import * as dbCore from "../db" import { getNativeSql, isExternalTable, - isValidISODateString, - isValidFilter, - sqlLog, isInvalidISODateString, + isValidFilter, + isValidISODateString, + sqlLog, } from "./utils" import SqlTableQueryBuilder from "./sqlTable" import { @@ -433,27 +433,30 @@ class InternalBuilder { const andOr = mode === filters?.containsAny ? 
" OR " : " AND " iterate(mode, (key, value) => { let statement = "" + const identifier = this.quotedIdentifier(key) for (let i in value) { if (typeof value[i] === "string") { value[i] = `%"${value[i].toLowerCase()}"%` } else { value[i] = `%${value[i]}%` } - const identifier = this.quotedIdentifier(key) - statement += statement ? andOr : "" - if (not) { - statement += `(NOT COALESCE(LOWER(${identifier}), '') LIKE ? OR ${identifier} IS NULL)` - } else { - statement += `COALESCE(LOWER(${identifier}), '') LIKE ?` - } + statement += `${ + statement ? andOr : "" + }COALESCE(LOWER(${identifier}), '') LIKE ?` } if (statement === "") { return } - // @ts-ignore - query = query[rawFnc](statement, value) + if (not) { + query = query[rawFnc]( + `(NOT (${statement}) OR ${identifier} IS NULL)`, + value + ) + } else { + query = query[rawFnc](statement, value) + } }) } } diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index e2df279603..4125e44999 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -40,13 +40,13 @@ import { structures } from "@budibase/backend-core/tests" import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default" describe.each([ - // ["in-memory", undefined], - // ["lucene", undefined], - // ["sqs", undefined], - // [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - // [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - // [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - // [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + ["in-memory", undefined], + ["lucene", undefined], + ["sqs", undefined], + [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], + [DatabaseName.MARIADB, 
getDatasource(DatabaseName.MARIADB)], [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" From 0202db3efe84c5ef244463bfb29b03a8171300f6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 12:20:45 +0200 Subject: [PATCH 23/94] Unify enums --- packages/builder/src/constants/backend/index.js | 11 ++++------- .../server/src/api/controllers/view/exporters.ts | 14 +++++--------- packages/types/src/sdk/row.ts | 6 ++++++ 3 files changed, 15 insertions(+), 16 deletions(-) diff --git a/packages/builder/src/constants/backend/index.js b/packages/builder/src/constants/backend/index.js index ea8d35704f..6fbc36afe2 100644 --- a/packages/builder/src/constants/backend/index.js +++ b/packages/builder/src/constants/backend/index.js @@ -9,7 +9,10 @@ import { Constants } from "@budibase/frontend-core" const { TypeIconMap } = Constants -export { RelationshipType } from "@budibase/types" +export { + RelationshipType, + RowExportFormat as ROW_EXPORT_FORMATS, +} from "@budibase/types" export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType @@ -307,9 +310,3 @@ export const DatasourceTypes = { GRAPH: "Graph", API: "API", } - -export const ROW_EXPORT_FORMATS = { - CSV: "csv", - JSON: "json", - JSON_WITH_SCHEMA: "jsonWithSchema", -} diff --git a/packages/server/src/api/controllers/view/exporters.ts b/packages/server/src/api/controllers/view/exporters.ts index 9cf114f4e5..946a1b346a 100644 --- a/packages/server/src/api/controllers/view/exporters.ts +++ b/packages/server/src/api/controllers/view/exporters.ts @@ -1,4 +1,6 @@ -import { Row, TableSchema } from "@budibase/types" +import { Row, RowExportFormat, TableSchema } from "@budibase/types" + +export { RowExportFormat as Format } from "@budibase/types" function getHeaders( headers: string[], @@ -46,14 +48,8 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) { return JSON.stringify({ schema: newSchema, rows }, undefined, 2) } -export 
enum Format { - CSV = "csv", - JSON = "json", - JSON_WITH_SCHEMA = "jsonWithSchema", -} - -export function isFormat(format: any): format is Format { - return Object.values(Format).includes(format as Format) +export function isFormat(format: any): format is RowExportFormat { + return Object.values(RowExportFormat).includes(format as RowExportFormat) } export function parseCsvExport(value: string) { diff --git a/packages/types/src/sdk/row.ts b/packages/types/src/sdk/row.ts index b0b137034b..6850359cc3 100644 --- a/packages/types/src/sdk/row.ts +++ b/packages/types/src/sdk/row.ts @@ -30,3 +30,9 @@ export interface SearchResponse { bookmark?: string | number totalRows?: number } + +export enum RowExportFormat { + CSV = "csv", + JSON = "json", + JSON_WITH_SCHEMA = "jsonWithSchema", +} From 58a47b801a54f0ed1b281cb5693f7b14458ffa4f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 12:23:09 +0200 Subject: [PATCH 24/94] Remove magic strings --- packages/server/src/tests/utilities/api/legacyView.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/src/tests/utilities/api/legacyView.ts b/packages/server/src/tests/utilities/api/legacyView.ts index ae250a81e2..643969955c 100644 --- a/packages/server/src/tests/utilities/api/legacyView.ts +++ b/packages/server/src/tests/utilities/api/legacyView.ts @@ -1,5 +1,5 @@ import { Expectations, TestAPI } from "./base" -import { Row, View, ViewCalculation } from "@budibase/types" +import { Row, RowExportFormat, View, ViewCalculation } from "@budibase/types" export class LegacyViewAPI extends TestAPI { get = async ( @@ -24,7 +24,7 @@ export class LegacyViewAPI extends TestAPI { export = async ( viewName: string, - format: "json" | "csv" | "jsonWithSchema", + format: `${RowExportFormat}`, expectations?: Expectations ) => { const response = await this._requestRaw("get", `/api/views/export`, { From 4b7042be9fba0e082c4646e780078253537997c1 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 
31 Jul 2024 12:00:50 +0100 Subject: [PATCH 25/94] Ignore Oracle on bulk upsert tests. --- .../server/src/api/routes/tests/row.spec.ts | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 9cc53168d0..eba50147f4 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -65,14 +65,16 @@ async function waitForEvent( } describe.each([ - ["internal", undefined], - [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], - [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], - [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], - [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + // ["internal", undefined], + // [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], + // [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], + // [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], + // [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], + [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)], ])("/rows (%s)", (providerType, dsProvider) => { const isInternal = dsProvider === undefined const isMSSQL = providerType === DatabaseName.SQL_SERVER + const isOracle = providerType === DatabaseName.ORACLE const config = setup.getConfig() let table: Table @@ -127,7 +129,8 @@ describe.each([ primary: ["id"], schema: defaultSchema, } - return merge(req, ...overrides) + const merged = merge(req, ...overrides) + return merged } function defaultTable( @@ -1369,9 +1372,10 @@ describe.each([ expect(rows[2].description).toEqual("Row 3 description") }) - // Upserting isn't yet supported in MSSQL, see: + // Upserting isn't yet supported in MSSQL or Oracle, see: // https://github.com/knex/knex/pull/6050 !isMSSQL && + !isOracle && !isInternal && it("should be able to update existing rows with composite primary keys with 
bulkImport", async () => { const tableName = uuid.v4() @@ -1438,9 +1442,10 @@ describe.each([ expect(rows[2].description).toEqual("Row 3 description") }) - // Upserting isn't yet supported in MSSQL, see: + // Upserting isn't yet supported in MSSQL/Oracle, see: // https://github.com/knex/knex/pull/6050 !isMSSQL && + !isOracle && !isInternal && it("should be able to update existing rows an autoID primary key", async () => { const tableName = uuid.v4() From 62fa05a855205f43b0ef929fc051cd91b0fd8f58 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 13:28:28 +0200 Subject: [PATCH 26/94] Type --- packages/server/src/sdk/app/rows/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index cd1b663f6a..e463397ad9 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -76,7 +76,7 @@ export async function getDatasourceAndQuery( } export function cleanExportRows( - rows: any[], + rows: Row[], schema: TableSchema, format: string, columns?: string[], From fe2b2bb097fb4ee106754630d2419f0f6a8a3cd2 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 13:33:20 +0200 Subject: [PATCH 27/94] Don't export couchdb fields --- .../server/src/sdk/app/rows/search/internal.ts | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts index 46d2cd8c61..f86b041597 100644 --- a/packages/server/src/sdk/app/rows/search/internal.ts +++ b/packages/server/src/sdk/app/rows/search/internal.ts @@ -11,6 +11,7 @@ import { SearchResponse, SortType, Table, + TableSchema, User, } from "@budibase/types" import { getGlobalUsersFromMetadata } from "../../../../utilities/global" @@ -137,6 +138,9 @@ export async function exportRows( let rows: Row[] = [] let schema = table.schema let headers + + result = 
trimFields(result, schema) + // Filter data to only specified columns if required if (columns && columns.length) { for (let i = 0; i < result.length; i++) { @@ -299,3 +303,13 @@ async function getView(db: Database, viewName: string) { } return viewInfo } + +function trimFields(rows: Row[], schema: TableSchema) { + const allowedFields = ["_id", ...Object.keys(schema)] + const result = rows.map(row => + Object.keys(row) + .filter(key => allowedFields.includes(key)) + .reduce((acc, key) => ({ ...acc, [key]: row[key] }), {} as Row) + ) + return result +} From 543d0e1ce619b7fbaeeb994c0d33c325b2d934eb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 14:01:38 +0200 Subject: [PATCH 28/94] Add tests --- .../server/src/api/routes/tests/row.spec.ts | 45 ++++++++++++------- 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 8871841ee7..96a157893f 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1640,23 +1640,38 @@ describe.each([ table = await config.api.table.save(defaultTable()) }) - it("should allow exporting all columns", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] + isInternal && + it("should not export internal couchdb fields", async () => { + const existing = await config.api.row.save(table._id!, { + name: generator.guid(), + description: generator.paragraph(), + }) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] - // Ensure all original columns were exported - expect(Object.keys(row).length).toBeGreaterThanOrEqual( - 
Object.keys(existing).length - ) - Object.keys(existing).forEach(key => { - expect(row[key]).toEqual(existing[key]) + expect(Object.keys(row)).toEqual(["_id", "name", "description"]) + }) + + !isInternal && + it("should allow exporting all columns", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure all original columns were exported + expect(Object.keys(row).length).toBe(Object.keys(existing).length) + Object.keys(existing).forEach(key => { + expect(row[key]).toEqual(existing[key]) + }) }) - }) it("should allow exporting only certain columns", async () => { const existing = await config.api.row.save(table._id!, {}) From 4f65306c4fdf71deb90fe9283585c446a7b6c318 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 25 Jul 2024 17:20:10 +0200 Subject: [PATCH 29/94] Add basic validateNewTableImport test --- .../server/src/api/routes/tests/table.spec.ts | 33 +++++++++++++++++++ .../server/src/tests/utilities/api/table.ts | 22 ++++++++++++- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index a2cff7b395..b32983b8ad 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -10,6 +10,7 @@ import { Row, SaveTableRequest, Table, + TableSchema, TableSourceType, User, ViewCalculation, @@ -1022,4 +1023,36 @@ describe.each([ }) }) }) + + describe("import validation", () => { + const basicSchema: TableSchema = { + id: { + type: FieldType.NUMBER, + name: "id", + }, + name: { + type: FieldType.STRING, + name: "name", + }, + } + + describe("validateNewTableImport", () => { + it("can validate basic imports", async () => { + const result = await 
config.api.table.validateNewTableImport( + [{ id: generator.natural(), name: generator.first() }], + basicSchema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + }, + }) + }) + }) + }) }) diff --git a/packages/server/src/tests/utilities/api/table.ts b/packages/server/src/tests/utilities/api/table.ts index d918ba8b9a..c42247dc59 100644 --- a/packages/server/src/tests/utilities/api/table.ts +++ b/packages/server/src/tests/utilities/api/table.ts @@ -3,9 +3,12 @@ import { BulkImportResponse, MigrateRequest, MigrateResponse, + Row, SaveTableRequest, SaveTableResponse, Table, + TableSchema, + ValidateTableImportResponse, } from "@budibase/types" import { Expectations, TestAPI } from "./base" @@ -61,8 +64,25 @@ export class TableAPI extends TestAPI { revId: string, expectations?: Expectations ): Promise => { - return await this._delete(`/api/tables/${tableId}/${revId}`, { + return await this._delete(`/api/tables/${tableId}/${revId}`, { expectations, }) } + + validateNewTableImport = async ( + rows: Row[], + schema: TableSchema, + expectations?: Expectations + ): Promise => { + return await this._post( + `/api/tables/validateNewTableImport`, + { + body: { + rows, + schema, + }, + expectations, + } + ) + } } From 5896e94e56992964064e5cd43d19b7f7a667c608 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 25 Jul 2024 17:32:53 +0200 Subject: [PATCH 30/94] Add basic validateExistingTableImport test --- .../server/src/api/routes/tests/table.spec.ts | 25 +++++++++++++++++++ .../server/src/tests/utilities/api/table.ts | 14 +++++++++++ 2 files changed, 39 insertions(+) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index b32983b8ad..67b1d64ae1 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -1054,5 +1054,30 @@ describe.each([ }) }) }) + + 
describe("validateExistingTableImport", () => { + it("can validate basic imports", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema: basicSchema, + }) + ) + const result = await config.api.table.validateExistingTableImport({ + tableId: table._id, + rows: [{ id: generator.natural(), name: generator.first() }], + }) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + }, + }) + }) + }) }) }) diff --git a/packages/server/src/tests/utilities/api/table.ts b/packages/server/src/tests/utilities/api/table.ts index c42247dc59..9d4a92250a 100644 --- a/packages/server/src/tests/utilities/api/table.ts +++ b/packages/server/src/tests/utilities/api/table.ts @@ -8,6 +8,7 @@ import { SaveTableResponse, Table, TableSchema, + ValidateTableImportRequest, ValidateTableImportResponse, } from "@budibase/types" import { Expectations, TestAPI } from "./base" @@ -85,4 +86,17 @@ export class TableAPI extends TestAPI { } ) } + + validateExistingTableImport = async ( + body: ValidateTableImportRequest, + expectations?: Expectations + ): Promise => { + return await this._post( + `/api/tables/validateExistingTableImport`, + { + body, + expectations, + } + ) + } } From 9d0fdeff68b19d0551498cbc72d2cf53568476bd Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 14:17:51 +0200 Subject: [PATCH 31/94] Add validateExistingTableImport _id support test --- .../server/src/api/routes/tests/table.spec.ts | 33 ++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index 67b1d64ae1..dd40a2420e 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -1,4 +1,4 @@ -import { context, events } from "@budibase/backend-core" +import { context, docIds, 
events } from "@budibase/backend-core" import { AutoFieldSubType, BBReferenceFieldSubType, @@ -1078,6 +1078,37 @@ describe.each([ }, }) }) + + isInternal && + it("can reimport _id fields for internal tables", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema: basicSchema, + }) + ) + const result = await config.api.table.validateExistingTableImport({ + tableId: table._id, + rows: [ + { + _id: docIds.generateRowID(table._id!), + id: generator.natural(), + name: generator.first(), + }, + ], + }) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + _id: true, + id: true, + name: true, + }, + }) + }) }) }) }) From b28aaa3a936dfc27c9263efc83f6c6e169ad5d3f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 14:22:54 +0200 Subject: [PATCH 32/94] Fix --- packages/server/src/api/controllers/table/index.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts index ba861064bb..a02ecc665e 100644 --- a/packages/server/src/api/controllers/table/index.ts +++ b/packages/server/src/api/controllers/table/index.ts @@ -17,6 +17,7 @@ import { CsvToJsonRequest, CsvToJsonResponse, FetchTablesResponse, + FieldType, MigrateRequest, MigrateResponse, SaveTableRequest, @@ -178,9 +179,17 @@ export async function validateExistingTableImport( const { rows, tableId } = ctx.request.body let schema = null + if (tableId) { const table = await sdk.tables.getTable(tableId) schema = table.schema + + if (!isExternalTable(table)) { + schema._id = { + name: "_id", + type: FieldType.STRING, + } + } } else { ctx.status = 422 return From 24cdfb3443b28ab34f80ca9a7e6ed0db119a3fc0 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 14:55:25 +0200 Subject: [PATCH 33/94] Fix re-importing --- .../src/api/controllers/table/internal.ts | 23 +++++++-- 
.../server/src/api/controllers/table/utils.ts | 9 ++-- .../server/src/api/routes/tests/row.spec.ts | 50 +++++++++++++++++++ 3 files changed, 73 insertions(+), 9 deletions(-) diff --git a/packages/server/src/api/controllers/table/internal.ts b/packages/server/src/api/controllers/table/internal.ts index 4286d51d3e..6d1c67e800 100644 --- a/packages/server/src/api/controllers/table/internal.ts +++ b/packages/server/src/api/controllers/table/internal.ts @@ -3,6 +3,7 @@ import { handleDataImport } from "./utils" import { BulkImportRequest, BulkImportResponse, + FieldType, RenameColumn, SaveTableRequest, SaveTableResponse, @@ -69,10 +70,22 @@ export async function bulkImport( ) { const table = await sdk.tables.getTable(ctx.params.tableId) const { rows, identifierFields } = ctx.request.body - await handleDataImport(table, { - importRows: rows, - identifierFields, - user: ctx.user, - }) + await handleDataImport( + { + ...table, + schema: { + _id: { + name: "_id", + type: FieldType.STRING, + }, + ...table.schema, + }, + }, + { + importRows: rows, + identifierFields, + user: ctx.user, + } + ) return table } diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts index e2036c8115..51f7b0e589 100644 --- a/packages/server/src/api/controllers/table/utils.ts +++ b/packages/server/src/api/controllers/table/utils.ts @@ -124,11 +124,12 @@ export async function importToRows( table: Table, user?: ContextUser ) { - let originalTable = table - let finalData: any = [] + const originalTable = table + const finalData: Row[] = [] + const keepCouchId = "_id" in table.schema for (let i = 0; i < data.length; i++) { let row = data[i] - row._id = generateRowID(table._id!) + row._id = (keepCouchId && row._id) || generateRowID(table._id!) 
row.type = "row" row.tableId = table._id @@ -180,7 +181,7 @@ export async function handleDataImport( const db = context.getAppDB() const data = parse(importRows, table) - let finalData: any = await importToRows(data, table, user) + const finalData = await importToRows(data, table, user) //Set IDs of finalData to match existing row if an update is expected if (identifierFields.length > 0) { diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 96a157893f..b448d46e6a 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1298,6 +1298,56 @@ describe.each([ await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage) }) + isInternal && + it("should be able to update existing rows on bulkImport", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", + }) + + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { ...existingRow, name: "Updated existing row" }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + identifierFields: ["_id"], + }) + + const rows = await config.api.row.fetch(table._id!) 
+ expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + expect(rows[2].name).toEqual("Updated existing row") + expect(rows[2].description).toEqual("Existing description") + }) + // Upserting isn't yet supported in MSSQL, see: // https://github.com/knex/knex/pull/6050 !isMSSQL && From f794f84e9035d92817259d63de39f178d3c94b04 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 15:00:30 +0200 Subject: [PATCH 34/94] Fix quote count --- packages/server/src/api/controllers/table/utils.ts | 6 +++++- packages/server/src/api/routes/tests/row.spec.ts | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts index 51f7b0e589..417cb22fe3 100644 --- a/packages/server/src/api/controllers/table/utils.ts +++ b/packages/server/src/api/controllers/table/utils.ts @@ -183,6 +183,8 @@ export async function handleDataImport( const finalData = await importToRows(data, table, user) + let newRowCount = finalData.length + //Set IDs of finalData to match existing row if an update is expected if (identifierFields.length > 0) { const allDocs = await db.allDocs( @@ -204,12 +206,14 @@ export async function handleDataImport( if (match) { finalItem._id = doc._id finalItem._rev = doc._rev + + newRowCount-- } }) }) } - await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), { + await quotas.addRows(newRowCount, () => db.bulkDocs(finalData), { tableId: table._id, }) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index b448d46e6a..c4586263f4 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1320,6 
+1320,7 @@ describe.each([ description: "Existing description", }) + const rowUsage = await getRowUsage() await config.api.row.bulkImport(table._id!, { rows: [ @@ -1346,6 +1347,8 @@ describe.each([ expect(rows[1].description).toEqual("Row 2 description") expect(rows[2].name).toEqual("Updated existing row") expect(rows[2].description).toEqual("Existing description") + + await assertRowUsage(rowUsage + 2) }) // Upserting isn't yet supported in MSSQL, see: From a6beb0fa82f125cf43896d02c04480f7ffca53eb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 15:14:29 +0200 Subject: [PATCH 35/94] Support no updating existing rows --- .../server/src/api/controllers/table/utils.ts | 9 ++-- .../server/src/api/routes/tests/row.spec.ts | 54 +++++++++++++++++++ 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts index 417cb22fe3..0e0a83e3b3 100644 --- a/packages/server/src/api/controllers/table/utils.ts +++ b/packages/server/src/api/controllers/table/utils.ts @@ -122,11 +122,12 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) { export async function importToRows( data: Row[], table: Table, - user?: ContextUser + user?: ContextUser, + opts?: { keepCouchId: boolean } ) { const originalTable = table const finalData: Row[] = [] - const keepCouchId = "_id" in table.schema + const keepCouchId = !!opts?.keepCouchId for (let i = 0; i < data.length; i++) { let row = data[i] row._id = (keepCouchId && row._id) || generateRowID(table._id!) 
@@ -181,7 +182,9 @@ export async function handleDataImport( const db = context.getAppDB() const data = parse(importRows, table) - const finalData = await importToRows(data, table, user) + const finalData = await importToRows(data, table, user, { + keepCouchId: identifierFields.includes("_id"), + }) let newRowCount = finalData.length diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index c4586263f4..edceb925d6 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1351,6 +1351,60 @@ describe.each([ await assertRowUsage(rowUsage + 2) }) + isInternal && + it("should create new rows if not identifierFields are provided", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", + }) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { ...existingRow, name: "Updated existing row" }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
+ expect(rows.length).toEqual(4) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Existing row") + expect(rows[0].description).toEqual("Existing description") + expect(rows[1].name).toEqual("Row 1") + expect(rows[1].description).toEqual("Row 1 description") + expect(rows[2].name).toEqual("Row 2") + expect(rows[2].description).toEqual("Row 2 description") + expect(rows[3].name).toEqual("Updated existing row") + expect(rows[3].description).toEqual("Existing description") + + await assertRowUsage(rowUsage + 3) + }) + // Upserting isn't yet supported in MSSQL, see: // https://github.com/knex/knex/pull/6050 !isMSSQL && From b74841d99da31ea616f34ab1d5741f4478dd354d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 15:20:06 +0200 Subject: [PATCH 36/94] Fix --- packages/server/src/db/defaultData/datasource_bb_default.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/src/db/defaultData/datasource_bb_default.ts b/packages/server/src/db/defaultData/datasource_bb_default.ts index a393888e51..6b553e2d36 100644 --- a/packages/server/src/db/defaultData/datasource_bb_default.ts +++ b/packages/server/src/db/defaultData/datasource_bb_default.ts @@ -651,10 +651,10 @@ export async function buildDefaultDocs() { return new LinkDocument( employeeData.table._id!, "Jobs", - employeeData.rows[index]._id, + employeeData.rows[index]._id!, jobData.table._id!, "Assigned", - jobData.rows[index]._id + jobData.rows[index]._id! 
) } ) From 38da9012ea658fa6b36031983bd1a305ed7c7502 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 16:56:14 +0200 Subject: [PATCH 37/94] Display error --- .../backend/TableNavigator/modals/CreateTableModal.svelte | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte index 098369d3b4..b62c8af03d 100644 --- a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte +++ b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte @@ -78,7 +78,7 @@ await datasources.fetch() await afterSave(table) } catch (e) { - notifications.error(e) + notifications.error(e.message || e) // reload in case the table was created await tables.fetch() } From 785ab122378278ead729293911c9bfd0c1eb582d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 17:07:56 +0200 Subject: [PATCH 38/94] Add protected name validation test --- .../server/src/api/routes/tests/table.spec.ts | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index dd40a2420e..b96c08ea53 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -1,4 +1,5 @@ import { context, docIds, events } from "@budibase/backend-core" +import { PROTECTED_EXTERNAL_COLUMNS } from "@budibase/shared-core" import { AutoFieldSubType, BBReferenceFieldSubType, @@ -1053,6 +1054,42 @@ describe.each([ }, }) }) + + isInternal && + it.each(PROTECTED_EXTERNAL_COLUMNS)( + "don't allow protected names (%s)", + async columnName => { + const result = await config.api.table.validateNewTableImport( + [ + { + id: generator.natural(), + name: generator.first(), + [columnName]: generator.word(), + }, + 
], + { + ...basicSchema, + [columnName]: { + name: columnName, + type: FieldType.STRING, + }, + } + ) + + expect(result).toEqual({ + allValid: false, + errors: { + [columnName]: `${columnName} is a protected name`, + }, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + [columnName]: false, + }, + }) + } + ) }) describe("validateExistingTableImport", () => { From 73eefa1046e9c24978572ff39014cf8f5b1a2007 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 17:14:02 +0200 Subject: [PATCH 39/94] Check protected names on validation --- packages/server/src/api/controllers/table/index.ts | 14 +++++++++++--- packages/server/src/utilities/schema.ts | 14 +++++++++++++- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts index a02ecc665e..ee6947cec8 100644 --- a/packages/server/src/api/controllers/table/index.ts +++ b/packages/server/src/api/controllers/table/index.ts @@ -34,7 +34,11 @@ import sdk from "../../../sdk" import { jsonFromCsvString } from "../../../utilities/csv" import { builderSocket } from "../../../websockets" import { cloneDeep, isEqual } from "lodash" -import { helpers } from "@budibase/shared-core" +import { + helpers, + PROTECTED_EXTERNAL_COLUMNS, + PROTECTED_INTERNAL_COLUMNS, +} from "@budibase/shared-core" function pickApi({ tableId, table }: { tableId?: string; table?: Table }) { if (table && isExternalTable(table)) { @@ -167,7 +171,7 @@ export async function validateNewTableImport( if (isRows(rows) && isSchema(schema)) { ctx.status = 200 - ctx.body = validateSchema(rows, schema) + ctx.body = validateSchema(rows, schema, PROTECTED_INTERNAL_COLUMNS) } else { ctx.status = 422 } @@ -180,6 +184,7 @@ export async function validateExistingTableImport( let schema = null + let protectedColumnNames if (tableId) { const table = await sdk.tables.getTable(tableId) schema = table.schema @@ -189,6 +194,9 @@ export 
async function validateExistingTableImport( name: "_id", type: FieldType.STRING, } + protectedColumnNames = PROTECTED_INTERNAL_COLUMNS.filter(x => x !== "_id") + } else { + protectedColumnNames = PROTECTED_EXTERNAL_COLUMNS } } else { ctx.status = 422 @@ -197,7 +205,7 @@ export async function validateExistingTableImport( if (tableId && isRows(rows) && isSchema(schema)) { ctx.status = 200 - ctx.body = validateSchema(rows, schema) + ctx.body = validateSchema(rows, schema, protectedColumnNames) } else { ctx.status = 422 } diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts index 4bd4e8f583..c7a4427dd7 100644 --- a/packages/server/src/utilities/schema.ts +++ b/packages/server/src/utilities/schema.ts @@ -41,7 +41,11 @@ export function isRows(rows: any): rows is Rows { return Array.isArray(rows) && rows.every(row => typeof row === "object") } -export function validate(rows: Rows, schema: TableSchema): ValidationResults { +export function validate( + rows: Rows, + schema: TableSchema, + protectedColumnNames: readonly string[] +): ValidationResults { const results: ValidationResults = { schemaValidation: {}, allValid: false, @@ -49,6 +53,8 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults { errors: {}, } + protectedColumnNames = protectedColumnNames.map(x => x.toLowerCase()) + rows.forEach(row => { Object.entries(row).forEach(([columnName, columnData]) => { const { @@ -63,6 +69,12 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults { return } + if (protectedColumnNames.includes(columnName.toLowerCase())) { + results.schemaValidation[columnName] = false + results.errors[columnName] = `${columnName} is a protected name` + return + } + // If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array if (typeof columnType !== "string") { results.invalidColumns.push(columnName) From 
c015f8d192416695e6b9200eb13749597ac475ad Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 17:18:03 +0200 Subject: [PATCH 40/94] Run for both internal and external --- packages/server/src/api/routes/tests/table.spec.ts | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index b96c08ea53..0d15919db4 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -1,5 +1,8 @@ import { context, docIds, events } from "@budibase/backend-core" -import { PROTECTED_EXTERNAL_COLUMNS } from "@budibase/shared-core" +import { + PROTECTED_EXTERNAL_COLUMNS, + PROTECTED_INTERNAL_COLUMNS, +} from "@budibase/shared-core" import { AutoFieldSubType, BBReferenceFieldSubType, @@ -1055,10 +1058,9 @@ describe.each([ }) }) - isInternal && - it.each(PROTECTED_EXTERNAL_COLUMNS)( - "don't allow protected names (%s)", - async columnName => { + it.each( + isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS + )("don't allow protected names (%s)", async columnName => { const result = await config.api.table.validateNewTableImport( [ { From 1d695be77c1ff595018c6632255037ea4c17183c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 16:21:49 +0100 Subject: [PATCH 41/94] This PR includes a change to pagination which makes sure if the 5000 max row limit is hit that pagination still kicks in. This means that you can eventually return all rows, although for very large tables you may hit rate limits (if you have thousands of rows related to each row in your table). 
--- .../scripts/integrations/postgres/init.sql | 123 +++++------------- .../api/controllers/row/ExternalRequest.ts | 15 ++- .../src/api/controllers/row/external.ts | 2 +- .../src/sdk/app/rows/search/external.ts | 65 ++++----- .../server/src/sdk/app/rows/search/sqs.ts | 40 +++--- 5 files changed, 104 insertions(+), 141 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index 9624208deb..dce228dcfa 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -1,92 +1,35 @@ -SELECT 'CREATE DATABASE main' -WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec -CREATE SCHEMA "test-1"; -CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer', 'support'); -CREATE TABLE Persons ( - PersonID SERIAL PRIMARY KEY, - LastName varchar(255), - FirstName varchar(255), - Address varchar(255), - City varchar(255) DEFAULT 'Belfast', - Age INTEGER DEFAULT 20 NOT NULL, - Year INTEGER, - Type person_job +-- Create the first table +CREATE TABLE first_table ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + description TEXT ); -CREATE TABLE Tasks ( - TaskID SERIAL PRIMARY KEY, - ExecutorID INT, - QaID INT, - Completed BOOLEAN, - TaskName varchar(255), - CONSTRAINT fkexecutor - FOREIGN KEY(ExecutorID) - REFERENCES Persons(PersonID), - CONSTRAINT fkqa - FOREIGN KEY(QaID) - REFERENCES Persons(PersonID) -); -CREATE TABLE Products ( - ProductID SERIAL PRIMARY KEY, - ProductName varchar(255) -); -CREATE TABLE Products_Tasks ( - ProductID INT NOT NULL, - TaskID INT NOT NULL, - CONSTRAINT fkProducts - FOREIGN KEY(ProductID) - REFERENCES Products(ProductID), - CONSTRAINT fkTasks - FOREIGN KEY(TaskID) - REFERENCES Tasks(TaskID), - PRIMARY KEY (ProductID, TaskID) -); -CREATE TABLE "test-1".table1 ( - id SERIAL PRIMARY KEY, - Name varchar(255) -); -CREATE TABLE CompositeTable ( - KeyPartOne varchar(128), - 
KeyPartTwo varchar(128), - Name varchar(255), - PRIMARY KEY (KeyPartOne, KeyPartTwo) -); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa', 1999); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support'); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993); --- insert a lot of tasks for testing -WITH RECURSIVE generate_series AS ( - SELECT 1 AS n - UNION ALL - SELECT n + 1 FROM generate_series WHERE n < 6000 -), -random_data AS ( - SELECT - n, - (random() * 9 + 1)::int AS ExecutorID, - (random() * 9 + 1)::int AS QaID, - 'assembling' AS TaskName, - (random() < 0.5) AS Completed - FROM generate_series -) -INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) -SELECT ExecutorID, QaID, TaskName, Completed -FROM random_data; 
-INSERT INTO Products (ProductName) VALUES ('Computers'); -INSERT INTO Products (ProductName) VALUES ('Laptops'); -INSERT INTO Products (ProductName) VALUES ('Chairs'); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2); -INSERT INTO "test-1".table1 (Name) VALUES ('Test'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('aaa', 'bbb', 'Michael'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('bbb', 'ccc', 'Andrew'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); +-- Create the second table +CREATE TABLE second_table ( + id SERIAL PRIMARY KEY, + first_table_id INT REFERENCES first_table(id), + data TEXT NOT NULL +); + +-- Insert 50 rows into the first table +DO +$$ +BEGIN + FOR i IN 1..50 LOOP + INSERT INTO first_table (name, description) + VALUES ('Name ' || i, 'Description ' || i); + END LOOP; +END +$$; + +-- Insert 10,000 rows into the second table, all related to the first row in the first table +DO +$$ +BEGIN + FOR i IN 1..10000 LOOP + INSERT INTO second_table (first_table_id, data) + VALUES (1, 'Data ' || i); + END LOOP; +END +$$; diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 2ecdf9a4cb..6538e7347a 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -66,9 +66,14 @@ export interface RunConfig { includeSqlRelationships?: IncludeRelationship } +export type ExternalReadRequestReturnType = { + rows: Row[] + rawResponseSize: number +} + export type ExternalRequestReturnType = T extends Operation.READ - ? Row[] + ? ExternalReadRequestReturnType : T extends Operation.COUNT ? 
number : { row: Row; table: Table } @@ -741,9 +746,11 @@ export class ExternalRequest { ) // if reading it'll just be an array of rows, return whole thing if (operation === Operation.READ) { - return ( - Array.isArray(output) ? output : [output] - ) as ExternalRequestReturnType + const rows = Array.isArray(output) ? output : [output] + return { + rows, + rawResponseSize: responseRows.length, + } as ExternalRequestReturnType } else { return { row: output[0], table } as ExternalRequestReturnType } diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index 06013d230c..78fae7aad8 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) { includeSqlRelationships: IncludeRelationship.INCLUDE, }) const table: Table = tables[tableName] - const row = response[0] + const row = response.rows[0] // this seems like a lot of work, but basically we need to dig deeper for the enrich // for a single row, there is probably a better way to do this with some smart multi-layer joins for (let [fieldName, field] of Object.entries(table.schema)) { diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index e7fd2888de..76f00a25e2 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -23,6 +23,7 @@ import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" import { isSearchingByRowID } from "./utils" +import { ExternalReadRequestReturnType } from "../../../../api/controllers/row/ExternalRequest" function getPaginationAndLimitParameters( filters: SearchFilters, @@ -47,7 +48,7 @@ function getPaginationAndLimitParameters( limit: limit + 1, } if (bookmark) { - paginateObj.offset = limit * 
bookmark + paginateObj.offset = bookmark } } else if (limit) { paginateObj = { @@ -105,37 +106,42 @@ export async function search( paginate: paginateObj as PaginationJson, includeSqlRelationships: IncludeRelationship.INCLUDE, } - const queries: Promise[] = [] - queries.push(handleRequest(Operation.READ, tableId, parameters)) + const queries: [ + Promise, + Promise | undefined + ] = [handleRequest(Operation.READ, tableId, parameters), undefined] if (countRows) { - queries.push(handleRequest(Operation.COUNT, tableId, parameters)) + queries[1] = handleRequest(Operation.COUNT, tableId, parameters) } const responses = await Promise.all(queries) - let rows = responses[0] as Row[] - const totalRows = - responses.length > 1 ? (responses[1] as number) : undefined + let rows = responses[0].rows + const rawResponseSize = responses[0].rawResponseSize + const totalRows = responses.length > 1 ? responses[1] : undefined - let hasNextPage = false - // remove the extra row if it's there - if (paginate && limit && rows.length > limit) { - rows.pop() - hasNextPage = true - } - - if (options.fields) { - const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] - rows = rows.map((r: any) => pick(r, fields)) - } - - rows = await outputProcessing(table, rows, { + let processed = await outputProcessing(table, rows, { preserveLinks: true, squash: true, }) + let hasNextPage = false + // if the raw rows is greater than the limit then we likely need to paginate + if (paginate && limit && rawResponseSize > limit) { + hasNextPage = true + // processed rows has merged relationships down, this might not be more than limit + if (processed.length > limit) { + processed.pop() + } + } + + if (options.fields) { + const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] + processed = processed.map((r: any) => pick(r, fields)) + } + // need wrapper object for bookmarks etc when paginating - const response: SearchResponse = { rows, hasNextPage } + const response: SearchResponse = { rows: 
processed, hasNextPage } if (hasNextPage && bookmark != null) { - response.bookmark = bookmark + 1 + response.bookmark = processed.length } if (totalRows != null) { response.totalRows = totalRows @@ -255,24 +261,21 @@ export async function exportRows( } export async function fetch(tableId: string): Promise { - const response = await handleRequest( - Operation.READ, - tableId, - { - includeSqlRelationships: IncludeRelationship.INCLUDE, - } - ) + const response = await handleRequest(Operation.READ, tableId, { + includeSqlRelationships: IncludeRelationship.INCLUDE, + }) const table = await sdk.tables.getTable(tableId) - return await outputProcessing(table, response, { + return await outputProcessing(table, response.rows, { preserveLinks: true, squash: true, }) } export async function fetchRaw(tableId: string): Promise { - return await handleRequest(Operation.READ, tableId, { + const response = await handleRequest(Operation.READ, tableId, { includeSqlRelationships: IncludeRelationship.INCLUDE, }) + return response.rows } export async function fetchView(viewName: string) { diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 5da2a7bcfb..2b3ed0c087 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -45,6 +45,7 @@ import { dataFilters, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core" import { isSearchingByRowID } from "./utils" const builder = new sql.Sql(SqlClient.SQL_LITE) +const SQLITE_COLUMN_LIMIT = 2000 const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`) const MISSING_TABLE_REGX = new RegExp(`no such table: .+`) const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`) @@ -55,12 +56,14 @@ function buildInternalFieldList( opts?: { relationships?: RelationshipsJson[] } ) { let fieldList: string[] = [] - const addJunctionFields = (relatedTable: Table, fields: string[]) => { + const getJunctionFields = 
(relatedTable: Table, fields: string[]) => { + const junctionFields: string[] = [] fields.forEach(field => { - fieldList.push( + junctionFields.push( `${generateJunctionTableID(table._id!, relatedTable._id!)}.${field}` ) }) + return junctionFields } fieldList = fieldList.concat( PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`) @@ -70,18 +73,22 @@ function buildInternalFieldList( if (!opts?.relationships && isRelationship) { continue } - if (isRelationship) { + if (!isRelationship) { + fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`) + } else { const linkCol = col as RelationshipFieldMetadata const relatedTable = tables.find(table => table._id === linkCol.tableId) - // no relationships provided, don't go more than a layer deep - if (relatedTable) { - fieldList = fieldList.concat( - buildInternalFieldList(relatedTable, tables) - ) - addJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"]) + if (!relatedTable) { + continue } - } else { - fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`) + const relatedFields = buildInternalFieldList(relatedTable, tables).concat( + getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"]) + ) + // break out of the loop if we have reached the max number of columns + if (relatedFields.length + fieldList.length > SQLITE_COLUMN_LIMIT) { + break + } + fieldList = fieldList.concat(relatedFields) } } return [...new Set(fieldList)] @@ -315,7 +322,7 @@ export async function search( paginate = true request.paginate = { limit: params.limit + 1, - offset: bookmark * params.limit, + offset: bookmark, } } @@ -345,10 +352,13 @@ export async function search( ) // check for pagination final row - let nextRow: Row | undefined + let nextRow: boolean = false if (paginate && params.limit && rows.length > params.limit) { // remove the extra row that confirmed if there is another row to move to - nextRow = processed.pop() + nextRow = true + if (processed.length > params.limit) { + processed.pop() + } 
} // get the rows @@ -372,7 +382,7 @@ export async function search( // check for pagination if (paginate && nextRow) { response.hasNextPage = true - response.bookmark = bookmark + 1 + response.bookmark = processed.length } if (paginate && !nextRow) { response.hasNextPage = false From 788a16cf48cbddf82c041f4439a83df13246d4e6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 17:26:42 +0200 Subject: [PATCH 42/94] Add safety tests --- .../server/src/api/routes/tests/table.spec.ts | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index 0d15919db4..16f7f68550 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -123,6 +123,29 @@ describe.each([ body: basicTable(), }) }) + + it("does not persist the row fields that are not on the table schema", async () => { + const table: SaveTableRequest = basicTable() + table.rows = [ + { + name: "test-name", + description: "test-desc", + nonValid: "test-non-valid", + }, + ] + + const res = await config.api.table.save(table) + + const persistedRows = await config.api.row.search(res._id!) + + expect(persistedRows.rows).toEqual([ + expect.objectContaining({ + name: "test-name", + description: "test-desc", + }), + ]) + expect(persistedRows.rows[0].nonValid).toBeUndefined() + }) }) describe("update", () => { From a2f11f17fd2947619ba488c6652f3d398be14776 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 16:31:02 +0100 Subject: [PATCH 43/94] Type fix. 
--- packages/server/src/sdk/app/rows/external.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/server/src/sdk/app/rows/external.ts b/packages/server/src/sdk/app/rows/external.ts index 9ab1362606..f81d67f621 100644 --- a/packages/server/src/sdk/app/rows/external.ts +++ b/packages/server/src/sdk/app/rows/external.ts @@ -21,7 +21,8 @@ export async function getRow( ? IncludeRelationship.INCLUDE : IncludeRelationship.EXCLUDE, }) - return response ? response[0] : response + const rows = response?.rows || [] + return rows[0] } export async function save( From 3f4484af00dbbef7b4de8e0e3c8e64e2362e7e7c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 17:32:19 +0200 Subject: [PATCH 44/94] Add extra tests --- .../server/src/api/routes/tests/table.spec.ts | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index 16f7f68550..594f95f9fa 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -146,6 +146,41 @@ describe.each([ ]) expect(persistedRows.rows[0].nonValid).toBeUndefined() }) + + it.each( + isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS + )( + "cannot use protected column names (%s) while importing a table", + async columnName => { + const table: SaveTableRequest = basicTable() + table.rows = [ + { + name: "test-name", + description: "test-desc", + }, + ] + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + [columnName]: { + name: columnName, + type: FieldType.STRING, + }, + }, + }, + { + status: 400, + body: { + message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`, + status: 400, + }, + } + ) + } + ) }) describe("update", () => { From ad74eca709b96d4b14027110e1c835b7a59d6af1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 17:32:36 +0200 Subject: [PATCH 45/94] Fix --- .../server/src/sdk/app/tables/internal/index.ts | 4 +--- packages/shared-core/src/table.ts | 14 +++++--------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/packages/server/src/sdk/app/tables/internal/index.ts b/packages/server/src/sdk/app/tables/internal/index.ts index 58f172e016..c0beed0db8 100644 --- a/packages/server/src/sdk/app/tables/internal/index.ts +++ b/packages/server/src/sdk/app/tables/internal/index.ts @@ -48,9 +48,7 @@ export async function save( } // check for case sensitivity - we don't want to allow duplicated columns - const duplicateColumn = findDuplicateInternalColumns(table, { - ignoreProtectedColumnNames: !oldTable && !!opts?.isImport, - }) + const duplicateColumn = findDuplicateInternalColumns(table) if (duplicateColumn.length) { throw new Error( `Column(s) "${duplicateColumn.join( diff --git a/packages/shared-core/src/table.ts b/packages/shared-core/src/table.ts index d5fd1dec00..9e7626cb1c 100644 --- a/packages/shared-core/src/table.ts +++ b/packages/shared-core/src/table.ts @@ -53,10 +53,7 @@ export function canBeSortColumn(type: FieldType): boolean { return !!allowSortColumnByType[type] } -export function 
findDuplicateInternalColumns( - table: Table, - opts?: { ignoreProtectedColumnNames: boolean } -): string[] { +export function findDuplicateInternalColumns(table: Table): string[] { // maintains the case of keys const casedKeys = Object.keys(table.schema) // get the column names @@ -72,11 +69,10 @@ export function findDuplicateInternalColumns( } } } - if (!opts?.ignoreProtectedColumnNames) { - for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) { - if (casedKeys.find(key => key === internalColumn)) { - duplicates.push(internalColumn) - } + + for (let internalColumn of PROTECTED_INTERNAL_COLUMNS) { + if (casedKeys.find(key => key === internalColumn)) { + duplicates.push(internalColumn) } } return duplicates From 8f741ffe6ac1fa561311e6ecc35ea8f9e9b3cdef Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 31 Jul 2024 17:40:30 +0200 Subject: [PATCH 46/94] More validations --- .../server/src/api/routes/tests/table.spec.ts | 89 ++++++++++++------- packages/server/src/utilities/schema.ts | 9 +- 2 files changed, 67 insertions(+), 31 deletions(-) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index 594f95f9fa..f383fed927 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -1118,38 +1118,67 @@ describe.each([ it.each( isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS - )("don't allow protected names (%s)", async columnName => { - const result = await config.api.table.validateNewTableImport( - [ - { - id: generator.natural(), - name: generator.first(), - [columnName]: generator.word(), - }, - ], - { - ...basicSchema, - [columnName]: { - name: columnName, - type: FieldType.STRING, - }, - } - ) - - expect(result).toEqual({ - allValid: false, - errors: { - [columnName]: `${columnName} is a protected name`, - }, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - [columnName]: false, - }, - }) + )("don't allow protected names in schema (%s)", async columnName => { + const result = await config.api.table.validateNewTableImport( + [ + { + id: generator.natural(), + name: generator.first(), + [columnName]: generator.word(), + }, + ], + { + ...basicSchema, } ) + + expect(result).toEqual({ + allValid: false, + errors: { + [columnName]: `${columnName} is a protected column name`, + }, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + [columnName]: false, + }, + }) + }) + + isInternal && + it.each( + isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS + )("don't allow protected names in the rows (%s)", async columnName => { + const result = await config.api.table.validateNewTableImport( + [ + { + id: generator.natural(), + name: generator.first(), + }, + ], + { + ...basicSchema, + [columnName]: { + name: columnName, + type: FieldType.STRING, + }, + } + ) + + expect(result).toEqual({ + allValid: false, + errors: { + [columnName]: `${columnName} is a protected column name`, + }, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + [columnName]: false, + }, + }) + }) }) describe("validateExistingTableImport", () => { diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts index c7a4427dd7..c6b26b55c8 100644 --- a/packages/server/src/utilities/schema.ts +++ b/packages/server/src/utilities/schema.ts @@ -71,7 +71,7 @@ export function validate( if (protectedColumnNames.includes(columnName.toLowerCase())) { results.schemaValidation[columnName] = false - results.errors[columnName] = `${columnName} is a protected name` + results.errors[columnName] = `${columnName} is a protected column name` return } @@ -121,6 +121,13 @@ export function validate( }) }) + for (const schemaField of Object.keys(schema)) { + if (protectedColumnNames.includes(schemaField.toLowerCase())) { + results.schemaValidation[schemaField] = false + results.errors[schemaField] = `${schemaField} is a protected column name` + } + } + results.allValid = Object.values(results.schemaValidation).length > 0 && Object.values(results.schemaValidation).every(column => column) From de22a078c066da363095fcdc148f67dc5b4b8365 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 16:50:20 +0100 Subject: [PATCH 47/94] Adding bookmark to each subsequent (thanks tests ). 
--- packages/server/src/sdk/app/rows/search/external.ts | 2 +- packages/server/src/sdk/app/rows/search/sqs.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 76f00a25e2..f47f6f7998 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -141,7 +141,7 @@ export async function search( // need wrapper object for bookmarks etc when paginating const response: SearchResponse = { rows: processed, hasNextPage } if (hasNextPage && bookmark != null) { - response.bookmark = processed.length + response.bookmark = bookmark + processed.length } if (totalRows != null) { response.totalRows = totalRows diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 2b3ed0c087..f3168a4462 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -382,7 +382,7 @@ export async function search( // check for pagination if (paginate && nextRow) { response.hasNextPage = true - response.bookmark = processed.length + response.bookmark = bookmark + processed.length } if (paginate && !nextRow) { response.hasNextPage = false From b54157a6fb42ce38b4a7f858572abdfa8d092bab Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 17:22:17 +0100 Subject: [PATCH 48/94] Fix for enrich endpoint discovered by tests. 
--- packages/server/src/api/controllers/row/external.ts | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index 78fae7aad8..8b95d9c2f3 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) { }, includeSqlRelationships: IncludeRelationship.INCLUDE, }) - row[fieldName] = await outputProcessing(linkedTable, relatedRows, { - squash: true, - preserveLinks: true, - }) + row[fieldName] = await outputProcessing( + linkedTable, + relatedRows.rows, + { + squash: true, + preserveLinks: true, + } + ) } return row } From 6b78e599f0cfb9ea53d0fc2d64bc8c3ebddf3171 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 31 Jul 2024 17:37:16 +0100 Subject: [PATCH 49/94] Working on getting Oracle auto column imports working. --- packages/server/src/api/routes/tests/row.spec.ts | 2 +- packages/server/src/integrations/oracle.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 85a9b40c7d..de7714a876 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -864,7 +864,7 @@ describe.each([ }) !isInternal && - it("can update a row on an external table with a primary key", async () => { + it.only("can update a row on an external table with a primary key", async () => { const tableName = uuid.v4().substring(0, 10) await client!.schema.createTable(tableName, table => { table.increments("id").primary() diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 7895692076..691c5167a7 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ 
-98,7 +98,7 @@ const SCHEMA: Integration = { }, } -const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"] +const UNSUPPORTED_TYPES = ["BLOB", "NCLOB"] const OracleContraintTypes = { PRIMARY: "P", From 8539f6d8532673e167d7ffae21725ba0292dc0b9 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 31 Jul 2024 22:18:00 +0100 Subject: [PATCH 50/94] Add helper function / builder for creating Automations for tests and improved types (#14220) * basic class for generating and running an automation * change filename * add to existing tests * remove dupe tests * add types to automation steps * add types to triggers * update tests and typing * fix types * typo * move all step schema types do types folder * updated types * typing pr comments * remove unused param * some more typing and tests * more typing * improve type map * fix broken type * this will surely fix my issue --- packages/server/src/automations/actions.ts | 26 +- .../server/src/automations/automationUtils.ts | 9 +- packages/server/src/automations/steps/bash.ts | 11 +- .../server/src/automations/steps/collect.ts | 9 +- .../server/src/automations/steps/createRow.ts | 16 +- .../server/src/automations/steps/delay.ts | 9 +- .../server/src/automations/steps/deleteRow.ts | 14 +- .../server/src/automations/steps/discord.ts | 9 +- .../src/automations/steps/executeQuery.ts | 14 +- .../src/automations/steps/executeScript.ts | 11 +- .../server/src/automations/steps/filter.ts | 9 +- packages/server/src/automations/steps/make.ts | 9 +- packages/server/src/automations/steps/n8n.ts | 9 +- .../server/src/automations/steps/openai.ts | 9 +- .../src/automations/steps/outgoingWebhook.ts | 11 +- .../server/src/automations/steps/queryRows.ts | 13 +- .../src/automations/steps/sendSmtpEmail.ts | 26 +- .../server/src/automations/steps/serverLog.ts | 11 +- .../server/src/automations/steps/slack.ts | 9 +- .../automations/steps/triggerAutomationRun.ts | 13 +- .../server/src/automations/steps/updateRow.ts | 15 +- 
.../server/src/automations/steps/zapier.ts | 9 +- .../server/src/automations/tests/loop.spec.ts | 4 +- .../tests/scenarios/scenarios.spec.ts | 160 +++++++++ .../tests/utilities/AutomationBuilder.ts | 174 ++++++++++ .../src/automations/tests/utilities/index.ts | 5 +- .../server/src/automations/triggerInfo/app.ts | 8 + .../src/automations/triggerInfo/cron.ts | 8 + .../src/automations/triggerInfo/rowDeleted.ts | 9 + .../src/automations/triggerInfo/rowSaved.ts | 13 + .../src/automations/triggerInfo/rowUpdated.ts | 13 + .../unitTests/automationUtils.spec.ts | 2 +- .../server/src/definitions/automations.ts | 11 +- .../server/src/tests/utilities/structures.ts | 3 +- packages/server/src/threads/automation.ts | 7 +- .../server/src/utilities/workerRequests.ts | 4 +- .../app/{ => automation}/automation.ts | 24 +- .../src/documents/app/automation/index.ts | 2 + .../src/documents/app/automation/schema.ts | 320 ++++++++++++++++++ 39 files changed, 952 insertions(+), 86 deletions(-) create mode 100644 packages/server/src/automations/tests/scenarios/scenarios.spec.ts create mode 100644 packages/server/src/automations/tests/utilities/AutomationBuilder.ts rename packages/types/src/documents/app/{ => automation}/automation.ts (93%) create mode 100644 packages/types/src/documents/app/automation/index.ts create mode 100644 packages/types/src/documents/app/automation/schema.ts diff --git a/packages/server/src/automations/actions.ts b/packages/server/src/automations/actions.ts index eee8ab4a7b..cff269cd80 100644 --- a/packages/server/src/automations/actions.ts +++ b/packages/server/src/automations/actions.ts @@ -20,17 +20,21 @@ import * as triggerAutomationRun from "./steps/triggerAutomationRun" import env from "../environment" import { AutomationStepSchema, - AutomationStepInput, PluginType, AutomationStep, + AutomationActionStepId, + ActionImplementations, + Hosting, + ActionImplementation, } from "@budibase/types" import sdk from "../sdk" import { getAutomationPlugin } from 
"../utilities/fileSystem" -const ACTION_IMPLS: Record< - string, - (opts: AutomationStepInput) => Promise -> = { +type ActionImplType = ActionImplementations< + typeof env.SELF_HOSTED extends "true" ? Hosting.SELF : Hosting.CLOUD +> + +const ACTION_IMPLS: ActionImplType = { SEND_EMAIL_SMTP: sendSmtpEmail.run, CREATE_ROW: createRow.run, UPDATE_ROW: updateRow.run, @@ -51,6 +55,7 @@ const ACTION_IMPLS: Record< integromat: make.run, n8n: n8n.run, } + export const BUILTIN_ACTION_DEFINITIONS: Record = { SEND_EMAIL_SMTP: sendSmtpEmail.definition, @@ -86,7 +91,7 @@ if (env.SELF_HOSTED) { ACTION_IMPLS["EXECUTE_BASH"] = bash.run // @ts-ignore BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition - + // @ts-ignore ACTION_IMPLS.OPENAI = openai.run BUILTIN_ACTION_DEFINITIONS.OPENAI = openai.definition } @@ -107,10 +112,13 @@ export async function getActionDefinitions() { } /* istanbul ignore next */ -export async function getAction(stepId: string) { - if (ACTION_IMPLS[stepId] != null) { - return ACTION_IMPLS[stepId] +export async function getAction( + stepId: AutomationActionStepId +): Promise | undefined> { + if (ACTION_IMPLS[stepId as keyof ActionImplType] != null) { + return ACTION_IMPLS[stepId as keyof ActionImplType] } + // must be a plugin if (env.SELF_HOSTED) { const plugins = await sdk.plugins.fetch(PluginType.AUTOMATION) diff --git a/packages/server/src/automations/automationUtils.ts b/packages/server/src/automations/automationUtils.ts index bb63be8bce..6e42f8e4bc 100644 --- a/packages/server/src/automations/automationUtils.ts +++ b/packages/server/src/automations/automationUtils.ts @@ -4,8 +4,13 @@ import { encodeJSBinding, } from "@budibase/string-templates" import sdk from "../sdk" -import { AutomationAttachment, FieldType, Row } from "@budibase/types" -import { LoopInput, LoopStepType } from "../definitions/automations" +import { + AutomationAttachment, + FieldType, + Row, + LoopStepType, +} from "@budibase/types" +import { LoopInput } from 
"../definitions/automations" import { objectStore, context } from "@budibase/backend-core" import * as uuid from "uuid" import path from "path" diff --git a/packages/server/src/automations/steps/bash.ts b/packages/server/src/automations/steps/bash.ts index 1a13f651ec..d33bfb3d6c 100644 --- a/packages/server/src/automations/steps/bash.ts +++ b/packages/server/src/automations/steps/bash.ts @@ -7,9 +7,10 @@ import { AutomationCustomIOType, AutomationFeature, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, + BashStepInputs, + BashStepOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -51,7 +52,13 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs, context }: AutomationStepInput) { +export async function run({ + inputs, + context, +}: { + inputs: BashStepInputs + context: object +}): Promise { if (inputs.code == null) { return { stdout: "Budibase bash automation failed: Invalid inputs", diff --git a/packages/server/src/automations/steps/collect.ts b/packages/server/src/automations/steps/collect.ts index 035bd36a46..2451fd1cf6 100644 --- a/packages/server/src/automations/steps/collect.ts +++ b/packages/server/src/automations/steps/collect.ts @@ -1,9 +1,10 @@ import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, + CollectStepInputs, + CollectStepOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -43,7 +44,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: CollectStepInputs +}): Promise { if (!inputs.collection) { return { success: false, diff --git a/packages/server/src/automations/steps/createRow.ts b/packages/server/src/automations/steps/createRow.ts index c7f5fcff3b..9908f138b8 100644 --- 
a/packages/server/src/automations/steps/createRow.ts +++ b/packages/server/src/automations/steps/createRow.ts @@ -10,10 +10,12 @@ import { AutomationCustomIOType, AutomationFeature, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, + CreateRowStepInputs, + CreateRowStepOutputs, } from "@budibase/types" +import { EventEmitter } from "events" export const definition: AutomationStepSchema = { name: "Create Row", @@ -74,7 +76,15 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs, appId, emitter }: AutomationStepInput) { +export async function run({ + inputs, + appId, + emitter, +}: { + inputs: CreateRowStepInputs + appId: string + emitter: EventEmitter +}): Promise { if (inputs.row == null || inputs.row.tableId == null) { return { success: false, @@ -93,7 +103,7 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) { try { inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row) inputs.row = await sendAutomationAttachmentsToStorage( - inputs.row.tableId, + inputs.row.tableId!, inputs.row ) await save(ctx) diff --git a/packages/server/src/automations/steps/delay.ts b/packages/server/src/automations/steps/delay.ts index 4e04539998..5392f42b4b 100644 --- a/packages/server/src/automations/steps/delay.ts +++ b/packages/server/src/automations/steps/delay.ts @@ -2,9 +2,10 @@ import { wait } from "../../utilities" import { AutomationActionStepId, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, + DelayStepInputs, + DelayStepOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -39,7 +40,11 @@ export const definition: AutomationStepSchema = { type: AutomationStepType.LOGIC, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: DelayStepInputs +}): Promise { await wait(inputs.time) return { success: true, diff --git 
a/packages/server/src/automations/steps/deleteRow.ts b/packages/server/src/automations/steps/deleteRow.ts index c8b6585cae..fa26dddb45 100644 --- a/packages/server/src/automations/steps/deleteRow.ts +++ b/packages/server/src/automations/steps/deleteRow.ts @@ -1,14 +1,16 @@ +import { EventEmitter } from "events" import { destroy } from "../../api/controllers/row" import { buildCtx } from "./utils" import { getError } from "../automationUtils" import { AutomationActionStepId, - AutomationStepInput, AutomationStepSchema, AutomationStepType, AutomationIOType, AutomationCustomIOType, AutomationFeature, + DeleteRowStepInputs, + DeleteRowStepOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -59,7 +61,15 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs, appId, emitter }: AutomationStepInput) { +export async function run({ + inputs, + appId, + emitter, +}: { + inputs: DeleteRowStepInputs + appId: string + emitter: EventEmitter +}): Promise { if (inputs.id == null) { return { success: false, diff --git a/packages/server/src/automations/steps/discord.ts b/packages/server/src/automations/steps/discord.ts index c80e4ba66f..355f84b987 100644 --- a/packages/server/src/automations/steps/discord.ts +++ b/packages/server/src/automations/steps/discord.ts @@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils" import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationFeature, + ExternalAppStepOutputs, + DiscordStepInputs, } from "@budibase/types" const DEFAULT_USERNAME = "Budibase Automate" @@ -65,7 +66,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: DiscordStepInputs +}): Promise { let { url, username, avatar_url, content } = inputs if (!username) { username = DEFAULT_USERNAME diff --git 
a/packages/server/src/automations/steps/executeQuery.ts b/packages/server/src/automations/steps/executeQuery.ts index a9517b01a0..eb033b8883 100644 --- a/packages/server/src/automations/steps/executeQuery.ts +++ b/packages/server/src/automations/steps/executeQuery.ts @@ -1,3 +1,4 @@ +import { EventEmitter } from "events" import * as queryController from "../../api/controllers/query" import { buildCtx } from "./utils" import * as automationUtils from "../automationUtils" @@ -6,9 +7,10 @@ import { AutomationCustomIOType, AutomationFeature, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, + ExecuteQueryStepInputs, + ExecuteQueryStepOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -62,7 +64,15 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs, appId, emitter }: AutomationStepInput) { +export async function run({ + inputs, + appId, + emitter, +}: { + inputs: ExecuteQueryStepInputs + appId: string + emitter: EventEmitter +}): Promise { if (inputs.query == null) { return { success: false, diff --git a/packages/server/src/automations/steps/executeScript.ts b/packages/server/src/automations/steps/executeScript.ts index 7ce6c1d0f4..3962da53ca 100644 --- a/packages/server/src/automations/steps/executeScript.ts +++ b/packages/server/src/automations/steps/executeScript.ts @@ -6,10 +6,12 @@ import { AutomationCustomIOType, AutomationFeature, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, + ExecuteScriptStepInputs, + ExecuteScriptStepOutputs, } from "@budibase/types" +import { EventEmitter } from "events" export const definition: AutomationStepSchema = { name: "JS Scripting", @@ -55,7 +57,12 @@ export async function run({ appId, context, emitter, -}: AutomationStepInput) { +}: { + inputs: ExecuteScriptStepInputs + appId: string + context: object + emitter: EventEmitter +}): Promise { if (inputs.code == null) { return { success: 
false, diff --git a/packages/server/src/automations/steps/filter.ts b/packages/server/src/automations/steps/filter.ts index 624619bb95..6d35a72698 100644 --- a/packages/server/src/automations/steps/filter.ts +++ b/packages/server/src/automations/steps/filter.ts @@ -1,9 +1,10 @@ import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, + FilterStepInputs, + FilterStepOutputs, } from "@budibase/types" export const FilterConditions = { @@ -69,7 +70,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: FilterStepInputs +}): Promise { try { let { field, condition, value } = inputs // coerce types so that we can use them diff --git a/packages/server/src/automations/steps/make.ts b/packages/server/src/automations/steps/make.ts index 555df8308a..45e31fbaa2 100644 --- a/packages/server/src/automations/steps/make.ts +++ b/packages/server/src/automations/steps/make.ts @@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils" import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationFeature, + ExternalAppStepOutputs, + MakeIntegrationInputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -57,7 +58,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: MakeIntegrationInputs +}): Promise { const { url, body } = inputs let payload = {} diff --git a/packages/server/src/automations/steps/n8n.ts b/packages/server/src/automations/steps/n8n.ts index c400c7037a..b2fbce6de7 100644 --- a/packages/server/src/automations/steps/n8n.ts +++ b/packages/server/src/automations/steps/n8n.ts @@ -3,11 +3,12 @@ import { getFetchResponse } from "./utils" import { AutomationActionStepId, 
AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationFeature, HttpMethod, + ExternalAppStepOutputs, + n8nStepInputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -67,7 +68,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: n8nStepInputs +}): Promise { const { url, body, method, authorization } = inputs let payload = {} diff --git a/packages/server/src/automations/steps/openai.ts b/packages/server/src/automations/steps/openai.ts index 380a6b9536..279a0a9df0 100644 --- a/packages/server/src/automations/steps/openai.ts +++ b/packages/server/src/automations/steps/openai.ts @@ -3,9 +3,10 @@ import { OpenAI } from "openai" import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, + OpenAIStepInputs, + OpenAIStepOutputs, } from "@budibase/types" import { env } from "@budibase/backend-core" import * as automationUtils from "../automationUtils" @@ -59,7 +60,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: OpenAIStepInputs +}): Promise { if (!env.OPENAI_API_KEY) { return { success: false, diff --git a/packages/server/src/automations/steps/outgoingWebhook.ts b/packages/server/src/automations/steps/outgoingWebhook.ts index 269c8c7157..04972fefae 100644 --- a/packages/server/src/automations/steps/outgoingWebhook.ts +++ b/packages/server/src/automations/steps/outgoingWebhook.ts @@ -6,9 +6,10 @@ import { AutomationCustomIOType, AutomationFeature, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, + ExternalAppStepOutputs, + OutgoingWebhookStepInputs, } from "@budibase/types" enum RequestType { @@ -88,7 +89,13 @@ export const definition: AutomationStepSchema = { }, } 
-export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: OutgoingWebhookStepInputs +}): Promise< + Omit | ExternalAppStepOutputs +> { let { requestMethod, url, requestBody, headers } = inputs if (!url.startsWith("http")) { url = `http://${url}` diff --git a/packages/server/src/automations/steps/queryRows.ts b/packages/server/src/automations/steps/queryRows.ts index aa3efa5425..172bbf7f55 100644 --- a/packages/server/src/automations/steps/queryRows.ts +++ b/packages/server/src/automations/steps/queryRows.ts @@ -8,13 +8,14 @@ import { AutomationCustomIOType, AutomationFeature, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, EmptyFilterOption, SearchFilters, Table, SortOrder, + QueryRowsStepInputs, + QueryRowsStepOutputs, } from "@budibase/types" import { db as dbCore } from "@budibase/backend-core" @@ -133,7 +134,13 @@ function hasNullFilters(filters: any[]) { ) } -export async function run({ inputs, appId }: AutomationStepInput) { +export async function run({ + inputs, + appId, +}: { + inputs: QueryRowsStepInputs + appId: string +}): Promise { const { tableId, filters, sortColumn, sortOrder, limit } = inputs if (!tableId) { return { @@ -145,7 +152,7 @@ export async function run({ inputs, appId }: AutomationStepInput) { } const table = await getTable(appId, tableId) let sortType = FieldType.STRING - if (table && table.schema && table.schema[sortColumn] && sortColumn) { + if (sortColumn && table && table.schema && table.schema[sortColumn]) { const fieldType = table.schema[sortColumn].type sortType = fieldType === FieldType.NUMBER ? 
FieldType.NUMBER : FieldType.STRING diff --git a/packages/server/src/automations/steps/sendSmtpEmail.ts b/packages/server/src/automations/steps/sendSmtpEmail.ts index bcb1699c6b..4950bfb3f3 100644 --- a/packages/server/src/automations/steps/sendSmtpEmail.ts +++ b/packages/server/src/automations/steps/sendSmtpEmail.ts @@ -3,11 +3,12 @@ import * as automationUtils from "../automationUtils" import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationFeature, AutomationCustomIOType, + SmtpEmailStepInputs, + BaseAutomationOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -97,7 +98,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: SmtpEmailStepInputs +}): Promise { let { to, from, @@ -116,17 +121,16 @@ export async function run({ inputs }: AutomationStepInput) { if (!contents) { contents = "

No content

" } - to = to || undefined - - if (attachments) { - if (Array.isArray(attachments)) { - attachments.forEach(item => automationUtils.guardAttachment(item)) - } else { - automationUtils.guardAttachment(attachments) - } - } try { + if (attachments) { + if (Array.isArray(attachments)) { + attachments.forEach(item => automationUtils.guardAttachment(item)) + } else { + automationUtils.guardAttachment(attachments) + } + } + let response = await sendSmtpEmail({ to, from, diff --git a/packages/server/src/automations/steps/serverLog.ts b/packages/server/src/automations/steps/serverLog.ts index eb75ca1f3b..482325b744 100644 --- a/packages/server/src/automations/steps/serverLog.ts +++ b/packages/server/src/automations/steps/serverLog.ts @@ -1,10 +1,11 @@ import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationFeature, + ServerLogStepInputs, + ServerLogStepOutputs, } from "@budibase/types" /** @@ -53,7 +54,13 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs, appId }: AutomationStepInput) { +export async function run({ + inputs, + appId, +}: { + inputs: ServerLogStepInputs + appId: string +}): Promise { const message = `App ${appId} - ${inputs.text}` console.log(message) return { diff --git a/packages/server/src/automations/steps/slack.ts b/packages/server/src/automations/steps/slack.ts index 79544bf001..3ed462796b 100644 --- a/packages/server/src/automations/steps/slack.ts +++ b/packages/server/src/automations/steps/slack.ts @@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils" import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationFeature, + ExternalAppStepOutputs, + SlackStepInputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -54,7 +55,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: 
AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: SlackStepInputs +}): Promise { let { url, text } = inputs if (!url?.trim()?.length) { return { diff --git a/packages/server/src/automations/steps/triggerAutomationRun.ts b/packages/server/src/automations/steps/triggerAutomationRun.ts index 83e1722877..cc73200ab3 100644 --- a/packages/server/src/automations/steps/triggerAutomationRun.ts +++ b/packages/server/src/automations/steps/triggerAutomationRun.ts @@ -1,12 +1,13 @@ import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationResults, Automation, AutomationCustomIOType, + TriggerAutomationStepInputs, + TriggerAutomationStepOutputs, } from "@budibase/types" import * as triggers from "../triggers" import { context } from "@budibase/backend-core" @@ -61,7 +62,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: TriggerAutomationStepInputs +}): Promise { const { automationId, ...fieldParams } = inputs.automation if (await features.isTriggerAutomationRunEnabled()) { @@ -88,5 +93,9 @@ export async function run({ inputs }: AutomationStepInput) { value: response.steps, } } + } else { + return { + success: false, + } } } diff --git a/packages/server/src/automations/steps/updateRow.ts b/packages/server/src/automations/steps/updateRow.ts index c1e7e286ce..a029fb7413 100644 --- a/packages/server/src/automations/steps/updateRow.ts +++ b/packages/server/src/automations/steps/updateRow.ts @@ -1,3 +1,4 @@ +import { EventEmitter } from "events" import * as rowController from "../../api/controllers/row" import * as automationUtils from "../automationUtils" import { buildCtx } from "./utils" @@ -6,9 +7,10 @@ import { AutomationCustomIOType, AutomationFeature, AutomationIOType, - AutomationStepInput, AutomationStepSchema, AutomationStepType, + UpdateRowStepInputs, + 
UpdateRowStepOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -70,8 +72,15 @@ export const definition: AutomationStepSchema = { }, }, } - -export async function run({ inputs, appId, emitter }: AutomationStepInput) { +export async function run({ + inputs, + appId, + emitter, +}: { + inputs: UpdateRowStepInputs + appId: string + emitter: EventEmitter +}): Promise { if (inputs.rowId == null || inputs.row == null) { return { success: false, diff --git a/packages/server/src/automations/steps/zapier.ts b/packages/server/src/automations/steps/zapier.ts index e48d677228..6de94b0486 100644 --- a/packages/server/src/automations/steps/zapier.ts +++ b/packages/server/src/automations/steps/zapier.ts @@ -3,10 +3,11 @@ import { getFetchResponse } from "./utils" import { AutomationActionStepId, AutomationStepSchema, - AutomationStepInput, AutomationStepType, AutomationIOType, AutomationFeature, + ZapierStepInputs, + ZapierStepOutputs, } from "@budibase/types" export const definition: AutomationStepSchema = { @@ -50,7 +51,11 @@ export const definition: AutomationStepSchema = { }, } -export async function run({ inputs }: AutomationStepInput) { +export async function run({ + inputs, +}: { + inputs: ZapierStepInputs +}): Promise { const { url, body } = inputs let payload = {} diff --git a/packages/server/src/automations/tests/loop.spec.ts b/packages/server/src/automations/tests/loop.spec.ts index ba237d3253..372c3855b3 100644 --- a/packages/server/src/automations/tests/loop.spec.ts +++ b/packages/server/src/automations/tests/loop.spec.ts @@ -3,9 +3,9 @@ import * as triggers from "../triggers" import { loopAutomation } from "../../tests/utilities/structures" import { context } from "@budibase/backend-core" import * as setup from "./utilities" -import { Table } from "@budibase/types" +import { Table, LoopStepType } from "@budibase/types" import * as loopUtils from "../loopUtils" -import { LoopInput, LoopStepType } from 
"../../definitions/automations" +import { LoopInput } from "../../definitions/automations" describe("Attempt to run a basic loop automation", () => { let config = setup.getConfig(), diff --git a/packages/server/src/automations/tests/scenarios/scenarios.spec.ts b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts new file mode 100644 index 0000000000..4f4ad70814 --- /dev/null +++ b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts @@ -0,0 +1,160 @@ +import * as automation from "../../index" +import * as setup from "../utilities" +import { Table, LoopStepType } from "@budibase/types" +import { createAutomationBuilder } from "../utilities/AutomationBuilder" + +describe("Automation Scenarios", () => { + let config = setup.getConfig(), + table: Table + + beforeEach(async () => { + await automation.init() + await config.init() + table = await config.createTable() + await config.createRow() + }) + + afterAll(setup.afterAll) + + describe("Loop automations", () => { + it("should run an automation with a trigger, loop, and create row step", async () => { + const builder = createAutomationBuilder({ + name: "Test Trigger with Loop and Create Row", + }) + + const results = await builder + .rowSaved( + { tableId: table._id! 
}, + { + row: { + name: "Trigger Row", + description: "This row triggers the automation", + }, + id: "1234", + revision: "1", + } + ) + .loop({ + option: LoopStepType.ARRAY, + binding: [1, 2, 3], + }) + .createRow({ + row: { + name: "Item {{ loop.currentItem }}", + description: "Created from loop", + tableId: table._id, + }, + }) + .run() + + expect(results.trigger).toBeDefined() + expect(results.steps).toHaveLength(1) + + expect(results.steps[0].outputs.iterations).toBe(3) + expect(results.steps[0].outputs.items).toHaveLength(3) + + results.steps[0].outputs.items.forEach((output: any, index: number) => { + expect(output).toMatchObject({ + success: true, + row: { + name: `Item ${index + 1}`, + description: "Created from loop", + }, + }) + }) + }) + }) + + describe("Row Automations", () => { + it("should trigger an automation which then creates a row", async () => { + const table = await config.createTable() + + const builder = createAutomationBuilder({ + name: "Test Row Save and Create", + }) + + const results = await builder + .rowUpdated( + { tableId: table._id! 
it("should trigger an automation which queries the database", async () => {
b/packages/server/src/automations/tests/utilities/AutomationBuilder.ts @@ -0,0 +1,174 @@ +import { v4 as uuidv4 } from "uuid" +import { testAutomation } from "../../../api/routes/tests/utilities/TestFunctions" +import { + RowCreatedTriggerInputs, + RowCreatedTriggerOutputs, +} from "../../triggerInfo/rowSaved" +import { + RowUpdatedTriggerInputs, + RowUpdatedTriggerOutputs, +} from "../../triggerInfo/rowUpdated" +import {} from "../../steps/createRow" +import { BUILTIN_ACTION_DEFINITIONS } from "../../actions" +import { TRIGGER_DEFINITIONS } from "../../triggers" +import { + RowDeletedTriggerInputs, + RowDeletedTriggerOutputs, +} from "../../triggerInfo/rowDeleted" +import { + AutomationStepSchema, + AutomationTriggerSchema, + LoopStepInputs, + DeleteRowStepInputs, + UpdateRowStepInputs, + CreateRowStepInputs, + Automation, + AutomationTrigger, + AutomationResults, + SmtpEmailStepInputs, + ExecuteQueryStepInputs, + QueryRowsStepInputs, +} from "@budibase/types" +import {} from "../../steps/loop" +import TestConfiguration from "../../../tests/utilities/TestConfiguration" +import * as setup from "../utilities" +import { + AppActionTriggerInputs, + AppActionTriggerOutputs, +} from "../../triggerInfo/app" +import { CronTriggerOutputs } from "../../triggerInfo/cron" + +type TriggerOutputs = + | RowCreatedTriggerOutputs + | RowUpdatedTriggerOutputs + | RowDeletedTriggerOutputs + | AppActionTriggerOutputs + | CronTriggerOutputs + | undefined + +class AutomationBuilder { + private automationConfig: Automation = { + name: "", + definition: { + steps: [], + trigger: {} as AutomationTrigger, + }, + type: "automation", + appId: setup.getConfig().getAppId(), + } + private config: TestConfiguration = setup.getConfig() + private triggerOutputs: TriggerOutputs + private triggerSet: boolean = false + + constructor(options: { name?: string } = {}) { + this.automationConfig.name = options.name || `Test Automation ${uuidv4()}` + } + + // TRIGGERS + rowSaved(inputs: 
RowCreatedTriggerInputs, outputs: RowCreatedTriggerOutputs) { + this.triggerOutputs = outputs + return this.trigger(TRIGGER_DEFINITIONS.ROW_SAVED, inputs, outputs) + } + + rowUpdated( + inputs: RowUpdatedTriggerInputs, + outputs: RowUpdatedTriggerOutputs + ) { + this.triggerOutputs = outputs + return this.trigger(TRIGGER_DEFINITIONS.ROW_UPDATED, inputs, outputs) + } + + rowDeleted( + inputs: RowDeletedTriggerInputs, + outputs: RowDeletedTriggerOutputs + ) { + this.triggerOutputs = outputs + return this.trigger(TRIGGER_DEFINITIONS.ROW_DELETED, inputs, outputs) + } + + appAction(outputs: AppActionTriggerOutputs, inputs?: AppActionTriggerInputs) { + this.triggerOutputs = outputs + return this.trigger(TRIGGER_DEFINITIONS.APP, inputs, outputs) + } + + // STEPS + createRow(inputs: CreateRowStepInputs): this { + return this.step(BUILTIN_ACTION_DEFINITIONS.CREATE_ROW, inputs) + } + + updateRow(inputs: UpdateRowStepInputs): this { + return this.step(BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW, inputs) + } + + deleteRow(inputs: DeleteRowStepInputs): this { + return this.step(BUILTIN_ACTION_DEFINITIONS.DELETE_ROW, inputs) + } + + sendSmtpEmail(inputs: SmtpEmailStepInputs): this { + return this.step(BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP, inputs) + } + + executeQuery(inputs: ExecuteQueryStepInputs): this { + return this.step(BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY, inputs) + } + + queryRows(inputs: QueryRowsStepInputs): this { + return this.step(BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS, inputs) + } + loop(inputs: LoopStepInputs): this { + return this.step(BUILTIN_ACTION_DEFINITIONS.LOOP, inputs) + } + + private trigger( + triggerSchema: AutomationTriggerSchema, + inputs?: T, + outputs?: TriggerOutputs + ): this { + if (this.triggerSet) { + throw new Error("Only one trigger can be set for an automation.") + } + this.automationConfig.definition.trigger = { + ...triggerSchema, + inputs: inputs || {}, + id: uuidv4(), + } + this.triggerOutputs = outputs + this.triggerSet = true + + 
return this + } + + private step( + stepSchema: AutomationStepSchema, + inputs: T + ): this { + this.automationConfig.definition.steps.push({ + ...stepSchema, + inputs, + id: uuidv4(), + }) + return this + } + + async run() { + const automation = await this.config.createAutomation(this.automationConfig) + const results = await testAutomation( + this.config, + automation, + this.triggerOutputs + ) + return this.processResults(results) + } + + private processResults(results: { body: AutomationResults }) { + results.body.steps.shift() + return { + trigger: results.body.trigger, + steps: results.body.steps, + } + } +} + +export function createAutomationBuilder(options?: { name?: string }) { + return new AutomationBuilder(options) +} diff --git a/packages/server/src/automations/tests/utilities/index.ts b/packages/server/src/automations/tests/utilities/index.ts index cd3ea289ca..7952f7a80b 100644 --- a/packages/server/src/automations/tests/utilities/index.ts +++ b/packages/server/src/automations/tests/utilities/index.ts @@ -3,6 +3,7 @@ import { context } from "@budibase/backend-core" import { BUILTIN_ACTION_DEFINITIONS, getAction } from "../../actions" import emitter from "../../../events/index" import env from "../../../environment" +import { AutomationActionStepId } from "@budibase/types" let config: TestConfig @@ -33,7 +34,7 @@ export async function runInProd(fn: any) { export async function runStep(stepId: string, inputs: any, stepContext?: any) { async function run() { - let step = await getAction(stepId) + let step = await getAction(stepId as AutomationActionStepId) expect(step).toBeDefined() if (!step) { throw new Error("No step found") @@ -41,7 +42,7 @@ export async function runStep(stepId: string, inputs: any, stepContext?: any) { return step({ context: stepContext || {}, inputs, - appId: config ? config.getAppId() : null, + appId: config ? 
config.getAppId() : "", // don't really need an API key, mocked out usage quota, not being tested here apiKey, emitter, diff --git a/packages/server/src/automations/triggerInfo/app.ts b/packages/server/src/automations/triggerInfo/app.ts index bfd284cc53..c1945eb23d 100644 --- a/packages/server/src/automations/triggerInfo/app.ts +++ b/packages/server/src/automations/triggerInfo/app.ts @@ -39,3 +39,11 @@ export const definition: AutomationTriggerSchema = { }, type: AutomationStepType.TRIGGER, } + +export type AppActionTriggerInputs = { + fields: object +} + +export type AppActionTriggerOutputs = { + fields: object +} diff --git a/packages/server/src/automations/triggerInfo/cron.ts b/packages/server/src/automations/triggerInfo/cron.ts index be4b60cb27..781c1a8708 100644 --- a/packages/server/src/automations/triggerInfo/cron.ts +++ b/packages/server/src/automations/triggerInfo/cron.ts @@ -38,3 +38,11 @@ export const definition: AutomationTriggerSchema = { }, type: AutomationStepType.TRIGGER, } + +export type CronTriggerInputs = { + cron: string +} + +export type CronTriggerOutputs = { + timestamp: number +} diff --git a/packages/server/src/automations/triggerInfo/rowDeleted.ts b/packages/server/src/automations/triggerInfo/rowDeleted.ts index 06e53ce63f..0ebf908ec1 100644 --- a/packages/server/src/automations/triggerInfo/rowDeleted.ts +++ b/packages/server/src/automations/triggerInfo/rowDeleted.ts @@ -5,6 +5,7 @@ import { AutomationTriggerSchema, AutomationTriggerStepId, AutomationEventType, + Row, } from "@budibase/types" export const definition: AutomationTriggerSchema = { @@ -39,3 +40,11 @@ export const definition: AutomationTriggerSchema = { }, type: AutomationStepType.TRIGGER, } + +export type RowDeletedTriggerInputs = { + tableId: string +} + +export type RowDeletedTriggerOutputs = { + row: Row +} diff --git a/packages/server/src/automations/triggerInfo/rowSaved.ts b/packages/server/src/automations/triggerInfo/rowSaved.ts index d128934dcc..93f036d13a 100644 --- 
+import { SearchFilters } from "@budibase/types"
"@budibase/types" describe("automationUtils", () => { describe("substituteLoopStep", () => { diff --git a/packages/server/src/definitions/automations.ts b/packages/server/src/definitions/automations.ts index c205149a5b..2a51c737f2 100644 --- a/packages/server/src/definitions/automations.ts +++ b/packages/server/src/definitions/automations.ts @@ -1,9 +1,8 @@ -import { AutomationResults, AutomationStep } from "@budibase/types" - -export enum LoopStepType { - ARRAY = "Array", - STRING = "String", -} +import { + AutomationResults, + AutomationStep, + LoopStepType, +} from "@budibase/types" export interface LoopStep extends AutomationStep { inputs: LoopInput diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts index 16ab049eb4..8f67ad1af9 100644 --- a/packages/server/src/tests/utilities/structures.ts +++ b/packages/server/src/tests/utilities/structures.ts @@ -25,8 +25,9 @@ import { Webhook, WebhookActionType, AutomationEventType, + LoopStepType, } from "@budibase/types" -import { LoopInput, LoopStepType } from "../../definitions/automations" +import { LoopInput } from "../../definitions/automations" import { merge } from "lodash" import { generator } from "@budibase/backend-core/tests" diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index a7cf71de4b..2e95542229 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -16,6 +16,7 @@ import { AutomationErrors, MAX_AUTOMATION_RECURRING_ERRORS } from "../constants" import { storeLog } from "../automations/logging" import { Automation, + AutomationActionStepId, AutomationData, AutomationJob, AutomationMetadata, @@ -108,7 +109,7 @@ class Orchestrator { return triggerOutput } - async getStepFunctionality(stepId: string) { + async getStepFunctionality(stepId: AutomationActionStepId) { let step = await actions.getAction(stepId) if (step == null) { throw `Cannot find 
automation step by name ${stepId}` @@ -422,7 +423,9 @@ class Orchestrator { continue } - let stepFn = await this.getStepFunctionality(step.stepId) + let stepFn = await this.getStepFunctionality( + step.stepId as AutomationActionStepId + ) let inputs = await processObject(originalStepInput, this._context) inputs = automationUtils.cleanInputValues( inputs, diff --git a/packages/server/src/utilities/workerRequests.ts b/packages/server/src/utilities/workerRequests.ts index 474f857b0a..0f487d9f31 100644 --- a/packages/server/src/utilities/workerRequests.ts +++ b/packages/server/src/utilities/workerRequests.ts @@ -103,8 +103,8 @@ export async function sendSmtpEmail({ from: string subject: string contents: string - cc: string - bcc: string + cc?: string + bcc?: string automation: boolean attachments?: EmailAttachment[] invite?: EmailInvite diff --git a/packages/types/src/documents/app/automation.ts b/packages/types/src/documents/app/automation/automation.ts similarity index 93% rename from packages/types/src/documents/app/automation.ts rename to packages/types/src/documents/app/automation/automation.ts index d5d7fe667c..d8fad4c8e8 100644 --- a/packages/types/src/documents/app/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -1,9 +1,9 @@ -import { Document } from "../document" +import { Document } from "../../document" import { EventEmitter } from "events" -import { User } from "../global" +import { User } from "../../global" import { ReadStream } from "fs" -import { Row } from "./row" -import { Table } from "./table" +import { Row } from "../row" +import { Table } from "../table" export enum AutomationIOType { OBJECT = "object", @@ -93,6 +93,7 @@ export interface EmailAttachment { } export interface SendEmailOpts { + to?: string // workspaceId If finer grain controls being used then this will lookup config for workspace. workspaceId?: string // user If sending to an existing user the object can be provided, this is used in the context. 
@@ -102,7 +103,7 @@ export interface SendEmailOpts { // contents If sending a custom email then can supply contents which will be added to it. contents?: string // subject A custom subject can be specified if the config one is not desired. - subject?: string + subject: string // info Pass in a structure of information to be stored alongside the invitation. info?: any cc?: boolean @@ -242,14 +243,18 @@ export interface AutomationLogPage { nextPage?: string } -export type AutomationStepInput = { - inputs: Record +export interface AutomationStepInputBase { context: Record emitter: EventEmitter appId: string apiKey?: string } +export type ActionImplementation = ( + params: { + inputs: TInputs + } & AutomationStepInputBase +) => Promise export interface AutomationMetadata extends Document { errorCount?: number automationChainCount?: number @@ -286,3 +291,8 @@ export type UpdatedRowEventEmitter = { table: Table appId: string } + +export enum LoopStepType { + ARRAY = "Array", + STRING = "String", +} diff --git a/packages/types/src/documents/app/automation/index.ts b/packages/types/src/documents/app/automation/index.ts new file mode 100644 index 0000000000..017596670d --- /dev/null +++ b/packages/types/src/documents/app/automation/index.ts @@ -0,0 +1,2 @@ +export * from "./automation" +export * from "./schema" diff --git a/packages/types/src/documents/app/automation/schema.ts b/packages/types/src/documents/app/automation/schema.ts new file mode 100644 index 0000000000..0da82f2f6e --- /dev/null +++ b/packages/types/src/documents/app/automation/schema.ts @@ -0,0 +1,320 @@ +import { SortOrder } from "../../../api" +import { EmptyFilterOption, Hosting, SearchFilters } from "../../../sdk" +import { HttpMethod } from "../query" +import { Row } from "../row" +import { + AutomationActionStepId, + AutomationResults, + EmailAttachment, + LoopStepType, + ActionImplementation, +} from "./automation" + +export type ActionImplementations = { + [AutomationActionStepId.COLLECT]: 
ActionImplementation< + CollectStepInputs, + CollectStepOutputs + > + [AutomationActionStepId.CREATE_ROW]: ActionImplementation< + CreateRowStepInputs, + CreateRowStepOutputs + > + [AutomationActionStepId.DELAY]: ActionImplementation< + DelayStepInputs, + DelayStepOutputs + > + [AutomationActionStepId.DELETE_ROW]: ActionImplementation< + DeleteRowStepInputs, + DeleteRowStepOutputs + > + + [AutomationActionStepId.EXECUTE_QUERY]: ActionImplementation< + ExecuteQueryStepInputs, + ExecuteQueryStepOutputs + > + [AutomationActionStepId.EXECUTE_SCRIPT]: ActionImplementation< + ExecuteScriptStepInputs, + ExecuteScriptStepOutputs + > + [AutomationActionStepId.FILTER]: ActionImplementation< + FilterStepInputs, + FilterStepOutputs + > + [AutomationActionStepId.QUERY_ROWS]: ActionImplementation< + QueryRowsStepInputs, + QueryRowsStepOutputs + > + [AutomationActionStepId.SEND_EMAIL_SMTP]: ActionImplementation< + SmtpEmailStepInputs, + BaseAutomationOutputs + > + [AutomationActionStepId.SERVER_LOG]: ActionImplementation< + ServerLogStepInputs, + ServerLogStepOutputs + > + [AutomationActionStepId.TRIGGER_AUTOMATION_RUN]: ActionImplementation< + TriggerAutomationStepInputs, + TriggerAutomationStepOutputs + > + [AutomationActionStepId.UPDATE_ROW]: ActionImplementation< + UpdateRowStepInputs, + UpdateRowStepOutputs + > + [AutomationActionStepId.OUTGOING_WEBHOOK]: ActionImplementation< + OutgoingWebhookStepInputs, + ExternalAppStepOutputs + > + [AutomationActionStepId.discord]: ActionImplementation< + DiscordStepInputs, + ExternalAppStepOutputs + > + [AutomationActionStepId.slack]: ActionImplementation< + SlackStepInputs, + ExternalAppStepOutputs + > + + [AutomationActionStepId.zapier]: ActionImplementation< + ZapierStepInputs, + ZapierStepOutputs + > + [AutomationActionStepId.integromat]: ActionImplementation< + MakeIntegrationInputs, + ExternalAppStepOutputs + > + [AutomationActionStepId.n8n]: ActionImplementation< + n8nStepInputs, + ExternalAppStepOutputs + > +} & (T extends 
"self" + ? { + [AutomationActionStepId.EXECUTE_BASH]: ActionImplementation< + BashStepInputs, + BashStepOutputs + > + [AutomationActionStepId.OPENAI]: ActionImplementation< + OpenAIStepInputs, + OpenAIStepOutputs + > + } + : {}) + +export type BaseAutomationOutputs = { + success?: boolean + response?: { + [key: string]: any + message?: string + } +} + +export type ExternalAppStepOutputs = { + httpStatus?: number + response: string + success: boolean +} + +export type BashStepInputs = { + code: string +} + +export type BashStepOutputs = BaseAutomationOutputs & { + stdout?: string +} + +export type CollectStepInputs = { + collection: string +} + +export type CollectStepOutputs = BaseAutomationOutputs & { + value?: any +} + +export type CreateRowStepInputs = { + row: Row +} + +export type CreateRowStepOutputs = BaseAutomationOutputs & { + row?: Row + id?: string + revision?: string +} + +export type DelayStepInputs = { + time: number +} + +export type DelayStepOutputs = BaseAutomationOutputs + +export type DeleteRowStepInputs = { + tableId: string + id: string + revision?: string +} + +export type DeleteRowStepOutputs = BaseAutomationOutputs & { + row?: Row +} + +export type DiscordStepInputs = { + url: string + username?: string + avatar_url?: string + content: string +} + +export type ExecuteQueryStepInputs = { + query: { + queryId: string + } +} + +export type ExecuteQueryStepOutputs = BaseAutomationOutputs & { + info?: any +} + +export type ExecuteScriptStepInputs = { + code: string +} + +export type ExecuteScriptStepOutputs = BaseAutomationOutputs & { + value?: string +} + +export type FilterStepInputs = { + field: any + condition: string + value: any +} + +export type FilterStepOutputs = BaseAutomationOutputs & { + result: boolean + refValue?: any + comparisonValue?: any +} + +export type LoopStepInputs = { + option: LoopStepType + binding: any + iterations?: number + failure?: string +} + +export type LoopStepOutputs = { + items: string + success: boolean + 
iterations: number +} + +export type MakeIntegrationInputs = { + url: string + body: any +} + +export type n8nStepInputs = { + url: string + method: HttpMethod + authorization: string + body: any +} + +export type OpenAIStepInputs = { + prompt: string + model: Model +} + +enum Model { + GPT_35_TURBO = "gpt-3.5-turbo", + // will only work with api keys that have access to the GPT4 API + GPT_4 = "gpt-4", +} + +export type OpenAIStepOutputs = Omit & { + response?: string | null +} + +export type QueryRowsStepInputs = { + tableId: string + filters?: SearchFilters + "filters-def"?: any + sortColumn?: string + sortOrder?: SortOrder + limit?: number + onEmptyFilter?: EmptyFilterOption +} + +export type QueryRowsStepOutputs = BaseAutomationOutputs & { + rows?: Row[] +} + +export type SmtpEmailStepInputs = { + to: string + from: string + subject: string + contents: string + cc: string + bcc: string + addInvite?: boolean + startTime: Date + endTime: Date + summary: string + location?: string + url?: string + attachments?: EmailAttachment[] +} +export type ServerLogStepInputs = { + text: string +} + +export type ServerLogStepOutputs = BaseAutomationOutputs & { + message: string +} +export type SlackStepInputs = { + url: string + text: string +} + +export type TriggerAutomationStepInputs = { + automation: { + automationId: string + } + timeout: number +} + +export type TriggerAutomationStepOutputs = BaseAutomationOutputs & { + value?: AutomationResults["steps"] +} + +export type UpdateRowStepInputs = { + meta: Record + row: Row + rowId: string +} + +export type UpdateRowStepOutputs = BaseAutomationOutputs & { + row?: Row + id?: string + revision?: string +} + +export type ZapierStepInputs = { + url: string + body: any +} + +export type ZapierStepOutputs = Omit & { + response: string +} + +enum RequestType { + POST = "POST", + GET = "GET", + PUT = "PUT", + DELETE = "DELETE", + PATCH = "PATCH", +} + +export type OutgoingWebhookStepInputs = { + requestMethod: RequestType + url: 
string + requestBody: string + headers: string +} From 0bd18a2832c886e433f6cc7d1ce36b992c8b7fbe Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 1 Aug 2024 09:34:56 +0100 Subject: [PATCH 51/94] wip trigger support --- .../server/src/integrations/base/types.ts | 8 +++++++ packages/server/src/integrations/oracle.ts | 21 +++++++++++++++---- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/packages/server/src/integrations/base/types.ts b/packages/server/src/integrations/base/types.ts index 7144d20206..463d73444b 100644 --- a/packages/server/src/integrations/base/types.ts +++ b/packages/server/src/integrations/base/types.ts @@ -104,6 +104,14 @@ export interface OracleColumnsResponse { SEARCH_CONDITION: null | string } +export interface OracleTriggersResponse { + TABLE_NAME: string + TRIGGER_NAME: string + TRIGGER_TYPE: string + TRIGGERING_EVENT: string + TRIGGER_BODY: string +} + /** * An oracle constraint */ diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 691c5167a7..956526e8cf 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -31,7 +31,12 @@ import oracledb, { ExecuteOptions, Result, } from "oracledb" -import { OracleTable, OracleColumn, OracleColumnsResponse } from "./base/types" +import { + OracleTable, + OracleColumn, + OracleColumnsResponse, + OracleTriggersResponse, +} from "./base/types" import { sql } from "@budibase/backend-core" const Sql = sql.Sql @@ -111,7 +116,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { private readonly config: OracleConfig private index: number = 1 - private readonly COLUMNS_SQL = ` + private static readonly COLUMNS_SQL = ` SELECT tabs.table_name, cols.column_name, @@ -139,6 +144,11 @@ class OracleIntegration extends Sql implements DatasourcePlus { (cons.status = 'ENABLED' OR cons.status IS NULL) ` + + private static readonly TRIGGERS_SQL = ` + SELECT table_name, trigger_name, 
trigger_type, triggering_event, trigger_body FROM all_triggers WHERE status = 'ENABLED'