
Merge branch 'master' of github.com:Budibase/budibase into feature/count-rows-to-return

mike12345567 2024-06-18 11:07:34 +01:00
commit 75bf3c78e6
9 changed files with 204 additions and 316 deletions

View file

@@ -110,7 +110,7 @@
     "server-destroy": "1.0.1",
     "snowflake-promise": "^4.5.0",
     "socket.io": "4.6.1",
-    "tar": "6.1.15",
+    "tar": "6.2.1",
     "to-json-schema": "0.2.5",
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",

View file

@@ -10,37 +10,11 @@ import * as setup from "../utilities"
 import {
   DatabaseName,
   getDatasource,
-  rawQuery,
+  knexClient,
 } from "../../../../integrations/tests/utils"
 import { Expectations } from "src/tests/utilities/api/base"
 import { events } from "@budibase/backend-core"
+import { Knex } from "knex"
 
-const createTableSQL: Record<string, string> = {
-  [SourceName.POSTGRES]: `
-    CREATE TABLE test_table (
-      id serial PRIMARY KEY,
-      name VARCHAR ( 50 ) NOT NULL,
-      birthday TIMESTAMP,
-      number INT
-    );`,
-  [SourceName.MYSQL]: `
-    CREATE TABLE test_table (
-      id INT AUTO_INCREMENT PRIMARY KEY,
-      name VARCHAR(50) NOT NULL,
-      birthday TIMESTAMP,
-      number INT
-    );`,
-  [SourceName.SQL_SERVER]: `
-    CREATE TABLE test_table (
-      id INT IDENTITY(1,1) PRIMARY KEY,
-      name NVARCHAR(50) NOT NULL,
-      birthday DATETIME,
-      number INT
-    );`,
-}
-
-const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')`
-const dropTableSQL = `DROP TABLE test_table;`
-
 describe.each(
   [
@@ -53,6 +27,7 @@ describe.each(
   const config = setup.getConfig()
   let rawDatasource: Datasource
   let datasource: Datasource
+  let client: Knex
 
   async function createQuery(
     query: Partial<Query>,
@@ -82,21 +57,34 @@ describe.each(
     rawDatasource = await dsProvider
     datasource = await config.api.datasource.create(rawDatasource)
 
-    // The Datasource API does not return the password, but we need
-    // it later to connect to the underlying database, so we fill it
-    // back in here.
+    // The Datasource API does not return the password, but we need it later to
+    // connect to the underlying database, so we fill it back in here.
     datasource.config!.password = rawDatasource.config!.password
 
-    await rawQuery(datasource, createTableSQL[datasource.source])
-    await rawQuery(datasource, insertSQL)
+    client = await knexClient(rawDatasource)
+
+    await client.schema.dropTableIfExists("test_table")
+    await client.schema.createTable("test_table", table => {
+      table.increments("id").primary()
+      table.string("name")
+      table.timestamp("birthday")
+      table.integer("number")
+    })
+
+    await client("test_table").insert([
+      { name: "one" },
+      { name: "two" },
+      { name: "three" },
+      { name: "four" },
+      { name: "five" },
+    ])
 
     jest.clearAllMocks()
   })
 
   afterEach(async () => {
     const ds = await config.api.datasource.get(datasource._id!)
-    config.api.datasource.delete(ds)
-    await rawQuery(datasource, dropTableSQL)
+    await config.api.datasource.delete(ds)
   })
 
   afterAll(async () => {
@@ -207,7 +195,7 @@ describe.each(
         },
       })
 
-      await config.publish()
+      await config.api.application.publish(config.getAppId())
 
       const prodQuery = await config.api.query.getProd(query._id!)
       expect(prodQuery._id).toEqual(query._id)
@@ -429,11 +417,11 @@ describe.each(
         },
       ])
 
-      const rows = await rawQuery(
-        datasource,
-        "SELECT * FROM test_table WHERE name = 'baz'"
-      )
+      const rows = await client("test_table").where({ name: "baz" }).select()
       expect(rows).toHaveLength(1)
+      for (const row of rows) {
+        expect(row).toMatchObject({ name: "baz" })
+      }
     })
 
     it("should not allow handlebars as parameters", async () => {
@@ -490,11 +478,14 @@ describe.each(
         expect(result.data).toEqual([{ created: true }])
 
-        const rows = await rawQuery(
-          datasource,
-          `SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
-        )
+        const rows = await client("test_table")
+          .where({ birthday: datetimeStr })
+          .select()
         expect(rows).toHaveLength(1)
+        for (const row of rows) {
+          expect(new Date(row.birthday)).toEqual(date)
+        }
       }
     )
@@ -522,10 +513,9 @@ describe.each(
        expect(result.data).toEqual([{ created: true }])
 
-        const rows = await rawQuery(
-          datasource,
-          `SELECT * FROM test_table WHERE name = '${notDateStr}'`
-        )
+        const rows = await client("test_table")
+          .where({ name: notDateStr })
+          .select()
         expect(rows).toHaveLength(1)
       }
     )
@@ -660,10 +650,7 @@ describe.each(
         },
       ])
 
-      const rows = await rawQuery(
-        datasource,
-        "SELECT * FROM test_table WHERE id = 1"
-      )
+      const rows = await client("test_table").where({ id: 1 }).select()
       expect(rows).toEqual([
         { id: 1, name: "foo", birthday: null, number: null },
       ])
@@ -731,10 +718,7 @@ describe.each(
         },
       ])
 
-      const rows = await rawQuery(
-        datasource,
-        "SELECT * FROM test_table WHERE id = 1"
-      )
+      const rows = await client("test_table").where({ id: 1 }).select()
      expect(rows).toHaveLength(0)
    })
  })

View file

@@ -1,19 +1,12 @@
-import fetch from "node-fetch"
-import {
-  generateMakeRequest,
-  MakeRequestResponse,
-} from "../api/routes/public/tests/utils"
 import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType } from "@budibase/types"
 import {
   DatabaseName,
   getDatasource,
-  rawQuery,
+  knexClient,
 } from "../integrations/tests/utils"
 import { generator } from "@budibase/backend-core/tests"
-import { tableForDatasource } from "../../src/tests/utilities/structures"
-
-// @ts-ignore
-fetch.mockSearch()
+import { Knex } from "knex"
 
 function uniqueTableName(length?: number): string {
   return generator
@@ -24,129 +17,74 @@ function uniqueTableName(length?: number): string {
 
 const config = setup.getConfig()!
 
-jest.mock("../websockets", () => ({
-  clientAppSocket: jest.fn(),
-  gridAppSocket: jest.fn(),
-  initialise: jest.fn(),
-  builderSocket: {
-    emitTableUpdate: jest.fn(),
-    emitTableDeletion: jest.fn(),
-    emitDatasourceUpdate: jest.fn(),
-    emitDatasourceDeletion: jest.fn(),
-    emitScreenUpdate: jest.fn(),
-    emitAppMetadataUpdate: jest.fn(),
-    emitAppPublish: jest.fn(),
-  },
-}))
-
 describe("mysql integrations", () => {
-  let makeRequest: MakeRequestResponse,
-    rawDatasource: Datasource,
-    datasource: Datasource
+  let datasource: Datasource
+  let client: Knex
 
   beforeAll(async () => {
     await config.init()
-    const apiKey = await config.generateApiKey()
-    makeRequest = generateMakeRequest(apiKey, true)
-    rawDatasource = await getDatasource(DatabaseName.MYSQL)
+    const rawDatasource = await getDatasource(DatabaseName.MYSQL)
     datasource = await config.api.datasource.create(rawDatasource)
+    client = await knexClient(rawDatasource)
   })
 
   afterAll(config.end)
 
-  it("validate table schema", async () => {
-    // Creating a table so that `entities` is populated.
-    await config.api.table.save(tableForDatasource(datasource))
-    const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
-
-    expect(res.status).toBe(200)
-    expect(res.body).toEqual({
-      config: {
-        database: expect.any(String),
-        host: datasource.config!.host,
-        password: "--secret-value--",
-        port: datasource.config!.port,
-        user: "root",
-      },
-      plus: true,
-      source: "MYSQL",
-      type: "datasource_plus",
-      isSQL: true,
-      _id: expect.any(String),
-      _rev: expect.any(String),
-      createdAt: expect.any(String),
-      updatedAt: expect.any(String),
-      entities: expect.any(Object),
-    })
-  })
-
   describe("Integration compatibility with mysql search_path", () => {
-    let datasource: Datasource, rawDatasource: Datasource
+    let datasource: Datasource
+    let rawDatasource: Datasource
+    let client: Knex
+
     const database = generator.guid()
     const database2 = generator.guid()
 
     beforeAll(async () => {
       rawDatasource = await getDatasource(DatabaseName.MYSQL)
+      client = await knexClient(rawDatasource)
 
-      await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
-      await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)
+      await client.raw(`CREATE DATABASE \`${database}\`;`)
+      await client.raw(`CREATE DATABASE \`${database2}\`;`)
 
-      const pathConfig: any = {
-        ...rawDatasource,
-        config: {
-          ...rawDatasource.config!,
-          database,
-        },
-      }
-      datasource = await config.api.datasource.create(pathConfig)
+      rawDatasource.config!.database = database
+      datasource = await config.api.datasource.create(rawDatasource)
     })
 
     afterAll(async () => {
-      await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
-      await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
+      await client.raw(`DROP DATABASE \`${database}\`;`)
+      await client.raw(`DROP DATABASE \`${database2}\`;`)
     })
 
     it("discovers tables from any schema in search path", async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
-      )
-      const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: datasource,
+      await client.schema.createTable(`${database}.table1`, table => {
+        table.increments("id1").primary()
       })
-      expect(response.status).toBe(200)
-      expect(response.body.tableNames).toBeDefined()
-      expect(response.body.tableNames).toEqual(
-        expect.arrayContaining(["table1"])
-      )
+      const res = await config.api.datasource.info(datasource)
+      expect(res.tableNames).toBeDefined()
+      expect(res.tableNames).toEqual(expect.arrayContaining(["table1"]))
     })
 
     it("does not mix columns from different tables", async () => {
       const repeated_table_name = "table_same_name"
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
-      )
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
-      )
-      const response = await makeRequest(
-        "post",
-        `/api/datasources/${datasource._id}/schema`,
-        {
-          tablesFilter: [repeated_table_name],
+      await client.schema.createTable(
+        `${database}.${repeated_table_name}`,
+        table => {
+          table.increments("id").primary()
+          table.string("val1")
         }
       )
-      expect(response.status).toBe(200)
-      expect(
-        response.body.datasource.entities[repeated_table_name].schema
-      ).toBeDefined()
-      const schema =
-        response.body.datasource.entities[repeated_table_name].schema
+      await client.schema.createTable(
+        `${database2}.${repeated_table_name}`,
+        table => {
+          table.increments("id2").primary()
+          table.string("val2")
+        }
+      )
+      const res = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+        tablesFilter: [repeated_table_name],
+      })
+      expect(res.datasource.entities![repeated_table_name].schema).toBeDefined()
+      const schema = res.datasource.entities![repeated_table_name].schema
       expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
     })
   })
@@ -159,28 +97,27 @@ describe("mysql integrations", () => {
    })
 
    afterEach(async () => {
-      await rawQuery(rawDatasource, `DROP TABLE IF EXISTS \`${tableName}\``)
+      await client.schema.dropTableIfExists(tableName)
    })
 
    it("recognises enum columns as options", async () => {
      const enumColumnName = "status"
 
-      const createTableQuery = `
-        CREATE TABLE \`${tableName}\` (
-          \`order_id\` INT AUTO_INCREMENT PRIMARY KEY,
-          \`customer_name\` VARCHAR(100) NOT NULL,
-          \`${enumColumnName}\` ENUM('pending', 'processing', 'shipped', 'delivered', 'cancelled')
-        );
-      `
-      await rawQuery(rawDatasource, createTableQuery)
+      await client.schema.createTable(tableName, table => {
+        table.increments("order_id").primary()
+        table.string("customer_name", 100).notNullable()
+        table.enum(
+          enumColumnName,
+          ["pending", "processing", "shipped", "delivered", "cancelled"],
+          { useNative: true, enumName: `${tableName}_${enumColumnName}` }
+        )
+      })
 
-      const response = await makeRequest(
-        "post",
-        `/api/datasources/${datasource._id}/schema`
-      )
+      const res = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })
 
-      const table = response.body.datasource.entities[tableName]
+      const table = res.datasource.entities![tableName]
 
      expect(table).toBeDefined()
      expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
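
One subtlety in the enum hunk above: table.enum behaves differently per dialect. A hedged sketch of what the call compiles to (per knex's documented behavior; exact DDL can vary by knex version):

    await client.schema.createTable("orders", table => {
      // MySQL emits a native ENUM('pending', ...) column directly; useNative and
      // enumName only matter on Postgres, where knex first creates a named type
      // (CREATE TYPE orders_status AS ENUM ...) and then references it.
      table.enum("status", ["pending", "processing", "shipped"], {
        useNative: true,
        enumName: "orders_status",
      })
    })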

View file

@@ -1,9 +1,3 @@
-import fetch from "node-fetch"
-import {
-  generateMakeRequest,
-  MakeRequestResponse,
-} from "../api/routes/public/tests/utils"
-
 import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType } from "@budibase/types"
 import _ from "lodash"
@@ -11,29 +5,21 @@ import { generator } from "@budibase/backend-core/tests"
 import {
   DatabaseName,
   getDatasource,
-  rawQuery,
+  knexClient,
 } from "../integrations/tests/utils"
+import { Knex } from "knex"
 
-// @ts-ignore
-fetch.mockSearch()
-
 const config = setup.getConfig()!
 
-jest.mock("../websockets")
-
 describe("postgres integrations", () => {
-  let makeRequest: MakeRequestResponse,
-    rawDatasource: Datasource,
-    datasource: Datasource
+  let datasource: Datasource
+  let client: Knex
 
   beforeAll(async () => {
     await config.init()
-    const apiKey = await config.generateApiKey()
-    makeRequest = generateMakeRequest(apiKey, true)
-    rawDatasource = await getDatasource(DatabaseName.POSTGRES)
+    const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
     datasource = await config.api.datasource.create(rawDatasource)
+    client = await knexClient(rawDatasource)
   })
 
   afterAll(config.end)
@@ -46,11 +32,13 @@ describe("postgres integrations", () => {
    })
 
    afterEach(async () => {
-      await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
+      await client.schema.dropTableIfExists(tableName)
    })
 
    it("recognises when a table has no primary key", async () => {
-      await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
+      await client.schema.createTable(tableName, table => {
+        table.increments("id", { primaryKey: false })
+      })
 
      const response = await config.api.datasource.fetchSchema({
        datasourceId: datasource._id!,
@@ -62,10 +50,9 @@ describe("postgres integrations", () => {
    })
 
    it("recognises when a table is using a reserved column name", async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
-      )
+      await client.schema.createTable(tableName, table => {
+        table.increments("_id").primary()
+      })
 
      const response = await config.api.datasource.fetchSchema({
        datasourceId: datasource._id!,
@@ -81,20 +68,15 @@ describe("postgres integrations", () => {
        .guid()
        .replaceAll("-", "")
        .substring(0, 6)}`
-      const enumColumnName = "status"
 
-      await rawQuery(
-        rawDatasource,
-        `
-        CREATE TYPE order_status AS ENUM ('pending', 'processing', 'shipped', 'delivered', 'cancelled');
-
-        CREATE TABLE ${tableName} (
-          order_id SERIAL PRIMARY KEY,
-          customer_name VARCHAR(100) NOT NULL,
-          ${enumColumnName} order_status
-        );
-        `
-      )
+      await client.schema.createTable(tableName, table => {
+        table.increments("order_id").primary()
+        table.string("customer_name").notNullable()
+        table.enum("status", ["pending", "processing", "shipped"], {
+          useNative: true,
+          enumName: `${tableName}_status`,
+        })
+      })
 
      const response = await config.api.datasource.fetchSchema({
        datasourceId: datasource._id!,
@@ -103,69 +85,70 @@ describe("postgres integrations", () => {
      const table = response.datasource.entities?.[tableName]
 
      expect(table).toBeDefined()
-      expect(table?.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
+      expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
    })
  })
 
  describe("Integration compatibility with postgres search_path", () => {
-    let rawDatasource: Datasource,
-      datasource: Datasource,
-      schema1: string,
-      schema2: string
+    let datasource: Datasource
+    let client: Knex
+    let schema1: string
+    let schema2: string
 
    beforeEach(async () => {
      schema1 = generator.guid().replaceAll("-", "")
      schema2 = generator.guid().replaceAll("-", "")
-      rawDatasource = await getDatasource(DatabaseName.POSTGRES)
-      const dbConfig = rawDatasource.config!
+      const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
+      client = await knexClient(rawDatasource)
 
-      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
-      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)
+      await client.schema.createSchema(schema1)
+      await client.schema.createSchema(schema2)
 
-      const pathConfig: any = {
-        ...rawDatasource,
-        config: {
-          ...dbConfig,
-          schema: `${schema1}, ${schema2}`,
-        },
-      }
-      datasource = await config.api.datasource.create(pathConfig)
+      rawDatasource.config!.schema = `${schema1}, ${schema2}`
+
+      client = await knexClient(rawDatasource)
+      datasource = await config.api.datasource.create(rawDatasource)
    })
 
    afterEach(async () => {
-      await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
-      await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
+      await client.schema.dropSchema(schema1, true)
+      await client.schema.dropSchema(schema2, true)
    })
 
    it("discovers tables from any schema in search path", async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
-      )
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
-      )
-      const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: datasource,
+      await client.schema.createTable(`${schema1}.table1`, table => {
+        table.increments("id1").primary()
       })
-      expect(response.status).toBe(200)
-      expect(response.body.tableNames).toBeDefined()
-      expect(response.body.tableNames).toEqual(
+
+      await client.schema.createTable(`${schema2}.table2`, table => {
+        table.increments("id2").primary()
+      })
+
+      const response = await config.api.datasource.info(datasource)
+      expect(response.tableNames).toBeDefined()
+      expect(response.tableNames).toEqual(
        expect.arrayContaining(["table1", "table2"])
      )
    })
 
    it("does not mix columns from different tables", async () => {
      const repeated_table_name = "table_same_name"
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
-      )
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
-      )
+
+      await client.schema.createTable(
+        `${schema1}.${repeated_table_name}`,
+        table => {
+          table.increments("id").primary()
+          table.string("val1")
+        }
+      )
+
+      await client.schema.createTable(
+        `${schema2}.${repeated_table_name}`,
+        table => {
+          table.increments("id2").primary()
+          table.string("val2")
+        }
+      )
 
      const response = await config.api.datasource.fetchSchema({
@@ -182,15 +165,11 @@ describe("postgres integrations", () => {
 
  describe("check custom column types", () => {
    beforeAll(async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE binaryTable (
-          id BYTEA PRIMARY KEY,
-          column1 TEXT,
-          column2 INT
-        );
-        `
-      )
+      await client.schema.createTable("binaryTable", table => {
+        table.binary("id").primary()
+        table.string("column1")
+        table.integer("column2")
+      })
    })
 
    it("should handle binary columns", async () => {
@@ -198,7 +177,7 @@ describe("postgres integrations", () => {
        datasourceId: datasource._id!,
      })
      expect(response.datasource.entities).toBeDefined()
-      const table = response.datasource.entities?.["binarytable"]
+      const table = response.datasource.entities?.["binaryTable"]
      expect(table).toBeDefined()
      expect(table?.schema.id.externalType).toBe("bytea")
      const row = await config.api.row.save(table?._id!, {
@@ -214,14 +193,10 @@ describe("postgres integrations", () => {
 
  describe("check fetching null/not null table", () => {
    beforeAll(async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE nullableTable (
-          order_id SERIAL PRIMARY KEY,
-          order_number INT NOT NULL
-        );
-        `
-      )
+      await client.schema.createTable("nullableTable", table => {
+        table.increments("order_id").primary()
+        table.integer("order_number").notNullable()
+      })
    })
 
    it("should be able to change the table to allow nullable and refetch this", async () => {
@@ -230,25 +205,24 @@ describe("postgres integrations", () => {
      })
      const entities = response.datasource.entities
      expect(entities).toBeDefined()
-      const nullableTable = entities?.["nullabletable"]
+      const nullableTable = entities?.["nullableTable"]
      expect(nullableTable).toBeDefined()
      expect(
        nullableTable?.schema["order_number"].constraints?.presence
      ).toEqual(true)
+
      // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
      // is aware of - therefore we can try to fetch and make sure BB updates correctly
-      await rawQuery(
-        rawDatasource,
-        `ALTER TABLE nullableTable
-         ALTER COLUMN order_number DROP NOT NULL;
-        `
-      )
+      await client.schema.alterTable("nullableTable", table => {
+        table.setNullable("order_number")
+      })
+
      const responseAfter = await config.api.datasource.fetchSchema({
        datasourceId: datasource._id!,
      })
      const entitiesAfter = responseAfter.datasource.entities
      expect(entitiesAfter).toBeDefined()
-      const nullableTableAfter = entitiesAfter?.["nullabletable"]
+      const nullableTableAfter = entitiesAfter?.["nullableTable"]
      expect(nullableTableAfter).toBeDefined()
      expect(
        nullableTableAfter?.schema["order_number"].constraints?.presence
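
A detail from the "recognises when a table has no primary key" hunk above: increments() adds a primary key by default, so the test opts out with { primaryKey: false } to get an auto-incrementing column without the constraint. A minimal sketch, with a hypothetical table name:

    await client.schema.createTable("no_pk_table", table => {
      // Serial column, but no PRIMARY KEY constraint, so the schema fetch
      // should report this table as having no primary key.
      table.increments("id", { primaryKey: false })
    })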

View file

@@ -48,16 +48,16 @@ export async function getDatasources(
   return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
 }
 
-export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
+export async function knexClient(ds: Datasource) {
   switch (ds.source) {
     case SourceName.POSTGRES: {
-      return postgres.rawQuery(ds, sql)
+      return postgres.knexClient(ds)
     }
     case SourceName.MYSQL: {
-      return mysql.rawQuery(ds, sql)
+      return mysql.knexClient(ds)
     }
     case SourceName.SQL_SERVER: {
-      return mssql.rawQuery(ds, sql)
+      return mssql.knexClient(ds)
     }
     default: {
       throw new Error(`Unsupported source: ${ds.source}`)
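
The dispatcher gives call sites a single entry point per datasource, and each per-dialect helper still re-validates ds.source and throws on a mismatch, so a wrong mapping here fails loudly. A hypothetical call site:

    const client = await knexClient(datasource) // picks pg/mysql2/mssql by source
    const rows = await client("test_table").where({ id: 1 }).select()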

View file

@@ -1,9 +1,9 @@
 import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
 import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
-import { rawQuery } from "./mysql"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
+import { knexClient } from "./mysql"
 
 let ports: Promise<testContainerUtils.Port[]>
@@ -55,7 +55,8 @@ export async function getDatasource(): Promise<Datasource> {
  }
 
  const database = generator.guid().replaceAll("-", "")
-  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
+  const client = await knexClient(datasource)
+  await client.raw(`CREATE DATABASE \`${database}\``)
  datasource.config.database = database
  return datasource
}
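
This provider imports knexClient from ./mysql rather than defining its own, which works because the datasources it produces report SourceName.MYSQL (the mysql helper throws for anything else) and the container speaks the MySQL wire protocol. A hedged sketch:

    const ds = await getDatasource()      // MySQL-protocol datasource from this provider
    const client = await knexClient(ds)   // mysql2-backed knex instance
    await client.raw("SELECT VERSION()")  // returns the server's version string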

View file

@@ -1,8 +1,8 @@
 import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
-import mssql from "mssql"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
+import knex from "knex"
 
 let ports: Promise<testContainerUtils.Port[]>
@@ -49,26 +49,23 @@ export async function getDatasource(): Promise<Datasource> {
  }
 
  const database = generator.guid().replaceAll("-", "")
-  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
+  const client = await knexClient(datasource)
+  await client.raw(`CREATE DATABASE "${database}"`)
  datasource.config!.database = database
 
  return datasource
}
 
-export async function rawQuery(ds: Datasource, sql: string) {
+export async function knexClient(ds: Datasource) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.SQL_SERVER) {
-    throw new Error("Datasource source is not SQL Server")
+    throw new Error("Datasource source is not MSSQL")
  }
 
-  const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
-  const client = await pool.connect()
-  try {
-    const { recordset } = await client.query(sql)
-    return recordset
-  } finally {
-    await pool.close()
-  }
+  return knex({
+    client: "mssql",
+    connection: ds.config,
+  })
}

View file

@@ -1,9 +1,9 @@
 import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
 import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
-import mysql from "mysql2/promise"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
+import knex from "knex"
 
 let ports: Promise<testContainerUtils.Port[]>
@@ -56,12 +56,13 @@ export async function getDatasource(): Promise<Datasource> {
  }
 
  const database = generator.guid().replaceAll("-", "")
-  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
+  const client = await knexClient(datasource)
+  await client.raw(`CREATE DATABASE \`${database}\``)
  datasource.config!.database = database
  return datasource
}
 
-export async function rawQuery(ds: Datasource, sql: string) {
+export async function knexClient(ds: Datasource) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
@@ -69,11 +70,8 @@ export async function knexClient(ds: Datasource) {
    throw new Error("Datasource source is not MySQL")
  }
 
-  const connection = await mysql.createConnection(ds.config)
-  try {
-    const [rows] = await connection.query(sql)
-    return rows
-  } finally {
-    connection.end()
-  }
+  return knex({
+    client: "mysql2",
+    connection: ds.config,
+  })
}

View file

@@ -1,8 +1,8 @@
 import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
-import pg from "pg"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
+import knex from "knex"
 
 let ports: Promise<testContainerUtils.Port[]>
@@ -43,13 +43,14 @@ export async function getDatasource(): Promise<Datasource> {
  }
 
  const database = generator.guid().replaceAll("-", "")
-  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
+  const client = await knexClient(datasource)
+  await client.raw(`CREATE DATABASE "${database}"`)
  datasource.config!.database = database
  return datasource
}
 
-export async function rawQuery(ds: Datasource, sql: string) {
+export async function knexClient(ds: Datasource) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
@@ -57,12 +58,8 @@ export async function knexClient(ds: Datasource) {
    throw new Error("Datasource source is not Postgres")
  }
 
-  const client = new pg.Client(ds.config)
-  await client.connect()
-  try {
-    const { rows } = await client.query(sql)
-    return rows
-  } finally {
-    await client.end()
-  }
+  return knex({
+    client: "pg",
+    connection: ds.config,
+  })
}
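
One behavioral shift shared by all three helpers: the old rawQuery opened a connection (or pool) per call and closed it in a finally block, while knex keeps a lazily created pool alive on the returned instance, so callers now own the client's lifetime. A hypothetical shim showing the old semantics expressed via the new helper; note that .raw() returns driver-specific result shapes (pg wraps rows in an object, mysql2 returns a [rows, fields] tuple), which is one reason the tests moved to the query builder:

    export async function rawQueryCompat(ds: Datasource, sql: string) {
      const client = await knexClient(ds)
      try {
        return await client.raw(sql) // result shape differs per driver
      } finally {
        await client.destroy() // release the pool the old code closed implicitly
      }
    }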