1
0
Fork 0
mirror of synced 2024-08-05 13:21:26 +12:00

Merge pull request #13886 from Budibase/fix/BUDI-8258-update-presence

Make sure nullable can be updated and then re-fetched
This commit is contained in:
Michael Drury 2024-06-07 18:26:38 +01:00 committed by GitHub
commit 17e62586c9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 114 additions and 42 deletions

View file

@@ -260,11 +260,12 @@ describe("/datasources", () => {
})
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
name: stringName,
type: FieldType.STRING,
constraints: {
presence: true,
@@ -339,7 +340,7 @@ describe("/datasources", () => {
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
@@ -355,6 +356,10 @@ describe("/datasources", () => {
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
// the constraint will be unset - as the DB doesn't recognise it as not null
if (fieldName === stringName) {
field.constraints = {}
}
acc[fieldName] = expect.objectContaining({
...field,
})

View file

@@ -1049,12 +1049,11 @@ describe("postgres integrations", () => {
it("recognises when a table has no primary key", async () => {
await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
const response = await makeRequest(
"post",
`/api/datasources/${datasource._id}/schema`
)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(response.body.errors).toEqual({
expect(response.errors).toEqual({
[tableName]: "Table must have a primary key.",
})
})
@@ -1065,12 +1064,11 @@ describe("postgres integrations", () => {
`CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
)
const response = await makeRequest(
"post",
`/api/datasources/${datasource._id}/schema`
)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(response.body.errors).toEqual({
expect(response.errors).toEqual({
[tableName]: "Table contains invalid columns.",
})
})
@@ -1095,15 +1093,14 @@ describe("postgres integrations", () => {
`
)
const response = await makeRequest(
"post",
`/api/datasources/${datasource._id}/schema`
)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const table = response.body.datasource.entities[tableName]
const table = response.datasource.entities?.[tableName]
expect(table).toBeDefined()
expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
expect(table?.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
})
})
@@ -1167,20 +1164,16 @@ describe("postgres integrations", () => {
rawDatasource,
`CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
`/api/datasources/${datasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
}
)
expect(response.status).toBe(200)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
tablesFilter: [repeated_table_name],
})
expect(
response.body.datasource.entities[repeated_table_name].schema
response.datasource.entities?.[repeated_table_name].schema
).toBeDefined()
const schema =
response.body.datasource.entities[repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
const schema = response.datasource.entities?.[repeated_table_name].schema
expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
})
})
@@ -1198,16 +1191,14 @@ describe("postgres integrations", () => {
})
it("should handle binary columns", async () => {
const response = await makeRequest(
"post",
`/api/datasources/${datasource._id}/schema`
)
expect(response.body).toBeDefined()
expect(response.body.datasource.entities).toBeDefined()
const table = response.body.datasource.entities["binarytable"]
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(response.datasource.entities).toBeDefined()
const table = response.datasource.entities?.["binarytable"]
expect(table).toBeDefined()
expect(table.schema.id.externalType).toBe("bytea")
const row = await config.api.row.save(table._id, {
expect(table?.schema.id.externalType).toBe("bytea")
const row = await config.api.row.save(table?._id!, {
id: "1111",
column1: "hello",
column2: 222,
@@ -1217,4 +1208,48 @@ describe("postgres integrations", () => {
expect(JSON.parse(decoded)[0]).toBe("1111")
})
})
describe("check fetching null/not null table", () => {
beforeAll(async () => {
await rawQuery(
rawDatasource,
`CREATE TABLE nullableTable (
order_id SERIAL PRIMARY KEY,
order_number INT NOT NULL
);
`
)
})
it("should be able to change the table to allow nullable and refetch this", async () => {
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entities = response.datasource.entities
expect(entities).toBeDefined()
const nullableTable = entities?.["nullabletable"]
expect(nullableTable).toBeDefined()
expect(
nullableTable?.schema["order_number"].constraints?.presence
).toEqual(true)
// need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
// is aware of - therefore we can try to fetch and make sure BB updates correctly
await rawQuery(
rawDatasource,
`ALTER TABLE nullableTable
ALTER COLUMN order_number DROP NOT NULL;
`
)
const responseAfter = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entitiesAfter = responseAfter.datasource.entities
expect(entitiesAfter).toBeDefined()
const nullableTableAfter = entitiesAfter?.["nullabletable"]
expect(nullableTableAfter).toBeDefined()
expect(
nullableTableAfter?.schema["order_number"].constraints?.presence
).toBeUndefined()
})
})
})

View file

@@ -280,12 +280,35 @@ function copyExistingPropsOver(
utils.unreachable(existingColumnType)
}
// copy the BB schema in case of special props
if (shouldKeepSchema) {
const fetchedColumnDefinition: FieldSchema | undefined =
table.schema[key]
table.schema[key] = {
...existingTableSchema[key],
externalType:
existingTableSchema[key].externalType ||
table.schema[key]?.externalType,
autocolumn: fetchedColumnDefinition?.autocolumn,
} as FieldSchema
// check constraints which can be fetched from the DB (they could be updated)
if (fetchedColumnDefinition?.constraints) {
// inclusions are the enum values (select/options)
const fetchedConstraints = fetchedColumnDefinition.constraints
const oldConstraints = table.schema[key].constraints
table.schema[key].constraints = {
...table.schema[key].constraints,
inclusion: fetchedConstraints.inclusion?.length
? fetchedConstraints.inclusion
: oldConstraints?.inclusion,
}
// true or undefined - consistent with old API
if (fetchedConstraints.presence) {
table.schema[key].constraints!.presence =
fetchedConstraints.presence
} else if (oldConstraints?.presence === true) {
delete table.schema[key].constraints?.presence
}
}
}
}

View file

@@ -76,11 +76,20 @@ export class DatasourceAPI extends TestAPI {
})
}
fetchSchema = async (id: string, expectations?: Expectations) => {
fetchSchema = async (
{
datasourceId,
tablesFilter,
}: { datasourceId: string; tablesFilter?: string[] },
expectations?: Expectations
) => {
return await this._post<BuildSchemaFromSourceResponse>(
`/api/datasources/${id}/schema`,
`/api/datasources/${datasourceId}/schema`,
{
expectations,
expectations: expectations,
body: {
tablesFilter: tablesFilter,
},
}
)
}