
Fix conflicting columns issue

adrinr 2023-02-07 12:25:02 +00:00
parent bc0208bc83
commit 3d917bfe22
3 changed files with 28 additions and 41 deletions
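The commit message is terse, so here is a rough illustration of the kind of column overlap the diff below works around. All table names, columns and the knex setup are invented; only the qualified "table"."column" as "table.column" aliasing pattern comes from the code in this commit. When two joined SQL tables share a column name, selecting bare columns makes the result set ambiguous, so the builder asks for fully qualified, aliased fields, and single-row operations can now skip relationship columns altogether:

import { knex } from "knex"

const client = knex({ client: "pg" })

// Read path (relationships included): both tables expose a "name" column,
// so each field is qualified and aliased to keep the two apart.
const withRelationships = client("tasks")
  .leftJoin("products", "products.id", "tasks.product_id")
  .select([
    client.raw('"tasks"."name" as "tasks.name"'),
    client.raw('"products"."name" as "products.name"'),
  ])

// Single-row path (relationships excluded): only the base table's columns
// are requested, so no join column can conflict with them.
const baseTableOnly = client("tasks").select([
  client.raw('"tasks"."name" as "tasks.name"'),
])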

View file

@@ -45,6 +45,7 @@ export interface RunConfig {
   row?: Row
   rows?: Row[]
   tables?: Record<string, Table>
+  includeSqlRelationships?: IncludeRelationship
 }
 
 function buildFilters(
@@ -707,7 +708,9 @@ export class ExternalRequest {
       },
       resource: {
         // have to specify the fields to avoid column overlap (for SQL)
-        fields: isSql ? this.buildFields(table) : [],
+        fields: isSql
+          ? this.buildFields(table, config.includeSqlRelationships)
+          : [],
       },
       filters,
       sort,
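The new RunConfig flag is forwarded into this.buildFields, whose body is not part of this diff. Below is a minimal, hypothetical sketch of how a helper like it could honour the flag, assuming the usual Budibase Table shape (a name plus a schema keyed by column name); the function name and logic are assumptions, not the real implementation:

import { Table, IncludeRelationship } from "@budibase/types"

// Hypothetical stand-in for ExternalRequest.buildFields; the real method is
// not shown in this commit.
function buildFieldsSketch(
  table: Table,
  includeRelationships?: IncludeRelationship
): string[] {
  // Base table columns are always selected, qualified as "table.column".
  const fields = Object.keys(table.schema).map(
    column => `${table.name}.${column}`
  )
  if (includeRelationships !== IncludeRelationship.INCLUDE) {
    // EXCLUDE (or unset): single-row operations stop here, so no column
    // from a joined table can clash with the base table's columns.
    return fields
  }
  // INCLUDE: columns of related tables would be appended here as
  // "relatedTable.column" entries before returning.
  return fields
}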

View file

@@ -18,6 +18,7 @@ import {
   PaginationJson,
   Table,
   Datasource,
+  IncludeRelationship,
 } from "@budibase/types"
 import sdk from "../../../sdk"
@@ -57,6 +58,7 @@ export async function patch(ctx: BBContext) {
   return handleRequest(Operation.UPDATE, tableId, {
     id: breakRowIdField(id),
     row: inputs,
+    includeSqlRelationships: IncludeRelationship.EXCLUDE,
   })
 }
@@ -65,6 +67,7 @@ export async function save(ctx: BBContext) {
   const tableId = ctx.params.tableId
   return handleRequest(Operation.CREATE, tableId, {
     row: inputs,
+    includeSqlRelationships: IncludeRelationship.EXCLUDE,
   })
 }
@@ -78,7 +81,9 @@ export async function fetchView(ctx: BBContext) {
 export async function fetch(ctx: BBContext) {
   const tableId = ctx.params.tableId
-  return handleRequest(Operation.READ, tableId)
+  return handleRequest(Operation.READ, tableId, {
+    includeSqlRelationships: IncludeRelationship.INCLUDE,
+  })
 }
 
 export async function find(ctx: BBContext) {
@@ -86,6 +91,7 @@
   const tableId = ctx.params.tableId
   const response = (await handleRequest(Operation.READ, tableId, {
     id: breakRowIdField(id),
+    includeSqlRelationships: IncludeRelationship.EXCLUDE,
   })) as Row[]
   return response ? response[0] : response
 }
@@ -95,6 +101,7 @@ export async function destroy(ctx: BBContext) {
   const id = ctx.request.body._id
   const { row } = (await handleRequest(Operation.DELETE, tableId, {
     id: breakRowIdField(id),
+    includeSqlRelationships: IncludeRelationship.EXCLUDE,
   })) as { row: Row }
   return { response: { ok: true }, row }
 }
@@ -107,6 +114,7 @@ export async function bulkDestroy(ctx: BBContext) {
     promises.push(
       handleRequest(Operation.DELETE, tableId, {
         id: breakRowIdField(row._id),
+        includeSqlRelationships: IncludeRelationship.EXCLUDE,
       })
     )
   }
@@ -149,6 +157,7 @@ export async function search(ctx: BBContext) {
     filters: query,
     sort,
     paginate: paginateObj as PaginationJson,
+    includeSqlRelationships: IncludeRelationship.INCLUDE,
   })) as Row[]
   let hasNextPage = false
   if (paginate && rows.length === limit) {
@@ -159,6 +168,7 @@
         limit: 1,
         page: bookmark * limit + 1,
       },
+      includeSqlRelationships: IncludeRelationship.INCLUDE,
     })) as Row[]
     hasNextPage = nextRows.length > 0
   }
@@ -247,6 +257,7 @@ export async function fetchEnrichedRow(ctx: BBContext) {
   const response = (await handleRequest(Operation.READ, tableId, {
     id,
     datasource,
+    includeSqlRelationships: IncludeRelationship.INCLUDE,
   })) as Row[]
   const table: Table = tables[tableName]
   const row = response[0]
@@ -274,6 +285,7 @@ export async function fetchEnrichedRow(ctx: BBContext) {
           [primaryLink]: linkedIds,
         },
       },
+      includeSqlRelationships: IncludeRelationship.INCLUDE,
     })
   }
   return row
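The handlers above settle into one convention: single-row writes and lookups (patch, save, find, destroy, bulkDestroy) pass EXCLUDE, while list-style reads (fetch, search, fetchEnrichedRow) pass INCLUDE so related data still comes back. The enum itself lives in @budibase/types; judging only from its usage in this file, it is a simple two-value enum along these lines:

// Sketch based on how the values are used here; see @budibase/types for the
// actual definition.
export enum IncludeRelationship {
  INCLUDE,
  EXCLUDE,
}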

View file

@@ -89,26 +89,13 @@ function parseFilters(filters: SearchFilters | undefined): SearchFilters {
 function generateSelectStatement(
   json: QueryJson,
-  knex: Knex,
-  opts?: { excludeJoinColumns: boolean }
-): (string | Knex.Raw)[] | "*" {
+  knex: Knex
+): (string | Knex.Raw)[] {
   const { resource, meta } = json
   const schema = meta?.table?.schema
-  if (!resource) {
-    return "*"
-  }
-  return resource.fields.reduce<(string | Knex.Raw)[]>((p, field) => {
+  return resource!.fields.map(field => {
     const fieldNames = field.split(/\./g)
     const tableName = fieldNames[0]
-    if (
-      meta?.table?.name &&
-      opts?.excludeJoinColumns &&
-      tableName !== meta.table.name
-    ) {
-      return p
-    }
     const columnName = fieldNames[1]
     if (
       columnName &&
@@ -117,18 +104,13 @@ function generateSelectStatement(
     ) {
       const externalType = schema[columnName].externalType
       if (externalType?.includes("money")) {
-        p.push(
-          knex.raw(
-            `"${tableName}"."${columnName}"::money::numeric as "${field}"`
-          )
+        return knex.raw(
+          `"${tableName}"."${columnName}"::money::numeric as "${field}"`
         )
-        return p
       }
     }
-    p.push(`${field} as ${field}`)
-    return p
-  }, [])
+    return `${field} as ${field}`
+  })
 }
 
 class InternalBuilder {
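To make the new map-based version concrete, here is a standalone restatement of what it returns for a pair of fields; the field names and the money column are made up, while the aliasing and the ::money::numeric cast mirror the code above:

import { knex, Knex } from "knex"

const client = knex({ client: "pg" })

// Pretend resource.fields plus a schema where "budget" has a money externalType.
const fields = ["tasks.id", "tasks.budget"]
const moneyColumns = new Set(["budget"])

const selectList: (string | Knex.Raw)[] = fields.map(field => {
  const [tableName, columnName] = field.split(".")
  if (columnName && moneyColumns.has(columnName)) {
    // Money columns are cast to numeric but keep the same "table.column" alias.
    return client.raw(
      `"${tableName}"."${columnName}"::money::numeric as "${field}"`
    )
  }
  // Everything else becomes a plain "table.column as table.column" entry.
  return `${field} as ${field}`
})

Switching from reduce to map works because every field now yields exactly one select entry; the old excludeJoinColumns filtering has effectively moved up into the callers through includeSqlRelationships.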
@@ -417,9 +399,7 @@ class InternalBuilder {
     } else {
       return query
         .insert(parsedBody)
-        .returning(
-          generateSelectStatement(json, knex, { excludeJoinColumns: true })
-        )
+        .returning(generateSelectStatement(json, knex))
     }
   }
@@ -448,9 +428,7 @@
     if (resource.fields && resource.fields.length > 0) {
       // select the resources as the format "table.columnName" - this is what is provided
       // by the resource builder further up
-      selectStatement = generateSelectStatement(json, knex, {
-        excludeJoinColumns: false,
-      })
+      selectStatement = generateSelectStatement(json, knex)
     }
     let foundLimit = limit || BASE_LIMIT
     // handle pagination
@@ -508,9 +486,7 @@
     } else {
       return query
         .update(parsedBody)
-        .returning(
-          generateSelectStatement(json, knex, { excludeJoinColumns: true })
-        )
+        .returning(generateSelectStatement(json, knex))
     }
   }
@@ -525,11 +501,7 @@
     if (opts.disableReturning) {
       return query.delete()
     } else {
-      return query
-        .delete()
-        .returning(
-          generateSelectStatement(json, knex, { excludeJoinColumns: true })
-        )
+      return query.delete().returning(generateSelectStatement(json, knex))
     }
   }
 }