budibase/packages/server/src/api/controllers/row/external.js

const {
DataSourceOperation,
SortDirection,
FieldTypes,
NoEmptyFilterStrings,
} = require("../../../constants")
const {
breakExternalTableId,
breakRowIdField,
} = require("../../../integrations/utils")
const ExternalRequest = require("./ExternalRequest")
const { getAppDB } = require("@budibase/backend-core/context")
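
// Run an operation (CREATE/READ/UPDATE/DELETE) against an external datasource,
// e.g. handleRequest(DataSourceOperation.READ, tableId, { filters, sort, paginate })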
async function handleRequest(operation, tableId, opts = {}) {
  // make sure the filters are cleaned up - remove empty string values from equal, fuzzy and string filters
if (opts && opts.filters) {
for (let filterField of NoEmptyFilterStrings) {
if (!opts.filters[filterField]) {
continue
}
for (let [key, value] of Object.entries(opts.filters[filterField])) {
if (!value || value === "") {
delete opts.filters[filterField][key]
}
}
}
}
return new ExternalRequest(operation, tableId, opts.datasource).run(opts)
}
exports.handleRequest = handleRequest
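
// Update an existing row - the _id is stripped from the body so it is never
// written back to the external table.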
exports.patch = async ctx => {
const inputs = ctx.request.body
const tableId = ctx.params.tableId
const id = breakRowIdField(inputs._id)
// don't save the ID to db
delete inputs._id
return handleRequest(DataSourceOperation.UPDATE, tableId, {
id,
row: inputs,
})
}
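
// Create a new row in an external table.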
exports.save = async ctx => {
const inputs = ctx.request.body
const tableId = ctx.params.tableId
return handleRequest(DataSourceOperation.CREATE, tableId, {
row: inputs,
})
}

exports.fetchView = async ctx => {
  // there are no views in external datasources, so this should never be called -
  // for now just extract the table ID from the view name and fetch
const split = ctx.params.viewName.split("all_")
ctx.params.tableId = split[1] ? split[1] : split[0]
return exports.fetch(ctx)
}
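
// Read all rows from an external table.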
exports.fetch = async ctx => {
const tableId = ctx.params.tableId
return handleRequest(DataSourceOperation.READ, tableId)
}
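
// Find a single row by its ID.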
exports.find = async ctx => {
const id = ctx.params.rowId
const tableId = ctx.params.tableId
const response = await handleRequest(DataSourceOperation.READ, tableId, {
id,
})
return response ? response[0] : response
}
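
// Delete a single row, returning the deleted row to the client.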
exports.destroy = async ctx => {
const tableId = ctx.params.tableId
const id = ctx.request.body._id
const { row } = await handleRequest(DataSourceOperation.DELETE, tableId, {
id,
})
return { response: { ok: true }, row }
}
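
// Delete a batch of rows in parallel, returning the deleted rows.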
exports.bulkDestroy = async ctx => {
const { rows } = ctx.request.body
const tableId = ctx.params.tableId
let promises = []
for (let row of rows) {
promises.push(
handleRequest(DataSourceOperation.DELETE, tableId, {
id: breakRowIdField(row._id),
})
)
}
const responses = await Promise.all(promises)
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
}
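
// Search rows with filters, sorting and optional page-based pagination.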
exports.search = async ctx => {
const tableId = ctx.params.tableId
const { paginate, query, ...params } = ctx.request.body
let { bookmark, limit } = params
if (!bookmark && paginate) {
bookmark = 1
}
let paginateObj = {}
if (paginate) {
paginateObj = {
      // query the current page - a second query below probes for another page
      limit: limit,
page: bookmark,
}
  } else if (limit) {
paginateObj = {
limit: limit,
}
}
let sort
if (params.sort) {
const direction =
params.sortOrder === "descending"
? SortDirection.DESCENDING
: SortDirection.ASCENDING
sort = {
[params.sort]: direction,
}
}
const rows = await handleRequest(DataSourceOperation.READ, tableId, {
filters: query,
sort,
paginate: paginateObj,
})
let hasNextPage = false
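  // if a full page came back, fetch a single row past the end of the current
  // page to work out whether there is another page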
if (paginate && rows.length === limit) {
const nextRows = await handleRequest(DataSourceOperation.READ, tableId, {
filters: query,
sort,
paginate: {
limit: 1,
page: bookmark * limit + 1,
},
})
hasNextPage = nextRows.length > 0
}
// need wrapper object for bookmarks etc when paginating
return { rows, hasNextPage, bookmark: bookmark + 1 }
}

exports.validate = async () => {
// can't validate external right now - maybe in future
return { valid: true }
}
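
// Export rows by ID - the ID list is translated into a oneOf search query and
// handed off to the search endpoint.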
exports.exportRows = async ctx => {
const { datasourceId, tableName } = breakExternalTableId(ctx.params.tableId)
const db = getAppDB()
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
ctx.throw(400, "Datasource has not been configured for plus API.")
}
const tables = datasource.entities
const table = tables[tableName]
ctx.request.body = {
query: {
oneOf: {
[table.primaryDisplay]: ctx.request.body.map(
id => breakRowIdField(id)[0]
),
},
},
}
return exports.search(ctx)
}
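
// Fetch a single row and enrich any relationship (link) columns with the rows
// they point to.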
exports.fetchEnrichedRow = async ctx => {
const id = ctx.params.rowId
const tableId = ctx.params.tableId
const { datasourceId, tableName } = breakExternalTableId(tableId)
const db = getAppDB()
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
ctx.throw(400, "Datasource has not been configured for plus API.")
}
const tables = datasource.entities
const response = await handleRequest(DataSourceOperation.READ, tableId, {
id,
datasource,
})
const table = tables[tableName]
const row = response[0]
  // this seems like a lot of work, but we need to dig deeper to enrich a single
  // row - there is probably a better way to do this with some smart multi-layer joins
for (let [fieldName, field] of Object.entries(table.schema)) {
if (
field.type !== FieldTypes.LINK ||
!row[fieldName] ||
row[fieldName].length === 0
) {
continue
}
const links = row[fieldName]
const linkedTableId = field.tableId
const linkedTable = tables[breakExternalTableId(linkedTableId).tableName]
// don't support composite keys right now
const linkedIds = links.map(link => breakRowIdField(link._id)[0])
row[fieldName] = await handleRequest(
DataSourceOperation.READ,
linkedTableId,
{
tables,
filters: {
oneOf: {
[linkedTable.primary]: linkedIds,
},
},
}
)
}
return row
}