// budibase/packages/server/src/api/controllers/row/external.js
const { makeExternalQuery } = require("./utils")
const { DataSourceOperation, SortDirection } = require("../../../constants")
const { getAllExternalTables } = require("../table/utils")
const {
  breakExternalTableId,
  breakRowIdField,
} = require("../../../integrations/utils")
const {
  buildRelationships,
  buildFilters,
  inputProcessing,
  outputProcessing,
  generateIdForRow,
  buildFields,
} = require("./externalUtils")
const { processObjectSync } = require("@budibase/string-templates")
/**
 * Build and execute a query against an external datasource table, handling
 * filter construction, relationship metadata, input/output processing and
 * any follow-up many-to-many relationship writes.
 *
 * @param {string} appId - the app identifier, used to look up tables/datasources.
 * @param {string} operation - a DataSourceOperation value (CREATE/READ/UPDATE/DELETE).
 * @param {string} tableId - combined datasource + table identifier.
 * @param {object} [opts] - optional settings: row `id`, `row` body, `filters`,
 *   `sort`, `paginate` options and `fullDocs` flag (enrich related rows on output).
 * @returns for READ operations the processed array of rows, otherwise an
 *   object of the shape `{ row, table }`.
 */
async function handleRequest(
  appId,
  operation,
  tableId,
  { id, row, filters, sort, paginate, fullDocs } = {}
) {
  let { datasourceId, tableName } = breakExternalTableId(tableId)
  const tables = await getAllExternalTables(appId, datasourceId)
  const table = tables[tableName]
  if (!table) {
    // string throws are the established style in this controller
    throw `Unable to process query, table "${tableName}" not defined.`
  }
  // clean up row on ingress using schema
  filters = buildFilters(id, filters, table)
  const relationships = buildRelationships(table, tables)
  const processed = inputProcessing(row, table, tables)
  row = processed.row
  // guard: an unfiltered DELETE would wipe the whole table
  if (
    operation === DataSourceOperation.DELETE &&
    (filters == null || Object.keys(filters).length === 0)
  ) {
    throw "Deletion must be filtered"
  }
  let json = {
    endpoint: {
      datasourceId,
      entityId: tableName,
      operation,
    },
    resource: {
      // have to specify the fields to avoid column overlap
      fields: buildFields(table, tables),
    },
    filters,
    sort,
    paginate,
    relationships,
    body: row,
    // pass an id filter into extra, purely for mysql/returning
    extra: {
      idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
    },
  }
  // can't really use response right now
  const response = await makeExternalQuery(appId, json)
  // handle many to many relationships now if we know the ID (could be auto increment)
  if (processed.manyRelationships) {
    const promises = []
    for (let toInsert of processed.manyRelationships) {
      const { tableName } = breakExternalTableId(toInsert.tableId)
      delete toInsert.tableId
      promises.push(
        makeExternalQuery(appId, {
          endpoint: {
            ...json.endpoint,
            entityId: tableName,
          },
          // if we're doing many relationships then we're writing, only one response
          body: processObjectSync(toInsert, response[0]),
        })
      )
    }
    await Promise.all(promises)
  }
  const output = outputProcessing(
    response,
    table,
    relationships,
    tables,
    fullDocs
  )
  // if reading it'll just be an array of rows, return whole thing
  return operation === DataSourceOperation.READ && Array.isArray(response)
    ? output
    : { row: output[0], table }
}
exports.patch = async ctx => {
const appId = ctx.appId
const inputs = ctx.request.body
const tableId = ctx.params.tableId
const id = breakRowIdField(inputs._id)
// don't save the ID to db
delete inputs._id
2021-06-16 00:50:41 +12:00
return handleRequest(appId, DataSourceOperation.UPDATE, tableId, {
2021-06-15 06:07:13 +12:00
id,
row: inputs,
})
}
exports.save = async ctx => {
const appId = ctx.appId
const inputs = ctx.request.body
const tableId = ctx.params.tableId
2021-06-16 00:50:41 +12:00
return handleRequest(appId, DataSourceOperation.CREATE, tableId, {
2021-06-15 06:07:13 +12:00
row: inputs,
})
}
exports.fetchView = async ctx => {
// there are no views in external data sources, shouldn't ever be called
// for now just fetch
ctx.params.tableId = ctx.params.viewName.split("all_")[1]
return exports.fetch(ctx)
}
exports.fetch = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId
2021-06-16 00:50:41 +12:00
return handleRequest(appId, DataSourceOperation.READ, tableId)
}
exports.find = async ctx => {
const appId = ctx.appId
const id = ctx.params.rowId
const tableId = ctx.params.tableId
2021-07-01 05:33:55 +12:00
const response = await handleRequest(
appId,
DataSourceOperation.READ,
tableId,
{
id,
}
)
return response ? response[0] : response
}
exports.destroy = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId
const id = ctx.request.body._id
const { row } = await handleRequest(
appId,
DataSourceOperation.DELETE,
tableId,
{
id,
}
)
return { response: { ok: true }, row }
}
exports.bulkDestroy = async ctx => {
const appId = ctx.appId
const { rows } = ctx.request.body
const tableId = ctx.params.tableId
let promises = []
for (let row of rows) {
2021-06-16 00:47:08 +12:00
promises.push(
handleRequest(appId, DataSourceOperation.DELETE, tableId, {
id: breakRowIdField(row._id),
2021-06-16 00:47:08 +12:00
})
)
}
const responses = await Promise.all(promises)
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
}
exports.search = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId
const { paginate, query, ...params } = ctx.request.body
2021-06-18 02:56:41 +12:00
let { bookmark, limit } = params
if (!bookmark && paginate) {
bookmark = 1
}
let paginateObj = {}
2021-06-18 02:56:41 +12:00
if (paginate) {
paginateObj = {
2021-06-18 02:56:41 +12:00
// add one so we can track if there is another page
limit: limit,
page: bookmark,
}
2021-06-18 02:56:41 +12:00
} else if (params && limit) {
paginateObj = {
2021-06-18 02:56:41 +12:00
limit: limit,
}
}
let sort
if (params.sort) {
2021-06-15 06:07:13 +12:00
const direction =
params.sortOrder === "descending"
? SortDirection.DESCENDING
: SortDirection.ASCENDING
sort = {
2021-06-15 06:07:13 +12:00
[params.sort]: direction,
}
}
const rows = await handleRequest(appId, DataSourceOperation.READ, tableId, {
2021-06-15 06:07:13 +12:00
filters: query,
sort,
paginate: paginateObj,
})
2021-06-18 02:56:41 +12:00
let hasNextPage = false
if (paginate && rows.length === limit) {
const nextRows = await handleRequest(
appId,
DataSourceOperation.READ,
tableId,
{
filters: query,
sort,
paginate: {
limit: 1,
page: bookmark * limit + 1,
},
2021-06-18 02:56:41 +12:00
}
)
2021-06-18 02:56:41 +12:00
hasNextPage = nextRows.length > 0
}
// need wrapper object for bookmarks etc when paginating
2021-06-18 02:56:41 +12:00
return { rows, hasNextPage, bookmark: bookmark + 1 }
}
exports.validate = async () => {
// can't validate external right now - maybe in future
2021-06-16 00:50:41 +12:00
return { valid: true }
}
exports.fetchEnrichedRow = async ctx => {
const appId = ctx.appId
const id = ctx.params.rowId
const tableId = ctx.params.tableId
// TODO: this only enriches the full docs 1 layer deep, need to join those as well
2021-07-01 05:33:55 +12:00
const response = await handleRequest(
appId,
DataSourceOperation.READ,
tableId,
{
id,
fullDocs: true,
}
)
return response ? response[0] : response
}