1
0
Fork 0
mirror of synced 2024-06-27 18:40:42 +12:00
budibase/packages/server/src/api/controllers/table/index.js

112 lines
3.3 KiB
JavaScript
Raw Normal View History

2021-10-29 07:39:42 +13:00
const internal = require("./internal")
const external = require("./external")
const csvParser = require("../../../utilities/csvParser")
const { isExternalTable, isSQL } = require("../../../integrations/utils")
const { getDatasourceParams } = require("../../../db/utils")
const { getAppDB } = require("@budibase/backend-core/context")
const { events } = require("@budibase/backend-core")
const sdk = require("../../../sdk")
2021-10-29 07:39:42 +13:00
/**
 * Select the correct table API implementation — internal (CouchDB) vs
 * external (datasource-backed) — from a table document and/or its ID.
 * @param {{ tableId?: string, table?: object }} opts
 * @returns {object} the internal or external controller module
 */
function pickApi({ tableId, table }) {
  // fall back to the document's own _id when no explicit ID was given
  const id = tableId || (table && table._id)
  // an explicit "external" type on the document wins outright
  if (table && table.type === "external") {
    return external
  }
  if (id && isExternalTable(id)) {
    return external
  }
  return internal
}
// covers both internal and external
2021-05-03 19:31:09 +12:00
exports.fetch = async function (ctx) {
const db = getAppDB()
const internal = await sdk.tables.getAllInternalTables()
const externalTables = await db.allDocs(
getDatasourceParams("plus", {
include_docs: true,
})
)
const external = externalTables.rows.flatMap(tableDoc => {
let entities = tableDoc.doc.entities
if (entities) {
return Object.values(entities).map(entity => ({
...entity,
type: "external",
sourceId: tableDoc.doc._id,
sql: isSQL(tableDoc.doc),
}))
} else {
return []
}
})
ctx.body = [...internal, ...external]
}
2021-05-03 19:31:09 +12:00
exports.find = async function (ctx) {
2022-02-24 11:13:16 +13:00
const tableId = ctx.params.tableId
ctx.body = await sdk.tables.getTable(tableId)
}
2021-05-03 19:31:09 +12:00
exports.save = async function (ctx) {
const appId = ctx.appId
2021-10-29 07:39:42 +13:00
const table = ctx.request.body
2022-04-12 09:32:12 +12:00
const importFormat =
table.dataImport && table.dataImport.csvString ? "csv" : undefined
2021-10-29 07:39:42 +13:00
const savedTable = await pickApi({ table }).save(ctx)
if (!table._id) {
2022-05-24 09:14:44 +12:00
await events.table.created(savedTable)
} else {
2022-05-24 09:14:44 +12:00
await events.table.updated(savedTable)
}
2022-04-12 09:32:12 +12:00
if (importFormat) {
2022-05-24 09:14:44 +12:00
await events.table.imported(savedTable, importFormat)
2022-04-12 09:32:12 +12:00
}
ctx.status = 200
2021-10-29 07:39:42 +13:00
ctx.message = `Table ${table.name} saved successfully.`
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, savedTable)
ctx.body = savedTable
}
2021-05-03 19:31:09 +12:00
exports.destroy = async function (ctx) {
const appId = ctx.appId
2021-10-29 07:39:42 +13:00
const tableId = ctx.params.tableId
const deletedTable = await pickApi({ tableId }).destroy(ctx)
2022-05-24 09:14:44 +12:00
await events.table.deleted(deletedTable)
ctx.eventEmitter &&
2021-10-29 07:39:42 +13:00
ctx.eventEmitter.emitTable(`table:delete`, appId, deletedTable)
ctx.status = 200
2022-02-24 11:13:16 +13:00
ctx.table = deletedTable
2021-10-29 07:39:42 +13:00
ctx.body = { message: `Table ${tableId} deleted.` }
}
exports.bulkImport = async function (ctx) {
const tableId = ctx.params.tableId
await pickApi({ tableId }).bulkImport(ctx)
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to
// think about events for bulk items
ctx.status = 200
ctx.body = { message: `Bulk rows created.` }
}
2021-05-03 19:31:09 +12:00
exports.validateCSVSchema = async function (ctx) {
// tableId being specified means its an import to an existing table
const { csvString, schema = {}, tableId } = ctx.request.body
let existingTable
if (tableId) {
existingTable = await sdk.tables.getTable(tableId)
}
let result = await csvParser.parse(csvString, schema)
if (existingTable) {
result = csvParser.updateSchema({ schema: result, existingTable })
}
ctx.body = { schema: result }
}