2021-10-29 07:39:42 +13:00
|
|
|
const internal = require("./internal")
|
|
|
|
const external = require("./external")
|
2021-02-23 00:39:58 +13:00
|
|
|
const csvParser = require("../../../utilities/csvParser")
|
2022-01-13 06:55:28 +13:00
|
|
|
const { isExternalTable, isSQL } = require("../../../integrations/utils")
|
2021-02-23 00:39:58 +13:00
|
|
|
const {
|
|
|
|
getTableParams,
|
2021-06-12 04:52:08 +12:00
|
|
|
getDatasourceParams,
|
2021-06-16 06:48:05 +12:00
|
|
|
BudibaseInternalDB,
|
2021-02-23 00:39:58 +13:00
|
|
|
} = require("../../../db/utils")
|
2021-10-29 07:39:42 +13:00
|
|
|
const { getTable } = require("./utils")
|
2022-01-28 07:18:31 +13:00
|
|
|
const { getAppDB } = require("@budibase/backend-core/context")
|
2021-02-23 00:39:58 +13:00
|
|
|
|
2021-10-29 07:39:42 +13:00
|
|
|
/**
 * Select the table API implementation to delegate to.
 * External (datasource-plus) tables are handled by the external module,
 * everything else falls back to the internal (CouchDB-backed) module.
 * @param {{ tableId?: string, table?: object }} opts - a table doc and/or its ID
 * @returns the `external` or `internal` controller module
 */
function pickApi({ tableId, table }) {
  if (!tableId && table) {
    tableId = table._id
  }
  const externalByType = table && table.type === "external"
  const externalById = tableId && isExternalTable(tableId)
  if (externalByType || externalById) {
    return external
  }
  return internal
}
|
|
|
|
|
|
|
|
// covers both internal and external
|
2021-05-03 19:31:09 +12:00
|
|
|
exports.fetch = async function (ctx) {
|
2022-01-28 07:18:31 +13:00
|
|
|
const db = getAppDB()
|
2021-06-16 06:48:05 +12:00
|
|
|
|
2021-06-12 04:52:08 +12:00
|
|
|
const internalTables = await db.allDocs(
|
2021-02-23 00:39:58 +13:00
|
|
|
getTableParams(null, {
|
|
|
|
include_docs: true,
|
|
|
|
})
|
|
|
|
)
|
2021-06-16 06:48:05 +12:00
|
|
|
|
2022-01-18 23:43:21 +13:00
|
|
|
const internal = internalTables.rows.map(tableDoc => ({
|
|
|
|
...tableDoc.doc,
|
2021-06-17 10:27:38 +12:00
|
|
|
type: "internal",
|
2021-06-16 06:48:05 +12:00
|
|
|
sourceId: BudibaseInternalDB._id,
|
|
|
|
}))
|
2021-06-12 04:52:08 +12:00
|
|
|
|
|
|
|
const externalTables = await db.allDocs(
|
|
|
|
getDatasourceParams("plus", {
|
|
|
|
include_docs: true,
|
|
|
|
})
|
|
|
|
)
|
2021-06-16 06:48:05 +12:00
|
|
|
|
2022-01-18 23:43:21 +13:00
|
|
|
const external = externalTables.rows.flatMap(tableDoc => {
|
|
|
|
let entities = tableDoc.doc.entities
|
|
|
|
if (entities) {
|
|
|
|
return Object.values(entities).map(entity => ({
|
|
|
|
...entity,
|
|
|
|
type: "external",
|
|
|
|
sourceId: tableDoc.doc._id,
|
|
|
|
sql: isSQL(tableDoc.doc),
|
|
|
|
}))
|
|
|
|
} else {
|
|
|
|
return []
|
|
|
|
}
|
2021-06-16 06:48:05 +12:00
|
|
|
})
|
2021-06-12 04:52:08 +12:00
|
|
|
|
|
|
|
ctx.body = [...internal, ...external]
|
2021-02-23 00:39:58 +13:00
|
|
|
}
|
|
|
|
|
2021-05-03 19:31:09 +12:00
|
|
|
exports.find = async function (ctx) {
|
2021-06-17 03:27:33 +12:00
|
|
|
const tableId = ctx.params.id
|
2022-01-28 07:18:31 +13:00
|
|
|
ctx.body = await getTable(tableId)
|
2021-02-23 00:39:58 +13:00
|
|
|
}
|
|
|
|
|
2021-05-03 19:31:09 +12:00
|
|
|
exports.save = async function (ctx) {
|
2021-03-30 05:32:05 +13:00
|
|
|
const appId = ctx.appId
|
2021-10-29 07:39:42 +13:00
|
|
|
const table = ctx.request.body
|
|
|
|
const savedTable = await pickApi({ table }).save(ctx)
|
2021-02-23 00:39:58 +13:00
|
|
|
ctx.status = 200
|
2021-10-29 07:39:42 +13:00
|
|
|
ctx.message = `Table ${table.name} saved successfully.`
|
|
|
|
ctx.eventEmitter &&
|
|
|
|
ctx.eventEmitter.emitTable(`table:save`, appId, savedTable)
|
|
|
|
ctx.body = savedTable
|
2021-02-23 00:39:58 +13:00
|
|
|
}
|
|
|
|
|
2021-05-03 19:31:09 +12:00
|
|
|
exports.destroy = async function (ctx) {
|
2021-03-30 05:32:05 +13:00
|
|
|
const appId = ctx.appId
|
2021-10-29 07:39:42 +13:00
|
|
|
const tableId = ctx.params.tableId
|
|
|
|
const deletedTable = await pickApi({ tableId }).destroy(ctx)
|
2021-02-23 00:39:58 +13:00
|
|
|
ctx.eventEmitter &&
|
2021-10-29 07:39:42 +13:00
|
|
|
ctx.eventEmitter.emitTable(`table:delete`, appId, deletedTable)
|
2021-02-23 00:39:58 +13:00
|
|
|
ctx.status = 200
|
2021-10-29 07:39:42 +13:00
|
|
|
ctx.body = { message: `Table ${tableId} deleted.` }
|
2021-02-23 00:39:58 +13:00
|
|
|
}
|
|
|
|
|
2021-11-13 07:26:57 +13:00
|
|
|
exports.bulkImport = async function (ctx) {
|
|
|
|
const tableId = ctx.params.tableId
|
|
|
|
await pickApi({ tableId }).bulkImport(ctx)
|
|
|
|
// right now we don't trigger anything for bulk import because it
|
|
|
|
// can only be done in the builder, but in the future we may need to
|
|
|
|
// think about events for bulk items
|
|
|
|
ctx.status = 200
|
|
|
|
ctx.body = { message: `Bulk rows created.` }
|
|
|
|
}
|
|
|
|
|
2021-05-03 19:31:09 +12:00
|
|
|
exports.validateCSVSchema = async function (ctx) {
|
2021-11-13 07:26:57 +13:00
|
|
|
// tableId being specified means its an import to an existing table
|
|
|
|
const { csvString, schema = {}, tableId } = ctx.request.body
|
|
|
|
let existingTable
|
|
|
|
if (tableId) {
|
2022-01-28 07:18:31 +13:00
|
|
|
existingTable = await getTable(tableId)
|
2021-11-13 07:26:57 +13:00
|
|
|
}
|
|
|
|
let result = await csvParser.parse(csvString, schema)
|
|
|
|
if (existingTable) {
|
|
|
|
result = csvParser.updateSchema({ schema: result, existingTable })
|
|
|
|
}
|
2021-02-23 00:39:58 +13:00
|
|
|
ctx.body = { schema: result }
|
|
|
|
}
|