2022-03-20 14:13:54 +13:00
|
|
|
import { generateQueryID, getQueryParams, isProdAppID } from "../../../db/utils"
|
|
|
|
import { BaseQueryVerbs } from "../../../constants"
|
|
|
|
import { Thread, ThreadType } from "../../../threads"
|
|
|
|
import { save as saveDatasource } from "../datasource"
|
|
|
|
import { RestImporter } from "./import"
|
|
|
|
import { invalidateDynamicVariables } from "../../../threads/utils"
|
|
|
|
import { QUERY_THREAD_TIMEOUT } from "../../../environment"
|
|
|
|
import { getAppDB } from "@budibase/backend-core/context"
|
|
|
|
import { quotas } from "@budibase/pro"
|
2022-04-07 11:38:18 +12:00
|
|
|
import { events } from "@budibase/backend-core"
|
2022-06-24 01:29:19 +12:00
|
|
|
import { getCookie } from "@budibase/backend-core/utils"
|
2022-07-04 08:13:15 +12:00
|
|
|
import { Cookies, Configs } from "@budibase/backend-core/constants"
|
2021-11-12 01:11:09 +13:00
|
|
|
|
2022-01-18 03:57:31 +13:00
|
|
|
// dedicated worker-thread pool for executing queries off the main event
// loop; timeoutMs falls back to 10s when QUERY_THREAD_TIMEOUT is unset
const Runner = new Thread(ThreadType.QUERY, {
  timeoutMs: QUERY_THREAD_TIMEOUT || 10000,
})
|
2021-02-06 05:45:23 +13:00
|
|
|
|
|
|
|
// simple function to append "readable" to all read queries
|
2022-03-20 14:13:54 +13:00
|
|
|
function enrichQueries(input: any) {
|
2021-02-06 05:45:23 +13:00
|
|
|
const wasArray = Array.isArray(input)
|
|
|
|
const queries = wasArray ? input : [input]
|
|
|
|
for (let query of queries) {
|
|
|
|
if (query.queryVerb === BaseQueryVerbs.READ) {
|
|
|
|
query.readable = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return wasArray ? queries : queries[0]
|
|
|
|
}
|
2020-12-19 07:19:43 +13:00
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
export async function fetch(ctx: any) {
|
2022-01-28 13:05:39 +13:00
|
|
|
const db = getAppDB()
|
2020-12-19 07:19:43 +13:00
|
|
|
|
2021-01-07 01:28:51 +13:00
|
|
|
const body = await db.allDocs(
|
|
|
|
getQueryParams(null, {
|
|
|
|
include_docs: true,
|
|
|
|
})
|
|
|
|
)
|
2021-11-26 06:14:07 +13:00
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
ctx.body = enrichQueries(body.rows.map((row: any) => row.doc))
|
2021-01-07 01:28:51 +13:00
|
|
|
}
|
2020-12-19 07:19:43 +13:00
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
// handles an import of REST queries from the builder (via RestImporter),
// optionally creating a new REST datasource to attach them to
const _import = async (ctx: any) => {
  const body = ctx.request.body
  const data = body.data

  const importer = new RestImporter(data)
  await importer.init()

  let datasourceId
  if (!body.datasourceId) {
    // construct new datasource
    const info: any = await importer.getInfo()
    let datasource = {
      type: "datasource",
      source: "REST",
      config: {
        url: info.url,
        defaultHeaders: [],
      },
      name: info.name,
    }
    // save the datasource
    // NOTE(review): the spread below is a shallow copy, so assigning to
    // datasourceCtx.request.body also mutates the original ctx.request.body —
    // presumably acceptable here since body was already consumed; confirm
    const datasourceCtx = { ...ctx }
    datasourceCtx.request.body.datasource = datasource
    await saveDatasource(datasourceCtx)
    // saveDatasource writes the saved doc onto ctx.body
    datasourceId = datasourceCtx.body.datasource._id
  } else {
    // use existing datasource
    datasourceId = body.datasourceId
  }

  const importResult = await importer.importQueries(datasourceId)

  ctx.body = {
    ...importResult,
    datasourceId,
  }
  ctx.status = 200
}
// "import" is a reserved word, so the handler is defined as _import and
// re-exported under its public name
export { _import as import }
|
2021-11-26 06:14:07 +13:00
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
export async function save(ctx: any) {
|
2022-01-28 13:05:39 +13:00
|
|
|
const db = getAppDB()
|
2021-01-07 01:28:51 +13:00
|
|
|
const query = ctx.request.body
|
2020-12-19 07:19:43 +13:00
|
|
|
|
2022-04-07 11:38:18 +12:00
|
|
|
const datasource = await db.get(query.datasourceId)
|
|
|
|
|
|
|
|
let eventFn
|
2021-01-07 01:28:51 +13:00
|
|
|
if (!query._id) {
|
|
|
|
query._id = generateQueryID(query.datasourceId)
|
2022-04-07 11:38:18 +12:00
|
|
|
eventFn = () => events.query.created(datasource, query)
|
|
|
|
} else {
|
|
|
|
eventFn = () => events.query.updated(datasource, query)
|
2021-01-07 01:28:51 +13:00
|
|
|
}
|
2020-12-19 07:19:43 +13:00
|
|
|
|
2021-01-07 01:28:51 +13:00
|
|
|
const response = await db.put(query)
|
2022-05-24 09:14:44 +12:00
|
|
|
await eventFn()
|
2021-01-07 01:28:51 +13:00
|
|
|
query._rev = response.rev
|
2020-12-19 07:19:43 +13:00
|
|
|
|
2021-01-07 01:28:51 +13:00
|
|
|
ctx.body = query
|
|
|
|
ctx.message = `Query ${query.name} saved successfully.`
|
|
|
|
}
|
2020-12-19 07:19:43 +13:00
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
export async function find(ctx: any) {
|
2022-01-28 13:05:39 +13:00
|
|
|
const db = getAppDB()
|
2021-02-06 05:45:23 +13:00
|
|
|
const query = enrichQueries(await db.get(ctx.params.queryId))
|
|
|
|
// remove properties that could be dangerous in real app
|
2021-11-26 00:21:54 +13:00
|
|
|
if (isProdAppID(ctx.appId)) {
|
2021-02-06 05:45:23 +13:00
|
|
|
delete query.fields
|
|
|
|
delete query.parameters
|
|
|
|
}
|
|
|
|
ctx.body = query
|
|
|
|
}
|
|
|
|
|
2022-07-04 08:13:15 +12:00
|
|
|
//Required to discern between OIDC OAuth config entries
|
|
|
|
function getOAuthConfigCookieId(ctx: any) {
|
|
|
|
if (ctx.user.providerType === Configs.OIDC) {
|
|
|
|
return getCookie(ctx, Cookies.OIDC_CONFIG)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
function getAuthConfig(ctx: any) {
|
|
|
|
const authCookie = getCookie(ctx, Cookies.Auth)
|
|
|
|
let authConfigCtx: any = {}
|
|
|
|
authConfigCtx["configId"] = getOAuthConfigCookieId(ctx)
|
|
|
|
authConfigCtx["sessionId"] = authCookie ? authCookie.sessionId : null
|
|
|
|
return authConfigCtx
|
|
|
|
}
|
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
// run a query definition straight from the request body (it may never have
// been saved) and return the rows plus the derived schema field names
export async function preview(ctx: any) {
  const db = getAppDB()

  const datasource = await db.get(ctx.request.body.datasourceId)
  const query = ctx.request.body
  // preview may not have a queryId as it hasn't been saved, but if it does
  // this stops dynamic variables from calling the same query
  const { fields, parameters, queryVerb, transformer, queryId } = query

  // auth context (OAuth config id + session id) is passed through so the
  // worker thread can resolve auth-bound bindings
  const authConfigCtx: any = getAuthConfig(ctx)

  try {
    const runFn = () =>
      Runner.run({
        appId: ctx.appId,
        datasource,
        queryVerb,
        fields,
        parameters,
        transformer,
        queryId,
        ctx: {
          user: ctx.user,
          auth: { ...authConfigCtx },
        },
      })
    // execute via the quota layer so the run is metered
    const { rows, keys, info, extra } = await quotas.addQuery(runFn)
    await events.query.previewed(datasource, query)
    ctx.body = {
      rows,
      // de-duplicate row keys to build the schema field list
      schemaFields: [...new Set(keys)],
      info,
      extra,
    }
  } catch (err) {
    ctx.throw(400, err)
  }
}
|
|
|
|
|
2022-07-08 22:09:06 +12:00
|
|
|
async function execute(ctx: any, opts = { rowsOnly: false, isAutomation: false }) {
|
2022-01-28 13:05:39 +13:00
|
|
|
const db = getAppDB()
|
2021-01-07 01:28:51 +13:00
|
|
|
|
2021-01-09 01:06:37 +13:00
|
|
|
const query = await db.get(ctx.params.queryId)
|
|
|
|
const datasource = await db.get(query.datasourceId)
|
2022-07-08 22:09:06 +12:00
|
|
|
|
|
|
|
let authConfigCtx: any = {}
|
|
|
|
if (!opts.isAutomation) {
|
|
|
|
authConfigCtx = getAuthConfig(ctx)
|
|
|
|
}
|
2022-01-21 08:06:08 +13:00
|
|
|
const enrichedParameters = ctx.request.body.parameters || {}
|
|
|
|
// make sure parameters are fully enriched with defaults
|
|
|
|
if (query && query.parameters) {
|
|
|
|
for (let parameter of query.parameters) {
|
|
|
|
if (!enrichedParameters[parameter.name]) {
|
|
|
|
enrichedParameters[parameter.name] = parameter.default
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-13 05:49:11 +13:00
|
|
|
// call the relevant CRUD method on the integration class
|
2021-11-12 05:20:30 +13:00
|
|
|
try {
|
2022-03-20 14:13:54 +13:00
|
|
|
const runFn = () =>
|
|
|
|
Runner.run({
|
|
|
|
appId: ctx.appId,
|
|
|
|
datasource,
|
|
|
|
queryVerb: query.queryVerb,
|
|
|
|
fields: query.fields,
|
|
|
|
pagination: ctx.request.body.pagination,
|
|
|
|
parameters: enrichedParameters,
|
|
|
|
transformer: query.transformer,
|
|
|
|
queryId: ctx.params.queryId,
|
2022-07-04 08:13:15 +12:00
|
|
|
ctx: {
|
|
|
|
user: ctx.user,
|
2022-07-05 20:28:56 +12:00
|
|
|
auth: { ...authConfigCtx },
|
2022-07-04 08:13:15 +12:00
|
|
|
},
|
2022-03-20 14:13:54 +13:00
|
|
|
})
|
|
|
|
|
|
|
|
const { rows, pagination, extra } = await quotas.addQuery(runFn)
|
2021-12-17 00:41:28 +13:00
|
|
|
if (opts && opts.rowsOnly) {
|
|
|
|
ctx.body = rows
|
|
|
|
} else {
|
2021-12-18 08:00:23 +13:00
|
|
|
ctx.body = { data: rows, pagination, ...extra }
|
2021-12-17 00:41:28 +13:00
|
|
|
}
|
2021-11-12 05:20:30 +13:00
|
|
|
} catch (err) {
|
|
|
|
ctx.throw(400, err)
|
|
|
|
}
|
2021-01-07 01:28:51 +13:00
|
|
|
}
|
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
export async function executeV1(ctx: any) {
|
2022-07-08 22:09:06 +12:00
|
|
|
return execute(ctx, { rowsOnly: true, isAutomation: false })
|
2021-12-17 00:41:28 +13:00
|
|
|
}
|
|
|
|
|
2022-07-08 22:09:06 +12:00
|
|
|
export async function executeV2(ctx: any, isAutomation?: any) {
|
|
|
|
return execute(ctx, { rowsOnly: false, isAutomation })
|
2021-12-17 00:41:28 +13:00
|
|
|
}
|
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
const removeDynamicVariables = async (queryId: any) => {
|
2022-01-28 13:05:39 +13:00
|
|
|
const db = getAppDB()
|
2022-01-06 05:54:59 +13:00
|
|
|
const query = await db.get(queryId)
|
|
|
|
const datasource = await db.get(query.datasourceId)
|
|
|
|
const dynamicVariables = datasource.config.dynamicVariables
|
|
|
|
|
|
|
|
if (dynamicVariables) {
|
2022-01-06 08:49:01 +13:00
|
|
|
// delete dynamic variables from the datasource
|
2022-01-21 08:06:08 +13:00
|
|
|
datasource.config.dynamicVariables = dynamicVariables.filter(
|
2022-03-20 14:13:54 +13:00
|
|
|
(dv: any) => dv.queryId !== queryId
|
2022-01-21 08:06:08 +13:00
|
|
|
)
|
2022-01-06 08:49:01 +13:00
|
|
|
await db.put(datasource)
|
|
|
|
|
2022-01-06 05:54:59 +13:00
|
|
|
// invalidate the deleted variables
|
|
|
|
const variablesToDelete = dynamicVariables.filter(
|
2022-03-20 14:13:54 +13:00
|
|
|
(dv: any) => dv.queryId === queryId
|
2022-01-06 05:54:59 +13:00
|
|
|
)
|
|
|
|
await invalidateDynamicVariables(variablesToDelete)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-03-20 14:13:54 +13:00
|
|
|
export async function destroy(ctx: any) {
|
2022-01-28 13:05:39 +13:00
|
|
|
const db = getAppDB()
|
2022-04-08 12:28:22 +12:00
|
|
|
const queryId = ctx.params.queryId
|
|
|
|
await removeDynamicVariables(queryId)
|
|
|
|
const query = await db.get(queryId)
|
|
|
|
const datasource = await db.get(query.datasourceId)
|
2021-01-13 06:45:43 +13:00
|
|
|
await db.remove(ctx.params.queryId, ctx.params.revId)
|
2021-01-07 01:28:51 +13:00
|
|
|
ctx.message = `Query deleted.`
|
|
|
|
ctx.status = 200
|
2022-05-24 09:14:44 +12:00
|
|
|
await events.query.deleted(datasource, query)
|
2021-01-07 01:28:51 +13:00
|
|
|
}
|