
Merge branch 'feature/opinionated-sql' of github.com:Budibase/budibase into feature/opinionated-sql

Martin McKeaveney 2021-06-16 23:27:47 +01:00
commit 2737949602
17 changed files with 158 additions and 130 deletions

View file

@@ -2,23 +2,21 @@ import { store } from "./index"
 import { get as svelteGet } from "svelte/store"
 import { removeCookie, Cookies } from "./cookies"

-const apiCall = method => async (
-  url,
-  body,
-  headers = { "Content-Type": "application/json" }
-) => {
-  headers["x-budibase-app-id"] = svelteGet(store).appId
-  const json = headers["Content-Type"] === "application/json"
-  const resp = await fetch(url, {
-    method: method,
-    body: json ? JSON.stringify(body) : body,
-    headers,
-  })
-  if (resp.status === 403) {
-    removeCookie(Cookies.Auth)
-  }
-  return resp
-}
+const apiCall =
+  method =>
+  async (url, body, headers = { "Content-Type": "application/json" }) => {
+    headers["x-budibase-app-id"] = svelteGet(store).appId
+    const json = headers["Content-Type"] === "application/json"
+    const resp = await fetch(url, {
+      method: method,
+      body: json ? JSON.stringify(body) : body,
+      headers,
+    })
+    if (resp.status === 403) {
+      removeCookie(Cookies.Auth)
+    }
+    return resp
+  }

 export const post = apiCall("POST")
 export const get = apiCall("GET")

View file

@@ -100,9 +100,10 @@ const automationActions = store => ({
   },
   deleteAutomationBlock: block => {
     store.update(state => {
-      const idx = state.selectedAutomation.automation.definition.steps.findIndex(
-        x => x.id === block.id
-      )
+      const idx =
+        state.selectedAutomation.automation.definition.steps.findIndex(
+          x => x.id === block.id
+        )
       state.selectedAutomation.deleteBlock(block.id)

       // Select next closest step

View file

@@ -59,9 +59,7 @@
   <section>
     <Heading size="XS">Columns</Heading>
     <ul>
-      {#each context.filter(context =>
-        context.readableBinding.match(searchRgx)
-      ) as { readableBinding }}
+      {#each context.filter( context => context.readableBinding.match(searchRgx) ) as { readableBinding }}
         <li
           on:click={() => {
             value = addToText(value, getCaretPosition(), readableBinding)
@@ -77,9 +75,7 @@
   <section>
     <Heading size="XS">Components</Heading>
     <ul>
-      {#each instance.filter(instance =>
-        instance.readableBinding.match(searchRgx)
-      ) as { readableBinding }}
+      {#each instance.filter( instance => instance.readableBinding.match(searchRgx) ) as { readableBinding }}
         <li on:click={() => addToText(readableBinding)}>
           {readableBinding}
         </li>

View file

@@ -49,9 +49,7 @@
   <div class="section">
     {#each categories as [categoryName, bindings]}
       <Heading size="XS">{categoryName}</Heading>
-      {#each bindings.filter(binding =>
-        binding.label.match(searchRgx)
-      ) as binding}
+      {#each bindings.filter( binding => binding.label.match(searchRgx) ) as binding}
         <div
           class="binding"
           on:click={() => {

View file

@@ -103,8 +103,9 @@
   }

   function fetchQueryDefinition(query) {
-    const source = $datasources.list.find(ds => ds._id === query.datasourceId)
-      .source
+    const source = $datasources.list.find(
+      ds => ds._id === query.datasourceId
+    ).source
     return $integrations[source].query[query.queryVerb]
   }
 </script>

View file

@@ -18,8 +18,9 @@
   )

   function fetchQueryDefinition(query) {
-    const source = $datasources.list.find(ds => ds._id === query.datasourceId)
-      .source
+    const source = $datasources.list.find(
+      ds => ds._id === query.datasourceId
+    ).source
     return $integrations[source].query[query.queryVerb]
   }
 </script>

View file

@@ -9,8 +9,7 @@ export const SOME_QUERY = {
   queryVerb: "read",
   schema: {},
   name: "Speakers",
-  _id:
-    "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
+  _id: "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
   _rev: "2-941f8699eb0adf995f8bd59c99203b26",
   readable: true,
 }
@@ -75,8 +74,7 @@ export const SAVE_QUERY_RESPONSE = {
     },
   },
   name: "Speakers",
-  _id:
-    "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
+  _id: "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
   _rev: "3-5a64adef494b1e9c793dc91b51ce73c6",
   readable: true,
 }

View file

@@ -1,7 +1,11 @@
 const { makeExternalQuery } = require("./utils")
 const { DataSourceOperation, SortDirection } = require("../../../constants")
 const { getExternalTable } = require("../table/utils")
-const { breakExternalTableId } = require("../../../integrations/utils")
+const {
+  breakExternalTableId,
+  generateRowIdField,
+  breakRowIdField,
+} = require("../../../integrations/utils")

 function inputProcessing(row, table) {
   if (!row) {
@@ -29,20 +33,35 @@ function outputProcessing(rows, table) {
     for (let field of primary) {
       idParts.push(row[field])
     }
-    row._id = idParts
+    row._id = generateRowIdField(idParts)
+    row.tableId = table._id
   }
   return rows
 }

-function buildIDFilter(id, table) {
+function buildFilters(id, filters, table) {
+  const primary = table.primary
+  if (filters) {
+    // need to map over the filters and make sure the _id field isn't present
+    for (let filter of Object.values(filters)) {
+      if (filter._id) {
+        const parts = breakRowIdField(filter._id)
+        for (let field of primary) {
+          filter[field] = parts.shift()
+        }
+      }
+      // make sure this field doesn't exist on any filter
+      delete filter._id
+    }
+  }
+  // there is no id, just use the user provided filters
   if (!id || !table) {
-    return null
+    return filters
   }
   // if used as URL parameter it will have been joined
   if (typeof id === "string") {
-    id = id.split(",")
+    id = breakRowIdField(id)
   }
-  const primary = table.primary
   const equal = {}
   for (let field of primary) {
     // work through the ID and get the parts
@@ -65,14 +84,13 @@ async function handleRequest(
     throw `Unable to process query, table "${tableName}" not defined.`
   }
   // clean up row on ingress using schema
+  filters = buildFilters(id, filters, table)
   row = inputProcessing(row, table)
-  // try and build an id filter if required
-  let idFilters = buildIDFilter(id, table)
   if (
     operation === DataSourceOperation.DELETE &&
-    Object.keys(idFilters).length === 0
+    Object.keys(filters).length === 0
   ) {
-    throw "Deletion must be filtered in someway"
+    throw "Deletion must be filtered"
   }
   let json = {
     endpoint: {
@@ -84,7 +102,7 @@
       // not specifying any fields means "*"
       fields: [],
     },
-    filters: idFilters != null ? idFilters : filters,
+    filters,
     sort,
     paginate,
     body: row,
@@ -106,7 +124,7 @@ exports.patch = async ctx => {
   const appId = ctx.appId
   const inputs = ctx.request.body
   const tableId = ctx.params.tableId
-  const id = inputs._id
+  const id = breakRowIdField(inputs._id)
   // don't save the ID to db
   delete inputs._id
   return handleRequest(appId, DataSourceOperation.UPDATE, tableId, {
@@ -153,7 +171,7 @@ exports.destroy = async ctx => {
   const appId = ctx.appId
   const tableId = ctx.params.tableId
   return handleRequest(appId, DataSourceOperation.DELETE, tableId, {
-    id: ctx.request.body._id,
+    id: breakRowIdField(ctx.request.body._id),
   })
 }
@@ -166,7 +184,7 @@ exports.bulkDestroy = async ctx => {
   for (let row of rows) {
     promises.push(
       handleRequest(appId, DataSourceOperation.DELETE, tableId, {
-        id: row._id,
+        id: breakRowIdField(row._id),
       })
     )
   }
@@ -185,6 +203,10 @@ exports.search = async ctx => {
       // todo: need to handle bookmarks
       page: params.bookmark,
     }
+  } else if (params && params.limit) {
+    paginateObj = {
+      limit: params.limit,
+    }
   }
   let sort
   if (params.sort) {
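
The new buildFilters step above lets callers keep filtering on _id even though external SQL rows have no such column: the encoded id is unpacked back onto the table's primary-key fields before the query is built. A standalone sketch of that mapping; the table shape and the "equal" filter group are assumptions for illustration, and the loop mirrors buildFilters rather than calling it:

// hypothetical table and filter shapes, for illustration only
const table = { primary: ["city", "id"] }
const filters = {
  equal: { _id: encodeURIComponent(JSON.stringify(["London", 42])) },
}

for (let filter of Object.values(filters)) {
  if (filter._id) {
    // breakRowIdField(filter._id) inlined: decode the _id back into key parts
    const parts = JSON.parse(decodeURIComponent(filter._id))
    for (let field of table.primary) {
      filter[field] = parts.shift()
    }
  }
  // _id itself never reaches the SQL layer
  delete filter._id
}

console.log(filters) // { equal: { city: "London", id: 42 } }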

View file

@@ -1,4 +1,9 @@
 const CouchDB = require("../db")
+const {
+  isExternalTable,
+  breakExternalTableId,
+} = require("../integrations/utils")
+const { getExternalTable } = require("../api/controllers/table/utils")

 /**
  * When values are input to the system generally they will be of type string as this is required for template strings.
@@ -60,7 +65,13 @@ module.exports.cleanInputValues = (inputs, schema) => {
  */
 module.exports.cleanUpRow = async (appId, tableId, row) => {
   const db = new CouchDB(appId)
-  const table = await db.get(tableId)
+  let table
+  if (isExternalTable(tableId)) {
+    const { datasourceId, tableName } = breakExternalTableId(tableId)
+    table = await getExternalTable(appId, datasourceId, tableName)
+  } else {
+    table = await db.get(tableId)
+  }
   return module.exports.cleanInputValues(row, { properties: table.schema })
 }

View file

@@ -2,8 +2,7 @@ const { Client } = require("@elastic/elasticsearch")
 const { QUERY_TYPES, FIELD_TYPES } = require("./Integration")

 const SCHEMA = {
-  docs:
-    "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
+  docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
   description:
     "Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
   friendlyName: "ElasticSearch",

View file

@@ -14,3 +14,19 @@ exports.breakExternalTableId = tableId => {
   let datasourceId = parts.join(SEPARATOR)
   return { datasourceId, tableName }
 }
+
+exports.generateRowIdField = (keyProps = []) => {
+  if (!Array.isArray(keyProps)) {
+    keyProps = [keyProps]
+  }
+  // this conserves order and types
+  return encodeURIComponent(JSON.stringify(keyProps))
+}
+
+// should always return an array
+exports.breakRowIdField = _id => {
+  if (!_id) {
+    return null
+  }
+  return JSON.parse(decodeURIComponent(_id))
+}
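
These two helpers give external (SQL) rows a stable _id by packing the primary-key values into a single URL-safe string and unpacking it again on the way back in; going through JSON.stringify preserves both the order and the types of the key parts. A quick round-trip, with the require path shortened here for illustration:

const { generateRowIdField, breakRowIdField } = require("./utils")

// a composite, two-column primary key
const _id = generateRowIdField(["London", 42])
console.log(_id) // %5B%22London%22%2C42%5D

console.log(breakRowIdField(_id)) // [ "London", 42 ]
console.log(breakRowIdField(undefined)) // null (missing ids short-circuit)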

View file

@@ -14,50 +14,52 @@ const WEBHOOK_ENDPOINTS = new RegExp(
   ["webhooks/trigger", "webhooks/schema"].join("|")
 )

-module.exports = (permType, permLevel = null) => async (ctx, next) => {
-  // webhooks don't need authentication, each webhook unique
-  if (WEBHOOK_ENDPOINTS.test(ctx.request.url)) {
-    return next()
-  }
-  if (!ctx.user) {
-    return ctx.throw(403, "No user info found")
-  }
-  // check general builder stuff, this middleware is a good way
-  // to find API endpoints which are builder focused
-  await builderMiddleware(ctx, permType)
-  const isAuthed = ctx.isAuthenticated
-  const { basePermissions, permissions } = await getUserPermissions(
-    ctx.appId,
-    ctx.roleId
-  )
-  // builders for now have permission to do anything
-  // TODO: in future should consider separating permissions with an require("@budibase/auth").isClient check
-  let isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
-  const isBuilderApi = permType === PermissionTypes.BUILDER
-  if (isBuilder) {
-    return next()
-  } else if (isBuilderApi && !isBuilder) {
-    return ctx.throw(403, "Not Authorized")
-  }
-  if (
-    hasResource(ctx) &&
-    doesHaveResourcePermission(permissions, permLevel, ctx)
-  ) {
-    return next()
-  }
-  if (!isAuthed) {
-    ctx.throw(403, "Session not authenticated")
-  }
-  if (!doesHaveBasePermission(permType, permLevel, basePermissions)) {
-    ctx.throw(403, "User does not have permission")
-  }
-  return next()
-}
+module.exports =
+  (permType, permLevel = null) =>
+  async (ctx, next) => {
+    // webhooks don't need authentication, each webhook unique
+    if (WEBHOOK_ENDPOINTS.test(ctx.request.url)) {
+      return next()
+    }
+    if (!ctx.user) {
+      return ctx.throw(403, "No user info found")
+    }
+    // check general builder stuff, this middleware is a good way
+    // to find API endpoints which are builder focused
+    await builderMiddleware(ctx, permType)
+    const isAuthed = ctx.isAuthenticated
+    const { basePermissions, permissions } = await getUserPermissions(
+      ctx.appId,
+      ctx.roleId
+    )
+    // builders for now have permission to do anything
+    // TODO: in future should consider separating permissions with an require("@budibase/auth").isClient check
+    let isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
+    const isBuilderApi = permType === PermissionTypes.BUILDER
+    if (isBuilder) {
+      return next()
+    } else if (isBuilderApi && !isBuilder) {
+      return ctx.throw(403, "Not Authorized")
+    }
+    if (
+      hasResource(ctx) &&
+      doesHaveResourcePermission(permissions, permLevel, ctx)
+    ) {
+      return next()
+    }
+    if (!isAuthed) {
+      ctx.throw(403, "Session not authenticated")
+    }
+    if (!doesHaveBasePermission(permType, permLevel, basePermissions)) {
+      ctx.throw(403, "User does not have permission")
+    }
+    return next()
+  }
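
Only the formatting changed in this middleware: the export is still a factory that takes (permType, permLevel) and returns the per-request async (ctx, next) handler. A hedged sketch of how such a factory is typically wired into a Koa router; the module paths, router package and route are assumptions for illustration, only PermissionTypes.BUILDER comes from the code above:

// illustrative wiring, not taken from this diff
const Router = require("koa-router")
const authorized = require("./middleware/authorized") // path assumed
const { PermissionTypes } = require("./permissions") // path assumed

const router = new Router()

// the factory runs once at route-definition time; the middleware it
// returns runs on every request and performs the permission checks
router.get(
  "/api/some/builder/endpoint",
  authorized(PermissionTypes.BUILDER),
  async ctx => {
    ctx.body = { ok: true }
  }
)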

View file

@@ -1,9 +1,5 @@
-const {
-  getAppId,
-  setCookie,
-  getCookie,
-  clearCookie,
-} = require("@budibase/auth").utils
+const { getAppId, setCookie, getCookie, clearCookie } =
+  require("@budibase/auth").utils
 const { Cookies } = require("@budibase/auth").constants
 const { getRole } = require("@budibase/auth/roles")
 const { getGlobalSelf } = require("../utilities/workerRequests")

View file

@@ -90,15 +90,17 @@ const numericalConstraint = (constraint, error) => value => {
   return null
 }

-const inclusionConstraint = (options = []) => value => {
-  if (value == null || value === "") {
-    return null
-  }
-  if (!options.includes(value)) {
-    return "Invalid value"
-  }
-  return null
-}
+const inclusionConstraint =
+  (options = []) =>
+  value => {
+    if (value == null || value === "") {
+      return null
+    }
+    if (!options.includes(value)) {
+      return "Invalid value"
+    }
+    return null
+  }

 const dateConstraint = (dateString, isEarliest) => {
   const dateLimit = Date.parse(dateString)
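
The curried shape is unchanged here as well: inclusionConstraint(options) still returns a validator that maps a value to an error string or null, and null or empty values are treated as valid by this particular check. For example:

const allowedColours = inclusionConstraint(["red", "green", "blue"])

allowedColours("green") // null, value is in the allowed set
allowedColours("purple") // "Invalid value"
allowedColours("") // null, empty values are not this constraint's concern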

View file

@@ -5,15 +5,8 @@ const authPkg = require("@budibase/auth")
 const GLOBAL_DB = authPkg.StaticDatabases.GLOBAL.name

 exports.sendEmail = async ctx => {
-  const {
-    groupId,
-    email,
-    userId,
-    purpose,
-    contents,
-    from,
-    subject,
-  } = ctx.request.body
+  const { groupId, email, userId, purpose, contents, from, subject } =
+    ctx.request.body
   let user
   if (userId) {
     const db = new CouchDB(GLOBAL_DB)

View file

@@ -1,9 +1,6 @@
 const CouchDB = require("../../../db")
-const {
-  getGroupParams,
-  generateGroupID,
-  StaticDatabases,
-} = require("@budibase/auth").db
+const { getGroupParams, generateGroupID, StaticDatabases } =
+  require("@budibase/auth").db

 const GLOBAL_DB = StaticDatabases.GLOBAL.name

View file

@@ -1,9 +1,6 @@
 const CouchDB = require("../../../db")
-const {
-  generateGlobalUserID,
-  getGlobalUserParams,
-  StaticDatabases,
-} = require("@budibase/auth").db
+const { generateGlobalUserID, getGlobalUserParams, StaticDatabases } =
+  require("@budibase/auth").db
 const { hash, getGlobalUserByEmail } = require("@budibase/auth").utils
 const { UserStatus, EmailTemplatePurpose } = require("../../../constants")
 const { checkInviteCode } = require("../../../utilities/redis")