
Merge pull request #1732 from Budibase/feature/opinionated-sql

Feature/opinionated sql
Martin McKeaveney 2021-06-21 16:28:29 +01:00 committed by GitHub
commit 1f65427e90
106 changed files with 3553 additions and 607 deletions


@ -38,6 +38,11 @@ static_resources:
route:
cluster: server-dev
- match: { prefix: "/app/" }
route:
cluster: server-dev
prefix_rewrite: "/"
# the three cases below are needed to make sure
# all traffic prefixed for the builder is passed
# through correctly.


@ -2,7 +2,7 @@
export let value
</script>
<div>{value}</div>
<div>{typeof value === "object" ? JSON.stringify(value) : value}</div>
<style>
div {
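
A minimal sketch of why the stringify guard above matters, presumably because plus (SQL) datasources can now return object-valued cells, which a bare Svelte interpolation would render as "[object Object]"; the value here is made up:

const value = { city: "Belfast" }
const cellText = typeof value === "object" ? JSON.stringify(value) : value
console.log(cellText) // {"city":"Belfast"}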


@ -37,7 +37,7 @@ Cypress.Commands.add("createApp", name => {
cy.contains("Create app").click()
})
.then(() => {
cy.get("[data-cy=new-table]", {
cy.get(".selected > .content", {
timeout: 20000,
}).should("be.visible")
})
@ -51,7 +51,7 @@ Cypress.Commands.add("deleteApp", () => {
.then(val => {
console.log(val)
if (val.length > 0) {
cy.get(".hoverable > use").click()
cy.get(".title > :nth-child(3) > .spectrum-Icon").click()
cy.contains("Delete").click()
cy.get(".spectrum-Button--warning").click()
}
@ -72,7 +72,8 @@ Cypress.Commands.add("createTestTableWithData", () => {
Cypress.Commands.add("createTable", tableName => {
// Enter table name
cy.get("[data-cy=new-table]").click()
cy.contains("Budibase DB").click()
cy.contains("Create new table").click()
cy.get(".spectrum-Modal").within(() => {
cy.get("input").first().type(tableName).blur()
cy.get(".spectrum-ButtonGroup").contains("Create").click()


@ -5,6 +5,7 @@
import { initialise } from "builderStore"
import { NotificationDisplay } from "@budibase/bbui"
import { parse, stringify } from "qs"
import HelpIcon from "components/common/HelpIcon.svelte"
onMount(async () => {
await initialise()
@ -16,6 +17,7 @@
<NotificationDisplay />
<Router {routes} config={{ queryHandler }} />
<div class="modal-container" />
<HelpIcon />
<style>
.modal-container {


@ -22,6 +22,7 @@ async function activate() {
if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
if (posthogConfigured) {
posthog.init(process.env.POSTHOG_TOKEN, {
autocapture: false,
api_host: process.env.POSTHOG_URL,
})
posthog.set_config({ persistence: "cookie" })


@ -85,7 +85,7 @@ const createScreen = table => {
.customProps({
dataSource: {
label: table.name,
name: `all_${table._id}`,
name: table._id,
tableId: table._id,
type: "table",
},


@ -76,7 +76,7 @@ const createScreen = table => {
.customProps({
dataSource: {
label: table.name,
name: `all_${table._id}`,
name: table._id,
tableId: table._id,
type: "table",
},


@ -17,31 +17,31 @@
let data = []
let loading = false
$: isUsersTable = $tables.selected?._id === TableNames.USERS
$: title = $tables.selected.name
$: schema = $tables.selected.schema
$: title = $tables.selected?.name
$: schema = $tables.selected?.schema
$: tableView = {
schema,
name: $views.selected?.name,
}
$: type = $tables.selected?.type
$: isInternal = type === "internal"
// Fetch rows for specified table
$: {
if ($views.selected?.name?.startsWith("all_")) {
loading = true
const loadingTableId = $tables.selected?._id
api.fetchDataForView($views.selected).then(rows => {
loading = false
loading = true
const loadingTableId = $tables.selected?._id
api.fetchDataForTable($tables.selected?._id).then(rows => {
loading = false
// If we started a slow request then quickly change table, sometimes
// the old data overwrites the new data.
// This check ensures that we don't do that.
if (loadingTableId !== $tables.selected?._id) {
return
}
// If we started a slow request then quickly change table, sometimes
// the old data overwrites the new data.
// This check ensures that we don't do that.
if (loadingTableId !== $tables.selected?._id) {
return
}
data = rows || []
})
}
data = rows || []
})
}
</script>
@ -50,11 +50,14 @@
{schema}
tableId={$tables.selected?._id}
{data}
{type}
allowEditing={true}
bind:hideAutocolumns
{loading}
>
<CreateColumnButton />
{#if isInternal}
<CreateColumnButton />
{/if}
{#if schema && Object.keys(schema).length > 0}
{#if !isUsersTable}
<CreateRowButton
@ -62,13 +65,17 @@
modalContentComponent={CreateEditRow}
/>
{/if}
<CreateViewButton />
{#if isInternal}
<CreateViewButton />
{/if}
<ManageAccessButton resourceId={$tables.selected?._id} />
{#if isUsersTable}
<EditRolesButton />
{/if}
<HideAutocolumnButton bind:hideAutocolumns />
{#if isInternal}
<HideAutocolumnButton bind:hideAutocolumns />
{/if}
<!-- always have the export last -->
<ExportButton view={tableView} />
<ExportButton view={$tables.selected?._id} />
{/if}
</Table>


@ -6,12 +6,13 @@
let loading = false
let error = false
let type = "external"
</script>
{#if error}
<div class="errors">{error}</div>
{/if}
<Table schema={query.schema} {data} {loading} rowCount={5} />
<Table schema={query.schema} {data} {loading} {type} rowCount={5} />
<style>
.errors {


@ -15,6 +15,7 @@
$: linkedTable = $tables.list.find(table => table._id === linkedTableId)
$: schema = linkedTable?.schema
$: table = $tables.list.find(table => table._id === tableId)
$: type = table?.type
$: fetchData(tableId, rowId)
$: {
let rowLabel = row?.[table?.primaryDisplay]
@ -33,5 +34,5 @@
</script>
{#if row && row._id === rowId}
<Table {title} {schema} {data} />
<Table {title} {schema} {data} {type} />
{/if}


@ -20,6 +20,7 @@
export let loading = false
export let hideAutocolumns
export let rowCount
export let type
let selectedRows = []
let editableColumn
@ -28,6 +29,7 @@
let editColumnModal
let customRenderers = []
$: isInternal = type !== "external"
$: isUsersTable = tableId === TableNames.USERS
$: data && resetSelectedRows()
$: editRowComponent = isUsersTable ? CreateEditUser : CreateEditRow
@ -73,9 +75,8 @@
}
const deleteRows = async () => {
await api.post(`/api/${tableId}/rows`, {
await api.delete(`/api/${tableId}/rows`, {
rows: selectedRows,
type: "delete",
})
data = data.filter(row => !selectedRows.includes(row))
notifications.success(`Successfully deleted ${selectedRows.length} rows`)
@ -125,7 +126,7 @@
bind:selectedRows
allowSelectRows={allowEditing && !isUsersTable}
allowEditRows={allowEditing}
allowEditColumns={allowEditing}
allowEditColumns={allowEditing && isInternal}
showAutoColumns={!hideAutocolumns}
on:editcolumn={e => editColumn(e.detail)}
on:editrow={e => editRow(e.detail)}


@ -11,19 +11,18 @@
import HideAutocolumnButton from "./buttons/HideAutocolumnButton.svelte"
export let view = {}
let hideAutocolumns = true
let hideAutocolumns = true
let data = []
let loading = false
let type = "internal"
$: name = view.name
// Fetch rows for specified view
$: {
if (!name.startsWith("all_")) {
loading = true
fetchViewData(name, view.field, view.groupBy, view.calculation)
}
loading = true
fetchViewData(name, view.field, view.groupBy, view.calculation)
}
async function fetchViewData(name, field, groupBy, calculation) {
@ -32,6 +31,7 @@
const thisView = allTableViews.filter(
views => views != null && views[name] != null
)[0]
// don't fetch view data if the view no longer exists
if (!thisView) {
return
@ -57,6 +57,7 @@
tableId={view.tableId}
{data}
{loading}
{type}
allowEditing={!view?.calculation}
bind:hideAutocolumns
>


@ -14,12 +14,15 @@ export async function saveRow(row, tableId) {
}
export async function deleteRow(row) {
const DELETE_ROWS_URL = `/api/${row.tableId}/rows/${row._id}/${row._rev}`
return api.delete(DELETE_ROWS_URL)
const DELETE_ROWS_URL = `/api/${row.tableId}/rows`
return api.delete(DELETE_ROWS_URL, {
_id: row._id,
_rev: row._rev,
})
}
export async function fetchDataForView(view) {
const FETCH_ROWS_URL = `/api/views/${view.name}`
export async function fetchDataForTable(tableId) {
const FETCH_ROWS_URL = `/api/${tableId}/rows`
const response = await api.get(FETCH_ROWS_URL)
const json = await response.json()
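
A hedged usage sketch of the two reworked helpers above; the ids are hypothetical and the api wrapper is assumed to serialize DELETE bodies as JSON:

// DELETE /api/ta_abc/rows with body { _id, _rev }
await deleteRow({ tableId: "ta_abc", _id: "ro_123", _rev: "1-abc" })
// GET /api/ta_abc/rows
const rows = await fetchDataForTable("ta_abc")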


@ -9,7 +9,7 @@
async function confirmDeletion() {
await deleteRows()
modal.hide()
modal?.hide()
}
</script>


@ -20,7 +20,9 @@
export let view = {}
$: viewTable = $tables.list.find(({ _id }) => _id === $views.selected.tableId)
$: viewTable = $tables.list.find(
({ _id }) => _id === $views.selected?.tableId
)
$: fields =
viewTable &&
Object.keys(viewTable.schema).filter(


@ -20,7 +20,7 @@
async function exportView() {
download(
`/api/views/export?view=${encodeURIComponent(
view.name
view
)}&format=${exportFormat}`
)
}


@ -57,7 +57,9 @@
export let view = {}
$: viewTable = $tables.list.find(({ _id }) => _id === $views.selected.tableId)
$: viewTable = $tables.list.find(
({ _id }) => _id === $views.selected?.tableId
)
$: fields = viewTable && Object.keys(viewTable.schema)
function saveView() {


@ -5,7 +5,9 @@
export let view = {}
$: viewTable = $tables.list.find(({ _id }) => _id === $views.selected.tableId)
$: viewTable = $tables.list.find(
({ _id }) => _id === $views.selected?.tableId
)
$: fields =
viewTable &&
Object.entries(viewTable.schema)


@ -1,10 +1,12 @@
<script>
import { onMount } from "svelte"
import { goto } from "@roxi/routify"
import { BUDIBASE_INTERNAL_DB } from "constants"
import { database, datasources, queries } from "stores/backend"
import EditDatasourcePopover from "./popovers/EditDatasourcePopover.svelte"
import EditQueryPopover from "./popovers/EditQueryPopover.svelte"
import NavItem from "components/common/NavItem.svelte"
import TableNavigator from "components/backend/TableNavigator/TableNavigator.svelte"
import ICONS from "./icons"
function selectDatasource(datasource) {
@ -13,9 +15,6 @@
}
function onClickQuery(query) {
if ($queries.selected === query._id) {
return
}
queries.select(query)
$goto(`./datasource/${query.datasourceId}/${query._id}`)
}
@ -42,8 +41,13 @@
width="18"
/>
</div>
<EditDatasourcePopover {datasource} />
{#if datasource._id !== BUDIBASE_INTERNAL_DB}
<EditDatasourcePopover {datasource} />
{/if}
</NavItem>
<TableNavigator sourceId={datasource._id} />
{#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
<NavItem
indentLevel={1}


@ -6,7 +6,6 @@
export let integration = {}
let schema
let integrations = []
async function fetchIntegrations() {
@ -18,13 +17,18 @@
}
function selectIntegration(integrationType) {
schema = integrations[integrationType].datasource
const selected = integrations[integrationType]
// build the schema
const schema = {}
for (let key in selected.datasource) {
schema[key] = selected.datasource[key].default
}
integration = {
type: integrationType,
...Object.keys(schema).reduce(
(acc, next) => ({ ...acc, [next]: schema[next].default }),
{}
),
plus: selected.plus,
...schema,
}
}


@ -0,0 +1,126 @@
<script>
export let width = "100"
export let height = "100"
</script>
<svg
version="1.1"
id="Layer_1"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
x="0px"
y="0px"
viewBox="0 0 48 48"
style="enable-background:new 0 0 48 48;"
xml:space="preserve"
{height}
{width}
>
<style type="text/css">
.st0 {
fill: #393c44;
}
.st1 {
fill: #ffffff;
}
.st2 {
fill: #4285f4;
}
</style>
<rect x="-152.17" y="-24.17" class="st0" width="96.17" height="96.17" />
<path
class="st1"
d="M-83.19,48h-41.79c-1.76,0-3.19-1.43-3.19-3.19V3.02c0-1.76,1.43-3.19,3.19-3.19h41.79
c1.76,0,3.19,1.43,3.19,3.19v41.79C-80,46.57-81.43,48-83.19,48z"
/>
<g>
<g>
<path
class="st0"
d="M-99.62,12.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58c0.86,0.39,1.59,0.91,2.19,1.57
c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89c-0.35,0.9-0.84,1.68-1.47,2.35
c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V35h-4.89V12.57H-99.62z
M-93.46,28.11c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69
c-0.38-0.17-0.79-0.26-1.24-0.26c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01
c-0.17,0.39-0.26,0.8-0.26,1.23c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68
c0.39,0.17,0.8,0.26,1.23,0.26c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1
C-93.55,28.92-93.46,28.52-93.46,28.11z"
/>
</g>
<g>
<path
class="st0"
d="M-114.76,12.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58
c0.86,0.39,1.59,0.91,2.19,1.57c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89
c-0.35,0.9-0.84,1.68-1.47,2.35c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V35
h-4.89V12.57H-114.76z M-108.6,28.11c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69
c-0.38-0.17-0.79-0.26-1.24-0.26c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01
c-0.17,0.39-0.26,0.8-0.26,1.23c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68
c0.39,0.17,0.8,0.26,1.23,0.26c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1
C-108.68,28.92-108.6,28.52-108.6,28.11z"
/>
</g>
</g>
<path
class="st2"
d="M44.81,159H3.02c-1.76,0-3.19-1.43-3.19-3.19v-41.79c0-1.76,1.43-3.19,3.19-3.19h41.79
c1.76,0,3.19,1.43,3.19,3.19v41.79C48,157.57,46.57,159,44.81,159z"
/>
<g>
<g>
<path
class="st1"
d="M28.38,123.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58c0.86,0.39,1.59,0.91,2.19,1.57
c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89c-0.35,0.9-0.84,1.68-1.47,2.35
c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V146h-4.89v-22.43H28.38z
M34.54,139.11c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69
c-0.38-0.17-0.79-0.26-1.24-0.26c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01
c-0.17,0.39-0.26,0.8-0.26,1.23c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68
c0.39,0.17,0.8,0.26,1.23,0.26c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1
C34.45,139.92,34.54,139.52,34.54,139.11z"
/>
</g>
<g>
<path
class="st1"
d="M13.24,123.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58c0.86,0.39,1.59,0.91,2.19,1.57
c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89c-0.35,0.9-0.84,1.68-1.47,2.35
c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V146H8.35v-22.43H13.24z M19.4,139.11
c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69c-0.38-0.17-0.79-0.26-1.24-0.26
c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01c-0.17,0.39-0.26,0.8-0.26,1.23
c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68c0.39,0.17,0.8,0.26,1.23,0.26
c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1C19.32,139.92,19.4,139.52,19.4,139.11z"
/>
</g>
</g>
<g>
<path
class="st0"
d="M44,48H4c-2.21,0-4-1.79-4-4V4c0-2.21,1.79-4,4-4h40c2.21,0,4,1.79,4,4v40C48,46.21,46.21,48,44,48z"
/>
<g>
<path
class="st1"
d="M28.48,12v10.44c1.18-1.27,2.65-1.9,4.42-1.9c1.05,0,2.01,0.2,2.89,0.61c0.87,0.41,1.62,0.96,2.24,1.65
c0.62,0.69,1.1,1.5,1.45,2.44c0.35,0.94,0.52,1.93,0.52,2.99c0,1.08-0.18,2.09-0.54,3.04c-0.36,0.95-0.86,1.77-1.51,2.47
c-0.64,0.7-1.4,1.25-2.28,1.66C34.8,35.8,33.86,36,32.84,36c-1.84,0-3.3-0.69-4.37-2.07v1.62h-5V12H28.48z M34.78,28.31
c0-0.45-0.08-0.88-0.25-1.29c-0.17-0.41-0.4-0.76-0.69-1.06c-0.3-0.3-0.64-0.54-1.02-0.72c-0.39-0.18-0.81-0.27-1.27-0.27
c-0.44,0-0.86,0.09-1.24,0.26c-0.39,0.17-0.72,0.41-1.01,0.71c-0.29,0.3-0.52,0.66-0.69,1.06c-0.18,0.41-0.26,0.84-0.26,1.29
s0.08,0.88,0.25,1.28c0.17,0.4,0.4,0.74,0.69,1.04c0.29,0.29,0.64,0.53,1.04,0.71c0.4,0.18,0.82,0.27,1.26,0.27
c0.44,0,0.86-0.09,1.24-0.26c0.39-0.17,0.72-0.41,1.01-0.71c0.29-0.3,0.52-0.65,0.69-1.05C34.69,29.16,34.78,28.75,34.78,28.31z"
/>
</g>
<g>
<path
class="st1"
d="M13,12v10.44c1.18-1.27,2.65-1.9,4.42-1.9c1.05,0,2.01,0.2,2.89,0.61c0.87,0.41,1.62,0.96,2.24,1.65
c0.62,0.69,1.1,1.5,1.45,2.44c0.35,0.94,0.52,1.93,0.52,2.99c0,1.08-0.18,2.09-0.54,3.04c-0.36,0.95-0.86,1.77-1.51,2.47
c-0.64,0.7-1.4,1.25-2.28,1.66C19.32,35.8,18.38,36,17.37,36c-1.84,0-3.3-0.69-4.37-2.07v1.62H8V12H13z M19.3,28.31
c0-0.45-0.08-0.88-0.25-1.29c-0.17-0.41-0.4-0.76-0.69-1.06c-0.3-0.3-0.64-0.54-1.02-0.72c-0.39-0.18-0.81-0.27-1.27-0.27
c-0.44,0-0.86,0.09-1.24,0.26c-0.39,0.17-0.72,0.41-1.01,0.71c-0.29,0.3-0.52,0.66-0.69,1.06c-0.18,0.41-0.26,0.84-0.26,1.29
s0.08,0.88,0.25,1.28c0.17,0.4,0.4,0.74,0.69,1.04c0.29,0.29,0.64,0.53,1.04,0.71c0.4,0.18,0.82,0.27,1.26,0.27
c0.44,0,0.86-0.09,1.24-0.26c0.39-0.17,0.72-0.41,1.01-0.71c0.29-0.3,0.52-0.65,0.69-1.05C19.21,29.16,19.3,28.75,19.3,28.31z"
/>
</g>
</g>
</svg>


@ -9,8 +9,10 @@ import SqlServer from "./SQLServer.svelte"
import MySQL from "./MySQL.svelte"
import ArangoDB from "./ArangoDB.svelte"
import Rest from "./Rest.svelte"
import Budibase from "./Budibase.svelte"
export default {
BUDIBASE: Budibase,
POSTGRES: Postgres,
DYNAMODB: DynamoDB,
MONGODB: MongoDB,


@ -23,16 +23,17 @@
}
async function saveDatasource() {
const { type, ...config } = integration
const { type, plus, ...config } = integration
// Create datasource
const response = await datasources.save({
name,
source: type,
config,
plus,
})
notifications.success(`Datasource ${name} created successfully.`)
analytics.captureEvent("Datasource Created", { name })
analytics.captureEvent("Datasource Created", { name, type })
// Navigate to new datasource
$goto(`./datasource/${response._id}`)


@ -6,6 +6,8 @@
import EditViewPopover from "./popovers/EditViewPopover.svelte"
import NavItem from "components/common/NavItem.svelte"
export let sourceId
$: selectedView = $views.selected && $views.selected.name
function selectTable(table) {
@ -31,12 +33,13 @@
{#if $database?._id}
<div class="hierarchy-items-container">
{#each $tables.list as table, idx}
{#each $tables.list.filter(table => table.sourceId === sourceId) as table, idx}
<NavItem
indentLevel={1}
border={idx > 0}
icon={table._id === TableNames.USERS ? "UserGroup" : "Table"}
text={table.name}
selected={selectedView === `all_${table._id}`}
selected={$tables.selected?._id === table._id}
on:click={() => selectTable(table)}
>
{#if table._id !== TableNames.USERS}
@ -45,7 +48,7 @@
</NavItem>
{#each Object.keys(table.views || {}) as viewName, idx (idx)}
<NavItem
indentLevel={1}
indentLevel={2}
icon="Remove"
text={viewName}
selected={selectedView === viewName}


@ -91,7 +91,7 @@
}
// Navigate to new table
$goto(`./table/${table._id}`)
$goto(`../../table/${table._id}`)
}
</script>


@ -1,15 +1,15 @@
<script>
import { goto } from "@roxi/routify"
import { store, allScreens } from "builderStore"
import { allScreens, store } from "builderStore"
import { tables } from "stores/backend"
import { notifications } from "@budibase/bbui"
import {
ActionMenu,
MenuItem,
Icon,
Input,
MenuItem,
Modal,
ModalContent,
Input,
notifications,
} from "@budibase/bbui"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
@ -22,9 +22,12 @@
let templateScreens
let willBeDeleted
$: external = table?.type === "external"
function showDeleteModal() {
const screens = $allScreens
templateScreens = screens.filter(screen => screen.autoTableId === table._id)
templateScreens = $allScreens.filter(
screen => screen.autoTableId === table._id
)
willBeDeleted = ["All table data"].concat(
templateScreens.map(screen => `Screen ${screen.props._instanceName}`)
)
@ -61,7 +64,9 @@
<Icon s hoverable name="MoreSmallList" />
</div>
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
{#if !external}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
{/if}
</ActionMenu>
<Modal bind:this={editorModal}>


@ -0,0 +1,17 @@
<script>
import { Icon } from "@budibase/bbui"
</script>
<a target="_blank" href="https://github.com/Budibase/budibase/discussions">
<Icon hoverable name="Help" size="XXL" />
</a>
<style>
a {
color: inherit;
position: absolute;
bottom: var(--spacing-m);
right: var(--spacing-m);
border-radius: 55%;
}
</style>


@ -1,10 +1,10 @@
<script>
import { onMount, onDestroy } from "svelte"
import { Button, Modal, notifications, ModalContent } from "@budibase/bbui"
import FeedbackIframe from "../feedback/FeedbackIframe.svelte"
import { store } from "builderStore"
import api from "builderStore/api"
import analytics from "analytics"
import FeedbackIframe from "components/feedback/FeedbackIframe.svelte"
const DeploymentStatus = {
SUCCESS: "SUCCESS",
@ -29,10 +29,6 @@
} else {
notifications.success(`Application published successfully`)
}
if (analytics.requestFeedbackOnDeploy()) {
feedbackModal.show()
}
} catch (err) {
analytics.captureException(err)
notifications.error(`Error publishing app: ${err}`)


@ -39,7 +39,7 @@
type: "table",
}))
$: views = $tablesStore.list.reduce((acc, cur) => {
let viewsArr = Object.entries(cur.views).map(([key, value]) => ({
let viewsArr = Object.entries(cur.views || {}).map(([key, value]) => ({
label: key,
name: key,
...value,


@ -0,0 +1,14 @@
<script>
import { Body } from "@budibase/bbui"
</script>
<div class="root">
<Body size="S">This action doesn't require any additional settings.</Body>
</div>
<style>
.root {
max-width: 800px;
margin: 0 auto;
}
</style>


@ -4,6 +4,7 @@ import DeleteRow from "./DeleteRow.svelte"
import ExecuteQuery from "./ExecuteQuery.svelte"
import TriggerAutomation from "./TriggerAutomation.svelte"
import ValidateForm from "./ValidateForm.svelte"
import LogOut from "./LogOut.svelte"
// Defines which actions are available to configure in the front end.
// Unfortunately the "name" property is used as the identifier so please don't
@ -37,4 +38,8 @@ export default [
name: "Validate Form",
component: ValidateForm,
},
{
name: "Log Out",
component: LogOut,
},
]


@ -31,3 +31,5 @@ export const LAYOUT_NAMES = {
PUBLIC: "layout_private_master",
},
}
export const BUDIBASE_INTERNAL_DB = "bb_internal"


@ -1,49 +1,27 @@
<script>
import { isActive, goto } from "@roxi/routify"
import { goto, params } from "@roxi/routify"
import { Icon, Modal, Tabs, Tab } from "@budibase/bbui"
import TableNavigator from "components/backend/TableNavigator/TableNavigator.svelte"
import { BUDIBASE_INTERNAL_DB } from "constants"
import DatasourceNavigator from "components/backend/DatasourceNavigator/DatasourceNavigator.svelte"
import CreateDatasourceModal from "components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte"
import CreateTableModal from "components/backend/TableNavigator/modals/CreateTableModal.svelte"
const tabs = [
{
title: "Internal",
key: "table",
},
{
title: "External",
key: "datasource",
},
]
let selected = $isActive("./datasource") ? "External" : "Internal"
function selectFirstTableOrSource({ detail }) {
const { key } = tabs.find(t => t.title === detail)
if (key === "datasource") {
$goto("./datasource")
} else {
$goto("./table")
}
}
let selected = "Sources"
let modal
$: isExternal =
$params.selectedDatasource &&
$params.selectedDatasource !== BUDIBASE_INTERNAL_DB
function selectFirstDatasource() {
$goto("./table")
}
</script>
<!-- routify:options index=0 -->
<div class="root">
<div class="nav">
<Tabs {selected} on:select={selectFirstTableOrSource}>
<Tab title="Internal">
<div class="tab-content-padding">
<TableNavigator />
<Modal bind:this={modal}>
<CreateTableModal />
</Modal>
</div>
</Tab>
<Tab title="External">
<Tabs {selected} on:select={selectFirstDatasource}>
<Tab title="Sources">
<div class="tab-content-padding">
<DatasourceNavigator />
<Modal bind:this={modal}>
@ -54,7 +32,7 @@
</Tabs>
<div
class="add-button"
data-cy={`new-${selected === "External" ? "datasource" : "table"}`}
data-cy={`new-${isExternal ? "datasource" : "table"}`}
>
<Icon hoverable name="AddCircle" on:click={modal.show} />
</div>


@ -1,13 +1 @@
<script>
import { params } from "@roxi/routify"
import { queries } from "stores/backend"
if ($params.query) {
const query = $queries.list.find(m => m._id === $params.query)
if (query) {
queries.select(query)
}
}
</script>
<slot />


@ -1,7 +1,7 @@
<script>
import { goto, beforeUrlChange } from "@roxi/routify"
import { Button, Heading, Body, Divider, Layout } from "@budibase/bbui"
import { datasources, integrations, queries } from "stores/backend"
import { datasources, integrations, queries, tables } from "stores/backend"
import { notifications } from "@budibase/bbui"
import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
import ICONS from "components/backend/DatasourceNavigator/icons"
@ -13,10 +13,25 @@
$: integration = datasource && $integrations[datasource.source]
async function saveDatasource() {
// Create datasource
await datasources.save(datasource)
notifications.success(`Datasource ${name} saved successfully.`)
unsaved = false
try {
// Create datasource
await datasources.save(datasource)
notifications.success(`Datasource ${name} updated successfully.`)
unsaved = false
} catch (err) {
notifications.error(`Error saving datasource: ${err}`)
}
}
async function updateDatasourceSchema() {
try {
await datasources.updateSchema(datasource)
notifications.success(`Datasource ${name} tables updated successfully.`)
unsaved = false
await tables.fetch()
} catch (err) {
notifications.error(`Error updating datasource schema: ${err}`)
}
}
function onClickQuery(query) {
@ -24,6 +39,11 @@
$goto(`./${query._id}`)
}
function onClickTable(table) {
tables.select(table)
$goto(`../../table/${table._id}`)
}
function setUnsaved() {
unsaved = true
}
@ -39,7 +59,7 @@
})
</script>
{#if datasource}
{#if datasource && integration}
<section>
<Layout>
<header>
@ -66,6 +86,34 @@
on:change={setUnsaved}
/>
</div>
{#if datasource.plus}
<Divider />
<div class="query-header">
<Heading size="S">Tables</Heading>
<Button primary on:click={updateDatasourceSchema}
>Fetch Tables From Database</Button
>
</div>
<Body>
This datasource can determine tables automatically. Budibase can fetch
your tables directly from the database, and you can use them without
writing any queries at all.
</Body>
<div class="query-list">
{#if datasource.entities}
{#each Object.keys(datasource.entities) as entity}
<div
class="query-list-item"
on:click={() => onClickTable(datasource.entities[entity])}
>
<p class="query-name">{entity}</p>
<p>Primary Key: {datasource.entities[entity].primary}</p>
<p></p>
</div>
{/each}
{/if}
</div>
{/if}
<Divider />
<div class="query-header">
<Heading size="S">Queries</Heading>


@ -0,0 +1,81 @@
<script>
import { Button, Heading, Body, Layout, Modal, Divider } from "@budibase/bbui"
import CreateTableModal from "components/backend/TableNavigator/modals/CreateTableModal.svelte"
import ICONS from "components/backend/DatasourceNavigator/icons"
import { tables } from "stores/backend"
import { goto } from "@roxi/routify"
let modal
</script>
<Modal bind:this={modal}>
<CreateTableModal />
</Modal>
<section>
<Layout>
<header>
<svelte:component this={ICONS.BUDIBASE} height="26" width="26" />
<Heading size="M">Budibase Internal</Heading>
</header>
<Body size="S" grey lh
>Budibase internal tables are part of your app, the data will be stored in
your apps context.</Body
>
<Divider />
<Heading size="S">Tables</Heading>
<div class="table-list">
{#each $tables.list.filter(table => table.type !== "external") as table}
<div
class="table-list-item"
on:click={$goto(`../../table/${table._id}`)}
>
<Body size="S">{table.name}</Body>
{#if table.primaryDisplay}
<Body size="S">display column: {table.primaryDisplay}</Body>
{/if}
</div>
{/each}
</div>
<div>
<Button cta on:click={modal.show}>Create new table</Button>
</div>
</Layout>
</section>
<style>
section {
margin: 0 auto;
width: 640px;
}
header {
margin: 0 0 var(--spacing-xs) 0;
display: flex;
gap: var(--spacing-l);
align-items: center;
}
.table-list {
display: flex;
flex-direction: column;
gap: var(--spacing-m);
}
.table-list-item {
border-radius: var(--border-radius-m);
background: var(--background);
border: var(--border-dark);
display: grid;
grid-template-columns: 2fr 0.75fr 20px;
align-items: center;
padding: var(--spacing-m);
gap: var(--layout-xs);
transition: 200ms background ease;
}
.table-list-item:hover {
background: var(--grey-1);
cursor: pointer;
}
</style>


@ -0,0 +1,13 @@
<script>
import { params } from "@roxi/routify"
import { tables } from "stores/backend"
if ($params.selectedTable) {
const table = $tables.list.find(m => m._id === $params.selectedTable)
if (table) {
tables.select(table)
}
}
</script>
<slot />


@ -0,0 +1,16 @@
<script>
import TableDataTable from "components/backend/DataTable/DataTable.svelte"
import { tables, database } from "stores/backend"
</script>
{#if $database?._id && $tables?.selected?.name}
<TableDataTable />
{:else}<i>Create your first table to start building</i>{/if}
<style>
i {
font-size: var(--font-size-m);
color: var(--grey-5);
margin-top: 2px;
}
</style>


@ -0,0 +1,10 @@
<script>
import { params } from "@roxi/routify"
import RelationshipDataTable from "components/backend/DataTable/RelationshipDataTable.svelte"
</script>
<RelationshipDataTable
tableId={$params.selectedTable}
rowId={$params.selectedRow}
fieldName={decodeURI($params.selectedField)}
/>


@ -0,0 +1,6 @@
<script>
import { goto } from "@roxi/routify"
$goto("../../")
</script>
<!-- routify:options index=false -->


@ -0,0 +1,6 @@
<script>
import { goto } from "@roxi/routify"
$goto("../")
</script>
<!-- routify:options index=false -->


@ -0,0 +1,19 @@
<script>
import { tables } from "stores/backend"
import { goto, leftover } from "@roxi/routify"
import { onMount } from "svelte"
onMount(async () => {
// navigate to first table in list, if not already selected
// and this is the final url (i.e. no selectedTable)
if (
!$leftover &&
$tables.list.length > 0
// (!$tables.selected || !$tables.selected._id)
) {
$goto(`./${$tables.list[0]._id}`)
}
})
</script>
<slot />


@ -0,0 +1,21 @@
<script>
import { goto } from "@roxi/routify"
import { onMount } from "svelte"
import { tables } from "stores/backend"
onMount(async () => {
$tables.list.length > 0 && $goto(`./${$tables.list[0]._id}`)
})
</script>
{#if $tables.list.length === 0}
<i>Create your first table to start building</i>
{:else}<i>Select a table to edit</i>{/if}
<style>
i {
font-size: var(--font-size-m);
color: var(--grey-5);
margin-top: 2px;
}
</style>


@ -1,4 +1,5 @@
<script>
import { onMount } from "svelte"
import {
Layout,
Heading,
@ -7,9 +8,12 @@
Divider,
notifications,
} from "@budibase/bbui"
import api from "builderStore/api"
import { auth } from "stores/portal"
import { redirect } from "@roxi/routify"
let version
// Only admins allowed here
$: {
if (!$auth.isAdmin) {
@ -26,10 +30,20 @@
},
})
notifications.success("Your budibase installation is up to date.")
getVersion()
} catch (err) {
notifications.error(`Error installing Budibase update: ${err}`)
}
}
async function getVersion() {
const response = await api.get("/api/dev/version")
version = await response.text()
}
onMount(() => {
getVersion()
})
</script>
{#if $auth.isAdmin}
@ -43,6 +57,11 @@
</Layout>
<Divider size="S" />
<div class="fields">
<div class="field">
{#if version}
Current Version: {version}
{/if}
</div>
<div class="field">
<Button cta on:click={updateBudibase}>Check For Updates</Button>
</div>


@ -1,5 +1,5 @@
import { writable } from "svelte/store"
import { queries } from "./"
import { queries, tables, views } from "./"
import api from "../../builderStore/api"
export const INITIAL_DATASOURCE_VALUES = {
@ -21,17 +21,53 @@ export function createDatasourcesStore() {
fetch: async () => {
const response = await api.get(`/api/datasources`)
const json = await response.json()
update(state => ({ ...state, list: json }))
update(state => ({ ...state, list: json, selected: null }))
return json
},
select: async datasourceId => {
update(state => ({ ...state, selected: datasourceId }))
queries.update(state => ({ ...state, selected: null }))
queries.unselect()
tables.unselect()
views.unselect()
},
unselect: () => {
update(state => ({ ...state, selected: null }))
},
updateSchema: async datasource => {
let url = `/api/datasources/${datasource._id}/schema`
const response = await api.post(url)
const json = await response.json()
if (response.status !== 200) {
throw new Error(json.message)
}
update(state => {
const currentIdx = state.list.findIndex(ds => ds._id === json._id)
const sources = state.list
if (currentIdx >= 0) {
sources.splice(currentIdx, 1, json)
} else {
sources.push(json)
}
return { list: sources, selected: json._id }
})
return json
},
save: async datasource => {
const response = await api.post("/api/datasources", datasource)
let url = "/api/datasources"
const response = await api.post(url, datasource)
const json = await response.json()
if (response.status !== 200) {
throw new Error(json.message)
}
update(state => {
const currentIdx = state.list.findIndex(ds => ds._id === json._id)
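
A hedged usage sketch of the new updateSchema action, mirroring the updateDatasourceSchema handler in the datasource page earlier in this diff:

try {
  const updated = await datasources.updateSchema(datasource)
  // entities holds the tables fetched from the database
  console.log(Object.keys(updated.entities || {}))
} catch (err) {
  // non-200 responses arrive here as the thrown json.message
}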


@ -1,5 +1,5 @@
import { writable, get } from "svelte/store"
import { datasources, integrations } from "./"
import { datasources, integrations, tables } from "./"
import api from "builderStore/api"
export function createQueriesStore() {
@ -55,11 +55,14 @@ export function createQueriesStore() {
},
select: query => {
update(state => ({ ...state, selected: query._id }))
datasources.update(state => ({
tables.update(state => ({
...state,
selected: query.datasourceId,
selected: null,
}))
},
unselect: () => {
update(state => ({ ...state, selected: null }))
},
delete: async query => {
const response = await api.delete(
`/api/queries/${query._id}/${query._rev}`


@ -1,13 +1,13 @@
import { writable, get } from "svelte/store"
import { views } from "./"
import { tables } from "./"
export function createRowsStore() {
const { subscribe } = writable([])
return {
subscribe,
save: () => views.select(get(views).selected),
delete: () => views.select(get(views).selected),
save: () => tables.select(get(tables).selected),
delete: () => tables.select(get(tables).selected),
}
}


@ -1,5 +1,5 @@
import { writable, get } from "svelte/store"
import { views } from "./"
import { views, queries, datasources } from "./"
import { cloneDeep } from "lodash/fp"
import api from "builderStore/api"
@ -25,7 +25,9 @@ export function createTablesStore() {
selected: table,
draft: cloneDeep(table),
}))
views.select({ name: `all_${table._id}` })
views.unselect()
queries.unselect()
datasources.unselect()
}
}
@ -66,8 +68,15 @@ export function createTablesStore() {
return {
subscribe,
update,
fetch,
select,
unselect: () => {
update(state => ({
...state,
selected: null,
}))
},
save,
init: async () => {
const response = await api.get("/api/tables")


@ -24,10 +24,10 @@ describe("Datasources Store", () => {
})
it("fetches all the datasources and updates the store", async () => {
api.get.mockReturnValue({ json: () => [SOME_DATASOURCE]})
api.get.mockReturnValue({ json: () => [SOME_DATASOURCE] })
await store.fetch()
expect(get(store)).toEqual({ list: [SOME_DATASOURCE], selected: null})
expect(get(store)).toEqual({ list: [SOME_DATASOURCE], selected: null })
})
it("selects a datasource", async () => {
@ -44,7 +44,7 @@ describe("Datasources Store", () => {
})
it("saves the datasource, updates the store and returns status message", async () => {
api.post.mockReturnValue({ json: () => SAVE_DATASOURCE})
api.post.mockReturnValue({ status: 200, json: () => SAVE_DATASOURCE})
await store.save({
name: 'CoolDB',


@ -30,13 +30,6 @@ describe("Queries Store", () => {
expect(get(store)).toEqual({ list: [SOME_QUERY], selected: null})
})
it("selects a query and updates selected datasource", async () => {
await store.select(SOME_QUERY)
expect(get(store).selected).toEqual(SOME_QUERY._id)
expect(get(datasources).selected).toEqual(SOME_QUERY.datasourceId)
})
it("saves the query, updates the store and returns status message", async () => {
api.post.mockReturnValue({ json: () => SAVE_QUERY_RESPONSE})


@ -41,14 +41,6 @@ describe("Tables Store", () => {
expect(get(store).draft).toEqual({})
})
it("selecting a table updates the view store", async () => {
const tableToSelect = SOME_TABLES[0]
await store.select(tableToSelect)
expect(get(store).selected).toEqual(tableToSelect)
expect(get(views).selected).toEqual({ name: `all_${tableToSelect._id}` })
})
it("saving a table also selects it", async () => {
api.post.mockReturnValue({ json: () => SAVE_TABLES_RESPONSE})


@ -1,5 +1,5 @@
import { writable, get } from "svelte/store"
import { tables } from "./"
import { tables, datasources, queries } from "./"
import api from "builderStore/api"
export function createViewsStore() {
@ -10,11 +10,20 @@ export function createViewsStore() {
return {
subscribe,
update,
select: async view => {
update(state => ({
...state,
selected: view,
}))
queries.unselect()
datasources.unselect()
},
unselect: () => {
update(state => ({
...state,
selected: null,
}))
},
delete: async view => {
await api.delete(`/api/views/${view}`)

File diff suppressed because it is too large.


@ -44,7 +44,7 @@ export const updateRow = async row => {
return
}
const res = await API.patch({
url: `/api/${row.tableId}/rows/${row._id}`,
url: `/api/${row.tableId}/rows`,
body: row,
})
res.error
@ -65,7 +65,11 @@ export const deleteRow = async ({ tableId, rowId, revId }) => {
return
}
const res = await API.del({
url: `/api/${tableId}/rows/${rowId}/${revId}`,
url: `/api/${tableId}/rows`,
body: {
_id: rowId,
_rev: revId,
},
})
res.error
? notificationStore.danger("An error has occurred")
@ -84,11 +88,10 @@ export const deleteRows = async ({ tableId, rows }) => {
if (!tableId || !rows) {
return
}
const res = await API.post({
const res = await API.del({
url: `/api/${tableId}/rows`,
body: {
rows,
type: "delete",
},
})
res.error


@ -45,7 +45,7 @@ export const searchTable = async ({
}
}
const res = await API.post({
url: `/api/search/${tableId}/rows`,
url: `/api/${tableId}/search`,
body: {
query,
bookmark,
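
Pulling the client changes together, a hedged summary of the row endpoints after this PR, inferred only from the calls above:

const rowRoutes = {
  update: { method: "PATCH", url: "/api/:tableId/rows" }, // body: the row itself
  remove: { method: "DELETE", url: "/api/:tableId/rows" }, // body: { _id, _rev } or { rows: [...] }
  search: { method: "POST", url: "/api/:tableId/search" }, // body: { query, bookmark, ... }
}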


@ -28,10 +28,10 @@
chalk "^2.0.0"
js-tokens "^4.0.0"
"@budibase/bbui@^0.9.47":
version "0.9.47"
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.47.tgz#d8664a05203432d522cd91a0bad1cdd8518baf93"
integrity sha512-LXvJCgUSoc4EJKafBaKfUzU4GUOQGmts/8F4V6LTFtTyMZavgq2/KFAgPbR3QeYvidLsshtwop/pQfoszXTQnQ==
"@budibase/bbui@^0.9.53":
version "0.9.53"
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.53.tgz#b6841a31ff2c28feb929c57f7f10a1dae1b3aea3"
integrity sha512-jO11Ky1KhPGRv922jMRO49FeTY1TeF3u2JaBJYBkVY95il3uPOI20M1AdA6w2emppDlyP6FSEHk+prdra4Lndw==
dependencies:
"@adobe/spectrum-css-workflow-icons" "^1.2.1"
"@spectrum-css/actionbutton" "^1.0.1"
@ -108,12 +108,12 @@
to-gfm-code-block "^0.1.1"
year "^0.2.1"
"@budibase/standard-components@^0.9.47":
version "0.9.47"
resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.47.tgz#8e4f27c43b5a6f65d3d296c61f842195e297f061"
integrity sha512-0+Ndg67Jgk7cqOYluGKpixNFvEqvy2oguKLEr1l83Sf0oWTQ3RCmUGs2mU66ljwnE+o4/JN/EdkA2uSqKInQtg==
"@budibase/standard-components@^0.9.53":
version "0.9.53"
resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.53.tgz#5e8d84bf4c3b1ceadfc40b5b5b6c0513b6283fc5"
integrity sha512-8QJmjwF51vh+rCiLbk+JLqCNZZBq9M8/LuyQOGvjnhB8l6DNrfjnCypP/xYoBf0uUvlki8TeNuZKQmDpBBnR7A==
dependencies:
"@budibase/bbui" "^0.9.47"
"@budibase/bbui" "^0.9.53"
"@spectrum-css/page" "^3.0.1"
"@spectrum-css/vars" "^3.0.1"
apexcharts "^3.22.1"
@ -121,10 +121,10 @@
svelte-apexcharts "^1.0.2"
svelte-flatpickr "^3.1.0"
"@budibase/string-templates@^0.9.47":
version "0.9.47"
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.47.tgz#484ce5ce29a6ddaef3480368b1a24ce8c3852324"
integrity sha512-I16Ps4AW7VW8MrSdsoZdwLutiX7GhRkiH6m1AdFcmzh2mZI6YyFM000PuKGEt+sREXK2NI6cBzmi9ZpKIAPJJw==
"@budibase/string-templates@^0.9.53":
version "0.9.53"
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.53.tgz#9228965afcef4cc19f7d016b291d5f298bb3b725"
integrity sha512-TL3Zx6VN+YpbIT5vtuTXEv2SOAwmx+YN42pbWxH3ExHj54bvhmRxSS+xJySs75kWdvQU4OMnYywQcMeMsqkOqg==
dependencies:
"@budibase/handlebars-helpers" "^0.11.4"
dayjs "^1.10.4"
@ -4030,9 +4030,9 @@ serialize-javascript@^4.0.0:
randombytes "^2.1.0"
set-getter@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.0.tgz#d769c182c9d5a51f409145f2fba82e5e86e80376"
integrity sha1-12nBgsnVpR9AkUXy+6guXoboA3Y=
version "0.1.1"
resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.1.tgz#a3110e1b461d31a9cfc8c5c9ee2e9737ad447102"
integrity sha512-9sVWOy+gthr+0G9DzqqLaYNA7+5OKkSmcqjL9cBpDEaZrr3ShQlyX2cZ/O/ozE41oxn/Tt0LGEM/w4Rub3A3gw==
dependencies:
to-object-path "^0.3.0"
@ -4233,9 +4233,9 @@ string_decoder@~1.1.1:
safe-buffer "~5.1.0"
striptags@^3.1.0:
version "3.1.1"
resolved "https://registry.yarnpkg.com/striptags/-/striptags-3.1.1.tgz#c8c3e7fdd6fb4bb3a32a3b752e5b5e3e38093ebd"
integrity sha1-yMPn/db7S7OjKjt1LltePjgJPr0=
version "3.2.0"
resolved "https://registry.yarnpkg.com/striptags/-/striptags-3.2.0.tgz#cc74a137db2de8b0b9a370006334161f7dd67052"
integrity sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw==
style-inject@^0.3.0:
version "0.3.0"


@ -2,7 +2,9 @@ const mysql = {}
const client = {
connect: jest.fn(),
query: jest.fn(),
query: jest.fn((query, bindings, fn) => {
fn(null, [])
}),
}
mysql.createConnection = jest.fn(() => client)
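
With this mock in place, code under test that issues connection.query(sql, bindings, callback) receives (null, []), i.e. no error and zero rows. A minimal sketch:

const connection = mysql.createConnection({})
connection.query("SELECT 1", [], (err, rows) => {
  console.log(err, rows) // null []
})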


@ -78,6 +78,7 @@
"jimp": "0.16.1",
"joi": "17.2.1",
"jsonschema": "1.4.0",
"knex": "^0.95.6",
"koa": "2.7.0",
"koa-body": "4.2.0",
"koa-compress": "4.0.1",


@ -0,0 +1,21 @@
# Use root/example as user/password credentials
version: '3.1'
services:
db:
image: mysql
restart: always
command: --init-file /data/application/init.sql --default-authentication-plugin=mysql_native_password
volumes:
- ./init.sql:/data/application/init.sql
environment:
MYSQL_ROOT_PASSWORD: root
ports:
- 3306:3306
adminer:
image: adminer
restart: always
ports:
- 8080:8080


@ -0,0 +1,9 @@
CREATE DATABASE IF NOT EXISTS main;
USE main;
CREATE TABLE Persons (
PersonID int NOT NULL PRIMARY KEY,
LastName varchar(255),
FirstName varchar(255),
Address varchar(255),
City varchar(255)
);
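
A hedged sketch of connecting to the compose service above with the mysql driver, using the credentials from the docker-compose environment (root/root, database "main"):

const mysql = require("mysql")
const connection = mysql.createConnection({
  host: "localhost",
  port: 3306,
  user: "root",
  password: "root",
  database: "main",
})
connection.query("SELECT * FROM Persons", (err, rows) => {
  console.log(err, rows)
  connection.end()
})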


@ -0,0 +1,28 @@
version: "3.8"
services:
db:
container_name: postgres
image: postgres
restart: always
environment:
POSTGRES_USER: root
POSTGRES_PASSWORD: root
POSTGRES_DB: main
ports:
- "5432:5432"
volumes:
#- pg_data:/var/lib/postgresql/data/
- ./init.sql:/docker-entrypoint-initdb.d/init.sql
pgadmin:
container_name: pgadmin
image: dpage/pgadmin4
restart: always
environment:
PGADMIN_DEFAULT_EMAIL: root@root.com
PGADMIN_DEFAULT_PASSWORD: root
ports:
- "5050:80"
#volumes:
# pg_data:


@ -0,0 +1,9 @@
SELECT 'CREATE DATABASE main'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
CREATE TABLE Persons (
PersonID int NOT NULL PRIMARY KEY,
LastName varchar(255),
FirstName varchar(255),
Address varchar(255),
City varchar(255)
);
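
The Postgres equivalent, a hedged sketch using node-postgres against the compose service above (root/root, database "main", port 5432):

const { Client } = require("pg")
async function main() {
  const client = new Client({
    host: "localhost",
    port: 5432,
    user: "root",
    password: "root",
    database: "main",
  })
  await client.connect()
  const res = await client.query("SELECT * FROM Persons")
  console.log(res.rows)
  await client.end()
}
main()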


@ -59,7 +59,7 @@ async function checkForCronTriggers({ appId, oldAuto, newAuto }) {
const cronTriggerActivated = isLive(newAuto) && !isLive(oldAuto)
if (cronTriggerRemoved || cronTriggerDeactivated) {
if (cronTriggerRemoved || (cronTriggerDeactivated && oldTrigger.cronJobId)) {
await triggers.automationQueue.removeRepeatableByKey(oldTrigger.cronJobId)
}
// need to create cron job


@ -3,26 +3,67 @@ const {
generateDatasourceID,
getDatasourceParams,
getQueryParams,
DocumentTypes,
BudibaseInternalDB,
getTableParams,
} = require("../../db/utils")
const { integrations } = require("../../integrations")
const { makeExternalQuery } = require("./row/utils")
exports.fetch = async function (ctx) {
const database = new CouchDB(ctx.appId)
ctx.body = (
// Get internal tables
const db = new CouchDB(ctx.appId)
const internalTables = await db.allDocs(
getTableParams(null, {
include_docs: true,
})
)
const internal = internalTables.rows.map(row => row.doc)
const bbInternalDb = {
...BudibaseInternalDB,
entities: internal,
}
// Get external datasources
const datasources = (
await database.allDocs(
getDatasourceParams(null, {
include_docs: true,
})
)
).rows.map(row => row.doc)
ctx.body = [bbInternalDb, ...datasources]
}
exports.buildSchemaFromDb = async function (ctx) {
const db = new CouchDB(ctx.appId)
const datasourceId = ctx.params.datasourceId
const datasource = await db.get(datasourceId)
const Connector = integrations[datasource.source]
// Connect to the DB and build the schema
const connector = new Connector(datasource.config)
await connector.buildSchema(datasource._id)
datasource.entities = connector.tables
const response = await db.post(datasource)
datasource._rev = response.rev
ctx.body = datasource
}
exports.save = async function (ctx) {
const db = new CouchDB(ctx.appId)
const plus = ctx.request.body.plus
const datasource = {
_id: generateDatasourceID(),
type: "datasource",
_id: generateDatasourceID({ plus }),
type: plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE,
...ctx.request.body,
}
@ -30,9 +71,11 @@ exports.save = async function (ctx) {
datasource._rev = response.rev
// Drain connection pools when configuration is changed
const source = integrations[datasource.source]
if (source && source.pool) {
await source.pool.end()
if (datasource.source) {
const source = integrations[datasource.source]
if (source && source.pool) {
await source.pool.end()
}
}
ctx.status = 200
@ -58,3 +101,13 @@ exports.find = async function (ctx) {
const database = new CouchDB(ctx.appId)
ctx.body = await database.get(ctx.params.datasourceId)
}
// dynamic query functionality
exports.query = async function (ctx) {
const queryJson = ctx.request.body
try {
ctx.body = await makeExternalQuery(ctx.appId, queryJson)
} catch (err) {
ctx.throw(400, err)
}
}
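
A hedged sketch of the shape fetch() now returns: the Budibase internal DB entry first, with its tables inlined as entities, followed by the external datasource documents. Field values here are illustrative only:

const exampleBody = [
  { _id: "bb_internal", entities: [/* internal table docs */] },
  { _id: "datasource_abc", source: "POSTGRES", plus: true },
]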


@ -93,3 +93,7 @@ exports.revert = async ctx => {
ctx.throw(400, `Unable to revert. ${err}`)
}
}
exports.getBudibaseVersion = async ctx => {
ctx.body = require("../../../package.json").version
}


@ -160,6 +160,8 @@ exports.execute = async function (ctx) {
)
const integration = new Integration(datasource.config)
console.log(query)
// ctx.body = {}
// call the relevant CRUD method on the integration class
ctx.body = formatResponse(await integration[query.queryVerb](enrichedQuery))
// cleanup


@ -0,0 +1,276 @@
const { makeExternalQuery } = require("./utils")
const { DataSourceOperation, SortDirection } = require("../../../constants")
const { getExternalTable } = require("../table/utils")
const {
breakExternalTableId,
generateRowIdField,
breakRowIdField,
} = require("../../../integrations/utils")
const { cloneDeep } = require("lodash/fp")
function inputProcessing(row, table) {
if (!row) {
return row
}
let newRow = {}
for (let key of Object.keys(table.schema)) {
// currently excludes empty strings
if (row[key]) {
newRow[key] = row[key]
}
}
return newRow
}
function generateIdForRow(row, table) {
if (!row) {
return
}
const primary = table.primary
// build id array
let idParts = []
for (let field of primary) {
idParts.push(row[field])
}
return generateRowIdField(idParts)
}
function outputProcessing(rows, table) {
// if no rows are returned, this is the shape we get (might be PG-only)
if (rows[0].read === true) {
return []
}
for (let row of rows) {
row._id = generateIdForRow(row, table)
row.tableId = table._id
row._rev = "rev"
}
return rows
}
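
A hedged illustration of the synthetic _id attached on egress: the primary key values are collected in order and encoded by generateRowIdField, whose exact format lives in integrations/utils and is reversed by breakRowIdField. The table here is hypothetical:

const table = { _id: "datasource_abc_Persons", primary: ["PersonID"] }
const id = generateIdForRow({ PersonID: 1, LastName: "Smith" }, table)
// id encodes the parts [1]; breakRowIdField(id) recovers them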
function buildFilters(id, filters, table) {
const primary = table.primary
// if passed in array need to copy for shifting etc
let idCopy = cloneDeep(id)
if (filters) {
// need to map over the filters and make sure the _id field isn't present
for (let filter of Object.values(filters)) {
if (filter._id) {
const parts = breakRowIdField(filter._id)
for (let field of primary) {
filter[field] = parts.shift()
}
}
// make sure this field doesn't exist on any filter
delete filter._id
}
}
// there is no id, just use the user provided filters
if (!idCopy || !table) {
return filters
}
// if used as URL parameter it will have been joined
if (typeof idCopy === "string") {
idCopy = breakRowIdField(idCopy)
}
const equal = {}
for (let field of primary) {
// work through the ID and get the parts
equal[field] = idCopy.shift()
}
return {
equal,
}
}
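
A hedged worked example of buildFilters for a table whose primary key is PersonID; the id parts arrive already split by breakRowIdField:

const filters = buildFilters(["1"], {}, { primary: ["PersonID"] })
console.log(filters) // { equal: { PersonID: "1" } }

Any _id key inside user-supplied filters is expanded the same way and then deleted, so the synthetic id never reaches the SQL layer.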
async function handleRequest(
appId,
operation,
tableId,
{ id, row, filters, sort, paginate } = {}
) {
let { datasourceId, tableName } = breakExternalTableId(tableId)
const table = await getExternalTable(appId, datasourceId, tableName)
if (!table) {
throw `Unable to process query, table "${tableName}" not defined.`
}
// clean up row on ingress using schema
filters = buildFilters(id, filters, table)
row = inputProcessing(row, table)
if (
operation === DataSourceOperation.DELETE &&
(filters == null || Object.keys(filters).length === 0)
) {
throw "Deletion must be filtered"
}
let json = {
endpoint: {
datasourceId,
entityId: tableName,
operation,
},
resource: {
// not specifying any fields means "*"
fields: [],
},
filters,
sort,
paginate,
body: row,
// pass an id filter into extra, purely for mysql/returning
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
// we searched for rows in someway
if (operation === DataSourceOperation.READ && Array.isArray(response)) {
return outputProcessing(response, table)
} else {
row = outputProcessing(response, table)[0]
return { row, table }
}
}
exports.patch = async ctx => {
const appId = ctx.appId
const inputs = ctx.request.body
const tableId = ctx.params.tableId
const id = breakRowIdField(inputs._id)
// don't save the ID to db
delete inputs._id
return handleRequest(appId, DataSourceOperation.UPDATE, tableId, {
id,
row: inputs,
})
}
exports.save = async ctx => {
const appId = ctx.appId
const inputs = ctx.request.body
const tableId = ctx.params.tableId
return handleRequest(appId, DataSourceOperation.CREATE, tableId, {
row: inputs,
})
}
exports.fetchView = async ctx => {
// there are no views in external data sources, so this should never be
// called; for now, just fetch
ctx.params.tableId = ctx.params.viewName.split("all_")[1]
return exports.fetch(ctx)
}
exports.fetch = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId
return handleRequest(appId, DataSourceOperation.READ, tableId)
}
exports.find = async ctx => {
const appId = ctx.appId
const id = ctx.params.rowId
const tableId = ctx.params.tableId
return handleRequest(appId, DataSourceOperation.READ, tableId, {
id,
})
}
exports.destroy = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId
const id = ctx.request.body._id
const { row } = await handleRequest(
appId,
DataSourceOperation.DELETE,
tableId,
{
id,
}
)
return { response: { ok: true }, row }
}
exports.bulkDestroy = async ctx => {
const appId = ctx.appId
const { rows } = ctx.request.body
const tableId = ctx.params.tableId
let promises = []
for (let row of rows) {
promises.push(
handleRequest(appId, DataSourceOperation.DELETE, tableId, {
id: breakRowIdField(row._id),
})
)
}
const responses = await Promise.all(promises)
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
}
exports.search = async ctx => {
const appId = ctx.appId
const tableId = ctx.params.tableId
const { paginate, query, ...params } = ctx.request.body
let { bookmark, limit } = params
if (!bookmark && paginate) {
bookmark = 1
}
let paginateObj = {}
if (paginate) {
paginateObj = {
// add one so we can track if there is another page
limit: limit,
page: bookmark,
}
} else if (params && limit) {
paginateObj = {
limit: limit,
}
}
let sort
if (params.sort) {
const direction =
params.sortOrder === "descending"
? SortDirection.DESCENDING
: SortDirection.ASCENDING
sort = {
[params.sort]: direction,
}
}
const rows = await handleRequest(appId, DataSourceOperation.READ, tableId, {
filters: query,
sort,
paginate: paginateObj,
})
let hasNextPage = false
if (paginate && rows.length === limit) {
const nextRows = await handleRequest(
appId,
DataSourceOperation.READ,
tableId,
{
filters: query,
sort,
paginate: {
limit: 1,
page: bookmark * limit + 1,
},
}
)
hasNextPage = nextRows.length > 0
}
// need wrapper object for bookmarks etc when paginating
return { rows, hasNextPage, bookmark: bookmark + 1 }
}
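
The look-ahead above reduces to a little arithmetic: after a full page, a second READ requests a single row further on, and any result means another page exists. Values here are illustrative:

const limit = 10
const bookmark = 2
const probe = { limit: 1, page: bookmark * limit + 1 } // { limit: 1, page: 21 }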
exports.validate = async () => {
// can't validate external right now - maybe in future
return { valid: true }
}
exports.fetchEnrichedRow = async () => {
// TODO: How does this work
throw "Not Implemented"
}


@ -0,0 +1,138 @@
const internal = require("./internal")
const external = require("./external")
const { isExternalTable } = require("../../../integrations/utils")
function pickApi(tableId) {
if (isExternalTable(tableId)) {
return external
}
return internal
}
function getTableId(ctx) {
if (ctx.request.body && ctx.request.body.tableId) {
return ctx.request.body.tableId
}
if (ctx.params && ctx.params.tableId) {
return ctx.params.tableId
}
if (ctx.params && ctx.params.viewName) {
return ctx.params.viewName
}
}
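
Illustration only: isExternalTable lives in integrations/utils and is assumed to key off the table id format, with external ids embedding their datasource id. A hypothetical stand-in:

function demoIsExternal(tableId) {
  return tableId.startsWith("datasource") // hypothetical prefix check
}
console.log(demoIsExternal("ta_users")) // false: internal handler
console.log(demoIsExternal("datasource_abc_Persons")) // true: external handler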
exports.patch = async ctx => {
const appId = ctx.appId
const tableId = getTableId(ctx)
const body = ctx.request.body
// if it doesn't have an _id then it's a save
if (body && !body._id) {
return exports.save(ctx)
}
try {
const { row, table } = await pickApi(tableId).patch(ctx)
ctx.status = 200
ctx.eventEmitter &&
ctx.eventEmitter.emitRow(`row:update`, appId, row, table)
ctx.message = `${table.name} updated successfully.`
ctx.body = row
} catch (err) {
ctx.throw(400, err)
}
}
exports.save = async function (ctx) {
const appId = ctx.appId
const tableId = getTableId(ctx)
const body = ctx.request.body
// if it has an ID already then it's a patch
if (body && body._id) {
return exports.patch(ctx)
}
try {
const { row, table } = await pickApi(tableId).save(ctx)
ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
ctx.message = `${table.name} saved successfully`
ctx.body = row
} catch (err) {
ctx.throw(400, err)
}
}
exports.fetchView = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetchView(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.fetch = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetch(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.find = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).find(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const inputs = ctx.request.body
const tableId = getTableId(ctx)
let response, row
if (inputs.rows) {
let { rows } = await pickApi(tableId).bulkDestroy(ctx)
response = rows
for (let row of rows) {
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
} else {
let resp = await pickApi(tableId).destroy(ctx)
response = resp.response
row = resp.row
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
ctx.status = 200
// for automations include the row that was deleted
ctx.row = row || {}
ctx.body = response
}
exports.search = async ctx => {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).search(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.validate = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).validate(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.fetchEnrichedRow = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
} catch (err) {
ctx.throw(400, err)
}
}


@ -1,23 +1,20 @@
const CouchDB = require("../../db")
const validateJs = require("validate.js")
const linkRows = require("../../db/linkedRows")
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const {
getRowParams,
generateRowID,
DocumentTypes,
SEPARATOR,
InternalTables,
} = require("../../db/utils")
const userController = require("./user")
} = require("../../../db/utils")
const userController = require("../user")
const {
inputProcessing,
outputProcessing,
} = require("../../utilities/rowProcessor")
const { FieldTypes } = require("../../constants")
} = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash")
const { cloneDeep } = require("lodash/fp")
const TABLE_VIEW_BEGINS_WITH = `all${SEPARATOR}${DocumentTypes.TABLE}${SEPARATOR}`
const { validate, findRow } = require("./utils")
const { fullSearch, paginatedSearch } = require("./internalSearch")
const CALCULATION_TYPES = {
SUM: "sum",
@ -25,35 +22,7 @@ const CALCULATION_TYPES = {
STATS: "stats",
}
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
return new Date(value).getTime()
},
// Input is a unix timestamp
format: function (value) {
return new Date(value).toISOString()
},
})
async function findRow(ctx, db, tableId, rowId) {
let row
// TODO remove special user case in future
if (tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: rowId,
}
await userController.findMetadata(ctx)
row = ctx.body
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the rows tableId"
}
return row
}
exports.patch = async function (ctx) {
exports.patch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
const inputs = ctx.request.body
@ -61,7 +30,7 @@ exports.patch = async function (ctx) {
const isUserTable = tableId === InternalTables.USER_METADATA
let dbRow
try {
dbRow = await db.get(ctx.params.rowId)
dbRow = await db.get(inputs._id)
} catch (err) {
if (isUserTable) {
// don't include the rev, it'll be the global rev
@ -70,7 +39,7 @@ exports.patch = async function (ctx) {
_id: inputs._id,
}
} else {
ctx.throw(400, "Row does not exist")
throw "Row does not exist"
}
}
let dbTable = await db.get(tableId)
@ -88,12 +57,7 @@ exports.patch = async function (ctx) {
})
if (!validateResult.valid) {
ctx.status = 400
ctx.body = {
status: 400,
errors: validateResult.errors,
}
return
throw validateResult.errors
}
// returned row is cleaned and prepared for writing to DB
@ -109,7 +73,7 @@ exports.patch = async function (ctx) {
// the row has been updated, need to put it into the ctx
ctx.request.body = row
await userController.updateMetadata(ctx)
return
return { row: ctx.body, table }
}
const response = await db.put(row)
@ -119,10 +83,7 @@ exports.patch = async function (ctx) {
}
row._rev = response.rev
row.type = "row"
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:update`, appId, row, table)
ctx.body = row
ctx.status = 200
ctx.message = `${table.name} updated successfully.`
return { row, table }
}
exports.save = async function (ctx) {
@ -131,20 +92,6 @@ exports.save = async function (ctx) {
let inputs = ctx.request.body
inputs.tableId = ctx.params.tableId
// TODO: find usage of this and break out into own endpoint
if (inputs.type === "delete") {
await bulkDelete(ctx)
ctx.body = inputs.rows
return
}
// if the row obj had an _id then it will have been retrieved
if (inputs._id && inputs._rev) {
ctx.params.rowId = inputs._id
await exports.patch(ctx)
return
}
if (!inputs._rev && !inputs._id) {
inputs._id = generateRowID(inputs.tableId)
}
@ -158,12 +105,7 @@ exports.save = async function (ctx) {
})
if (!validateResult.valid) {
ctx.status = 400
ctx.body = {
status: 400,
errors: validateResult.errors,
}
return
throw validateResult.errors
}
// make sure link rows are up to date
@ -182,21 +124,17 @@ exports.save = async function (ctx) {
await db.put(table)
}
row._rev = response.rev
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
ctx.body = row
ctx.status = 200
ctx.message = `${table.name} saved successfully`
return { row, table }
}
exports.fetchView = async function (ctx) {
exports.fetchView = async ctx => {
const appId = ctx.appId
const viewName = ctx.params.viewName
// if this is a table view being looked for just transfer to that
if (viewName.startsWith(TABLE_VIEW_BEGINS_WITH)) {
ctx.params.tableId = viewName.substring(4)
await exports.fetchTableRows(ctx)
return
if (viewName.includes(DocumentTypes.TABLE)) {
ctx.params.tableId = viewName
return exports.fetch(ctx)
}
const db = new CouchDB(appId)
@ -204,13 +142,14 @@ exports.fetchView = async function (ctx) {
const designDoc = await db.get("_design/database")
const viewInfo = designDoc.views[viewName]
if (!viewInfo) {
ctx.throw(400, "View does not exist.")
throw "View does not exist."
}
const response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})
let rows
if (!calculation) {
response.rows = response.rows.map(row => row.doc)
let table
@ -222,7 +161,7 @@ exports.fetchView = async function (ctx) {
schema: {},
}
}
ctx.body = await outputProcessing(appId, table, response.rows)
rows = await outputProcessing(appId, table, response.rows)
}
if (calculation === CALCULATION_TYPES.STATS) {
@ -232,26 +171,26 @@ exports.fetchView = async function (ctx) {
...row.value,
avg: row.value.sum / row.value.count,
}))
ctx.body = response.rows
rows = response.rows
}
if (
calculation === CALCULATION_TYPES.COUNT ||
calculation === CALCULATION_TYPES.SUM
) {
ctx.body = response.rows.map(row => ({
rows = response.rows.map(row => ({
group: row.key,
field,
value: row.value,
}))
}
return rows
}
exports.fetchTableRows = async function (ctx) {
exports.fetch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
// TODO remove special user case in future
let rows,
table = await db.get(ctx.params.tableId)
if (ctx.params.tableId === InternalTables.USER_METADATA) {
@ -265,27 +204,26 @@ exports.fetchTableRows = async function (ctx) {
)
rows = response.rows.map(row => row.doc)
}
ctx.body = await outputProcessing(appId, table, rows)
return outputProcessing(appId, table, rows)
}
exports.find = async function (ctx) {
exports.find = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
try {
const table = await db.get(ctx.params.tableId)
const row = await findRow(ctx, db, ctx.params.tableId, ctx.params.rowId)
ctx.body = await outputProcessing(appId, table, row)
} catch (err) {
ctx.throw(400, err)
}
const table = await db.get(ctx.params.tableId)
let row = await findRow(ctx, db, ctx.params.tableId, ctx.params.rowId)
row = await outputProcessing(appId, table, row)
return row
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const row = await db.get(ctx.params.rowId)
const { _id, _rev } = ctx.request.body
const row = await db.get(_id)
if (row.tableId !== ctx.params.tableId) {
ctx.throw(400, "Supplied tableId doesn't match the row's tableId")
throw "Supplied tableId doesn't match the row's tableId"
}
await linkRows.updateLinks({
appId,
@ -293,54 +231,80 @@ exports.destroy = async function (ctx) {
row,
tableId: row.tableId,
})
// TODO remove special user case in future
if (ctx.params.tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: ctx.params.rowId,
id: _id,
}
await userController.destroyMetadata(ctx)
return { response: ctx.body, row }
} else {
ctx.body = await db.remove(ctx.params.rowId, ctx.params.revId)
const response = await db.remove(_id, _rev)
return { response, row }
}
// for automations include the row that was deleted
ctx.row = row
ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
exports.validate = async function (ctx) {
const errors = await validate({
exports.bulkDestroy = async ctx => {
const appId = ctx.appId
const { rows } = ctx.request.body
const db = new CouchDB(appId)
let updates = rows.map(row =>
linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_DELETE,
row,
tableId: row.tableId,
})
)
// TODO remove special user case in future
if (ctx.params.tableId === InternalTables.USER_METADATA) {
updates = updates.concat(
rows.map(row => {
ctx.params = {
id: row._id,
}
return userController.destroyMetadata(ctx)
})
)
} else {
await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true })))
}
await Promise.all(updates)
return { response: { ok: true }, rows }
}
exports.search = async ctx => {
const appId = ctx.appId
const { tableId } = ctx.params
const db = new CouchDB(appId)
const { paginate, query, ...params } = ctx.request.body
params.tableId = tableId
let response
if (paginate) {
response = await paginatedSearch(appId, query, params)
} else {
response = await fullSearch(appId, query, params)
}
// Enrich search results with relationships
if (response.rows && response.rows.length) {
const table = await db.get(tableId)
response.rows = await outputProcessing(appId, table, response.rows)
}
return response
}
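
For illustration, a request body the search endpoint above would accept; field names and values are hypothetical, and any parameters beyond paginate and query are simply passed through in params (the exact supported set is an assumption).

// POST /api/:tableId/search
const searchBody = {
  query: {
    string: { name: "John" },               // rows whose name starts with "John"
    range: { age: { low: 18, high: 65 } },  // rows whose age is in [18, 65]
  },
  paginate: true,  // use the paginated search rather than a full search
  limit: 50,       // passed through in ...params (assumed to be supported)
}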
exports.validate = async ctx => {
return validate({
appId: ctx.appId,
tableId: ctx.params.tableId,
row: ctx.request.body,
})
ctx.status = 200
ctx.body = errors
}
async function validate({ appId, tableId, row, table }) {
if (!table) {
const db = new CouchDB(appId)
table = await db.get(tableId)
}
const errors = {}
for (let fieldName of Object.keys(table.schema)) {
const constraints = cloneDeep(table.schema[fieldName].constraints)
// special case for options, need to always allow unselected (null)
if (
table.schema[fieldName].type === FieldTypes.OPTIONS &&
constraints.inclusion
) {
constraints.inclusion.push(null)
}
const res = validateJs.single(row[fieldName], constraints)
if (res) errors[fieldName] = res
}
return { valid: Object.keys(errors).length === 0, errors }
}
exports.fetchEnrichedRow = async function (ctx) {
exports.fetchEnrichedRow = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableId = ctx.params.tableId
@ -381,39 +345,5 @@ exports.fetchEnrichedRow = async function (ctx) {
)
}
}
ctx.body = row
ctx.status = 200
}
async function bulkDelete(ctx) {
const appId = ctx.appId
const { rows } = ctx.request.body
const db = new CouchDB(appId)
let updates = rows.map(row =>
linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_DELETE,
row,
tableId: row.tableId,
})
)
// TODO remove special user case in future
if (ctx.params.tableId === InternalTables.USER_METADATA) {
updates = updates.concat(
rows.map(row => {
ctx.params = {
id: row._id,
}
return userController.destroyMetadata(ctx)
})
)
} else {
await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true })))
}
await Promise.all(updates)
rows.forEach(row => {
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
})
return row
}

View file

@ -0,0 +1,70 @@
const validateJs = require("validate.js")
const { cloneDeep } = require("lodash/fp")
const CouchDB = require("../../../db")
const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
const { integrations } = require("../../../integrations")
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
return new Date(value).getTime()
},
// Input is a unix timestamp
format: function (value) {
return new Date(value).toISOString()
},
})
exports.makeExternalQuery = async (appId, json) => {
const datasourceId = json.endpoint.datasourceId
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const Integration = integrations[datasource.source]
// query is the opinionated query function, implemented only by "plus" integrations
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
throw "Datasource does not support query."
}
}
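
A usage sketch for makeExternalQuery; the datasource ID and table name below are hypothetical.

// reads rows from an external table via the datasource's opinionated query function
const rows = await makeExternalQuery(appId, {
  endpoint: {
    datasourceId: "datasource_plus_abc123",  // hypothetical ID
    entityId: "users",                       // table name in the external DB
    operation: "READ",
  },
  filters: { string: { name: "John" } },
})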
exports.findRow = async (ctx, db, tableId, rowId) => {
let row
// TODO remove special user case in future
if (tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: rowId,
}
await userController.findMetadata(ctx)
row = ctx.body
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the rows tableId"
}
return row
}
exports.validate = async ({ appId, tableId, row, table }) => {
if (!table) {
const db = new CouchDB(appId)
table = await db.get(tableId)
}
const errors = {}
for (let fieldName of Object.keys(table.schema)) {
const constraints = cloneDeep(table.schema[fieldName].constraints)
// special case for options, need to always allow unselected (null)
if (
table.schema[fieldName].type === FieldTypes.OPTIONS &&
constraints.inclusion
) {
constraints.inclusion.push(null)
}
const res = validateJs.single(row[fieldName], constraints)
if (res) errors[fieldName] = res
}
return { valid: Object.keys(errors).length === 0, errors }
}

View file

@ -1,26 +0,0 @@
const { fullSearch, paginatedSearch } = require("./utils")
const CouchDB = require("../../../db")
const { outputProcessing } = require("../../../utilities/rowProcessor")
exports.rowSearch = async ctx => {
const appId = ctx.appId
const { tableId } = ctx.params
const db = new CouchDB(appId)
const { paginate, query, ...params } = ctx.request.body
params.tableId = tableId
let response
if (paginate) {
response = await paginatedSearch(appId, query, params)
} else {
response = await fullSearch(appId, query, params)
}
// Enrich search results with relationships
if (response.rows && response.rows.length) {
const table = await db.get(tableId)
response.rows = await outputProcessing(appId, table, response.rows)
}
ctx.body = response
}

View file

@ -5,23 +5,57 @@ const {
getRowParams,
getTableParams,
generateTableID,
getDatasourceParams,
BudibaseInternalDB,
} = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions } = require("./utils")
const { TableSaveFunctions, getExternalTable } = require("./utils")
const {
isExternalTable,
breakExternalTableId,
} = require("../../../integrations/utils")
exports.fetch = async function (ctx) {
const db = new CouchDB(ctx.appId)
const body = await db.allDocs(
const internalTables = await db.allDocs(
getTableParams(null, {
include_docs: true,
})
)
ctx.body = body.rows.map(row => row.doc)
const internal = internalTables.rows.map(row => ({
...row.doc,
type: "internal",
sourceId: BudibaseInternalDB._id,
}))
const externalTables = await db.allDocs(
getDatasourceParams("plus", {
include_docs: true,
})
)
const external = externalTables.rows.flatMap(row => {
return Object.values(row.doc.entities || {}).map(entity => ({
...entity,
type: "external",
sourceId: row.doc._id,
}))
})
ctx.body = [...internal, ...external]
}
exports.find = async function (ctx) {
const db = new CouchDB(ctx.appId)
ctx.body = await db.get(ctx.params.id)
const tableId = ctx.params.id
if (isExternalTable(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId)
ctx.body = await getExternalTable(ctx.appId, datasourceId, tableName)
} else {
ctx.body = await db.get(ctx.params.id)
}
}
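
The isExternalTable and breakExternalTableId helpers come from integrations/utils and are not shown in this diff. A plausible sketch under stated assumptions: the datasource_plus prefix is confirmed by the db/utils changes below, but the double-underscore separator is an assumption.

// assumed shapes -- not the verbatim helpers
function isExternalTable(tableId) {
  return tableId.startsWith("datasource_plus")
}

function breakExternalTableId(tableId) {
  // e.g. "datasource_plus_abc123__users" -> datasource ID plus table name
  const parts = tableId.split("__")
  const tableName = parts.pop()
  return { datasourceId: parts.join("__"), tableName }
}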
exports.save = async function (ctx) {

View file

@ -204,4 +204,15 @@ class TableSaveFunctions {
}
}
exports.getExternalTable = async (appId, datasourceId, tableName) => {
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
throw "Datasource is not configured fully."
}
return Object.values(datasource.entities).find(
entity => entity.name === tableName
)
}
exports.TableSaveFunctions = TableSaveFunctions

View file

@ -1,14 +1,64 @@
const Router = require("@koa/router")
const datasourceController = require("../controllers/datasource")
const authorized = require("../../middleware/authorized")
const joiValidator = require("../../middleware/joi-validator")
const {
BUILDER,
PermissionLevels,
PermissionTypes,
} = require("@budibase/auth/permissions")
const Joi = require("joi")
const { DataSourceOperation } = require("../../constants")
const router = Router()
function generateDatasourceSchema() {
// prettier-ignore
return joiValidator.body(Joi.object({
_id: Joi.string(),
_rev: Joi.string(),
// source: Joi.string().valid("POSTGRES_PLUS"),
type: Joi.string().allow("datasource_plus"),
relationships: Joi.array().items(Joi.object({
from: Joi.string().required(),
to: Joi.string().required(),
cardinality: Joi.valid("1:N", "1:1", "N:N").required()
})),
// entities: Joi.array().items(Joi.object({
// type: Joi.string().valid(...Object.values(FieldTypes)).required(),
// name: Joi.string().required(),
// })),
}).unknown(true))
}
function generateQueryDatasourceSchema() {
// prettier-ignore
return joiValidator.body(Joi.object({
endpoint: Joi.object({
datasourceId: Joi.string().required(),
operation: Joi.string().required().valid(...Object.values(DataSourceOperation)),
entityId: Joi.string().required(),
}).required(),
resource: Joi.object({
fields: Joi.array().items(Joi.string()).optional(),
}).optional(),
body: Joi.object().optional(),
sort: Joi.object().optional(),
filters: Joi.object({
string: Joi.object().optional(),
range: Joi.object().optional(),
equal: Joi.object().optional(),
notEqual: Joi.object().optional(),
empty: Joi.object().optional(),
notEmpty: Joi.object().optional(),
}).optional(),
paginate: Joi.object({
page: Joi.string().alphanum().optional(),
limit: Joi.number().optional(),
}).optional(),
}))
}
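
For illustration, a body that passes the validator above (all values hypothetical):

// POST /api/datasources/query
const queryBody = {
  endpoint: {
    datasourceId: "datasource_plus_abc123",
    operation: "READ",                      // one of DataSourceOperation
    entityId: "users",
  },
  resource: { fields: ["name", "age"] },    // omit to select *
  filters: { string: { name: "John" } },
  paginate: { page: "1", limit: 100 },
}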
router
.get("/api/datasources", authorized(BUILDER), datasourceController.fetch)
.get(
@ -16,7 +66,23 @@ router
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
datasourceController.find
)
.post("/api/datasources", authorized(BUILDER), datasourceController.save)
.post(
"/api/datasources/query",
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
generateQueryDatasourceSchema(),
datasourceController.query
)
.post(
"/api/datasources/:datasourceId/schema",
authorized(BUILDER),
datasourceController.buildSchemaFromDb
)
.post(
"/api/datasources",
authorized(BUILDER),
generateDatasourceSchema(),
datasourceController.save
)
.delete(
"/api/datasources/:datasourceId/:revId",
authorized(BUILDER),

View file

@ -14,6 +14,7 @@ if (env.isDev() || env.isTest()) {
}
router
.get("/api/dev/version", authorized(BUILDER), controller.getBudibaseVersion)
.delete("/api/dev/:appId/lock", authorized(BUILDER), controller.clearLock)
.post("/api/dev/:appId/revert", authorized(BUILDER), controller.revert)

View file

@ -23,7 +23,6 @@ const queryRoutes = require("./query")
const hostingRoutes = require("./hosting")
const backupRoutes = require("./backup")
const devRoutes = require("./dev")
const searchRoutes = require("./search")
exports.mainRoutes = [
authRoutes,
@ -52,7 +51,6 @@ exports.mainRoutes = [
// this could be breaking as koa may recognise other routes as this
tableRoutes,
rowRoutes,
searchRoutes,
]
exports.staticRoutes = staticRoutes

View file

@ -24,7 +24,7 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
rowController.fetchTableRows
rowController.fetch
)
.get(
"/api/:tableId/rows/:rowId",
@ -32,6 +32,12 @@ router
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
rowController.find
)
.post(
"/api/:tableId/search",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
rowController.search
)
.post(
"/api/:tableId/rows",
paramResource("tableId"),
@ -40,8 +46,8 @@ router
rowController.save
)
.patch(
"/api/:tableId/rows/:rowId",
paramSubResource("tableId", "rowId"),
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
rowController.patch
)
@ -52,8 +58,8 @@ router
rowController.validate
)
.delete(
"/api/:tableId/rows/:rowId/:revId",
paramSubResource("tableId", "rowId"),
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
usage,
rowController.destroy
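
With these changes the row identifiers move from the URL into the request body; illustrative payloads (IDs hypothetical), matching the single and bulk paths in the destroy controller above:

// PATCH  /api/:tableId/rows  { "_id": "ro_...", "_rev": "1-...", "name": "John" }
// DELETE /api/:tableId/rows  { "_id": "ro_...", "_rev": "1-..." }                 // single row
// DELETE /api/:tableId/rows  { "rows": [{ "_id": "ro_...", "_rev": "1-..." }] }   // bulk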

View file

@ -1,19 +0,0 @@
const Router = require("@koa/router")
const controller = require("../controllers/search")
const {
PermissionTypes,
PermissionLevels,
} = require("@budibase/auth/permissions")
const authorized = require("../../middleware/authorized")
const { paramResource } = require("../../middleware/resourceId")
const router = Router()
router.post(
"/api/search/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
controller.rowSearch
)
module.exports = router

View file

@ -17,7 +17,7 @@ function generateSaveValidator() {
return joiValidator.body(Joi.object({
_id: Joi.string(),
_rev: Joi.string(),
type: Joi.string().valid("table"),
type: Joi.string().valid("table", "internal", "external"),
primaryDisplay: Joi.string(),
schema: Joi.object().required(),
name: Joi.string().required(),

View file

@ -0,0 +1,89 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`/datasources fetch returns all the datasources from the server 1`] = `
Array [
Object {
"config": Object {},
"entities": Array [
Object {
"_id": "ta_users",
"_rev": "1-039883a06c1f9cb3945731d79838181e",
"name": "Users",
"primaryDisplay": "email",
"schema": Object {
"email": Object {
"constraints": Object {
"email": true,
"length": Object {
"maximum": "",
},
"presence": true,
"type": "string",
},
"fieldName": "email",
"name": "email",
"type": "string",
},
"firstName": Object {
"constraints": Object {
"presence": false,
"type": "string",
},
"fieldName": "firstName",
"name": "firstName",
"type": "string",
},
"lastName": Object {
"constraints": Object {
"presence": false,
"type": "string",
},
"fieldName": "lastName",
"name": "lastName",
"type": "string",
},
"roleId": Object {
"constraints": Object {
"inclusion": Array [
"ADMIN",
"POWER",
"BASIC",
"PUBLIC",
],
"presence": false,
"type": "string",
},
"fieldName": "roleId",
"name": "roleId",
"type": "options",
},
"status": Object {
"constraints": Object {
"inclusion": Array [
"active",
"inactive",
],
"presence": false,
"type": "string",
},
"fieldName": "status",
"name": "status",
"type": "options",
},
},
"type": "table",
"views": Object {},
},
],
"name": "Budibase DB",
"source": "BUDIBASE",
"type": "budibase",
},
Object {
"config": Object {},
"name": "Test",
"source": "POSTGRES",
"type": "datasource",
},
]
`;

View file

@ -2,6 +2,9 @@ let setup = require("./utilities")
let { basicDatasource } = setup.structures
let { checkBuilderEndpoint } = require("./utilities/TestFunctions")
jest.mock("pg")
const pg = require("pg")
describe("/datasources", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@ -37,13 +40,14 @@ describe("/datasources", () => {
.expect(200)
const datasources = res.body
expect(datasources).toEqual([
{
"_id": datasources[0]._id,
"_rev": datasources[0]._rev,
...basicDatasource()
}
])
// remove non-deterministic fields
for (let source of datasources) {
delete source._id
delete source._rev
}
expect(datasources).toMatchSnapshot()
})
it("should apply authorization to endpoint", async () => {
@ -66,6 +70,34 @@ describe("/datasources", () => {
})
})
describe("query", () => {
it("should be able to query a pg datasource", async () => {
const res = await request
.post(`/api/datasources/query`)
.send({
endpoint: {
datasourceId: datasource._id,
operation: "READ",
// table name below
entityId: "users",
},
resource: {
fields: ["name", "age"],
},
filters: {
string: {
name: "John",
},
},
})
.set(config.defaultHeaders())
.expect(200)
// this is mock data, so we can't assert on its contents
expect(res.body).toBeDefined()
expect(pg.queryMock).toHaveBeenCalledWith(`select "name", "age" from "users" where "name" like $1 limit $2`, ["John%", 5000])
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.createQuery()
@ -81,7 +113,7 @@ describe("/datasources", () => {
.expect('Content-Type', /json/)
.expect(200)
expect(res.body).toEqual([])
expect(res.body.length).toEqual(1)
})
it("should apply authorization to endpoint", async () => {

View file

@ -201,7 +201,7 @@ describe("/rows", () => {
const existing = await config.createRow()
const res = await request
.patch(`/api/${table._id}/rows/${existing._id}`)
.patch(`/api/${table._id}/rows`)
.send({
_id: existing._id,
_rev: existing._rev,
@ -225,7 +225,7 @@ describe("/rows", () => {
it("should throw an error when given improper types", async () => {
const existing = await config.createRow()
await request
.patch(`/api/${table._id}/rows/${existing._id}`)
.patch(`/api/${table._id}/rows`)
.send({
_id: existing._id,
_rev: existing._rev,
@ -241,19 +241,16 @@ describe("/rows", () => {
it("should be able to delete a row", async () => {
const createdRow = await config.createRow(row)
const res = await request
.delete(`/api/${table._id}/rows/${createdRow._id}/${createdRow._rev}`)
.delete(`/api/${table._id}/rows`)
.send({
rows: [
createdRow
]
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
expect(res.body.ok).toEqual(true)
})
it("shouldn't allow deleting a row in a table which is different to the one the row was created on", async () => {
const createdRow = await config.createRow(row)
await request
.delete(`/api/wrong_table/rows/${createdRow._id}/${createdRow._rev}`)
.set(config.defaultHeaders())
.expect(400)
expect(res.body[0]._id).toEqual(createdRow._id)
})
})
@ -289,9 +286,8 @@ describe("/rows", () => {
const row1 = await config.createRow()
const row2 = await config.createRow()
const res = await request
.post(`/api/${table._id}/rows`)
.delete(`/api/${table._id}/rows`)
.send({
type: "delete",
rows: [
row1,
row2,
@ -309,7 +305,7 @@ describe("/rows", () => {
it("should be able to fetch tables contents via 'view'", async () => {
const row = await config.createRow()
const res = await request
.get(`/api/views/all_${table._id}`)
.get(`/api/views/${table._id}`)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)

View file

@ -108,7 +108,7 @@ describe("/tables", () => {
.expect(200)
const fetchedTable = res.body[0]
expect(fetchedTable.name).toEqual(testTable.name)
expect(fetchedTable.type).toEqual("table")
expect(fetchedTable.type).toEqual("internal")
})
it("should apply authorization to endpoint", async () => {

View file

@ -7,11 +7,12 @@ const { BUILTIN_ROLE_IDS } = require("@budibase/auth/roles")
function Request(appId, params) {
this.appId = appId
this.params = params
this.request = {}
}
exports.getAllTableRows = async config => {
const req = new Request(config.appId, { tableId: config.table._id })
await rowController.fetchTableRows(req)
await rowController.fetch(req)
return req.body
}

View file

@ -25,6 +25,7 @@ app.use(
jsonLimit: "10mb",
textLimit: "10mb",
enableTypes: ["json", "form", "text"],
parsedMethods: ["POST", "PUT", "PATCH", "DELETE"],
})
)

View file

@ -1,4 +1,9 @@
const CouchDB = require("../db")
const {
isExternalTable,
breakExternalTableId,
} = require("../integrations/utils")
const { getExternalTable } = require("../api/controllers/table/utils")
/**
* When values are input to the system generally they will be of type string as this is required for template strings.
@ -60,7 +65,13 @@ module.exports.cleanInputValues = (inputs, schema) => {
*/
module.exports.cleanUpRow = async (appId, tableId, row) => {
const db = new CouchDB(appId)
const table = await db.get(tableId)
let table
if (isExternalTable(tableId)) {
const { datasourceId, tableName } = breakExternalTableId(tableId)
table = await getExternalTable(appId, datasourceId, tableName)
} else {
table = await db.get(tableId)
}
return module.exports.cleanInputValues(row, { properties: table.schema })
}

View file

@ -62,8 +62,12 @@ module.exports.run = async function ({ inputs, appId, apiKey, emitter }) {
let ctx = {
params: {
tableId: inputs.tableId,
rowId: inputs.id,
revId: inputs.revision,
},
request: {
body: {
_id: inputs.id,
_rev: inputs.revision,
},
},
appId,
eventEmitter: emitter,

View file

@ -8,6 +8,11 @@ const { getAutomationParams } = require("../db/utils")
const { coerce } = require("../utilities/rowProcessor")
const { utils } = require("@budibase/auth/redis")
const { JobQueues } = require("../constants")
const {
isExternalTable,
breakExternalTableId,
} = require("../integrations/utils")
const { getExternalTable } = require("../api/controllers/table/utils")
const { opts } = utils.getRedisOptions()
let automationQueue = new Queue(JobQueues.AUTOMATIONS, { redis: opts })
@ -288,9 +293,15 @@ emitter.on("row:delete", async function (event) {
async function fillRowOutput(automation, params) {
let triggerSchema = automation.definition.trigger
let tableId = triggerSchema.inputs.tableId
const db = new CouchDB(params.appId)
try {
let table = await db.get(tableId)
let table
if (!isExternalTable(tableId)) {
const db = new CouchDB(params.appId)
table = await db.get(tableId)
} else {
const { datasourceId, tableName } = breakExternalTableId(tableId)
table = await getExternalTable(params.appId, datasourceId, tableName)
}
let row = {}
for (let schemaKey of Object.keys(table.schema)) {
const paramValue = params[schemaKey]

View file

@ -31,6 +31,18 @@ exports.AuthTypes = {
EXTERNAL: "external",
}
exports.DataSourceOperation = {
CREATE: "CREATE",
READ: "READ",
UPDATE: "UPDATE",
DELETE: "DELETE",
}
exports.SortDirection = {
ASCENDING: "ASCENDING",
DESCENDING: "DESCENDING",
}
exports.USERS_TABLE_SCHEMA = {
_id: "ta_users",
type: "table",

View file

@ -32,6 +32,7 @@ const DocumentTypes = {
LAYOUT: "layout",
SCREEN: "screen",
DATASOURCE: "datasource",
DATASOURCE_PLUS: "datasource_plus",
QUERY: "query",
}
@ -56,6 +57,14 @@ exports.StaticDatabases = {
...StaticDatabases,
}
const BudibaseInternalDB = {
_id: "bb_internal",
type: "budibase",
name: "Budibase DB",
source: "BUDIBASE",
config: {},
}
exports.APP_PREFIX = APP_PREFIX
exports.APP_DEV_PREFIX = APP_DEV_PREFIX
exports.USER_METDATA_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
@ -67,6 +76,7 @@ exports.SEPARATOR = SEPARATOR
exports.UNICODE_MAX = UNICODE_MAX
exports.SearchIndexes = SearchIndexes
exports.AppStatus = AppStatus
exports.BudibaseInternalDB = BudibaseInternalDB
exports.generateRoleID = generateRoleID
exports.getRoleParams = getRoleParams
@ -283,8 +293,10 @@ exports.getWebhookParams = (webhookId = null, otherProps = {}) => {
* Generates a new datasource ID.
* @returns {string} The new datasource ID which the webhook doc can be stored under.
*/
exports.generateDatasourceID = () => {
return `${DocumentTypes.DATASOURCE}${SEPARATOR}${newid()}`
exports.generateDatasourceID = ({ plus = false } = {}) => {
return `${
plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE
}${SEPARATOR}${newid()}`
}
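
For clarity, the resulting IDs look like the following, assuming SEPARATOR is an underscore (as IDs like ta_users elsewhere in this diff suggest):

// generateDatasourceID()               -> "datasource_<newid>"
// generateDatasourceID({ plus: true }) -> "datasource_plus_<newid>"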
/**

View file

@ -0,0 +1,11 @@
exports.Operation = {
CREATE: "CREATE",
READ: "READ",
UPDATE: "UPDATE",
DELETE: "DELETE",
}
exports.SortDirection = {
ASCENDING: "ASCENDING",
DESCENDING: "DESCENDING",
}

View file

@ -0,0 +1,175 @@
const { DataSourceOperation, SortDirection } = require("../../constants")
const BASE_LIMIT = 5000
function addFilters(query, filters) {
function iterate(structure, fn) {
for (let [key, value] of Object.entries(structure)) {
fn(key, value)
}
}
if (!filters) {
return query
}
// if allOr is specified in filters, every condition is combined with OR
const allOr = !!filters.allOr
if (filters.string) {
iterate(filters.string, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, "like", `${value}%`)
})
}
if (filters.range) {
iterate(filters.range, (key, value) => {
if (!value.high || !value.low) {
return
}
const fnc = allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [value.low, value.high])
})
}
if (filters.equal) {
iterate(filters.equal, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc]({ [key]: value })
})
}
if (filters.notEqual) {
iterate(filters.notEqual, (key, value) => {
const fnc = allOr ? "orWhereNot" : "whereNot"
query = query[fnc]({ [key]: value })
})
}
if (filters.empty) {
iterate(filters.empty, key => {
const fnc = allOr ? "orWhereNull" : "whereNull"
query = query[fnc](key)
})
}
if (filters.notEmpty) {
iterate(filters.notEmpty, key => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
query = query[fnc](key)
})
}
return query
}
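
For illustration, a filters object the function above consumes, with the clauses it produces (column names hypothetical):

const filters = {
  string: { name: "Jo" },                 // where "name" like 'Jo%'
  range: { age: { low: 18, high: 30 } },  // where "age" between 18 and 30
  equal: { active: true },                // where "active" = true
  // allOr: true would combine all of the above with OR instead of AND
}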
function buildCreate(knex, json, opts) {
const { endpoint, body } = json
let query = knex(endpoint.entityId)
// mysql can't use returning
if (opts.disableReturning) {
return query.insert(body)
} else {
return query.insert(body).returning("*")
}
}
function buildRead(knex, json, limit) {
let { endpoint, resource, filters, sort, paginate } = json
let query = knex(endpoint.entityId)
// select all if not specified
if (!resource) {
resource = { fields: [] }
}
// handle select
if (resource.fields && resource.fields.length > 0) {
query = query.select(resource.fields)
} else {
query = query.select("*")
}
// handle where
query = addFilters(query, filters)
// handle sorting
if (sort) {
for (let [key, value] of Object.entries(sort)) {
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(key, direction)
}
}
// handle pagination
if (paginate && paginate.page && paginate.limit) {
const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit
query = query.offset(offset).limit(paginate.limit)
} else if (paginate && paginate.limit) {
query = query.limit(paginate.limit)
} else {
query.limit(limit)
}
return query
}
function buildUpdate(knex, json, opts) {
const { endpoint, body, filters } = json
let query = knex(endpoint.entityId)
query = addFilters(query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.update(body)
} else {
return query.update(body).returning("*")
}
}
function buildDelete(knex, json, opts) {
const { endpoint, filters } = json
let query = knex(endpoint.entityId)
query = addFilters(query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.delete()
} else {
return query.delete().returning("*")
}
}
class SqlQueryBuilder {
// pass through client to get flavour of SQL
constructor(client, limit = BASE_LIMIT) {
this._client = client
this._limit = limit
}
/**
* @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON.
*/
_operation(json) {
if (!json || !json.endpoint) {
return ""
}
return json.endpoint.operation
}
/**
* @param json The JSON query DSL which is to be converted to SQL.
* @param opts extra options which are passed into the query builder, e.g. disableReturning,
* which for MySQL's sake stops adding the RETURNING clause to inserts, updates and deletes.
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
*/
_query(json, opts = {}) {
const knex = require("knex")({ client: this._client })
let query
switch (this._operation(json)) {
case DataSourceOperation.CREATE:
query = buildCreate(knex, json, opts)
break
case DataSourceOperation.READ:
query = buildRead(knex, json, this._limit, opts)
break
case DataSourceOperation.UPDATE:
query = buildUpdate(knex, json, opts)
break
case DataSourceOperation.DELETE:
query = buildDelete(knex, json, opts)
break
default:
throw `Operation type is not supported by SQL query builder`
}
return query.toSQL().toNative()
}
}
module.exports = SqlQueryBuilder
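
A usage sketch for the builder (the require path is an assumption); the expected output below matches the Postgres case in the test suite further down in this diff:

const SqlQueryBuilder = require("./base/sql")  // assumed path

const builder = new SqlQueryBuilder("pg", 500)
const { sql, bindings } = builder._query({
  endpoint: { datasourceId: "Postgres", operation: "READ", entityId: "test" },
  resource: { fields: [] },
  filters: {},
})
// sql      -> select * from "test" limit $1
// bindings -> [500]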

View file

@ -1,5 +1,6 @@
const sqlServer = require("mssql")
const { FIELD_TYPES } = require("./Integration")
const Sql = require("./base/sql")
const SCHEMA = {
docs: "https://github.com/tediousjs/node-mssql",
@ -50,10 +51,21 @@ const SCHEMA = {
},
}
class SqlServerIntegration {
async function internalQuery(client, query) {
const sql = typeof query === "string" ? query : query.sql
const bindings = typeof query === "string" ? {} : query.bindings
try {
return await client.query(sql, bindings)
} catch (err) {
throw new Error(err)
}
}
class SqlServerIntegration extends Sql {
static pool
constructor(config) {
super("mssql")
this.config = config
this.config.options = {
encrypt: this.config.encrypt,
@ -65,52 +77,43 @@ class SqlServerIntegration {
}
async connect() {
const client = await this.pool.connect()
this.client = client.request()
try {
const client = await this.pool.connect()
this.client = client.request()
} catch (err) {
throw new Error(err)
}
}
async read(query) {
try {
await this.connect()
const response = await this.client.query(query.sql)
return response.recordset
} catch (err) {
console.error("Error querying MS SQL Server", err)
throw err
}
await this.connect()
const response = await internalQuery(this.client, query)
return response.recordset
}
async create(query) {
try {
await this.connect()
const response = await this.client.query(query.sql)
return response.recordset || [{ created: true }]
} catch (err) {
console.error("Error querying MS SQL Server", err)
throw err
}
await this.connect()
const response = await internalQuery(this.client, query)
return response.recordset || [{ created: true }]
}
async update(query) {
try {
await this.connect()
const response = await this.client.query(query.sql)
return response.recordset
} catch (err) {
console.error("Error querying MS SQL Server", err)
throw err
}
await this.connect()
const response = await internalQuery(this.client, query)
return response.recordset || [{ updated: true }]
}
async delete(query) {
try {
await this.connect()
const response = await this.client.query(query.sql)
return response.recordset
} catch (err) {
console.error("Error querying MS SQL Server", err)
throw err
}
await this.connect()
const response = await internalQuery(this.client, query)
return response.recordset || [{ deleted: true }]
}
async query(json) {
const operation = this._operation(json).toLowerCase()
const input = this._query(json)
const response = await internalQuery(this.client, input)
return response.recordset ? response.recordset : [{ [operation]: true }]
}
}

View file

@ -1,8 +1,36 @@
const mysql = require("mysql")
const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
const Sql = require("./base/sql")
const { buildExternalTableId, convertType } = require("./utils")
const { FieldTypes } = require("../constants")
const { Operation } = require("./base/constants")
const TYPE_MAP = {
text: FieldTypes.LONGFORM,
blob: FieldTypes.LONGFORM,
enum: FieldTypes.STRING,
varchar: FieldTypes.STRING,
int: FieldTypes.NUMBER,
numeric: FieldTypes.NUMBER,
bigint: FieldTypes.NUMBER,
mediumint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
dec: FieldTypes.NUMBER,
double: FieldTypes.NUMBER,
real: FieldTypes.NUMBER,
fixed: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
date: FieldTypes.DATETIME,
datetime: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
tinyint: FieldTypes.BOOLEAN,
json: FIELD_TYPES.JSON,
}
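
convertType is imported from ./utils but not shown in this diff; a plausible sketch, assuming it matches the raw column type against the map keys and falls back to string:

// assumed implementation -- not the verbatim helper
function convertType(columnType, typeMap) {
  for (let [dbType, budibaseType] of Object.entries(typeMap)) {
    // e.g. "varchar(255)" should match the "varchar" entry
    if (columnType.toLowerCase().startsWith(dbType)) {
      return budibaseType
    }
  }
  return "string"  // FieldTypes.STRING -- anything unrecognised is a plain string
}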
const SCHEMA = {
docs: "https://github.com/mysqljs/mysql",
plus: true,
friendlyName: "MySQL",
description:
"MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
@ -52,45 +80,143 @@ const SCHEMA = {
},
}
class MySQLIntegration {
function internalQuery(client, query, connect = true) {
const sql = typeof query === "string" ? query : query.sql
const bindings = typeof query === "string" ? {} : query.bindings
// Node MySQL is callback based, so we must wrap our call in a promise
return new Promise((resolve, reject) => {
if (connect) {
client.connect()
}
return client.query(sql, bindings, (error, results) => {
if (error) {
reject(error)
} else {
resolve(results)
}
if (connect) {
client.end()
}
})
})
}
class MySQLIntegration extends Sql {
constructor(config) {
super("mysql")
this.config = config
if (Object.keys(config.ssl).length === 0) {
if (config.ssl && Object.keys(config.ssl).length === 0) {
delete config.ssl
}
this.client = mysql.createConnection(config)
}
query(query) {
// Node MySQL is callback based, so we must wrap our call in a promise
return new Promise((resolve, reject) => {
this.client.connect()
return this.client.query(query.sql, (error, results) => {
if (error) return reject(error)
resolve(results)
this.client.end()
})
})
async buildSchema(datasourceId) {
const tables = {}
const database = this.config.database
this.client.connect()
// get the tables first
const tablesResp = await internalQuery(this.client, "SHOW TABLES;", false)
const tableNames = tablesResp.map(obj => obj[`Tables_in_${database}`])
for (let tableName of tableNames) {
const primaryKeys = []
const schema = {}
const descResp = await internalQuery(
this.client,
`DESCRIBE ${tableName};`,
false
)
for (let column of descResp) {
const columnName = column.Field
if (column.Key === "PRI") {
primaryKeys.push(columnName)
}
const constraints = {}
if (column.Null !== "YES") {
constraints.required = true
}
schema[columnName] = {
name: columnName,
type: convertType(column.Type, TYPE_MAP),
constraints,
}
}
// for now just default to first column
if (primaryKeys.length === 0) {
primaryKeys.push(descResp[0].Field)
}
if (!tables[tableName]) {
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
primary: primaryKeys,
name: tableName,
schema,
}
}
}
this.client.end()
this.tables = tables
}
async create(query) {
const results = await this.query(query)
const results = await internalQuery(this.client, query)
return results.length ? results : [{ created: true }]
}
read(query) {
return this.query(query)
return internalQuery(this.client, query)
}
async update(query) {
const results = await this.query(query)
const results = await internalQuery(this.client, query)
return results.length ? results : [{ updated: true }]
}
async delete(query) {
const results = await this.query(query)
const results = await internalQuery(this.client, query)
return results.length ? results : [{ deleted: true }]
}
async getReturningRow(json) {
if (!json.extra.idFilter) {
return {}
}
const input = this._query({
endpoint: {
...json.endpoint,
operation: Operation.READ,
},
fields: [],
filters: json.extra.idFilter,
paginate: {
limit: 1,
},
})
return internalQuery(this.client, input, false)
}
async query(json) {
const operation = this._operation(json)
this.client.connect()
const input = this._query(json, { disableReturning: true })
let row
// need to emulate RETURNING, a feature MySQL can't do
if (operation === Operation.DELETE) {
row = await this.getReturningRow(json)
}
const results = await internalQuery(this.client, input, false)
// same as delete: read the affected row to emulate RETURNING
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
row = await this.getReturningRow(json)
}
this.client.end()
if (operation !== Operation.READ) {
return row
}
return results.length ? results : [{ [operation.toLowerCase()]: true }]
}
}
module.exports = {

View file

@ -1,8 +1,12 @@
const { Pool } = require("pg")
const { FIELD_TYPES } = require("./Integration")
const Sql = require("./base/sql")
const { FieldTypes } = require("../constants")
const { buildExternalTableId, convertType } = require("./utils")
const SCHEMA = {
docs: "https://node-postgres.com",
plus: true,
friendlyName: "PostgreSQL",
description:
"PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
@ -54,10 +58,47 @@ const SCHEMA = {
},
}
class PostgresIntegration {
const TYPE_MAP = {
text: FieldTypes.LONGFORM,
varchar: FieldTypes.STRING,
integer: FieldTypes.NUMBER,
bigint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
boolean: FieldTypes.BOOLEAN,
json: FIELD_TYPES.JSON,
}
async function internalQuery(client, query) {
const sql = typeof query === "string" ? query : query.sql
const bindings = typeof query === "string" ? {} : query.bindings
try {
return await client.query(sql, bindings)
} catch (err) {
throw new Error(err)
}
}
class PostgresIntegration extends Sql {
static pool
COLUMNS_SQL =
"select * from information_schema.columns where table_schema = 'public'"
PRIMARY_KEYS_SQL = `
select tc.table_schema, tc.table_name, kc.column_name as primary_key
from information_schema.table_constraints tc
join
information_schema.key_column_usage kc on kc.table_name = tc.table_name
and kc.table_schema = tc.table_schema
and kc.constraint_name = tc.constraint_name
where tc.constraint_type = 'PRIMARY KEY';
`
constructor(config) {
super("pg")
this.config = config
if (this.config.ssl) {
this.config.ssl = {
@ -72,33 +113,73 @@ class PostgresIntegration {
this.client = this.pool
}
async query(sql) {
/**
* Fetches the tables from the postgres table and assigns them to the datasource.
* @param {*} datasourceId - datasourceId to fetch
*/
async buildSchema(datasourceId) {
let keys = []
try {
return await this.client.query(sql)
const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
for (let table of primaryKeysResponse.rows) {
keys.push(table.column_name || table.primary_key)
}
} catch (err) {
throw new Error(err)
// TODO: this try catch method isn't right
keys = ["id"]
}
const columnsResponse = await this.client.query(this.COLUMNS_SQL)
const tables = {}
for (let column of columnsResponse.rows) {
const tableName = column.table_name
const columnName = column.column_name
// table key doesn't exist yet
if (!tables[tableName]) {
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
primary: keys,
name: tableName,
schema: {},
}
}
tables[tableName].schema[columnName] = {
name: columnName,
type: convertType(column.data_type, TYPE_MAP),
}
}
this.tables = tables
}
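
The resulting structure, for illustration (table and column names hypothetical; the external table ID separator is an assumption):

// this.tables = {
//   users: {
//     _id: "datasource_plus_abc123__users",  // buildExternalTableId output (assumed format)
//     primary: ["id"],
//     name: "users",
//     schema: {
//       name: { name: "name", type: "string" },
//       age:  { name: "age",  type: "number" },
//     },
//   },
// }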
async create({ sql }) {
const response = await this.query(sql)
async create(sql) {
const response = await internalQuery(this.client, sql)
return response.rows.length ? response.rows : [{ created: true }]
}
async read({ sql }) {
const response = await this.query(sql)
async read(sql) {
const response = await internalQuery(this.client, sql)
return response.rows
}
async update({ sql }) {
const response = await this.query(sql)
async update(sql) {
const response = await internalQuery(this.client, sql)
return response.rows.length ? response.rows : [{ updated: true }]
}
async delete({ sql }) {
const response = await this.query(sql)
async delete(sql) {
const response = await internalQuery(this.client, sql)
return response.rows.length ? response.rows : [{ deleted: true }]
}
async query(json) {
const operation = this._operation(json).toLowerCase()
const input = this._query(json)
const response = await internalQuery(this.client, input)
return response.rows.length ? response.rows : [{ [operation]: true }]
}
}
module.exports = {

View file

@ -20,7 +20,7 @@ describe("MS SQL Server Integration", () => {
const response = await config.integration.create({
sql
})
expect(config.integration.client.query).toHaveBeenCalledWith(sql)
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined)
})
it("calls the read method with the correct params", async () => {
@ -28,7 +28,7 @@ describe("MS SQL Server Integration", () => {
const response = await config.integration.read({
sql
})
expect(config.integration.client.query).toHaveBeenCalledWith(sql)
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined)
})
describe("no rows returned", () => {

View file

@ -1,12 +1,9 @@
const pg = require("mysql")
const MySQLIntegration = require("../mysql")
jest.mock("mysql")
class TestConfiguration {
constructor(config = { ssl: {} }) {
this.integration = new MySQLIntegration.integration(config)
this.query = jest.fn(() => [{ id: 1 }])
this.integration.query = this.query
this.integration = new MySQLIntegration.integration(config)
}
}
@ -19,43 +16,37 @@ describe("MySQL Integration", () => {
it("calls the create method with the correct params", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({
await config.integration.create({
sql
})
expect(config.query).toHaveBeenCalledWith({ sql })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
})
it("calls the read method with the correct params", async () => {
const sql = "select * from users;"
const response = await config.integration.read({
sql
})
expect(config.query).toHaveBeenCalledWith({
await config.integration.read({
sql
})
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
})
it("calls the update method with the correct params", async () => {
const sql = "update table users set name = 'test';"
const response = await config.integration.update({
await config.integration.update({
sql
})
expect(config.query).toHaveBeenCalledWith({ sql })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
})
it("calls the delete method with the correct params", async () => {
const sql = "delete from users where name = 'todelete';"
const response = await config.integration.delete({
await config.integration.delete({
sql
})
expect(config.query).toHaveBeenCalledWith({ sql })
expect(config.integration.client.query).toHaveBeenCalledWith(sql, undefined, expect.any(Function))
})
describe("no rows returned", () => {
beforeEach(() => {
config.query.mockImplementation(() => [])
})
it("returns the correct response when the create response has no rows", async () => {
const sql = "insert into users (name, age) values ('Joe', 123);"
const response = await config.integration.create({

View file

@ -20,7 +20,7 @@ describe("Postgres Integration", () => {
const response = await config.integration.create({
sql
})
expect(pg.queryMock).toHaveBeenCalledWith(sql)
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
})
it("calls the read method with the correct params", async () => {
@ -28,7 +28,7 @@ describe("Postgres Integration", () => {
const response = await config.integration.read({
sql
})
expect(pg.queryMock).toHaveBeenCalledWith(sql)
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
})
it("calls the update method with the correct params", async () => {
@ -36,7 +36,7 @@ describe("Postgres Integration", () => {
const response = await config.integration.update({
sql
})
expect(pg.queryMock).toHaveBeenCalledWith(sql)
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
})
it("calls the delete method with the correct params", async () => {
@ -44,7 +44,7 @@ describe("Postgres Integration", () => {
await config.integration.delete({
sql
})
expect(pg.queryMock).toHaveBeenCalledWith(sql)
expect(pg.queryMock).toHaveBeenCalledWith(sql, undefined)
})
describe("no rows returned", () => {

View file

@ -0,0 +1,173 @@
const Sql = require("../base/sql")
const TABLE_NAME = "test"
function endpoint(table, operation) {
return {
datasourceId: "Postgres",
operation: operation,
entityId: table || TABLE_NAME,
}
}
function generateReadJson({ table, fields, filters, sort, paginate} = {}) {
return {
endpoint: endpoint(table || TABLE_NAME, "READ"),
resource: {
fields: fields || [],
},
filters: filters || {},
sort: sort || {},
paginate: paginate || {},
}
}
function generateCreateJson(table = TABLE_NAME, body = {}) {
return {
endpoint: endpoint(table, "CREATE"),
body,
}
}
function generateUpdateJson(table = TABLE_NAME, body = {}, filters = {}) {
return {
endpoint: endpoint(table, "UPDATE"),
filters,
body,
}
}
function generateDeleteJson(table = TABLE_NAME, filters = {}) {
return {
endpoint: endpoint(table, "DELETE"),
filters,
}
}
describe("SQL query builder", () => {
const limit = 500
const client = "pg"
let sql
beforeEach(() => {
sql = new Sql(client, limit)
})
it("should test a basic read", () => {
const query = sql._query(generateReadJson())
expect(query).toEqual({
bindings: [limit],
sql: `select * from "${TABLE_NAME}" limit $1`
})
})
it("should test a read with specific columns", () => {
const query = sql._query(generateReadJson({
fields: ["name", "age"]
}))
expect(query).toEqual({
bindings: [limit],
sql: `select "name", "age" from "${TABLE_NAME}" limit $1`
})
})
it("should test a where string starts with read", () => {
const query = sql._query(generateReadJson({
filters: {
string: {
name: "John",
}
}
}))
expect(query).toEqual({
bindings: ["John%", limit],
sql: `select * from "${TABLE_NAME}" where "name" like $1 limit $2`
})
})
it("should test a where range read", () => {
const query = sql._query(generateReadJson({
filters: {
range: {
age: {
low: 2,
high: 10,
}
}
}
}))
expect(query).toEqual({
bindings: [2, 10, limit],
sql: `select * from "${TABLE_NAME}" where "age" between $1 and $2 limit $3`
})
})
it("should test for multiple IDs with OR", () => {
const query = sql._query(generateReadJson({
filters: {
equal: {
age: 10,
name: "John",
},
allOr: true,
}
}))
expect(query).toEqual({
bindings: [10, "John", limit],
sql: `select * from "${TABLE_NAME}" where ("age" = $1) or ("name" = $2) limit $3`
})
})
it("should test an create statement", () => {
const query = sql._query(generateCreateJson(TABLE_NAME, {
name: "Michael",
age: 45,
}))
expect(query).toEqual({
bindings: [45, "Michael"],
sql: `insert into "${TABLE_NAME}" ("age", "name") values ($1, $2) returning *`
})
})
it("should test an update statement", () => {
const query = sql._query(generateUpdateJson(TABLE_NAME, {
name: "John"
}, {
equal: {
id: 1001,
}
}))
expect(query).toEqual({
bindings: ["John", 1001],
sql: `update "${TABLE_NAME}" set "name" = $1 where "id" = $2 returning *`
})
})
it("should test a delete statement", () => {
const query = sql._query(generateDeleteJson(TABLE_NAME, {
equal: {
id: 1001,
}
}))
expect(query).toEqual({
bindings: [1001],
sql: `delete from "${TABLE_NAME}" where "id" = $1 returning *`
})
})
it("should work with MS-SQL", () => {
const query = new Sql("mssql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select top (@p0) * from [${TABLE_NAME}]`
})
})
it("should work with mySQL", () => {
const query = new Sql("mysql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select * from \`${TABLE_NAME}\` limit ?`
})
})
})

Some files were not shown because too many files have changed in this diff