
Merge remote-tracking branch 'origin/master' into feature/automation-row-ux-update

Dean 2024-06-20 10:43:33 +01:00
commit d50a8e0746
102 changed files with 1895 additions and 2036 deletions


@ -179,6 +179,13 @@ jobs:
- run: yarn --frozen-lockfile
- name: Test server
env:
DD_CIVISIBILITY_AGENTLESS_ENABLED: true
DD_API_KEY: "${{ secrets.DATADOG_API_KEY }}"
DD_SITE: "datadoghq.eu"
NODE_OPTIONS: "-r dd-trace/ci/init"
DD_ENV: "ci"
DD_SERVICE: "budibase/packages/server"
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}

.gitignore vendored

@ -8,6 +8,8 @@ bb-airgapped.tar.gz
packages/server/build/oldClientVersions/**/*
packages/builder/src/components/deploy/clientVersions.json
packages/server/src/integrations/tests/utils/*.lock
# Logs
logs
*.log


@ -641,7 +641,7 @@ couchdb:
# @ignore
repository: budibase/couchdb
# @ignore
tag: v3.2.1
tag: v3.3.3
# @ignore
pullPolicy: Always


@ -1,5 +1,5 @@
{
"version": "2.28.3",
"version": "2.28.6",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit a03225549e3ce61f43d0da878da162e08941b939
Subproject commit 247f56d455abbd64da17d865275ed978f577549f


@ -8,6 +8,7 @@ import {
DatabaseOpts,
DatabasePutOpts,
DatabaseQueryOpts,
DBError,
Document,
isDocument,
RowResponse,
@ -41,7 +42,7 @@ function buildNano(couchInfo: { url: string; cookie: string }) {
type DBCall<T> = () => Promise<T>
class CouchDBError extends Error {
class CouchDBError extends Error implements DBError {
status: number
statusCode: number
reason: string


@ -93,15 +93,21 @@ function isApps() {
return environment.SERVICE_TYPE === ServiceType.APPS
}
function isQA() {
return environment.BUDIBASE_ENVIRONMENT === "QA"
}
const environment = {
isTest,
isJest,
isDev,
isWorker,
isApps,
isQA,
isProd: () => {
return !isDev()
},
BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK,
@ -120,6 +126,7 @@ const environment = {
REDIS_CLUSTERED: process.env.REDIS_CLUSTERED,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
AWS_REGION: process.env.AWS_REGION,
MINIO_URL: process.env.MINIO_URL,
MINIO_ENABLED: process.env.MINIO_ENABLED || 1,


@ -101,6 +101,11 @@ export function ObjectStore(
}
}
// for AWS Credentials using temporary session token
if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
config.sessionToken = env.AWS_SESSION_TOKEN
}
// custom S3 is in use i.e. minio
if (env.MINIO_URL) {
if (opts.presigning && env.MINIO_ENABLED) {

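A minimal TypeScript sketch of the conditional above, assuming an env shape like the one this file reads from process.env; StoreEnv and buildS3Config are illustrative names, not actual Budibase exports:

// Illustrative env shape mirroring the fields referenced in the hunk above.
interface StoreEnv {
  MINIO_ENABLED?: string | number
  AWS_SESSION_TOKEN?: string
  AWS_REGION?: string
}

function buildS3Config(env: StoreEnv): Record<string, string | undefined> {
  const config: Record<string, string | undefined> = { region: env.AWS_REGION }
  // Temporary STS credentials are only valid with their session token attached;
  // skip it when a custom MinIO store is in use, which has its own credentials.
  if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
    config.sessionToken = env.AWS_SESSION_TOKEN
  }
  return config
}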

@ -63,12 +63,12 @@ class InMemoryQueue implements Partial<Queue> {
* Same callback API as Bull: each callback passed to this will consume messages as they
* become available. Please note this is a queue service, not a notification service, so
* each consumer will receive different messages.
* @param concurrencyOrFunc Either a concurrency count (accepted for Bull compatibility
* and ignored here) or the callback itself.
* @param func The callback function, which receives a "Job" as in the Bull API; within
* this job the property "data" contains the JSON message. Please note this is incredibly
* limited compared to Bull, where in reality the Job would contain a lot more information
* about the queue and the current status of the Bull cluster.
*/
async process(func: any) {
async process(concurrencyOrFunc: number | any, func?: any) {
func = typeof concurrencyOrFunc === "number" ? func : concurrencyOrFunc
this._emitter.on("message", async () => {
if (this._messages.length <= 0) {
return

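A sketch of the two call shapes this Bull-compatible signature now accepts; the queue declaration below is illustrative, not the real class:

// Bull allows process(callback) or process(concurrency, callback);
// the in-memory shim accepts both and ignores the concurrency number.
type Processor = (job: { data: unknown }) => Promise<void>
declare const queue: {
  process(concurrencyOrFunc: number | Processor, func?: Processor): Promise<void>
}

await queue.process(async job => {
  console.log("consumed", job.data) // callback-only form
})

await queue.process(5, async job => {
  console.log("consumed", job.data) // concurrency-first form
})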

@ -21,6 +21,7 @@ let cleanupInterval: NodeJS.Timeout
async function cleanup() {
for (let queue of QUEUES) {
await queue.clean(CLEANUP_PERIOD_MS, "completed")
await queue.clean(CLEANUP_PERIOD_MS, "failed")
}
}


@ -1,6 +1,16 @@
import { getDB } from "../db/db"
import { getGlobalDBName } from "../context"
import { TenantInfo } from "@budibase/types"
export function getTenantDB(tenantId: string) {
return getDB(getGlobalDBName(tenantId))
}
export async function saveTenantInfo(tenantInfo: TenantInfo) {
const db = getTenantDB(tenantInfo.tenantId)
// save the tenant info to db
return await db.put({
_id: "tenant_info",
...tenantInfo,
})
}
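
A usage sketch for the new helper; only tenantId is confirmed by the hunk, so the import path and the extra field below are assumptions:

import { saveTenantInfo } from "@budibase/backend-core" // import path assumed

const response = await saveTenantInfo({
  tenantId: "default",
  owner: { email: "admin@example.com" }, // hypothetical TenantInfo field
} as any)

// db.put resolves with the CouchDB write response; the fixed _id means a
// second save for the same tenant needs the current _rev or it will conflict.
console.log(response.ok, response.id) // id === "tenant_info"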


@ -2,6 +2,7 @@
import TableSelector from "./TableSelector.svelte"
import FieldSelector from "./FieldSelector.svelte"
import SchemaSetup from "./SchemaSetup.svelte"
import RowSelector from "./RowSelector.svelte"
import {
Button,
Select,
@ -14,6 +15,8 @@
DatePicker,
DrawerContent,
Helpers,
Toggle,
Divider,
} from "@budibase/bbui"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
import { automationStore, selectedAutomation, tables } from "stores/builder"
@ -40,7 +43,8 @@
EditorModes,
} from "components/common/CodeEditor"
import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
import { LuceneUtils, Utils, memo } from "@budibase/frontend-core"
import { QueryUtils, Utils, memo } from "@budibase/frontend-core"
import {
getSchemaForDatasourcePlus,
getEnvironmentBindings,
@ -129,6 +133,7 @@
$: customStepLayouts($memoBlock, schemaProperties)
const customStepLayouts = block => {
console.log("BUILDING", inputData["row"])
if (
rowSteps.includes(block.stepId) ||
(rowTriggers.includes(block.stepId) && isTestModal)
@ -256,7 +261,6 @@
}).schema
delete request._tableId
}
try {
if (isTestModal) {
let newTestData = { schema }
@ -489,7 +493,7 @@
}
function saveFilters(key) {
const filters = LuceneUtils.buildLuceneQuery(tempFilters)
const filters = QueryUtils.buildQuery(tempFilters)
onChange({
[key]: filters,
@ -639,6 +643,24 @@
<div class="label-wrapper">
<Label>{label}</Label>
</div>
{JSON.stringify(inputData)}
<div class="toggle-container">
<Toggle
value={inputData?.meta?.useAttachmentBinding}
text={"Use bindings"}
size={"XS"}
on:change={e => {
// DEAN - review this
onChange({
row: { [key]: "" }, //null
meta: {
[key]: e.detail,
},
})
}}
/>
</div>
<div class="attachment-field-width">
<KeyValueBuilder
on:change={e =>


@ -18,6 +18,7 @@
import { capitalise } from "helpers"
import { memo } from "@budibase/frontend-core"
import PropField from "./PropField.svelte"
import { cloneDeep, isPlainObject, mergeWith } from "lodash"
const dispatch = createEventDispatcher()
@ -42,21 +43,29 @@
let customPopover
let popoverAnchor
let editableRow = {}
let columns = new Set()
// Avoid unnecessary updates
//??
let editableMeta = {}
let editableFields = {}
// let columns = new Set()
// Avoid unnecessary updates - DEAN double check after refactor
$: memoStore.set({
row,
meta,
})
// Legacy support
$: fields = $memoStore?.meta?.fields
$: if ($memoStore?.meta?.columns) {
columns = new Set(meta?.columns)
$: if ($memoStore?.meta?.fields) {
editableFields = cloneDeep($memoStore?.meta?.fields)
}
// Needs to go now... entirely
// $: if ($memoStore?.meta?.columns) {
// columns = new Set(meta?.columns)
// }
$: parsedBindings = bindings.map(binding => {
let clone = Object.assign({}, binding)
clone.icon = "ShareAndroid"
@ -73,59 +82,62 @@
schemaFields = Object.entries(table?.schema ?? {})
.filter(entry => {
const [key, field] = entry
return field.type !== "formula" && !field.autocolumn
return field.type !== "formula" && !field.autocolumn // DEAN - revise autocolumn exclusion for testmodal
})
.sort(
([, schemaA], [, schemaB]) =>
(schemaA.type === "attachment") - (schemaB.type === "attachment")
)
// Parse out any unused data.
if ($memoStore?.meta?.columns) {
for (const column of meta?.columns) {
if (!(column in table?.schema)) {
columns.delete(column)
}
// Parse out any data not in the schema.
for (const column in editableFields) {
if (!(column in table?.schema)) {
delete editableFields[column]
}
columns = new Set(columns)
}
editableFields = editableFields
}
// Go through the table schema and build out the editable content
// schemaFields.forEach(entry => {
for (const entry of schemaFields) {
const [key, fieldSchema] = entry
if ($memoStore?.row?.[key] && !editableRow?.[key]) {
editableRow = {
...editableRow,
[key]: $memoStore?.row[key],
}
}
// Legacy
if (editableFields[key]?.clearRelationships) {
const emptyField = coerce(
!$memoStore?.row.hasOwnProperty(key) ? "" : $memoStore?.row[key],
fieldSchema.type
)
// remove this and place the field in the editable row.
delete editableFields[key]?.clearRelationships
// Default the field
editableRow = {
...editableRow,
[key]: emptyField,
}
console.log("DEAN EMPTY - clearRelationships", emptyField)
}
}
if (columns.size) {
for (const key of columns) {
const entry = schemaFields.find(entry => {
const [fieldKey] = entry
return fieldKey == key
})
if (entry) {
const [_, fieldSchema] = entry
editableRow = {
...editableRow,
[key]: coerce(
!(key in $memoStore?.row) ? "" : $memoStore?.row[key],
fieldSchema.type
),
}
}
}
} else {
schemaFields.forEach(entry => {
const [key] = entry
if ($memoStore?.row?.[key] && !editableRow?.[key]) {
editableRow = {
...editableRow,
[key]: $memoStore?.row[key],
}
columns.add(key)
}
})
columns = new Set(columns)
}
// Possible to go through the automation fields schema?
console.log("ACTUAL ROW", row)
console.log("EDITABLE FIELDS", editableFields)
console.log("EDITABLE ROW", editableRow)
}
// Legacy - add explicitly cleared relationships to the request.
$: if (schemaFields?.length && fields) {
// DEAN - review this
$: if (schemaFields?.length && fields && false) {
// Meta fields processing.
Object.keys(fields).forEach(key => {
if (fields[key]?.clearRelationships) {
@ -181,87 +193,121 @@
return value
}
const onChange = u => {
const update = {
_tableId: tableId,
row: { ...$memoStore.row },
meta: { ...$memoStore.meta },
...u,
const onChange = update => {
const customizer = (objValue, srcValue, key) => {
if (isPlainObject(objValue) && isPlainObject(srcValue)) {
const result = mergeWith({}, objValue, srcValue, customizer)
let outcome = Object.keys(result).reduce((acc, key) => {
if (result[key] !== null) {
acc[key] = result[key]
} else {
console.log(key + " is null", objValue)
}
return acc
}, {})
return outcome
}
return srcValue
}
dispatch("change", update)
}
const fieldUpdate = (e, field) => {
const update = {
row: {
...$memoStore?.row,
[field]: e.detail,
const result = mergeWith(
{},
{
row: editableRow,
meta: {
fields: editableFields,
},
},
}
onChange(update)
update,
customizer
)
console.log("Row Selector - MERGED", result)
dispatch("change", result)
}
</script>
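
The customizer in the onChange above deep-merges a pending update into the current row/meta state while pruning keys whose merged value is null, which is how a cleared field is dropped instead of lingering. A standalone sketch of the same lodash pattern, with illustrative data:

import { isPlainObject, mergeWith } from "lodash"

// Deep-merge `update` into `state`, removing any key whose merged value is null.
const customizer = (objValue: any, srcValue: any): any => {
  if (isPlainObject(objValue) && isPlainObject(srcValue)) {
    const merged = mergeWith({}, objValue, srcValue, customizer)
    return Object.fromEntries(
      Object.entries(merged).filter(([, v]) => v !== null)
    )
  }
  return srcValue
}

const state = { row: { name: "old", age: 3 }, meta: { fields: { age: {} } } }
const update = { row: { name: "new" }, meta: { fields: { age: null } } }

const result = mergeWith({}, state, update, customizer)
// result: { row: { name: "new", age: 3 }, meta: { fields: {} } }
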
{#if columns.size}
{#each schemaFields as [field, schema]}
{#if !schema.autocolumn && columns.has(field)}
<PropField
label={field}
fullWidth={attachmentTypes.includes(schema.type)}
>
<div class="prop-control-wrap">
{#if isTestModal}
{#each schemaFields || [] as [field, schema]}
{#if !schema.autocolumn && editableFields.hasOwnProperty(field)}
<PropField label={field} fullWidth={attachmentTypes.includes(schema.type)}>
<div class="prop-control-wrap">
{#if isTestModal}
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
value={editableRow}
meta={{
fields: editableFields,
}}
{onChange}
/>
{:else}
<DrawerBindableSlot
title={$memoStore?.row?.title || field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={editableRow[field]}
on:change={e => {
onChange({
row: {
[field]: e.detail.row[field],
},
})
}}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
value={$memoStore?.row}
onChange={fieldUpdate}
value={editableRow}
meta={{
fields: editableFields,
}}
onChange={change => {
console.log("RowSelectorTypes > RowSelector > ", change)
onChange(change)
}}
/>
{:else}
<DrawerBindableSlot
title={$memoStore?.row?.title || field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={editableRow[field]}
on:change={e => fieldUpdate(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
value={editableRow}
onChange={fieldUpdate}
/>
</DrawerBindableSlot>
{/if}
<Icon
hoverable
name="Close"
on:click={() => {
columns.delete(field)
const update = { ...editableRow }
delete update[field]
onChange({ row: update, meta: { columns: Array.from(columns) } })
}}
/>
</div>
</PropField>
{/if}
{/each}
{/if}
</DrawerBindableSlot>
{/if}
<Icon
hoverable
name="Close"
on:click={() => {
// Clear row data
const update = { ...editableRow }
update[field] = null
// delete update[field]
// Clear any related metadata
// delete editableFields[field]
// editableFields[field] = null
console.log("REMOVE STATE", {
row: update,
meta: { fields: { ...editableFields, [field]: null } },
})
onChange({
row: update,
meta: { fields: { ...editableFields, [field]: null } },
})
}}
/>
</div>
</PropField>
{/if}
{/each}
{#if table && schemaFields}
<div
class="add-fields-btn"
class:empty={!columns?.size}
class:empty={Object.is(editableFields, {})}
bind:this={popoverAnchor}
>
<ActionButton
@ -292,14 +338,14 @@
{#if !schema.autocolumn}
<li
class="table_field spectrum-Menu-item"
class:is-selected={columns.has(field)}
class:is-selected={editableFields.hasOwnProperty(field)}
on:click={e => {
if (columns.has(field)) {
columns.delete(field)
if (editableFields.hasOwnProperty(field)) {
editableFields[field] = null
} else {
columns.add(field)
editableFields[field] = {}
}
onChange({ meta: { columns: Array.from(columns) } })
onChange({ meta: { fields: editableFields } })
}}
>
<Icon


@ -1,5 +1,11 @@
<script>
import { Select, DatePicker, Multiselect, TextArea } from "@budibase/bbui"
import {
Select,
DatePicker,
Multiselect,
TextArea,
Toggle,
} from "@budibase/bbui"
import { FieldType } from "@budibase/types"
import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
@ -12,9 +18,12 @@
export let field
export let schema
export let value
export let meta
export let bindings
export let isTestModal
$: console.log(field + "VALUE???", value[field])
$: parsedBindings = bindings.map(binding => {
let clone = Object.assign({}, binding)
clone.icon = "ShareAndroid"
@ -33,38 +42,54 @@
function handleAttachmentParams(keyValueObj) {
let params = {}
if (
(schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE_SINGLE) &&
Object.keys(keyValueObj).length === 0
) {
return []
// DEAN - review this
if (!keyValueObj) {
return null
}
if (!Array.isArray(keyValueObj) && keyValueObj) {
keyValueObj = [keyValueObj]
}
if (keyValueObj.length) {
for (let param of keyValueObj) {
params[param.url] = param.filename
params[param.url || ""] = param.filename || ""
}
}
console.log("handleAttachmentParams ", params)
return params
}
</script>
{#if schemaHasOptions(schema) && schema.type !== "array"}
<Select
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
value={value[field]}
options={schema.constraints.inclusion}
/>
{:else if schema.type === "datetime"}
<DatePicker value={value[field]} on:change={e => onChange(e, field)} />
<DatePicker
value={value[field]}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
/>
{:else if schema.type === "boolean"}
<Select
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
value={value[field]}
options={[
{ label: "True", value: "true" },
@ -75,10 +100,23 @@
<Multiselect
value={value[field]}
options={schema.constraints.inclusion}
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
/>
{:else if schema.type === "longform"}
<TextArea value={value[field]} on:change={e => onChange(e, field)} />
<TextArea
value={value[field]}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
/>
{:else if schema.type === "json"}
<span>
<Editor
@ -86,7 +124,11 @@
mode="json"
on:change={e => {
if (e.detail?.value !== value[field]) {
onChange(e, field, schema.type)
onChange({
row: {
[field]: e.detail,
},
})
}
}}
value={value[field]}
@ -96,7 +138,12 @@
<LinkedRowSelector
linkedRows={value[field]}
{schema}
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
useLabel={false}
/>
{:else if schema.type === "bb_reference" || schema.type === "bb_reference_single"}
@ -104,51 +151,113 @@
linkedRows={value[field]}
{schema}
linkedTableId={"ta_users"}
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
useLabel={false}
/>
{:else if attachmentTypes.includes(schema.type)}
<div class="attachment-field-spacing">
<KeyValueBuilder
on:change={e =>
onChange(
{
detail:
schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE_SINGLE
? e.detail.length > 0
? {
url: e.detail[0].name,
filename: e.detail[0].value,
}
: {}
: e.detail.map(({ name, value }) => ({
url: name,
filename: value,
})),
},
field
)}
object={handleAttachmentParams(value[field] || {})}
allowJS
{bindings}
keyBindings
customButtonText={schema.type === FieldType.SIGNATURE_SINGLE
? "Add signature"
: "Add attachment"}
keyPlaceholder={"URL"}
valuePlaceholder={"Filename"}
actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE_SINGLE) &&
Object.keys(value[field] || {}).length >= 1}
/>
<div class="attachment-field-container">
<div class="toggle-container">
<Toggle
value={meta?.fields?.[field]?.useAttachmentBinding}
text={"Use bindings"}
size={"XS"}
on:change={e => {
const fromFalse =
!meta?.fields?.[field]?.useAttachmentBinding && e.detail === true
onChange({
...(fromFalse
? {
row: {
[field]: "", //clear the value if switching
},
}
: {}),
meta: {
fields: {
[field]: {
useAttachmentBinding: e.detail,
},
},
},
})
}}
/>
</div>
{#if !meta?.fields?.[field]?.useAttachmentBinding}
<div class="attachment-field-spacing">
<KeyValueBuilder
on:change={e =>
onChange({
row: {
[field]:
schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE_SINGLE
? e.detail.length > 0
? {
url: e.detail[0].name,
filename: e.detail[0].value,
}
: {}
: e.detail.map(({ name, value }) => ({
url: name,
filename: value,
})),
},
})}
object={handleAttachmentParams(value[field], false)}
allowJS
{bindings}
keyBindings
customButtonText={schema.type === FieldType.SIGNATURE_SINGLE
? "Add signature"
: "Add attachment"}
keyPlaceholder={"URL"}
valuePlaceholder={"Filename"}
actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
schema.type === FieldType.SIGNATURE_SINGLE) &&
Object.keys(value[field] || {}).length >= 1}
/>
</div>
{:else}
<div class="json-input-spacing">
{JSON.stringify(value[field])}
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}
value={value[field]}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
type="string"
bindings={parsedBindings}
allowJS={true}
updateOnChange={false}
title={schema.name}
/>
</div>
{/if}
</div>
{:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
{JSON.stringify(value[field])}
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}
value={value[field]}
on:change={e => onChange(e, field)}
on:change={e =>
onChange({
row: {
[field]: e.detail,
},
})}
type="string"
bindings={parsedBindings}
allowJS={true}
@ -159,7 +268,8 @@
{/if}
<style>
.attachment-field-spacing {
.attachment-field-spacing,
.json-input-spacing {
margin-top: var(--spacing-s);
border: 1px solid var(--spectrum-global-color-gray-400);
border-radius: 4px;


@ -334,7 +334,7 @@
// Add in defaults and initial definition
const definition = fieldDefinitions[type?.toUpperCase()]
if (definition?.constraints) {
editableColumn.constraints = definition.constraints
editableColumn.constraints = cloneDeep(definition.constraints)
}
editableColumn.type = definition.type


@ -8,7 +8,7 @@
} from "@budibase/bbui"
import download from "downloadjs"
import { API } from "api"
import { LuceneUtils } from "@budibase/frontend-core"
import { QueryUtils } from "@budibase/frontend-core"
import { utils } from "@budibase/shared-core"
import { ROW_EXPORT_FORMATS } from "constants/backend"
@ -49,7 +49,7 @@
exportFormat = Array.isArray(options) ? options[0]?.key : []
}
$: luceneFilter = LuceneUtils.buildLuceneQuery(appliedFilters)
$: query = QueryUtils.buildQuery(appliedFilters)
$: exportOpDisplay = buildExportOpDisplay(
sorting,
filterDisplay,
@ -139,7 +139,7 @@
tableId: view,
format: exportFormat,
search: {
query: luceneFilter,
query,
sort: sorting?.sortColumn,
sortOrder: sorting?.sortOrder,
paginate: false,


@ -38,4 +38,5 @@
{processFiles}
handleFileTooLarge={$admin.cloud ? handleFileTooLarge : null}
{fileSizeLimit}
on:change
/>


@ -29,7 +29,7 @@
on:click={() => onSelect(data)}
>
<span class="spectrum-Menu-itemLabel">
{data.label}
{data.datasource?.name ? `${data.datasource.name} - ` : ""}{data.label}
</span>
<svg
class="spectrum-Icon spectrum-UIIcon-Checkmark100 spectrum-Menu-checkmark spectrum-Menu-itemIcon"


@ -55,6 +55,9 @@
label: m.name,
tableId: m._id,
type: "table",
datasource: $datasources.list.find(
ds => ds._id === m.sourceId || ds._id === m.datasourceId
),
}))
$: viewsV1 = $viewsStore.list.map(view => ({
...view,


@ -12,7 +12,7 @@
import { dndzone } from "svelte-dnd-action"
import { generate } from "shortid"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { QueryUtils, Constants } from "@budibase/frontend-core"
import { selectedComponent, componentStore } from "stores/builder"
import { getComponentForSetting } from "components/design/settings/componentSettings"
import PropertyControl from "components/design/settings/controls/PropertyControl.svelte"
@ -119,7 +119,7 @@
}
const getOperatorOptions = condition => {
return LuceneUtils.getValidOperatorsForType({ type: condition.valueType })
return QueryUtils.getValidOperatorsForType({ type: condition.valueType })
}
const onOperatorChange = (condition, newOperator) => {
@ -138,7 +138,7 @@
condition.referenceValue = null
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType({
const validOperators = QueryUtils.getValidOperatorsForType({
type: newType,
}).map(x => x.value)
if (!validOperators.includes(condition.operator)) {

View file

@ -5,8 +5,6 @@
const { styleable, builderStore } = getContext("sdk")
const component = getContext("component")
let handlingOnClick = false
export let disabled = false
export let text = ""
export let onClick
@ -19,17 +17,9 @@
// For internal use only for now - not defined in the manifest
export let active = false
const handleOnClick = async () => {
handlingOnClick = true
if (onClick) {
await onClick()
}
handlingOnClick = false
}
let node
let touched = false
let handlingOnClick = false
$: $component.editing && node?.focus()
$: componentText = getComponentText(text, $builderStore, $component)
@ -42,7 +32,18 @@
}
const updateText = e => {
builderStore.actions.updateProp("text", e.target.textContent)
if (touched) {
builderStore.actions.updateProp("text", e.target.textContent)
}
touched = false
}
const handleOnClick = async () => {
handlingOnClick = true
if (onClick) {
await onClick()
}
handlingOnClick = false
}
</script>
@ -57,6 +58,7 @@
on:blur={$component.editing ? updateText : null}
bind:this={node}
class:active
on:input={() => (touched = true)}
>
{#if icon}
<i class="{icon} {size}" />


@ -1,7 +1,7 @@
<script>
import { getContext } from "svelte"
import { Pagination, ProgressCircle } from "@budibase/bbui"
import { fetchData, LuceneUtils } from "@budibase/frontend-core"
import { fetchData, QueryUtils } from "@budibase/frontend-core"
export let dataSource
export let filter
@ -19,7 +19,7 @@
// We need to manage our lucene query manually as we want to allow components
// to extend it
$: defaultQuery = LuceneUtils.buildLuceneQuery(filter)
$: defaultQuery = QueryUtils.buildQuery(filter)
$: query = extendQuery(defaultQuery, queryExtensions)
$: fetch = createFetch(dataSource)
$: fetch.update({


@ -90,9 +90,11 @@
columns.forEach((column, idx) => {
overrides[column.field] = {
displayName: column.label,
width: column.width,
order: idx,
}
if (column.width) {
overrides[column.field].width = column.width
}
})
return overrides
}


@ -14,6 +14,7 @@
export let size
let node
let touched = false
$: $component.editing && node?.focus()
$: placeholder = $builderStore.inBuilder && !text && !$component.editing
@ -47,7 +48,10 @@
// Convert contenteditable HTML to text and save
const updateText = e => {
builderStore.actions.updateProp("text", e.target.textContent)
if (touched) {
builderStore.actions.updateProp("text", e.target.textContent)
}
touched = false
}
</script>
@ -62,6 +66,7 @@
class:underline
class="spectrum-Heading {sizeClass} {alignClass}"
on:blur={$component.editing ? updateText : null}
on:input={() => (touched = true)}
>
{componentText}
</h1>


@ -16,6 +16,7 @@
export let size
let node
let touched = false
$: $component.editing && node?.focus()
$: externalLink = url && typeof url === "string" && !url.startsWith("/")
@ -62,7 +63,10 @@
}
const updateText = e => {
builderStore.actions.updateProp("text", e.target.textContent)
if (touched) {
builderStore.actions.updateProp("text", e.target.textContent)
}
touched = false
}
</script>
@ -76,6 +80,7 @@
class:underline
class="align--{align || 'left'} size--{size || 'M'}"
on:blur={$component.editing ? updateText : null}
on:input={() => (touched = true)}
>
{componentText}
</div>


@ -13,6 +13,7 @@
export let size
let node
let touched = false
$: $component.editing && node?.focus()
$: placeholder = $builderStore.inBuilder && !text && !$component.editing
@ -46,7 +47,10 @@
// Convert contenteditable HTML to text and save
const updateText = e => {
builderStore.actions.updateProp("text", e.target.textContent)
if (touched) {
builderStore.actions.updateProp("text", e.target.textContent)
}
touched = false
}
</script>
@ -61,6 +65,7 @@
class:underline
class="spectrum-Body {sizeClass} {alignClass}"
on:blur={$component.editing ? updateText : null}
on:input={() => (touched = true)}
>
{componentText}
</p>


@ -3,7 +3,7 @@
import { getContext, onDestroy } from "svelte"
import { ModalContent, Modal } from "@budibase/bbui"
import FilterModal from "./FilterModal.svelte"
import { LuceneUtils } from "@budibase/frontend-core"
import { QueryUtils } from "@budibase/frontend-core"
import Button from "../Button.svelte"
export let dataProvider
@ -36,7 +36,7 @@
// Add query extension to data provider
$: {
if (filters?.length) {
const queryExtension = LuceneUtils.buildLuceneQuery(filters)
const queryExtension = QueryUtils.buildQuery(filters)
addExtension?.($component.id, queryExtension)
} else {
removeExtension?.($component.id)


@ -26,6 +26,10 @@
// Register field with form
const formApi = formContext?.formApi
const labelPos = fieldGroupContext?.labelPosition || "above"
let touched = false
let labelNode
$: formStep = formStepContext ? $formStepContext || 1 : 1
$: formField = formApi?.registerField(
field,
@ -36,14 +40,12 @@
validation,
formStep
)
$: schemaType =
fieldSchema?.type !== "formula" && fieldSchema?.type !== "bigint"
? fieldSchema?.type
: "string"
// Focus label when editing
let labelNode
$: $component.editing && labelNode?.focus()
// Update form properties in parent component on every store change
@ -57,7 +59,10 @@
$: labelClass = labelPos === "above" ? "" : `spectrum-FieldLabel--${labelPos}`
const updateLabel = e => {
builderStore.actions.updateProp("label", e.target.textContent)
if (touched) {
builderStore.actions.updateProp("label", e.target.textContent)
}
touched = false
}
onDestroy(() => {
@ -79,6 +84,7 @@
bind:this={labelNode}
contenteditable={$component.editing}
on:blur={$component.editing ? updateLabel : null}
on:input={() => (touched = true)}
class:hidden={!label}
class:readonly
for={fieldState?.fieldId}


@ -31,7 +31,7 @@ import { enrichButtonActions } from "./utils/buttonActions.js"
import { processStringSync, makePropSafe } from "@budibase/string-templates"
import {
fetchData,
LuceneUtils,
QueryUtils,
Constants,
RowUtils,
memo,
@ -65,7 +65,7 @@ export default {
getAction,
fetchDatasourceSchema,
fetchData,
LuceneUtils,
QueryUtils,
ContextScopes: Constants.ContextScopes,
getAPIKey,
enrichButtonActions,


@ -1,4 +1,4 @@
import { LuceneUtils } from "@budibase/frontend-core"
import { QueryUtils } from "@budibase/frontend-core"
export const getActiveConditions = conditions => {
if (!conditions?.length) {
@ -33,8 +33,8 @@ export const getActiveConditions = conditions => {
value: condition.referenceValue,
}
const query = LuceneUtils.buildLuceneQuery([luceneCondition])
const result = LuceneUtils.runLuceneQuery([luceneCondition], query)
const query = QueryUtils.buildQuery([luceneCondition])
const result = QueryUtils.runQuery([luceneCondition], query)
return result.length > 0
})
}
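
buildQuery compiles a filter array into a structured query and runQuery evaluates it client-side, so a condition is "active" when the pseudo-row built from it matches. A sketch with an illustrative condition; the filter shape follows the hunk above and is not a verified public API surface:

import { dataFilters as QueryUtils } from "@budibase/shared-core"

// Shape mirrors the condition object built in the hunk above.
const condition = {
  field: "newValue",
  type: "string",
  operator: "equal",
  value: "expected", // right-hand side of the comparison
  newValue: "expected", // left-hand side, read via `field` when run as a row
} as any

const query = QueryUtils.buildQuery([condition])
// Evaluate the compiled query against the condition itself, treated as a row.
const matches = QueryUtils.runQuery([condition], query)
const conditionIsActive = matches.length > 0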


@ -13,7 +13,7 @@
} from "@budibase/bbui"
import { FieldType, SearchFilterOperator } from "@budibase/types"
import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { QueryUtils, Constants } from "@budibase/frontend-core"
import { getContext } from "svelte"
import FilterUsers from "./FilterUsers.svelte"
import { getFields } from "../utils/searchFields"
@ -112,7 +112,7 @@
return []
}
return LuceneUtils.getValidOperatorsForType(
return QueryUtils.getValidOperatorsForType(
filter,
filter.field || filter.name,
datasource


@ -81,6 +81,7 @@
}
input {
flex: 1 1 auto;
width: 0;
border: none;
padding: var(--cell-padding);
overflow: hidden;


@ -116,7 +116,9 @@
{#each displayColumns as column}
<div class="column">
<Icon size="S" name={getColumnIcon(column)} />
{column.label}
<div class="column-label" title={column.label}>
{column.label}
</div>
</div>
<ToggleActionButtonGroup
on:click={e => toggleColumn(column, e.detail)}
@ -139,7 +141,8 @@
display: grid;
align-items: center;
grid-template-columns: 1fr auto;
gap: 8px;
grid-row-gap: 8px;
grid-column-gap: 24px;
}
.columns :global(.spectrum-Switch) {
margin-right: 0;
@ -148,4 +151,11 @@
display: flex;
gap: 8px;
}
.column-label {
min-width: 80px;
max-width: 200px;
text-overflow: ellipsis;
white-space: nowrap;
overflow: hidden;
}
</style>


@ -29,7 +29,6 @@
.permissionPicker {
display: flex;
gap: var(--spacing-xs);
padding-left: calc(var(--spacing-xl) * 2);
}
.permissionPicker :global(.spectrum-Icon) {


@ -23,14 +23,24 @@
0
)
const updateBounds = () => {
bounds.set(body.getBoundingClientRect())
}
onMount(() => {
// Observe and record the height of the body
const observer = new ResizeObserver(() => {
bounds.set(body.getBoundingClientRect())
})
observer.observe(body)
const resizeObserver = new ResizeObserver(updateBounds)
resizeObserver.observe(body)
// Capture any wheel events on the page to ensure our scroll offset is
// correct. We don't care about touch events as we only need this for
// hovering over rows with a mouse.
window.addEventListener("wheel", updateBounds, true)
// Clean up listeners
return () => {
observer.disconnect()
resizeObserver.disconnect()
window.removeEventListener("wheel", updateBounds, true)
}
})
</script>
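
The same bounds-tracking idea outside Svelte, as a sketch: a ResizeObserver catches layout changes, and a capturing wheel listener catches scroll-induced movement that resizing alone would miss (names illustrative):

// Keep a cached bounding rect fresh across element resizes and page scrolling.
function trackBounds(el: HTMLElement, onBounds: (rect: DOMRect) => void) {
  const update = () => onBounds(el.getBoundingClientRect())

  const resizeObserver = new ResizeObserver(update)
  resizeObserver.observe(el)

  // Capture phase so wheel events anywhere on the page are observed, since
  // scrolling an ancestor moves the element without resizing it.
  window.addEventListener("wheel", update, true)

  return () => {
    resizeObserver.disconnect()
    window.removeEventListener("wheel", update, true)
  }
}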


@ -94,6 +94,7 @@ export const createActions = context => {
nonPlus,
schemaMutations,
schema,
notifications,
} = context
// Gets the appropriate API for the configured datasource type
@ -125,16 +126,25 @@ export const createActions = context => {
// Saves the datasource definition
const saveDefinition = async newDefinition => {
// Update local state
const originalDefinition = get(definition)
definition.set(newDefinition)
// Update server
if (get(config).canSaveSchema) {
await getAPI()?.actions.saveDefinition(newDefinition)
try {
await getAPI()?.actions.saveDefinition(newDefinition)
// Broadcast change so external state can be updated, as this change
// will not be received by the builder websocket because we caused it
// ourselves
dispatch("updatedatasource", newDefinition)
// Broadcast change so external state can be updated, as this change
// will not be received by the builder websocket because we caused it
// ourselves
dispatch("updatedatasource", newDefinition)
} catch (error) {
const msg = error?.message || error || "Unknown error"
get(notifications).error(`Error saving schema: ${msg}`)
// Reset the definition if saving failed
definition.set(originalDefinition)
}
}
}


@ -1,10 +1,9 @@
import { writable, derived, get } from "svelte/store"
import { cloneDeep } from "lodash/fp"
import { LuceneUtils } from "../utils"
import { QueryUtils } from "../utils"
import { convertJSONSchemaToTableSchema } from "../utils/json"
const { buildLuceneQuery, luceneLimit, runLuceneQuery, luceneSort } =
LuceneUtils
const { buildQuery, limit: queryLimit, runQuery, sort } = QueryUtils
/**
* Parent class which handles the implementation of fetching data from an
@ -177,10 +176,10 @@ export default class DataFetch {
}
}
// Build the lucene query
// Build the query
let query = this.options.query
if (!query) {
query = buildLuceneQuery(filter)
query = buildQuery(filter)
}
// Update store
@ -229,17 +228,17 @@ export default class DataFetch {
// If we don't support searching, do a client search
if (!this.features.supportsSearch && clientSideSearching) {
rows = runLuceneQuery(rows, query)
rows = runQuery(rows, query)
}
// If we don't support sorting, do a client-side sort
if (!this.features.supportsSort && clientSideSorting) {
rows = luceneSort(rows, sortColumn, sortOrder, sortType)
rows = sort(rows, sortColumn, sortOrder, sortType)
}
// If we don't support pagination, do a client-side limit
if (!this.features.supportsPagination && clientSideLimiting) {
rows = luceneLimit(rows, limit)
rows = queryLimit(rows, limit)
}
return {


@ -1,7 +1,7 @@
import { get } from "svelte/store"
import DataFetch from "./DataFetch.js"
import { TableNames } from "../constants"
import { LuceneUtils } from "../utils"
import { QueryUtils } from "../utils"
export default class UserFetch extends DataFetch {
constructor(opts) {
@ -33,7 +33,7 @@ export default class UserFetch extends DataFetch {
let finalQuery
// convert old format to new one - we now allow use of the lucene format
const { appId, paginated, ...rest } = query
if (!LuceneUtils.hasFilters(query) && rest.email != null) {
if (!QueryUtils.hasFilters(query) && rest.email != null) {
finalQuery = { string: { email: rest.email } }
} else {
finalQuery = rest


@ -1,4 +1,4 @@
export { dataFilters as LuceneUtils } from "@budibase/shared-core"
export { dataFilters as QueryUtils } from "@budibase/shared-core"
export * as JSONUtils from "./json"
export * as CookieUtils from "./cookies"
export * as RoleUtils from "./roles"


@ -48,6 +48,7 @@ async function init() {
HTTP_LOGGING: "0",
VERSION: "0.0.0+local",
PASSWORD_MIN_LENGTH: "1",
SQS_SEARCH_ENABLE: "1",
}
config = { ...config, ...existingConfig }


@ -860,8 +860,10 @@
"json",
"internal",
"barcodeqr",
"signature_single",
"bigint",
"bb_reference"
"bb_reference",
"bb_reference_single"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@ -1067,8 +1069,10 @@
"json",
"internal",
"barcodeqr",
"signature_single",
"bigint",
"bb_reference"
"bb_reference",
"bb_reference_single"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@ -1285,8 +1289,10 @@
"json",
"internal",
"barcodeqr",
"signature_single",
"bigint",
"bb_reference"
"bb_reference",
"bb_reference_single"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},


@ -782,8 +782,10 @@ components:
- json
- internal
- barcodeqr
- signature_single
- bigint
- bb_reference
- bb_reference_single
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@ -948,8 +950,10 @@ components:
- json
- internal
- barcodeqr
- signature_single
- bigint
- bb_reference
- bb_reference_single
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@ -1121,8 +1125,10 @@ components:
- json
- internal
- barcodeqr
- signature_single
- bigint
- bb_reference
- bb_reference_single
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:


@ -358,11 +358,14 @@ async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
await createApp(appId)
}
// Initialise the app migration version as the latest one
await appMigrations.updateAppMigrationMetadata({
appId,
version: appMigrations.getLatestMigrationId(),
})
const latestMigrationId = appMigrations.getLatestEnabledMigrationId()
if (latestMigrationId) {
// Initialise the app migration version as the latest one
await appMigrations.updateAppMigrationMetadata({
appId,
version: latestMigrationId,
})
}
await cache.app.invalidateAppMetadata(appId, newApplication)
return newApplication

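With migrations now individually toggleable, getLatestEnabledMigrationId can return nothing, so both call sites above guard before recording a version. A sketch of that gating, with an assumed return type and an illustrative wrapper:

// Assumption: returns the newest enabled migration id, or undefined when all
// migrations are disabled (e.g. feature-flagged off).
declare function getLatestEnabledMigrationId(): string | undefined

async function initialiseMigrationVersion(
  appId: string,
  updateMeta: (appId: string, version: string) => Promise<void>
) {
  const latest = getLatestEnabledMigrationId()
  if (!latest) {
    return // nothing enabled, so nothing to record for the new app
  }
  await updateMeta(appId, latest)
}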

@ -3,7 +3,7 @@ import { migrate as migrationImpl, MIGRATIONS } from "../../migrations"
import { Ctx } from "@budibase/types"
import {
getAppMigrationVersion,
getLatestMigrationId,
getLatestEnabledMigrationId,
} from "../../appMigrations"
export async function migrate(ctx: Ctx) {
@ -27,7 +27,9 @@ export async function getMigrationStatus(ctx: Ctx) {
const latestAppliedMigration = await getAppMigrationVersion(appId)
const migrated = latestAppliedMigration === getLatestMigrationId()
const latestMigrationId = getLatestEnabledMigrationId()
const migrated =
!latestMigrationId || latestAppliedMigration >= latestMigrationId
ctx.body = { migrated }
ctx.status = 200


@ -25,6 +25,7 @@ import {
outputProcessing,
} from "../../../utilities/rowProcessor"
import { cloneDeep } from "lodash"
import { generateIdForRow } from "./utils"
export async function handleRequest<T extends Operation>(
operation: T,
@ -55,11 +56,19 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
throw { validation: validateResult.errors }
}
const beforeRow = await sdk.rows.external.getRow(tableId, _id, {
relationships: true,
})
const response = await handleRequest(Operation.UPDATE, tableId, {
id: breakRowIdField(_id),
row: dataToUpdate,
})
const row = await sdk.rows.external.getRow(tableId, _id, {
// The id might have changed, in which case refetching by the old id would fail. Recalculate the id just in case
const updatedId =
generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
const row = await sdk.rows.external.getRow(tableId, updatedId, {
relationships: true,
})
const enrichedRow = await outputProcessing(table, row, {

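For external SQL rows the _id encodes the primary-key values, so patching a key column changes the id and a refetch by the old id would fail. A sketch of the recompute-then-refetch idea; generateIdForRow is the helper imported in the hunk above, while the loose types and wrapper are illustrative:

declare function generateIdForRow(row: any, table: any): string | undefined

function resolveUpdatedId(
  beforeRow: any,
  dataToUpdate: any,
  table: any,
  originalId: string
): string {
  // Merge the existing row with the patch so key columns carry their new
  // values, then fall back to the original id if none can be derived.
  return generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || originalId
}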

@ -84,9 +84,11 @@ export const save = async (ctx: UserCtx<Row, Row>) => {
if (body && body._id) {
return patch(ctx as UserCtx<PatchRowRequest, PatchRowResponse>)
}
const { row, table, squashed } = await quotas.addRow(() =>
sdk.rows.save(tableId, ctx.request.body, ctx.user?._id)
)
const { row, table, squashed } = tableId.includes("datasource_plus")
? await sdk.rows.save(tableId, ctx.request.body, ctx.user?._id)
: await quotas.addRow(() =>
sdk.rows.save(tableId, ctx.request.body, ctx.user?._id)
)
ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
ctx.message = `${table.name} saved successfully`
@ -152,7 +154,9 @@ async function deleteRows(ctx: UserCtx<DeleteRowRequest>) {
deleteRequest.rows = await processDeleteRowsRequest(ctx)
const { rows } = await pickApi(tableId).bulkDestroy(ctx)
await quotas.removeRows(rows.length)
if (!tableId.includes("datasource_plus")) {
await quotas.removeRows(rows.length)
}
for (let row of rows) {
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
@ -167,7 +171,9 @@ async function deleteRow(ctx: UserCtx<DeleteRowRequest>) {
const tableId = utils.getTableId(ctx)
const resp = await pickApi(tableId).destroy(ctx)
await quotas.removeRow()
if (!tableId.includes("datasource_plus")) {
await quotas.removeRow()
}
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, resp.row)
gridSocket?.emitRowDeletion(ctx, resp.row)
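
The quota hunks above all key off the table id: rows in external (datasource_plus) tables live in the customer's own database and are exempt from the internal row quota. A sketch of that predicate (the helper name is illustrative):

// Illustrative helper: external SQL table ids contain "datasource_plus",
// and their rows do not count toward the internal row quota.
function countsTowardRowQuota(tableId: string): boolean {
  return !tableId.includes("datasource_plus")
}

// Usage mirroring the delete path above:
//   if (countsTowardRowQuota(tableId)) await quotas.removeRow()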


@ -31,7 +31,7 @@ export async function searchView(
// Enrich saved query with ephemeral query params.
// We prevent searching on any fields that are saved as part of the query, as
// that could let users find rows they should not be allowed to access.
let query = dataFilters.buildLuceneQuery(view.query || [])
let query = dataFilters.buildQuery(view.query || [])
if (body.query) {
// Extract existing fields
const existingFields =


@ -31,7 +31,7 @@ import {
} from "@budibase/types"
import {
getAppMigrationVersion,
getLatestMigrationId,
getLatestEnabledMigrationId,
} from "../../../appMigrations"
import send from "koa-send"
@ -133,7 +133,7 @@ const requiresMigration = async (ctx: Ctx) => {
ctx.throw("AppId could not be found")
}
const latestMigration = getLatestMigrationId()
const latestMigration = getLatestEnabledMigrationId()
if (!latestMigration) {
return false
}


@ -1,91 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`/datasources fetch returns all the datasources from the server 1`] = `
[
{
"config": {},
"entities": [
{
"_id": "ta_users",
"_rev": "1-73b7912e6cbdd3d696febc60f3715844",
"createdAt": "2020-01-01T00:00:00.000Z",
"name": "Users",
"primaryDisplay": "email",
"schema": {
"email": {
"constraints": {
"email": true,
"length": {
"maximum": "",
},
"presence": true,
"type": "string",
},
"name": "email",
"type": "string",
},
"firstName": {
"constraints": {
"presence": false,
"type": "string",
},
"name": "firstName",
"type": "string",
},
"lastName": {
"constraints": {
"presence": false,
"type": "string",
},
"name": "lastName",
"type": "string",
},
"roleId": {
"constraints": {
"inclusion": [
"ADMIN",
"POWER",
"BASIC",
"PUBLIC",
],
"presence": false,
"type": "string",
},
"name": "roleId",
"type": "options",
},
"status": {
"constraints": {
"inclusion": [
"active",
"inactive",
],
"presence": false,
"type": "string",
},
"name": "status",
"type": "options",
},
},
"sourceId": "bb_internal",
"sourceType": "internal",
"type": "table",
"updatedAt": "2020-01-01T00:00:00.000Z",
"views": {},
},
],
"name": "Budibase DB",
"source": "BUDIBASE",
"type": "budibase",
},
{
"config": {},
"createdAt": "2020-01-01T00:00:00.000Z",
"isSQL": true,
"name": "Test",
"source": "POSTGRES",
"type": "datasource",
"updatedAt": "2020-01-01T00:00:00.000Z",
},
]
`;


@ -4,14 +4,12 @@ import { getCachedVariable } from "../../../threads/utils"
import { context, events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
import { generator } from "@budibase/backend-core/tests"
import {
Datasource,
FieldSchema,
BBReferenceFieldSubType,
FieldType,
QueryPreview,
RelationshipType,
SourceName,
Table,
@ -21,36 +19,34 @@ import {
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
tk.freeze(mocks.date.MOCK_DATE)
let { basicDatasource } = setup.structures
describe("/datasources", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let datasource: any
const config = setup.getConfig()
let datasource: Datasource
beforeAll(async () => {
await config.init()
})
afterAll(setup.afterAll)
async function setupTest() {
await config.init()
datasource = await config.createDatasource()
beforeEach(async () => {
datasource = await config.api.datasource.create({
type: "datasource",
name: "Test",
source: SourceName.POSTGRES,
config: {},
})
jest.clearAllMocks()
}
beforeAll(setupTest)
})
describe("create", () => {
it("should create a new datasource", async () => {
const res = await request
.post(`/api/datasources`)
.send(basicDatasource())
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.datasource.name).toEqual("Test")
expect(res.body.errors).toEqual({})
const ds = await config.api.datasource.create({
type: "datasource",
name: "Test",
source: SourceName.POSTGRES,
config: {},
})
expect(ds.name).toEqual("Test")
expect(events.datasource.created).toHaveBeenCalledTimes(1)
})
@ -72,88 +68,71 @@ describe("/datasources", () => {
})
})
describe("update", () => {
it("should update an existing datasource", async () => {
datasource.name = "Updated Test"
const res = await request
.put(`/api/datasources/${datasource._id}`)
.send(datasource)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
describe("dynamic variables", () => {
it("should invalidate changed or removed variables", async () => {
let datasource = await config.api.datasource.create({
type: "datasource",
name: "Rest",
source: SourceName.REST,
config: {},
})
expect(res.body.datasource.name).toEqual("Updated Test")
expect(res.body.errors).toBeUndefined()
expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
const query = await config.api.query.save({
datasourceId: datasource._id!,
fields: {
path: "www.google.com",
},
parameters: [],
transformer: null,
queryVerb: "read",
name: datasource.name!,
schema: {},
readable: true,
})
describe("dynamic variables", () => {
async function preview(
datasource: any,
fields: { path: string; queryString: string }
) {
const queryPreview: QueryPreview = {
fields,
datasourceId: datasource._id,
parameters: [],
transformer: null,
queryVerb: "read",
name: datasource.name,
schema: {},
readable: true,
}
return config.api.query.preview(queryPreview)
}
datasource = await config.api.datasource.update({
...datasource,
config: {
dynamicVariables: [
{
queryId: query._id,
name: "variable3",
value: "{{ data.0.[value] }}",
},
],
},
})
it("should invalidate changed or removed variables", async () => {
const { datasource, query } = await config.dynamicVariableDatasource()
// preview once to cache variables
await preview(datasource, {
// preview once to cache variables
await config.api.query.preview({
fields: {
path: "www.example.com",
queryString: "test={{ variable3 }}",
})
// check variables in cache
let contents = await getCachedVariable(query._id!, "variable3")
expect(contents.rows.length).toEqual(1)
// update the datasource to remove the variables
datasource.config!.dynamicVariables = []
const res = await request
.put(`/api/datasources/${datasource._id}`)
.send(datasource)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.errors).toBeUndefined()
// check variables no longer in cache
contents = await getCachedVariable(query._id!, "variable3")
expect(contents).toBe(null)
},
datasourceId: datasource._id!,
parameters: [],
transformer: null,
queryVerb: "read",
name: datasource.name!,
schema: {},
readable: true,
})
// check variables in cache
let contents = await getCachedVariable(query._id!, "variable3")
expect(contents.rows.length).toEqual(1)
// update the datasource to remove the variables
datasource.config!.dynamicVariables = []
await config.api.datasource.update(datasource)
// check variables no longer in cache
contents = await getCachedVariable(query._id!, "variable3")
expect(contents).toBe(null)
})
})
describe("fetch", () => {
beforeAll(setupTest)
it("returns all the datasources from the server", async () => {
const res = await request
.get(`/api/datasources`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const datasources = res.body
// remove non-deterministic fields
for (let source of datasources) {
delete source._id
delete source._rev
}
expect(datasources).toMatchSnapshot()
})
describe("permissions", () => {
it("should apply authorization to endpoint", async () => {
await checkBuilderEndpoint({
config,
@ -161,41 +140,8 @@ describe("/datasources", () => {
url: `/api/datasources`,
})
})
})
describe("find", () => {
it("should be able to find a datasource", async () => {
const res = await request
.get(`/api/datasources/${datasource._id}`)
.set(config.defaultHeaders())
.expect(200)
expect(res.body._rev).toBeDefined()
expect(res.body._id).toEqual(datasource._id)
})
})
describe("destroy", () => {
beforeAll(setupTest)
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.createQuery()
await request
.delete(`/api/datasources/${datasource._id}/${datasource._rev}`)
.set(config.defaultHeaders())
.expect(200)
const res = await request
.get(`/api/datasources`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.length).toEqual(1)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
it("should apply authorization to endpoint", async () => {
it("should apply authorization to delete endpoint", async () => {
await checkBuilderEndpoint({
config,
method: "DELETE",
@ -204,175 +150,296 @@ describe("/datasources", () => {
})
})
describe("check secret replacement", () => {
async function makeDatasource() {
datasource = basicDatasource()
datasource.datasource.config.password = "testing"
const res = await request
.post(`/api/datasources`)
.send(datasource)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return res.body.datasource
}
it("should save a datasource with password", async () => {
const datasource = await makeDatasource()
expect(datasource.config.password).toBe("--secret-value--")
})
it("should not the password on update with the --secret-value--", async () => {
const datasource = await makeDatasource()
await request
.put(`/api/datasources/${datasource._id}`)
.send(datasource)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
await context.doInAppContext(config.getAppId(), async () => {
const dbDatasource: any = await sdk.datasources.get(datasource._id)
expect(dbDatasource.config.password).toBe("testing")
})
})
})
describe.each([
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("fetch schema (%s)", (_, dsProvider) => {
beforeAll(async () => {
datasource = await config.api.datasource.create(await dsProvider)
])("%s", (_, dsProvider) => {
let rawDatasource: Datasource
beforeEach(async () => {
rawDatasource = await dsProvider
datasource = await config.api.datasource.create(rawDatasource)
})
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
describe("get", () => {
it("should be able to get a datasource", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds).toEqual({
config: expect.any(Object),
plus: datasource.plus,
source: datasource.source,
isSQL: true,
type: "datasource_plus",
_id: datasource._id,
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
})
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
it("should not return database password", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
})
})
describe("list", () => {
it("returns all the datasources", async () => {
const datasources = await config.api.datasource.fetch()
expect(datasources).toContainEqual(expect.objectContaining(datasource))
})
})
describe("put", () => {
it("should update an existing datasource", async () => {
const newName = generator.guid()
datasource.name = newName
const updatedDs = await config.api.datasource.update(datasource)
expect(updatedDs.name).toEqual(newName)
expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
it("should not overwrite database password with --secret-value--", async () => {
const password = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(password).not.toBe("--secret-value--")
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
await config.api.datasource.update(
await config.api.datasource.get(datasource._id!)
)
const newPassword = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(newPassword).not.toBe("--secret-value--")
expect(newPassword).toBe(password)
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.api.query.save({
datasourceId: datasource._id!,
name: "Test Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: null,
readable: true,
})
await config.api.datasource.delete(datasource)
const datasources = await config.api.datasource.fetch()
expect(datasources).not.toContainEqual(
expect.objectContaining(datasource)
)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
})
describe("schema", () => {
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: stringName,
type: FieldType.STRING,
constraints: {
presence: true,
},
},
})
)
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: true,
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: { allowEmpty: false },
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "bb_reference_single",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = {
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
// the constraint will be unset - as the DB doesn't recognise it as not null
if (fieldName === stringName) {
field.constraints = {}
}
acc[fieldName] = expect.objectContaining({
...field,
})
return acc
},
{}
),
}
return acc
},
{}
),
_rev: expect.any(String),
updatedAt: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
describe("verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error: /.*/, // error message differs between databases
},
}
)
})
})
describe("info", () => {
it("should fetch information about postgres datasource", async () => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toContain(table.name)
})
})
})
})

View file

@ -38,7 +38,7 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/rows (%s)", (__, dsProvider) => {
])("/rows (%s)", (providerType, dsProvider) => {
const isInternal = dsProvider === undefined
const config = setup.getConfig()
@ -134,6 +134,10 @@ describe.each([
// error. This is to account for the fact that parallel writes can result
// in some quota updates getting lost. We don't have any need to solve this
// right now, so we just allow for some error.
if (expected === 0) {
expect(usage).toEqual(0)
return
}
expect(usage).toBeGreaterThan(expected * 0.9)
expect(usage).toBeLessThan(expected * 1.1)
}
@ -158,7 +162,7 @@ describe.each([
})
expect(row.name).toEqual("Test Contact")
expect(row._rev).toBeDefined()
await assertRowUsage(rowUsage + 1)
await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage)
})
it("fails to create a row for a table that does not exist", async () => {
@ -230,7 +234,7 @@ describe.each([
expect(row["Row ID"]).toBeGreaterThan(previousId)
previousId = row["Row ID"]
}
await assertRowUsage(rowUsage + 10)
await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage)
})
isInternal &&
@ -693,6 +697,49 @@ describe.each([
})
expect(resp.relationship.length).toBe(1)
})
!isInternal &&
// TODO: SQL is having issues creating composite keys
providerType !== DatabaseName.SQL_SERVER &&
it("should support updating fields that are part of a composite key", async () => {
const tableRequest = saveTableRequest({
primary: ["number", "string"],
schema: {
string: {
type: FieldType.STRING,
name: "string",
},
number: {
type: FieldType.NUMBER,
name: "number",
},
},
})
delete tableRequest.schema.id
const table = await config.api.table.save(tableRequest)
const stringValue = generator.word()
const naturalValue = generator.integer({ min: 0, max: 1000 })
const existing = await config.api.row.save(table._id!, {
string: stringValue,
number: naturalValue,
})
expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`)
const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
string: stringValue,
number: 1500,
})
expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`)
})
})
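For context on the `%5B...%5D` assertions above: the expected _id values are consistent with the composite key being JSON-stringified, double quotes swapped for single quotes, then URI-encoded. A minimal sketch (illustrative values, an assumption about the encoding rather than a quote of the implementation):

const id = encodeURIComponent(JSON.stringify([1500, "doc"]).replace(/"/g, "'"))
console.log(id) // %5B1500%2C'doc'%5D - same shape as the expected _id above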
describe("destroy", () => {
@ -708,18 +755,21 @@ describe.each([
rows: [createdRow],
})
expect(res[0]._id).toEqual(createdRow._id)
await assertRowUsage(rowUsage - 1)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it("should be able to bulk delete rows, including a row that doesn't exist", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const createdRow2 = await config.api.row.save(table._id!, {})
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow, { _id: "9999999" }],
rows: [createdRow, createdRow2, { _id: "9999999" }],
})
expect(res[0]._id).toEqual(createdRow._id)
expect(res.length).toEqual(1)
expect(res.map(r => r._id)).toEqual(
expect.arrayContaining([createdRow._id, createdRow2._id])
)
expect(res.length).toEqual(2)
})
})
@ -771,7 +821,7 @@ describe.each([
expect(res.length).toEqual(2)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 2)
await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
})
it("should be able to delete a variety of row set types", async () => {
@ -788,7 +838,7 @@ describe.each([
expect(res.length).toEqual(3)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 3)
await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage)
})
it("should accept a valid row object and delete the row", async () => {
@ -799,7 +849,7 @@ describe.each([
expect(res.id).toEqual(row1._id)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(rowUsage - 1)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it("Should ignore malformed/invalid delete requests", async () => {
@ -1637,3 +1687,5 @@ describe.each([
})
})
})
// todo: remove me

View file

@ -1111,7 +1111,7 @@ describe.each([
const createdRow = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
await assertRowUsage(rowUsage - 1)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
await config.api.row.get(table._id!, createdRow._id!, {
status: 404,
})
@ -1127,7 +1127,7 @@ describe.each([
await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
await assertRowUsage(rowUsage - 2)
await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
await config.api.row.get(table._id!, rows[0]._id!, {
status: 404,

View file

@ -1,4 +1,4 @@
import { Duration, cache, context, db, env } from "@budibase/backend-core"
import { Duration, cache, db, env } from "@budibase/backend-core"
import { Database, DocumentType, Document } from "@budibase/types"
export interface AppMigrationDoc extends Document {
@ -25,15 +25,15 @@ export async function getAppMigrationVersion(appId: string): Promise<string> {
let metadata: AppMigrationDoc | undefined = await cache.get(cacheKey)
// We don't want to cache in dev, in order to be able to tweak it
if (metadata && !env.isDev()) {
// return the cached version if we found one
if (metadata?.version) {
return metadata.version
}
let version
try {
metadata = await getFromDB(appId)
version = metadata.version
version = metadata.version || ""
} catch (err: any) {
if (err.status !== 404) {
throw err
@ -42,7 +42,10 @@ export async function getAppMigrationVersion(appId: string): Promise<string> {
version = ""
}
await cache.store(cacheKey, version, EXPIRY_SECONDS)
// only cache if we have a valid version
if (version) {
await cache.store(cacheKey, version, EXPIRY_SECONDS)
}
return version
}
@ -54,8 +57,7 @@ export async function updateAppMigrationMetadata({
appId: string
version: string
}): Promise<void> {
const db = context.getAppDB()
const appDb = db.getDB(appId)
let appMigrationDoc: AppMigrationDoc
try {
@ -70,7 +72,7 @@ export async function updateAppMigrationMetadata({
version: "",
history: {},
}
await db.put(appMigrationDoc)
await appDb.put(appMigrationDoc)
appMigrationDoc = await getFromDB(appId)
}
@ -82,7 +84,7 @@ export async function updateAppMigrationMetadata({
[version]: { runAt: new Date().toISOString() },
},
}
await db.put(updatedMigrationDoc)
await appDb.put(updatedMigrationDoc)
const cacheKey = getCacheKey(appId)

View file

@ -1,4 +1,4 @@
import queue from "./queue"
import { getAppMigrationQueue } from "./queue"
import { Next } from "koa"
import { getAppMigrationVersion } from "./appMigrationMetadata"
import { MIGRATIONS } from "./migrations"
@ -10,32 +10,55 @@ export * from "./appMigrationMetadata"
export type AppMigration = {
id: string
func: () => Promise<void>
// disabled so that by default all migrations listed are enabled
disabled?: boolean
}
export const getLatestMigrationId = () =>
MIGRATIONS.map(m => m.id)
.sort()
.reverse()[0]
export function getLatestEnabledMigrationId(migrations?: AppMigration[]) {
let latestMigrationId: string | undefined
if (!migrations) {
migrations = MIGRATIONS
}
for (let migration of migrations) {
// if a migration is disabled, all migrations after it are disabled
if (migration.disabled) {
break
}
latestMigrationId = migration.id
}
return latestMigrationId
}
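Because the loop breaks at the first disabled entry, a disabled migration implicitly disables everything after it. A quick illustration with hypothetical IDs:

// hypothetical migrations - the disabled second entry gates the third
const example: AppMigration[] = [
  { id: "0001_a", func: async () => {} },
  { id: "0002_b", func: async () => {}, disabled: true },
  { id: "0003_c", func: async () => {} },
]
getLatestEnabledMigrationId(example) // "0001_a"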
const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""
function getTimestamp(versionId: string) {
return versionId?.split("_")[0] || ""
}
export async function checkMissingMigrations(
ctx: UserCtx,
next: Next,
appId: string
) {
const currentVersion = await getAppMigrationVersion(appId)
const latestMigration = getLatestMigrationId()
const latestMigration = getLatestEnabledMigrationId()
if (getTimestamp(currentVersion) < getTimestamp(latestMigration)) {
// no migrations set - edge case, don't try to do anything
if (!latestMigration) {
return next()
}
const currentVersion = await getAppMigrationVersion(appId)
const queue = getAppMigrationQueue()
if (
queue &&
latestMigration &&
getTimestamp(currentVersion) < getTimestamp(latestMigration)
) {
await queue.add(
{
appId,
},
{
jobId: `${appId}_${latestMigration}`,
removeOnComplete: true,
removeOnFail: true,
}
)

View file

@ -1,7 +1,15 @@
// This file should never be manually modified, use `yarn add-app-migration` in order to add a new one
import env from "../environment"
import { AppMigration } from "."
import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"
// Migrations will be executed sorted by ID
export const MIGRATIONS: AppMigration[] = [
// Migrations will be executed sorted by id
{
id: "20240604153647_initial_sqs",
func: m20240604153647_initial_sqs,
disabled: !env.SQS_SEARCH_ENABLE,
},
]

View file

@ -0,0 +1,52 @@
import { context } from "@budibase/backend-core"
import { allLinkDocs } from "../../db/utils"
import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
import sdk from "../../sdk"
import env from "../../environment"
const migration = async () => {
const linkDocs = await allLinkDocs()
const docsToUpdate = []
for (const linkDoc of linkDocs) {
if (linkDoc.tableId) {
// It already had the required data
continue
}
// the link doc is missing the junction table ID - rebuild it with one
if (!linkDoc.tableId) {
const newLink = new LinkDocumentImpl(
linkDoc.doc1.tableId,
linkDoc.doc1.fieldName,
linkDoc.doc1.rowId,
linkDoc.doc2.tableId,
linkDoc.doc2.fieldName,
linkDoc.doc2.rowId
)
newLink._id = linkDoc._id!
newLink._rev = linkDoc._rev
docsToUpdate.push(newLink)
}
}
const db = context.getAppDB()
if (docsToUpdate.length) {
await db.bulkDocs(docsToUpdate)
}
// at the end make sure design doc is ready
await sdk.tables.sqs.syncDefinition()
// only do initial search if environment is using SQS already
// initial search makes sure that all the indexes have been created
// and are ready to use, avoiding any initial waits for large tables
if (env.SQS_SEARCH_ENABLE) {
const tables = await sdk.tables.getAllInternalTables()
// do these one by one - running in parallel could cause problems
for (let table of tables) {
await db.sql(`select * from ${table._id} limit 1`)
}
}
}
export default migration

View file

@ -0,0 +1,120 @@
import * as setup from "../../../api/routes/tests/utilities"
import { basicTable } from "../../../tests/utilities/structures"
import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
import {
LinkDocument,
DocumentType,
SQLiteDefinition,
SQLiteType,
} from "@budibase/types"
import {
generateJunctionTableID,
generateLinkID,
generateRowID,
} from "../../../db/utils"
import { processMigrations } from "../../migrationsProcessor"
import migration from "../20240604153647_initial_sqs"
import { AppMigration } from "src/appMigrations"
const MIGRATIONS: AppMigration[] = [
{
id: "20240604153647_initial_sqs",
func: migration,
disabled: false,
},
]
const config = setup.getConfig()
let tableId: string
function oldLinkDocInfo() {
const tableId1 = `${DocumentType.TABLE}_a`,
tableId2 = `${DocumentType.TABLE}_b`
return {
tableId1,
tableId2,
rowId1: generateRowID(tableId1, "b"),
rowId2: generateRowID(tableId2, "a"),
col1: "columnB",
col2: "columnA",
}
}
function oldLinkDocID() {
const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
return generateLinkID(tableId1, tableId2, rowId1, rowId2, col1, col2)
}
function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
return {
type: "link",
_id: oldLinkDocID(),
doc1: {
tableId: tableId1,
fieldName: col1,
rowId: rowId1,
},
doc2: {
tableId: tableId2,
fieldName: col2,
rowId: rowId2,
},
}
}
async function sqsDisabled(cb: () => Promise<void>) {
await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
}
async function sqsEnabled(cb: () => Promise<void>) {
await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
}
beforeAll(async () => {
await sqsDisabled(async () => {
await config.init()
const table = await config.api.table.save(basicTable())
tableId = table._id!
const db = dbCore.getDB(config.appId!)
// old link document
await db.put(oldLinkDocument())
})
})
describe("SQS migration", () => {
it("test migration runs as expected against an older DB", async () => {
const db = dbCore.getDB(config.appId!)
// confirm nothing exists initially
await sqsDisabled(async () => {
let error: any | undefined
try {
await db.get(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
error = err
}
expect(error).toBeDefined()
expect(error.status).toBe(404)
})
await sqsEnabled(async () => {
await processMigrations(config.appId!, MIGRATIONS)
const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
expect(designDoc.sql.tables).toBeDefined()
const mainTableDef = designDoc.sql.tables[tableId]
expect(mainTableDef).toBeDefined()
expect(mainTableDef.fields.name).toEqual(SQLiteType.TEXT)
expect(mainTableDef.fields.description).toEqual(SQLiteType.TEXT)
const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
expect(linkDoc.tableId).toEqual(
generateJunctionTableID(tableId1, tableId2)
)
// should have swapped the documents
expect(linkDoc.doc1.tableId).toEqual(tableId2)
expect(linkDoc.doc1.rowId).toEqual(rowId2)
expect(linkDoc.doc2.tableId).toEqual(tableId1)
expect(linkDoc.doc2.rowId).toEqual(rowId1)
})
})
})

View file

@ -1,4 +1,4 @@
import { context, locks } from "@budibase/backend-core"
import { context, locks, logging } from "@budibase/backend-core"
import { LockName, LockType } from "@budibase/types"
import {
@ -12,47 +12,58 @@ export async function processMigrations(
migrations: AppMigration[]
) {
console.log(`Processing app migration for "${appId}"`)
try {
// first step - setup full context - tenancy, app and guards
await context.doInAppMigrationContext(appId, async () => {
console.log(`Acquiring app migration lock for "${appId}"`)
await locks.doWithLock(
{
name: LockName.APP_MIGRATION,
type: LockType.AUTO_EXTEND,
resource: appId,
},
async () => {
console.log(`Lock acquired starting app migration for "${appId}"`)
let currentVersion = await getAppMigrationVersion(appId)
await locks.doWithLock(
{
name: LockName.APP_MIGRATION,
type: LockType.AUTO_EXTEND,
resource: appId,
},
async () => {
await context.doInAppMigrationContext(appId, async () => {
let currentVersion = await getAppMigrationVersion(appId)
const pendingMigrations = migrations
.filter(m => m.id > currentVersion)
.sort((a, b) => a.id.localeCompare(b.id))
const pendingMigrations = migrations
.filter(m => m.id > currentVersion)
.sort((a, b) => a.id.localeCompare(b.id))
const migrationIds = migrations.map(m => m.id).sort()
console.log(
`App migrations to run for "${appId}" - ${migrationIds.join(",")}`
)
const migrationIds = migrations.map(m => m.id).sort()
let index = 0
for (const { id, func } of pendingMigrations) {
const expectedMigration =
migrationIds[migrationIds.indexOf(currentVersion) + 1]
let index = 0
for (const { id, func } of pendingMigrations) {
const expectedMigration =
migrationIds[migrationIds.indexOf(currentVersion) + 1]
if (expectedMigration !== id) {
throw new Error(
`Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
)
}
if (expectedMigration !== id) {
throw `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
const counter = `(${++index}/${pendingMigrations.length})`
console.info(`Running migration ${id}... ${counter}`, {
migrationId: id,
appId,
})
await func()
await updateAppMigrationMetadata({
appId,
version: id,
})
currentVersion = id
}
const counter = `(${++index}/${pendingMigrations.length})`
console.info(`Running migration ${id}... ${counter}`, {
migrationId: id,
appId,
})
await func()
await updateAppMigrationMetadata({
appId,
version: id,
})
currentVersion = id
}
})
}
)
console.log(`App migration for "${appId}" processed`)
)
})
console.log(`App migration for "${appId}" processed`)
} catch (err) {
logging.logAlert("Failed to run app migration", err)
throw err
}
}

View file

@ -1,15 +1,45 @@
import { queue } from "@budibase/backend-core"
import { queue, logging } from "@budibase/backend-core"
import { Job } from "bull"
import { MIGRATIONS } from "./migrations"
import { processMigrations } from "./migrationsProcessor"
const appMigrationQueue = queue.createQueue(queue.JobQueue.APP_MIGRATION)
appMigrationQueue.process(processMessage)
const MAX_ATTEMPTS = 3
// max number of migrations to run at the same time, per node
const MIGRATION_CONCURRENCY = 5
async function processMessage(job: Job) {
export type AppMigrationJob = {
appId: string
}
let appMigrationQueue: queue.Queue<AppMigrationJob> | undefined
export function init() {
appMigrationQueue = queue.createQueue<AppMigrationJob>(
queue.JobQueue.APP_MIGRATION,
{
jobOptions: {
attempts: MAX_ATTEMPTS,
removeOnComplete: true,
removeOnFail: true,
},
maxStalledCount: MAX_ATTEMPTS,
removeStalledCb: async (job: Job) => {
logging.logAlert(
`App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
)
},
}
)
return appMigrationQueue.process(MIGRATION_CONCURRENCY, processMessage)
}
async function processMessage(job: Job<AppMigrationJob>) {
const { appId } = job.data
await processMigrations(appId, MIGRATIONS)
}
export default appMigrationQueue
export function getAppMigrationQueue() {
return appMigrationQueue
}

View file

@ -1,6 +1,7 @@
import { Header } from "@budibase/backend-core"
import * as setup from "../../api/routes/tests/utilities"
import * as migrations from "../migrations"
import { AppMigration, getLatestEnabledMigrationId } from "../index"
import { getAppMigrationVersion } from "../appMigrationMetadata"
jest.mock<typeof migrations>("../migrations", () => ({
@ -52,4 +53,29 @@ describe("migrations", () => {
},
})
})
it("should disable all migrations after one that is disabled", () => {
const MIGRATION_ID1 = "20231211105810_new-test",
MIGRATION_ID2 = "20231211105812_new-test",
MIGRATION_ID3 = "20231211105814_new-test"
// create some migrations to test with
const migrations: AppMigration[] = [
{
id: MIGRATION_ID1,
func: async () => {},
},
{
id: MIGRATION_ID2,
func: async () => {},
},
{
id: MIGRATION_ID3,
func: async () => {},
},
]
expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID3)
migrations[1].disabled = true
expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID1)
})
})

View file

@ -99,6 +99,15 @@ export function getError(err: any) {
return typeof err !== "string" ? err.toString() : err
}
export function guardAttachment(attachmentObject: any) {
if (!("url" in attachmentObject) || !("filename" in attachmentObject)) {
const providedKeys = Object.keys(attachmentObject).join(", ")
throw new Error(
`Attachments must have both "url" and "filename" keys. You have provided: ${providedKeys}`
)
}
}
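A minimal sketch of the guard's behaviour (values illustrative):

guardAttachment({ url: "s3://bucket/file.txt", filename: "file.txt" }) // passes
guardAttachment({ link: "file.txt" }) // throws: Attachments must have both "url" and "filename" keys. You have provided: link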
export async function sendAutomationAttachmentsToStorage(
tableId: string,
row: Row
@ -116,9 +125,15 @@ export async function sendAutomationAttachmentsToStorage(
schema?.type === FieldType.ATTACHMENT_SINGLE ||
schema?.type === FieldType.SIGNATURE_SINGLE
) {
if (Array.isArray(value)) {
value.forEach(item => guardAttachment(item))
} else {
guardAttachment(value)
}
attachmentRows[prop] = value
}
}
for (const [prop, attachments] of Object.entries(attachmentRows)) {
if (!attachments) {
continue
@ -135,7 +150,6 @@ export async function sendAutomationAttachmentsToStorage(
return row
}
async function generateAttachmentRow(attachment: AutomationAttachment) {
const prodAppId = context.getProdAppId()

View file

@ -3,6 +3,7 @@ import { KoaAdapter } from "@bull-board/koa"
import { queue } from "@budibase/backend-core"
import * as automation from "../threads/automation"
import { backups } from "@budibase/pro"
import { getAppMigrationQueue } from "../appMigrations/queue"
import { createBullBoard } from "@bull-board/api"
import BullQueue from "bull"
@ -16,10 +17,14 @@ const PATH_PREFIX = "/bulladmin"
export async function init() {
// Set up queues for bull board admin
const backupQueue = backups.getBackupQueue()
const appMigrationQueue = getAppMigrationQueue()
const queues = [automationQueue]
if (backupQueue) {
queues.push(backupQueue)
}
if (appMigrationQueue) {
queues.push(appMigrationQueue)
}
const adapters = []
const serverAdapter: any = new KoaAdapter()
for (let queue of queues) {

View file

@ -90,7 +90,6 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
tableId: inputs.row.tableId,
},
})
try {
inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
inputs.row = await sendAutomationAttachmentsToStorage(

View file

@ -118,6 +118,14 @@ export async function run({ inputs }: AutomationStepInput) {
}
to = to || undefined
if (attachments) {
if (Array.isArray(attachments)) {
attachments.forEach(item => automationUtils.guardAttachment(item))
} else {
automationUtils.guardAttachment(attachments)
}
}
try {
let response = await sendSmtpEmail({
to,

View file

@ -128,4 +128,31 @@ describe("test the create row action", () => {
expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0)
})
it("should check that attachment without the correct keys throws an error", async () => {
let attachmentTable = await config.createTable(
basicTableWithAttachmentField()
)
let attachmentRow: any = {
tableId: attachmentTable._id,
}
let filename = "test2.txt"
let presignedUrl = await uploadTestFile(filename)
let attachmentObject = {
wrongKey: presignedUrl,
anotherWrongKey: filename,
}
attachmentRow.single_file_attachment = attachmentObject
const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
row: attachmentRow,
})
expect(res.success).toEqual(false)
expect(res.response).toEqual(
'Error: Attachments must have both "url" and "filename" keys. You have provided: wrongKey, anotherWrongKey'
)
})
})

View file

@ -59,6 +59,9 @@ class LinkDocumentImpl implements LinkDocument {
this.doc1 = docA.tableId > docB.tableId ? docA : docB
this.doc2 = docA.tableId > docB.tableId ? docB : docA
}
_rev?: string | undefined
createdAt?: string | number | undefined
updatedAt?: string | undefined
}
export default LinkDocumentImpl

View file

@ -1,5 +1,5 @@
import newid from "./newid"
import { db as dbCore } from "@budibase/backend-core"
import { context, db as dbCore } from "@budibase/backend-core"
import {
DatabaseQueryOpts,
Datasource,
@ -10,6 +10,7 @@ import {
RelationshipFieldMetadata,
SourceName,
VirtualDocumentType,
LinkDocument,
} from "@budibase/types"
export { DocumentType, VirtualDocumentType } from "@budibase/types"
@ -137,10 +138,24 @@ export function generateLinkID(
/**
* Gets parameters for retrieving link docs, this is a utility function for the getDocParams function.
*/
export function getLinkParams(otherProps: any = {}) {
function getLinkParams(otherProps: Partial<DatabaseQueryOpts> = {}) {
return getDocParams(DocumentType.LINK, null, otherProps)
}
/**
* Gets all the link documents from the current app db.
*/
export async function allLinkDocs() {
const db = context.getAppDB()
const response = await db.allDocs<LinkDocument>(
getLinkParams({
include_docs: true,
})
)
return response.rows.map(row => row.doc!)
}
/**
* Generates a new layout ID.
* @returns The new layout ID which the layout doc can be stored under.

View file

@ -48,6 +48,7 @@ const environment = {
MINIO_URL: process.env.MINIO_URL,
WORKER_URL: process.env.WORKER_URL,
AWS_REGION: process.env.AWS_REGION,
AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
REDIS_URL: process.env.REDIS_URL,
@ -96,6 +97,7 @@ const environment = {
DISABLE_THREADING: process.env.DISABLE_THREADING,
DISABLE_AUTOMATION_LOGS: process.env.DISABLE_AUTOMATION_LOGS,
DISABLE_RATE_LIMITING: process.env.DISABLE_RATE_LIMITING,
DISABLE_APP_MIGRATIONS: process.env.SKIP_APP_MIGRATIONS || false,
MULTI_TENANCY: process.env.MULTI_TENANCY,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
SELF_HOSTED: process.env.SELF_HOSTED,

View file

@ -4,19 +4,14 @@ import {
MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import * as setup from "../api/routes/tests/utilities"
import {
Datasource,
FieldType,
Table,
TableRequest,
TableSourceType,
} from "@budibase/types"
import { Datasource, FieldType } from "@budibase/types"
import {
DatabaseName,
getDatasource,
rawQuery,
} from "../integrations/tests/utils"
import { generator } from "@budibase/backend-core/tests"
import { tableForDatasource } from "../../src/tests/utilities/structures"
// @ts-ignore
fetch.mockSearch()
@ -47,8 +42,7 @@ jest.mock("../websockets", () => ({
describe("mysql integrations", () => {
let makeRequest: MakeRequestResponse,
rawDatasource: Datasource,
datasource: Datasource,
primaryMySqlTable: Table
datasource: Datasource
beforeAll(async () => {
await config.init()
@ -60,38 +54,12 @@ describe("mysql integrations", () => {
datasource = await config.api.datasource.create(rawDatasource)
})
beforeEach(async () => {
primaryMySqlTable = await config.createTable({
name: uniqueTableName(),
type: "table",
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
description: {
name: "description",
type: FieldType.STRING,
},
value: {
name: "value",
type: FieldType.NUMBER,
},
},
sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL,
})
})
afterAll(config.end)
it("validate table schema", async () => {
// Creating a table so that `entities` is populated.
await config.api.table.save(tableForDatasource(datasource))
const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
expect(res.status).toBe(200)
@ -115,54 +83,6 @@ describe("mysql integrations", () => {
})
})
describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error:
"Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
},
}
)
})
})
describe("POST /api/datasources/info", () => {
it("should fetch information about mysql datasource", async () => {
const primaryName = primaryMySqlTable.name
const response = await makeRequest("post", "/api/datasources/info", {
datasource: datasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
})
})
describe("Integration compatibility with mysql search_path", () => {
let datasource: Datasource, rawDatasource: Datasource
const database = generator.guid()
@ -231,57 +151,6 @@ describe("mysql integrations", () => {
})
})
describe("POST /api/tables/", () => {
it("will rename a column", async () => {
await makeRequest("post", "/api/tables/", primaryMySqlTable)
let renameColumnOnTable: TableRequest = {
...primaryMySqlTable,
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
externalType: "unsigned integer",
},
name: {
name: "name",
type: FieldType.STRING,
externalType: "text",
},
description: {
name: "description",
type: FieldType.STRING,
externalType: "text",
},
age: {
name: "age",
type: FieldType.NUMBER,
externalType: "float(8,2)",
},
},
}
const response = await makeRequest(
"post",
"/api/tables/",
renameColumnOnTable
)
const ds = (
await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
).body.datasource
expect(response.status).toEqual(200)
expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
"id",
"name",
"description",
"age",
])
})
})
describe("POST /api/datasources/:datasourceId/schema", () => {
let tableName: string

File diff suppressed because it is too large

View file

@ -566,7 +566,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
query.filters.equal[`_${GOOGLE_SHEETS_PRIMARY_KEY}`] = id
}
}
let filtered = dataFilters.runLuceneQuery(rows, query.filters)
let filtered = dataFilters.runQuery(rows, query.filters)
if (hasFilters && query.paginate) {
filtered = filtered.slice(offset, offset + limit)
}
@ -585,7 +585,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
})
}
const [sortField, sortInfo] = Object.entries(query.sort)[0]
response = dataFilters.luceneSort(
response = dataFilters.sort(
response,
sortField,
sortInfo.direction,

View file

@ -4,8 +4,9 @@ import * as mongodb from "./mongodb"
import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import { GenericContainer } from "testcontainers"
import { GenericContainer, StartedTestContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
import cloneDeep from "lodash/cloneDeep"
export type DatasourceProvider = () => Promise<Datasource>
@ -65,9 +66,39 @@ export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
}
export async function startContainer(container: GenericContainer) {
container = container.withReuse().withLabels({ "com.budibase": "true" })
const imageName = (container as any).imageName.string as string
const key = imageName.replaceAll("/", "-").replaceAll(":", "-")
const startedContainer = await container.start()
container = container
.withReuse()
.withLabels({ "com.budibase": "true" })
.withName(key)
let startedContainer: StartedTestContainer | undefined = undefined
let lastError = undefined
for (let i = 0; i < 10; i++) {
try {
// container.start() is not an idempotent operation, calling `start`
// modifies the internal state of a GenericContainer instance such that
// the hash it uses to determine reuse changes. We need to clone the
// container before calling start to ensure that we're using the same
// reuse hash every time.
const containerCopy = cloneDeep(container)
startedContainer = await containerCopy.start()
lastError = undefined
break
} catch (e: any) {
lastError = e
await new Promise(resolve => setTimeout(resolve, 1000))
}
}
if (!startedContainer) {
if (lastError) {
throw lastError
}
throw new Error(`failed to start container: ${imageName}`)
}
const info = testContainerUtils.getContainerById(startedContainer.getId())
if (!info) {

View file

@ -29,6 +29,9 @@ export async function getDatasource(): Promise<Datasource> {
}
const port = (await ports).find(x => x.container === 1433)?.host
if (!port) {
throw new Error("SQL Server port not found")
}
const datasource: Datasource = {
type: "datasource_plus",

View file

@ -38,6 +38,9 @@ export async function getDatasource(): Promise<Datasource> {
}
const port = (await ports).find(x => x.container === 3306)?.host
if (!port) {
throw new Error("MySQL port not found")
}
const datasource: Datasource = {
type: "datasource_plus",

View file

@ -21,6 +21,9 @@ export async function getDatasource(): Promise<Datasource> {
}
const port = (await ports).find(x => x.container === 5432)?.host
if (!port) {
throw new Error("Postgres port not found")
}
const datasource: Datasource = {
type: "datasource_plus",

View file

@ -280,12 +280,35 @@ function copyExistingPropsOver(
utils.unreachable(existingColumnType)
}
// copy the BB schema in case of special props
if (shouldKeepSchema) {
const fetchedColumnDefinition: FieldSchema | undefined =
table.schema[key]
table.schema[key] = {
...existingTableSchema[key],
externalType:
existingTableSchema[key].externalType ||
table.schema[key]?.externalType,
autocolumn: fetchedColumnDefinition?.autocolumn,
} as FieldSchema
// check constraints which can be fetched from the DB (they could be updated)
if (fetchedColumnDefinition?.constraints) {
// inclusions are the enum values (select/options)
const fetchedConstraints = fetchedColumnDefinition.constraints
const oldConstraints = table.schema[key].constraints
table.schema[key].constraints = {
...table.schema[key].constraints,
inclusion: fetchedConstraints.inclusion?.length
? fetchedConstraints.inclusion
: oldConstraints?.inclusion,
}
// true or undefined - consistent with old API
if (fetchedConstraints.presence) {
table.schema[key].constraints!.presence =
fetchedConstraints.presence
} else if (oldConstraints?.presence === true) {
delete table.schema[key].constraints?.presence
}
}
}
}
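A worked example of the merge above, with hypothetical fetched and previously-saved constraints:

// fetched from the DB:   { inclusion: ["a", "b"], presence: false }
// previously saved:      { inclusion: [],         presence: true }
// merged result:         { inclusion: ["a", "b"] }
// - the non-empty fetched inclusion list wins over the stale empty one
// - presence is deleted because the DB no longer reports the column as NOT NULL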

View file

@ -1,9 +1,16 @@
import { UserCtx } from "@budibase/types"
import { checkMissingMigrations } from "../appMigrations"
import env from "../environment"
export default async (ctx: UserCtx, next: any) => {
const { appId } = ctx
// migrations can be disabled via environment variable if you
// need to completely disable migrations, e.g. for testing
if (env.DISABLE_APP_MIGRATIONS) {
return next()
}
if (!appId) {
return next()
}

View file

@ -14,6 +14,7 @@ import {
CONSTANT_INTERNAL_ROW_COLS,
generateJunctionTableID,
} from "../../../../db/utils"
import { isEqual } from "lodash"
const FieldTypeMap: Record<FieldType, SQLiteType> = {
[FieldType.BOOLEAN]: SQLiteType.NUMERIC,
@ -107,8 +108,22 @@ async function buildBaseDefinition(): Promise<PreSaveSQLiteDefinition> {
export async function syncDefinition(): Promise<void> {
const db = context.getAppDB()
let existing: SQLiteDefinition | undefined
try {
existing = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
if (err.status !== 404) {
throw err
}
}
const definition = await buildBaseDefinition()
await db.put(definition)
if (existing) {
definition._rev = existing._rev
}
// only write if something has changed
if (!existing || !isEqual(existing.sql, definition.sql)) {
await db.put(definition)
}
}
export async function addTable(table: Table) {

View file

@ -15,6 +15,7 @@ import * as fileSystem from "../utilities/fileSystem"
import { default as eventEmitter, init as eventInit } from "../events"
import * as migrations from "../migrations"
import * as bullboard from "../automations/bullboard"
import * as appMigrations from "../appMigrations/queue"
import * as pro from "@budibase/pro"
import * as api from "../api"
import sdk from "../sdk"
@ -69,6 +70,9 @@ export async function startup(
return
}
printFeatures()
if (env.BUDIBASE_ENVIRONMENT) {
console.log(`service running environment: "${env.BUDIBASE_ENVIRONMENT}"`)
}
STARTUP_RAN = true
if (app && server && !env.CLUSTER_MODE) {
console.log(`Budibase running on ${JSON.stringify(server.address())}`)
@ -114,8 +118,10 @@ export async function startup(
// configure events to use the pro audit log write
// can't integrate directly into backend-core due to cyclic issues
queuePromises.push(events.processors.init(pro.sdk.auditLogs.write))
// app migrations run on the same service as automations
if (automationsEnabled()) {
queuePromises.push(automations.init())
queuePromises.push(appMigrations.init())
}
queuePromises.push(initPro())
if (app) {

View file

@ -6,6 +6,7 @@ import {
UpdateDatasourceRequest,
QueryJson,
BuildSchemaFromSourceResponse,
FetchDatasourceInfoResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
@ -61,6 +62,10 @@ export class DatasourceAPI extends TestAPI {
})
}
fetch = async (expectations?: Expectations) => {
return await this._get<Datasource[]>(`/api/datasources`, { expectations })
}
query = async (
query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
expectations?: Expectations
@ -71,10 +76,29 @@ export class DatasourceAPI extends TestAPI {
})
}
fetchSchema = async (id: string, expectations?: Expectations) => {
fetchSchema = async (
{
datasourceId,
tablesFilter,
}: { datasourceId: string; tablesFilter?: string[] },
expectations?: Expectations
) => {
return await this._post<BuildSchemaFromSourceResponse>(
`/api/datasources/${id}/schema`,
`/api/datasources/${datasourceId}/schema`,
{
expectations: expectations,
body: {
tablesFilter: tablesFilter,
},
}
)
}
info = async (datasource: Datasource, expectations?: Expectations) => {
return await this._post<FetchDatasourceInfoResponse>(
`/api/datasources/info`,
{
body: { datasource },
expectations,
}
)

View file

@ -138,10 +138,10 @@ export const removeKeyNumbering = (key: string): string => {
}
/**
* Builds a lucene JSON query from the filter structure generated in the builder
* Builds a JSON query from the filter structure generated in the builder
* @param filter the builder filter structure
*/
export const buildLuceneQuery = (filter: SearchFilter[]) => {
export const buildQuery = (filter: SearchFilter[]) => {
let query: SearchFilters = {
string: {},
fuzzy: {},
@ -260,11 +260,11 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
}
/**
* Performs a client-side lucene search on an array of data
* Performs a client-side search on an array of data
* @param docs the data
* @param query the JSON lucene query
* @param query the JSON query
*/
export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
export const runQuery = (docs: any[], query?: SearchFilters) => {
if (!docs || !Array.isArray(docs)) {
return []
}
@ -451,7 +451,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
* @param sortOrder the sort order ("ascending" or "descending")
* @param sortType the type of sort ("string" or "number")
*/
export const luceneSort = (
export const sort = (
docs: any[],
sort: string,
sortOrder: SortDirection,
@ -481,7 +481,7 @@ export const luceneSort = (
* @param docs the data
* @param limit the number of docs to limit to
*/
export const luceneLimit = (docs: any[], limit: string) => {
export const limit = (docs: any[], limit: string) => {
const numLimit = parseFloat(limit)
if (isNaN(numLimit)) {
return docs

View file

@ -4,9 +4,9 @@ import {
FieldType,
SearchFilter,
} from "@budibase/types"
import { buildLuceneQuery, runLuceneQuery } from "../filters"
import { buildQuery, runQuery } from "../filters"
describe("runLuceneQuery", () => {
describe("runQuery", () => {
const docs = [
{
order_id: 1,
@ -70,14 +70,14 @@ describe("runLuceneQuery", () => {
}
it("should return input docs if no search query is provided", () => {
expect(runLuceneQuery(docs)).toBe(docs)
expect(runQuery(docs)).toBe(docs)
})
it("should return matching rows for equal filter", () => {
const query = buildQuery({
equal: { order_status: 4 },
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
})
it("should return matching row for notEqual filter", () => {
@ -85,12 +85,12 @@ describe("runLuceneQuery", () => {
notEqual: { order_status: 4 },
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
})
it("should return starts with matching rows for fuzzy and string filters", () => {
expect(
runLuceneQuery(
runQuery(
docs,
buildQuery({
fuzzy: { description: "sm" },
@ -98,7 +98,7 @@ describe("runLuceneQuery", () => {
).map(row => row.description)
).toEqual(["Small box"])
expect(
runLuceneQuery(
runQuery(
docs,
buildQuery({
string: { description: "SM" },
@ -117,7 +117,7 @@ describe("runLuceneQuery", () => {
},
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
})
it("should return rows with numeric strings within a range filter", () => {
@ -129,7 +129,7 @@ describe("runLuceneQuery", () => {
},
},
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
})
it("should return rows with ISO date strings within a range filter", () => {
@ -142,7 +142,7 @@ describe("runLuceneQuery", () => {
},
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
})
it("should return return all docs if an invalid doc value is passed into a range filter", async () => {
@ -170,7 +170,7 @@ describe("runLuceneQuery", () => {
},
})
expect(runLuceneQuery(docs, query)).toEqual(docs)
expect(runQuery(docs, query)).toEqual(docs)
})
it("should return rows with matches on empty filter", () => {
@ -180,7 +180,7 @@ describe("runLuceneQuery", () => {
},
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1])
})
it("should return rows with matches on notEmpty filter", () => {
@ -190,7 +190,7 @@ describe("runLuceneQuery", () => {
},
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
})
it.each([[523, 259], "523,259"])(
@ -202,7 +202,7 @@ describe("runLuceneQuery", () => {
},
})
expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([
expect(runQuery(docs, query).map(row => row.customer_id)).toEqual([
259, 523,
])
}
@ -218,7 +218,7 @@ describe("runLuceneQuery", () => {
contains: { description: ["box"] },
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual(
expect(runQuery(docs, query).map(row => row.order_id)).toEqual(
expectedResult
)
})
@ -230,7 +230,7 @@ describe("runLuceneQuery", () => {
oneOf: { label: ["FRAGILE"] },
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
})
it("should handle when a value is null or undefined", () => {
@ -240,14 +240,14 @@ describe("runLuceneQuery", () => {
oneOf: { label: ["FRAGILE"] },
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
})
})
describe("buildLuceneQuery", () => {
describe("buildQuery", () => {
it("should return a basic search query template if the input is not an array", () => {
const filter: any = "NOT_AN_ARRAY"
expect(buildLuceneQuery(filter)).toEqual({
expect(buildQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
@ -277,7 +277,7 @@ describe("buildLuceneQuery", () => {
value: "1000,1212,3400",
},
]
expect(buildLuceneQuery(filter)).toEqual({
expect(buildQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
@ -311,7 +311,7 @@ describe("buildLuceneQuery", () => {
value: "{{ list_of_customer_ids }}",
},
]
expect(buildLuceneQuery(filter)).toEqual({
expect(buildQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
@ -351,7 +351,7 @@ describe("buildLuceneQuery", () => {
value: "true",
},
]
expect(buildLuceneQuery(filter)).toEqual({
expect(buildQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
@ -392,7 +392,7 @@ describe("buildLuceneQuery", () => {
value: "Large box,Heavy box,Small box",
},
]
expect(buildLuceneQuery(filter)).toEqual({
expect(buildQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},

View file

@ -1 +1,2 @@
export * from "./environment"
export * from "./status"

View file

@ -0,0 +1,11 @@
export type SystemStatusResponse = {
passing?: boolean
checks?: {
login: boolean
search: boolean
}
health?: {
passing: boolean
}
version?: string
}
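For reference, a response satisfying this type might look like (values illustrative):

const status: SystemStatusResponse = {
  health: { passing: true },
  version: "0.0.0+local",
}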

View file

@ -30,4 +30,7 @@ export interface SQLiteDefinition {
}
}
export type PreSaveSQLiteDefinition = Omit<SQLiteDefinition, "_rev">
export interface PreSaveSQLiteDefinition
extends Omit<SQLiteDefinition, "_rev"> {
_rev?: string
}

View file

@ -7,3 +7,4 @@ export * from "./schedule"
export * from "./templates"
export * from "./environmentVariables"
export * from "./auditLogs"
export * from "./tenantInfo"

View file

@ -0,0 +1,13 @@
import { Document } from "../document"
export interface TenantInfo extends Document {
owner: {
email: string
password?: string
ssoId?: string
givenName?: string
familyName?: string
budibaseUserId?: string
}
tenantId: string
}

View file

@ -165,3 +165,13 @@ export interface Database {
deleteIndex(...args: any[]): Promise<any>
getIndexes(...args: any[]): Promise<any>
}
export interface DBError extends Error {
status: number
statusCode: number
reason: string
name: string
errid: string
error: string
description: string
}

View file

@ -31,6 +31,7 @@ async function init() {
HTTP_LOGGING: "0",
VERSION: "0.0.0+local",
PASSWORD_MIN_LENGTH: "1",
SQS_SEARCH_ENABLE: "1",
}
config = { ...config, ...existingConfig }

View file

@ -0,0 +1,10 @@
import { tenancy } from "@budibase/backend-core"
import { TenantInfo, Ctx } from "@budibase/types"
export const save = async (ctx: Ctx<TenantInfo>) => {
const response = await tenancy.saveTenantInfo(ctx.request.body)
ctx.body = {
_id: response.id,
_rev: response.rev,
}
}

View file

@ -1,16 +1,24 @@
import { accounts } from "@budibase/backend-core"
import { accounts, env as coreEnv } from "@budibase/backend-core"
import { Ctx, SystemStatusResponse } from "@budibase/types"
import env from "../../../environment"
import { BBContext } from "@budibase/types"
export const fetch = async (ctx: BBContext) => {
export const fetch = async (ctx: Ctx<void, SystemStatusResponse>) => {
let status: SystemStatusResponse | undefined
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
const status = await accounts.getStatus()
ctx.body = status
} else {
ctx.body = {
status = await accounts.getStatus()
}
if (!status) {
status = {
health: {
passing: true,
},
}
}
if (coreEnv.VERSION) {
status.version = coreEnv.VERSION
}
ctx.body = status
}

View file

@ -76,6 +76,10 @@ const PUBLIC_ENDPOINTS = [
route: "/api/global/users/invite",
method: "GET",
},
{
route: "/api/global/tenant",
method: "POST",
},
]
const NO_TENANCY_ENDPOINTS = [
@ -121,6 +125,10 @@ const NO_TENANCY_ENDPOINTS = [
route: "/api/global/users/invite/:code",
method: "GET",
},
{
route: "/api/global/tenant",
method: "POST",
},
]
// most public endpoints are gets, but some are posts

View file

@ -0,0 +1,33 @@
import Router from "@koa/router"
import Joi from "joi"
import { auth } from "@budibase/backend-core"
import * as controller from "../../controllers/global/tenant"
import cloudRestricted from "../../../middleware/cloudRestricted"
const router: Router = new Router()
const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
function buildTenantInfoValidation() {
return auth.joiValidator.body(
Joi.object({
owner: Joi.object({
email: Joi.string().required(),
password: OPTIONAL_STRING,
ssoId: OPTIONAL_STRING,
givenName: OPTIONAL_STRING,
familyName: OPTIONAL_STRING,
budibaseUserId: OPTIONAL_STRING,
}).required(),
tenantId: Joi.string().required(),
}).required()
)
}
router.post(
"/api/global/tenant",
cloudRestricted,
buildTenantInfoValidation(),
controller.save
)
export default router
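A request sketch for the new endpoint - the route and body shape come from the validator above, while the internal API key header enforced by cloudRestricted is an assumption here:

// hypothetical call - the header name and key source are assumed
await fetch("/api/global/tenant", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "x-budibase-api-key": process.env.INTERNAL_API_KEY!, // assumed
  },
  body: JSON.stringify({
    tenantId: "tenant123",
    owner: { email: "owner@example.com" },
  }),
})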

View file

@ -0,0 +1,47 @@
import { TenantInfo } from "@budibase/types"
import { TestConfiguration } from "../../../../tests"
import { tenancy as _tenancy } from "@budibase/backend-core"
const tenancy = jest.mocked(_tenancy)
describe("/api/global/tenant", () => {
const config = new TestConfiguration()
beforeAll(async () => {
await config.beforeAll()
})
afterAll(async () => {
await config.afterAll()
})
beforeEach(() => {
jest.clearAllMocks()
})
describe("POST /api/global/tenant", () => {
it("should save the tenantInfo", async () => {
tenancy.saveTenantInfo = jest.fn().mockImplementation(async () => ({
id: "DOC_ID",
ok: true,
rev: "DOC_REV",
}))
const tenantInfo: TenantInfo = {
owner: {
email: "test@example.com",
password: "PASSWORD",
ssoId: "SSO_ID",
givenName: "Jane",
familyName: "Doe",
budibaseUserId: "USER_ID",
},
tenantId: "tenant123",
}
const response = await config.api.tenants.saveTenantInfo(tenantInfo)
expect(_tenancy.saveTenantInfo).toHaveBeenCalledTimes(1)
expect(_tenancy.saveTenantInfo).toHaveBeenCalledWith(tenantInfo)
expect(response.text).toEqual('{"_id":"DOC_ID","_rev":"DOC_REV"}')
})
})
})

View file

@ -4,7 +4,6 @@ import { auth } from "@budibase/backend-core"
import Joi from "joi"
import cloudRestricted from "../../../middleware/cloudRestricted"
import { users } from "../validation"
import * as selfController from "../../controllers/global/self"
const router: Router = new Router()
const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
@ -140,12 +139,5 @@ router
.get("/api/global/users/tenant/:id", controller.tenantUserLookup)
// global endpoint but needs to come at end (blocks other endpoints otherwise)
.get("/api/global/users/:id", auth.builderOrAdmin, controller.find)
// DEPRECATED - use new versions with self API
.get("/api/global/users/self", selfController.getSelf)
.post(
"/api/global/users/self",
users.buildUserSaveValidation(),
selfController.updateSelf
)
export default router

View file

@ -1,6 +1,7 @@
import Router from "@koa/router"
import { api as pro } from "@budibase/pro"
import userRoutes from "./global/users"
import tenantRoutes from "./global/tenant"
import configRoutes from "./global/configs"
import workspaceRoutes from "./global/workspaces"
import templateRoutes from "./global/templates"
@ -40,6 +41,7 @@ export const routes: Router[] = [
accountRoutes,
restoreRoutes,
eventRoutes,
tenantRoutes,
pro.scim,
]

View file

@ -27,6 +27,7 @@ describe("/api/system/status", () => {
health: {
passing: true,
},
version: expect.any(String),
})
expect(accounts.getStatus).toHaveBeenCalledTimes(0)
config.cloudHosted()

View file

@ -24,6 +24,7 @@ const environment = {
// auth
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
SALT_ROUNDS: process.env.SALT_ROUNDS,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
@ -46,6 +47,7 @@ const environment = {
SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
// smtp
SMTP_USER: process.env.SMTP_USER,
SMTP_PASSWORD: process.env.SMTP_PASSWORD,

View file

@ -88,7 +88,11 @@ const shutdown = () => {
}
export default server.listen(parseInt(env.PORT || "4002"), async () => {
console.log(`Worker running on ${JSON.stringify(server.address())}`)
let startupLog = `Worker running on ${JSON.stringify(server.address())}`
if (env.BUDIBASE_ENVIRONMENT) {
startupLog = `${startupLog} - environment: "${env.BUDIBASE_ENVIRONMENT}"`
}
console.log(startupLog)
await initPro()
await redis.clients.init()
cache.docWritethrough.init()

View file

@ -1,3 +1,4 @@
import { TenantInfo } from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI, TestAPIOpts } from "./base"
@ -14,4 +15,12 @@ export class TenantAPI extends TestAPI {
.set(opts?.headers)
.expect(opts?.status ? opts.status : 204)
}
saveTenantInfo = (tenantInfo: TenantInfo) => {
return this.request
.post("/api/global/tenant")
.set(this.config.internalAPIHeaders())
.send(tenantInfo)
.expect(200)
}
}

Some files were not shown because too many files have changed in this diff