
Merge master.

This commit is contained in:
Sam Rose 2024-01-12 14:56:10 +00:00
commit 290dde125e
66 changed files with 1476 additions and 1337 deletions

View file

@ -76,6 +76,6 @@ done
# CouchDB needs the `_users` and `_replicator` databases to exist before it will
# function correctly, so we create them here.
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_users
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_replicator
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
sleep infinity

View file

@ -1,5 +1,5 @@
{
"version": "2.14.3",
"version": "2.14.7",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit b11e6b47370d9b77c63648b45929c86bfed6360c
Subproject commit b23fb3b17961fb04badd9487913a683fcf26dbe6

View file

@ -19,6 +19,8 @@ import { WriteStream, ReadStream } from "fs"
import { newid } from "../../docIds/newid"
import { DDInstrumentedDatabase } from "../instrumentation"
const DATABASE_NOT_FOUND = "Database does not exist."
function buildNano(couchInfo: { url: string; cookie: string }) {
return Nano({
url: couchInfo.url,
@ -31,6 +33,8 @@ function buildNano(couchInfo: { url: string; cookie: string }) {
})
}
type DBCall<T> = () => Promise<T>
export function DatabaseWithConnection(
dbName: string,
connection: string,
@ -78,7 +82,11 @@ export class DatabaseImpl implements Database {
return this.instanceNano || DatabaseImpl.nano
}
async checkSetup() {
private getDb() {
return this.nano().db.use(this.name)
}
private async checkAndCreateDb() {
let shouldCreate = !this.pouchOpts?.skip_setup
// check exists in a lightweight fashion
let exists = await this.exists()
@ -95,14 +103,22 @@ export class DatabaseImpl implements Database {
}
}
}
return this.nano().db.use(this.name)
return this.getDb()
}
private async updateOutput(fnc: any) {
// this function fetches the DB and handles creating it if needed
private async performCall<T>(
call: (db: Nano.DocumentScope<any>) => Promise<DBCall<T>> | DBCall<T>
): Promise<any> {
const db = this.getDb()
const fnc = await call(db)
try {
return await fnc()
} catch (err: any) {
if (err.statusCode) {
if (err.statusCode === 404 && err.reason === DATABASE_NOT_FOUND) {
await this.checkAndCreateDb()
return await this.performCall(call)
} else if (err.statusCode) {
err.status = err.statusCode
}
throw err
@ -110,11 +126,12 @@ export class DatabaseImpl implements Database {
}
async get<T extends Document>(id?: string): Promise<T> {
const db = await this.checkSetup()
if (!id) {
throw new Error("Unable to get doc without a valid _id.")
}
return this.updateOutput(() => db.get(id))
return this.performCall(db => {
if (!id) {
throw new Error("Unable to get doc without a valid _id.")
}
return () => db.get(id)
})
}
async getMultiple<T extends Document>(
@ -147,22 +164,23 @@ export class DatabaseImpl implements Database {
}
async remove(idOrDoc: string | Document, rev?: string) {
const db = await this.checkSetup()
let _id: string
let _rev: string
return this.performCall(db => {
let _id: string
let _rev: string
if (isDocument(idOrDoc)) {
_id = idOrDoc._id!
_rev = idOrDoc._rev!
} else {
_id = idOrDoc
_rev = rev!
}
if (isDocument(idOrDoc)) {
_id = idOrDoc._id!
_rev = idOrDoc._rev!
} else {
_id = idOrDoc
_rev = rev!
}
if (!_id || !_rev) {
throw new Error("Unable to remove doc without a valid _id and _rev.")
}
return this.updateOutput(() => db.destroy(_id, _rev))
if (!_id || !_rev) {
throw new Error("Unable to remove doc without a valid _id and _rev.")
}
return () => db.destroy(_id, _rev)
})
}
async post(document: AnyDocument, opts?: DatabasePutOpts) {
@ -176,45 +194,49 @@ export class DatabaseImpl implements Database {
if (!document._id) {
throw new Error("Cannot store document without _id field.")
}
const db = await this.checkSetup()
if (!document.createdAt) {
document.createdAt = new Date().toISOString()
}
document.updatedAt = new Date().toISOString()
if (opts?.force && document._id) {
try {
const existing = await this.get(document._id)
if (existing) {
document._rev = existing._rev
}
} catch (err: any) {
if (err.status !== 404) {
throw err
return this.performCall(async db => {
if (!document.createdAt) {
document.createdAt = new Date().toISOString()
}
document.updatedAt = new Date().toISOString()
if (opts?.force && document._id) {
try {
const existing = await this.get(document._id)
if (existing) {
document._rev = existing._rev
}
} catch (err: any) {
if (err.status !== 404) {
throw err
}
}
}
}
return this.updateOutput(() => db.insert(document))
return () => db.insert(document)
})
}
async bulkDocs(documents: AnyDocument[]) {
const db = await this.checkSetup()
return this.updateOutput(() => db.bulk({ docs: documents }))
return this.performCall(db => {
return () => db.bulk({ docs: documents })
})
}
async allDocs<T extends Document>(
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
const db = await this.checkSetup()
return this.updateOutput(() => db.list(params))
return this.performCall(db => {
return () => db.list(params)
})
}
async query<T extends Document>(
viewName: string,
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
const db = await this.checkSetup()
const [database, view] = viewName.split("/")
return this.updateOutput(() => db.view(database, view, params))
return this.performCall(db => {
const [database, view] = viewName.split("/")
return () => db.view(database, view, params)
})
}
async destroy() {
@ -231,8 +253,9 @@ export class DatabaseImpl implements Database {
}
async compact() {
const db = await this.checkSetup()
return this.updateOutput(() => db.compact())
return this.performCall(db => {
return () => db.compact()
})
}
// All below functions are infrequently called, just utilise PouchDB

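The net effect of this refactor: every CouchDB call now flows through a single performCall wrapper that lazily creates the database when CouchDB answers 404 with "Database does not exist.", instead of each method awaiting checkSetup() up front. A minimal, self-contained sketch of the same retry pattern; the getDb/createDb helpers and the error shape here are illustrative stand-ins, not the real nano/Budibase APIs:

type DBCall<T> = () => Promise<T>

async function performCall<T>(
  getDb: () => unknown,
  createDb: () => Promise<void>,
  call: (db: unknown) => DBCall<T> | Promise<DBCall<T>>
): Promise<T> {
  const fnc = await call(getDb())
  try {
    return await fnc()
  } catch (err: any) {
    // CouchDB reports a missing database as a 404 with this exact reason string
    if (err.statusCode === 404 && err.reason === "Database does not exist.") {
      await createDb()
      return performCall(getDb, createDb, call) // retry now that the DB exists
    }
    if (err.statusCode) {
      err.status = err.statusCode // keep the old `status` field callers rely on
    }
    throw err
  }
}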
View file

@ -31,13 +31,6 @@ export class DDInstrumentedDatabase implements Database {
})
}
checkSetup(): Promise<DocumentScope<any>> {
return tracer.trace("db.checkSetup", span => {
span?.addTags({ db_name: this.name })
return this.db.checkSetup()
})
}
get<T extends Document>(id?: string | undefined): Promise<T> {
return tracer.trace("db.get", span => {
span?.addTags({ db_name: this.name, doc_id: id })

View file

@ -172,11 +172,8 @@ export default function (
tracer.setUser({
id: user?._id,
tenantId: user?.tenantId,
admin: user?.admin,
builder: user?.builder,
budibaseAccess: user?.budibaseAccess,
status: user?.status,
roles: user?.roles,
})
}

View file

@ -40,7 +40,7 @@
loading = false
}
async function confirm() {
export async function confirm() {
loading = true
if (!onConfirm || (await onConfirm()) !== keepOpen) {
hide()

View file

@ -5,7 +5,7 @@ import {
} from "@budibase/frontend-core"
import { store } from "./builderStore"
import { get } from "svelte/store"
import { auth } from "./stores/portal"
import { auth, navigation } from "./stores/portal"
export const API = createAPIClient({
attachHeaders: headers => {
@ -45,4 +45,15 @@ export const API = createAPIClient({
}
}
},
onMigrationDetected: appId => {
const updatingUrl = `/builder/app/updating/${appId}`
if (window.location.pathname === updatingUrl) {
return
}
get(navigation).goto(
`${updatingUrl}?returnUrl=${encodeURIComponent(window.location.pathname)}`
)
},
})

View file

@ -85,7 +85,6 @@ const INITIAL_FRONTEND_STATE = {
selectedScreenId: null,
selectedComponentId: null,
selectedLayoutId: null,
hoverComponentId: null,
// Client state
selectedComponentInstance: null,
@ -93,6 +92,9 @@ const INITIAL_FRONTEND_STATE = {
// Onboarding
onboarding: false,
tourNodes: null,
// UI state
hoveredComponentId: null,
}
export const getFrontendStore = () => {
@ -1413,6 +1415,18 @@ export const getFrontendStore = () => {
return state
})
},
hover: (componentId, notifyClient = true) => {
if (componentId === get(store).hoveredComponentId) {
return
}
store.update(state => {
state.hoveredComponentId = componentId
return state
})
if (notifyClient) {
store.actions.preview.sendEvent("hover-component", componentId)
}
},
},
links: {
save: async (url, title) => {

View file

@ -152,7 +152,7 @@
{#if isDisabled && !syncAutomationsEnabled && action.stepId === ActionStepID.COLLECT}
<div class="tag-color">
<Tags>
<Tag icon="LockClosed">Business</Tag>
<Tag icon="LockClosed">Premium</Tag>
</Tags>
</div>
{:else if isDisabled}

View file

@ -41,7 +41,7 @@
{ label: "False", value: "false" },
]}
/>
{:else if schema.type === "array"}
{:else if schemaHasOptions(schema) && schema.type === "array"}
<Multiselect
bind:value={value[field]}
options={schema.constraints.inclusion}
@ -77,7 +77,7 @@
on:change={e => onChange(e, field)}
useLabel={false}
/>
{:else if ["string", "number", "bigint", "barcodeqr"].includes(schema.type)}
{:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}

View file

@ -13,6 +13,7 @@
const appPrefix = "/app"
let touched = false
let error
let modal
$: appUrl = screenUrl
? `${window.location.origin}${appPrefix}${screenUrl}`
@ -50,6 +51,7 @@
</script>
<ModalContent
bind:this={modal}
size="M"
title={"Screen details"}
{confirmText}
@ -58,15 +60,17 @@
cancelText={"Back"}
disabled={!screenUrl || error || !touched}
>
<Input
label="Enter a URL for the new screen"
{error}
bind:value={screenUrl}
on:change={routeChanged}
/>
<div class="app-server" title={appUrl}>
{appUrl}
</div>
<form on:submit|preventDefault={() => modal.confirm()}>
<Input
label="Enter a URL for the new screen"
{error}
bind:value={screenUrl}
on:change={routeChanged}
/>
<div class="app-server" title={appUrl}>
{appUrl}
</div>
</form>
</ModalContent>
<style>

View file

@ -6,7 +6,7 @@
import { Helpers } from "@budibase/bbui"
import { derived, writable } from "svelte/store"
import { Utils } from "@budibase/frontend-core"
import { cloneDeep } from "lodash"
import { cloneDeep, isEqual } from "lodash"
export let componentInstance
export let componentBindings
@ -21,21 +21,32 @@
const currentStep = derived(multiStepStore, state => state.currentStep)
const componentType = "@budibase/standard-components/multistepformblockstep"
let cachedValue
let cachedInstance = {}
$: if (!isEqual(cachedValue, value)) {
cachedValue = value
}
$: if (!isEqual(componentInstance, cachedInstance)) {
cachedInstance = componentInstance
}
setContext("multi-step-form-block", multiStepStore)
$: stepCount = value?.length || 0
$: stepCount = cachedValue?.length || 0
$: updateStore(stepCount)
$: dataSource = getDatasourceForProvider($currentAsset, componentInstance)
$: dataSource = getDatasourceForProvider($currentAsset, cachedInstance)
$: emitCurrentStep($currentStep)
$: stepLabel = getStepLabel($multiStepStore)
$: stepDef = getDefinition(stepLabel)
$: stepSettings = value?.[$currentStep] || {}
$: stepSettings = cachedValue?.[$currentStep] || {}
$: defaults = Utils.buildMultiStepFormBlockDefaultProps({
_id: componentInstance._id,
_id: cachedInstance._id,
stepCount: $multiStepStore.stepCount,
currentStep: $multiStepStore.currentStep,
actionType: componentInstance.actionType,
dataSource: componentInstance.dataSource,
actionType: cachedInstance.actionType,
dataSource: cachedInstance.dataSource,
})
$: stepInstance = {
_id: Helpers.uuid(),

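The cachedValue / cachedInstance pair above exists so the reactive statements only fire when the setting actually changes in content, not every time the parent hands down a structurally identical object. A minimal sketch of that guard outside Svelte, with illustrative names and lodash isEqual as in the hunk:

import { isEqual } from "lodash"

let cachedValue: unknown

function updateCachedValue(next: unknown): unknown {
  // Only swap the cached reference when the contents differ, so anything
  // derived from `cachedValue` is not recomputed for an identical object.
  if (!isEqual(cachedValue, next)) {
    cachedValue = next
  }
  return cachedValue
}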
View file

@ -1,6 +1,9 @@
<script>
import { currentAsset } from "builderStore"
import { findClosestMatchingComponent } from "builderStore/componentUtils"
import { currentAsset, store } from "builderStore"
import {
findClosestMatchingComponent,
findComponent,
} from "builderStore/componentUtils"
import {
getDatasourceForProvider,
getSchemaForDatasource,
@ -20,8 +23,23 @@
component => component._component.endsWith("/form")
)
const resolveDatasource = (currentAsset, componentInstance, form) => {
if (!form && componentInstance._id != $store.selectedComponentId) {
const block = findComponent(
currentAsset.props,
$store.selectedComponentId
)
const def = store.actions.components.getDefinition(block._component)
return def?.block === true
? getDatasourceForProvider(currentAsset, block)
: {}
} else {
return getDatasourceForProvider(currentAsset, form)
}
}
// Get that form's schema
$: datasource = getDatasourceForProvider($currentAsset, form)
$: datasource = resolveDatasource($currentAsset, componentInstance, form)
$: formSchema = getSchemaForDatasource($currentAsset, datasource)?.schema
// Get the schema for the relationship field that this picker is using

View file

@ -1,6 +1,6 @@
<script>
import { isActive, redirect, params } from "@roxi/routify"
import { admin, auth, licensing } from "stores/portal"
import { admin, auth, licensing, navigation } from "stores/portal"
import { onMount } from "svelte"
import { CookieUtils, Constants } from "@budibase/frontend-core"
import { API } from "api"
@ -17,6 +17,8 @@
$: useAccountPortal = cloud && !$admin.disableAccountPortal
navigation.actions.init($redirect)
const validateTenantId = async () => {
const host = window.location.host
if (host.includes("localhost:") || !baseUrl) {

View file

@ -108,6 +108,8 @@
{componentInstance}
{componentDefinition}
{bindings}
iconTooltip={componentName}
componentTitle={title}
/>
{/if}
{#if section == "conditions"}

View file

@ -5,6 +5,9 @@
Drawer,
Button,
notifications,
AbsTooltip,
Icon,
Body,
} from "@budibase/bbui"
import { selectedScreen, store } from "builderStore"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
@ -15,6 +18,9 @@
} from "builderStore/dataBinding"
export let componentInstance
export let componentDefinition
export let iconTooltip
export let componentTitle
let tempValue
let drawer
@ -24,6 +30,8 @@
$store.selectedComponentId
)
$: icon = componentDefinition?.icon
const openDrawer = () => {
tempValue = runtimeToReadableBinding(
bindings,
@ -54,7 +62,19 @@
{#key componentInstance?._id}
<Drawer bind:this={drawer} title="Custom CSS">
<svelte:fragment slot="description">
Custom CSS overrides all other component styles.
<div class="header">
Your CSS will overwrite styles for:
{#if icon}
<AbsTooltip type="info" text={iconTooltip}>
<Icon
color={`var(--spectrum-global-color-gray-600)`}
size="S"
name={icon}
/>
</AbsTooltip>
<Body size="S"><b>{componentTitle || ""}</b></Body>
{/if}
</div>
</svelte:fragment>
<Button cta slot="buttons" on:click={save}>Save</Button>
<svelte:component
@ -68,3 +88,13 @@
/>
</Drawer>
{/key}
<style>
.header {
display: flex;
flex-direction: row;
justify-content: space-between;
align-items: center;
gap: var(--spacing-m);
}
</style>

View file

@ -36,14 +36,12 @@
// Determine selected component ID
$: selectedComponentId = $store.selectedComponentId
$: hoverComponentId = $store.hoverComponentId
$: previewData = {
appId: $store.appId,
layout,
screen,
selectedComponentId,
hoverComponentId,
theme: $store.theme,
customTheme: $store.customTheme,
previewDevice: $store.previewDevice,
@ -119,8 +117,8 @@
error = event.error || "An unknown error occurred"
} else if (type === "select-component" && data.id) {
$store.selectedComponentId = data.id
} else if (type === "hover-component" && data.id) {
$store.hoverComponentId = data.id
} else if (type === "hover-component") {
store.actions.components.hover(data.id, false)
} else if (type === "update-prop") {
await store.actions.components.updateSetting(data.prop, data.value)
} else if (type === "update-styles") {

View file

@ -7,6 +7,7 @@
$: definition = store.actions.components.getDefinition(component?._component)
$: noPaste = !$store.componentToPaste
$: isBlock = definition?.block === true
$: canEject = !(definition?.ejectable === false)
const keyboardEvent = (key, ctrlKey = false) => {
document.dispatchEvent(
@ -32,7 +33,7 @@
>
Delete
</MenuItem>
{#if isBlock}
{#if isBlock && canEject}
<MenuItem
icon="Export"
keyBind="Ctrl+E"

View file

@ -32,8 +32,15 @@
await store.actions.components.paste(component, "below")
},
["Ctrl+e"]: component => {
componentToEject = component
confirmEjectDialog.show()
const definition = store.actions.components.getDefinition(
component._component
)
const isBlock = definition?.block === true
const canEject = !(definition?.ejectable === false)
if (isBlock && canEject) {
componentToEject = component
confirmEjectDialog.show()
}
},
["Ctrl+Enter"]: () => {
$goto(`./:componentId/new`)

View file

@ -90,16 +90,7 @@
return findComponentPath($selectedComponent, component._id)?.length > 0
}
const handleMouseover = componentId => {
if ($store.hoverComponentId !== componentId) {
$store.hoverComponentId = componentId
}
}
const handleMouseout = componentId => {
if ($store.hoverComponentId === componentId) {
$store.hoverComponentId = null
}
}
const hover = store.actions.components.hover
</script>
<ul>
@ -120,9 +111,9 @@
on:dragover={dragover(component, index)}
on:iconClick={() => toggleNodeOpen(component._id)}
on:drop={onDrop}
hovering={$store.hoverComponentId === component._id}
on:mouseenter={() => handleMouseover(component._id)}
on:mouseleave={() => handleMouseout(component._id)}
hovering={$store.hoveredComponentId === component._id}
on:mouseenter={() => hover(component._id)}
on:mouseleave={() => hover(null)}
text={getComponentText(component)}
icon={getComponentIcon(component)}
iconTooltip={getComponentName(component)}

View file

@ -12,6 +12,9 @@
let scrolling = false
$: screenComponentId = `${$store.selectedScreenId}-screen`
$: navComponentId = `${$store.selectedScreenId}-navigation`
const toNewComponentRoute = () => {
if ($isActive(`./:componentId/new`)) {
$goto(`./:componentId`)
@ -33,16 +36,7 @@
scrolling = e.target.scrollTop !== 0
}
const handleMouseover = componentId => {
if ($store.hoverComponentId !== componentId) {
$store.hoverComponentId = componentId
}
}
const handleMouseout = componentId => {
if ($store.hoverComponentId === componentId) {
$store.hoverComponentId = null
}
}
const hover = store.actions.components.hover
</script>
<div class="components">
@ -65,46 +59,31 @@
scrollable
icon="WebPage"
on:drop={onDrop}
on:click={() => {
$store.selectedComponentId = `${$store.selectedScreenId}-screen`
}}
hovering={$store.hoverComponentId ===
`${$store.selectedScreenId}-screen`}
on:mouseenter={() =>
handleMouseover(`${$store.selectedScreenId}-screen`)}
on:mouseleave={() =>
handleMouseout(`${$store.selectedScreenId}-screen`)}
id={`component-screen`}
selectedBy={$userSelectedResourceMap[
`${$store.selectedScreenId}-screen`
]}
on:click={() => ($store.selectedComponentId = screenComponentId)}
hovering={$store.hoveredComponentId === screenComponentId}
on:mouseenter={() => hover(screenComponentId)}
on:mouseleave={() => hover(null)}
id="component-screen"
selectedBy={$userSelectedResourceMap[screenComponentId]}
>
<ScreenslotDropdownMenu component={$selectedScreen?.props} />
</NavItem>
<NavItem
text="Navigation"
indentLevel={0}
selected={$store.selectedComponentId ===
`${$store.selectedScreenId}-navigation`}
selected={$store.selectedComponentId === navComponentId}
opened
scrollable
icon={$selectedScreen.showNavigation
? "Visibility"
: "VisibilityOff"}
on:drop={onDrop}
on:click={() => {
$store.selectedComponentId = `${$store.selectedScreenId}-navigation`
}}
hovering={$store.hoverComponentId ===
`${$store.selectedScreenId}-navigation`}
on:mouseenter={() =>
handleMouseover(`${$store.selectedScreenId}-navigation`)}
on:mouseleave={() =>
handleMouseout(`${$store.selectedScreenId}-navigation`)}
id={`component-nav`}
selectedBy={$userSelectedResourceMap[
`${$store.selectedScreenId}-navigation`
]}
on:click={() => ($store.selectedComponentId = navComponentId)}
hovering={$store.hoveredComponentId === navComponentId}
on:mouseenter={() => hover(navComponentId)}
on:mouseleave={() => hover(null)}
id="component-nav"
selectedBy={$userSelectedResourceMap[navComponentId]}
/>
<ComponentTree
level={0}

View file

@ -0,0 +1,19 @@
<script>
import { Updating } from "@budibase/frontend-core"
import { redirect, params } from "@roxi/routify"
import { API } from "api"
async function isMigrationDone() {
const response = await API.getMigrationStatus()
return response.migrated
}
async function onMigrationDone() {
// For some reason the routify params store is not stripping the ? properly, so we need to check both with and without it
const returnUrl = $params.returnUrl || $params["?returnUrl"]
$redirect(returnUrl)
}
</script>
<Updating {isMigrationDone} {onMigrationDone} />

View file

@ -214,7 +214,7 @@
<Heading size="M">Branding</Heading>
{#if !isCloud && !brandingEnabled}
<Tags>
<Tag icon="LockClosed">Business</Tag>
<Tag icon="LockClosed">Premium</Tag>
</Tags>
{/if}
{#if isCloud && !brandingEnabled}

View file

@ -97,7 +97,7 @@
<Heading size="M">Groups</Heading>
{#if !$licensing.groupsEnabled}
<Tags>
<Tag icon="LockClosed">Business</Tag>
<Tag icon="LockClosed">Enterprise</Tag>
</Tags>
{/if}
</div>

View file

@ -16,5 +16,6 @@ export { environment } from "./environment"
export { menu } from "./menu"
export { auditLogs } from "./auditLogs"
export { features } from "./features"
export { navigation } from "./navigation"
export const sideBarCollapsed = writable(false)

View file

@ -0,0 +1,31 @@
import { writable } from "svelte/store"
export function createNavigationStore() {
const store = writable({
initialisated: false,
goto: undefined,
})
const { set, subscribe } = store
const init = gotoFunc => {
if (typeof gotoFunc !== "function") {
throw new Error(
`gotoFunc must be a function, found a "${typeof gotoFunc}" instead`
)
}
set({
initialisated: true,
goto: gotoFunc,
})
}
return {
subscribe,
actions: {
init,
},
}
}
export const navigation = createNavigationStore()
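The store simply captures a goto-style function once (the portal layout above calls navigation.actions.init($redirect)), so code that lives outside a component tree, such as the API client's onMigrationDetected handler, can still navigate. A hedged usage sketch; the URL and the goto function passed to init are illustrative:

import { get } from "svelte/store"
import { navigation } from "stores/portal"

// At startup, a layout hands over Routify's redirect/goto function.
navigation.actions.init(url => {
  console.log("navigating to", url)
})

// Later, from non-component code such as an API error handler.
get(navigation).goto("/builder/app/updating/app_dev_123?returnUrl=%2Fbuilder")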

View file

@ -37,7 +37,6 @@
"downloadjs": "1.4.7",
"html5-qrcode": "^2.2.1",
"leaflet": "^1.7.1",
"regexparam": "^1.3.0",
"sanitize-html": "^2.7.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",

View file

@ -77,4 +77,10 @@ export const API = createAPIClient({
// Log all errors to console
console.warn(`[Client] HTTP ${status} on ${method}:${url}\n\t${message}`)
},
onMigrationDetected: _appId => {
if (!window.MIGRATING_APP) {
// We will force a reload, which will display the updating screen while the migration is running
window.location.reload()
}
},
})

View file

@ -0,0 +1,23 @@
<script>
import { Updating } from "@budibase/frontend-core"
import { API } from "../api"
async function isMigrationDone() {
const response = await API.getMigrationStatus()
return response.migrated
}
async function onMigrationDone() {
window.location.reload()
}
</script>
<div class="updating">
<Updating {isMigrationDone} {onMigrationDone} />
</div>
<style>
.updating {
font-family: var(--font-sans);
}
</style>

View file

@ -14,6 +14,7 @@
const { fetchDatasourceSchema } = getContext("sdk")
const component = getContext("component")
const context = getContext("context")
// Set current step context to force child form to use it
const currentStep = writable(1)
@ -157,18 +158,23 @@
<BlockComponent type="heading" props={{ text: step.title }} />
</BlockComponent>
<BlockComponent type="text" props={{ text: step.desc }} order={1} />
<BlockComponent type="fieldgroup" order={2}>
{#each step.fields as field, fieldIdx (`${field.field || field.name}_${stepIdx}_${fieldIdx}`)}
{#if getComponentForField(field)}
<BlockComponent
type={getComponentForField(field)}
props={getPropsForField(field)}
order={fieldIdx}
interactive
name={field.field}
/>
{/if}
{/each}
<BlockComponent type="container" order={2}>
<div
class="form-block fields"
class:mobile={$context.device.mobile}
>
{#each step.fields as field, fieldIdx (`${field.field || field.name}_${stepIdx}_${fieldIdx}`)}
{#if getComponentForField(field)}
<BlockComponent
type={getComponentForField(field)}
props={getPropsForField(field)}
order={fieldIdx}
interactive
name={field.field}
/>
{/if}
{/each}
</div>
</BlockComponent>
<BlockComponent
type="buttongroup"
@ -185,3 +191,14 @@
{/each}
</BlockComponent>
</FormBlockWrapper>
<style>
.fields {
display: grid;
grid-template-columns: repeat(6, 1fr);
gap: 8px 16px;
}
.fields.mobile :global(.spectrum-Form-item) {
grid-column: span 6 !important;
}
</style>

View file

@ -1,9 +1,9 @@
<script>
import { onMount, onDestroy } from "svelte"
import IndicatorSet from "./IndicatorSet.svelte"
import { builderStore, dndIsDragging } from "stores"
import { builderStore, dndIsDragging, hoverStore } from "stores"
$: componentId = $builderStore.hoverComponentId
$: componentId = $hoverStore.hoveredComponentId
$: zIndex = componentId === $builderStore.selectedComponentId ? 900 : 920
const onMouseOver = e => {
@ -23,12 +23,12 @@
}
if (newId !== componentId) {
builderStore.actions.hoverComponent(newId)
hoverStore.actions.hoverComponent(newId)
}
}
const onMouseLeave = () => {
builderStore.actions.hoverComponent(null)
hoverStore.actions.hoverComponent(null)
}
onMount(() => {

View file

@ -1,4 +1,5 @@
import ClientApp from "./components/ClientApp.svelte"
import UpdatingApp from "./components/UpdatingApp.svelte"
import {
builderStore,
appStore,
@ -7,6 +8,7 @@ import {
environmentStore,
dndStore,
eventStore,
hoverStore,
} from "./stores"
import loadSpectrumIcons from "@budibase/bbui/spectrum-icons-rollup.js"
import { get } from "svelte/store"
@ -32,7 +34,6 @@ const loadBudibase = async () => {
layout: window["##BUDIBASE_PREVIEW_LAYOUT##"],
screen: window["##BUDIBASE_PREVIEW_SCREEN##"],
selectedComponentId: window["##BUDIBASE_SELECTED_COMPONENT_ID##"],
hoverComponentId: window["##BUDIBASE_HOVER_COMPONENT_ID##"],
previewId: window["##BUDIBASE_PREVIEW_ID##"],
theme: window["##BUDIBASE_PREVIEW_THEME##"],
customTheme: window["##BUDIBASE_PREVIEW_CUSTOM_THEME##"],
@ -52,6 +53,13 @@ const loadBudibase = async () => {
window["##BUDIBASE_APP_EMBEDDED##"] === "true"
)
if (window.MIGRATING_APP) {
new UpdatingApp({
target: window.document.body,
})
return
}
// Fetch environment info
if (!get(environmentStore)?.loaded) {
await environmentStore.actions.fetchEnvironment()
@ -76,6 +84,8 @@ const loadBudibase = async () => {
} else {
dndStore.actions.reset()
}
} else if (type === "hover-component") {
hoverStore.actions.hoverComponent(data)
} else if (type === "builder-meta") {
builderStore.actions.setMetadata(data)
}

View file

@ -8,7 +8,6 @@ const createBuilderStore = () => {
inBuilder: false,
screen: null,
selectedComponentId: null,
hoverComponentId: null,
editMode: false,
previewId: null,
theme: null,
@ -25,16 +24,6 @@ const createBuilderStore = () => {
}
const store = writable(initialState)
const actions = {
hoverComponent: id => {
if (id === get(store).hoverComponentId) {
return
}
store.update(state => ({
...state,
hoverComponentId: id,
}))
eventStore.actions.dispatchEvent("hover-component", { id })
},
selectComponent: id => {
if (id === get(store).selectedComponentId) {
return

View file

@ -0,0 +1,25 @@
import { get, writable } from "svelte/store"
import { eventStore } from "./events.js"
const createHoverStore = () => {
const store = writable({
hoveredComponentId: null,
})
const hoverComponent = id => {
if (id === get(store).hoveredComponentId) {
return
}
store.set({ hoveredComponentId: id })
eventStore.actions.dispatchEvent("hover-component", { id })
}
return {
...store,
actions: {
hoverComponent,
},
}
}
export const hoverStore = createHoverStore()
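Hover state is now owned by a dedicated client store that mirrors the builder-side store.actions.components.hover action: both short-circuit when the id is unchanged and dispatch a "hover-component" event so the other side stays in sync. Illustrative usage only; the component id is made up:

import { hoverStore } from "stores"

// Pointer enters a component in the preview: highlight it and notify the builder.
hoverStore.actions.hoverComponent("component_abc123")

// Pointer leaves the preview: clear the highlight on both sides.
hoverStore.actions.hoverComponent(null)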

View file

@ -27,6 +27,7 @@ export {
dndIsDragging,
} from "./dnd"
export { sidePanelStore } from "./sidePanel"
export { hoverStore } from "./hover"
// Context stores are layered and duplicated, so it is not a singleton
export { createContextStore } from "./context"

View file

@ -33,6 +33,7 @@ import { buildEnvironmentVariableEndpoints } from "./environmentVariables"
import { buildEventEndpoints } from "./events"
import { buildAuditLogsEndpoints } from "./auditLogs"
import { buildLogsEndpoints } from "./logs"
import { buildMigrationEndpoints } from "./migrations"
/**
* Random identifier to uniquely identify a session in a tab. This is
@ -298,6 +299,7 @@ export const createAPIClient = config => {
...buildEventEndpoints(API),
...buildAuditLogsEndpoints(API),
...buildLogsEndpoints(API),
...buildMigrationEndpoints(API),
viewV2: buildViewV2Endpoints(API),
}
}

View file

@ -0,0 +1,10 @@
export const buildMigrationEndpoints = API => ({
/**
* Gets the info about the current app migration
*/
getMigrationStatus: async () => {
return await API.get({
url: "/api/migrations/status",
})
},
})
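Both of the new Updating entry points above use this endpoint the same way, polling until migrated flips to true. A hedged sketch of that loop, using the one-second interval and response shape shown in the hunks above (the real component also stops polling after a timeout):

import { API } from "api"

async function waitForMigration(): Promise<void> {
  // Poll /api/migrations/status once a second until the app reports migrated.
  for (;;) {
    const { migrated } = await API.getMigrationStatus()
    if (migrated) {
      return
    }
    await new Promise(resolve => setTimeout(resolve, 1000))
  }
}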

View file

@ -0,0 +1,79 @@
<script>
export let isMigrationDone
export let onMigrationDone
export let timeoutSeconds = 10 // seconds to wait before showing the timeout message
const loadTime = Date.now()
let timedOut = false
async function checkMigrationsFinished() {
setTimeout(async () => {
const isMigrated = await isMigrationDone()
const timeoutMs = timeoutSeconds * 1000
if (!isMigrated) {
if (loadTime + timeoutMs > Date.now()) {
return checkMigrationsFinished()
}
return migrationTimeout()
}
onMigrationDone()
}, 1000)
}
checkMigrationsFinished()
function migrationTimeout() {
timedOut = true
}
</script>
<div class="loading" class:timeout={timedOut}>
<span class="header">
{#if !timedOut}
System update
{:else}
Something went wrong!
{/if}
</span>
<span class="subtext">
{#if !timedOut}
Please wait and we will be back in a second!
{:else}
An error occurred, please try again later.
<br />
Contact
<a href="https://budibase.com/support/" target="_blank">support</a> if the
issue persists.
{/if}</span
>
</div>
<style>
.loading {
display: flex;
justify-content: center;
flex-direction: column;
gap: var(--spacing-l);
height: 100vh;
text-align: center;
font-size: 18px;
}
.header {
font-weight: 700;
}
.timeout .header {
color: rgb(196, 46, 46);
}
.subtext {
font-size: 16px;
color: var(--grey-7);
}
.subtext a {
color: var(--grey-7);
font-weight: 700;
}
</style>

View file

@ -3,4 +3,5 @@ export { default as TestimonialPage } from "./TestimonialPage.svelte"
export { default as Testimonial } from "./Testimonial.svelte"
export { default as UserAvatar } from "./UserAvatar.svelte"
export { default as UserAvatars } from "./UserAvatars.svelte"
export { default as Updating } from "./Updating.svelte"
export { Grid } from "./grid"

View file

@ -7,7 +7,7 @@
"../shared-core",
"../string-templates"
],
"ext": "js,ts,json",
"ext": "js,ts,json,svelte",
"ignore": ["src/**/*.spec.ts", "src/**/*.spec.js", "../*/dist/**/*"],
"exec": "yarn build && node --no-node-snapshot ./dist/index.js"
}

View file

@ -82,7 +82,7 @@
"koa": "2.13.4",
"koa-body": "4.2.0",
"koa-compress": "4.0.1",
"koa-send": "5.0.0",
"koa-send": "5.0.1",
"koa-useragent": "^4.1.0",
"koa2-ratelimit": "1.1.1",
"lodash": "4.17.21",
@ -121,6 +121,7 @@
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.8",
"@types/koa-send": "^4.1.6",
"@types/lodash": "4.14.200",
"@types/mssql": "9.1.4",
"@types/node-fetch": "2.6.4",

View file

@ -1,23 +1,17 @@
import {
DocumentType,
generateDatasourceID,
getQueryParams,
getTableParams,
} from "../../db/utils"
import { getQueryParams, getTableParams } from "../../db/utils"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
import {
BuildSchemaFromSourceRequest,
BuildSchemaFromSourceResponse,
CreateDatasourceRequest,
CreateDatasourceResponse,
Datasource,
DatasourcePlus,
FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse,
IntegrationBase,
Schema,
SourceName,
Table,
UpdateDatasourceResponse,
UserCtx,
VerifyDatasourceRequest,
@ -25,68 +19,8 @@ import {
} from "@budibase/types"
import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
import { isEqual } from "lodash"
async function getConnector(
datasource: Datasource
): Promise<IntegrationBase | DatasourcePlus> {
const Connector = await getIntegration(datasource.source)
// can't enrich if it doesn't have an ID yet
if (datasource._id) {
datasource = await sdk.datasources.enrich(datasource)
}
// Connect to the DB and build the schema
return new Connector(datasource.config)
}
async function getAndMergeDatasource(datasource: Datasource) {
let existingDatasource: undefined | Datasource
if (datasource._id) {
existingDatasource = await sdk.datasources.get(datasource._id)
}
let enrichedDatasource = datasource
if (existingDatasource) {
enrichedDatasource = sdk.datasources.mergeConfigs(
datasource,
existingDatasource
)
}
return await sdk.datasources.enrich(enrichedDatasource)
}
async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus
return await connector.buildSchema(
datasource._id!,
datasource.entities! as Record<string, Table>
)
}
async function buildFilteredSchema(
datasource: Datasource,
filter?: string[]
): Promise<Schema> {
let schema = await buildSchemaHelper(datasource)
if (!filter) {
return schema
}
let filteredSchema: Schema = { tables: {}, errors: {} }
for (let key in schema.tables) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.tables[key] = schema.tables[key]
}
}
for (let key in schema.errors) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.errors[key] = schema.errors[key]
}
}
return filteredSchema
}
export async function fetch(ctx: UserCtx) {
ctx.body = await sdk.datasources.fetch()
}
@ -95,8 +29,10 @@ export async function verify(
ctx: UserCtx<VerifyDatasourceRequest, VerifyDatasourceResponse>
) {
const { datasource } = ctx.request.body
const enrichedDatasource = await getAndMergeDatasource(datasource)
const connector = await getConnector(enrichedDatasource)
const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
datasource
)
const connector = await sdk.datasources.getConnector(enrichedDatasource)
if (!connector.testConnection) {
ctx.throw(400, "Connection information verification not supported")
}
@ -112,8 +48,12 @@ export async function information(
ctx: UserCtx<FetchDatasourceInfoRequest, FetchDatasourceInfoResponse>
) {
const { datasource } = ctx.request.body
const enrichedDatasource = await getAndMergeDatasource(datasource)
const connector = (await getConnector(enrichedDatasource)) as DatasourcePlus
const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
datasource
)
const connector = (await sdk.datasources.getConnector(
enrichedDatasource
)) as DatasourcePlus
if (!connector.getTableNames) {
ctx.throw(400, "Table name fetching not supported by datasource")
}
@ -123,19 +63,16 @@ export async function information(
}
}
export async function buildSchemaFromDb(ctx: UserCtx) {
const db = context.getAppDB()
export async function buildSchemaFromSource(
ctx: UserCtx<BuildSchemaFromSourceRequest, BuildSchemaFromSourceResponse>
) {
const datasourceId = ctx.params.datasourceId
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables
setDefaultDisplayColumns(datasource)
const dbResp = await db.put(
sdk.tables.populateExternalTableSchemas(datasource)
const { datasource, errors } = await sdk.datasources.buildSchemaFromSource(
datasourceId,
tablesFilter
)
datasource._rev = dbResp.rev
ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
@ -143,24 +80,6 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
}
}
/**
* Make sure all datasource entities have a display name selected
*/
function setDefaultDisplayColumns(datasource: Datasource) {
//
for (let entity of Object.values(datasource.entities || {})) {
if (entity.primaryDisplay) {
continue
}
const notAutoColumn = Object.values(entity.schema).find(
schema => !schema.autocolumn
)
if (notAutoColumn) {
entity.primaryDisplay = notAutoColumn.name
}
}
}
/**
* Check for variables that have been updated or removed and invalidate them.
*/
@ -258,51 +177,18 @@ export async function update(ctx: UserCtx<any, UpdateDatasourceResponse>) {
}
}
const preSaveAction: Partial<Record<SourceName, any>> = {
[SourceName.GOOGLE_SHEETS]: async (datasource: Datasource) => {
await googleSetupCreationAuth(datasource.config as any)
},
}
export async function save(
ctx: UserCtx<CreateDatasourceRequest, CreateDatasourceResponse>
) {
const db = context.getAppDB()
const plus = ctx.request.body.datasource.plus
const fetchSchema = ctx.request.body.fetchSchema
const tablesFilter = ctx.request.body.tablesFilter
const datasource = {
_id: generateDatasourceID({ plus }),
...ctx.request.body.datasource,
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
}
let errors: Record<string, string> = {}
if (fetchSchema) {
const schema = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
}
if (preSaveAction[datasource.source]) {
await preSaveAction[datasource.source](datasource)
}
const dbResp = await db.put(
sdk.tables.populateExternalTableSchemas(datasource)
)
await events.datasource.created(datasource)
datasource._rev = dbResp.rev
// Drain connection pools when configuration is changed
if (datasource.source) {
const source = await getIntegration(datasource.source)
if (source && source.pool) {
await source.pool.end()
}
}
const {
datasource: datasourceData,
fetchSchema,
tablesFilter,
} = ctx.request.body
const { datasource, errors } = await sdk.datasources.save(datasourceData, {
fetchSchema,
tablesFilter,
})
ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
@ -384,8 +270,10 @@ export async function query(ctx: UserCtx) {
export async function getExternalSchema(ctx: UserCtx) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const enrichedDatasource = await getAndMergeDatasource(datasource)
const connector = await getConnector(enrichedDatasource)
const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
datasource
)
const connector = await sdk.datasources.getConnector(enrichedDatasource)
if (!connector.getExternalSchema) {
ctx.throw(400, "Datasource does not support exporting external schema")

View file

@ -25,8 +25,12 @@ import fs from "fs"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
import { App, Ctx, ProcessAttachmentResponse } from "@budibase/types"
import {
getAppMigrationVersion,
getLatestMigrationId,
} from "../../../appMigrations"
const send = require("koa-send")
import send from "koa-send"
export const toggleBetaUiFeature = async function (ctx: Ctx) {
const cookieName = `beta:${ctx.params.feature}`
@ -125,7 +129,26 @@ export const deleteObjects = async function (ctx: Ctx) {
)
}
const requiresMigration = async (ctx: Ctx) => {
const appId = context.getAppId()
if (!appId) {
ctx.throw("AppId could not be found")
}
const latestMigration = getLatestMigrationId()
if (!latestMigration) {
return false
}
const latestMigrationApplied = await getAppMigrationVersion(appId)
const requiresMigrations = latestMigrationApplied !== latestMigration
return requiresMigrations
}
export const serveApp = async function (ctx: Ctx) {
const needMigrations = await requiresMigration(ctx)
const bbHeaderEmbed =
ctx.request.get("x-budibase-embed")?.toLowerCase() === "true"
@ -145,8 +168,8 @@ export const serveApp = async function (ctx: Ctx) {
let appId = context.getAppId()
if (!env.isJest()) {
const App = require("./templates/BudibaseApp.svelte").default
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
const App = require("./templates/BudibaseApp.svelte").default
const { head, html, css } = App.render({
metaImage:
branding?.metaImageUrl ||
@ -167,6 +190,7 @@ export const serveApp = async function (ctx: Ctx) {
config?.logoUrl !== ""
? objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
appMigrating: needMigrations,
})
const appHbs = loadHandlebarsFile(appHbsPath)
ctx.body = await processString(appHbs, {
@ -273,7 +297,6 @@ export const getSignedUploadURL = async function (ctx: Ctx) {
const { bucket, key } = ctx.request.body || {}
if (!bucket || !key) {
ctx.throw(400, "bucket and key values are required")
return
}
try {
const s3 = new AWS.S3({

View file

@ -8,6 +8,7 @@
export let clientLibPath
export let usedPlugins
export let appMigrating
</script>
<svelte:head>
@ -110,6 +111,11 @@
<script type="application/javascript">
window.INIT_TIME = Date.now()
</script>
{#if appMigrating}
<script type="application/javascript">
window.MIGRATING_APP = true
</script>
{/if}
<script type="application/javascript" src={clientLibPath}>
</script>
<!-- Custom components need inserted after the core client library -->

View file

@ -63,7 +63,6 @@
// Extract data from message
const {
selectedComponentId,
hoverComponentId,
layout,
screen,
appId,
@ -82,7 +81,6 @@
window["##BUDIBASE_PREVIEW_LAYOUT##"] = layout
window["##BUDIBASE_PREVIEW_SCREEN##"] = screen
window["##BUDIBASE_SELECTED_COMPONENT_ID##"] = selectedComponentId
window["##BUDIBASE_HOVER_COMPONENT_ID##"] = hoverComponentId
window["##BUDIBASE_PREVIEW_ID##"] = Math.random()
window["##BUDIBASE_PREVIEW_THEME##"] = theme
window["##BUDIBASE_PREVIEW_CUSTOM_THEME##"] = customTheme

View file

@ -53,7 +53,7 @@ router
.post(
"/api/datasources/:datasourceId/schema",
authorized(permissions.BUILDER),
datasourceController.buildSchemaFromDb
datasourceController.buildSchemaFromSource
)
.post(
"/api/datasources",

View file

@ -17,7 +17,7 @@ export const getLatestMigrationId = () =>
.sort()
.reverse()[0]
const getTimestamp = (versionId: string) => versionId?.split("_")[0]
const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""
export async function checkMissingMigrations(
ctx: UserCtx,

View file
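Migration ids are timestamp-prefixed strings, so a plain lexicographic sort is also chronological; the new || "" fallback just keeps getTimestamp from returning undefined when there is no version id. A small worked sketch with made-up ids:

// Illustrative ids only, not real Budibase migrations.
const MIGRATIONS = ["20240105120000_backfill_links", "20240112093000_row_metadata"]

const latestMigrationId = [...MIGRATIONS].sort().reverse()[0]
const appliedMigrationId = "20240105120000_backfill_links"

const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""

// The app needs the updating screen when the applied id lags the latest one.
const requiresMigration = appliedMigrationId !== latestMigrationId // true
console.log(getTimestamp(latestMigrationId)) // "20240112093000"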

@ -103,8 +103,7 @@ function typeCoercion(filters: SearchFilters, table: Table) {
return filters
}
for (let key of Object.keys(filters)) {
// @ts-ignore
const searchParam = filters[key]
const searchParam = filters[key as keyof SearchFilters]
if (typeof searchParam === "object") {
for (let [property, value] of Object.entries(searchParam)) {
// We need to strip numerical prefixes here, so that we can look up
@ -117,7 +116,13 @@ function typeCoercion(filters: SearchFilters, table: Table) {
continue
}
if (column.type === FieldTypes.NUMBER) {
searchParam[property] = parseFloat(value)
if (key === "oneOf") {
searchParam[property] = value
.split(",")
.map(item => parseFloat(item))
} else {
searchParam[property] = parseFloat(value)
}
}
}
}
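Concretely, a oneOf filter arriving as a comma-separated string on a numeric column is now split and coerced element by element, rather than handed to parseFloat whole, which would silently keep only the first number. A small worked example with made-up filter values:

// Illustrative input: the UI sends oneOf values as a single string.
const searchParam: Record<string, any> = { customer_id: "523,259" }

const property = "customer_id"
const value = searchParam[property]

// Old behaviour: parseFloat("523,259") === 523, dropping 259.
// New behaviour for oneOf filters on number columns:
searchParam[property] = value.split(",").map((item: string) => parseFloat(item))

console.log(searchParam.customer_id) // [523, 259]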

View file

@ -3,5 +3,9 @@ import apm from "dd-trace"
// enable APM if configured
if (process.env.DD_APM_ENABLED) {
console.log("Starting dd-trace")
apm.init()
apm.init({
// @ts-ignore for some reason dd-trace types don't include this option,
// even though it's spoken about in the docs.
debug: process.env.DD_ENV === "qa",
})
}

View file

@ -1118,4 +1118,76 @@ describe("postgres integrations", () => {
})
})
})
describe("Integration compatibility with postgres search_path", () => {
let client: Client, pathDatasource: Datasource
const schema1 = "test1",
schema2 = "test-2"
beforeAll(async () => {
const dsConfig = await databaseTestProviders.postgres.getDsConfig()
const dbConfig = dsConfig.config!
client = new Client(dbConfig)
await client.connect()
await client.query(`CREATE SCHEMA "${schema1}";`)
await client.query(`CREATE SCHEMA "${schema2}";`)
const pathConfig: any = {
...dsConfig,
config: {
...dbConfig,
schema: `${schema1}, ${schema2}`,
},
}
pathDatasource = await config.api.datasource.create(pathConfig)
})
afterAll(async () => {
await client.query(`DROP SCHEMA "${schema1}" CASCADE;`)
await client.query(`DROP SCHEMA "${schema2}" CASCADE;`)
await client.end()
})
it("discovers tables from any schema in search path", async () => {
await client.query(
`CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
)
await client.query(
`CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
datasource: pathDatasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames).toEqual(
expect.arrayContaining(["table1", "table2"])
)
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await client.query(
`CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
)
await client.query(
`CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
`/api/datasources/${pathDatasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
}
)
expect(response.status).toBe(200)
expect(
response.body.datasource.entities[repeated_table_name].schema
).toBeDefined()
const schema =
response.body.datasource.entities[repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
})
})
})

View file

@ -149,8 +149,6 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
private index: number = 1
private open: boolean
COLUMNS_SQL!: string
PRIMARY_KEYS_SQL = () => `
SELECT pg_namespace.nspname table_schema
, pg_class.relname table_name
@ -159,7 +157,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
JOIN pg_index ON pg_class.oid = pg_index.indrelid AND pg_index.indisprimary
JOIN pg_attribute ON pg_attribute.attrelid = pg_class.oid AND pg_attribute.attnum = ANY(pg_index.indkey)
JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
WHERE pg_namespace.nspname = '${this.config.schema}';
WHERE pg_namespace.nspname = ANY(current_schemas(false))
AND pg_table_is_visible(pg_class.oid);
`
ENUM_VALUES = () => `
@ -170,6 +169,11 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace;
`
COLUMNS_SQL = () => `
select * from information_schema.columns where table_schema = ANY(current_schemas(false))
AND pg_table_is_visible(to_regclass(format('%I.%I', table_schema, table_name)));
`
constructor(config: PostgresConfig) {
super(SqlClient.POSTGRES)
this.config = config
@ -219,8 +223,10 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
if (!this.config.schema) {
this.config.schema = "public"
}
await this.client.query(`SET search_path TO "${this.config.schema}"`)
this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'`
const search_path = this.config.schema
.split(",")
.map(item => `"${item.trim()}"`)
await this.client.query(`SET search_path TO ${search_path.join(",")};`)
this.open = true
}
@ -307,7 +313,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
try {
const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL)
await this.client.query(this.COLUMNS_SQL())
const tables: { [key: string]: Table } = {}
@ -362,8 +368,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
})
}
let finalizedTables = finaliseExternalTables(tables, entities)
let errors = checkExternalTables(finalizedTables)
const finalizedTables = finaliseExternalTables(tables, entities)
const errors = checkExternalTables(finalizedTables)
return { tables: finalizedTables, errors }
} catch (err) {
// @ts-ignore
@ -377,7 +383,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
try {
await this.openConnection()
const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL)
await this.client.query(this.COLUMNS_SQL())
const names = columnsResponse.rows.map(row => row.table_name)
return [...new Set(names)]
} finally {

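The integration now accepts a comma-separated schema list: each entry is trimmed and quoted into a single SET search_path statement, and the introspection queries switch from a hard-coded table_schema filter to current_schemas(false) plus pg_table_is_visible, so tables from every schema on the path are discovered without mixing up same-named tables. A minimal sketch of the search_path construction, mirroring openConnection() above; the config value is illustrative (the schema names come from the test file earlier in this commit):

const configSchema = "test1, test-2"

const searchPath = configSchema
  .split(",")
  .map(item => `"${item.trim()}"`) // quote so names like "test-2" survive

const statement = `SET search_path TO ${searchPath.join(",")};`
console.log(statement) // SET search_path TO "test1","test-2";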
View file

@ -12,11 +12,19 @@ export function init() {
const perRequestLimit = env.JS_PER_REQUEST_TIME_LIMIT_MS
let track: TrackerFn = f => f()
if (perRequestLimit) {
const bbCtx = context.getCurrentContext()
const bbCtx = tracer.trace("runJS.getCurrentContext", {}, span =>
context.getCurrentContext()
)
if (bbCtx) {
if (!bbCtx.jsExecutionTracker) {
bbCtx.jsExecutionTracker =
timers.ExecutionTimeTracker.withLimit(perRequestLimit)
span?.addTags({
createdExecutionTracker: true,
})
bbCtx.jsExecutionTracker = tracer.trace(
"runJS.createExecutionTimeTracker",
{},
span => timers.ExecutionTimeTracker.withLimit(perRequestLimit)
)
}
span?.addTags({
js: {
@ -26,8 +34,12 @@ export function init() {
})
// We call checkLimit() here to prevent paying the cost of creating
// a new VM context below when we don't need to.
bbCtx.jsExecutionTracker.checkLimit()
track = bbCtx.jsExecutionTracker.track.bind(bbCtx.jsExecutionTracker)
tracer.trace("runJS.checkLimitAndBind", {}, span => {
bbCtx.jsExecutionTracker!.checkLimit()
track = bbCtx.jsExecutionTracker!.track.bind(
bbCtx.jsExecutionTracker
)
})
}
}

View file

@ -1,4 +1,4 @@
import { context, db as dbCore } from "@budibase/backend-core"
import { context, db as dbCore, events } from "@budibase/backend-core"
import { findHBSBlocks, processObjectSync } from "@budibase/string-templates"
import {
Datasource,
@ -14,16 +14,22 @@ import {
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import { getEnvironmentVariables } from "../../utils"
import { getDefinitions, getDefinition } from "../../../integrations"
import {
getDefinitions,
getDefinition,
getIntegration,
} from "../../../integrations"
import merge from "lodash/merge"
import {
BudibaseInternalDB,
generateDatasourceID,
getDatasourceParams,
getDatasourcePlusParams,
getTableParams,
DocumentType,
} from "../../../db/utils"
import sdk from "../../index"
import datasource from "../../../api/routes/datasource"
import { setupCreationAuth as googleSetupCreationAuth } from "../../../integrations/googlesheets"
const ENV_VAR_PREFIX = "env."
@ -273,3 +279,75 @@ export async function getExternalDatasources(): Promise<Datasource[]> {
return externalDatasources.rows.map(r => r.doc!)
}
export async function save(
datasource: Datasource,
opts?: { fetchSchema?: boolean; tablesFilter?: string[] }
): Promise<{ datasource: Datasource; errors: Record<string, string> }> {
const db = context.getAppDB()
const plus = datasource.plus
const fetchSchema = opts?.fetchSchema || false
const tablesFilter = opts?.tablesFilter || []
datasource = {
_id: generateDatasourceID({ plus }),
...datasource,
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
}
let errors: Record<string, string> = {}
if (fetchSchema) {
const schema = await sdk.datasources.buildFilteredSchema(
datasource,
tablesFilter
)
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
}
if (preSaveAction[datasource.source]) {
await preSaveAction[datasource.source](datasource)
}
const dbResp = await db.put(
sdk.tables.populateExternalTableSchemas(datasource)
)
await events.datasource.created(datasource)
datasource._rev = dbResp.rev
// Drain connection pools when configuration is changed
if (datasource.source) {
const source = await getIntegration(datasource.source)
if (source && source.pool) {
await source.pool.end()
}
}
return { datasource, errors }
}
const preSaveAction: Partial<Record<SourceName, any>> = {
[SourceName.GOOGLE_SHEETS]: async (datasource: Datasource) => {
await googleSetupCreationAuth(datasource.config as any)
},
}
/**
* Make sure all datasource entities have a display name selected
*/
export function setDefaultDisplayColumns(datasource: Datasource) {
//
for (let entity of Object.values(datasource.entities || {})) {
if (entity.primaryDisplay) {
continue
}
const notAutoColumn = Object.values(entity.schema).find(
schema => !schema.autocolumn
)
if (notAutoColumn) {
entity.primaryDisplay = notAutoColumn.name
}
}
}

View file

@ -1,5 +1,7 @@
import * as datasources from "./datasources"
import * as plus from "./plus"
export default {
...datasources,
...plus,
}

View file

@ -0,0 +1,85 @@
import {
Datasource,
DatasourcePlus,
IntegrationBase,
Schema,
} from "@budibase/types"
import * as datasources from "./datasources"
import tableSdk from "../tables"
import { getIntegration } from "../../../integrations"
import { context } from "@budibase/backend-core"
export async function buildFilteredSchema(
datasource: Datasource,
filter?: string[]
): Promise<Schema> {
const schema = await buildSchemaHelper(datasource)
if (!filter) {
return schema
}
let filteredSchema: Schema = { tables: {}, errors: {} }
for (let key in schema.tables) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.tables[key] = schema.tables[key]
}
}
for (let key in schema.errors) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.errors[key] = schema.errors[key]
}
}
return filteredSchema
}
async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus
const externalSchema = await connector.buildSchema(
datasource._id!,
datasource.entities!
)
return externalSchema
}
export async function getConnector(
datasource: Datasource
): Promise<IntegrationBase | DatasourcePlus> {
const Connector = await getIntegration(datasource.source)
// can't enrich if it doesn't have an ID yet
if (datasource._id) {
datasource = await datasources.enrich(datasource)
}
// Connect to the DB and build the schema
return new Connector(datasource.config)
}
export async function getAndMergeDatasource(datasource: Datasource) {
if (datasource._id) {
const existingDatasource = await datasources.get(datasource._id)
datasource = datasources.mergeConfigs(datasource, existingDatasource)
}
return await datasources.enrich(datasource)
}
export async function buildSchemaFromSource(
datasourceId: string,
tablesFilter?: string[]
) {
const db = context.getAppDB()
const datasource = await datasources.get(datasourceId)
const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables
datasources.setDefaultDisplayColumns(datasource)
const dbResp = await db.put(tableSdk.populateExternalTableSchemas(datasource))
datasource._rev = dbResp.rev
return {
datasource,
errors,
}
}

View file

@ -143,100 +143,104 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
oneOf: {},
containsAny: {},
}
  if (!Array.isArray(filter)) {
    return query
  }
  filter.forEach(expression => {
    let { operator, field, type, value, externalType, onEmptyFilter } =
      expression
    const isHbs =
      typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0
    // Parse all values into correct types
    if (operator === "allOr") {
      query.allOr = true
      return
    }
    if (onEmptyFilter) {
      query.onEmptyFilter = onEmptyFilter
      return
    }
    if (
      type === "datetime" &&
      !isHbs &&
      operator !== "empty" &&
      operator !== "notEmpty"
    ) {
      // Ensure date value is a valid date and parse into correct format
      if (!value) {
        return
      }
      try {
        value = new Date(value).toISOString()
      } catch (error) {
        return
      }
    }
    if (type === "number" && typeof value === "string" && !isHbs) {
      if (operator === "oneOf") {
        value = value.split(",").map(item => parseFloat(item))
      } else {
        value = parseFloat(value)
      }
    }
    if (type === "boolean") {
      value = `${value}`?.toLowerCase() === "true"
    }
    if (
      ["contains", "notContains", "containsAny"].includes(operator) &&
      type === "array" &&
      typeof value === "string"
    ) {
      value = value.split(",")
    }
    if (operator.startsWith("range") && query.range) {
      const minint =
        SqlNumberTypeRangeMap[
          externalType as keyof typeof SqlNumberTypeRangeMap
        ]?.min || Number.MIN_SAFE_INTEGER
      const maxint =
        SqlNumberTypeRangeMap[
          externalType as keyof typeof SqlNumberTypeRangeMap
        ]?.max || Number.MAX_SAFE_INTEGER
      if (!query.range[field]) {
        query.range[field] = {
          low: type === "number" ? minint : "0000-00-00T00:00:00.000Z",
          high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z",
        }
      }
      if ((operator as any) === "rangeLow" && value != null && value !== "") {
        query.range[field].low = value
      } else if (
        (operator as any) === "rangeHigh" &&
        value != null &&
        value !== ""
      ) {
        query.range[field].high = value
      }
    } else if (query[operator] && operator !== "onEmptyFilter") {
      if (type === "boolean") {
        // Transform boolean filters to cope with null.
        // "equals false" needs to be "not equals true"
        // "not equals false" needs to be "equals true"
        if (operator === "equal" && value === false) {
          query.notEqual = query.notEqual || {}
          query.notEqual[field] = true
        } else if (operator === "notEqual" && value === false) {
          query.equal = query.equal || {}
          query.equal[field] = true
        } else {
          query[operator] = query[operator] || {}
          query[operator]![field] = value
        }
      } else {
        query[operator] = query[operator] || {}
        query[operator]![field] = value
      }
    }
  })
return query
}
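
For illustration, a hedged sketch of how a rangeLow/rangeHigh pair on the same field folds into a single query.range entry (the field name and bounds are invented; the as-any cast mirrors the operator cast used in the function above):

// Illustrative only: both bounds end up on one range entry for "amount".
const rangeQuery = buildLuceneQuery([
  { operator: "rangeLow", field: "amount", type: "number", value: 10 },
  { operator: "rangeHigh", field: "amount", type: "number", value: 100 },
] as any)
// rangeQuery.range.amount is now { low: 10, high: 100 }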

View file

@ -1,6 +1,11 @@
import { SearchQuery, SearchQueryOperators } from "@budibase/types"
import { runLuceneQuery } from "../filters"
import { expect, describe, it } from "vitest"
import {
SearchQuery,
SearchQueryOperators,
FieldType,
SearchFilter,
} from "@budibase/types"
import { buildLuceneQuery, runLuceneQuery } from "../filters"
import { expect, describe, it, test } from "vitest"
describe("runLuceneQuery", () => {
const docs = [
@ -167,4 +172,186 @@ describe("runLuceneQuery", () => {
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
})
test.each([[523, 259], "523,259"])(
"should return rows with matches on numeric oneOf filter",
input => {
let query = buildQuery("oneOf", {
customer_id: input,
})
expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([
259, 523,
])
}
)
})
describe("buildLuceneQuery", () => {
it("should return a basic search query template if the input is not an array", () => {
const filter: any = "NOT_AN_ARRAY"
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {},
containsAny: {},
})
})
it("should parseFloat if the type is a number, but the value is a numeric string", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.EQUAL,
field: "customer_id",
type: FieldType.NUMBER,
value: "1212",
},
{
operator: SearchQueryOperators.ONE_OF,
field: "customer_id",
type: FieldType.NUMBER,
value: "1000,1212,3400",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {
customer_id: 1212,
},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {
customer_id: [1000, 1212, 3400],
},
containsAny: {},
})
})
it("should not parseFloat if the type is a number, but the value is a handlebars binding string", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.EQUAL,
field: "customer_id",
type: FieldType.NUMBER,
value: "{{ customer_id }}",
},
{
operator: SearchQueryOperators.ONE_OF,
field: "customer_id",
type: FieldType.NUMBER,
value: "{{ list_of_customer_ids }}",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {
customer_id: "{{ customer_id }}",
},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {
customer_id: "{{ list_of_customer_ids }}",
},
containsAny: {},
})
})
it("should cast string to boolean if the type is boolean", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.EQUAL,
field: "a",
type: FieldType.BOOLEAN,
value: "not_true",
},
{
operator: SearchQueryOperators.NOT_EQUAL,
field: "b",
type: FieldType.BOOLEAN,
value: "not_true",
},
{
operator: SearchQueryOperators.EQUAL,
field: "c",
type: FieldType.BOOLEAN,
value: "true",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {
b: true,
c: true,
},
notEqual: {
a: true,
},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {},
containsAny: {},
})
})
it("should split the string for contains operators", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.CONTAINS,
field: "description",
type: FieldType.ARRAY,
value: "Large box,Heavy box,Small box",
},
{
operator: SearchQueryOperators.NOT_CONTAINS,
field: "description",
type: FieldType.ARRAY,
value: "Large box,Heavy box,Small box",
},
{
operator: SearchQueryOperators.CONTAINS_ANY,
field: "description",
type: FieldType.ARRAY,
value: "Large box,Heavy box,Small box",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {},
notEqual: {},
empty: {},
notEmpty: {},
contains: {
description: ["Large box", "Heavy box", "Small box"],
},
notContains: {
description: ["Large box", "Heavy box", "Small box"],
},
oneOf: {},
containsAny: {
description: ["Large box", "Heavy box", "Small box"],
},
})
})
})

View file

@ -25,10 +25,10 @@
"manifest": "node ./scripts/gen-collection-info.js"
},
"dependencies": {
"@budibase/handlebars-helpers": "^0.11.11",
"@budibase/handlebars-helpers": "^0.12.0",
"dayjs": "^1.10.8",
"handlebars": "^4.7.6",
"lodash": "4.17.21",
"lodash.clonedeep": "^4.5.0",
"vm2": "^3.9.19"
},
"devDependencies": {

View file

@ -1,5 +1,5 @@
const { atob } = require("../utilities")
const { cloneDeep } = require("lodash/fp")
const cloneDeep = require("lodash.clonedeep")
const { LITERAL_MARKER } = require("../helpers/constants")
const { getHelperList } = require("./list")
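
Both the previous lodash/fp import and lodash.clonedeep expose a one-argument deep clone, so this is intended as a drop-in swap; a quick sketch with made-up values:

// Sketch only: nested objects are copied, not shared by reference.
const cloneDeep = require("lodash.clonedeep")
const original = { nested: { value: 1 } }
const copy = cloneDeep(original)
copy.nested.value = 2
// original.nested.value is still 1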

View file

@ -35,3 +35,12 @@ export interface FetchDatasourceInfoResponse {
export interface UpdateDatasourceRequest extends Datasource {
datasource: Datasource
}
export interface BuildSchemaFromSourceRequest {
tablesFilter?: string[]
}
export interface BuildSchemaFromSourceResponse {
datasource: Datasource
errors: Record<string, string>
}
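
A hedged client-side sketch of how these request/response types might be used; the route and id below are assumptions, not taken from this diff:

// Assumed endpoint shape, for illustration only.
const datasourceId = "datasource_plus_abc123" // placeholder id
const body: BuildSchemaFromSourceRequest = { tablesFilter: ["users"] }
const res = await fetch(`/api/datasources/${datasourceId}/schema`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(body),
})
const { datasource, errors }: BuildSchemaFromSourceResponse = await res.json()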

View file

@ -121,7 +121,6 @@ export interface Database {
name: string
exists(): Promise<boolean>
checkSetup(): Promise<Nano.DocumentScope<any>>
get<T extends Document>(id?: string): Promise<T>
getMultiple<T extends Document>(
ids: string[],

View file

@ -24,6 +24,8 @@
},
"devDependencies": {
"@budibase/types": "^2.3.17",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@trendyol/jest-testcontainers": "2.1.1",
"@types/jest": "29.5.3",
"@types/node-fetch": "2.6.4",
@ -32,8 +34,6 @@
"jest": "29.7.0",
"prettier": "2.7.1",
"start-server-and-test": "1.14.0",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"timekeeper": "2.2.0",
"ts-jest": "29.1.1",
"ts-node": "10.8.1",
@ -43,6 +43,7 @@
"dependencies": {
"@budibase/backend-core": "^2.3.17",
"form-data": "^4.0.0",
"node-fetch": "2.6.7"
"node-fetch": "2.6.7",
"stripe": "^14.11.0"
}
}

View file

@ -39,10 +39,10 @@ describe("license management", () => {
let premiumPriceId = null
let businessPriceId = ""
for (const plan of planBody) {
if (plan.type === PlanType.PREMIUM) {
if (plan.type === PlanType.PREMIUM_PLUS) {
premiumPriceId = plan.prices[0].priceId
}
if (plan.type === PlanType.BUSINESS) {
if (plan.type === PlanType.ENTERPRISE_BASIC) {
businessPriceId = plan.prices[0].priceId
}
}
@ -97,7 +97,7 @@ describe("license management", () => {
await config.loginAsAccount(createAccountRequest)
await config.api.stripe.linkStripeCustomer(account.accountId, customer.id)
const [_, selfBodyPremium] = await config.api.accounts.self()
expect(selfBodyPremium.license.plan.type).toBe(PlanType.PREMIUM)
expect(selfBodyPremium.license.plan.type).toBe(PlanType.PREMIUM_PLUS)
// Create portal session - Check URL
const [portalRes, portalSessionBody] =
@ -109,7 +109,7 @@ describe("license management", () => {
// License updated to Business
const [selfRes, selfBodyBusiness] = await config.api.accounts.self()
expect(selfBodyBusiness.license.plan.type).toBe(PlanType.BUSINESS)
expect(selfBodyBusiness.license.plan.type).toBe(PlanType.ENTERPRISE_BASIC)
})
})
})

View file

@ -951,6 +951,13 @@
resolved "https://registry.npmjs.org/@types/node/-/node-18.15.1.tgz"
integrity sha512-U2TWca8AeHSmbpi314QBESRk7oPjSZjDsR+c+H4ECC1l+kFgpZf8Ydhv3SJpPy51VyZHHqxlb6mTTqYNNRVAIw==
"@types/node@>=8.1.0":
version "20.11.0"
resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.0.tgz#8e0b99e70c0c1ade1a86c4a282f7b7ef87c9552f"
integrity sha512-o9bjXmDNcF7GbM4CNQpmi+TutCgap/K3w1JyKgxAjqx41zp9qlIAVFi0IhCNsJcXolEqLWhbFbEeL0PvYm4pcQ==
dependencies:
undici-types "~5.26.4"
"@types/stack-utils@^2.0.0":
version "2.0.1"
resolved "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz"
@ -4549,6 +4556,14 @@ strip-json-comments@^3.1.1:
resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
stripe@^14.11.0:
version "14.11.0"
resolved "https://registry.yarnpkg.com/stripe/-/stripe-14.11.0.tgz#1df63c31bcff3b136457c2b7584f917509e8030c"
integrity sha512-NmFEkDC0PldP7CQtdPgKs5dVZA/pF+IepldbmB+Kk9B4d7EBkWnbANp0y+/zJcbRGul48s8hmQzeqNWUlWW0wg==
dependencies:
"@types/node" ">=8.1.0"
qs "^6.11.0"
supports-color@^5.3.0:
version "5.5.0"
resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz"
@ -4807,6 +4822,11 @@ uid2@0.0.x:
resolved "https://registry.npmjs.org/uid2/-/uid2-0.0.4.tgz"
integrity sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA==
undici-types@~5.26.4:
version "5.26.5"
resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617"
integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==
universalify@^0.2.0:
version "0.2.0"
resolved "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz"

yarn.lock — 1221 changed lines; file diff suppressed because it is too large.