
Merge branch 'develop' of github.com:Budibase/budibase into fix/airgap-templates-call

Commit 3955b00bbc by mike12345567, 2023-06-19 18:57:17 +01:00
93 changed files with 2286 additions and 885 deletions

View file

@ -1,5 +1,5 @@
{
"version": "2.7.17",
"version": "2.7.25-alpha.6",
"npmClient": "yarn",
"packages": [
"packages/backend-core",

View file

@ -2,13 +2,13 @@
"name": "root",
"private": true,
"devDependencies": {
"@esbuild-plugins/node-resolve": "^0.2.2",
"@esbuild-plugins/tsconfig-paths": "^0.1.2",
"@nx/js": "16.2.1",
"@rollup/plugin-json": "^4.0.2",
"@typescript-eslint/parser": "5.45.0",
"babel-eslint": "^10.0.3",
"esbuild": "^0.17.18",
"esbuild-node-externals": "^1.7.0",
"eslint": "^7.28.0",
"eslint-plugin-cypress": "^2.11.3",
"eslint-plugin-svelte3": "^3.2.0",
@ -48,9 +48,9 @@
"kill-builder": "kill-port 3000",
"kill-server": "kill-port 4001 4002",
"kill-all": "yarn run kill-builder && yarn run kill-server",
"dev": "yarn run kill-all && lerna run --stream --parallel dev:builder --stream",
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream --parallel dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --stream --parallel dev:builder --scope @budibase/worker --scope @budibase/server",
"dev": "yarn run kill-all && lerna run --stream --parallel dev:builder --stream",
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream --parallel dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && yarn build --projects=@budibase/client && lerna run --stream --parallel dev:builder --scope @budibase/worker --scope @budibase/server",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream --parallel dev:built",
"dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"test": "lerna run --stream test --stream",

View file

@ -31,4 +31,6 @@ const config: Config.InitialOptions = {
coverageReporters: ["lcov", "json", "clover"],
}
process.env.DISABLE_PINO_LOGGER = "1"
export default config

View file

@ -27,7 +27,7 @@
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bull": "4.10.1",
"correlation-id": "4.0.0",

View file

@ -1,10 +1,11 @@
import * as google from "../sso/google"
import { Cookie } from "../../../constants"
import { clearCookie, getCookie } from "../../../utils"
import { doWithDB } from "../../../db"
import * as configs from "../../../configs"
import { BBContext, Database, SSOProfile } from "@budibase/types"
import * as cache from "../../../cache"
import * as utils from "../../../utils"
import { UserCtx, SSOProfile } from "@budibase/types"
import { ssoSaveUserNoOp } from "../sso/sso"
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
type Passport = {
@ -22,7 +23,7 @@ async function fetchGoogleCreds() {
export async function preAuth(
passport: Passport,
ctx: BBContext,
ctx: UserCtx,
next: Function
) {
// get the relevant config
@ -36,8 +37,8 @@ export async function preAuth(
ssoSaveUserNoOp
)
if (!ctx.query.appId || !ctx.query.datasourceId) {
ctx.throw(400, "appId and datasourceId query params not present.")
if (!ctx.query.appId) {
ctx.throw(400, "appId query param not present.")
}
return passport.authenticate(strategy, {
@ -49,7 +50,7 @@ export async function preAuth(
export async function postAuth(
passport: Passport,
ctx: BBContext,
ctx: UserCtx,
next: Function
) {
// get the relevant config
@ -57,7 +58,7 @@ export async function postAuth(
const platformUrl = await configs.getPlatformUrl({ tenantAware: false })
let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`
const authStateCookie = getCookie(ctx, Cookie.DatasourceAuth)
const authStateCookie = utils.getCookie(ctx, Cookie.DatasourceAuth)
return passport.authenticate(
new GoogleStrategy(
@ -69,33 +70,26 @@ export async function postAuth(
(
accessToken: string,
refreshToken: string,
profile: SSOProfile,
_profile: SSOProfile,
done: Function
) => {
clearCookie(ctx, Cookie.DatasourceAuth)
utils.clearCookie(ctx, Cookie.DatasourceAuth)
done(null, { accessToken, refreshToken })
}
),
{ successRedirect: "/", failureRedirect: "/error" },
async (err: any, tokens: string[]) => {
const baseUrl = `/builder/app/${authStateCookie.appId}/data`
// update the DB for the datasource with all the user info
await doWithDB(authStateCookie.appId, async (db: Database) => {
let datasource
try {
datasource = await db.get(authStateCookie.datasourceId)
} catch (err: any) {
if (err.status === 404) {
ctx.redirect(baseUrl)
}
const id = utils.newid()
await cache.store(
`datasource:creation:${authStateCookie.appId}:google:${id}`,
{
tokens,
}
if (!datasource.config) {
datasource.config = {}
}
datasource.config.auth = { type: "google", ...tokens }
await db.put(datasource)
ctx.redirect(`${baseUrl}/datasource/${authStateCookie.datasourceId}`)
})
)
ctx.redirect(`${baseUrl}/new?continue_google_setup=${id}`)
}
)(ctx, next)
}

View file

@ -1,12 +1,17 @@
import crypto from "crypto"
import fs from "fs"
import zlib from "zlib"
import env from "../environment"
import { join } from "path"
const ALGO = "aes-256-ctr"
const SEPARATOR = "-"
const ITERATIONS = 10000
const RANDOM_BYTES = 16
const STRETCH_LENGTH = 32
const SALT_LENGTH = 16
const IV_LENGTH = 16
export enum SecretOption {
API = "api",
ENCRYPTION = "encryption",
@ -31,15 +36,15 @@ export function getSecret(secretOption: SecretOption): string {
return secret
}
function stretchString(string: string, salt: Buffer) {
return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
function stretchString(secret: string, salt: Buffer) {
return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
}
export function encrypt(
input: string,
secretOption: SecretOption = SecretOption.API
) {
const salt = crypto.randomBytes(RANDOM_BYTES)
const salt = crypto.randomBytes(SALT_LENGTH)
const stretched = stretchString(getSecret(secretOption), salt)
const cipher = crypto.createCipheriv(ALGO, stretched, salt)
const base = cipher.update(input)
@ -60,3 +65,115 @@ export function decrypt(
const final = decipher.final()
return Buffer.concat([base, final]).toString()
}
export async function encryptFile(
{ dir, filename }: { dir: string; filename: string },
secret: string
) {
const outputFileName = `${filename}.enc`
const filePath = join(dir, filename)
const inputFile = fs.createReadStream(filePath)
const outputFile = fs.createWriteStream(join(dir, outputFileName))
const salt = crypto.randomBytes(SALT_LENGTH)
const iv = crypto.randomBytes(IV_LENGTH)
const stretched = stretchString(secret, salt)
const cipher = crypto.createCipheriv(ALGO, stretched, iv)
outputFile.write(salt)
outputFile.write(iv)
inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)
return new Promise<{ filename: string; dir: string }>(r => {
outputFile.on("finish", () => {
r({
filename: outputFileName,
dir,
})
})
})
}
async function getSaltAndIV(path: string) {
const fileStream = fs.createReadStream(path)
const salt = await readBytes(fileStream, SALT_LENGTH)
const iv = await readBytes(fileStream, IV_LENGTH)
fileStream.close()
return { salt, iv }
}
export async function decryptFile(
inputPath: string,
outputPath: string,
secret: string
) {
const { salt, iv } = await getSaltAndIV(inputPath)
const inputFile = fs.createReadStream(inputPath, {
start: SALT_LENGTH + IV_LENGTH,
})
const outputFile = fs.createWriteStream(outputPath)
const stretched = stretchString(secret, salt)
const decipher = crypto.createDecipheriv(ALGO, stretched, iv)
const unzip = zlib.createGunzip()
inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)
return new Promise<void>((res, rej) => {
outputFile.on("finish", () => {
outputFile.close()
res()
})
inputFile.on("error", e => {
outputFile.close()
rej(e)
})
decipher.on("error", e => {
outputFile.close()
rej(e)
})
unzip.on("error", e => {
outputFile.close()
rej(e)
})
outputFile.on("error", e => {
outputFile.close()
rej(e)
})
})
}
function readBytes(stream: fs.ReadStream, length: number) {
return new Promise<Buffer>((resolve, reject) => {
let bytesRead = 0
const data: Buffer[] = []
stream.on("readable", () => {
let chunk
while ((chunk = stream.read(length - bytesRead)) !== null) {
data.push(chunk)
bytesRead += chunk.length
}
resolve(Buffer.concat(data))
})
stream.on("end", () => {
reject(new Error("Insufficient data in the stream."))
})
stream.on("error", error => {
reject(error)
})
})
}
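For context, a minimal sketch of how the new file-encryption helpers chain together; the directory, file name, secret and import path below are placeholders, not values from this change:

import { join } from "path"
import { encryptFile, decryptFile } from "@budibase/backend-core/encryption" // illustrative import path

async function roundTrip() {
  // produces /tmp/backup.tar.gz.enc, prefixed with the random salt and IV
  const { dir, filename } = await encryptFile(
    { dir: "/tmp", filename: "backup.tar.gz" },
    "export-password"
  )
  // reads the salt and IV back from the file header, deciphers, then gunzips to the output path
  await decryptFile(join(dir, filename), "/tmp/backup.restored.tar.gz", "export-password")
}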

View file

@ -140,9 +140,13 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
* Gets the role object, this is mainly useful for two purposes, to check if the level exists and
* to check if the role inherits any others.
* @param {string|null} roleId The level ID to lookup.
* @param {object|null} opts options for the function, like whether to halt errors, instead return public.
* @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
*/
export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
export async function getRole(
roleId?: string,
opts?: { defaultPublic?: boolean }
): Promise<RoleDoc | undefined> {
if (!roleId) {
return undefined
}
@ -161,6 +165,9 @@ export async function getRole(roleId?: string): Promise<RoleDoc | undefined> {
// finalise the ID
role._id = getExternalRoleID(role._id)
} catch (err) {
if (!isBuiltin(roleId) && opts?.defaultPublic) {
return cloneDeep(BUILTIN_ROLES.PUBLIC)
}
// only throw an error if there is no role at all
if (Object.keys(role).length === 0) {
throw err
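For context, a minimal sketch of the new defaultPublic option; the custom role ID is a placeholder:

// Falls back to the built-in PUBLIC role when a non-builtin role can no longer be looked up,
// instead of surfacing the lookup error to the caller.
const role = await getRole("role_custom_abc", { defaultPublic: true })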

View file

@ -8,6 +8,8 @@
export let disabled = false
export let error = null
export let validate = null
export let indeterminate = false
export let compact = false
const dispatch = createEventDispatcher()
@ -21,11 +23,19 @@
}
</script>
<FancyField {error} {value} {validate} {disabled} clickable on:click={onChange}>
<FancyField
{error}
{value}
{validate}
{disabled}
{compact}
clickable
on:click={onChange}
>
<span>
<Checkbox {disabled} {value} />
<Checkbox {disabled} {value} {indeterminate} />
</span>
<div class="text">
<div class="text" class:compact>
{#if text}
{text}
{/if}
@ -47,6 +57,10 @@
line-clamp: 2;
-webkit-box-orient: vertical;
}
.text.compact {
font-size: 13px;
line-height: 15px;
}
.text > :global(*) {
font-size: inherit !important;
}

View file

@ -0,0 +1,68 @@
<script>
import FancyCheckbox from "./FancyCheckbox.svelte"
import FancyForm from "./FancyForm.svelte"
import { createEventDispatcher } from "svelte"
export let options = []
export let selected = []
export let showSelectAll = true
export let selectAllText = "Select all"
let selectedBooleans = reset()
const dispatch = createEventDispatcher()
$: updateSelected(selectedBooleans)
$: dispatch("change", selected)
$: allSelected = selected?.length === options.length
$: noneSelected = !selected?.length
function reset() {
return Array(options.length).fill(true)
}
function updateSelected(selectedArr) {
const array = []
for (let [i, isSelected] of Object.entries(selectedArr)) {
if (isSelected) {
array.push(options[i])
}
}
selected = array
}
function toggleSelectAll() {
if (allSelected === true) {
selectedBooleans = []
} else {
selectedBooleans = reset()
}
}
</script>
{#if options && Array.isArray(options)}
<div class="checkbox-group" class:has-select-all={showSelectAll}>
<FancyForm on:change>
{#if showSelectAll}
<FancyCheckbox
bind:value={allSelected}
on:change={toggleSelectAll}
text={selectAllText}
indeterminate={!allSelected && !noneSelected}
compact
/>
{/if}
{#each options as option, i}
<FancyCheckbox bind:value={selectedBooleans[i]} text={option} compact />
{/each}
</FancyForm>
</div>
{/if}
<style>
.checkbox-group.has-select-all :global(.fancy-field:first-of-type) {
background: var(--spectrum-global-color-gray-100);
}
.checkbox-group.has-select-all :global(.fancy-field:first-of-type:hover) {
background: var(--spectrum-global-color-gray-200);
}
</style>

View file

@ -11,6 +11,7 @@
export let value
export let ref
export let autoHeight
export let compact = false
const formContext = getContext("fancy-form")
const id = Math.random()
@ -42,6 +43,7 @@
class:disabled
class:focused
class:clickable
class:compact
class:auto-height={autoHeight}
>
<div class="content" on:click>
@ -61,7 +63,6 @@
<style>
.fancy-field {
max-width: 400px;
background: var(--spectrum-global-color-gray-75);
border: 1px solid var(--spectrum-global-color-gray-300);
border-radius: 4px;
@ -69,6 +70,12 @@
transition: border-color 130ms ease-out, background 130ms ease-out,
background 130ms ease-out;
color: var(--spectrum-global-color-gray-800);
--padding: 16px;
--height: 64px;
}
.fancy-field.compact {
--padding: 8px;
--height: 36px;
}
.fancy-field:hover {
border-color: var(--spectrum-global-color-gray-400);
@ -91,8 +98,8 @@
}
.content {
position: relative;
height: 64px;
padding: 0 16px;
height: var(--height);
padding: 0 var(--padding);
}
.fancy-field.auto-height .content {
height: auto;
@ -103,7 +110,7 @@
flex-direction: row;
justify-content: flex-start;
align-items: center;
gap: 16px;
gap: var(--padding);
}
.field {
flex: 1 1 auto;

View file

@ -4,4 +4,5 @@ export { default as FancySelect } from "./FancySelect.svelte"
export { default as FancyButton } from "./FancyButton.svelte"
export { default as FancyForm } from "./FancyForm.svelte"
export { default as FancyButtonRadio } from "./FancyButtonRadio.svelte"
export { default as FancyCheckboxGroup } from "./FancyCheckboxGroup.svelte"
export { default as ErrorMessage } from "./ErrorMessage.svelte"

View file

@ -9,6 +9,7 @@
export let text = null
export let disabled = false
export let size
export let indeterminate = false
const dispatch = createEventDispatcher()
const onChange = event => {
@ -22,6 +23,7 @@
class="spectrum-Checkbox spectrum-Checkbox--emphasized {sizeClass}"
class:is-invalid={!!error}
class:checked={value}
class:is-indeterminate={indeterminate}
>
<input
checked={value}

View file

@ -8,6 +8,7 @@
export let fixed = false
export let inline = false
export let disableCancel = false
const dispatch = createEventDispatcher()
let visible = fixed || inline
@ -38,7 +39,7 @@
}
export function cancel() {
if (!visible) {
if (!visible || disableCancel) {
return
}
dispatch("cancel")

View file

@ -5,10 +5,10 @@
<meta charset='utf8'>
<meta name='viewport' content='width=device-width'>
<title>Budibase</title>
<link href="/fonts/source-sans-pro/400.css" rel="stylesheet" />
<link href="/fonts/source-sans-pro/600.css" rel="stylesheet" />
<link href="/fonts/source-sans-pro/700.css" rel="stylesheet" />
<link href="/fonts/remixicon.css" rel="stylesheet" />
<link href="/builder/fonts/source-sans-pro/400.css" rel="stylesheet" />
<link href="/builder/fonts/source-sans-pro/600.css" rel="stylesheet" />
<link href="/builder/fonts/source-sans-pro/700.css" rel="stylesheet" />
<link href="/builder/fonts/remixicon.css" rel="stylesheet" />
</head>
<body id="app">

View file

@ -23,10 +23,11 @@ function prepareData(config) {
return datasource
}
export async function saveDatasource(config, skipFetch = false) {
export async function saveDatasource(config, { skipFetch, tablesFilter } = {}) {
const datasource = prepareData(config)
// Create datasource
const resp = await datasources.save(datasource, !skipFetch && datasource.plus)
const fetchSchema = !skipFetch && datasource.plus
const resp = await datasources.save(datasource, { fetchSchema, tablesFilter })
// update the tables incase datasource plus
await tables.fetch()
@ -41,6 +42,13 @@ export async function createRestDatasource(integration) {
export async function validateDatasourceConfig(config) {
const datasource = prepareData(config)
const resp = await API.validateDatasource(datasource)
return resp
return await API.validateDatasource(datasource)
}
export async function getDatasourceInfo(config) {
let datasource = config
if (!config._id) {
datasource = prepareData(config)
}
return await API.fetchInfoForDatasource(datasource)
}
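For reference, a sketch of the updated call shapes; the config object and table names are placeholders. The second argument to saveDatasource is now an options object rather than a bare skipFetch flag:

// fetch schema on save, restricted to the listed tables (plus datasources only)
const datasource = await saveDatasource(config, { tablesFilter: ["users", "orders"] })

// save without fetching any schema
await saveDatasource(config, { skipFetch: true })

// list the tables/sheets available before deciding what to sync
const { tableNames } = await getDatasourceInfo(config)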

View file

@ -74,6 +74,7 @@ const INITIAL_FRONTEND_STATE = {
propertyFocus: null,
builderSidePanel: false,
hasLock: true,
showPreview: false,
// URL params
selectedScreenId: null,

View file

@ -13,6 +13,8 @@
Modal,
notifications,
Icon,
Checkbox,
DatePicker,
} from "@budibase/bbui"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
import { automationStore, selectedAutomation } from "builderStore"
@ -306,6 +308,11 @@
drawer.hide()
}
function canShowField(key, value) {
const dependsOn = value.dependsOn
return !dependsOn || !!inputData[dependsOn]
}
onMount(async () => {
try {
await environment.loadVariables()
@ -317,210 +324,233 @@
<div class="fields">
{#each deprecatedSchemaProperties as [key, value]}
<div class="block-field">
{#if key !== "fields"}
<Label
tooltip={value.title === "Binding / Value"
? "If using the String input type, please use a comma or newline separated string"
: null}>{value.title || (key === "row" ? "Table" : key)}</Label
>
{/if}
{#if value.type === "string" && value.enum}
<Select
on:change={e => onChange(e, key)}
value={inputData[key]}
placeholder={false}
options={value.enum}
getOptionLabel={(x, idx) => (value.pretty ? value.pretty[idx] : x)}
/>
{:else if value.type === "json"}
<Editor
editorHeight="250"
editorWidth="448"
mode="json"
value={inputData[key]?.value}
on:change={e => {
/**
* TODO - Remove after November 2023
* *******************************
* Code added to provide backwards compatibility between Values 1,2,3,4,5
* and the new JSON body.
*/
delete inputData.value1
delete inputData.value2
delete inputData.value3
delete inputData.value4
delete inputData.value5
/***********************/
onChange(e, key)
}}
/>
{:else if value.customType === "column"}
<Select
on:change={e => onChange(e, key)}
value={inputData[key]}
options={Object.keys(table?.schema || {})}
/>
{:else if value.customType === "filters"}
<ActionButton on:click={drawer.show}>Define filters</ActionButton>
<Drawer bind:this={drawer} {fillWidth} title="Filtering">
<Button cta slot="buttons" on:click={() => saveFilters(key)}>
Save
</Button>
<FilterDrawer
slot="body"
{filters}
{bindings}
{schemaFields}
datasource={{ type: "table", tableId }}
panel={AutomationBindingPanel}
fillWidth
on:change={e => (tempFilters = e.detail)}
/>
</Drawer>
{:else if value.customType === "password"}
<Input
type="password"
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "email"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type="email"
on:change={e => onChange(e, key)}
{bindings}
fillWidth
updateOnChange={false}
/>
{:else}
<DrawerBindableInput
fillWidth
title={value.title}
panel={AutomationBindingPanel}
type="email"
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
allowJS={false}
updateOnChange={false}
drawerLeft="260px"
/>
{#if canShowField(key, value)}
<div class="block-field">
{#if key !== "fields" && value.type !== "boolean"}
<Label
tooltip={value.title === "Binding / Value"
? "If using the String input type, please use a comma or newline separated string"
: null}>{value.title || (key === "row" ? "Table" : key)}</Label
>
{/if}
{:else if value.customType === "query"}
<QuerySelector
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "cron"}
<CronBuilder on:change={e => onChange(e, key)} value={inputData[key]} />
{:else if value.customType === "queryParams"}
<QueryParamSelector
on:change={e => onChange(e, key)}
value={inputData[key]}
{bindings}
/>
{:else if value.customType === "table"}
<TableSelector
{isTrigger}
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
{:else if value.customType === "row"}
<RowSelector
{block}
value={inputData[key]}
meta={inputData["meta"] || {}}
on:change={e => {
if (e.detail?.key) {
onChange(e, e.detail.key)
} else {
onChange(e, key)
}
}}
{bindings}
{isTestModal}
{isUpdateRow}
/>
{:else if value.customType === "webhookUrl"}
<WebhookDisplay
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "fields"}
<FieldSelector
{block}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
{isTestModal}
/>
{:else if value.customType === "triggerSchema"}
<SchemaSetup on:change={e => onChange(e, key)} value={inputData[key]} />
{:else if value.customType === "code"}
<CodeEditorModal>
<CodeEditor
value={inputData[key]}
on:change={e => {
// need to pass without the value inside
onChange({ detail: e.detail }, key)
inputData[key] = e.detail
}}
completions={[
jsAutocomplete([
...bindingsToCompletions(bindings, EditorModes.JS),
]),
]}
mode={EditorModes.JS}
height={500}
/>
<div class="messaging">
<Icon name="FlashOn" />
<div class="messaging-wrap">
<div>Add available bindings by typing <strong>$</strong></div>
</div>
</div>
</CodeEditorModal>
{:else if value.customType === "loopOption"}
<Select
on:change={e => onChange(e, key)}
autoWidth
value={inputData[key]}
options={["Array", "String"]}
defaultValue={"Array"}
/>
{:else if value.type === "string" || value.type === "number" || value.type === "integer"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type={value.customType}
{#if value.type === "string" && value.enum && canShowField(key)}
<Select
on:change={e => onChange(e, key)}
{bindings}
updateOnChange={false}
value={inputData[key]}
placeholder={false}
options={value.enum}
getOptionLabel={(x, idx) => (value.pretty ? value.pretty[idx] : x)}
/>
{:else}
<div class="test">
{:else if value.type === "json"}
<Editor
editorHeight="250"
editorWidth="448"
mode="json"
value={inputData[key]?.value}
on:change={e => {
/**
* TODO - Remove after November 2023
* *******************************
* Code added to provide backwards compatibility between Values 1,2,3,4,5
* and the new JSON body.
*/
delete inputData.value1
delete inputData.value2
delete inputData.value3
delete inputData.value4
delete inputData.value5
/***********************/
onChange(e, key)
}}
/>
{:else if value.type === "boolean"}
<div style="margin-top: 10px">
<Checkbox
text={value.title}
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
</div>
{:else if value.type === "date"}
<DatePicker
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
{:else if value.customType === "column"}
<Select
on:change={e => onChange(e, key)}
value={inputData[key]}
options={Object.keys(table?.schema || {})}
/>
{:else if value.customType === "filters"}
<ActionButton on:click={drawer.show}>Define filters</ActionButton>
<Drawer bind:this={drawer} {fillWidth} title="Filtering">
<Button cta slot="buttons" on:click={() => saveFilters(key)}>
Save
</Button>
<FilterDrawer
slot="body"
{filters}
{bindings}
{schemaFields}
datasource={{ type: "table", tableId }}
panel={AutomationBindingPanel}
fillWidth
on:change={e => (tempFilters = e.detail)}
/>
</Drawer>
{:else if value.customType === "password"}
<Input
type="password"
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "email"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type="email"
on:change={e => onChange(e, key)}
{bindings}
fillWidth
updateOnChange={false}
/>
{:else}
<DrawerBindableInput
fillWidth={true}
fillWidth
title={value.title}
panel={AutomationBindingPanel}
type={value.customType}
type="email"
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
allowJS={false}
updateOnChange={false}
placeholder={value.customType === "queryLimit" ? queryLimit : ""}
drawerLeft="260px"
/>
</div>
{/if}
{:else if value.customType === "query"}
<QuerySelector
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "cron"}
<CronBuilder
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "queryParams"}
<QueryParamSelector
on:change={e => onChange(e, key)}
value={inputData[key]}
{bindings}
/>
{:else if value.customType === "table"}
<TableSelector
{isTrigger}
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
{:else if value.customType === "row"}
<RowSelector
{block}
value={inputData[key]}
meta={inputData["meta"] || {}}
on:change={e => {
if (e.detail?.key) {
onChange(e, e.detail.key)
} else {
onChange(e, key)
}
}}
{bindings}
{isTestModal}
{isUpdateRow}
/>
{:else if value.customType === "webhookUrl"}
<WebhookDisplay
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "fields"}
<FieldSelector
{block}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
{isTestModal}
/>
{:else if value.customType === "triggerSchema"}
<SchemaSetup
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "code"}
<CodeEditorModal>
<CodeEditor
value={inputData[key]}
on:change={e => {
// need to pass without the value inside
onChange({ detail: e.detail }, key)
inputData[key] = e.detail
}}
completions={[
jsAutocomplete([
...bindingsToCompletions(bindings, EditorModes.JS),
]),
]}
mode={EditorModes.JS}
height={500}
/>
<div class="messaging">
<Icon name="FlashOn" />
<div class="messaging-wrap">
<div>Add available bindings by typing <strong>$</strong></div>
</div>
</div>
</CodeEditorModal>
{:else if value.customType === "loopOption"}
<Select
on:change={e => onChange(e, key)}
autoWidth
value={inputData[key]}
options={["Array", "String"]}
defaultValue={"Array"}
/>
{:else if value.type === "string" || value.type === "number" || value.type === "integer"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type={value.customType}
on:change={e => onChange(e, key)}
{bindings}
updateOnChange={false}
/>
{:else}
<div class="test">
<DrawerBindableInput
fillWidth={true}
title={value.title}
panel={AutomationBindingPanel}
type={value.customType}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
updateOnChange={false}
placeholder={value.customType === "queryLimit"
? queryLimit
: ""}
drawerLeft="260px"
/>
</div>
{/if}
{/if}
{/if}
</div>
</div>
{/if}
{/each}
</div>
<Modal bind:this={webhookModal} width="30%">

View file

@ -8,7 +8,7 @@
notifications,
Modal,
Table,
Toggle,
FancyCheckboxGroup,
} from "@budibase/bbui"
import { datasources, integrations, tables } from "stores/backend"
import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
@ -16,7 +16,7 @@
import ArrayRenderer from "components/common/renderers/ArrayRenderer.svelte"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import { goto } from "@roxi/routify"
import ValuesList from "components/common/ValuesList.svelte"
import { getDatasourceInfo } from "builderStore/datasource"
export let datasource
export let save
@ -34,7 +34,7 @@
let selectedFromRelationship, selectedToRelationship
let confirmDialog
let specificTables = null
let requireSpecificTables = false
let tableList
$: integration = datasource && $integrations[datasource.source]
$: plusTables = datasource?.plus
@ -153,30 +153,28 @@
warning={false}
title="Confirm table fetch"
>
<Toggle
bind:value={requireSpecificTables}
on:change={e => {
requireSpecificTables = e.detail
specificTables = null
}}
thin
text="Fetch listed tables only (one per line)"
/>
{#if requireSpecificTables}
<ValuesList label="" bind:values={specificTables} />
{/if}
<br />
<Body>
If you have fetched tables from this database before, this action may
overwrite any changes you made after your initial fetch.
</Body>
<br />
<div class="table-checkboxes">
<FancyCheckboxGroup options={tableList} bind:selected={specificTables} />
</div>
</ConfirmDialog>
<Divider />
<div class="query-header">
<Heading size="S">Tables</Heading>
<div class="table-buttons">
<Button secondary on:click={() => confirmDialog.show()}>
<Button
secondary
on:click={async () => {
const info = await getDatasourceInfo(datasource)
tableList = info.tableNames
confirmDialog.show()
}}
>
Fetch tables
</Button>
<Button cta icon="Add" on:click={createNewTable}>New table</Button>
@ -246,4 +244,8 @@
display: flex;
gap: var(--spacing-m);
}
.table-checkboxes {
width: 100%;
}
</style>

View file

@ -3,8 +3,6 @@
import { store } from "builderStore"
import { auth } from "stores/portal"
export let preAuthStep
export let datasource
export let disabled
export let samePage
@ -15,18 +13,8 @@
class:disabled
{disabled}
on:click={async () => {
let ds = datasource
let appId = $store.appId
if (!ds) {
const resp = await preAuthStep()
if (resp.datasource && resp.appId) {
ds = resp.datasource
appId = resp.appId
} else {
ds = resp
}
}
const url = `/api/global/auth/${tenantId}/datasource/google?datasourceId=${ds._id}&appId=${appId}`
const url = `/api/global/auth/${tenantId}/datasource/google?appId=${appId}`
if (samePage) {
window.location = url
} else {

View file

@ -44,6 +44,9 @@ export default ICONS
export function getIcon(integrationType, schema) {
const integrationList = get(integrations)
if (!integrationList) {
return
}
if (integrationList[integrationType]?.iconUrl) {
return { url: integrationList[integrationType].iconUrl }
} else if (schema?.custom || !ICONS[integrationType]) {

View file

@ -1,12 +1,19 @@
<script>
import { goto } from "@roxi/routify"
import { ModalContent, notifications, Body, Layout } from "@budibase/bbui"
import {
ModalContent,
notifications,
Body,
Layout,
FancyCheckboxGroup,
} from "@budibase/bbui"
import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
import { IntegrationNames } from "constants/backend"
import cloneDeep from "lodash/cloneDeepWith"
import {
saveDatasource as save,
validateDatasourceConfig,
getDatasourceInfo,
} from "builderStore/datasource"
import { DatasourceFeature } from "@budibase/types"
@ -15,11 +22,24 @@
// kill the reference so the input isn't saved
let datasource = cloneDeep(integration)
let isValid = false
let fetchTableStep = false
let selectedTables = []
let tableList = []
$: name =
IntegrationNames[datasource.type] || datasource.name || datasource.type
IntegrationNames[datasource?.type] || datasource?.name || datasource?.type
$: datasourcePlus = datasource?.plus
$: title = fetchTableStep ? "Fetch your tables" : `Connect to ${name}`
$: confirmText = fetchTableStep
? "Continue"
: datasourcePlus
? "Connect"
: "Save and continue to query"
async function validateConfig() {
if (!integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
return true
}
const displayError = message =>
notifications.error(message ?? "Error validating datasource")
@ -47,35 +67,75 @@
if (!datasource.name) {
datasource.name = name
}
const resp = await save(datasource)
const opts = {}
if (datasourcePlus && selectedTables) {
opts.tablesFilter = selectedTables
}
const resp = await save(datasource, opts)
$goto(`./datasource/${resp._id}`)
notifications.success(`Datasource created successfully.`)
notifications.success("Datasource created successfully.")
} catch (err) {
notifications.error(err?.message ?? "Error saving datasource")
// prevent the modal from closing
return false
}
}
async function nextStep() {
let connected = true
if (datasourcePlus) {
connected = await validateConfig()
}
if (!connected) {
return false
}
if (datasourcePlus && !fetchTableStep) {
notifications.success("Connected to datasource successfully.")
const info = await getDatasourceInfo(datasource)
tableList = info.tableNames
fetchTableStep = true
return false
} else {
await saveDatasource()
return true
}
}
</script>
<ModalContent
title={`Connect to ${name}`}
onConfirm={() => saveDatasource()}
confirmText={datasource.plus ? "Connect" : "Save and continue to query"}
cancelText="Back"
showSecondaryButton={datasource.plus}
{title}
onConfirm={() => nextStep()}
{confirmText}
cancelText={fetchTableStep ? "Cancel" : "Back"}
showSecondaryButton={datasourcePlus}
size="L"
disabled={!isValid}
>
<Layout noPadding>
<Body size="XS"
>Connect your database to Budibase using the config below.
<Body size="XS">
{#if !fetchTableStep}
Connect your database to Budibase using the config below
{:else}
Choose what tables you want to sync with Budibase
{/if}
</Body>
</Layout>
<IntegrationConfigForm
schema={datasource.schema}
bind:datasource
creating={true}
on:valid={e => (isValid = e.detail)}
/>
{#if !fetchTableStep}
<IntegrationConfigForm
schema={datasource?.schema}
bind:datasource
creating={true}
on:valid={e => (isValid = e.detail)}
/>
{:else}
<div class="table-checkboxes">
<FancyCheckboxGroup options={tableList} bind:selected={selectedTables} />
</div>
{/if}
</ModalContent>
<style>
.table-checkboxes {
width: 100%;
}
</style>

View file

@ -1,43 +1,207 @@
<script>
import { ModalContent, Body, Layout, Link } from "@budibase/bbui"
import { IntegrationNames } from "constants/backend"
import cloneDeep from "lodash/cloneDeepWith"
import {
Body,
FancyCheckboxGroup,
InlineAlert,
Layout,
Link,
ModalContent,
notifications,
} from "@budibase/bbui"
import { IntegrationNames, IntegrationTypes } from "constants/backend"
import GoogleButton from "../_components/GoogleButton.svelte"
import { saveDatasource as save } from "builderStore/datasource"
import { organisation } from "stores/portal"
import { onMount } from "svelte"
import { onDestroy, onMount } from "svelte"
import {
getDatasourceInfo,
saveDatasource,
validateDatasourceConfig,
} from "builderStore/datasource"
import cloneDeep from "lodash/cloneDeepWith"
import IntegrationConfigForm from "../TableIntegrationMenu/IntegrationConfigForm.svelte"
import { goto } from "@roxi/routify"
import { DatasourceFeature } from "@budibase/types"
import { API } from "api"
export let integration
export let continueSetupId = false
// kill the reference so the input isn't saved
let datasource = cloneDeep(integration)
datasource.config.continueSetupId = continueSetupId
let { schema } = datasource
$: isGoogleConfigured = !!$organisation.googleDatasourceConfigured
onMount(async () => {
await organisation.init()
})
const integrationName = IntegrationNames[IntegrationTypes.GOOGLE_SHEETS]
export const GoogleDatasouceConfigStep = {
AUTH: "auth",
SET_URL: "set_url",
SET_SHEETS: "set_sheets",
}
let step = continueSetupId
? GoogleDatasouceConfigStep.SET_URL
: GoogleDatasouceConfigStep.AUTH
let isValid = false
let allSheets
let selectedSheets
let setSheetsErrorTitle, setSheetsErrorMessage
$: modalConfig = {
[GoogleDatasouceConfigStep.AUTH]: {
title: `Connect to ${integrationName}`,
},
[GoogleDatasouceConfigStep.SET_URL]: {
title: `Connect your spreadsheet`,
confirmButtonText: "Connect",
onConfirm: async () => {
const checkConnection =
integration.features[DatasourceFeature.CONNECTION_CHECKING]
if (checkConnection) {
const resp = await validateDatasourceConfig(datasource)
if (!resp.connected) {
notifications.error(`Unable to connect - ${resp.error}`)
return false
}
}
try {
datasource = await saveDatasource(datasource, {
tablesFilter: selectedSheets,
skipFetch: true,
})
} catch (err) {
notifications.error(err?.message ?? "Error saving datasource")
// prevent the modal from closing
return false
}
if (!integration.features[DatasourceFeature.FETCH_TABLE_NAMES]) {
notifications.success(`Datasource created successfully.`)
return
}
const info = await getDatasourceInfo(datasource)
allSheets = info.tableNames
step = GoogleDatasouceConfigStep.SET_SHEETS
notifications.success(
checkConnection
? "Connection Successful"
: `Datasource created successfully.`
)
// prevent the modal from closing
return false
},
},
[GoogleDatasouceConfigStep.SET_SHEETS]: {
title: `Choose your sheets`,
confirmButtonText: selectedSheets?.length
? "Fetch sheets"
: "Continue without fetching",
onConfirm: async () => {
try {
if (selectedSheets.length) {
await API.buildDatasourceSchema({
datasourceId: datasource._id,
tablesFilter: selectedSheets,
})
}
return
} catch (err) {
const message = err?.message ?? "Error fetching the sheets"
// Handling message with format: Error title - error description
const indexSeparator = message.indexOf(" - ")
if (indexSeparator >= 0) {
setSheetsErrorTitle = message.substr(0, indexSeparator)
setSheetsErrorMessage =
message[indexSeparator + 3].toUpperCase() +
message.substr(indexSeparator + 4)
} else {
setSheetsErrorTitle = null
setSheetsErrorMessage = message
}
// prevent the modal from closing
return false
}
},
},
}
// This will handle the user closing the modal pressing outside the modal
onDestroy(() => {
if (step === GoogleDatasouceConfigStep.SET_SHEETS) {
$goto(`./datasource/${datasource._id}`)
}
})
</script>
<ModalContent
title={`Connect to ${IntegrationNames[datasource.type]}`}
cancelText="Back"
title={modalConfig[step].title}
cancelText="Cancel"
size="L"
confirmText={modalConfig[step].confirmButtonText}
showConfirmButton={!!modalConfig[step].onConfirm}
onConfirm={modalConfig[step].onConfirm}
disabled={!isValid}
>
<!-- check true and false directly, don't render until flag is set -->
{#if isGoogleConfigured === true}
<Layout noPadding>
{#if step === GoogleDatasouceConfigStep.AUTH}
<!-- check true and false directly, don't render until flag is set -->
{#if isGoogleConfigured === true}
<Layout noPadding>
<Body size="S"
>Authenticate with your google account to use the {integrationName} integration.</Body
>
</Layout>
<GoogleButton samePage />
{:else if isGoogleConfigured === false}
<Body size="S"
>Authenticate with your google account to use the {IntegrationNames[
datasource.type
]} integration.</Body
>Google authentication is not enabled, please complete Google SSO
configuration.</Body
>
<Link href="/builder/portal/settings/auth">Configure Google SSO</Link>
{/if}
{/if}
{#if step === GoogleDatasouceConfigStep.SET_URL}
<Layout noPadding no>
<Body size="S">Add the URL of the sheet you want to connect.</Body>
<IntegrationConfigForm
{schema}
bind:datasource
creating={true}
on:valid={e => (isValid = e.detail)}
/>
</Layout>
{/if}
{#if step === GoogleDatasouceConfigStep.SET_SHEETS}
<Layout noPadding no>
<Body size="S">Select which spreadsheets you want to connect.</Body>
<FancyCheckboxGroup
options={allSheets}
bind:selected={selectedSheets}
selectAllText="Select all sheets"
/>
{#if setSheetsErrorTitle || setSheetsErrorMessage}
<InlineAlert
type="error"
header={setSheetsErrorTitle}
message={setSheetsErrorMessage}
/>
{/if}
</Layout>
<GoogleButton preAuthStep={() => save(datasource, true)} />
{:else if isGoogleConfigured === false}
<Body size="S"
>Google authentication is not enabled, please complete Google SSO
configuration.</Body
>
<Link href="/builder/portal/settings/auth">Configure Google SSO</Link>
{/if}
</ModalContent>

View file

@ -69,7 +69,7 @@
name: "App",
description: "",
icon: "Play",
action: () => window.open(`/${$store.appId}`),
action: () => store.update(state => ({ ...state, showPreview: true })),
},
{
type: "Preview",

View file

@ -62,7 +62,10 @@
}
const previewApp = () => {
window.open(`/${application}`)
store.update(state => ({
...state,
showPreview: true,
}))
}
const viewApp = () => {

View file

@ -11,6 +11,7 @@
import TemplateCard from "components/common/TemplateCard.svelte"
import createFromScratchScreen from "builderStore/store/screenTemplates/createFromScratchScreen"
import { Roles } from "constants/backend"
import { lowercase } from "helpers"
export let template
@ -19,6 +20,7 @@
const values = writable({ name: "", url: null })
const validation = createValidationStore()
const encryptionValidation = createValidationStore()
$: {
const { url } = $values
@ -27,8 +29,11 @@
...$values,
url: url?.[0] === "/" ? url.substring(1, url.length) : url,
})
encryptionValidation.check({ ...$values })
}
$: encryptedFile = $values.file?.name?.endsWith(".enc.tar.gz")
onMount(async () => {
const lastChar = $auth.user?.firstName
? $auth.user?.firstName[$auth.user?.firstName.length - 1]
@ -87,6 +92,9 @@
appValidation.name(validation, { apps: applications })
appValidation.url(validation, { apps: applications })
appValidation.file(validation, { template })
encryptionValidation.addValidatorType("encryptionPassword", "text", true)
// init validation
const { url } = $values
validation.check({
@ -110,6 +118,9 @@
data.append("templateName", template.name)
data.append("templateKey", template.key)
data.append("templateFile", $values.file)
if ($values.encryptionPassword?.trim()) {
data.append("encryptionPassword", $values.encryptionPassword.trim())
}
}
// Create App
@ -143,67 +154,119 @@
$goto(`/builder/app/${createdApp.instance._id}`)
} catch (error) {
creating = false
console.error(error)
notifications.error("Error creating app")
throw error
}
}
const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }
let currentStep = Step.CONFIG
$: stepConfig = {
[Step.CONFIG]: {
title: "Create your app",
confirmText: template?.fromFile ? "Import app" : "Create app",
onConfirm: async () => {
if (encryptedFile) {
currentStep = Step.SET_PASSWORD
return false
} else {
try {
await createNewApp()
} catch (error) {
notifications.error("Error creating app")
}
}
},
isValid: $validation.valid,
},
[Step.SET_PASSWORD]: {
title: "Provide the export password",
confirmText: "Import app",
onConfirm: async () => {
try {
await createNewApp()
} catch (e) {
let message = "Error creating app"
if (e.message) {
message += `: ${lowercase(e.message)}`
}
notifications.error(message)
return false
}
},
isValid: $encryptionValidation.valid,
},
}
</script>
<ModalContent
title={"Create your app"}
confirmText={template?.fromFile ? "Import app" : "Create app"}
onConfirm={createNewApp}
disabled={!$validation.valid}
title={stepConfig[currentStep].title}
confirmText={stepConfig[currentStep].confirmText}
onConfirm={stepConfig[currentStep].onConfirm}
disabled={!stepConfig[currentStep].isValid}
>
{#if template && !template?.fromFile}
<TemplateCard
name={template.name}
imageSrc={template.image}
backgroundColour={template.background}
overlayEnabled={false}
icon={template.icon}
/>
{/if}
{#if template?.fromFile}
<Dropzone
error={$validation.touched.file && $validation.errors.file}
gallery={false}
label="File to import"
value={[$values.file]}
on:change={e => {
$values.file = e.detail?.[0]
$validation.touched.file = true
}}
/>
{/if}
<Input
autofocus={true}
bind:value={$values.name}
disabled={creating}
error={$validation.touched.name && $validation.errors.name}
on:blur={() => ($validation.touched.name = true)}
on:change={nameToUrl($values.name)}
label="Name"
placeholder={defaultAppName}
/>
<span>
<Input
bind:value={$values.url}
disabled={creating}
error={$validation.touched.url && $validation.errors.url}
on:blur={() => ($validation.touched.url = true)}
on:change={tidyUrl($values.url)}
label="URL"
placeholder={$values.url
? $values.url
: `/${resolveAppUrl(template, $values.name)}`}
/>
{#if $values.url && $values.url !== "" && !$validation.errors.url}
<div class="app-server" title={appUrl}>
{appUrl}
</div>
{#if currentStep === Step.CONFIG}
{#if template && !template?.fromFile}
<TemplateCard
name={template.name}
imageSrc={template.image}
backgroundColour={template.background}
overlayEnabled={false}
icon={template.icon}
/>
{/if}
</span>
{#if template?.fromFile}
<Dropzone
error={$validation.touched.file && $validation.errors.file}
gallery={false}
label="File to import"
value={[$values.file]}
on:change={e => {
$values.file = e.detail?.[0]
$validation.touched.file = true
}}
/>
{/if}
<Input
autofocus={true}
bind:value={$values.name}
disabled={creating}
error={$validation.touched.name && $validation.errors.name}
on:blur={() => ($validation.touched.name = true)}
on:change={nameToUrl($values.name)}
label="Name"
placeholder={defaultAppName}
/>
<span>
<Input
bind:value={$values.url}
disabled={creating}
error={$validation.touched.url && $validation.errors.url}
on:blur={() => ($validation.touched.url = true)}
on:change={tidyUrl($values.url)}
label="URL"
placeholder={$values.url
? $values.url
: `/${resolveAppUrl(template, $values.name)}`}
/>
{#if $values.url && $values.url !== "" && !$validation.errors.url}
<div class="app-server" title={appUrl}>
{appUrl}
</div>
{/if}
</span>
{/if}
{#if currentStep === Step.SET_PASSWORD}
<Input
autofocus={true}
label="Imported file password"
type="password"
bind:value={$values.encryptionPassword}
disabled={creating}
on:blur={() => ($encryptionValidation.touched.encryptionPassword = true)}
error={$encryptionValidation.touched.encryptionPassword &&
$encryptionValidation.errors.encryptionPassword}
/>
{/if}
</ModalContent>
<style>

View file

@ -1,27 +1,128 @@
<script>
import { ModalContent, Toggle, Body, InlineAlert } from "@budibase/bbui"
import {
ModalContent,
Toggle,
Body,
InlineAlert,
Input,
notifications,
} from "@budibase/bbui"
import { createValidationStore } from "helpers/validation/yup"
export let app
export let published
let excludeRows = false
let includeInternalTablesRows = true
let encypt = true
$: title = published ? "Export published app" : "Export latest app"
$: confirmText = published ? "Export published" : "Export latest"
let password = null
const validation = createValidationStore()
validation.addValidatorType("password", "password", true)
$: validation.observe("password", password)
const exportApp = () => {
const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }
let currentStep = Step.CONFIG
$: exportButtonText = published ? "Export published" : "Export latest"
$: stepConfig = {
[Step.CONFIG]: {
title: published ? "Export published app" : "Export latest app",
confirmText: encypt ? "Continue" : exportButtonText,
onConfirm: () => {
if (!encypt) {
exportApp()
} else {
currentStep = Step.SET_PASSWORD
return false
}
},
isValid: true,
},
[Step.SET_PASSWORD]: {
title: "Add password to encrypt your export",
confirmText: exportButtonText,
onConfirm: async () => {
await validation.check({ password })
if (!$validation.valid) {
return false
}
exportApp(password)
},
isValid: $validation.valid,
},
}
const exportApp = async () => {
const id = published ? app.prodId : app.devId
const appName = encodeURIComponent(app.name)
window.location = `/api/backups/export?appId=${id}&appname=${appName}&excludeRows=${excludeRows}`
const url = `/api/backups/export?appId=${id}`
await downloadFile(url, {
excludeRows: !includeInternalTablesRows,
encryptPassword: password,
})
}
async function downloadFile(url, body) {
try {
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(body),
})
if (response.ok) {
const contentDisposition = response.headers.get("Content-Disposition")
const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
contentDisposition
)
const filename = matches[1].replace(/['"]/g, "")
const url = URL.createObjectURL(await response.blob())
const link = document.createElement("a")
link.href = url
link.download = filename
link.click()
URL.revokeObjectURL(url)
} else {
notifications.error("Error exporting the app.")
}
} catch (error) {
notifications.error(error.message || "Error downloading the exported app")
}
}
</script>
<ModalContent {title} {confirmText} onConfirm={exportApp}>
<InlineAlert
header="Do not share your budibase application exports publicly as they may contain sensitive information such as database credentials or secret keys."
/>
<Body
>Apps can be exported with or without data that is within internal tables -
select this below.</Body
>
<Toggle text="Exclude Rows" bind:value={excludeRows} />
<ModalContent
title={stepConfig[currentStep].title}
confirmText={stepConfig[currentStep].confirmText}
onConfirm={stepConfig[currentStep].onConfirm}
disabled={!stepConfig[currentStep].isValid}
>
{#if currentStep === Step.CONFIG}
<Body>
<Toggle
text="Export rows from internal tables"
bind:value={includeInternalTablesRows}
/>
<Toggle text="Encrypt my export" bind:value={encypt} />
</Body>
{#if !encypt}
<InlineAlert
header="Do not share your budibase application exports publicly as they may contain sensitive information such as database credentials or secret keys."
/>
{/if}
{/if}
{#if currentStep === Step.SET_PASSWORD}
<Input
type="password"
label="Password"
placeholder="Type here..."
bind:value={password}
error={$validation.errors.password}
/>
{/if}
</ModalContent>
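For context, the export options now travel in a POST body rather than the query string; a sketch of the equivalent request, with a placeholder app ID and password:

await fetch("/api/backups/export?appId=app_dev_123", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    excludeRows: false, // inverse of the "Export rows from internal tables" toggle
    encryptPassword: "hunter2", // stays null when "Encrypt my export" is switched off
  }),
})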

View file

@ -6,7 +6,6 @@ export function createValidationStore(initialValue, ...validators) {
let touched = false
const value = writable(initialValue || "")
const error = derived(value, $v => validate($v, validators))
const touchedStore = derived(value, () => {
if (!touched) {
touched = true
@ -14,6 +13,10 @@ export function createValidationStore(initialValue, ...validators) {
}
return touched
})
const error = derived(
[value, touchedStore],
([$v, $t]) => $t && validate($v, validators)
)
return [value, error, touchedStore]
}

View file

@ -5,6 +5,7 @@ import { notifications } from "@budibase/bbui"
export const createValidationStore = () => {
const DEFAULT = {
values: {},
errors: {},
touched: {},
valid: false,
@ -33,6 +34,9 @@ export const createValidationStore = () => {
case "email":
propertyValidator = string().email().nullable()
break
case "password":
propertyValidator = string().nullable()
break
default:
propertyValidator = string().nullable()
}
@ -41,9 +45,68 @@ export const createValidationStore = () => {
propertyValidator = propertyValidator.required()
}
// We want to do this after the possible required validation, to prioritise the required error
switch (type) {
case "password":
propertyValidator = propertyValidator.min(8)
break
}
validator[propertyName] = propertyValidator
}
const observe = async (propertyName, value) => {
const values = get(validation).values
let fieldIsValid
if (!Object.prototype.hasOwnProperty.call(values, propertyName)) {
// Initial setup
values[propertyName] = value
return
}
if (value === values[propertyName]) {
return
}
const obj = object().shape(validator)
try {
validation.update(store => {
store.errors[propertyName] = null
return store
})
await obj.validateAt(propertyName, { [propertyName]: value })
fieldIsValid = true
} catch (error) {
const [fieldError] = error.errors
if (fieldError) {
validation.update(store => {
store.errors[propertyName] = capitalise(fieldError)
store.valid = false
return store
})
}
}
if (fieldIsValid) {
// Validate the rest of the fields
try {
await obj.validate(
{ ...values, [propertyName]: value },
{ abortEarly: false }
)
validation.update(store => {
store.valid = true
return store
})
} catch {
validation.update(store => {
store.valid = false
return store
})
}
}
}
const check = async values => {
const obj = object().shape(validator)
// clear the previous errors
@ -87,5 +150,6 @@ export const createValidationStore = () => {
check,
addValidator,
addValidatorType,
observe,
}
}
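A small sketch of how a consumer wires up the new observe API, mirroring the export modal above; the field name is arbitrary:

const validation = createValidationStore()
validation.addValidatorType("password", "password", true) // required, minimum 8 characters

// re-validate reactively whenever the bound value changes (Svelte reactive statement)
$: validation.observe("password", password)

// $validation.errors.password and $validation.valid update as the value changes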

View file

@ -0,0 +1,91 @@
<script>
import { onMount } from "svelte"
import { fade, fly } from "svelte/transition"
import { store, selectedScreen } from "builderStore"
import { ProgressCircle } from "@budibase/bbui"
$: route = $selectedScreen?.routing.route || "/"
$: src = `/${$store.appId}#${route}`
const close = () => {
store.update(state => ({
...state,
showPreview: false,
}))
}
onMount(() => {
window.closePreview = () => {
store.update(state => ({
...state,
showPreview: false,
}))
}
})
</script>
<div
class="preview-overlay"
transition:fade={{ duration: 260 }}
on:click|self={close}
>
<div
class="container spectrum {$store.theme}"
transition:fly={{ duration: 260, y: 130 }}
>
<div class="header placeholder" />
<div class="loading placeholder">
<ProgressCircle />
</div>
<iframe title="Budibase App Preview" {src} />
</div>
</div>
<style>
.preview-overlay {
top: 0;
right: 0;
left: 0;
bottom: 0;
z-index: 999;
position: absolute;
background: rgba(255, 255, 255, 0.1);
display: flex;
align-items: stretch;
padding: 48px;
}
.container {
flex: 1 1 auto;
background: var(--spectrum-global-color-gray-75);
border-radius: 4px;
display: flex;
flex-direction: column;
overflow: hidden;
position: relative;
box-shadow: 0 0 80px 0 rgba(0, 0, 0, 0.5);
}
iframe {
position: absolute;
height: 100%;
width: 100%;
border: none;
outline: none;
z-index: 1;
}
.header {
height: 60px;
width: 100%;
background: black;
top: 0;
position: absolute;
}
.loading {
position: absolute;
top: 50%;
left: 50%;
transform: translateY(-50%) translateX(-50%);
}
.placeholder {
z-index: 0;
}
</style>

View file

@ -24,6 +24,7 @@
import BuilderSidePanel from "./_components/BuilderSidePanel.svelte"
import UserAvatars from "./_components/UserAvatars.svelte"
import { TOUR_KEYS, TOURS } from "components/portal/onboarding/tours.js"
import PreviewOverlay from "./_components/PreviewOverlay.svelte"
export let application
@ -140,7 +141,7 @@
<BuilderSidePanel />
{/if}
<div class="root">
<div class="root" class:blur={$store.showPreview}>
<div class="top-nav">
{#if $store.initialised}
<div class="topleftnav">
@ -230,6 +231,10 @@
{/await}
</div>
{#if $store.showPreview}
<PreviewOverlay />
{/if}
<svelte:window on:keydown={handleKeyDown} />
<Modal bind:this={commandPaletteModal}>
<CommandPalette />
@ -248,6 +253,10 @@
width: 100%;
display: flex;
flex-direction: column;
transition: filter 260ms ease-out;
}
.root.blur {
filter: blur(8px);
}
.top-nav {

View file

@ -22,6 +22,7 @@
import ImportRestQueriesModal from "components/backend/DatasourceNavigator/modals/ImportRestQueriesModal.svelte"
import { API } from "api"
import { DatasourceFeature } from "@budibase/types"
import Spinner from "components/common/Spinner.svelte"
const querySchema = {
name: {},
@ -33,6 +34,7 @@
let isValid = true
let integration, baseDatasource, datasource
let queryList
let loading = false
$: baseDatasource = $datasources.selected
$: queryList = $queries.list.filter(
@ -65,9 +67,11 @@
}
const saveDatasource = async () => {
loading = true
if (integration.features?.[DatasourceFeature.CONNECTION_CHECKING]) {
const valid = await validateConfig()
if (!valid) {
loading = false
return false
}
}
@ -82,6 +86,8 @@
baseDatasource = cloneDeep(datasource)
} catch (err) {
notifications.error(`Error saving datasource: ${err}`)
} finally {
loading = false
}
}
@ -119,8 +125,17 @@
<Divider />
<div class="config-header">
<Heading size="S">Configuration</Heading>
<Button disabled={!changed || !isValid} cta on:click={saveDatasource}>
Save
<Button
disabled={!changed || !isValid || loading}
cta
on:click={saveDatasource}
>
<div class="save-button-content">
{#if loading}
<Spinner size="10">Save</Spinner>
{/if}
Save
</div>
</Button>
</div>
<IntegrationConfigForm
@ -216,4 +231,10 @@
flex-direction: column;
gap: var(--spacing-m);
}
.save-button-content {
display: flex;
align-items: center;
gap: var(--spacing-s);
}
</style>

View file

@ -17,6 +17,7 @@
import IntegrationIcon from "components/backend/DatasourceNavigator/IntegrationIcon.svelte"
import ICONS from "components/backend/DatasourceNavigator/icons/index.js"
import FontAwesomeIcon from "components/common/FontAwesomeIcon.svelte"
import { onMount } from "svelte"
let internalTableModal
let externalDatasourceModal
@ -129,9 +130,19 @@
return integrationsArray
}
let continueGoogleSetup
onMount(() => {
const urlParams = new URLSearchParams(window.location.search)
continueGoogleSetup = urlParams.get("continue_google_setup")
})
const fetchIntegrations = async () => {
const unsortedIntegrations = await API.getIntegrations()
integrations = sortIntegrations(unsortedIntegrations)
if (continueGoogleSetup) {
handleIntegrationSelect(IntegrationTypes.GOOGLE_SHEETS)
}
}
$: fetchIntegrations()
@ -141,9 +152,17 @@
<CreateTableModal {promptUpload} afterSave={handleInternalTableSave} />
</Modal>
<Modal bind:this={externalDatasourceModal}>
<Modal
bind:this={externalDatasourceModal}
on:hide={() => {
continueGoogleSetup = null
}}
>
{#if integration?.auth?.type === "google"}
<GoogleDatasourceConfigModal {integration} />
<GoogleDatasourceConfigModal
continueSetupId={continueGoogleSetup}
{integration}
/>
{:else}
<DatasourceConfigModal {integration} />
{/if}

View file

@ -0,0 +1,235 @@
<script>
import GoogleLogo from "./_logos/Google.svelte"
import { isEqual, cloneDeep } from "lodash/fp"
import {
Button,
Heading,
Divider,
Label,
notifications,
Layout,
Input,
Body,
Toggle,
Icon,
Helpers,
Link,
} from "@budibase/bbui"
import { onMount } from "svelte"
import { API } from "api"
import { organisation, admin } from "stores/portal"
const ConfigTypes = {
Google: "google",
}
// Some older google configs contain a manually specified value - retain the functionality to edit the field
// When there is no value or we are in the cloud - prohibit editing the field, must use platform url to change
$: googleCallbackUrl = undefined
$: googleCallbackReadonly = $admin.cloud || !googleCallbackUrl
// Indicate to user that callback is based on platform url
// If there is an existing value, indicate that it may be removed to return to default behaviour
$: googleCallbackTooltip = $admin.cloud
? null
: googleCallbackReadonly
? "Visit the organisation page to update the platform URL"
: "Leave blank to use the default callback URL"
$: googleSheetsCallbackUrl = `${$organisation.platformUrl}/api/global/auth/datasource/google/callback`
$: GoogleConfigFields = {
Google: [
{ name: "clientID", label: "Client ID" },
{ name: "clientSecret", label: "Client secret" },
{
name: "callbackURL",
label: "Callback URL",
readonly: googleCallbackReadonly,
tooltip: googleCallbackTooltip,
placeholder: $organisation.googleCallbackUrl,
copyButton: true,
},
{
name: "sheetsURL",
label: "Sheets URL",
readonly: googleCallbackReadonly,
tooltip: googleCallbackTooltip,
placeholder: googleSheetsCallbackUrl,
copyButton: true,
},
],
}
let google
const providers = { google }
// control the state of the save button depending on whether form has changed
let originalGoogleDoc
let googleSaveButtonDisabled
$: {
isEqual(providers.google?.config, originalGoogleDoc?.config)
? (googleSaveButtonDisabled = true)
: (googleSaveButtonDisabled = false)
}
$: googleComplete = !!(
providers.google?.config?.clientID && providers.google?.config?.clientSecret
)
async function saveConfig(config) {
// Delete unsupported fields
delete config.createdAt
delete config.updatedAt
return API.saveConfig(config)
}
async function saveGoogle() {
if (!googleComplete) {
notifications.error(
`Please fill in all required ${ConfigTypes.Google} fields`
)
return
}
const google = providers.google
try {
const res = await saveConfig(google)
providers[res.type]._rev = res._rev
providers[res.type]._id = res._id
notifications.success(`Settings saved`)
} catch (e) {
notifications.error(e.message)
return
}
googleSaveButtonDisabled = true
originalGoogleDoc = cloneDeep(providers.google)
}
const copyToClipboard = async value => {
await Helpers.copyToClipboard(value)
notifications.success("Copied")
}
onMount(async () => {
try {
await organisation.init()
} catch (error) {
notifications.error("Error getting org config")
}
// Fetch Google config
let googleDoc
try {
googleDoc = await API.getConfig(ConfigTypes.Google)
} catch (error) {
notifications.error("Error fetching Google OAuth config")
}
if (!googleDoc?._id) {
providers.google = {
type: ConfigTypes.Google,
config: { activated: false },
}
originalGoogleDoc = cloneDeep(googleDoc)
} else {
// Default activated to true for older configs
if (googleDoc.config.activated === undefined) {
googleDoc.config.activated = true
}
originalGoogleDoc = cloneDeep(googleDoc)
providers.google = googleDoc
}
googleCallbackUrl = providers?.google?.config?.callbackURL
})
</script>
{#if providers.google}
<Divider />
<Layout gap="XS" noPadding>
<Heading size="S">
<div class="provider-title">
<GoogleLogo />
<span>Google</span>
</div>
</Heading>
<Body size="S">
To allow users to authenticate using their Google accounts, fill out the
fields below. Read the <Link
size="M"
href={"https://docs.budibase.com/docs/sso-with-google"}
>documentation</Link
> for more information.
</Body>
</Layout>
<Layout gap="XS" noPadding>
{#each GoogleConfigFields.Google as field}
<div class="form-row">
<Label size="L" tooltip={field.tooltip}>{field.label}</Label>
<div class="inputContainer">
<div class="input">
<Input
bind:value={providers.google.config[field.name]}
readonly={field.readonly}
placeholder={field.placeholder}
/>
</div>
{#if field.copyButton}
<div
class="copy"
on:click={() => copyToClipboard(field.placeholder)}
>
<Icon size="S" name="Copy" />
</div>
{/if}
</div>
</div>
{/each}
<div class="form-row">
<Label size="L">Activated</Label>
<Toggle text="" bind:value={providers.google.config.activated} />
</div>
</Layout>
<div>
<Button
disabled={googleSaveButtonDisabled}
cta
on:click={() => saveGoogle()}
>
Save
</Button>
</div>
{/if}
<style>
.form-row {
display: grid;
grid-template-columns: 120px 1fr;
grid-gap: var(--spacing-l);
align-items: center;
}
.provider-title {
display: flex;
flex-direction: row;
justify-content: space-between;
align-items: center;
gap: var(--spacing-m);
}
.provider-title span {
flex: 1 1 auto;
}
.inputContainer {
display: flex;
flex-direction: row;
}
.input {
flex: 1;
}
.copy {
display: flex;
align-items: center;
margin-left: 10px;
}
</style>

View file

@ -1,5 +1,4 @@
<script>
import GoogleLogo from "./_logos/Google.svelte"
import OidcLogo from "./_logos/OIDC.svelte"
import MicrosoftLogo from "assets/microsoft-logo.png"
import Auth0Logo from "assets/auth0-logo.png"
@ -28,9 +27,9 @@
import { API } from "api"
import { organisation, admin, licensing } from "stores/portal"
import Scim from "./scim.svelte"
import Google from "./google.svelte"
const ConfigTypes = {
Google: "google",
OIDC: "oidc",
}
@ -38,43 +37,6 @@
$: enforcedSSO = $organisation.isSSOEnforced
// Some older google configs contain a manually specified value - retain the functionality to edit the field
// When there is no value or we are in the cloud - prohibit editing the field, must use platform url to change
$: googleCallbackUrl = undefined
$: googleCallbackReadonly = $admin.cloud || !googleCallbackUrl
// Indicate to user that callback is based on platform url
// If there is an existing value, indicate that it may be removed to return to default behaviour
$: googleCallbackTooltip = $admin.cloud
? null
: googleCallbackReadonly
? "Visit the organisation page to update the platform URL"
: "Leave blank to use the default callback URL"
$: googleSheetsCallbackUrl = `${$organisation.platformUrl}/api/global/auth/datasource/google/callback`
$: GoogleConfigFields = {
Google: [
{ name: "clientID", label: "Client ID" },
{ name: "clientSecret", label: "Client secret" },
{
name: "callbackURL",
label: "Callback URL",
readonly: googleCallbackReadonly,
tooltip: googleCallbackTooltip,
placeholder: $organisation.googleCallbackUrl,
copyButton: true,
},
{
name: "sheetsURL",
label: "Sheets URL",
readonly: googleCallbackReadonly,
tooltip: googleCallbackTooltip,
placeholder: googleSheetsCallbackUrl,
copyButton: true,
},
],
}
$: OIDCConfigFields = {
Oidc: [
{ name: "configUrl", label: "Config URL" },
@ -133,15 +95,9 @@
const providers = { google, oidc }
// control the state of the save button depending on whether form has changed
let originalGoogleDoc
let originalOidcDoc
let googleSaveButtonDisabled
let oidcSaveButtonDisabled
$: {
isEqual(providers.google?.config, originalGoogleDoc?.config)
? (googleSaveButtonDisabled = true)
: (googleSaveButtonDisabled = false)
// delete the callback url which is never saved to the oidc
// config doc, to ensure an accurate comparison
delete providers.oidc?.config.configs[0].callbackURL
@ -151,10 +107,6 @@
: (oidcSaveButtonDisabled = false)
}
$: googleComplete = !!(
providers.google?.config?.clientID && providers.google?.config?.clientSecret
)
$: oidcComplete = !!(
providers.oidc?.config?.configs[0].configUrl &&
providers.oidc?.config?.configs[0].clientID &&
@ -230,30 +182,6 @@
originalOidcDoc = cloneDeep(providers.oidc)
}
async function saveGoogle() {
if (!googleComplete) {
notifications.error(
`Please fill in all required ${ConfigTypes.Google} fields`
)
return
}
const google = providers.google
try {
const res = await saveConfig(google)
providers[res.type]._rev = res._rev
providers[res.type]._id = res._id
notifications.success(`Settings saved`)
} catch (e) {
notifications.error(e.message)
return
}
googleSaveButtonDisabled = true
originalGoogleDoc = cloneDeep(providers.google)
}
let defaultScopes = ["profile", "email", "offline_access"]
const refreshScopes = idx => {
@ -281,29 +209,6 @@
notifications.error("Error getting org config")
}
// Fetch Google config
let googleDoc
try {
googleDoc = await API.getConfig(ConfigTypes.Google)
} catch (error) {
notifications.error("Error fetching Google OAuth config")
}
if (!googleDoc?._id) {
providers.google = {
type: ConfigTypes.Google,
config: { activated: false },
}
originalGoogleDoc = cloneDeep(googleDoc)
} else {
// Default activated to true for older configs
if (googleDoc.config.activated === undefined) {
googleDoc.config.activated = true
}
originalGoogleDoc = cloneDeep(googleDoc)
providers.google = googleDoc
}
googleCallbackUrl = providers?.google?.config?.callbackURL
// Get the list of user uploaded logos and push it to the dropdown options.
// This needs to be done before the config call so they're available when
// the dropdown renders.
@ -395,62 +300,7 @@
> before enabling this feature.
</Body>
</Layout>
{#if providers.google}
<Divider />
<Layout gap="XS" noPadding>
<Heading size="S">
<div class="provider-title">
<GoogleLogo />
<span>Google</span>
</div>
</Heading>
<Body size="S">
To allow users to authenticate using their Google accounts, fill out the
fields below. Read the <Link
size="M"
href={"https://docs.budibase.com/docs/sso-with-google"}
>documentation</Link
> for more information.
</Body>
</Layout>
<Layout gap="XS" noPadding>
{#each GoogleConfigFields.Google as field}
<div class="form-row">
<Label size="L" tooltip={field.tooltip}>{field.label}</Label>
<div class="inputContainer">
<div class="input">
<Input
bind:value={providers.google.config[field.name]}
readonly={field.readonly}
placeholder={field.placeholder}
/>
</div>
{#if field.copyButton}
<div
class="copy"
on:click={() => copyToClipboard(field.placeholder)}
>
<Icon size="S" name="Copy" />
</div>
{/if}
</div>
</div>
{/each}
<div class="form-row">
<Label size="L">Activated</Label>
<Toggle text="" bind:value={providers.google.config.activated} />
</div>
</Layout>
<div>
<Button
disabled={googleSaveButtonDisabled}
cta
on:click={() => saveGoogle()}
>
Save
</Button>
</div>
{/if}
<Google />
{#if providers.oidc}
<Divider />
<Layout gap="XS" noPadding>

View file

@ -373,7 +373,7 @@
<OnboardingTypeModal {chooseCreationType} />
</Modal>
<Modal bind:this={passwordModal}>
<Modal bind:this={passwordModal} disableCancel={true}>
<PasswordModal
createUsersResponse={bulkSaveResponse}
userData={userData.users}

View file

@ -57,7 +57,10 @@ export function createDatasourcesStore() {
return updateDatasource(response)
}
const save = async (body, fetchSchema = false) => {
const save = async (body, { fetchSchema, tablesFilter } = {}) => {
if (fetchSchema == null) {
fetchSchema = false
}
let response
if (body._id) {
response = await API.updateDatasource(body)
@ -65,6 +68,7 @@ export function createDatasourcesStore() {
response = await API.createDatasource({
datasource: body,
fetchSchema,
tablesFilter,
})
}
return updateDatasource(response)
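For context, a minimal usage sketch of the new `save` signature (the datasource body and table names below are illustrative placeholders, not taken from this change):

// Old call shape: datasources.save(body, true)
// New call shape: an options object, with an optional table filter
await datasources.save(datasourceBody, {
  fetchSchema: true,
  tablesFilter: ["customers", "orders"], // hypothetical table names
})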

View file

@ -12,9 +12,31 @@ const ignoredWarnings = [
"a11y-click-events-have-key-events",
]
const copyFonts = dest =>
viteStaticCopy({
targets: [
{
src: "../../node_modules/@fontsource/source-sans-pro",
dest,
},
{
src: "../../node_modules/remixicon/fonts/*",
dest,
},
],
})
export default defineConfig(({ mode }) => {
const isProduction = mode === "production"
const env = loadEnv(mode, process.cwd())
// Plugins to only run in dev
const devOnlyPlugins = [
// Copy fonts to an additional path so that svelte's automatic
// prefixing of the base URL path can still resolve assets
copyFonts("builder/fonts"),
]
return {
test: {
setupFiles: ["./vitest.setup.js"],
@ -60,18 +82,8 @@ export default defineConfig(({ mode }) => {
),
"process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
}),
viteStaticCopy({
targets: [
{
src: "../../node_modules/@fontsource/source-sans-pro",
dest: "fonts",
},
{
src: "../../node_modules/remixicon/fonts/*",
dest: "fonts",
},
],
}),
copyFonts("fonts"),
...(isProduction ? [] : devOnlyPlugins),
],
optimizeDeps: {
exclude: ["@roxi/routify"],

View file

@ -49,7 +49,7 @@
"pouchdb": "7.3.0",
"pouchdb-replication-stream": "1.2.9",
"randomstring": "1.1.5",
"tar": "6.1.11",
"tar": "6.1.15",
"yaml": "^2.1.1"
},
"devDependencies": {

View file

@ -1,7 +1,6 @@
import { createAPIClient } from "@budibase/frontend-core"
import { notificationStore } from "../stores/notification.js"
import { authStore } from "../stores/auth.js"
import { devToolsStore } from "../stores/devTools.js"
import { notificationStore, devToolsEnabled, devToolsStore } from "../stores/"
import { get } from "svelte/store"
export const API = createAPIClient({
@ -25,9 +24,10 @@ export const API = createAPIClient({
}
// Add role header
const devToolsState = get(devToolsStore)
if (devToolsState.enabled && devToolsState.role) {
headers["x-budibase-role"] = devToolsState.role
const $devToolsStore = get(devToolsStore)
const $devToolsEnabled = get(devToolsEnabled)
if ($devToolsEnabled && $devToolsStore.role) {
headers["x-budibase-role"] = $devToolsStore.role
}
},

View file

@ -17,6 +17,7 @@
appStore,
devToolsStore,
environmentStore,
devToolsEnabled,
} from "stores"
import NotificationDisplay from "components/overlay/NotificationDisplay.svelte"
import ConfirmationDisplay from "components/overlay/ConfirmationDisplay.svelte"
@ -47,10 +48,7 @@
let permissionError = false
// Determine if we should show devtools or not
$: showDevTools =
!$builderStore.inBuilder &&
$devToolsStore.enabled &&
!$routeStore.queryParams?.peek
$: showDevTools = $devToolsEnabled && !$routeStore.queryParams?.peek
// Handle no matching route
$: {
@ -107,6 +105,7 @@
lang="en"
dir="ltr"
class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
class:builder={$builderStore.inBuilder}
>
<DeviceBindingsProvider>
<UserBindingsProvider>
@ -223,12 +222,14 @@
overflow: hidden;
height: 100%;
width: 100%;
background: transparent;
display: flex;
flex-direction: row;
justify-content: center;
align-items: center;
}
#spectrum-root.builder {
background: transparent;
}
#clip-root {
max-width: 100%;

View file

@ -1,5 +1,5 @@
<script>
import { Heading, Button, Select } from "@budibase/bbui"
import { Heading, Select, ActionButton } from "@budibase/bbui"
import { devToolsStore } from "../../stores"
import { getContext } from "svelte"
@ -30,7 +30,7 @@
</script>
<div class="dev-preview-header" class:mobile={$context.device.mobile}>
<Heading size="XS">Budibase App Preview</Heading>
<Heading size="XS">Preview</Heading>
<Select
quiet
options={previewOptions}
@ -40,36 +40,57 @@
on:change={e => devToolsStore.actions.changeRole(e.detail)}
/>
{#if !$context.device.mobile}
<Button
<ActionButton
quiet
overBackground
icon="Code"
on:click={() => devToolsStore.actions.setVisible(!$devToolsStore.visible)}
>
{$devToolsStore.visible ? "Close" : "Open"} DevTools
</Button>
</ActionButton>
{/if}
<ActionButton
quiet
icon="Close"
on:click={() => window.parent.closePreview?.()}
>
Close preview
</ActionButton>
</div>
<style>
.dev-preview-header {
flex: 0 0 50px;
height: 50px;
flex: 0 0 60px;
display: grid;
align-items: center;
background-color: var(--spectrum-global-color-blue-400);
background-color: black;
padding: 0 var(--spacing-xl);
grid-template-columns: 1fr auto auto;
grid-template-columns: 1fr auto auto auto;
grid-gap: var(--spacing-xl);
}
.dev-preview-header.mobile {
flex: 0 0 50px;
grid-template-columns: 1fr auto;
grid-template-columns: 1fr auto auto;
}
.dev-preview-header :global(.spectrum-Heading),
.dev-preview-header :global(.spectrum-Picker-menuIcon),
.dev-preview-header :global(.spectrum-Picker-label) {
color: white !important;
.dev-preview-header :global(.spectrum-Icon),
.dev-preview-header :global(.spectrum-Picker-label),
.dev-preview-header :global(.spectrum-ActionButton) {
font-weight: 600;
color: white;
}
.dev-preview-header :global(.spectrum-Picker) {
padding-left: 8px;
padding-right: 8px;
transition: background 130ms ease-out;
border-radius: 4px;
}
.dev-preview-header :global(.spectrum-ActionButton:hover),
.dev-preview-header :global(.spectrum-Picker:hover),
.dev-preview-header :global(.spectrum-Picker.is-open) {
background: rgba(255, 255, 255, 0.1);
}
.dev-preview-header :global(.spectrum-ActionButton:active) {
background: rgba(255, 255, 255, 0.2);
}
@media print {
.dev-preview-header {

View file

@ -2,7 +2,6 @@ import ClientApp from "./components/ClientApp.svelte"
import {
builderStore,
appStore,
devToolsStore,
blockStore,
componentStore,
environmentStore,
@ -51,11 +50,6 @@ const loadBudibase = async () => {
await environmentStore.actions.fetchEnvironment()
}
// Enable dev tools or not. We need to be using a dev app and not inside
// the builder preview to enable them.
const enableDevTools = !get(builderStore).inBuilder && get(appStore).isDevApp
devToolsStore.actions.setEnabled(enableDevTools)
// Register handler for runtime events from the builder
window.handleBuilderRuntimeEvent = (type, data) => {
if (!window["##BUDIBASE_IN_BUILDER##"]) {

View file

@ -1,18 +1,30 @@
import { authStore } from "../stores/auth.js"
import { appStore } from "../stores/app.js"
import { get } from "svelte/store"
import { Constants } from "@budibase/frontend-core"
const getLicense = () => {
const getUserLicense = () => {
const user = get(authStore)
if (user) {
return user.license
}
}
const getAppLicenseType = () => {
const appDef = get(appStore)
if (appDef?.licenseType) {
return appDef.licenseType
}
}
export const isFreePlan = () => {
const license = getLicense()
if (license) {
return license.plan.type === Constants.PlanType.FREE
let licenseType = getAppLicenseType()
if (!licenseType) {
const license = getUserLicense()
licenseType = license?.plan?.type
}
if (licenseType) {
return licenseType === Constants.PlanType.FREE
} else {
// safety net - no license means free plan
return true

View file

@ -2,13 +2,14 @@ import { derived } from "svelte/store"
import { Constants } from "@budibase/frontend-core"
import { devToolsStore } from "../devTools.js"
import { authStore } from "../auth.js"
import { devToolsEnabled } from "./devToolsEnabled.js"
// Derive the current role of the logged-in user
export const currentRole = derived(
[devToolsStore, authStore],
([$devToolsStore, $authStore]) => {
[devToolsEnabled, devToolsStore, authStore],
([$devToolsEnabled, $devToolsStore, $authStore]) => {
return (
($devToolsStore.enabled && $devToolsStore.role) ||
($devToolsEnabled && $devToolsStore.role) ||
$authStore?.roleId ||
Constants.Roles.PUBLIC
)

View file

@ -0,0 +1,10 @@
import { derived } from "svelte/store"
import { appStore } from "../app.js"
import { builderStore } from "../builder.js"
export const devToolsEnabled = derived(
[appStore, builderStore],
([$appStore, $builderStore]) => {
return !$builderStore.inBuilder && $appStore.isDevApp
}
)
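A minimal sketch of consuming the new derived store from a client component (assumed component code; only the store itself is part of this change):

<script>
  import { devToolsEnabled } from "stores"
</script>

{#if $devToolsEnabled}
  <!-- dev-only UI, e.g. the role switcher, shown only for dev apps outside the builder -->
{/if}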

View file

@ -3,3 +3,4 @@
// separately we can keep our actual stores lean and performant.
export { currentRole } from "./currentRole.js"
export { dndComponentPath } from "./dndComponentPath.js"
export { devToolsEnabled } from "./devToolsEnabled.js"

View file

@ -4,7 +4,6 @@ import { authStore } from "./auth"
import { API } from "../api"
const initialState = {
enabled: false,
visible: false,
allowSelection: false,
role: null,
@ -13,13 +12,6 @@ const initialState = {
const createDevToolStore = () => {
const store = createLocalStorageStore("bb-devtools", initialState)
const setEnabled = enabled => {
store.update(state => ({
...state,
enabled,
}))
}
const setVisible = visible => {
store.update(state => ({
...state,
@ -46,7 +38,7 @@ const createDevToolStore = () => {
return {
subscribe: store.subscribe,
actions: { setEnabled, setVisible, setAllowSelection, changeRole },
actions: { setVisible, setAllowSelection, changeRole },
}
}

View file

@ -26,13 +26,16 @@ export const buildDatasourceEndpoints = API => ({
* Creates a datasource
* @param datasource the datasource to create
* @param fetchSchema whether to fetch the schema or not
* @param tablesFilter a list of tables to actually fetch rather than simply
* all that are accessible.
*/
createDatasource: async ({ datasource, fetchSchema }) => {
createDatasource: async ({ datasource, fetchSchema, tablesFilter }) => {
return await API.post({
url: "/api/datasources",
body: {
datasource,
fetchSchema,
tablesFilter,
},
})
},
@ -69,4 +72,15 @@ export const buildDatasourceEndpoints = API => ({
body: { datasource },
})
},
/**
* Fetch table names available within the datasource, for filtering out undesired tables
* @param datasource the datasource configuration to use for fetching tables
*/
fetchInfoForDatasource: async datasource => {
return await API.post({
url: `/api/datasources/info`,
body: { datasource },
})
},
})
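As a usage sketch (the `datasource` object and the excluded table name are placeholders), the builder can now look up the available table names first and then restrict the schema fetch to a subset:

// Ask the server which tables the datasource exposes
const { tableNames } = await API.fetchInfoForDatasource(datasource)

// Create the datasource, only fetching schema for the chosen tables
await API.createDatasource({
  datasource,
  fetchSchema: true,
  tablesFilter: tableNames.filter(name => name !== "audit_log"), // "audit_log" is a made-up example
})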

@ -1 +1 @@
Subproject commit 46f06fdfeb3b8f0cfdc45e09a000bd84aeacee0c
Subproject commit f4b8449aac9bd265214396afbdce7ff984a2ae34

View file

@ -6,5 +6,5 @@
"src/**/*.spec.js",
"../backend-core/dist/**/*"
],
"exec": "node ./scripts/build.js && node ./dist/index.js"
"exec": "yarn build && node ./dist/index.js"
}

View file

@ -63,6 +63,7 @@
"airtable": "0.10.1",
"arangojs": "7.2.0",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bull": "4.10.1",
"chmodr": "1.2.0",
@ -97,7 +98,7 @@
"koa2-ratelimit": "1.1.1",
"lodash": "4.17.21",
"memorystream": "0.3.1",
"mongodb": "4.9",
"mongodb": "5.6",
"mssql": "6.2.3",
"mysql2": "2.3.3",
"node-fetch": "2.6.7",
@ -117,7 +118,7 @@
"socket.io": "4.6.1",
"svelte": "3.49.0",
"swagger-parser": "10.0.3",
"tar": "6.1.11",
"tar": "6.1.15",
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"validate.js": "0.13.1",
@ -150,7 +151,7 @@
"@types/redis": "4.0.11",
"@types/server-destroy": "1.0.1",
"@types/supertest": "2.0.12",
"@types/tar": "6.1.3",
"@types/tar": "6.1.5",
"@typescript-eslint/parser": "5.45.0",
"apidoc": "0.50.4",
"babel-jest": "29.5.0",

View file

@ -1,53 +1,54 @@
import env from "../../environment"
import {
createAllSearchIndex,
createLinkView,
createRoutingView,
createAllSearchIndex,
} from "../../db/views/staticViews"
import { createApp, deleteApp } from "../../utilities/fileSystem"
import {
backupClientLibrary,
createApp,
deleteApp,
revertClientLibrary,
updateClientLibrary,
} from "../../utilities/fileSystem"
import {
AppStatus,
DocumentType,
generateAppID,
generateDevAppID,
getLayoutParams,
getScreenParams,
generateDevAppID,
DocumentType,
AppStatus,
} from "../../db/utils"
import {
db as dbCore,
roles,
cache,
tenancy,
context,
db as dbCore,
env as envCore,
ErrorCode,
events,
migrations,
objectStore,
ErrorCode,
env as envCore,
roles,
tenancy,
} from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants"
import {
DEFAULT_BB_DATASOURCE_ID,
buildDefaultDocs,
DEFAULT_BB_DATASOURCE_ID,
} from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { stringToReadStream, isQsTrue } from "../../utilities"
import { getLocksById, doesUserHaveLock } from "../../utilities/redis"
import {
updateClientLibrary,
backupClientLibrary,
revertClientLibrary,
} from "../../utilities/fileSystem"
import { stringToReadStream } from "../../utilities"
import { doesUserHaveLock, getLocksById } from "../../utilities/redis"
import { cleanupAutomations } from "../../automations/utils"
import { checkAppMetadata } from "../../automations/logging"
import { getUniqueRows } from "../../utilities/usageQuota/rows"
import { quotas, groups } from "@budibase/pro"
import { groups, licensing, quotas } from "@budibase/pro"
import {
App,
Layout,
Screen,
MigrationType,
Database,
PlanType,
Screen,
UserCtx,
} from "@budibase/types"
import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
@ -114,7 +115,18 @@ function checkAppName(
}
}
async function createInstance(appId: string, template: any) {
interface AppTemplate {
templateString: string
useTemplate: string
file?: {
type: string
path: string
password?: string
}
key?: string
}
async function createInstance(appId: string, template: AppTemplate) {
const db = context.getAppDB()
await db.put({
_id: "_design/database",
@ -207,6 +219,7 @@ export async function fetchAppPackage(ctx: UserCtx) {
let application = await db.get(DocumentType.APP_METADATA)
const layouts = await getLayouts()
let screens = await getScreens()
const license = await licensing.cache.getCachedLicense()
// Enrich plugin URLs
application.usedPlugins = objectStore.enrichPluginURLs(
@ -227,6 +240,7 @@ export async function fetchAppPackage(ctx: UserCtx) {
ctx.body = {
application: { ...application, upgradableVersion: envCore.VERSION },
licenseType: license?.plan.type || PlanType.FREE,
screens,
layouts,
clientLibPath,
@ -237,19 +251,24 @@ export async function fetchAppPackage(ctx: UserCtx) {
async function performAppCreate(ctx: UserCtx) {
const apps = (await dbCore.getAllApps({ dev: true })) as App[]
const name = ctx.request.body.name,
possibleUrl = ctx.request.body.url
possibleUrl = ctx.request.body.url,
encryptionPassword = ctx.request.body.encryptionPassword
checkAppName(ctx, apps, name)
const url = sdk.applications.getAppUrl({ name, url: possibleUrl })
checkAppUrl(ctx, apps, url)
const { useTemplate, templateKey, templateString } = ctx.request.body
const instanceConfig: any = {
const instanceConfig: AppTemplate = {
useTemplate,
key: templateKey,
templateString,
}
if (ctx.request.files && ctx.request.files.templateFile) {
instanceConfig.file = ctx.request.files.templateFile
instanceConfig.file = {
...(ctx.request.files.templateFile as any),
password: encryptionPassword,
}
}
const tenantId = tenancy.isMultiTenant() ? tenancy.getTenantId() : null
const appId = generateDevAppID(generateAppID(tenantId))

View file

@ -1,17 +1,31 @@
import sdk from "../../sdk"
import { events, context } from "@budibase/backend-core"
import { events, context, db } from "@budibase/backend-core"
import { DocumentType } from "../../db/utils"
import { isQsTrue } from "../../utilities"
import { Ctx } from "@budibase/types"
interface ExportAppDumpRequest {
excludeRows: boolean
encryptPassword?: string
}
export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
const { appId } = ctx.query as any
const { excludeRows, encryptPassword } = ctx.request.body
const [app] = await db.getAppsByIDs([appId])
const appName = app.name
export async function exportAppDump(ctx: any) {
let { appId, excludeRows } = ctx.query
// remove the 120 second limit for the request
ctx.req.setTimeout(0)
const appName = decodeURI(ctx.query.appname)
excludeRows = isQsTrue(excludeRows)
const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`
const extension = encryptPassword ? "enc.tar.gz" : "tar.gz"
const backupIdentifier = `${appName}-export-${new Date().getTime()}.${extension}`
ctx.attachment(backupIdentifier)
ctx.body = await sdk.backups.streamExportApp(appId, excludeRows)
ctx.body = await sdk.backups.streamExportApp({
appId,
excludeRows,
encryptPassword,
})
await context.doInAppContext(appId, async () => {
const appDb = context.getAppDB()

View file

@ -11,7 +11,7 @@ import { BuildSchemaErrors, InvalidColumns } from "../../constants"
import { getIntegration } from "../../integrations"
import { getDatasourceAndQuery } from "./row/utils"
import { invalidateDynamicVariables } from "../../threads/utils"
import { db as dbCore, context, events } from "@budibase/backend-core"
import { db as dbCore, context, events, cache } from "@budibase/backend-core"
import {
UserCtx,
Datasource,
@ -25,9 +25,11 @@ import {
FetchDatasourceInfoResponse,
IntegrationBase,
DatasourcePlus,
SourceName,
} from "@budibase/types"
import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
function getErrorTables(errors: any, errorType: string) {
return Object.entries(errors)
@ -101,6 +103,22 @@ async function buildSchemaHelper(datasource: Datasource) {
return { tables: connector.tables, error }
}
async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
let { tables, error } = await buildSchemaHelper(datasource)
let finalTables = tables
if (filter) {
finalTables = {}
for (let key in tables) {
if (
filter.some((filter: any) => filter.toLowerCase() === key.toLowerCase())
) {
finalTables[key] = tables[key]
}
}
}
return { tables: finalTables, error }
}
export async function fetch(ctx: UserCtx) {
// Get internal tables
const db = context.getAppDB()
@ -172,43 +190,28 @@ export async function information(
}
const tableNames = await connector.getTableNames()
ctx.body = {
tableNames,
tableNames: tableNames.sort(),
}
}
export async function buildSchemaFromDb(ctx: UserCtx) {
const db = context.getAppDB()
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
let { tables, error } = await buildSchemaHelper(datasource)
if (tablesFilter) {
if (!datasource.entities) {
datasource.entities = {}
}
for (let key in tables) {
if (
tablesFilter.some(
(filter: any) => filter.toLowerCase() === key.toLowerCase()
)
) {
datasource.entities[key] = tables[key]
}
}
} else {
datasource.entities = tables
}
const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables
setDefaultDisplayColumns(datasource)
const dbResp = await db.put(datasource)
datasource._rev = dbResp.rev
const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)
const response: any = { datasource: cleanedDatasource }
const res: any = { datasource: cleanedDatasource }
if (error) {
response.error = error
res.error = error
}
ctx.body = response
ctx.body = res
}
/**
@ -306,12 +309,19 @@ export async function update(ctx: UserCtx<any, UpdateDatasourceResponse>) {
builderSocket?.emitDatasourceUpdate(ctx, datasource)
}
const preSaveAction: Partial<Record<SourceName, any>> = {
[SourceName.GOOGLE_SHEETS]: async (datasource: Datasource) => {
await googleSetupCreationAuth(datasource.config as any)
},
}
export async function save(
ctx: UserCtx<CreateDatasourceRequest, CreateDatasourceResponse>
) {
const db = context.getAppDB()
const plus = ctx.request.body.datasource.plus
const fetchSchema = ctx.request.body.fetchSchema
const tablesFilter = ctx.request.body.tablesFilter
const datasource = {
_id: generateDatasourceID({ plus }),
@ -321,12 +331,19 @@ export async function save(
let schemaError = null
if (fetchSchema) {
const { tables, error } = await buildSchemaHelper(datasource)
const { tables, error } = await buildFilteredSchema(
datasource,
tablesFilter
)
schemaError = error
datasource.entities = tables
setDefaultDisplayColumns(datasource)
}
if (preSaveAction[datasource.source]) {
await preSaveAction[datasource.source](datasource)
}
const dbResp = await db.put(datasource)
await events.datasource.created(datasource)
datasource._rev = dbResp.rev

View file

@ -4,7 +4,7 @@ import {
getUserMetadataParams,
InternalTables,
} from "../../db/utils"
import { BBContext, Database } from "@budibase/types"
import { UserCtx, Database } from "@budibase/types"
const UpdateRolesOptions = {
CREATED: "created",
@ -38,15 +38,15 @@ async function updateRolesOnUserTable(
}
}
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx) {
ctx.body = await roles.getAllRoles()
}
export async function find(ctx: BBContext) {
export async function find(ctx: UserCtx) {
ctx.body = await roles.getRole(ctx.params.roleId)
}
export async function save(ctx: BBContext) {
export async function save(ctx: UserCtx) {
const db = context.getAppDB()
let { _id, name, inherits, permissionId } = ctx.request.body
let isCreate = false
@ -72,7 +72,7 @@ export async function save(ctx: BBContext) {
ctx.message = `Role '${role.name}' created successfully.`
}
export async function destroy(ctx: BBContext) {
export async function destroy(ctx: UserCtx) {
const db = context.getAppDB()
const roleId = ctx.params.roleId
const role = await db.get(roleId)

View file

@ -1,6 +1,6 @@
import { getRoutingInfo } from "../../utilities/routing"
import { roles } from "@budibase/backend-core"
import { BBContext } from "@budibase/types"
import { UserCtx } from "@budibase/types"
const URL_SEPARATOR = "/"
@ -56,11 +56,11 @@ async function getRoutingStructure() {
return { routes: routing.json }
}
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx) {
ctx.body = await getRoutingStructure()
}
export async function clientFetch(ctx: BBContext) {
export async function clientFetch(ctx: UserCtx) {
const routing = await getRoutingStructure()
let roleId = ctx.user?.role?._id
const roleIds = (await roles.getUserRoleHierarchy(roleId, {

View file

@ -5,7 +5,7 @@ import { permissions } from "@budibase/backend-core"
const router: Router = new Router()
router.get(
router.post(
"/api/backups/export",
authorized(permissions.BUILDER),
controller.exportAppDump
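For reference, a hedged sketch of how a client would call the endpoint now that it is a POST (the password value is a placeholder; `appId` stays in the query string and the options move into the request body, matching the controller above):

const response = await fetch(`/api/backups/export?appId=${appId}`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    excludeRows: true,
    // optional - when set the server produces a *.enc.tar.gz archive
    encryptPassword: "a-strong-passphrase",
  }),
})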

View file

@ -1,7 +1,9 @@
import tk from "timekeeper"
import * as setup from "./utilities"
import { events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { mocks } from "@budibase/backend-core/tests"
describe("/backups", () => {
let request = setup.getRequest()
@ -16,7 +18,7 @@ describe("/backups", () => {
describe("exportAppDump", () => {
it("should be able to export app", async () => {
const res = await request
.get(`/api/backups/export?appId=${config.getAppId()}&appname=test`)
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
.expect(200)
expect(res.headers["content-type"]).toEqual("application/gzip")
@ -26,10 +28,24 @@ describe("/backups", () => {
it("should apply authorization to endpoint", async () => {
await checkBuilderEndpoint({
config,
method: "GET",
method: "POST",
url: `/api/backups/export?appId=${config.getAppId()}`,
})
})
it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE)
const res = await request
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
expect(res.headers["content-disposition"]).toEqual(
`attachment; filename="${
config.getApp()!.name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
)
})
})
describe("calculateBackupStats", () => {

View file

@ -15,7 +15,6 @@ import * as api from "./api"
import * as automations from "./automations"
import { Thread } from "./threads"
import * as redis from "./utilities/redis"
import { initialise as initialiseWebsockets } from "./websockets"
import { events, logging, middleware, timers } from "@budibase/backend-core"
import { startup } from "./startup"
const Sentry = require("@sentry/node")

View file

@ -48,6 +48,35 @@ export const definition: AutomationStepSchema = {
type: AutomationIOType.STRING,
title: "HTML Contents",
},
addInvite: {
type: AutomationIOType.BOOLEAN,
title: "Add calendar invite",
},
startTime: {
type: AutomationIOType.DATE,
title: "Start Time",
dependsOn: "addInvite",
},
endTime: {
type: AutomationIOType.DATE,
title: "End Time",
dependsOn: "addInvite",
},
summary: {
type: AutomationIOType.STRING,
title: "Meeting Summary",
dependsOn: "addInvite",
},
location: {
type: AutomationIOType.STRING,
title: "Location",
dependsOn: "addInvite",
},
url: {
type: AutomationIOType.STRING,
title: "URL",
dependsOn: "addInvite",
},
},
required: ["to", "from", "subject", "contents"],
},
@ -68,21 +97,43 @@ export const definition: AutomationStepSchema = {
}
export async function run({ inputs }: AutomationStepInput) {
let { to, from, subject, contents, cc, bcc } = inputs
let {
to,
from,
subject,
contents,
cc,
bcc,
addInvite,
startTime,
endTime,
summary,
location,
url,
} = inputs
if (!contents) {
contents = "<h1>No content</h1>"
}
to = to || undefined
try {
let response = await sendSmtpEmail(
let response = await sendSmtpEmail({
to,
from,
subject,
contents,
cc,
bcc,
true
)
automation: true,
invite: addInvite
? {
startTime,
endTime,
summary,
location,
url,
}
: undefined,
})
return {
success: true,
response,

View file

@ -1,71 +0,0 @@
function generateResponse(to, from) {
return {
"success": true,
"response": {
"accepted": [
to
],
"envelope": {
"from": from,
"to": [
to
]
},
"message": `Email sent to ${to}.`
}
}
}
const mockFetch = jest.fn(() => ({
headers: {
raw: () => {
return { "content-type": ["application/json"] }
},
get: () => ["application/json"],
},
json: jest.fn(() => response),
status: 200,
text: jest.fn(),
}))
jest.mock("node-fetch", () => mockFetch)
const setup = require("./utilities")
describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeAll(async () => {
await config.init()
})
afterAll(setup.afterAll)
it("should be able to run the action", async () => {
inputs = {
to: "user1@test.com",
from: "admin@test.com",
subject: "hello",
contents: "testing",
}
let resp = generateResponse(inputs.to, inputs.from)
mockFetch.mockImplementationOnce(() => ({
headers: {
raw: () => {
return { "content-type": ["application/json"] }
},
get: () => ["application/json"],
},
json: jest.fn(() => resp),
status: 200,
text: jest.fn(),
}))
const res = await setup.runStep(setup.actions.SEND_EMAIL_SMTP.stepId, inputs)
expect(res.response).toEqual(resp)
expect(res.success).toEqual(true)
})
})

View file

@ -0,0 +1,74 @@
import * as workerRequests from "../../utilities/workerRequests"
jest.mock("../../utilities/workerRequests", () => ({
sendSmtpEmail: jest.fn(),
}))
function generateResponse(to: string, from: string) {
return {
success: true,
response: {
accepted: [to],
envelope: {
from: from,
to: [to],
},
message: `Email sent to ${to}.`,
},
}
}
const setup = require("./utilities")
describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeAll(async () => {
await config.init()
})
afterAll(setup.afterAll)
it("should be able to run the action", async () => {
jest
.spyOn(workerRequests, "sendSmtpEmail")
.mockImplementationOnce(async () =>
generateResponse("user1@test.com", "admin@test.com")
)
const invite = {
startTime: new Date(),
endTime: new Date(),
summary: "summary",
location: "location",
url: "url",
}
inputs = {
to: "user1@test.com",
from: "admin@test.com",
subject: "hello",
contents: "testing",
cc: "cc",
bcc: "bcc",
addInvite: true,
...invite,
}
let resp = generateResponse(inputs.to, inputs.from)
const res = await setup.runStep(
setup.actions.SEND_EMAIL_SMTP.stepId,
inputs
)
expect(res.response).toEqual(resp)
expect(res.success).toEqual(true)
expect(workerRequests.sendSmtpEmail).toHaveBeenCalledTimes(1)
expect(workerRequests.sendSmtpEmail).toHaveBeenCalledWith({
to: "user1@test.com",
from: "admin@test.com",
subject: "hello",
contents: "testing",
cc: "cc",
bcc: "bcc",
invite,
automation: true,
})
})
})

View file

@ -26,6 +26,10 @@ export default function process(updateCb?: UpdateCallback) {
// if something not found - no changes to perform
if (err?.status === 404) {
return
}
// The user has already been synced in another process
else if (err?.status === 409) {
return
} else {
logging.logAlert("Failed to perform user/group app sync", err)
}

View file

@ -1,5 +1,6 @@
import {
ConnectionInfo,
Datasource,
DatasourceFeature,
DatasourceFieldType,
DatasourcePlus,
@ -19,13 +20,15 @@ import { OAuth2Client } from "google-auth-library"
import { buildExternalTableId, finaliseExternalTables } from "./utils"
import { GoogleSpreadsheet, GoogleSpreadsheetRow } from "google-spreadsheet"
import fetch from "node-fetch"
import { configs, HTTPError } from "@budibase/backend-core"
import { dataFilters } from "@budibase/shared-core"
import { cache, configs, context, HTTPError } from "@budibase/backend-core"
import { dataFilters, utils } from "@budibase/shared-core"
import { GOOGLE_SHEETS_PRIMARY_KEY } from "../constants"
import sdk from "../sdk"
interface GoogleSheetsConfig {
spreadsheetId: string
auth: OAuthClientConfig
continueSetupId?: string
}
interface OAuthClientConfig {
@ -72,7 +75,7 @@ const SCHEMA: Integration = {
},
datasource: {
spreadsheetId: {
display: "Google Sheet URL",
display: "Spreadsheet URL",
type: DatasourceFieldType.STRING,
required: true,
},
@ -207,6 +210,8 @@ class GoogleSheetsIntegration implements DatasourcePlus {
async connect() {
try {
await setupCreationAuth(this.config)
// Initialise oAuth client
let googleConfig = await configs.getGoogleDatasourceConfig()
if (!googleConfig) {
@ -269,24 +274,24 @@ class GoogleSheetsIntegration implements DatasourcePlus {
}
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
// not fully configured yet
if (!this.config.auth) {
return
}
await this.connect()
const sheets = this.client.sheetsByIndex
const tables: Record<string, Table> = {}
for (let sheet of sheets) {
// must fetch rows to determine schema
await sheet.getRows()
await utils.parallelForeach(
sheets,
async sheet => {
// must fetch rows to determine schema
await sheet.getRows({ limit: 0, offset: 0 })
const id = buildExternalTableId(datasourceId, sheet.title)
tables[sheet.title] = this.getTableSchema(
sheet.title,
sheet.headerValues,
id
)
}
const id = buildExternalTableId(datasourceId, sheet.title)
tables[sheet.title] = this.getTableSchema(
sheet.title,
sheet.headerValues,
id
)
},
10
)
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
@ -566,6 +571,18 @@ class GoogleSheetsIntegration implements DatasourcePlus {
}
}
export async function setupCreationAuth(datasource: GoogleSheetsConfig) {
if (datasource.continueSetupId) {
const appId = context.getAppId()
const tokens = await cache.get(
`datasource:creation:${appId}:google:${datasource.continueSetupId}`
)
datasource.auth = tokens.tokens
delete datasource.continueSetupId
}
}
export default {
schema: SCHEMA,
integration: GoogleSheetsIntegration,

View file

@ -351,7 +351,7 @@ const SCHEMA: Integration = getSchema()
class MongoIntegration implements IntegrationBase {
private config: MongoDBConfig
private client: any
private client: MongoClient
constructor(config: MongoDBConfig) {
this.config = config
@ -372,6 +372,8 @@ class MongoIntegration implements IntegrationBase {
response.connected = true
} catch (e: any) {
response.error = e.message as string
} finally {
await this.client.close()
}
return response
}
@ -380,7 +382,7 @@ class MongoIntegration implements IntegrationBase {
return this.client.connect()
}
createObjectIds(json: any): object {
createObjectIds(json: any) {
const self = this
function interpolateObjectIds(json: any) {
for (let field of Object.keys(json)) {
@ -487,7 +489,11 @@ class MongoIntegration implements IntegrationBase {
switch (query.extra.actionType) {
case "find": {
return await collection.find(json).toArray()
if (json) {
return await collection.find(json).toArray()
} else {
return await collection.find().toArray()
}
}
case "findOne": {
return await collection.findOne(json)

View file

@ -20,7 +20,7 @@ import Sql from "./base/sql"
import { PostgresColumn } from "./base/types"
import { escapeDangerousCharacters } from "../utilities"
import { Client, types } from "pg"
import { Client, ClientConfig, types } from "pg"
// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
@ -42,6 +42,8 @@ interface PostgresConfig {
schema: string
ssl?: boolean
ca?: string
clientKey?: string
clientCert?: string
rejectUnauthorized?: boolean
}
@ -98,6 +100,19 @@ const SCHEMA: Integration = {
required: false,
},
ca: {
display: "Server CA",
type: DatasourceFieldType.LONGFORM,
default: false,
required: false,
},
clientKey: {
display: "Client key",
type: DatasourceFieldType.LONGFORM,
default: false,
required: false,
},
clientCert: {
display: "Client cert",
type: DatasourceFieldType.LONGFORM,
default: false,
required: false,
@ -144,12 +159,14 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
super(SqlClient.POSTGRES)
this.config = config
let newConfig = {
let newConfig: ClientConfig = {
...this.config,
ssl: this.config.ssl
? {
rejectUnauthorized: this.config.rejectUnauthorized,
ca: this.config.ca,
key: this.config.clientKey,
cert: this.config.clientCert,
}
: undefined,
}
@ -322,7 +339,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
await this.openConnection()
const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL)
return columnsResponse.rows.map(row => row.table_name)
const names = columnsResponse.rows.map(row => row.table_name)
return [...new Set(names)]
} finally {
await this.closeConnection()
}

View file

@ -103,7 +103,7 @@ export default async (ctx: UserCtx, next: any) => {
userId,
globalId,
roleId,
role: await roles.getRole(roleId),
role: await roles.getRole(roleId, { defaultPublic: true }),
}
}

View file

@ -1,4 +1,4 @@
import { db as dbCore, objectStore } from "@budibase/backend-core"
import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
import { budibaseTempDir } from "../../../utilities/budibaseDir"
import { streamFile, createTempFolder } from "../../../utilities/fileSystem"
import { ObjectStoreBuckets } from "../../../constants"
@ -18,7 +18,8 @@ import { join } from "path"
import env from "../../../environment"
const uuid = require("uuid/v4")
const tar = require("tar")
import tar from "tar"
const MemoryStream = require("memorystream")
interface DBDumpOpts {
@ -30,16 +31,18 @@ interface ExportOpts extends DBDumpOpts {
tar?: boolean
excludeRows?: boolean
excludeLogs?: boolean
encryptPassword?: string
}
function tarFilesToTmp(tmpDir: string, files: string[]) {
const exportFile = join(budibaseTempDir(), `${uuid()}.tar.gz`)
const fileName = `${uuid()}.tar.gz`
const exportFile = join(budibaseTempDir(), fileName)
tar.create(
{
sync: true,
gzip: true,
file: exportFile,
recursive: true,
noDirRecurse: false,
cwd: tmpDir,
},
files
@ -124,6 +127,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
)
}
}
const downloadedPath = join(tmpPath, appPath)
if (fs.existsSync(downloadedPath)) {
const allFiles = fs.readdirSync(downloadedPath)
@ -141,12 +145,27 @@ export async function exportApp(appId: string, config?: ExportOpts) {
filter: defineFilter(config?.excludeRows, config?.excludeLogs),
exportPath: dbPath,
})
if (config?.encryptPassword) {
for (let file of fs.readdirSync(tmpPath)) {
const path = join(tmpPath, file)
await encryption.encryptFile(
{ dir: tmpPath, filename: file },
config.encryptPassword
)
fs.rmSync(path)
}
}
// if tar requested, return where the tarball is
if (config?.tar) {
// now the tmpPath contains both the DB export and attachments, tar this
const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
// cleanup the tmp export files as tarball returned
fs.rmSync(tmpPath, { recursive: true, force: true })
return tarPath
}
// tar not requested, return the directory where the export is
@ -161,11 +180,20 @@ export async function exportApp(appId: string, config?: ExportOpts) {
* @param {boolean} excludeRows Flag to state whether the export should include data.
* @returns {*} a readable stream of the backup which is written in real time
*/
export async function streamExportApp(appId: string, excludeRows: boolean) {
export async function streamExportApp({
appId,
excludeRows,
encryptPassword,
}: {
appId: string
excludeRows: boolean
encryptPassword?: string
}) {
const tmpPath = await exportApp(appId, {
excludeRows,
excludeLogs: true,
tar: true,
encryptPassword,
})
return streamFile(tmpPath)
}

View file

@ -1,4 +1,4 @@
import { db as dbCore, objectStore } from "@budibase/backend-core"
import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
import { Database, Row } from "@budibase/types"
import { getAutomationParams, TABLE_ROW_PREFIX } from "../../../db/utils"
import { budibaseTempDir } from "../../../utilities/budibaseDir"
@ -20,6 +20,7 @@ type TemplateType = {
file?: {
type: string
path: string
password?: string
}
key?: string
}
@ -123,6 +124,22 @@ export function untarFile(file: { path: string }) {
return tmpPath
}
async function decryptFiles(path: string, password: string) {
try {
for (let file of fs.readdirSync(path)) {
const inputPath = join(path, file)
const outputPath = inputPath.replace(/\.enc$/, "")
await encryption.decryptFile(inputPath, outputPath, password)
fs.rmSync(inputPath)
}
} catch (err: any) {
if (err.message === "incorrect header check") {
throw new Error("File cannot be imported")
}
throw err
}
}
export function getGlobalDBFile(tmpPath: string) {
return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
}
@ -143,6 +160,9 @@ export async function importApp(
template.file && fs.lstatSync(template.file.path).isDirectory()
if (template.file && (isTar || isDirectory)) {
const tmpPath = isTar ? untarFile(template.file) : template.file.path
if (isTar && template.file.password) {
await decryptFiles(tmpPath, template.file.password)
}
const contents = fs.readdirSync(tmpPath)
// have to handle object import
if (contents.length) {

View file

@ -164,5 +164,6 @@ export function mergeConfigs(update: Datasource, old: Datasource) {
delete update.config[key]
}
}
return update
}

View file

@ -9,7 +9,7 @@ import {
env as coreEnv,
} from "@budibase/backend-core"
import { updateAppRole } from "./global"
import { BBContext, User } from "@budibase/types"
import { BBContext, User, EmailInvite } from "@budibase/types"
export function request(ctx?: BBContext, request?: any) {
if (!request.headers) {
@ -65,15 +65,25 @@ async function checkResponse(
}
// have to pass in the tenant ID as this could be coming from an automation
export async function sendSmtpEmail(
to: string,
from: string,
subject: string,
contents: string,
cc: string,
bcc: string,
export async function sendSmtpEmail({
to,
from,
subject,
contents,
cc,
bcc,
automation,
invite,
}: {
to: string
from: string
subject: string
contents: string
cc: string
bcc: string
automation: boolean
) {
invite?: EmailInvite
}) {
// tenant ID will be set in header
const response = await fetch(
checkSlashesInUrl(env.WORKER_URL + `/api/global/email/send`),
@ -88,6 +98,7 @@ export async function sendSmtpEmail(
bcc,
purpose: "custom",
automation,
invite,
},
})
)
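A call sketch for the refactored helper (addresses and times are placeholders): the positional arguments become a single object, and the optional invite block is forwarded to the worker as-is.

await sendSmtpEmail({
  to: "user@example.com",
  from: "noreply@example.com",
  subject: "Kickoff call",
  contents: "<p>See you there</p>",
  cc: "",
  bcc: "",
  automation: true,
  // omit `invite` to send a plain email with no calendar attachment
  invite: {
    startTime: new Date("2023-06-20T09:00:00.000Z"),
    endTime: new Date("2023-06-20T09:30:00.000Z"),
    summary: "Kickoff call",
  },
})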

View file

@ -4,3 +4,42 @@ export function unreachable(
) {
throw new Error(message)
}
export async function parallelForeach<T>(
items: T[],
task: (item: T) => Promise<void>,
maxConcurrency: number
): Promise<void> {
const promises: Promise<void>[] = []
let index = 0
const processItem = async (item: T) => {
try {
await task(item)
} finally {
processNext()
}
}
const processNext = () => {
if (index >= items.length) {
// No more items to process
return
}
const item = items[index]
index++
const promise = processItem(item)
promises.push(promise)
if (promises.length >= maxConcurrency) {
Promise.race(promises).then(processNext)
} else {
processNext()
}
}
processNext()
await Promise.all(promises)
}
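For illustration, the same pattern the Google Sheets integration uses earlier in this diff — run one task per item while never exceeding the concurrency cap (the `sheets` array is assumed here):

// Fetch header rows for every sheet, with at most 10 requests in flight at once
await parallelForeach(
  sheets,
  async sheet => {
    await sheet.getRows({ limit: 0, offset: 0 })
  },
  10
)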

View file

@ -12,6 +12,7 @@ export interface UpdateDatasourceResponse {
export interface CreateDatasourceRequest {
datasource: Datasource
fetchSchema?: boolean
tablesFilter: string[]
}
export interface VerifyDatasourceRequest {

View file

@ -1,5 +1,6 @@
import { Document } from "../document"
import { EventEmitter } from "events"
import { User } from "../global"
export enum AutomationIOType {
OBJECT = "object",
@ -8,6 +9,7 @@ export enum AutomationIOType {
NUMBER = "number",
ARRAY = "array",
JSON = "json",
DATE = "date",
}
export enum AutomationCustomIOType {
@ -66,6 +68,33 @@ export enum AutomationActionStepId {
integromat = "integromat",
}
export interface EmailInvite {
startTime: Date
endTime: Date
summary: string
location?: string
url?: string
}
export interface SendEmailOpts {
// workspaceId If finer grain controls being used then this will lookup config for workspace.
workspaceId?: string
// user If sending to an existing user the object can be provided, this is used in the context.
user: User
// from If sending from an address that is not what is configured in the SMTP config.
from?: string
// contents If sending a custom email then can supply contents which will be added to it.
contents?: string
// subject A custom subject can be specified if the config one is not desired.
subject?: string
// info Pass in a structure of information to be stored alongside the invitation.
info?: any
cc?: boolean
bcc?: boolean
automation?: boolean
invite?: EmailInvite
}
export const AutomationStepIdArray = [
...Object.values(AutomationActionStepId),
...Object.values(AutomationTriggerStepId),
@ -90,6 +119,7 @@ interface BaseIOStructure {
customType?: AutomationCustomIOType
title?: string
description?: string
dependsOn?: string
enum?: string[]
pretty?: string[]
properties?: {

View file

@ -47,12 +47,14 @@
"@techpass/passport-openidconnect": "0.3.2",
"@types/global-agent": "2.1.1",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"dd-trace": "3.13.2",
"dotenv": "8.6.0",
"elastic-apm-node": "3.38.0",
"global-agent": "3.0.0",
"got": "11.8.3",
"ical-generator": "4.1.0",
"joi": "17.6.0",
"koa": "2.13.4",
"koa-body": "4.2.0",

View file

@ -140,7 +140,6 @@ export const datasourcePreAuth = async (ctx: any, next: any) => {
{
provider,
appId: ctx.query.appId,
datasourceId: ctx.query.datasourceId,
},
Cookie.DatasourceAuth
)

View file

@ -14,6 +14,7 @@ export async function sendEmail(ctx: BBContext) {
cc,
bcc,
automation,
invite,
} = ctx.request.body
let user
if (userId) {
@ -29,6 +30,7 @@ export async function sendEmail(ctx: BBContext) {
cc,
bcc,
automation,
invite,
})
ctx.body = {
...response,

View file

@ -38,7 +38,7 @@ const MAX_USERS_UPLOAD_LIMIT = 1000
export const save = async (ctx: UserCtx<User, SaveUserResponse>) => {
try {
const currentUserId = ctx.user._id
const currentUserId = ctx.user?._id
const requestUser = ctx.request.body
const user = await userSdk.save(requestUser, { currentUserId })

View file

@ -4,28 +4,11 @@ import { getTemplateByPurpose, EmailTemplates } from "../constants/templates"
import { getSettingsTemplateContext } from "./templates"
import { processString } from "@budibase/string-templates"
import { getResetPasswordCode, getInviteCode } from "./redis"
import { User, SMTPInnerConfig } from "@budibase/types"
import { User, SendEmailOpts, SMTPInnerConfig } from "@budibase/types"
import { configs } from "@budibase/backend-core"
import ical from "ical-generator"
const nodemailer = require("nodemailer")
type SendEmailOpts = {
// workspaceId If finer grain controls being used then this will lookup config for workspace.
workspaceId?: string
// user If sending to an existing user the object can be provided, this is used in the context.
user: User
// from If sending from an address that is not what is configured in the SMTP config.
from?: string
// contents If sending a custom email then can supply contents which will be added to it.
contents?: string
// subject A custom subject can be specified if the config one is not desired.
subject?: string
// info Pass in a structure of information to be stored alongside the invitation.
info?: any
cc?: boolean
bcc?: boolean
automation?: boolean
}
const TEST_MODE = env.ENABLE_EMAIL_TEST_MODE && env.isDev()
const TYPE = TemplateType.EMAIL
@ -200,6 +183,26 @@ export async function sendEmail(
context
)
}
if (opts?.invite) {
const calendar = ical({
name: "Invite",
})
calendar.createEvent({
start: opts.invite.startTime,
end: opts.invite.endTime,
summary: opts.invite.summary,
location: opts.invite.location,
url: opts.invite.url,
})
message = {
...message,
icalEvent: {
method: "request",
content: calendar.toString(),
},
}
}
const response = await transport.sendMail(message)
if (TEST_MODE) {
console.log("Test email URL: " + nodemailer.getTestMessageUrl(response))

View file

@ -10,7 +10,8 @@
},
"scripts": {
"setup": "yarn && node scripts/createEnv.js",
"test": "jest --runInBand --json --outputFile=testResults.json",
"user": "yarn && node scripts/createEnv.js && node scripts/createUser.js",
"test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
"test:watch": "yarn run test --watch",
"test:debug": "DEBUG=1 yarn run test",
"test:notify": "node scripts/testResultsWebhook",

View file

@ -0,0 +1,49 @@
const dotenv = require("dotenv")
const { join } = require("path")
const fs = require("fs")
const fetch = require("node-fetch")
function getVarFromDotEnv(path, varName) {
const parsed = dotenv.parse(fs.readFileSync(path))
return parsed[varName]
}
async function createUser() {
const serverPath = join(__dirname, "..", "..", "packages", "server", ".env")
const qaCorePath = join(__dirname, "..", ".env")
const apiKey = getVarFromDotEnv(serverPath, "INTERNAL_API_KEY")
const username = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_EMAIL")
const password = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_PASSWORD")
const url = getVarFromDotEnv(qaCorePath, "BUDIBASE_URL")
const resp = await fetch(`${url}/api/public/v1/users`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"x-budibase-api-key": apiKey,
},
body: JSON.stringify({
email: username,
password,
builder: {
global: true,
},
admin: {
global: true,
},
roles: {},
}),
})
if (resp.status !== 200) {
throw new Error(await resp.text())
} else {
return await resp.json()
}
}
createUser()
.then(() => {
console.log("User created - ready to use")
})
.catch(err => {
console.error("Failed to create user - ", err)
})

View file

@ -15,6 +15,12 @@ async function generateReport() {
return JSON.parse(report)
}
const env = process.argv.slice(2)[0]
if (!env) {
throw new Error("environment argument is required")
}
async function discordResultsNotification(report) {
const {
numTotalTestSuites,
@ -39,8 +45,8 @@ async function discordResultsNotification(report) {
content: `**Nightly Tests Status**: ${OUTCOME}`,
embeds: [
{
title: "Budi QA Bot",
description: `Nightly Tests`,
title: `Budi QA Bot - ${env}`,
description: `API Integration Tests`,
url: GITHUB_ACTIONS_RUN_URL,
color: OUTCOME === "success" ? 3066993 : 15548997,
timestamp: new Date(),

View file

@ -67,11 +67,12 @@ export default class AccountInternalAPIClient {
}
const message = `${method} ${url} - ${response.status}`
const isDebug = process.env.LOG_LEVEL === "debug"
if (response.status > 499) {
console.error(message, data)
} else if (response.status >= 400) {
console.warn(message, data)
} else {
} else if (isDebug) {
console.debug(message, data)
}

View file

@ -60,8 +60,16 @@ export default class AccountAPI {
}
async delete(accountID: string) {
const [response, json] = await this.client.del(`/api/accounts/${accountID}`)
expect(response).toHaveStatusCode(200)
const [response, json] = await this.client.del(
`/api/accounts/${accountID}`,
{
internal: true,
}
)
// can't use expect here due to use in global teardown
if (response.status !== 204) {
throw new Error(`Could not delete accountId=${accountID}`)
}
return response
}
}

View file

@ -93,7 +93,7 @@ describe("datasource validators", () => {
const result = await integration.testConnection()
expect(result).toEqual({
connected: false,
error: "Error: getaddrinfo ENOTFOUND http",
error: "getaddrinfo ENOTFOUND http",
})
})
})

View file

@ -1,4 +1,5 @@
import { GenericContainer } from "testcontainers"
import postgres from "../../../../packages/server/src/integrations/postgres"
jest.unmock("pg")

View file

@ -58,11 +58,12 @@ class BudibaseInternalAPIClient {
}
const message = `${method} ${url} - ${response.status}`
const isDebug = process.env.LOG_LEVEL === "debug"
if (response.status > 499) {
console.error(message, data)
} else if (response.status >= 400) {
console.warn(message, data)
} else {
} else if (isDebug) {
console.debug(message, data)
}

View file

@ -2,7 +2,7 @@ import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Query } from "@budibase/types"
describe("Internal API - Data Sources: MongoDB", () => {
xdescribe("Internal API - Data Sources: MongoDB", () => {
const config = new TestConfiguration()
beforeAll(async () => {

View file

@ -1,3 +1,4 @@
process.env.DISABLE_PINO_LOGGER = "1"
import { DEFAULT_TENANT_ID, logging } from "@budibase/backend-core"
import { AccountInternalAPI } from "../account-api"
import * as fixtures from "../internal-api/fixtures"

View file

@ -10,6 +10,7 @@ const API_OPTS: APIRequestOpts = { doExpect: false }
async function deleteAccount() {
// @ts-ignore
const accountID = global.qa.accountId
// can't run 'expect' blocks in teardown
await accountsApi.accounts.delete(accountID)
}

View file

@ -57,11 +57,12 @@ class BudibasePublicAPIClient {
}
const message = `${method} ${url} - ${response.status}`
const isDebug = process.env.LOG_LEVEL === "debug"
if (response.status > 499) {
console.error(message, data)
} else if (response.status >= 400) {
console.warn(message, data)
} else {
} else if (isDebug) {
console.debug(message, data)
}

View file

@ -8,10 +8,10 @@ const path = require("path")
const { build } = require("esbuild")
const { default: NodeResolve } = require("@esbuild-plugins/node-resolve")
const {
default: TsconfigPathsPlugin,
} = require("@esbuild-plugins/tsconfig-paths")
const { nodeExternalsPlugin } = require("esbuild-node-externals")
var argv = require("minimist")(process.argv.slice(2))
@ -25,32 +25,28 @@ function runBuild(entry, outfile) {
minify: !isDev,
sourcemap: isDev,
tsconfig,
plugins: [
TsconfigPathsPlugin({ tsconfig }),
NodeResolve({
extensions: [".ts", ".js"],
onResolved: resolved => {
if (resolved.includes("node_modules") && !resolved.includes("/@budibase/pro/")) {
return {
external: true,
}
}
return resolved
},
}),
],
plugins: [TsconfigPathsPlugin({ tsconfig }), nodeExternalsPlugin()],
target: "node14",
preserveSymlinks: true,
loader: {
".svelte": "copy",
},
metafile: true,
external: [
"deasync",
"mock-aws-s3",
"nock",
"pino",
"koa-pino-logger",
"bull",
],
}
build({
...sharedConfig,
platform: "node",
outfile,
}).then(() => {
}).then(result => {
glob(`${process.cwd()}/src/**/*.hbs`, {}, (err, files) => {
for (const file of files) {
fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`)
@ -61,6 +57,11 @@ function runBuild(entry, outfile) {
`Built successfully in ${(Date.now() - start) / 1000} seconds`
)
})
fs.writeFileSync(
`dist/${path.basename(outfile)}.meta.json`,
JSON.stringify(result.metafile)
)
})
}
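The metafile written above can be inspected offline; here is a minimal sketch using esbuild's analyzeMetafile helper, assuming the build has already produced dist/index.js.meta.json (the filename is illustrative).
const { analyzeMetafile } = require("esbuild")
const fs = require("fs")
async function reportBundle(metaPath) {
  const metafile = JSON.parse(fs.readFileSync(metaPath, "utf8"))
  // analyzeMetafile returns a human-readable per-input size breakdown
  console.log(await analyzeMetafile(metafile))
}
reportBundle("dist/index.js.meta.json").catch(err => {
  console.error("Failed to analyse metafile - ", err)
  process.exit(1)
})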

133
yarn.lock
View file

@ -2115,16 +2115,6 @@
pump "^3.0.0"
secure-json-parse "^2.1.0"
"@esbuild-plugins/node-resolve@^0.2.2":
version "0.2.2"
resolved "https://registry.yarnpkg.com/@esbuild-plugins/node-resolve/-/node-resolve-0.2.2.tgz#4f1b8d265a1b6e8b2438a03770239277687f0c17"
integrity sha512-+t5FdX3ATQlb53UFDBRb4nqjYBz492bIrnVWvpQHpzZlu9BQL5HasMZhqc409ygUwOWCXZhrWr6NyZ6T6Y+cxw==
dependencies:
"@types/resolve" "^1.17.1"
debug "^4.3.1"
escape-string-regexp "^4.0.0"
resolve "^1.19.0"
"@esbuild-plugins/tsconfig-paths@^0.1.2":
version "0.1.2"
resolved "https://registry.yarnpkg.com/@esbuild-plugins/tsconfig-paths/-/tsconfig-paths-0.1.2.tgz#1955de0a124ecf4364717a2fadbfbea876955232"
@ -2489,6 +2479,11 @@
minimatch "^3.0.4"
strip-json-comments "^3.1.1"
"@fontsource/source-sans-pro@^5.0.3":
version "5.0.3"
resolved "https://registry.yarnpkg.com/@fontsource/source-sans-pro/-/source-sans-pro-5.0.3.tgz#7d6e84a8169ba12fa5e6ce70757aa2ca7e74d855"
integrity sha512-mQnjuif/37VxwRloHZ+wQdoozd2VPWutbFSt1AuSkk7nFXIBQxHJLw80rgCF/osL0t7N/3Gx1V7UJuOX2zxzhQ==
"@fortawesome/fontawesome-common-types@6.3.0":
version "6.3.0"
resolved "https://registry.yarnpkg.com/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.3.0.tgz#51f734e64511dbc3674cd347044d02f4dd26e86b"
@ -3646,7 +3641,7 @@
dependencies:
"@lezer/common" "^1.0.0"
"@mapbox/node-pre-gyp@^1.0.0":
"@mapbox/node-pre-gyp@^1.0.10":
version "1.0.10"
resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz#8e6735ccebbb1581e5a7e652244cadc8a844d03c"
integrity sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==
@ -6120,11 +6115,6 @@
dependencies:
"@types/node" "*"
"@types/resolve@^1.17.1":
version "1.20.2"
resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975"
integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==
"@types/responselike@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29"
@ -6197,13 +6187,13 @@
dependencies:
"@types/node" "*"
"@types/tar@6.1.3":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.3.tgz#46a2ce7617950c4852dfd7e9cd41aa8161b9d750"
integrity sha512-YzDOr5kdAeqS8dcO6NTTHTMJ44MUCBDoLEIyPtwEn7PssKqUYL49R1iCVJPeiPzPlKi6DbH33eZkpeJ27e4vHg==
"@types/tar@6.1.5":
version "6.1.5"
resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.5.tgz#90ccb3b6a35430e7427410d50eed564e85feaaff"
integrity sha512-qm2I/RlZij5RofuY7vohTpYNaYcrSQlN2MyjucQc7ZweDwaEWkdN/EeNh6e9zjK6uEm6PwjdMXkcj05BxZdX1Q==
dependencies:
"@types/node" "*"
minipass "^3.3.5"
minipass "^4.0.0"
"@types/tern@*":
version "0.23.4"
@ -7671,13 +7661,13 @@ bcrypt-pbkdf@^1.0.0, bcrypt-pbkdf@^1.0.2:
dependencies:
tweetnacl "^0.14.3"
bcrypt@5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-5.0.1.tgz#f1a2c20f208e2ccdceea4433df0c8b2c54ecdf71"
integrity sha512-9BTgmrhZM2t1bNuDtrtIMVSmmxZBrJ71n8Wg+YgdjHuIWYF7SjjmCPZFB+/5i/o/PIeRpwVJR3P+NrpIItUjqw==
bcrypt@5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-5.1.0.tgz#bbb27665dbc400480a524d8991ac7434e8529e17"
integrity sha512-RHBS7HI5N5tEnGTmtR/pppX0mmDSBpQ4aCBsj7CEQfYXDcO74A8sIBYcJMuCsis2E81zDxeENYhv66oZwLiA+Q==
dependencies:
"@mapbox/node-pre-gyp" "^1.0.0"
node-addon-api "^3.1.0"
"@mapbox/node-pre-gyp" "^1.0.10"
node-addon-api "^5.0.0"
bcryptjs@2.4.3:
version "2.4.3"
@ -7989,12 +7979,10 @@ bson@*:
resolved "https://registry.yarnpkg.com/bson/-/bson-5.0.1.tgz#4cd3eeeabf6652ef0d6ab600f9a18212d39baac3"
integrity sha512-y09gBGusgHtinMon/GVbv1J6FrXhnr/+6hqLlSmEFzkz6PodqF6TxjyvfvY3AfO+oG1mgUtbC86xSbOlwvM62Q==
bson@^4.7.0:
version "4.7.2"
resolved "https://registry.yarnpkg.com/bson/-/bson-4.7.2.tgz#320f4ad0eaf5312dd9b45dc369cc48945e2a5f2e"
integrity sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==
dependencies:
buffer "^5.6.0"
bson@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/bson/-/bson-5.3.0.tgz#37b006df4cd91ed125cb686467c1dd6d4606b514"
integrity sha512-ukmCZMneMlaC5ebPHXIkP8YJzNl5DC41N5MAIvKDqLggdao342t4McltoJBQfQya/nHBWAcSsYRqlXPoQkTJag==
buffer-alloc-unsafe@^1.1.0:
version "1.1.0"
@ -8408,7 +8396,7 @@ chmodr@1.2.0:
resolved "https://registry.yarnpkg.com/chmodr/-/chmodr-1.2.0.tgz#720e96caa09b7f1cdbb01529b7d0ab6bc5e118b9"
integrity sha512-Y5uI7Iq/Az6HgJEL6pdw7THVd7jbVOTPwsmcPOBjQL8e3N+pz872kzK5QxYGEy21iRys+iHWV0UZQXDFJo1hyA==
chokidar@3.5.3, chokidar@^3.0.0, chokidar@^3.5.1, chokidar@^3.5.2:
chokidar@3.5.3, chokidar@^3.0.0, chokidar@^3.5.1, chokidar@^3.5.2, chokidar@^3.5.3:
version "3.5.3"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd"
integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==
@ -11000,6 +10988,14 @@ esbuild-netbsd-64@0.15.18:
resolved "https://registry.yarnpkg.com/esbuild-netbsd-64/-/esbuild-netbsd-64-0.15.18.tgz#ae75682f60d08560b1fe9482bfe0173e5110b998"
integrity sha512-98ukeCdvdX7wr1vUYQzKo4kQ0N2p27H7I11maINv73fVEXt2kyh4K4m9f35U1K43Xc2QGXlzAw0K9yoU7JUjOg==
esbuild-node-externals@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/esbuild-node-externals/-/esbuild-node-externals-1.7.0.tgz#f6d755c577aec1ffa8548b0a648f13df27551805"
integrity sha512-nfY3hxtO2anCTZ87LgfzCTfBuyG6de+NyiCNMF1mgrBufS0NgoYlBwF77HHuOInsJLxsAJf0BfLeV6ekZ3hRuA==
dependencies:
find-up "^5.0.0"
tslib "^2.4.1"
esbuild-openbsd-64@0.15.18:
version "0.15.18"
resolved "https://registry.yarnpkg.com/esbuild-openbsd-64/-/esbuild-openbsd-64-0.15.18.tgz#79591a90aa3b03e4863f93beec0d2bab2853d0a8"
@ -11852,7 +11848,7 @@ fast-glob@3.2.7:
merge2 "^1.3.0"
micromatch "^4.0.4"
fast-glob@^3.0.3:
fast-glob@^3.0.3, fast-glob@^3.2.11:
version "3.2.12"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80"
integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==
@ -13668,6 +13664,13 @@ husky@^8.0.3:
resolved "https://registry.yarnpkg.com/husky/-/husky-8.0.3.tgz#4936d7212e46d1dea28fef29bb3a108872cd9184"
integrity sha512-+dQSyqPh4x1hlO1swXBiNb2HzTDN1I2IGLQx1GrBuiqFJfoMrnZWwVmatvSiO+Iz8fBUnf+lekwNo4c2LlXItg==
ical-generator@4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/ical-generator/-/ical-generator-4.1.0.tgz#2a336c951864c5583a2aa715d16f2edcdfd2d90b"
integrity sha512-5GrFDJ8SAOj8cB9P1uEZIfKrNxSZ1R2eOQfZePL+CtdWh4RwNXWe8b0goajz+Hu37vcipG3RVldoa2j57Y20IA==
dependencies:
uuid-random "^1.3.2"
iconv-lite@0.4.24, iconv-lite@^0.4.15, iconv-lite@^0.4.24, iconv-lite@^0.4.5:
version "0.4.24"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
@ -18029,7 +18032,7 @@ minipass-sized@^1.0.3:
dependencies:
minipass "^3.0.0"
minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6, minipass@^3.3.5:
minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6:
version "3.3.6"
resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==
@ -18203,7 +18206,7 @@ moment@^2.29.4:
resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==
mongodb-connection-string-url@^2.5.3:
mongodb-connection-string-url@^2.6.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/mongodb-connection-string-url/-/mongodb-connection-string-url-2.6.0.tgz#57901bf352372abdde812c81be47b75c6b2ec5cf"
integrity sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==
@ -18211,15 +18214,14 @@ mongodb-connection-string-url@^2.5.3:
"@types/whatwg-url" "^8.2.1"
whatwg-url "^11.0.0"
mongodb@4.9:
version "4.9.1"
resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-4.9.1.tgz#0c769448228bcf9a6aa7d16daa3625b48312479e"
integrity sha512-ZhgI/qBf84fD7sI4waZBoLBNJYPQN5IOC++SBCiPiyhzpNKOxN/fi0tBHvH2dEC42HXtNEbFB0zmNz4+oVtorQ==
mongodb@5.6:
version "5.6.0"
resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-5.6.0.tgz#caff5278341bfc0f1ef6f394bb403d207de03d1e"
integrity sha512-z8qVs9NfobHJm6uzK56XBZF8XwM9H294iRnB7wNjF0SnY93si5HPziIJn+qqvUR5QOff/4L0gCD6SShdR/GtVQ==
dependencies:
bson "^4.7.0"
denque "^2.1.0"
mongodb-connection-string-url "^2.5.3"
socks "^2.7.0"
bson "^5.3.0"
mongodb-connection-string-url "^2.6.0"
socks "^2.7.1"
optionalDependencies:
saslprep "^1.0.3"
@ -18449,11 +18451,16 @@ node-abort-controller@^3.0.1:
resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.1.1.tgz#a94377e964a9a37ac3976d848cb5c765833b8548"
integrity sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==
node-addon-api@^3.1.0, node-addon-api@^3.2.1:
node-addon-api@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161"
integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==
node-addon-api@^5.0.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.1.0.tgz#49da1ca055e109a23d537e9de43c09cca21eb762"
integrity sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==
node-duration@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/node-duration/-/node-duration-1.0.4.tgz#3e94ecc0e473691c89c4560074503362071cecac"
@ -23093,7 +23100,7 @@ socks-proxy-agent@^7.0.0:
debug "^4.3.3"
socks "^2.6.2"
socks@^2.3.3, socks@^2.6.2, socks@^2.7.0:
socks@^2.3.3, socks@^2.6.2, socks@^2.7.1:
version "2.7.1"
resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55"
integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==
@ -24156,6 +24163,18 @@ tar@6.1.11:
mkdirp "^1.0.3"
yallist "^4.0.0"
tar@6.1.15:
version "6.1.15"
resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
dependencies:
chownr "^2.0.0"
fs-minipass "^2.0.0"
minipass "^5.0.0"
minizlib "^2.1.1"
mkdirp "^1.0.3"
yallist "^4.0.0"
tar@^6.1.11, tar@^6.1.2:
version "6.1.13"
resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b"
@ -24740,6 +24759,11 @@ tslib@^2.0.1, tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.0, tslib@^2.4.0:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.0.tgz#42bfed86f5787aeb41d031866c8f402429e0fddf"
integrity sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==
tslib@^2.4.1:
version "2.5.3"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913"
integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==
tsscmp@1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/tsscmp/-/tsscmp-1.0.6.tgz#85b99583ac3589ec4bfef825b5000aa911d605eb"
@ -25279,6 +25303,11 @@ utils-merge@1.0.1, utils-merge@1.x.x:
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==
uuid-random@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/uuid-random/-/uuid-random-1.3.2.tgz#96715edbaef4e84b1dcf5024b00d16f30220e2d0"
integrity sha512-UOzej0Le/UgkbWEO8flm+0y+G+ljUon1QWTEZOq1rnMAsxo2+SckbiZdKzAHHlVh6gJqI1TjC/xwgR50MuCrBQ==
uuid@3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
@ -25402,6 +25431,16 @@ vite-node@0.29.8:
picocolors "^1.0.0"
vite "^3.0.0 || ^4.0.0"
vite-plugin-static-copy@^0.16.0:
version "0.16.0"
resolved "https://registry.yarnpkg.com/vite-plugin-static-copy/-/vite-plugin-static-copy-0.16.0.tgz#2f65227037f17fc99c0782fd0b344e962935e69e"
integrity sha512-dMVEg5Z2SwYRgQnHZaeokvSKB4p/TOTf65JU4sP3U6ccSBsukqdtDOjpmT+xzTFHAA8WJjcS31RMLjUdWQCBzw==
dependencies:
chokidar "^3.5.3"
fast-glob "^3.2.11"
fs-extra "^11.1.0"
picocolors "^1.0.0"
"vite@^3.0.0 || ^4.0.0":
version "4.2.2"
resolved "https://registry.yarnpkg.com/vite/-/vite-4.2.2.tgz#014c30e5163844f6e96d7fe18fbb702236516dc6"