Merge branch 'develop' into feature/day-pass-pricing

Rory Powell 2022-09-15 08:55:33 +01:00
commit 193a262705
138 changed files with 1935 additions and 481 deletions


@ -65,10 +65,6 @@ http {
proxy_pass http://{{ address }}:4001;
}
location /preview {
proxy_pass http://{{ address }}:4001;
}
location /builder {
proxy_pass http://{{ address }}:3000;
rewrite ^/builder(.*)$ /builder/$1 break;
@ -84,11 +80,20 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
location /vite {
location /vite/ {
proxy_pass http://{{ address }}:3000;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://{{ address }}:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;


@ -88,10 +88,6 @@ http {
proxy_pass http://$apps:4002;
}
location /preview {
proxy_pass http://$apps:4002;
}
location = / {
proxy_pass http://$apps:4002;
}
@ -162,6 +158,15 @@ http {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;


@ -4,9 +4,9 @@ echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persisent data & SSH on port 2222
DATA_DIR=/home
mkdir -p $DATA_DIR/{search,minio,couchdb}
mkdir -p $DATA_DIR/couchdb/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couchdb/
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
@ -16,5 +16,4 @@ if [[ "${TARGETBUILD}" = "aas" ]]; then
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi


@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = DATA_DIR/couchdb/dbs
view_index_dir = DATA_DIR/couchdb/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views


@ -66,6 +66,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;


@ -36,10 +36,10 @@ fi
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
# make these directories in runner, incase of mount
mkdir -p ${DATA_DIR}/couchdb/{dbs,views}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couchdb
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server ${DATA_DIR}/minio &


@ -1,5 +1,5 @@
{
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"npmClient": "yarn",
"packages": [
"packages/*"


@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,11 +20,12 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "1.3.12-alpha.3",
"@budibase/types": "1.3.15-alpha.9",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",


@ -0,0 +1,3 @@
module.exports = {
...require("./src/plugin"),
}


@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { DocumentType } from "./constants"
class Replication {
source: any
@ -53,6 +54,14 @@ class Replication {
return this.replication
}
appReplicateOpts() {
return {
filter: (doc: any) => {
return doc._id !== DocumentType.APP_METADATA
},
}
}
/**
* Rollback the target DB back to the state of the source DB
*/
@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}


@ -254,8 +254,17 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
switch (dev) {
case true:
return devAppIds
case false:
return prodAppIds
default:
return appDbNames
}
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
getAppMetadata(app)


@ -19,6 +19,7 @@ if (!LOADED && isDev() && !isTest()) {
const env = {
isTest,
isDev,
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,


@ -1,5 +1,5 @@
const bcrypt = require("bcrypt")
const env = require("./environment")
const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
const { v4 } = require("uuid")
const SALT_ROUNDS = env.SALT_ROUNDS || 10


@ -17,6 +17,7 @@ import * as dbConstants from "./db/constants"
import * as logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
import plugins from "./plugin"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -55,6 +56,7 @@ const core = {
errors,
logging,
roles,
plugins,
...pino,
...errorClasses,
middleware,


@ -307,9 +307,13 @@ export const uploadDirectory = async (
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {
exports.downloadTarballDirect = async (
url: string,
path: string,
headers = {}
) => {
path = sanitizeKey(path)
const response = await fetch(url)
const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}


@ -0,0 +1,7 @@
import * as utils from "./utils"
const pkg = {
...utils,
}
export = pkg


@ -1,5 +1,8 @@
const { PluginTypes } = require("./constants")
const { DatasourceFieldType, QueryType } = require("@budibase/types")
const {
DatasourceFieldType,
QueryType,
PluginType,
} = require("@budibase/types")
const joi = require("joi")
const DATASOURCE_TYPES = [
@ -78,11 +81,11 @@ function validateDatasource(schema) {
}
exports.validate = schema => {
switch (schema.type) {
case PluginTypes.COMPONENT:
switch (schema?.type) {
case PluginType.COMPONENT:
validateComponent(schema)
break
case PluginTypes.DATASOURCE:
case PluginType.DATASOURCE:
validateDatasource(schema)
break
default:


@ -1377,6 +1377,11 @@ bcrypt@5.0.1:
"@mapbox/node-pre-gyp" "^1.0.0"
node-addon-api "^3.1.0"
bcryptjs@2.4.3:
version "2.4.3"
resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb"
integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==
binary-extensions@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"


@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "1.3.12-alpha.3",
"@budibase/string-templates": "1.3.15-alpha.9",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",


@ -4,10 +4,15 @@
export let size = "M"
export let tooltip = ""
export let muted
</script>
<TooltipWrapper {tooltip} {size}>
<label for="" class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}>
<label
class:muted
for=""
class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}
>
<slot />
</label>
</TooltipWrapper>
@ -17,4 +22,8 @@
padding: 0;
white-space: nowrap;
}
.muted {
opacity: 0.5;
}
</style>


@ -24,7 +24,6 @@
export let secondaryAction = undefined
export let secondaryButtonWarning = false
export let dataCy = null
const { hide, cancel } = getContext(Context.Modal)
let loading = false
$: confirmDisabled = disabled || loading
@ -88,12 +87,11 @@
<section class="spectrum-Dialog-content content-grid">
<slot />
</section>
{#if showCancelButton || showConfirmButton}
{#if showCancelButton || showConfirmButton || $$slots.footer}
<div
class="spectrum-ButtonGroup spectrum-Dialog-buttonGroup spectrum-Dialog-buttonGroup--noFooter"
>
<slot name="footer" />
{#if showSecondaryButton && secondaryButtonText && secondaryAction}
<div class="secondary-action">
<Button


@ -10,6 +10,7 @@
export let noHorizPadding = false
export let quiet = false
export let emphasized = false
export let onTop = false
export let size = "M"
let thisSelected = undefined
@ -75,6 +76,7 @@
bind:this={container}
class:spectrum-Tabs--quiet={quiet}
class:noHorizPadding
class:onTop
class:spectrum-Tabs--vertical={vertical}
class:spectrum-Tabs--horizontal={!vertical}
class="spectrum-Tabs spectrum-Tabs--size{size}"
@ -122,4 +124,7 @@
.noPadding {
margin: 0;
}
.onTop {
z-index: 100;
}
</style>


@ -779,7 +779,7 @@ Cypress.Commands.add("navigateToAutogeneratedModal", () => {
Cypress.Commands.add("selectExternalDatasource", datasourceName => {
// Navigates to Data Section
cy.navigateToDataSection()
// Open Data Source modal
// Open Datasource modal
cy.get(".nav").within(() => {
cy.get(".add-button").click()
})


@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -69,10 +69,10 @@
}
},
"dependencies": {
"@budibase/bbui": "1.3.12-alpha.3",
"@budibase/client": "1.3.12-alpha.3",
"@budibase/frontend-core": "1.3.12-alpha.3",
"@budibase/string-templates": "1.3.12-alpha.3",
"@budibase/bbui": "1.3.15-alpha.9",
"@budibase/client": "1.3.15-alpha.9",
"@budibase/frontend-core": "1.3.15-alpha.9",
"@budibase/string-templates": "1.3.15-alpha.9",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",


@ -1,6 +1,7 @@
<script>
import { Button, Select, Input, Label } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import { onMount, createEventDispatcher } from "svelte"
import { flags } from "stores/backend"
const dispatch = createEventDispatcher()
export let value
@ -29,11 +30,16 @@
label: "Every Night at Midnight",
value: "0 0 * * *",
},
{
]
onMount(() => {
if (!$flags.cloud) {
CRON_EXPRESSIONS.push({
label: "Every Budibase Reboot",
value: "@reboot",
},
]
})
}
})
</script>
<div class="block-field">


@ -8,6 +8,7 @@
notifications,
Modal,
Table,
Toggle,
} from "@budibase/bbui"
import { datasources, integrations, tables } from "stores/backend"
import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
@ -15,6 +16,7 @@
import ArrayRenderer from "components/common/renderers/ArrayRenderer.svelte"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import { goto } from "@roxi/routify"
import ValuesList from "components/common/ValuesList.svelte"
export let datasource
export let save
@ -31,6 +33,8 @@
let createExternalTableModal
let selectedFromRelationship, selectedToRelationship
let confirmDialog
let specificTables = null
let requireSpecificTables = false
$: integration = datasource && $integrations[datasource.source]
$: plusTables = datasource?.plus
@ -87,7 +91,7 @@
async function updateDatasourceSchema() {
try {
await datasources.updateSchema(datasource)
await datasources.updateSchema(datasource, specificTables)
notifications.success(`Datasource ${name} tables updated successfully.`)
await tables.fetch()
} catch (error) {
@ -150,6 +154,19 @@
warning={false}
title="Confirm table fetch"
>
<Toggle
bind:value={requireSpecificTables}
on:change={e => {
requireSpecificTables = e.detail
specificTables = null
}}
thin
text="Fetch listed tables only (one per line)"
/>
{#if requireSpecificTables}
<ValuesList label="" bind:values={specificTables} />
{/if}
<br />
<Body>
If you have fetched tables from this database before, this action may
overwrite any changes you made after your initial fetch.


@ -23,6 +23,7 @@
const dispatch = createEventDispatcher()
let bindingDrawer
let valid = true
let currentVal = value
$: readableValue = runtimeToReadableBinding(bindings, value)
$: tempValue = readableValue
@ -30,11 +31,17 @@
const saveBinding = () => {
onChange(tempValue)
onBlur()
bindingDrawer.hide()
}
const onChange = value => {
dispatch("change", readableToRuntimeBinding(bindings, value))
currentVal = readableToRuntimeBinding(bindings, value)
dispatch("change", currentVal)
}
const onBlur = () => {
dispatch("blur", currentVal)
}
</script>
@ -45,6 +52,7 @@
readonly={isJS}
value={isJS ? "(JavaScript function)" : readableValue}
on:change={event => onChange(event.detail)}
on:blur={onBlur}
{placeholder}
{updateOnChange}
/>


@ -52,7 +52,7 @@
)
newBlock.inputs = {
fields: Object.keys(parameters.fields).reduce((fields, key) => {
fields: Object.keys(parameters.fields ?? {}).reduce((fields, key) => {
fields[key] = "string"
return fields
}, {}),


@ -79,7 +79,7 @@
</Body>
<div class="params">
<Label small>Data Source</Label>
<Label small>Datasource</Label>
<Select
bind:value={parameters.providerId}
options={providerOptions}


@ -71,13 +71,13 @@
<div class="root">
<Body size="S">
Choosing a Data Source will automatically use the data it provides, but it's
Choosing a Datasource will automatically use the data it provides, but it's
optional.<br />
You can always add or override fields manually.
</Body>
<div class="params">
<Label small>Data Source</Label>
<Label small>Datasource</Label>
<Select
bind:value={parameters.providerId}
options={providerOptions}


@ -107,7 +107,7 @@
placeholder={keyPlaceholder}
readonly={readOnly}
bind:value={field.name}
on:change={changed}
on:blur={changed}
/>
{#if options}
<Select bind:value={field.value} on:change={changed} {options} />
@ -115,7 +115,10 @@
<DrawerBindableInput
{bindings}
placeholder="Value"
on:change={e => (field.value = e.detail)}
on:blur={e => {
field.value = e.detail
changed()
}}
disabled={readOnly}
value={field.value}
allowJS={false}
@ -127,7 +130,7 @@
placeholder={valuePlaceholder}
readonly={readOnly}
bind:value={field.value}
on:change={changed}
on:blur={changed}
/>
{/if}
{#if toggle}


@ -1,16 +1,24 @@
<script>
import { ModalContent, Toggle } from "@budibase/bbui"
import { ModalContent, Toggle, Body } from "@budibase/bbui"
export let app
export let published
let excludeRows = false
$: title = published ? "Export published app" : "Export latest app"
$: confirmText = published ? "Export published" : "Export latest"
const exportApp = () => {
const id = app.deployed ? app.prodId : app.devId
const id = published ? app.prodId : app.devId
const appName = encodeURIComponent(app.name)
window.location = `/api/backups/export?appId=${id}&appname=${appName}&excludeRows=${excludeRows}`
}
</script>
<ModalContent title={"Export"} confirmText={"Export"} onConfirm={exportApp}>
<ModalContent {title} {confirmText} onConfirm={exportApp}>
<Body
>Apps can be exported with or without data that is within internal tables -
select this below.</Body
>
<Toggle text="Exclude Rows" bind:value={excludeRows} />
</ModalContent>


@ -64,3 +64,10 @@ export const PlanType = {
BUSINESS: "business",
ENTERPRISE: "enterprise",
}
export const PluginSource = {
URL: "URL",
NPM: "NPM",
GITHUB: "Github",
FILE: "File Upload",
}


@ -46,7 +46,7 @@ export function buildQueryString(obj) {
if (str !== "") {
str += "&"
}
str += `${key}=${value || ""}`
str += `${key}=${encodeURIComponent(value || "")}`
}
}
return str


@ -28,25 +28,25 @@
import { onMount } from "svelte"
import restUtils from "helpers/data/utils"
import {
RestBodyTypes as bodyTypes,
SchemaTypeOptions,
PaginationLocations,
PaginationTypes,
RawRestBodyTypes,
RestBodyTypes as bodyTypes,
SchemaTypeOptions,
} from "constants/backend"
import JSONPreview from "components/integration/JSONPreview.svelte"
import AccessLevelSelect from "components/integration/AccessLevelSelect.svelte"
import DynamicVariableModal from "../../_components/DynamicVariableModal.svelte"
import Placeholder from "assets/bb-spaceship.svg"
import { cloneDeep } from "lodash/fp"
import { RawRestBodyTypes } from "constants/backend"
import {
getRestBindings,
toBindingsArray,
runtimeToReadableBinding,
readableToRuntimeBinding,
runtimeToReadableMap,
readableToRuntimeMap,
runtimeToReadableBinding,
runtimeToReadableMap,
toBindingsArray,
} from "builderStore/dataBinding"
let query, datasource
@ -95,7 +95,7 @@
$: runtimeUrlQueries = readableToRuntimeMap(mergedBindings, breakQs)
function getSelectedQuery() {
const cloneQuery = cloneDeep(
return cloneDeep(
$queries.list.find(q => q._id === $queries.selected) || {
datasourceId: $params.selectedDatasource,
parameters: [],
@ -107,7 +107,6 @@
queryVerb: "read",
}
)
return cloneQuery
}
function checkQueryName(inputUrl = null) {
@ -121,14 +120,15 @@
if (!base) {
return base
}
const qs = restUtils.buildQueryString(
let qs = restUtils.buildQueryString(
runtimeToReadableMap(mergedBindings, qsObj)
)
let newUrl = base
if (base.includes("?")) {
newUrl = base.split("?")[0]
const split = base.split("?")
newUrl = split[0]
}
return qs.length > 0 ? `${newUrl}?${qs}` : newUrl
return qs.length === 0 ? newUrl : `${newUrl}?${qs}`
}
function buildQuery() {
@ -314,6 +314,25 @@
}
}
const paramsChanged = evt => {
breakQs = {}
for (let param of evt.detail) {
breakQs[param.name] = param.value
}
}
const urlChanged = evt => {
breakQs = {}
const qs = evt.target.value.split("?")[1]
if (qs && qs.length > 0) {
const parts = qs.split("&")
for (let part of parts) {
const [key, value] = part.split("=")
breakQs[key] = value
}
}
}
onMount(async () => {
query = getSelectedQuery()
@ -426,7 +445,11 @@
/>
</div>
<div class="url">
<Input bind:value={url} placeholder="http://www.api.com/endpoint" />
<Input
on:blur={urlChanged}
bind:value={url}
placeholder="http://www.api.com/endpoint"
/>
</div>
<Button primary disabled={!url} on:click={runQuery}>Send</Button>
<Button
@ -456,13 +479,16 @@
/>
</Tab>
<Tab title="Params">
{#key breakQs}
<KeyValueBuilder
bind:object={breakQs}
on:change={paramsChanged}
object={breakQs}
name="param"
headings
bindings={mergedBindings}
bindingDrawerLeft="260px"
/>
{/key}
</Tab>
<Tab title="Headers">
<KeyValueBuilder


@ -86,6 +86,7 @@
: [],
isBudibaseEvent: true,
usedPlugins: $store.usedPlugins,
location: window.location,
}
// Refresh the preview when required
@ -291,7 +292,7 @@
<iframe
title="componentPreview"
bind:this={iframe}
src="/preview"
src="/app/preview"
class:hidden={loading || error}
class:tablet={$store.previewDevice === "tablet"}
class:mobile={$store.previewDevice === "mobile"}


@ -54,6 +54,8 @@
: undefined,
{ title: "Auth", href: "/builder/portal/manage/auth" },
{ title: "Email", href: "/builder/portal/manage/email" },
{ title: "Plugins", href: "/builder/portal/manage/plugins" },
{
title: "Organisation",
href: "/builder/portal/settings/organisation",


@ -15,7 +15,6 @@
import Spinner from "components/common/Spinner.svelte"
import CreateAppModal from "components/start/CreateAppModal.svelte"
import UpdateAppModal from "components/start/UpdateAppModal.svelte"
import ExportAppModal from "components/start/ExportAppModal.svelte"
import { store, automationStore } from "builderStore"
import { API } from "api"
@ -33,7 +32,6 @@
let selectedApp
let creationModal
let updatingModal
let exportModal
let creatingApp = false
let loaded = $apps?.length || $templates?.length
let searchTerm = ""
@ -407,10 +405,6 @@
<UpdateAppModal app={selectedApp} />
</Modal>
<Modal bind:this={exportModal} padding={false} width="600px">
<ExportAppModal app={selectedApp} />
</Modal>
<style>
.appTable {
border-top: var(--border-light);


@ -0,0 +1,127 @@
<script>
import {
ModalContent,
Label,
Input,
Select,
Dropzone,
Body,
notifications,
} from "@budibase/bbui"
import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
import { plugins } from "stores/portal"
import { PluginSource } from "constants"
function opt(name, optional) {
if (optional) {
return { name, optional }
}
return { name }
}
let authOptions = {
[PluginSource.URL]: [opt("URL"), opt("Headers", true)],
[PluginSource.NPM]: [opt("URL")],
[PluginSource.GITHUB]: [opt("URL"), opt("Github Token", true)],
[PluginSource.FILE]: [opt("File Upload")],
}
let file
let source = PluginSource.URL
let dynamicValues = {}
let validation
$: validation = source === "File Upload" ? file : dynamicValues["URL"]
function infoMessage(optionName) {
switch (optionName) {
case PluginSource.URL:
return "Please specify a URL which directs to a built plugin TAR archive. You can provide headers if authentication is required."
case PluginSource.NPM:
return "Please specify the URL to a public NPM package which contains the built version of the plugin you wish to install."
case PluginSource.GITHUB:
return "Please specify the URL to a Github repository which contains built plugin releases. If this is a private repo you can provide a token to access it."
case PluginSource.FILE:
return "Please provide a built plugin TAR archive. You can build a plugin locally using the Budibase CLI."
}
}
async function save() {
try {
if (source === PluginSource.FILE) {
await plugins.uploadPlugin(file)
} else {
const url = dynamicValues["URL"]
let auth =
source === PluginSource.GITHUB
? dynamicValues["Github Token"]
: source === PluginSource.URL
? dynamicValues["Headers"]
: undefined
await plugins.createPlugin(source, url, auth)
}
notifications.success("Plugin added successfully.")
} catch (err) {
const msg = err?.message ? err.message : JSON.stringify(err)
notifications.error(`Failed to add plugin: ${msg}`)
}
}
</script>
<ModalContent
confirmText={"Save"}
onConfirm={save}
disabled={!validation}
size="M"
title="Add new plugin"
>
<div class="form-row">
<Label size="M">Source</Label>
<Select
placeholder={null}
bind:value={source}
options={Object.values(PluginSource)}
/>
</div>
<Body size="S">{infoMessage(source)}</Body>
{#each authOptions[source] as option}
{#if option.name === PluginSource.FILE}
<div class="form-row">
<Label size="M">{option.name}</Label>
<Dropzone
gallery={false}
value={[file]}
on:change={e => {
if (!e.detail || e.detail.length === 0) {
file = null
} else {
file = e.detail[0]
}
}}
/>
</div>
{:else}
<div class="form-row">
<div>
<Label size="M">{option.name}</Label>
{#if option.optional}
<Label size="S" muted><i>Optional</i></Label>
{/if}
</div>
{#if option.name === "Headers"}
<KeyValueBuilder bind:object={dynamicValues[option.name]} />
{:else}
<Input bind:value={dynamicValues[option.name]} />
{/if}
</div>
{/if}
{/each}
</ModalContent>
<style>
.form-row {
display: grid;
grid-template-columns: 60px 1fr;
grid-gap: var(--spacing-l);
align-items: center;
}
</style>


@ -0,0 +1,33 @@
<script>
import { Body, ModalContent, notifications } from "@budibase/bbui"
import { plugins } from "stores/portal"
import { createEventDispatcher } from "svelte"
export let plugin
let dispatch = createEventDispatcher()
async function deletePlugin() {
try {
await plugins.deletePlugin(plugin._id)
notifications.success(`Plugin ${plugin?.name} deleted`)
dispatch("deleted")
} catch (error) {
const msg = error?.message ? error.message : JSON.stringify(error)
notifications.error(`Error deleting plugin: ${msg}`)
}
}
</script>
<ModalContent
warning
onConfirm={deletePlugin}
title="Delete Plugin"
confirmText="Delete plugin"
cancelText="Cancel"
showCloseIcon={false}
>
<Body>
Are you sure you want to delete <strong>{plugin?.name}</strong>
</Body>
</ModalContent>


@ -0,0 +1,155 @@
<script>
import {
Icon,
Body,
Modal,
ModalContent,
Button,
Label,
Input,
} from "@budibase/bbui"
import DeletePluginModal from "../_components/DeletePluginModal.svelte"
export let plugin
let detailsModal
let deleteModal
let icon =
plugin.schema.type === "component"
? plugin.schema.schema.icon || "Book"
: plugin.schema.schema.icon || "Beaker"
$: friendlyName = plugin?.schema?.schema?.friendlyName
function pluginDeleted() {
if (detailsModal) {
detailsModal.hide()
}
}
</script>
<div class="row" on:click={() => detailsModal.show()}>
<div class="title">
<div class="name">
<div>
<Icon size="M" name={icon} />
</div>
<div>
<Body
size="S"
color="var(--spectrum-global-color-gray-900)"
weight="800"
>
{plugin.name}
</Body>
<Body size="XS" color="var(--spectrum-global-color-gray-900)">
{friendlyName}
</Body>
</div>
</div>
</div>
<div class="desktop">{plugin.version}</div>
<div class="desktop">
{plugin.schema.type.charAt(0).toUpperCase() + plugin.schema.type.slice(1)}
</div>
<div>
<Icon name="ChevronRight" />
</div>
</div>
<Modal bind:this={detailsModal}>
<ModalContent
size="M"
title="Plugin details"
showConfirmButton={false}
showCancelButton={false}
>
<div class="details-row">
<Label size="M">Name</Label>
<Input disabled value={plugin.name} />
</div>
<div class="details-row">
<Label size="M">Friendly name</Label>
<Input disabled value={friendlyName} />
</div>
<div class="details-row">
<Label size="M">Type</Label>
<Input
disabled
value={plugin.schema.type.charAt(0).toUpperCase() +
plugin.schema.type.slice(1)}
/>
</div>
<div class="details-row">
<Label size="M">Source</Label>
<Input disabled value={plugin.source || "N/A"} />
</div>
<div class="details-row">
<Label size="M">Version</Label>
<Input disabled value={plugin.version} />
</div>
<div class="details-row">
<Label size="M">License</Label>
<Input disabled value={plugin.package.license} />
</div>
<div class="details-row">
<Label size="M">Author</Label>
<Input disabled value={plugin.package.author || "N/A"} />
</div>
<div class="footer" slot="footer">
<Button newStyles on:click={deleteModal.show()} warning>Delete</Button>
</div>
</ModalContent>
<Modal bind:this={deleteModal}>
<DeletePluginModal {plugin} on:deleted={pluginDeleted} />
</Modal>
</Modal>
<style>
.row {
display: grid;
grid-template-columns: 1fr 110px 140px 20px;
align-items: center;
background: var(--background);
border-radius: 4px;
padding: 0 16px;
height: 56px;
background: var(--spectrum-global-color-gray-50);
border: 1px solid var(--spectrum-global-color-gray-300);
transition: background 130ms ease-out;
}
.row:hover {
cursor: pointer;
background: var(--spectrum-global-color-gray-75);
}
.name {
grid-gap: var(--spacing-m);
grid-template-columns: 75px 75px;
align-items: center;
display: flex;
}
.details-row {
display: grid;
grid-template-columns: 70px 1fr;
grid-gap: var(--spacing-l) var(--spacing-l);
align-items: center;
}
@media (max-width: 640px) {
.desktop {
display: none !important;
}
}
.footer {
display: flex;
gap: var(--spacing-l);
}
</style>


@ -0,0 +1,95 @@
<script>
import {
Layout,
Heading,
Body,
Button,
Select,
Divider,
Modal,
Search,
} from "@budibase/bbui"
import { onMount } from "svelte"
import { plugins } from "stores/portal"
import PluginRow from "./_components/PluginRow.svelte"
import AddPluginModal from "./_components/AddPluginModal.svelte"
let modal
let searchTerm = ""
let filter = "all"
let filterOptions = [
{ label: "All plugins", value: "all" },
{ label: "Components", value: "component" },
{ label: "Datasources", value: "datasource" },
]
$: filteredPlugins = $plugins
.filter(plugin => {
return filter === "all" || plugin.schema.type === filter
})
.filter(plugin => {
return (
!searchTerm ||
plugin?.name?.toLowerCase().includes(searchTerm.toLowerCase())
)
})
onMount(async () => {
await plugins.load()
})
</script>
<Layout noPadding>
<Layout gap="XS" noPadding>
<Heading size="M">Plugins</Heading>
<Body>Add your own custom datasources and components</Body>
</Layout>
<Divider size="S" />
<Layout noPadding>
<div class="controls">
<div>
<Button on:click={modal.show} newStyles cta icon={"Add"}>
Add plugin
</Button>
</div>
<div class="filters">
<div class="select">
<Select
bind:value={filter}
placeholder={null}
options={filterOptions}
autoWidth
quiet
/>
</div>
<Search bind:value={searchTerm} placeholder="Search plugins" />
</div>
</div>
{#if filteredPlugins?.length}
<Layout noPadding gap="S">
{#each filteredPlugins as plugin (plugin._id)}
<PluginRow {plugin} />
{/each}
</Layout>
{/if}
</Layout>
</Layout>
<Modal bind:this={modal}>
<AddPluginModal />
</Modal>
<style>
.filters {
display: flex;
gap: var(--spacing-xl);
}
.controls {
display: flex;
gap: var(--spacing-xl);
justify-content: space-between;
}
.controls :global(.spectrum-Search) {
width: 200px;
}
</style>


@ -16,6 +16,7 @@
MenuItem,
Icon,
Helpers,
Modal,
} from "@budibase/bbui"
import OverviewTab from "../_components/OverviewTab.svelte"
import SettingsTab from "../_components/SettingsTab.svelte"
@ -29,6 +30,7 @@
import EditableIcon from "components/common/EditableIcon.svelte"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import HistoryTab from "components/portal/overview/automation/HistoryTab.svelte"
import ExportAppModal from "components/start/ExportAppModal.svelte"
import { checkIncomingDeploymentStatus } from "components/deploy/utils"
import { onDestroy, onMount } from "svelte"
@ -38,7 +40,9 @@
let loaded = false
let deletionModal
let unpublishModal
let exportModal
let appName = ""
let published
// App
$: filteredApps = $apps.filter(app => app.devId === application)
@ -140,11 +144,9 @@
notifications.success("App ID copied to clipboard.")
}
const exportApp = (app, opts = { published: false }) => {
const appName = encodeURIComponent(app.name)
const id = opts?.published ? app.prodId : app.devId
// always export the development version
window.location = `/api/backups/export?appId=${id}&appname=${appName}`
const exportApp = opts => {
published = opts.published
exportModal.show()
}
const unpublishApp = app => {
@ -206,6 +208,10 @@
})
</script>
<Modal bind:this={exportModal} padding={false} width="600px">
<ExportAppModal app={selectedApp} {published} />
</Modal>
<span class="overview-wrap">
<Page wide noPadding>
{#await promise}
@ -269,14 +275,14 @@
<Icon hoverable name="More" />
</span>
<MenuItem
on:click={() => exportApp(selectedApp, { published: false })}
on:click={() => exportApp({ published: false })}
icon="DownloadFromCloud"
>
Export latest
</MenuItem>
{#if isPublished}
<MenuItem
on:click={() => exportApp(selectedApp, { published: true })}
on:click={() => exportApp({ published: true })}
icon="DownloadFromCloudOutline"
>
Export published


@ -15,7 +15,6 @@
import { API } from "api"
import { writable } from "svelte/store"
import { redirect } from "@roxi/routify"
import { onMount } from "svelte"
// Only admins allowed here
$: {
@ -34,12 +33,11 @@
})
let loading = false
async function uploadLogo() {
async function uploadLogo(file) {
try {
let data = new FormData()
data.append("file", $values.logo)
await API.uploadPlugin(data)
notifications.success("Plugin uploaded successfully")
data.append("file", file)
await API.uploadLogo(data)
} catch (error) {
notifications.error("Error uploading logo")
}
@ -73,11 +71,6 @@
}
loading = false
}
onMount(async () => {
const plugins = await API.getPlugins()
console.log(plugins)
})
</script>
{#if $auth.isAdmin}
@ -95,14 +88,14 @@
<Heading size="S">Information</Heading>
<Body size="S">Here you can update your logo and organization name.</Body>
</Layout>
<div class="fields">
<div class="field">
<div className="fields">
<div className="field">
<Label size="L">Org. name</Label>
<Input thin bind:value={$values.company} />
</div>
<div class="field logo">
<div className="field logo">
<Label size="L">Logo</Label>
<div class="file">
<div className="file">
<Dropzone
value={[$values.logo]}
on:change={e => {
@ -113,7 +106,6 @@
}
}}
/>
<button on:click={uploadLogo}>Upload</button>
</div>
</div>
</div>
@ -123,8 +115,8 @@
<Heading size="S">Platform</Heading>
<Body size="S">Here you can set up general platform settings.</Body>
</Layout>
<div class="fields">
<div class="field">
<div className="fields">
<div className="field">
<Label
size="L"
tooltip={"Update the Platform URL to match your Budibase web URL. This keeps email templates and authentication configs up to date."}
@ -158,15 +150,18 @@
display: grid;
grid-gap: var(--spacing-m);
}
.field {
display: grid;
grid-template-columns: 100px 1fr;
grid-gap: var(--spacing-l);
align-items: center;
}
.file {
max-width: 30ch;
}
.logo {
align-items: start;
}


@ -62,8 +62,11 @@ export function createDatasourcesStore() {
unselect: () => {
update(state => ({ ...state, selected: null }))
},
updateSchema: async datasource => {
const response = await API.buildDatasourceSchema(datasource?._id)
updateSchema: async (datasource, tablesFilter) => {
const response = await API.buildDatasourceSchema({
datasourceId: datasource?._id,
tablesFilter,
})
return await updateDatasource(response)
},
save: async (body, fetchSchema = false) => {


@ -8,3 +8,4 @@ export { oidc } from "./oidc"
export { templates } from "./templates"
export { licensing } from "./licensing"
export { groups } from "./groups"
export { plugins } from "./plugins"


@ -0,0 +1,77 @@
import { writable } from "svelte/store"
import { API } from "api"
import { PluginSource } from "constants"
export function createPluginsStore() {
const { subscribe, set, update } = writable([])
async function load() {
const plugins = await API.getPlugins()
set(plugins)
}
async function deletePlugin(pluginId) {
await API.deletePlugin(pluginId)
update(state => {
state = state.filter(existing => existing._id !== pluginId)
return state
})
}
async function createPlugin(source, url, auth = null) {
let pluginData = {
source,
url,
}
switch (source) {
case PluginSource.URL:
pluginData.headers = auth
break
case PluginSource.GITHUB:
pluginData.githubToken = auth
break
}
let res = await API.createPlugin(pluginData)
let newPlugin = res.plugins[0]
update(state => {
const currentIdx = state.findIndex(plugin => plugin._id === newPlugin._id)
if (currentIdx >= 0) {
state.splice(currentIdx, 1, newPlugin)
} else {
state.push(newPlugin)
}
return state
})
}
async function uploadPlugin(file) {
if (!file) {
return
}
let data = new FormData()
data.append("file", file)
let resp = await API.uploadPlugin(data)
let newPlugin = resp.plugins[0]
update(state => {
const currentIdx = state.findIndex(plugin => plugin._id === newPlugin._id)
if (currentIdx >= 0) {
state.splice(currentIdx, 1, newPlugin)
} else {
state.push(newPlugin)
}
return state
})
}
return {
subscribe,
load,
createPlugin,
deletePlugin,
uploadPlugin,
}
}
export const plugins = createPluginsStore()


@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@ -26,18 +26,18 @@
"outputPath": "build"
},
"dependencies": {
"@budibase/backend-core": "1.3.12-alpha.3",
"@budibase/string-templates": "1.3.12-alpha.3",
"@budibase/types": "1.3.12-alpha.3",
"@budibase/backend-core": "1.3.15-alpha.9",
"@budibase/string-templates": "1.3.15-alpha.9",
"@budibase/types": "1.3.15-alpha.9",
"axios": "0.21.2",
"chalk": "4.1.0",
"cli-progress": "3.11.2",
"commander": "7.1.0",
"docker-compose": "0.23.6",
"dotenv": "16.0.1",
"download": "^8.0.0",
"download": "8.0.0",
"inquirer": "8.0.0",
"joi": "^17.6.0",
"joi": "17.6.0",
"lookpath": "1.1.0",
"node-fetch": "2",
"pkg": "5.7.0",


@ -1 +1,2 @@
process.env.NO_JS = "1"
process.env.JS_BCRYPT = "1"


@ -1,6 +0,0 @@
exports.PluginTypes = {
COMPONENT: "component",
DATASOURCE: "datasource",
}
exports.PLUGIN_TYPES_ARR = Object.values(exports.PluginTypes)


@ -3,11 +3,11 @@ const { CommandWords } = require("../constants")
const { getSkeleton, fleshOutSkeleton } = require("./skeleton")
const questions = require("../questions")
const fs = require("fs")
const { PLUGIN_TYPES_ARR } = require("./constants")
const { validate } = require("./validate")
const { PLUGIN_TYPE_ARR } = require("@budibase/types")
const { validate } = require("@budibase/backend-core/plugins")
const { runPkgCommand } = require("../exec")
const { join } = require("path")
const { success, error, info } = require("../utils")
const { success, error, info, moveDirectory } = require("../utils")
function checkInPlugin() {
if (!fs.existsSync("package.json")) {
@ -22,9 +22,27 @@ function checkInPlugin() {
}
}
async function askAboutTopLevel(name) {
const files = fs.readdirSync(process.cwd())
// we are in an empty git repo, don't ask
if (files.find(file => file === ".git")) {
return false
} else {
console.log(
info(`By default the plugin will be created in the directory "${name}"`)
)
console.log(
info(
"if you are already in an empty directory, such as a new Git repo, you can disable this functionality."
)
)
return questions.confirmation("Create top level directory?")
}
}
async function init(opts) {
const type = opts["init"] || opts
if (!type || !PLUGIN_TYPES_ARR.includes(type)) {
if (!type || !PLUGIN_TYPE_ARR.includes(type)) {
console.log(
error(
"Please provide a type to init, either 'component' or 'datasource'."
@ -45,14 +63,21 @@ async function init(opts) {
`An amazing Budibase ${type}!`
)
const version = await questions.string("Version", "1.0.0")
const topLevel = await askAboutTopLevel(name)
// get the skeleton
console.log(info("Retrieving project..."))
await getSkeleton(type, name)
await fleshOutSkeleton(type, name, desc, version)
console.log(info("Installing dependencies..."))
await runPkgCommand("install", join(process.cwd(), name))
// if no parent directory desired move to cwd
if (!topLevel) {
moveDirectory(name, process.cwd())
console.log(info(`Plugin created in current directory.`))
} else {
console.log(info(`Plugin created in directory "${name}"`))
}
}
async function verify() {
// will throw errors if not acceptable


@ -27,7 +27,10 @@ function checkForBinaries() {
}
function cleanup(evt) {
if (evt && evt.errno) {
if (!isNaN(evt)) {
return
}
if (evt) {
console.error(
error(
"Failed to run CLI command - please report with the following message:"


@ -3,6 +3,7 @@ const fs = require("fs")
const axios = require("axios")
const path = require("path")
const progress = require("cli-progress")
const { join } = require("path")
exports.downloadFile = async (url, filePath) => {
filePath = path.resolve(filePath)
@ -67,3 +68,19 @@ exports.progressBar = total => {
exports.checkSlashesInUrl = url => {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
}
exports.moveDirectory = (oldPath, newPath) => {
const files = fs.readdirSync(oldPath)
// check any file exists already
for (let file of files) {
if (fs.existsSync(join(newPath, file))) {
throw new Error(
"Unable to remove top level directory - some skeleton files already exist."
)
}
}
for (let file of files) {
fs.renameSync(join(oldPath, file), join(newPath, file))
}
fs.rmdirSync(oldPath)
}


@ -753,7 +753,7 @@ double-ended-queue@2.1.0-0:
resolved "https://registry.yarnpkg.com/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz#103d3527fd31528f40188130c841efdd78264e5c"
integrity sha512-+BNfZ+deCo8hMNpDqDnvT+c0XpJ5cUa6mqYq89bho2Ifze4URTqRkcwR399hWoTrTkbZ/XJYDgP6rc7pRgffEQ==
download@^8.0.0:
download@8.0.0:
version "8.0.0"
resolved "https://registry.yarnpkg.com/download/-/download-8.0.0.tgz#afc0b309730811731aae9f5371c9f46be73e51b1"
integrity sha512-ASRY5QhDk7FK+XrQtQyvhpDKanLluEEQtWl/J7Lxuf/b+i8RYh997QeXvL85xitrmRKVlx9c7eTrcRdq2GS4eA==
@ -1551,7 +1551,7 @@ isurl@^1.0.0-alpha5:
has-to-string-tag-x "^1.2.0"
is-object "^1.0.1"
joi@^17.6.0:
joi@17.6.0:
version "17.6.0"
resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2"
integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==


@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "1.3.12-alpha.3",
"@budibase/frontend-core": "1.3.12-alpha.3",
"@budibase/string-templates": "1.3.12-alpha.3",
"@budibase/bbui": "1.3.15-alpha.9",
"@budibase/frontend-core": "1.3.15-alpha.9",
"@budibase/string-templates": "1.3.15-alpha.9",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",
@ -39,6 +39,7 @@
"sanitize-html": "^2.7.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",
"socket.io-client": "^4.5.1",
"svelte": "^3.49.0",
"svelte-apexcharts": "^1.0.2",
"svelte-flatpickr": "^3.1.0",


@ -91,8 +91,8 @@
<svelte:head>
{#if $builderStore.usedPlugins?.length}
{#each $builderStore.usedPlugins as plugin}
<script src={`/plugins/${plugin.jsUrl}`}></script>
{#each $builderStore.usedPlugins as plugin (plugin.hash)}
<script src={`/plugins/${plugin.jsUrl}?r=${plugin.hash || ""}`}></script>
{/each}
{/if}
</svelte:head>


@ -125,7 +125,9 @@
// Empty components are those which accept children but do not have any.
// Empty states can be shown for these components, but can be disabled
// in the component manifest.
$: empty = interactive && !children.length && hasChildren
$: empty =
(interactive && !children.length && hasChildren) ||
hasMissingRequiredSettings
$: emptyState = empty && showEmptyState
// Enrich component settings


@ -2,18 +2,16 @@
import { getContext } from "svelte"
import { builderStore } from "stores"
const { styleable } = getContext("sdk")
const component = getContext("component")
$: requiredSetting = $component.missingRequiredSettings?.[0]
</script>
{#if $builderStore.inBuilder && requiredSetting}
<div use:styleable={$component.styles}>
<div class="component-placeholder">
<span>
Add the <mark>{requiredSetting.label}</mark> setting to start using your
component -
Add the <mark>{requiredSetting.label}</mark> setting to start using your component
-
</span>
<span
class="spectrum-Link"
@ -24,7 +22,6 @@
Show me
</span>
</div>
</div>
{/if}
<style>


@ -1,7 +1,6 @@
<script>
import { getContext } from "svelte"
import { ProgressCircle, Pagination } from "@budibase/bbui"
import Placeholder from "./Placeholder.svelte"
import { fetchData, LuceneUtils } from "@budibase/frontend-core"
export let dataSource
@ -132,12 +131,8 @@
<div class="loading">
<ProgressCircle />
</div>
{:else}
{#if $component.emptyState}
<Placeholder />
{:else}
<slot />
{/if}
{#if paginate && $fetch.supportsPagination}
<div class="pagination">
<Pagination


@ -2,6 +2,7 @@ import ClientApp from "./components/ClientApp.svelte"
import { componentStore, builderStore, appStore, devToolsStore } from "./stores"
import loadSpectrumIcons from "@budibase/bbui/spectrum-icons-rollup.js"
import { get } from "svelte/store"
import { initWebsocket } from "./websocket.js"
// Provide svelte and svelte/internal as globals for custom components
import * as svelte from "svelte"
@ -28,6 +29,7 @@ const loadBudibase = () => {
navigation: window["##BUDIBASE_PREVIEW_NAVIGATION##"],
hiddenComponentIds: window["##BUDIBASE_HIDDEN_COMPONENT_IDS##"],
usedPlugins: window["##BUDIBASE_USED_PLUGINS##"],
location: window["##BUDIBASE_LOCATION##"],
})
// Set app ID - this window flag is set by both the preview and the real
@ -51,6 +53,9 @@ const loadBudibase = () => {
window.registerCustomComponent =
componentStore.actions.registerCustomComponent
// Initialise websocket
initWebsocket()
// Create app if one hasn't been created yet
if (!app) {
app = new ClientApp({


@ -19,6 +19,7 @@ const createBuilderStore = () => {
isDragging: false,
navigation: null,
hiddenComponentIds: [],
usedPlugins: null,
// Legacy - allow the builder to specify a layout
layout: null,
@ -84,6 +85,20 @@ const createBuilderStore = () => {
highlightSetting: setting => {
dispatchEvent("highlight-setting", { setting })
},
updateUsedPlugin: (name, hash) => {
// Check if we used this plugin
const used = get(store)?.usedPlugins?.find(x => x.name === name)
if (used) {
store.update(state => {
state.usedPlugins = state.usedPlugins.filter(x => x.name !== name)
state.usedPlugins.push({
...used,
hash,
})
return state
})
}
},
}
return {
...store,


@ -141,7 +141,7 @@ const createComponentStore = () => {
}
const registerCustomComponent = ({ Component, schema, version }) => {
if (!Component || !schema?.schema?.name) {
if (!Component || !schema?.schema?.name || !version) {
return
}
const component = `plugin/${schema.schema.name}`
@ -149,7 +149,6 @@ const createComponentStore = () => {
state.customComponentManifest[component] = {
Component,
schema,
version,
}
return state
})


@ -0,0 +1,26 @@
import { builderStore } from "./stores/index.js"
import { get } from "svelte/store"
import { io } from "socket.io-client"
export const initWebsocket = () => {
const { inBuilder, location } = get(builderStore)
// Only connect when we're inside the builder preview, for now
if (!inBuilder || !location) {
return
}
// Initialise connection
const tls = location.protocol === "https:"
const proto = tls ? "wss:" : "ws:"
const host = location.hostname
const port = location.port || (tls ? 443 : 80)
const socket = io(`${proto}//${host}:${port}`, {
path: "/socket/client",
})
// Event handlers
socket.on("plugin-update", data => {
builderStore.actions.updateUsedPlugin(data.name, data.hash)
})
}


@ -113,6 +113,11 @@
estree-walker "^1.0.1"
picomatch "^2.2.2"
"@socket.io/component-emitter@~3.1.0":
version "3.1.0"
resolved "https://registry.yarnpkg.com/@socket.io/component-emitter/-/component-emitter-3.1.0.tgz#96116f2a912e0c02817345b3c10751069920d553"
integrity sha512-+9jVqKhRSpsc591z5vX+X5Yyw+he/HCB4iQ/RYxw35CEPaY1gnsNE43nf9n9AaYjAQrTiI/mOwKUKdUs9vf7Xg==
"@spectrum-css/button@^3.0.3":
version "3.0.3"
resolved "https://registry.yarnpkg.com/@spectrum-css/button/-/button-3.0.3.tgz#2df1efaab6c7e0b3b06cb4b59e1eae59c7f1fc84"
@ -469,6 +474,13 @@ dayjs@^1.10.5:
resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.10.8.tgz#267df4bc6276fcb33c04a6735287e3f429abec41"
integrity sha512-wbNwDfBHHur9UOzNUjeKUOJ0fCb0a52Wx0xInmQ7Y8FstyajiV1NmK1e00cxsr9YrE9r7yAChE0VvpuY5Rnlow==
debug@~4.3.1, debug@~4.3.2:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
dependencies:
ms "2.1.2"
deepmerge@^4.2.2:
version "4.2.2"
resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955"
@ -536,6 +548,22 @@ emojis-list@^3.0.0:
resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78"
integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==
engine.io-client@~6.2.1:
version "6.2.2"
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-6.2.2.tgz#c6c5243167f5943dcd9c4abee1bfc634aa2cbdd0"
integrity sha512-8ZQmx0LQGRTYkHuogVZuGSpDqYZtCM/nv8zQ68VZ+JkOpazJ7ICdsSpaO6iXwvaU30oFg5QJOJWj8zWqhbKjkQ==
dependencies:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.1"
engine.io-parser "~5.0.3"
ws "~8.2.3"
xmlhttprequest-ssl "~2.0.0"
engine.io-parser@~5.0.3:
version "5.0.4"
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.4.tgz#0b13f704fa9271b3ec4f33112410d8f3f41d0fc0"
integrity sha512-+nVFp+5z1E3HcToEnO7ZIj3g+3k9389DvWtvJZz0T6/eOCPIyyxehFcedoYrZQrp0LgQbD9pPXhpMBKMd5QURg==
entities@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55"
@ -824,6 +852,11 @@ minimist@^1.2.0:
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
ms@2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
nanoid@^2.1.0:
version "2.1.11"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-2.1.11.tgz#ec24b8a758d591561531b4176a01e3ab4f0f0280"
@ -1389,6 +1422,24 @@ slash@^3.0.0:
resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
socket.io-client@^4.5.1:
version "4.5.1"
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-4.5.1.tgz#cab8da71976a300d3090414e28c2203a47884d84"
integrity sha512-e6nLVgiRYatS+AHXnOnGi4ocOpubvOUCGhyWw8v+/FxW8saHkinG6Dfhi9TU0Kt/8mwJIAASxvw6eujQmjdZVA==
dependencies:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.2"
engine.io-client "~6.2.1"
socket.io-parser "~4.2.0"
socket.io-parser@~4.2.0:
version "4.2.1"
resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.1.tgz#01c96efa11ded938dcb21cbe590c26af5eff65e5"
integrity sha512-V4GrkLy+HeF1F/en3SpUaM+7XxYXpuMUWLGde1kSSh5nQMN4hLrbPIkD+otwh6q9R6NOQBN4AMaOZ2zVjui82g==
dependencies:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.1"
source-map-js@^1.0.1, source-map-js@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
@ -1598,6 +1649,16 @@ wrappy@1:
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
ws@~8.2.3:
version "8.2.3"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.2.3.tgz#63a56456db1b04367d0b721a0b80cae6d8becbba"
integrity sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==
xmlhttprequest-ssl@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.0.0.tgz#91360c86b914e67f44dce769180027c0da618c67"
integrity sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==
y18n@^5.0.5:
version "5.0.8"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55"


@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "1.3.12-alpha.3",
"@budibase/bbui": "1.3.15-alpha.9",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}


@ -11,10 +11,14 @@ export const buildDatasourceEndpoints = API => ({
/**
* Prompts the server to build the schema for a datasource.
* @param datasourceId the datasource ID to build the schema for
* @param tablesFilter list of specific table names to be build the schema
*/
buildDatasourceSchema: async datasourceId => {
buildDatasourceSchema: async ({ datasourceId, tablesFilter }) => {
return await API.post({
url: `/api/datasources/${datasourceId}/schema`,
body: {
tablesFilter,
},
})
},


@ -5,12 +5,22 @@ export const buildPluginEndpoints = API => ({
*/
uploadPlugin: async data => {
return await API.post({
url: "/api/plugin/upload",
url: `/api/plugin/upload`,
body: data,
json: false,
})
},
/**
* Creates a plugin from URL, Github or NPM
*/
createPlugin: async data => {
return await API.post({
url: `/api/plugin`,
body: data,
})
},
/**
* Gets a list of all plugins
*/
@ -19,4 +29,16 @@ export const buildPluginEndpoints = API => ({
url: "/api/plugin",
})
},
/**
* Deletes a plugin.
* @param pluginId the ID of the plugin to delete
*
* * @param pluginId the revision of the plugin to delete
*/
deletePlugin: async pluginId => {
return await API.delete({
url: `/api/plugin/${pluginId}`,
})
},
})


@ -80,6 +80,19 @@ const cleanupQuery = query => {
return query
}
/**
* Removes a numeric prefix on field names designed to give fields uniqueness
*/
const removeKeyNumbering = key => {
if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) {
const parts = key.split(":")
parts.shift()
return parts.join(":")
} else {
return key
}
}
/**
* Builds a lucene JSON query from the filter structure generated in the builder
* @param filter the builder filter structure
@ -108,7 +121,7 @@ export const buildLuceneQuery = filter => {
query.allOr = true
return
}
if (type === "datetime") {
if (type === "datetime" && !isHbs) {
// Ensure date value is a valid date and parse into correct format
if (!value) {
return
@ -194,7 +207,7 @@ export const runLuceneQuery = (docs, query) => {
const filters = Object.entries(query[type] || {})
for (let i = 0; i < filters.length; i++) {
const [key, testValue] = filters[i]
const docValue = Helpers.deepGet(doc, key)
const docValue = Helpers.deepGet(doc, removeKeyNumbering(key))
if (failFn(docValue, testValue)) {
return false
}


@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.3.12-alpha.3",
"version": "1.3.15-alpha.9",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -77,11 +77,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "1.3.12-alpha.3",
"@budibase/client": "1.3.12-alpha.3",
"@budibase/pro": "1.3.12-alpha.3",
"@budibase/string-templates": "1.3.12-alpha.3",
"@budibase/types": "1.3.12-alpha.3",
"@budibase/backend-core": "1.3.15-alpha.9",
"@budibase/client": "1.3.15-alpha.9",
"@budibase/pro": "1.3.15-alpha.9",
"@budibase/string-templates": "1.3.15-alpha.9",
"@budibase/types": "1.3.15-alpha.9",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
@ -95,7 +95,7 @@
"bcryptjs": "2.4.3",
"bull": "4.8.5",
"chmodr": "1.2.0",
"chokidar": "^3.5.3",
"chokidar": "3.5.3",
"csvtojson": "2.0.10",
"curlconverter": "3.21.0",
"dotenv": "8.2.0",
@ -139,9 +139,10 @@
"redis": "4",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
"socket.io": "^4.5.1",
"svelte": "3.49.0",
"swagger-parser": "10.0.3",
"tar": "^6.1.11",
"tar": "6.1.11",
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"validate.js": "0.13.1",


@ -552,11 +552,7 @@ export const sync = async (ctx: any, next: any) => {
})
let error
try {
await replication.replicate({
filter: function (doc: any) {
return doc._id !== DocumentType.APP_METADATA
},
})
await replication.replicate(replication.appReplicateOpts())
} catch (err) {
error = err
} finally {


@ -1,11 +1,12 @@
const { streamBackup } = require("../../utilities/fileSystem")
const { events, context } = require("@budibase/backend-core")
const { DocumentType } = require("../../db/utils")
const { isQsTrue } = require("../../utilities")
exports.exportAppDump = async function (ctx) {
let { appId, excludeRows } = ctx.query
const appName = decodeURI(ctx.query.appname)
excludeRows = excludeRows === "true"
excludeRows = isQsTrue(excludeRows)
const backupIdentifier = `${appName}-export-${new Date().getTime()}.txt`
ctx.attachment(backupIdentifier)
ctx.body = await streamBackup(appId, excludeRows)


@ -2,7 +2,6 @@ const { DocumentType, getPluginParams } = require("../../db/utils")
const { getComponentLibraryManifest } = require("../../utilities/fileSystem")
const { getAppDB } = require("@budibase/backend-core/context")
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const env = require("../../environment")
exports.fetchAppComponentDefinitions = async function (ctx) {
try {
@ -33,8 +32,6 @@ exports.fetchAppComponentDefinitions = async function (ctx) {
}
}
// for now custom components only supported in self-host
if (env.SELF_HOSTED) {
// Add custom components
const globalDB = getGlobalDB()
const response = await globalDB.allDocs(
@ -52,7 +49,6 @@ exports.fetchAppComponentDefinitions = async function (ctx) {
...plugin.schema.schema,
}
})
}
ctx.body = definitions
} catch (err) {

View file

@ -50,9 +50,23 @@ exports.fetch = async function (ctx) {
exports.buildSchemaFromDb = async function (ctx) {
const db = getAppDB()
const datasource = await db.get(ctx.params.datasourceId)
const tablesFilter = ctx.request.body.tablesFilter
const { tables, error } = await buildSchemaHelper(datasource)
let { tables, error } = await buildSchemaHelper(datasource)
if (tablesFilter) {
if (!datasource.entities) {
datasource.entities = {}
}
for (let key in tables) {
if (
tablesFilter.some(filter => filter.toLowerCase() === key.toLowerCase())
) {
datasource.entities[key] = tables[key]
}
}
} else {
datasource.entities = tables
}
const dbResp = await db.put(datasource)
datasource._rev = dbResp.rev
@ -223,10 +237,9 @@ const buildSchemaHelper = async datasource => {
// Connect to the DB and build the schema
const connector = new Connector(datasource.config)
await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables
// make sure they all have a display name selected
for (let entity of Object.values(datasource.entities)) {
for (let entity of Object.values(datasource.entities ?? {})) {
if (entity.primaryDisplay) {
continue
}
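
For illustration, a request body for this build-schema endpoint might look like the object below; the table names are made up and the route name is an assumption, not taken from this diff. Matching is case-insensitive, and omitting tablesFilter keeps the old behaviour of importing every discovered table into datasource.entities.

// illustrative body for the buildSchemaFromDb endpoint
const body = {
  // only these tables are written to datasource.entities
  tablesFilter: ["Customers", "orders"],
}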

View file

@ -15,6 +15,7 @@ import {
getAppId,
getAppDB,
getProdAppDB,
getDevAppDB,
} from "@budibase/backend-core/context"
import { quotas } from "@budibase/pro"
import { events } from "@budibase/backend-core"
@ -110,17 +111,29 @@ async function deployApp(deployment: any) {
target: productionAppId,
}
replication = new Replication(config)
const devDb = getDevAppDB()
console.log("Compacting development DB")
await devDb.compact()
console.log("Replication object created")
await replication.replicate()
await replication.replicate(replication.appReplicateOpts())
console.log("replication complete.. replacing app meta doc")
// app metadata is excluded as it is likely to be in conflict
// replicate the app metadata document manually
const db = getProdAppDB()
const appDoc = await db.get(DocumentType.APP_METADATA)
const appDoc = await devDb.get(DocumentType.APP_METADATA)
try {
const prodAppDoc = await db.get(DocumentType.APP_METADATA)
appDoc._rev = prodAppDoc._rev
} catch (err) {
delete appDoc._rev
}
// switch to production app ID
deployment.appUrl = appDoc.url
appDoc.appId = productionAppId
appDoc.instance._id = productionAppId
// remove automation errors if they exist
delete appDoc.automationErrors
await db.put(appDoc)
await appCache.invalidateAppMetadata(productionAppId)
console.log("New app doc written successfully.")

View file

@ -1,110 +0,0 @@
import { ObjectStoreBuckets } from "../../constants"
import { extractPluginTarball, loadJSFile } from "../../utilities/fileSystem"
import { getGlobalDB } from "@budibase/backend-core/tenancy"
import { generatePluginID, getPluginParams } from "../../db/utils"
import { uploadDirectory } from "@budibase/backend-core/objectStore"
import { PluginType, FileType } from "@budibase/types"
import env from "../../environment"
export async function getPlugins(type?: PluginType) {
const db = getGlobalDB()
const response = await db.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
const plugins = response.rows.map((row: any) => row.doc)
if (type) {
return plugins.filter((plugin: any) => plugin.schema?.type === type)
} else {
return plugins
}
}
export async function upload(ctx: any) {
const plugins: FileType[] =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
try {
let docs = []
// can do single or multiple plugins
for (let plugin of plugins) {
const doc = await processPlugin(plugin)
docs.push(doc)
}
ctx.body = {
message: "Plugin(s) uploaded successfully",
plugins: docs,
}
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
}
export async function fetch(ctx: any) {
ctx.body = await getPlugins()
}
export async function destroy(ctx: any) {}
export async function processPlugin(plugin: FileType) {
if (!env.SELF_HOSTED) {
throw new Error("Plugins not supported outside of self-host.")
}
const db = getGlobalDB()
const { metadata, directory } = await extractPluginTarball(plugin)
const version = metadata.package.version,
name = metadata.package.name,
description = metadata.package.description
// first open the tarball into tmp directory
const bucketPath = `${name}/`
const files = await uploadDirectory(
ObjectStoreBuckets.PLUGINS,
directory,
bucketPath
)
const jsFile = files.find((file: any) => file.name.endsWith(".js"))
if (!jsFile) {
throw new Error(`Plugin missing .js file.`)
}
// validate the JS for a datasource
if (metadata.schema.type === PluginType.DATASOURCE) {
const js = loadJSFile(directory, jsFile.name)
// TODO: this isn't safe - but we need full node environment
// in future we should do this in a thread for safety
try {
eval(js)
} catch (err: any) {
const message = err?.message ? err.message : JSON.stringify(err)
throw new Error(`JS invalid: ${message}`)
}
}
const jsFileName = jsFile.name
const pluginId = generatePluginID(name)
// overwrite existing docs entirely if they exist
let rev
try {
const existing = await db.get(pluginId)
rev = existing._rev
} catch (err) {
rev = undefined
}
const doc = {
_id: pluginId,
_rev: rev,
...metadata,
name,
version,
description,
jsUrl: `${bucketPath}${jsFileName}`,
}
const response = await db.put(doc)
return {
...doc,
_rev: response.rev,
}
}

View file

@ -0,0 +1,15 @@
import {
createTempFolder,
getPluginMetadata,
extractTarball,
} from "../../../utilities/fileSystem"
export async function fileUpload(file: { name: string; path: string }) {
if (!file.name.endsWith(".tar.gz")) {
throw new Error("Plugin must be compressed into a gzipped tarball.")
}
const path = createTempFolder(file.name.split(".tar.gz")[0])
await extractTarball(file.path, path)
return await getPluginMetadata(path)
}

View file

@ -0,0 +1,75 @@
import { getPluginMetadata } from "../../../utilities/fileSystem"
import fetch from "node-fetch"
import { downloadUnzipTarball } from "./utils"
export async function request(
url: string,
headers: { [key: string]: string },
err: string
) {
const response = await fetch(url, { headers })
if (response.status >= 300) {
const respErr = await response.text()
throw new Error(`Error: ${err} - ${respErr}`)
}
return response.json()
}
export async function githubUpload(url: string, name = "", token = "") {
let githubUrl = url
if (!githubUrl.includes("https://github.com/")) {
throw new Error("The plugin origin must be from Github")
}
if (url.includes(".git")) {
githubUrl = url.replace(".git", "")
}
const githubApiUrl = githubUrl.replace(
"https://github.com/",
"https://api.github.com/repos/"
)
const headers: any = token ? { Authorization: `Bearer ${token}` } : {}
const pluginDetails = await request(
githubApiUrl,
headers,
"Repository not found"
)
const pluginName = pluginDetails.name || name
const pluginLatestReleaseUrl = pluginDetails?.["releases_url"]
? pluginDetails?.["releases_url"].replace("{/id}", "/latest")
: undefined
if (!pluginLatestReleaseUrl) {
throw new Error("Github release not found")
}
const pluginReleaseDetails = await request(
pluginLatestReleaseUrl,
headers,
"Github latest release not found"
)
const pluginReleaseTarballAsset = pluginReleaseDetails?.assets?.find(
(x: any) => x?.["content_type"] === "application/gzip"
)
const pluginLastReleaseTarballUrl =
pluginReleaseTarballAsset?.["browser_download_url"]
if (!pluginLastReleaseTarballUrl) {
throw new Error("Github latest release url not found")
}
try {
const path = await downloadUnzipTarball(
pluginLastReleaseTarballUrl,
pluginName,
headers
)
return await getPluginMetadata(path)
} catch (err: any) {
let errMsg = err?.message || err
if (errMsg === "unexpected response Not Found") {
errMsg = "Github release tarball not found"
}
throw new Error(errMsg)
}
}
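
A worked example of the URL rewriting above, with an illustrative repository name:

const url = "https://github.com/example-org/example-plugin.git"
const repoApi = url
  .replace(".git", "")
  .replace("https://github.com/", "https://api.github.com/repos/")
console.log(repoApi)
// https://api.github.com/repos/example-org/example-plugin
// the latest-release URL is then derived from the repo's "releases_url"
// ({/id} replaced with "/latest") and its application/gzip asset is downloaded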

View file

@ -0,0 +1,212 @@
import { ObjectStoreBuckets } from "../../../constants"
import { loadJSFile } from "../../../utilities/fileSystem"
import { npmUpload, urlUpload, githubUpload, fileUpload } from "./uploaders"
import { getGlobalDB } from "@budibase/backend-core/tenancy"
import { validate } from "@budibase/backend-core/plugins"
import { generatePluginID, getPluginParams } from "../../../db/utils"
import {
uploadDirectory,
deleteFolder,
} from "@budibase/backend-core/objectStore"
import { PluginType, FileType, PluginSource } from "@budibase/types"
import env from "../../../environment"
import { ClientAppSocket } from "../../../websocket"
export async function getPlugins(type?: PluginType) {
const db = getGlobalDB()
const response = await db.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
const plugins = response.rows.map((row: any) => row.doc)
if (type) {
return plugins.filter((plugin: any) => plugin.schema?.type === type)
} else {
return plugins
}
}
export async function upload(ctx: any) {
const plugins: FileType[] =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
try {
let docs = []
// can do single or multiple plugins
for (let plugin of plugins) {
const doc = await processPlugin(plugin, PluginSource.FILE)
docs.push(doc)
}
ctx.body = {
message: "Plugin(s) uploaded successfully",
plugins: docs,
}
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
}
export async function create(ctx: any) {
const { source, url, headers, githubToken } = ctx.request.body
try {
let metadata
let directory
// Generating random name as a backup and needed for url
let name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000)
switch (source) {
case PluginSource.NPM:
const { metadata: metadataNpm, directory: directoryNpm } =
await npmUpload(url, name)
metadata = metadataNpm
directory = directoryNpm
break
case PluginSource.GITHUB:
const { metadata: metadataGithub, directory: directoryGithub } =
await githubUpload(url, name, githubToken)
metadata = metadataGithub
directory = directoryGithub
break
case PluginSource.URL:
const headersObj = headers || {}
const { metadata: metadataUrl, directory: directoryUrl } =
await urlUpload(url, name, headersObj)
metadata = metadataUrl
directory = directoryUrl
break
}
validate(metadata?.schema)
// Only allow components in cloud
if (!env.SELF_HOSTED && metadata?.schema?.type !== PluginType.COMPONENT) {
throw new Error(
"Only component plugins are supported outside of self-host"
)
}
const doc = await storePlugin(metadata, directory, source)
ctx.body = {
message: "Plugin uploaded successfully",
plugins: [doc],
}
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
ctx.status = 200
}
export async function fetch(ctx: any) {
ctx.body = await getPlugins()
}
export async function destroy(ctx: any) {
const db = getGlobalDB()
const { pluginId } = ctx.params
try {
const plugin = await db.get(pluginId)
const bucketPath = `${plugin.name}/`
await deleteFolder(ObjectStoreBuckets.PLUGINS, bucketPath)
await db.remove(pluginId, plugin._rev)
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to delete plugin: ${errMsg}`)
}
ctx.message = `Plugin ${ctx.params.pluginId} deleted.`
ctx.status = 200
}
export async function storePlugin(
metadata: any,
directory: any,
source?: string
) {
const db = getGlobalDB()
const version = metadata.package.version,
name = metadata.package.name,
description = metadata.package.description,
hash = metadata.schema.hash
// first open the tarball into tmp directory
const bucketPath = `${name}/`
const files = await uploadDirectory(
ObjectStoreBuckets.PLUGINS,
directory,
bucketPath
)
const jsFile = files.find((file: any) => file.name.endsWith(".js"))
if (!jsFile) {
throw new Error(`Plugin missing .js file.`)
}
// validate the JS for a datasource
if (metadata.schema.type === PluginType.DATASOURCE) {
const js = loadJSFile(directory, jsFile.name)
// TODO: this isn't safe - but we need full node environment
// in future we should do this in a thread for safety
try {
eval(js)
} catch (err: any) {
const message = err?.message ? err.message : JSON.stringify(err)
throw new Error(`JS invalid: ${message}`)
}
}
const jsFileName = jsFile.name
const pluginId = generatePluginID(name)
// overwrite existing docs entirely if they exist
let rev
try {
const existing = await db.get(pluginId)
rev = existing._rev
} catch (err) {
rev = undefined
}
let doc = {
_id: pluginId,
_rev: rev,
...metadata,
name,
version,
hash,
description,
jsUrl: `${bucketPath}${jsFileName}`,
}
if (source) {
doc = {
...doc,
source,
}
}
const response = await db.put(doc)
ClientAppSocket.emit("plugin-update", { name, hash })
return {
...doc,
_rev: response.rev,
}
}
export async function processPlugin(plugin: FileType, source?: string) {
const { metadata, directory } = await fileUpload(plugin)
validate(metadata?.schema)
// Only allow components in cloud
if (!env.SELF_HOSTED && metadata?.schema?.type !== PluginType.COMPONENT) {
throw new Error("Only component plugins are supported outside of self-host")
}
return await storePlugin(metadata, directory, source)
}
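
For reference, illustrative request bodies for the create handler above; the exact PluginSource string values, token and header values are assumptions:

const npmPlugin = {
  source: "NPM",
  url: "https://www.npmjs.com/package/example-budibase-plugin",
}
const githubPlugin = {
  source: "GITHUB",
  url: "https://github.com/example-org/example-plugin",
  githubToken: "personal access token, optional for public repos",
}
const urlPlugin = {
  source: "URL",
  url: "https://example.com/example-plugin.tar.gz",
  headers: { Authorization: "Bearer <token>" },
}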

View file

@ -0,0 +1,56 @@
import {
getPluginMetadata,
findFileRec,
extractTarball,
deleteFolderFileSystem,
} from "../../../utilities/fileSystem"
import fetch from "node-fetch"
import { join } from "path"
import { downloadUnzipTarball } from "./utils"
export async function npmUpload(url: string, name: string, headers = {}) {
let npmTarballUrl = url
let pluginName = name
if (
!npmTarballUrl.includes("https://www.npmjs.com") &&
!npmTarballUrl.includes("https://registry.npmjs.org")
) {
throw new Error("The plugin origin must be from NPM")
}
if (!npmTarballUrl.includes(".tgz")) {
const npmPackageURl = url.replace(
"https://www.npmjs.com/package/",
"https://registry.npmjs.org/"
)
const response = await fetch(npmPackageURl)
if (response.status !== 200) {
throw new Error("NPM Package not found")
}
let npmDetails = await response.json()
pluginName = npmDetails.name
const npmVersion = npmDetails["dist-tags"].latest
npmTarballUrl = npmDetails?.versions?.[npmVersion]?.dist?.tarball
if (!npmTarballUrl) {
throw new Error("NPM tarball url not found")
}
}
const path = await downloadUnzipTarball(npmTarballUrl, pluginName, headers)
const tarballPluginFile = findFileRec(path, ".tar.gz")
if (!tarballPluginFile) {
throw new Error("Tarball plugin file not found")
}
try {
await extractTarball(tarballPluginFile, path)
deleteFolderFileSystem(join(path, "package"))
} catch (err: any) {
throw new Error(err)
}
return await getPluginMetadata(path)
}

View file

@ -0,0 +1,4 @@
export { fileUpload } from "./file"
export { githubUpload } from "./github"
export { npmUpload } from "./npm"
export { urlUpload } from "./url"

View file

@ -0,0 +1,12 @@
import { downloadUnzipTarball } from "./utils"
import { getPluginMetadata } from "../../../utilities/fileSystem"
export async function urlUpload(url: string, name = "", headers = {}) {
if (!url.includes(".tar.gz")) {
throw new Error("Plugin must be compressed into a gzipped tarball.")
}
const path = await downloadUnzipTarball(url, name, headers)
return await getPluginMetadata(path)
}

View file

@ -0,0 +1,19 @@
import {
createTempFolder,
downloadTarballDirect,
} from "../../../utilities/fileSystem"
export async function downloadUnzipTarball(
url: string,
name: string,
headers = {}
) {
try {
const path = createTempFolder(name)
await downloadTarballDirect(url, path, headers)
return path
} catch (e: any) {
throw new Error(e.message)
}
}

View file

@ -534,7 +534,7 @@ module External {
})
// this is the response from knex if no rows found
const rows = !response[0].read ? response : []
const storeTo = isMany ? field.throughFrom || linkPrimaryKey : manyKey
const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName
related[storeTo] = { rows, isMany, tableId }
}
return related

View file

@ -71,6 +71,7 @@ exports.save = async ctx => {
name: plugin.name,
version: plugin.version,
jsUrl: plugin.jsUrl,
hash: plugin.hash,
})
}
})

View file

@ -67,7 +67,8 @@
previewDevice,
navigation,
hiddenComponentIds,
usedPlugins
usedPlugins,
location
} = parsed
// Set some flags so the app knows we're in the builder
@ -83,6 +84,7 @@
window["##BUDIBASE_PREVIEW_NAVIGATION##"] = navigation
window["##BUDIBASE_HIDDEN_COMPONENT_IDS##"] = hiddenComponentIds
window["##BUDIBASE_USED_PLUGINS##"] = usedPlugins
window["##BUDIBASE_LOCATION##"] = location
// Initialise app
try {

View file

@ -2,16 +2,27 @@ import Router from "@koa/router"
import * as controller from "../controllers/application"
import authorized from "../../middleware/authorized"
import { BUILDER } from "@budibase/backend-core/permissions"
import { applicationValidator } from "./utils/validators"
const router = new Router()
router
.post("/api/applications/:appId/sync", authorized(BUILDER), controller.sync)
.post("/api/applications", authorized(BUILDER), controller.create)
.post(
"/api/applications",
authorized(BUILDER),
applicationValidator(),
controller.create
)
.get("/api/applications/:appId/definition", controller.fetchAppDefinition)
.get("/api/applications", controller.fetch)
.get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
.put("/api/applications/:appId", authorized(BUILDER), controller.update)
.put(
"/api/applications/:appId",
authorized(BUILDER),
applicationValidator(),
controller.update
)
.post(
"/api/applications/:appId/client/update",
authorized(BUILDER),

View file

@ -7,6 +7,7 @@ const router = new Router()
router
.post("/api/plugin/upload", authorized(BUILDER), controller.upload)
.post("/api/plugin", authorized(BUILDER), controller.create)
.get("/api/plugin", authorized(BUILDER), controller.fetch)
.delete("/api/plugin/:pluginId", authorized(BUILDER), controller.destroy)

View file

@ -56,7 +56,7 @@ router
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
controller.deleteObjects
)
.get("/preview", authorized(BUILDER), controller.serveBuilderPreview)
.get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview)
.get("/:appId/:path*", controller.serveApp)
.get("/app/:appUrl/:path*", controller.serveApp)
.post(

View file

@ -294,7 +294,7 @@ describe("/queries", () => {
"url": "string",
"value": "string"
})
expect(res.body.rows[0].url).toContain("doctype html")
expect(res.body.rows[0].url).toContain("doctype%20html")
})
it("check that it automatically retries on fail with cached dynamics", async () => {
@ -379,7 +379,7 @@ describe("/queries", () => {
"queryHdr": userDetails.firstName,
"secondHdr" : "1234"
})
expect(res.body.rows[0].url).toEqual("http://www.google.com?email=" + userDetails.email)
expect(res.body.rows[0].url).toEqual("http://www.google.com?email=" + userDetails.email.replace("@", "%40"))
})
it("should bind the current user to query parameters", async () => {
@ -396,7 +396,7 @@ describe("/queries", () => {
"testParam" : "1234"
})
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=" + userDetails.email +
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=" + userDetails.email.replace("@", "%40") +
"&testName=" + userDetails.firstName + "&testParam=1234")
})

View file

@ -150,14 +150,14 @@ describe("/static", () => {
})
})
describe("/preview", () => {
describe("/app/preview", () => {
beforeEach(() => {
jest.clearAllMocks()
})
it("should serve the builder preview", async () => {
const headers = config.defaultHeaders()
const res = await request.get(`/preview`).set(headers).expect(200)
const res = await request.get(`/app/preview`).set(headers).expect(200)
expect(res.body.appId).toBe(config.appId)
expect(res.body.builderPreview).toBe(true)

View file

@ -10,6 +10,7 @@ const Joi = require("joi")
const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
const OPTIONAL_NUMBER = Joi.number().optional().allow(null)
const OPTIONAL_BOOLEAN = Joi.boolean().optional().allow(null)
const APP_NAME_REGEX = /^[\w\s]+$/
exports.tableValidator = () => {
// prettier-ignore
@ -214,7 +215,7 @@ exports.applicationValidator = () => {
return joiValidator.body(Joi.object({
_id: OPTIONAL_STRING,
_rev: OPTIONAL_STRING,
name: Joi.string().required(),
name: Joi.string().pattern(new RegExp(APP_NAME_REGEX)).required().error(new Error('App name must be letters, numbers and spaces only')),
url: OPTIONAL_STRING,
template: Joi.object({
templateString: OPTIONAL_STRING,
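
The new APP_NAME_REGEX accepts letters, digits, underscores (\w) and whitespace; a quick check of what it allows and rejects:

const APP_NAME_REGEX = /^[\w\s]+$/
console.log(APP_NAME_REGEX.test("Sales Tracker 2")) // true
console.log(APP_NAME_REGEX.test("sales-tracker"))   // false (hyphen rejected)
console.log(APP_NAME_REGEX.test("sales/app"))       // false (slash rejected)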

View file

@ -32,6 +32,7 @@ import * as migrations from "./migrations"
import { events, installation, tenancy } from "@budibase/backend-core"
import { createAdminUser, getChecklist } from "./utilities/workerRequests"
import { watch } from "./watch"
import { initialise as initialiseWebsockets } from "./websocket"
const app = new Koa()
@ -76,6 +77,7 @@ if (env.isProd()) {
const server = http.createServer(app.callback())
destroyable(server)
initialiseWebsockets(server)
let shuttingDown = false,
errCode = 0
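
initialiseWebsockets is defined elsewhere in this changeset; a minimal sketch of attaching socket.io to the existing HTTP server (the "/socket/" path and the event names are assumptions for illustration only) would be:

const { Server } = require("socket.io")

function initialiseWebsockets(server) {
  const io = new Server(server, { path: "/socket/" })
  io.on("connection", socket => {
    // e.g. push plugin/build updates to connected preview clients
    socket.emit("connected")
  })
  return io
}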

View file

@ -1,16 +1,19 @@
const { processEvent } = require("./utils")
const { queue, shutdown } = require("./bullboard")
const { TRIGGER_DEFINITIONS } = require("./triggers")
const { TRIGGER_DEFINITIONS, rebootTrigger } = require("./triggers")
const { ACTION_DEFINITIONS } = require("./actions")
/**
* This module is built purely to kick off the worker farm and manage the inputs/outputs
*/
exports.init = function () {
exports.init = async function () {
// this promise will not complete
return queue.process(async job => {
const promise = queue.process(async job => {
await processEvent(job)
})
// on init we need to trigger any reboot automations
await rebootTrigger()
return promise
}
exports.getQueues = () => {

View file

@ -9,6 +9,7 @@ const { checkTestFlag } = require("../utilities/redis")
const utils = require("./utils")
const env = require("../environment")
const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
const { getAllApps } = require("@budibase/backend-core/db")
const TRIGGER_DEFINITIONS = definitions
const JOB_OPTS = {
@ -16,21 +17,24 @@ const JOB_OPTS = {
removeOnFail: true,
}
async function getAllAutomations() {
const db = getAppDB()
let automations = await db.allDocs(
getAutomationParams(null, { include_docs: true })
)
return automations.rows.map(row => row.doc)
}
async function queueRelevantRowAutomations(event, eventType) {
if (event.appId == null) {
throw `No appId specified for ${eventType} - check event emitters.`
}
doInAppContext(event.appId, async () => {
const db = getAppDB()
let automations = await db.allDocs(
getAutomationParams(null, { include_docs: true })
)
let automations = await getAllAutomations()
// filter down to the correct event type
automations = automations.rows
.map(automation => automation.doc)
.filter(automation => {
automations = automations.filter(automation => {
const trigger = automation.definition.trigger
return trigger && trigger.event === eventType
})
@ -110,4 +114,34 @@ exports.externalTrigger = async function (
}
}
exports.rebootTrigger = async () => {
// reboot cron option is only available on the main thread at
// startup and only usable in self host
if (env.isInThread() || !env.SELF_HOSTED) {
return
}
// iterate through all production apps, find the reboot crons
// and trigger events for them
const appIds = await getAllApps({ dev: false, idsOnly: true })
for (let prodAppId of appIds) {
await doInAppContext(prodAppId, async () => {
let automations = await getAllAutomations()
let rebootEvents = []
for (let automation of automations) {
if (utils.isRebootTrigger(automation)) {
const job = {
automation,
event: {
appId: prodAppId,
timestamp: Date.now(),
},
}
rebootEvents.push(queue.add(job, JOB_OPTS))
}
}
await Promise.all(rebootEvents)
})
}
}
exports.TRIGGER_DEFINITIONS = TRIGGER_DEFINITIONS
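
For context, the shape isRebootTrigger and rebootTrigger expect is a CRON-triggered automation whose cron input is the literal "@reboot" string; an illustrative document (the stepId value is an assumption):

const rebootAutomation = {
  definition: {
    trigger: {
      stepId: "CRON",              // assumed value of definitions.CRON.stepId
      inputs: { cron: "@reboot" }, // matches REBOOT_CRON
    },
  },
}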

View file

@ -17,6 +17,7 @@ import { tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import { Automation } from "@budibase/types"
const REBOOT_CRON = "@reboot"
const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)
@ -109,22 +110,33 @@ export async function clearMetadata() {
await db.bulkDocs(automationMetadata)
}
/**
* This function handles checking of any cron jobs that need to be enabled/updated.
* @param {string} appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} automation The automation object to be updated.
*/
export async function enableCronTrigger(appId: any, automation: any) {
const trigger = automation ? automation.definition.trigger : null
function isCronTrigger(auto: any) {
export function isCronTrigger(auto: Automation) {
return (
auto &&
auto.definition.trigger &&
auto.definition.trigger.stepId === CRON_STEP_ID
)
}
export function isRebootTrigger(auto: Automation) {
const trigger = auto ? auto.definition.trigger : null
return isCronTrigger(auto) && trigger?.inputs.cron === REBOOT_CRON
}
/**
* This function handles checking of any cron jobs that need to be enabled/updated.
* @param {string} appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} automation The automation object to be updated.
*/
export async function enableCronTrigger(appId: any, automation: Automation) {
const trigger = automation ? automation.definition.trigger : null
// need to create cron job
if (isCronTrigger(automation) && trigger?.inputs.cron) {
if (
isCronTrigger(automation) &&
!isRebootTrigger(automation) &&
trigger?.inputs.cron
) {
// make a job id rather than letting Bull decide, makes it easier to handle on way out
const jobId = `${appId}_cron_${newid()}`
const job: any = await queue.add(

View file

@ -34,6 +34,7 @@ export interface RestConfig {
defaultHeaders: {
[key: string]: any
}
legacyHttpParser: boolean
authConfigs: AuthConfig[]
staticVariables: {
[key: string]: string

View file

@ -96,7 +96,9 @@ function generateSchema(
const deletedColumns = Object.entries(oldTable.schema)
.filter(
([key, schema]) =>
schema.type !== FieldTypes.LINK && table.schema[key] == null
schema.type !== FieldTypes.LINK &&
schema.type !== FieldTypes.FORMULA &&
table.schema[key] == null
)
.map(([key]) => key)
deletedColumns.forEach(key => {

View file

@ -94,11 +94,16 @@ module.exports = {
for (let plugin of plugins) {
if (plugin.name === integration) {
// need to use commonJS require due to its dynamic runtime nature
return getDatasourcePlugin(
const retrieved: any = await getDatasourcePlugin(
plugin.name,
plugin.jsUrl,
plugin.schema?.hash
)
if (retrieved.integration) {
return retrieved.integration
} else {
return retrieved
}
}
}
}

View file

@ -14,6 +14,7 @@ import {
BearerAuthConfig,
} from "../definitions/datasource"
import { get } from "lodash"
import qs from "querystring"
const fetch = require("node-fetch")
const { formatBytes } = require("../utilities")
const { performance } = require("perf_hooks")
@ -75,6 +76,12 @@ const SCHEMA: Integration = {
required: false,
default: {},
},
legacyHttpParser: {
display: "Legacy HTTP Support",
type: DatasourceFieldType.BOOLEAN,
required: false,
default: false,
},
},
query: {
create: {
@ -211,7 +218,8 @@ class RestIntegration implements IntegrationBase {
}
}
const main = `${path}?${queryString}`
// make sure the query string is fully encoded
const main = `${path}?${qs.encode(qs.decode(queryString))}`
let complete = main
if (this.config.url && !main.startsWith("http")) {
complete = !this.config.url ? main : `${this.config.url}/${main}`
@ -373,6 +381,11 @@ class RestIntegration implements IntegrationBase {
paginationValues
)
if (this.config.legacyHttpParser) {
// https://github.com/nodejs/node/issues/43798
input.extraHttpOptions = { insecureHTTPParser: true }
}
this.startTimeMs = performance.now()
const url = this.getUrl(path, queryString, pagination, paginationValues)
const response = await fetch(url, input)

View file

@ -51,7 +51,7 @@ describe("REST Integration", () => {
name: "test",
}),
}
const response = await config.integration.create(query)
await config.integration.create(query)
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
method: "POST",
body: '{"name":"test"}',
@ -299,7 +299,7 @@ describe("REST Integration", () => {
}
await config.integration.read(query)
expect(fetch).toHaveBeenCalledWith(
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}&`,
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}`,
{
headers: {},
method: "GET",
@ -426,7 +426,7 @@ describe("REST Integration", () => {
}
const res = await config.integration.read(query)
expect(fetch).toHaveBeenCalledWith(
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}&`,
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}`,
{
headers: {},
method: "GET",
@ -536,5 +536,40 @@ describe("REST Integration", () => {
expect(sentData.get(sizeParam)).toEqual(sizeValue.toString())
expect(res.pagination.cursor).toEqual(123)
})
it("should encode query string correctly", async () => {
const query = {
path: "api",
queryString: "test=1 2",
headers: HEADERS,
bodyType: "json",
requestBody: JSON.stringify({
name: "test",
}),
}
await config.integration.create(query)
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1%202`, {
method: "POST",
body: '{"name":"test"}',
headers: HEADERS,
})
})
})
describe("Configuration options", () => {
it("Attaches insecureHttpParams when legacy HTTP Parser option is set", async () => {
config = new TestConfiguration({
url: BASE_URL,
legacyHttpParser: true,
})
await config.integration.read({})
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/?`, {
method: "GET",
headers: {},
extraHttpOptions: {
insecureHTTPParser: true,
},
})
})
})
})

Some files were not shown because too many files have changed in this diff.