
Merge branch 'develop' of github.com:Budibase/budibase into plugins-dev-experience-websocket

Andrew Kingston 2022-09-12 20:00:32 +01:00
commit 3d57010a10
118 changed files with 1735 additions and 1818 deletions

View file

@ -162,7 +162,10 @@ spec:
name: bbapps
ports:
- containerPort: {{ .Values.services.apps.port }}
resources: {}
{{ with .Values.services.apps.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View file

@ -38,7 +38,10 @@ spec:
image: redgeoff/replicate-couchdb-cluster
imagePullPolicy: Always
name: couchdb-backup
resources: {}
{{ with .Values.services.couchdb.backup.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View file

@ -56,7 +56,10 @@ spec:
name: minio-service
ports:
- containerPort: {{ .Values.services.objectStore.port }}
resources: {}
{{ with .Values.services.objectStore.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: minio-data

View file

@ -30,7 +30,10 @@ spec:
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
resources: {}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
{{- with .Values.affinity }}
affinity:

View file

@ -35,7 +35,10 @@ spec:
name: redis-service
ports:
- containerPort: {{ .Values.services.redis.port }}
resources: {}
{{ with .Values.services.redis.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: redis-data

View file

@ -151,7 +151,10 @@ spec:
name: bbworker
ports:
- containerPort: {{ .Values.services.worker.port }}
resources: {}
{{ with .Values.services.worker.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View file

@ -60,19 +60,6 @@ ingress:
port:
number: 10000
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
@ -125,16 +112,19 @@ services:
proxy:
port: 10000
replicaCount: 1
resources: {}
apps:
port: 4002
replicaCount: 1
logLevel: info
resources: {}
# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003
replicaCount: 1
resources: {}
couchdb:
enabled: true
@ -148,6 +138,7 @@ services:
target: ""
# backup interval in seconds
interval: ""
resources: {}
redis:
enabled: true # disable if using external redis
@ -161,6 +152,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
objectStore:
minio: true
@ -177,6 +169,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
# Override values in couchDB subchart
couchdb:

View file

@ -11,8 +11,8 @@
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
"node-fetch": "^3.2.2",
"node-sass": "^7.0.1",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
}
}

View file

@ -2020,10 +2020,10 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.2.tgz#16d33fbe32ca7c6ca1ca8ba5dfea1dd885c59f04"
integrity sha512-Cwhq1JFIoon15wcIkFzubVNFE5GvXGV82pKf4knXXjvGmn7RJKcypeuqcVNZMGDZsAFWyIRya/anwAJr7TWJ7w==
node-fetch@^3.2.10:
version "3.2.10"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8"
integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"

View file

@ -65,10 +65,6 @@ http {
proxy_pass http://{{ address }}:4001;
}
location /preview {
proxy_pass http://{{ address }}:4001;
}
location /builder {
proxy_pass http://{{ address }}:3000;
rewrite ^/builder(.*)$ /builder/$1 break;

View file

@ -88,10 +88,6 @@ http {
proxy_pass http://$apps:4002;
}
location /preview {
proxy_pass http://$apps:4002;
}
location = / {
proxy_pass http://$apps:4002;
}

View file

@ -4,9 +4,9 @@ echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR=/home
mkdir -p $DATA_DIR/{search,minio,couchdb}
mkdir -p $DATA_DIR/couchdb/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couchdb/
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
@ -16,5 +16,4 @@ if [[ "${TARGETBUILD}" = "aas" ]]; then
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

View file

@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = DATA_DIR/couchdb/dbs
view_index_dir = DATA_DIR/couchdb/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views

View file

@ -36,10 +36,10 @@ fi
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
# make these directories in runner, in case of mount
mkdir -p ${DATA_DIR}/couchdb/{dbs,views}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couchdb
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server ${DATA_DIR}/minio &

View file

@ -1,5 +1,5 @@
{
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,7 +20,8 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "1.3.4-alpha.1",
"@budibase/types": "1.3.15-alpha.0",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
@ -60,7 +61,6 @@
]
},
"devDependencies": {
"@shopify/jest-koa-mocks": "3.1.5",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",

View file

@ -0,0 +1,3 @@
module.exports = {
...require("./src/plugin"),
}

View file

@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { DocumentType } from "./constants"
class Replication {
source: any
@ -53,6 +54,14 @@ class Replication {
return this.replication
}
appReplicateOpts() {
return {
filter: (doc: any) => {
return doc._id !== DocumentType.APP_METADATA
},
}
}
/**
* Rollback the target DB back to the state of the source DB
*/
@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}
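
The options returned by appReplicateOpts() are consumed by the deploy and sync endpoints later in this diff. A minimal usage sketch, treating the Replication instance and its { source, target } database names as placeholders:

// Sketch only: `replication` is a Replication instance constructed elsewhere
// with { source, target } app database names, as the deploy code in this diff does.
async function replicateWithoutMetadata(replication) {
  // The filter in appReplicateOpts() stops the app metadata document from
  // being copied between the source and target databases.
  await replication.replicate(replication.appReplicateOpts())
}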

View file

@ -254,7 +254,16 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
return appDbNames
const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
switch (dev) {
case true:
return devAppIds
case false:
return prodAppIds
default:
return appDbNames
}
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
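
A standalone sketch of the dev/prod split introduced above. The app IDs are made up, and the isDevAppID check is an illustrative assumption (dev app databases are assumed to carry an "app_dev" prefix):

// Illustrative only: assumes dev app databases are prefixed "app_dev".
const isDevAppID = appId => appId.startsWith("app_dev")

const appDbNames = ["app_dev_abc123", "app_abc123", "app_dev_def456"] // placeholder IDs
const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))

console.log(devAppIds)  // ["app_dev_abc123", "app_dev_def456"]
console.log(prodAppIds) // ["app_abc123"]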

View file

@ -18,6 +18,7 @@ import * as dbConstants from "./db/constants"
import logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
import plugins from "./plugin"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -56,6 +57,7 @@ const core = {
errors,
logging,
roles,
plugins,
...pino,
...errorClasses,
middleware,

View file

@ -307,9 +307,13 @@ export const uploadDirectory = async (
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {
exports.downloadTarballDirect = async (
url: string,
path: string,
headers = {}
) => {
path = sanitizeKey(path)
const response = await fetch(url)
const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
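
A hypothetical call to the extended helper, showing how the new headers argument can carry auth for a protected tarball. The URL, destination key and token are placeholders, and the import path assumes the function is exported from the same objectStore module as uploadDirectory:

const { downloadTarballDirect } = require("@budibase/backend-core/objectStore")

async function fetchPrivateTarball(token) {
  // Placeholder URL and destination key; the third argument is the new
  // headers object, forwarded straight to fetch().
  await downloadTarballDirect(
    "https://example.com/releases/plugin.tar.gz",
    "plugins/example-plugin",
    { Authorization: `Bearer ${token}` }
  )
}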

View file

@ -0,0 +1,7 @@
import * as utils from "./utils"
const pkg = {
...utils,
}
export = pkg

View file

@ -1,5 +1,8 @@
const { PluginTypes } = require("./constants")
const { DatasourceFieldType, QueryType } = require("@budibase/types")
const {
DatasourceFieldType,
QueryType,
PluginType,
} = require("@budibase/types")
const joi = require("joi")
const DATASOURCE_TYPES = [
@ -78,11 +81,11 @@ function validateDatasource(schema) {
}
exports.validate = schema => {
switch (schema.type) {
case PluginTypes.COMPONENT:
switch (schema?.type) {
case PluginType.COMPONENT:
validateComponent(schema)
break
case PluginTypes.DATASOURCE:
case PluginType.DATASOURCE:
validateDatasource(schema)
break
default:

View file

@ -543,13 +543,13 @@
semver "^7.3.5"
tar "^6.1.11"
"@shopify/jest-koa-mocks@3.1.5":
version "3.1.5"
resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-3.1.5.tgz#11f77ccfbcaf35cf5ee2c6108a286e61e6bea084"
integrity sha512-gQ3/7ELerv00TWO37AGFX5mT9CsFCS+3/UbKMuoIlKEU0QH2OX8BV9WBf/EKw7adCDNlxss0lqV6J8kf5pgr4A==
"@shopify/jest-koa-mocks@5.0.1":
version "5.0.1"
resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-5.0.1.tgz#fba490b6b7985fbb571eb9974897d396a3642e94"
integrity sha512-4YskS9q8+TEHNoyopmuoy2XyhInyqeOl7CF5ShJs19sm6m0EA/jGGvgf/osv2PeTfuf42/L2G9CzWUSg49yTSg==
dependencies:
koa "^2.13.4"
node-mocks-http "^1.5.8"
node-mocks-http "^1.11.0"
"@sideway/address@^4.1.3":
version "4.1.4"
@ -3914,7 +3914,7 @@ node-int64@^0.4.0:
resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b"
integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==
node-mocks-http@^1.5.8:
node-mocks-http@^1.11.0:
version "1.11.0"
resolved "https://registry.yarnpkg.com/node-mocks-http/-/node-mocks-http-1.11.0.tgz#defc0febf6b935f08245397d47534a8de592996e"
integrity sha512-jS/WzSOcKbOeGrcgKbenZeNhxUNnP36Yw11+hL4TTxQXErGfqYZ+MaYNNvhaTiGIJlzNSqgQkk9j8dSu1YWSuw==

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "1.3.4-alpha.1",
"@budibase/string-templates": "1.3.15-alpha.0",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

View file

@ -4,10 +4,15 @@
export let size = "M"
export let tooltip = ""
export let muted
</script>
<TooltipWrapper {tooltip} {size}>
<label for="" class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}>
<label
class:muted
for=""
class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}
>
<slot />
</label>
</TooltipWrapper>
@ -17,4 +22,8 @@
padding: 0;
white-space: nowrap;
}
.muted {
opacity: 0.5;
}
</style>

View file

@ -24,7 +24,6 @@
export let secondaryAction = undefined
export let secondaryButtonWarning = false
export let dataCy = null
const { hide, cancel } = getContext(Context.Modal)
let loading = false
$: confirmDisabled = disabled || loading
@ -88,12 +87,11 @@
<section class="spectrum-Dialog-content content-grid">
<slot />
</section>
{#if showCancelButton || showConfirmButton}
{#if showCancelButton || showConfirmButton || $$slots.footer}
<div
class="spectrum-ButtonGroup spectrum-Dialog-buttonGroup spectrum-Dialog-buttonGroup--noFooter"
>
<slot name="footer" />
{#if showSecondaryButton && secondaryButtonText && secondaryAction}
<div class="secondary-action">
<Button

View file

@ -10,6 +10,7 @@
export let noHorizPadding = false
export let quiet = false
export let emphasized = false
export let onTop = false
export let size = "M"
let thisSelected = undefined
@ -75,6 +76,7 @@
bind:this={container}
class:spectrum-Tabs--quiet={quiet}
class:noHorizPadding
class:onTop
class:spectrum-Tabs--vertical={vertical}
class:spectrum-Tabs--horizontal={!vertical}
class="spectrum-Tabs spectrum-Tabs--size{size}"
@ -122,4 +124,7 @@
.noPadding {
margin: 0;
}
.onTop {
z-index: 100;
}
</style>

View file

@ -74,11 +74,11 @@ filterTests(["smoke", "all"], () => {
.contains("Update role")
.click({ force: true })
})
cy.reload({ timeout: 5000 })
cy.reload()
cy.wait(1000)
}
// Confirm roles exist within Configure roles table
cy.get(interact.SPECTRUM_TABLE, { timeout: 2000 })
cy.get(interact.SPECTRUM_TABLE, { timeout: 20000 })
.eq(0)
.within(assginedRoles => {
expect(assginedRoles).to.contain("Admin")
@ -180,7 +180,7 @@ filterTests(["smoke", "all"], () => {
cy.reload()
// Confirm details have been saved
cy.get(interact.FIELD, { timeout: 1000 }).eq(1).within(() => {
cy.get(interact.FIELD, { timeout: 20000 }).eq(1).within(() => {
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).should('have.value', "bb")
})
cy.get(interact.FIELD, { timeout: 1000 }).eq(2).within(() => {

View file

@ -20,7 +20,7 @@ filterTests(["all"], () => {
//Use the tree to delete a selected component
const deleteSelectedComponent = () => {
cy.get(
".nav-items-container .nav-item.selected .actions > div > .icon"
".nav-item.selected .actions > div > .icon"
).click({
force: true,
})
@ -91,7 +91,7 @@ filterTests(["all"], () => {
cy.searchAndAddComponent("Paragraph").then(componentId => {
cy.get("[data-cy=setting-_instanceName] input").type(componentId).blur()
cy.get(
".nav-items-container .nav-item.selected .actions > div > .icon"
".nav-item.selected .actions > div > .icon"
).click({
force: true,
})
@ -145,7 +145,7 @@ filterTests(["all"], () => {
return testFieldFocusOnCreate(label)
})
.then(() => {
cy.get(".nav-items-container .nav-item")
cy.get(".nav-item")
.contains(formId)
.click({ force: true })
deleteSelectedComponent()
@ -195,7 +195,7 @@ filterTests(["all"], () => {
return testFocusOnCreate(label)
})
.then(() => {
cy.get(".nav-items-container .nav-item")
cy.get(".nav-item")
.contains(providerId)
.click({ force: true })
deleteSelectedComponent()
@ -218,7 +218,7 @@ filterTests(["all"], () => {
.find(".component-placeholder")
.should("not.exist")
cy.getComponent(imageId).find(`img[alt=${imageId}]`).should("exist")
cy.get(".nav-items-container .nav-item")
cy.get(".nav-item")
.contains(imageId)
.click({ force: true })
deleteSelectedComponent()
@ -242,7 +242,7 @@ filterTests(["all"], () => {
cy.getComponent(markdownId)
.find(".editor-preview-full h1")
.contains("Hi")
cy.get(".nav-items-container .nav-item")
cy.get(".nav-item")
.contains(markdownId)
.click({ force: true })
deleteSelectedComponent()
@ -265,7 +265,7 @@ filterTests(["all"], () => {
.find(".component-placeholder")
.should("not.exist")
cy.getComponent(iconId).find("i.ri-save-fill").should("exist")
cy.get(".nav-items-container .nav-item")
cy.get(".nav-item")
.contains(iconId)
.click({ force: true })
deleteSelectedComponent()

View file

@ -1,7 +1,7 @@
import filterTests from "../../support/filterTests"
filterTests(['all'], () => {
context("Datasource Wizard", () => {
xcontext("Datasource Wizard", () => {
if (Cypress.env("TEST_ENV")) {
before(() => {
cy.login()

View file

@ -1,7 +1,7 @@
import filterTests from "../../support/filterTests"
filterTests(["all"], () => {
context("Oracle Datasource Testing", () => {
xcontext("Oracle Datasource Testing", () => {
if (Cypress.env("TEST_ENV")) {
before(() => {
cy.login()

View file

@ -162,7 +162,7 @@ filterTests(["all"], () => {
switchSchema("randomText")
// No tables displayed
cy.get(".spectrum-Body", { timeout: 5000 }).eq(2).should("contain", "No tables found")
cy.get(".spectrum-Body", { timeout: 10000 }).eq(2, { timeout: 10000 }).should("contain", "No tables found")
// Previously created query should be visible
cy.get(".spectrum-Table").should("contain", queryName)
@ -173,7 +173,7 @@ filterTests(["all"], () => {
switchSchema("1")
// Confirm tables exist - Check for specific one
cy.get(".spectrum-Table", { timeout: 5000 }).eq(0).should("contain", "test")
cy.get(".spectrum-Table", { timeout: 20000 }).eq(0).should("contain", "test")
cy.get(".spectrum-Table")
.eq(0)
.find(".spectrum-Table-row")
@ -187,7 +187,7 @@ filterTests(["all"], () => {
switchSchema("public")
// Confirm tables exist - again
cy.get(".spectrum-Table", { timeout: 5000 }).eq(0).should("contain", "REGIONS")
cy.get(".spectrum-Table", { timeout: 20000 }).eq(0).should("contain", "REGIONS")
cy.get(".spectrum-Table")
.eq(0)
.find(".spectrum-Table-row")

View file

@ -14,7 +14,7 @@ filterTests(["smoke", "all"], () => {
// Select REST data source
cy.selectExternalDatasource(datasource)
// Enter incorrect api & attempt to send query
cy.get(".spectrum-Button", { timeout: 500 }).contains("Add query").click({ force: true })
cy.get(".query-buttons", { timeout: 1000 }).contains("Add query").click({ force: true })
cy.intercept("**/preview").as("queryError")
cy.get("input").clear().type("random text")
cy.get(".spectrum-Button").contains("Send").click({ force: true })

View file

@ -4,7 +4,7 @@ Cypress.on("uncaught:exception", () => {
// ACCOUNTS & USERS
Cypress.Commands.add("login", (email, password) => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 10000 })
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.url()
.should("include", "/builder/")
.then(url => {
@ -33,7 +33,7 @@ Cypress.Commands.add("login", (email, password) => {
})
Cypress.Commands.add("logOut", () => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 2000 })
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.get(".user-dropdown .avatar > .icon").click({ force: true })
cy.get(".spectrum-Popover[data-cy='user-menu']").within(() => {
cy.get("li[data-cy='user-logout']").click({ force: true })
@ -43,7 +43,7 @@ Cypress.Commands.add("logOut", () => {
Cypress.Commands.add("logoutNoAppGrid", () => {
// Logs user out when app grid is not present
cy.visit(`${Cypress.config().baseUrl}/builder`)
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.get(".avatar > .icon").click({ force: true })
cy.get(".spectrum-Popover[data-cy='user-menu']").within(() => {
cy.get(".spectrum-Menu-item").contains("Log out").click({ force: true })
@ -68,11 +68,14 @@ Cypress.Commands.add("createUser", (email, permission) => {
.click({ force: true })
})
}
// Add user and wait for modal to change
cy.get(".spectrum-Button").contains("Add user").click({ force: true })
// Add user
cy.get(".spectrum-Button").contains("Add users").click({ force: true })
cy.get(".spectrum-ActionButton").contains("Add email").should("not.exist")
})
// Onboarding modal
cy.get(".spectrum-Dialog-grid", { timeout: 5000 }).contains(
"Choose your onboarding"
)
cy.get(".spectrum-Dialog-grid").within(() => {
cy.get(".onboarding-type").eq(1).click()
cy.get(".spectrum-Button").contains("Done").click({ force: true })
@ -163,7 +166,7 @@ Cypress.Commands.add("createApp", (name, addDefaultTable) => {
const shouldCreateDefaultTable =
typeof addDefaultTable != "boolean" ? true : addDefaultTable
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 10000 })
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.url({ timeout: 30000 }).should("include", "/apps")
cy.get(`[data-cy="create-app-btn"]`, { timeout: 5000 }).click({ force: true })
@ -197,7 +200,7 @@ Cypress.Commands.add("createApp", (name, addDefaultTable) => {
})
Cypress.Commands.add("deleteApp", name => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000 })
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.wait(2000)
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`)
.its("body")
@ -254,7 +257,7 @@ Cypress.Commands.add("deleteApp", name => {
})
Cypress.Commands.add("deleteAllApps", () => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000 })
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.wait(500)
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`, {
timeout: 5000,
@ -351,7 +354,7 @@ Cypress.Commands.add("alterAppVersion", (appId, version) => {
})
Cypress.Commands.add("importApp", (exportFilePath, name) => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000 })
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`)
.its("body")
@ -386,7 +389,7 @@ Cypress.Commands.add("importApp", (exportFilePath, name) => {
// Filters visible with 1 or more
Cypress.Commands.add("searchForApplication", appName => {
cy.visit(`${Cypress.config().baseUrl}/builder`)
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.wait(2000)
// No app filter functionality if only 1 app exists
@ -409,7 +412,7 @@ Cypress.Commands.add("searchForApplication", appName => {
// Assumes there are no others
Cypress.Commands.add("applicationInAppTable", appName => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 10000 })
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 30000 })
cy.get(".appTable", { timeout: 5000 }).within(() => {
cy.get(".title").contains(appName).should("exist")
})
@ -454,8 +457,8 @@ Cypress.Commands.add("createTable", (tableName, initialTable) => {
cy.get(".spectrum-ButtonGroup").contains("Create").click()
})
// Ensure modal has closed and table is created
cy.get(".spectrum-Modal").should("not.exist")
cy.get(".spectrum-Tabs-content", { timeout: 1000 }).should(
cy.get(".spectrum-Modal", { timeout: 2000 }).should("not.exist")
cy.get(".spectrum-Tabs-content", { timeout: 2000 }).should(
"contain",
tableName
)
@ -634,30 +637,32 @@ Cypress.Commands.add(
(datasourceNames, accessLevelLabel) => {
cy.contains("Design").click()
cy.get(".spectrum-Button").contains("Add screen").click({ force: true })
cy.get(".spectrum-Modal").within(() => {
cy.get(".item").contains("Autogenerated screens").click()
cy.get(".spectrum-Dialog-grid").within(() => {
cy.get("[data-cy='autogenerated-screens']").click()
cy.intercept("**/api/datasources").as("autoScreens")
cy.get(".spectrum-Button").contains("Continue").click({ force: true })
cy.wait("@autoScreens")
cy.wait(5000)
})
cy.get(".spectrum-Modal [data-cy='data-source-modal']", {
timeout: 500,
}).within(() => {
cy.get("[data-cy='autogenerated-screens']").should("not.exist")
cy.get("[data-cy='data-source-modal']", { timeout: 10000 }).within(() => {
for (let i = 0; i < datasourceNames.length; i++) {
cy.wait(500)
cy.get(".data-source-entry").contains(datasourceNames[i]).click()
//Ensure the check mark is visible
cy.get(".data-source-entry")
.contains(datasourceNames[i], { timeout: 20000 })
.click({ force: true })
// Ensure the check mark is visible
cy.get(".data-source-entry")
.contains(datasourceNames[i])
.get(".data-source-check")
.get(".data-source-check", { timeout: 20000 })
.should("exist")
}
cy.get(".spectrum-Button").contains("Confirm").click({ force: true })
})
cy.get(".spectrum-Modal").within(() => {
cy.get(".spectrum-Modal", { timeout: 10000 }).within(() => {
if (accessLevelLabel) {
cy.get(".spectrum-Picker-label").click()
cy.wait(500)
cy.get(".spectrum-Picker-label", { timeout: 10000 }).click()
cy.contains(accessLevelLabel).click()
}
cy.get(".spectrum-Button").contains("Done").click({ force: true })
@ -912,8 +917,9 @@ Cypress.Commands.add("createRestQuery", (method, restUrl, queryPrettyName) => {
Cypress.Commands.add("closeModal", () => {
cy.get(".spectrum-Modal", { timeout: 2000 }).within(() => {
cy.get(".close-icon").click()
cy.wait(1000) // Wait for modal to close
})
// Confirm modal has closed
cy.get(".spectrum-Modal", { timeout: 10000 }).should("not.exist")
})
Cypress.Commands.add("expandBudibaseConnection", () => {

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -69,10 +69,10 @@
}
},
"dependencies": {
"@budibase/bbui": "1.3.4-alpha.1",
"@budibase/client": "1.3.4-alpha.1",
"@budibase/frontend-core": "1.3.4-alpha.1",
"@budibase/string-templates": "1.3.4-alpha.1",
"@budibase/bbui": "1.3.15-alpha.0",
"@budibase/client": "1.3.15-alpha.0",
"@budibase/frontend-core": "1.3.15-alpha.0",
"@budibase/string-templates": "1.3.15-alpha.0",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View file

@ -1,6 +1,7 @@
<script>
import { Button, Select, Input, Label } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import { onMount, createEventDispatcher } from "svelte"
import { flags } from "stores/backend"
const dispatch = createEventDispatcher()
export let value
@ -29,11 +30,16 @@
label: "Every Night at Midnight",
value: "0 0 * * *",
},
{
label: "Every Budibase Reboot",
value: "@reboot",
},
]
onMount(() => {
if (!$flags.cloud) {
CRON_EXPRESSIONS.push({
label: "Every Budibase Reboot",
value: "@reboot",
})
}
})
</script>
<div class="block-field">

View file

@ -166,7 +166,10 @@
/>
{/each}
</div>
{#if customIntegrations.length > 0}
</Layout>
{#if customIntegrations.length > 0}
<Layout noPadding gap="XS">
<Body size="S">Custom data source</Body>
<div class="item-list">
{#each customIntegrations as [integrationType, schema]}
@ -178,8 +181,8 @@
/>
{/each}
</div>
{/if}
</Layout>
</Layout>
{/if}
</ModalContent>
</Modal>

View file

@ -23,6 +23,7 @@
const dispatch = createEventDispatcher()
let bindingDrawer
let valid = true
let currentVal = value
$: readableValue = runtimeToReadableBinding(bindings, value)
$: tempValue = readableValue
@ -30,11 +31,17 @@
const saveBinding = () => {
onChange(tempValue)
onBlur()
bindingDrawer.hide()
}
const onChange = value => {
dispatch("change", readableToRuntimeBinding(bindings, value))
currentVal = readableToRuntimeBinding(bindings, value)
dispatch("change", currentVal)
}
const onBlur = () => {
dispatch("blur", currentVal)
}
</script>
@ -45,6 +52,7 @@
readonly={isJS}
value={isJS ? "(JavaScript function)" : readableValue}
on:change={event => onChange(event.detail)}
on:blur={onBlur}
{placeholder}
{updateOnChange}
/>

View file

@ -52,7 +52,7 @@
)
newBlock.inputs = {
fields: Object.keys(parameters.fields).reduce((fields, key) => {
fields: Object.keys(parameters.fields ?? {}).reduce((fields, key) => {
fields[key] = "string"
return fields
}, {}),

View file

@ -107,7 +107,7 @@
placeholder={keyPlaceholder}
readonly={readOnly}
bind:value={field.name}
on:change={changed}
on:blur={changed}
/>
{#if options}
<Select bind:value={field.value} on:change={changed} {options} />
@ -115,7 +115,10 @@
<DrawerBindableInput
{bindings}
placeholder="Value"
on:change={e => (field.value = e.detail)}
on:blur={e => {
field.value = e.detail
changed()
}}
disabled={readOnly}
value={field.value}
allowJS={false}
@ -127,7 +130,7 @@
placeholder={valuePlaceholder}
readonly={readOnly}
bind:value={field.value}
on:change={changed}
on:blur={changed}
/>
{/if}
{#if toggle}

View file

@ -1,16 +1,24 @@
<script>
import { ModalContent, Toggle } from "@budibase/bbui"
import { ModalContent, Toggle, Body } from "@budibase/bbui"
export let app
export let published
let excludeRows = false
$: title = published ? "Export published app" : "Export latest app"
$: confirmText = published ? "Export published" : "Export latest"
const exportApp = () => {
const id = app.deployed ? app.prodId : app.devId
const id = published ? app.prodId : app.devId
const appName = encodeURIComponent(app.name)
window.location = `/api/backups/export?appId=${id}&appname=${appName}&excludeRows=${excludeRows}`
}
</script>
<ModalContent title={"Export"} confirmText={"Export"} onConfirm={exportApp}>
<ModalContent {title} {confirmText} onConfirm={exportApp}>
<Body
>Apps can be exported with or without data that is within internal tables -
select this below.</Body
>
<Toggle text="Exclude Rows" bind:value={excludeRows} />
</ModalContent>

View file

@ -57,3 +57,10 @@ export const DefaultAppTheme = {
navBackground: "var(--spectrum-global-color-gray-50)",
navTextColor: "var(--spectrum-global-color-gray-800)",
}
export const PluginSource = {
URL: "URL",
NPM: "NPM",
GITHUB: "Github",
FILE: "File Upload",
}

View file

@ -46,7 +46,7 @@ export function buildQueryString(obj) {
if (str !== "") {
str += "&"
}
str += `${key}=${value || ""}`
str += `${key}=${encodeURIComponent(value || "")}`
}
}
return str
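
A self-contained approximation of the helper after this change. Only the encoding line comes from the diff; the surrounding loop is assumed for the sake of a runnable example:

function buildQueryString(obj) {
  let str = ""
  for (let [key, value] of Object.entries(obj || {})) {
    if (str !== "") {
      str += "&"
    }
    // Encoding the value keeps characters like "&", "=" and spaces from
    // corrupting the query string.
    str += `${key}=${encodeURIComponent(value || "")}`
  }
  return str
}

console.log(buildQueryString({ name: "Bob & Co", city: "Belfast" }))
// name=Bob%20%26%20Co&city=Belfast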

View file

@ -28,25 +28,25 @@
import { onMount } from "svelte"
import restUtils from "helpers/data/utils"
import {
RestBodyTypes as bodyTypes,
SchemaTypeOptions,
PaginationLocations,
PaginationTypes,
RawRestBodyTypes,
RestBodyTypes as bodyTypes,
SchemaTypeOptions,
} from "constants/backend"
import JSONPreview from "components/integration/JSONPreview.svelte"
import AccessLevelSelect from "components/integration/AccessLevelSelect.svelte"
import DynamicVariableModal from "../../_components/DynamicVariableModal.svelte"
import Placeholder from "assets/bb-spaceship.svg"
import { cloneDeep } from "lodash/fp"
import { RawRestBodyTypes } from "constants/backend"
import {
getRestBindings,
toBindingsArray,
runtimeToReadableBinding,
readableToRuntimeBinding,
runtimeToReadableMap,
readableToRuntimeMap,
runtimeToReadableBinding,
runtimeToReadableMap,
toBindingsArray,
} from "builderStore/dataBinding"
let query, datasource
@ -95,7 +95,7 @@
$: runtimeUrlQueries = readableToRuntimeMap(mergedBindings, breakQs)
function getSelectedQuery() {
const cloneQuery = cloneDeep(
return cloneDeep(
$queries.list.find(q => q._id === $queries.selected) || {
datasourceId: $params.selectedDatasource,
parameters: [],
@ -107,7 +107,6 @@
queryVerb: "read",
}
)
return cloneQuery
}
function checkQueryName(inputUrl = null) {
@ -121,14 +120,15 @@
if (!base) {
return base
}
const qs = restUtils.buildQueryString(
let qs = restUtils.buildQueryString(
runtimeToReadableMap(mergedBindings, qsObj)
)
let newUrl = base
if (base.includes("?")) {
newUrl = base.split("?")[0]
const split = base.split("?")
newUrl = split[0]
}
return qs.length > 0 ? `${newUrl}?${qs}` : newUrl
return qs.length === 0 ? newUrl : `${newUrl}?${qs}`
}
function buildQuery() {
@ -314,6 +314,25 @@
}
}
const paramsChanged = evt => {
breakQs = {}
for (let param of evt.detail) {
breakQs[param.name] = param.value
}
}
const urlChanged = evt => {
breakQs = {}
const qs = evt.target.value.split("?")[1]
if (qs && qs.length > 0) {
const parts = qs.split("&")
for (let part of parts) {
const [key, value] = part.split("=")
breakQs[key] = value
}
}
}
onMount(async () => {
query = getSelectedQuery()
@ -426,7 +445,11 @@
/>
</div>
<div class="url">
<Input bind:value={url} placeholder="http://www.api.com/endpoint" />
<Input
on:blur={urlChanged}
bind:value={url}
placeholder="http://www.api.com/endpoint"
/>
</div>
<Button primary disabled={!url} on:click={runQuery}>Send</Button>
<Button
@ -456,13 +479,16 @@
/>
</Tab>
<Tab title="Params">
<KeyValueBuilder
bind:object={breakQs}
name="param"
headings
bindings={mergedBindings}
bindingDrawerLeft="260px"
/>
{#key breakQs}
<KeyValueBuilder
on:change={paramsChanged}
object={breakQs}
name="param"
headings
bindings={mergedBindings}
bindingDrawerLeft="260px"
/>
{/key}
</Tab>
<Tab title="Headers">
<KeyValueBuilder

View file

@ -292,7 +292,7 @@
<iframe
title="componentPreview"
bind:this={iframe}
src="/preview"
src="/app/preview"
class:hidden={loading || error}
class:tablet={$store.previewDevice === "tablet"}
class:mobile={$store.previewDevice === "mobile"}

View file

@ -54,7 +54,7 @@
},
}
const handleKeyAction = async (component, key, ctrlKey = false) => {
const handleKeyAction = async (event, component, key, ctrlKey = false) => {
if (!component || !key) {
return false
}
@ -70,6 +70,9 @@
const handler = keyHandlers[key]
if (!handler) {
return false
} else if (event) {
event.preventDefault()
event.stopPropagation()
}
return handler(component)
} catch (error) {
@ -89,14 +92,19 @@
return
}
// Key events are always for the selected component
return handleKeyAction($selectedComponent, e.key, e.ctrlKey || e.metaKey)
return await handleKeyAction(
e,
$selectedComponent,
e.key,
e.ctrlKey || e.metaKey
)
}
const handleComponentMenu = async e => {
// Menu events can be for any component
const { id, key, ctrlKey } = e.detail
const component = findComponent($selectedScreen.props, id)
return await handleKeyAction(component, key, ctrlKey)
return await handleKeyAction(null, component, key, ctrlKey)
}
onMount(() => {

View file

@ -58,10 +58,14 @@
enrichedStructure.push({
name: "Plugins",
isCategory: true,
children: customComponents.map(x => ({
...definitions[x],
name: definitions[x].friendlyName || definitions[x].name,
})),
children: customComponents
.map(x => ({
...definitions[x],
name: definitions[x].friendlyName || definitions[x].name,
}))
.sort((a, b) => {
return a.name.toLowerCase() < b.name.toLowerCase() ? -1 : 1
}),
})
}

View file

@ -55,6 +55,8 @@
},
{ title: "Auth", href: "/builder/portal/manage/auth" },
{ title: "Email", href: "/builder/portal/manage/email" },
{ title: "Plugins", href: "/builder/portal/manage/plugins" },
{
title: "Organisation",
href: "/builder/portal/settings/organisation",

View file

@ -15,7 +15,6 @@
import Spinner from "components/common/Spinner.svelte"
import CreateAppModal from "components/start/CreateAppModal.svelte"
import UpdateAppModal from "components/start/UpdateAppModal.svelte"
import ExportAppModal from "components/start/ExportAppModal.svelte"
import { store, automationStore } from "builderStore"
import { API } from "api"
@ -33,7 +32,6 @@
let selectedApp
let creationModal
let updatingModal
let exportModal
let creatingApp = false
let loaded = $apps?.length || $templates?.length
let searchTerm = ""
@ -407,10 +405,6 @@
<UpdateAppModal app={selectedApp} />
</Modal>
<Modal bind:this={exportModal} padding={false} width="600px">
<ExportAppModal app={selectedApp} />
</Modal>
<style>
.appTable {
border-top: var(--border-light);

View file

@ -0,0 +1,127 @@
<script>
import {
ModalContent,
Label,
Input,
Select,
Dropzone,
Body,
notifications,
} from "@budibase/bbui"
import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
import { plugins } from "stores/portal"
import { PluginSource } from "constants"
function opt(name, optional) {
if (optional) {
return { name, optional }
}
return { name }
}
let authOptions = {
[PluginSource.URL]: [opt("URL"), opt("Headers", true)],
[PluginSource.NPM]: [opt("URL")],
[PluginSource.GITHUB]: [opt("URL"), opt("Github Token", true)],
[PluginSource.FILE]: [opt("File Upload")],
}
let file
let source = PluginSource.URL
let dynamicValues = {}
let validation
$: validation = source === "File Upload" ? file : dynamicValues["URL"]
function infoMessage(optionName) {
switch (optionName) {
case PluginSource.URL:
return "Please specify a URL which directs to a built plugin TAR archive. You can provide headers if authentication is required."
case PluginSource.NPM:
return "Please specify the URL to a public NPM package which contains the built version of the plugin you wish to install."
case PluginSource.GITHUB:
return "Please specify the URL to a Github repository which contains built plugin releases. If this is a private repo you can provide a token to access it."
case PluginSource.FILE:
return "Please provide a built plugin TAR archive. You can build a plugin locally using the Budibase CLI."
}
}
async function save() {
try {
if (source === PluginSource.FILE) {
await plugins.uploadPlugin(file)
} else {
const url = dynamicValues["URL"]
let auth =
source === PluginSource.GITHUB
? dynamicValues["Github Token"]
: source === PluginSource.URL
? dynamicValues["Headers"]
: undefined
await plugins.createPlugin(source, url, auth)
}
notifications.success("Plugin added successfully.")
} catch (err) {
const msg = err?.message ? err.message : JSON.stringify(err)
notifications.error(`Failed to add plugin: ${msg}`)
}
}
</script>
<ModalContent
confirmText={"Save"}
onConfirm={save}
disabled={!validation}
size="M"
title="Add new plugin"
>
<div class="form-row">
<Label size="M">Source</Label>
<Select
placeholder={null}
bind:value={source}
options={Object.values(PluginSource)}
/>
</div>
<Body size="S">{infoMessage(source)}</Body>
{#each authOptions[source] as option}
{#if option.name === PluginSource.FILE}
<div class="form-row">
<Label size="M">{option.name}</Label>
<Dropzone
gallery={false}
value={[file]}
on:change={e => {
if (!e.detail || e.detail.length === 0) {
file = null
} else {
file = e.detail[0]
}
}}
/>
</div>
{:else}
<div class="form-row">
<div>
<Label size="M">{option.name}</Label>
{#if option.optional}
<Label size="S" muted><i>Optional</i></Label>
{/if}
</div>
{#if option.name === "Headers"}
<KeyValueBuilder bind:object={dynamicValues[option.name]} />
{:else}
<Input bind:value={dynamicValues[option.name]} />
{/if}
</div>
{/if}
{/each}
</ModalContent>
<style>
.form-row {
display: grid;
grid-template-columns: 60px 1fr;
grid-gap: var(--spacing-l);
align-items: center;
}
</style>

View file

@ -0,0 +1,33 @@
<script>
import { Body, ModalContent, notifications } from "@budibase/bbui"
import { plugins } from "stores/portal"
import { createEventDispatcher } from "svelte"
export let plugin
let dispatch = createEventDispatcher()
async function deletePlugin() {
try {
await plugins.deletePlugin(plugin._id)
notifications.success(`Plugin ${plugin?.name} deleted`)
dispatch("deleted")
} catch (error) {
const msg = error?.message ? error.message : JSON.stringify(error)
notifications.error(`Error deleting plugin: ${msg}`)
}
}
</script>
<ModalContent
warning
onConfirm={deletePlugin}
title="Delete Plugin"
confirmText="Delete plugin"
cancelText="Cancel"
showCloseIcon={false}
>
<Body>
Are you sure you want to delete <strong>{plugin?.name}</strong>
</Body>
</ModalContent>

View file

@ -0,0 +1,145 @@
<script>
import {
Icon,
Body,
Modal,
ModalContent,
Button,
Label,
Input,
} from "@budibase/bbui"
import DeletePluginModal from "../_components/DeletePluginModal.svelte"
export let plugin
let detailsModal
let deleteModal
let icon =
plugin.schema.type === "component"
? plugin.schema.schema.icon || "Book"
: plugin.schema.schema.icon || "Beaker"
function pluginDeleted() {
if (detailsModal) {
detailsModal.hide()
}
}
</script>
<div class="row" on:click={() => detailsModal.show()}>
<div class="title">
<div class="name">
<div>
<Icon size="M" name={icon} />
</div>
<div>
<Body
size="S"
color="var(--spectrum-global-color-gray-900)"
weight="800"
>
{plugin.name}
</Body>
</div>
</div>
</div>
<div class="desktop">{plugin.version}</div>
<div class="desktop">
{plugin.schema.type.charAt(0).toUpperCase() + plugin.schema.type.slice(1)}
</div>
<div>
<Icon name="ChevronRight" />
</div>
</div>
<Modal bind:this={detailsModal}>
<ModalContent
size="M"
title="Plugin details"
showConfirmButton={false}
showCancelButton={false}
>
<div class="details-row">
<Label size="M">Name</Label>
<Input disabled value={plugin.name} />
</div>
<div class="details-row">
<Label size="M">Type</Label>
<Input
disabled
value={plugin.schema.type.charAt(0).toUpperCase() +
plugin.schema.type.slice(1)}
/>
</div>
<div class="details-row">
<Label size="M">Source</Label>
<Input disabled value={plugin.source || "N/A"} />
</div>
<div class="details-row">
<Label size="M">Version</Label>
<Input disabled value={plugin.version} />
</div>
<div class="details-row">
<Label size="M">License</Label>
<Input disabled value={plugin.package.license} />
</div>
<div class="details-row">
<Label size="M">Author</Label>
<Input disabled value={plugin.package.author || "N/A"} />
</div>
<div class="footer" slot="footer">
<Button newStyles on:click={deleteModal.show()} warning>Delete</Button>
</div>
</ModalContent>
<Modal bind:this={deleteModal}>
<DeletePluginModal {plugin} on:deleted={pluginDeleted} />
</Modal>
</Modal>
<style>
.row {
display: grid;
grid-template-columns: 1fr 110px 140px 20px;
align-items: center;
background: var(--background);
border-radius: 4px;
padding: 0 16px;
height: 56px;
background: var(--spectrum-global-color-gray-50);
border: 1px solid var(--spectrum-global-color-gray-300);
transition: background 130ms ease-out;
}
.row:hover {
cursor: pointer;
background: var(--spectrum-global-color-gray-75);
}
.name {
grid-gap: var(--spacing-m);
grid-template-columns: 75px 75px;
align-items: center;
display: flex;
}
.details-row {
display: grid;
grid-template-columns: 60px 1fr;
grid-gap: var(--spacing-l) var(--spacing-l);
align-items: center;
}
@media (max-width: 640px) {
.desktop {
display: none !important;
}
}
.footer {
display: flex;
gap: var(--spacing-l);
}
</style>

View file

@ -0,0 +1,95 @@
<script>
import {
Layout,
Heading,
Body,
Button,
Select,
Divider,
Modal,
Search,
} from "@budibase/bbui"
import { onMount } from "svelte"
import { plugins } from "stores/portal"
import PluginRow from "./_components/PluginRow.svelte"
import AddPluginModal from "./_components/AddPluginModal.svelte"
let modal
let searchTerm = ""
let filter = "all"
let filterOptions = [
{ label: "All plugins", value: "all" },
{ label: "Components", value: "component" },
{ label: "Datasources", value: "datasource" },
]
$: filteredPlugins = $plugins
.filter(plugin => {
return filter === "all" || plugin.schema.type === filter
})
.filter(plugin => {
return (
!searchTerm ||
plugin?.name?.toLowerCase().includes(searchTerm.toLowerCase())
)
})
onMount(async () => {
await plugins.load()
})
</script>
<Layout noPadding>
<Layout gap="XS" noPadding>
<Heading size="M">Plugins</Heading>
<Body>Add your own custom datasources and components</Body>
</Layout>
<Divider size="S" />
<Layout noPadding>
<div class="controls">
<div>
<Button on:click={modal.show} newStyles cta icon={"Add"}>
Add plugin
</Button>
</div>
<div class="filters">
<div class="select">
<Select
bind:value={filter}
placeholder={null}
options={filterOptions}
autoWidth
quiet
/>
</div>
<Search bind:value={searchTerm} placeholder="Search plugins" />
</div>
</div>
{#if filteredPlugins?.length}
<Layout noPadding gap="S">
{#each filteredPlugins as plugin (plugin._id)}
<PluginRow {plugin} />
{/each}
</Layout>
{/if}
</Layout>
</Layout>
<Modal bind:this={modal}>
<AddPluginModal />
</Modal>
<style>
.filters {
display: flex;
gap: var(--spacing-xl);
}
.controls {
display: flex;
gap: var(--spacing-xl);
justify-content: space-between;
}
.controls :global(.spectrum-Search) {
width: 200px;
}
</style>

View file

@ -16,6 +16,7 @@
MenuItem,
Icon,
Helpers,
Modal,
} from "@budibase/bbui"
import OverviewTab from "../_components/OverviewTab.svelte"
import SettingsTab from "../_components/SettingsTab.svelte"
@ -29,6 +30,7 @@
import EditableIcon from "components/common/EditableIcon.svelte"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import HistoryTab from "components/portal/overview/automation/HistoryTab.svelte"
import ExportAppModal from "components/start/ExportAppModal.svelte"
import { checkIncomingDeploymentStatus } from "components/deploy/utils"
import { onDestroy, onMount } from "svelte"
@ -38,7 +40,9 @@
let loaded = false
let deletionModal
let unpublishModal
let exportModal
let appName = ""
let published
// App
$: filteredApps = $apps.filter(app => app.devId === application)
@ -140,11 +144,9 @@
notifications.success("App ID copied to clipboard.")
}
const exportApp = (app, opts = { published: false }) => {
const appName = encodeURIComponent(app.name)
const id = opts?.published ? app.prodId : app.devId
// always export the development version
window.location = `/api/backups/export?appId=${id}&appname=${appName}`
const exportApp = opts => {
published = opts.published
exportModal.show()
}
const unpublishApp = app => {
@ -206,6 +208,10 @@
})
</script>
<Modal bind:this={exportModal} padding={false} width="600px">
<ExportAppModal app={selectedApp} {published} />
</Modal>
<span class="overview-wrap">
<Page wide noPadding>
{#await promise}
@ -269,14 +275,14 @@
<Icon hoverable name="More" />
</span>
<MenuItem
on:click={() => exportApp(selectedApp, { published: false })}
on:click={() => exportApp({ published: false })}
icon="DownloadFromCloud"
>
Export latest
</MenuItem>
{#if isPublished}
<MenuItem
on:click={() => exportApp(selectedApp, { published: true })}
on:click={() => exportApp({ published: true })}
icon="DownloadFromCloudOutline"
>
Export published

View file

@ -15,7 +15,6 @@
import { API } from "api"
import { writable } from "svelte/store"
import { redirect } from "@roxi/routify"
import { onMount } from "svelte"
// Only admins allowed here
$: {
@ -34,12 +33,11 @@
})
let loading = false
async function uploadLogo() {
async function uploadLogo(file) {
try {
let data = new FormData()
data.append("file", $values.logo)
await API.uploadPlugin(data)
notifications.success("Plugin uploaded successfully")
data.append("file", file)
await API.uploadLogo(data)
} catch (error) {
notifications.error("Error uploading logo")
}
@ -73,11 +71,6 @@
}
loading = false
}
onMount(async () => {
const plugins = await API.getPlugins()
console.log(plugins)
})
</script>
{#if $auth.isAdmin}
@ -95,14 +88,14 @@
<Heading size="S">Information</Heading>
<Body size="S">Here you can update your logo and organization name.</Body>
</Layout>
<div class="fields">
<div class="field">
<div className="fields">
<div className="field">
<Label size="L">Org. name</Label>
<Input thin bind:value={$values.company} />
</div>
<div class="field logo">
<div className="field logo">
<Label size="L">Logo</Label>
<div class="file">
<div className="file">
<Dropzone
value={[$values.logo]}
on:change={e => {
@ -113,7 +106,6 @@
}
}}
/>
<button on:click={uploadLogo}>Upload</button>
</div>
</div>
</div>
@ -123,8 +115,8 @@
<Heading size="S">Platform</Heading>
<Body size="S">Here you can set up general platform settings.</Body>
</Layout>
<div class="fields">
<div class="field">
<div className="fields">
<div className="field">
<Label
size="L"
tooltip={"Update the Platform URL to match your Budibase web URL. This keeps email templates and authentication configs up to date."}
@ -158,15 +150,18 @@
display: grid;
grid-gap: var(--spacing-m);
}
.field {
display: grid;
grid-template-columns: 100px 1fr;
grid-gap: var(--spacing-l);
align-items: center;
}
.file {
max-width: 30ch;
}
.logo {
align-items: start;
}

View file

@ -8,3 +8,4 @@ export { oidc } from "./oidc"
export { templates } from "./templates"
export { licensing } from "./licensing"
export { groups } from "./groups"
export { plugins } from "./plugins"

View file

@ -0,0 +1,77 @@
import { writable } from "svelte/store"
import { API } from "api"
import { PluginSource } from "constants"
export function createPluginsStore() {
const { subscribe, set, update } = writable([])
async function load() {
const plugins = await API.getPlugins()
set(plugins)
}
async function deletePlugin(pluginId) {
await API.deletePlugin(pluginId)
update(state => {
state = state.filter(existing => existing._id !== pluginId)
return state
})
}
async function createPlugin(source, url, auth = null) {
let pluginData = {
source,
url,
}
switch (source) {
case PluginSource.URL:
pluginData.headers = auth
break
case PluginSource.GITHUB:
pluginData.githubToken = auth
break
}
let res = await API.createPlugin(pluginData)
let newPlugin = res.plugins[0]
update(state => {
const currentIdx = state.findIndex(plugin => plugin._id === newPlugin._id)
if (currentIdx >= 0) {
state.splice(currentIdx, 1, newPlugin)
} else {
state.push(newPlugin)
}
return state
})
}
async function uploadPlugin(file) {
if (!file) {
return
}
let data = new FormData()
data.append("file", file)
let resp = await API.uploadPlugin(data)
let newPlugin = resp.plugins[0]
update(state => {
const currentIdx = state.findIndex(plugin => plugin._id === newPlugin._id)
if (currentIdx >= 0) {
state.splice(currentIdx, 1, newPlugin)
} else {
state.push(newPlugin)
}
return state
})
}
return {
subscribe,
load,
createPlugin,
deletePlugin,
uploadPlugin,
}
}
export const plugins = createPluginsStore()
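
Hypothetical usage of the store from the portal UI; the repository URL, token and plugin ID below are placeholders:

import { plugins } from "stores/portal"
import { PluginSource } from "constants"

async function example() {
  // Populate the store with the current plugin list
  await plugins.load()

  // Install from a GitHub repo; the third argument is sent as githubToken
  await plugins.createPlugin(
    PluginSource.GITHUB,
    "https://github.com/example/budibase-plugin", // placeholder repo URL
    "ghp_exampleToken" // placeholder token, only needed for private repos
  )

  // Remove a plugin by ID
  await plugins.deletePlugin("plg_123") // placeholder ID
}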

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@ -26,18 +26,18 @@
"outputPath": "build"
},
"dependencies": {
"@budibase/backend-core": "^1.3.4-alpha.1",
"@budibase/string-templates": "^1.3.4-alpha.1",
"@budibase/types": "^1.3.4-alpha.1",
"@budibase/backend-core": "1.3.15-alpha.0",
"@budibase/string-templates": "1.3.15-alpha.0",
"@budibase/types": "1.3.15-alpha.0",
"axios": "0.21.2",
"chalk": "4.1.0",
"cli-progress": "3.11.2",
"commander": "7.1.0",
"docker-compose": "0.23.6",
"dotenv": "16.0.1",
"download": "^8.0.0",
"download": "8.0.0",
"inquirer": "8.0.0",
"joi": "^17.6.0",
"joi": "17.6.0",
"lookpath": "1.1.0",
"node-fetch": "2",
"pkg": "5.7.0",

View file

@ -1,6 +0,0 @@
exports.PluginTypes = {
COMPONENT: "component",
DATASOURCE: "datasource",
}
exports.PLUGIN_TYPES_ARR = Object.values(exports.PluginTypes)

View file

@ -3,8 +3,8 @@ const { CommandWords } = require("../constants")
const { getSkeleton, fleshOutSkeleton } = require("./skeleton")
const questions = require("../questions")
const fs = require("fs")
const { PLUGIN_TYPES_ARR } = require("./constants")
const { validate } = require("./validate")
const { PLUGIN_TYPE_ARR } = require("@budibase/types")
const { validate } = require("@budibase/backend-core/plugins")
const { runPkgCommand } = require("../exec")
const { join } = require("path")
const { success, error, info } = require("../utils")
@ -24,7 +24,7 @@ function checkInPlugin() {
async function init(opts) {
const type = opts["init"] || opts
if (!type || !PLUGIN_TYPES_ARR.includes(type)) {
if (!type || !PLUGIN_TYPE_ARR.includes(type)) {
console.log(
error(
"Please provide a type to init, either 'component' or 'datasource'."

View file

@ -753,7 +753,7 @@ double-ended-queue@2.1.0-0:
resolved "https://registry.yarnpkg.com/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz#103d3527fd31528f40188130c841efdd78264e5c"
integrity sha512-+BNfZ+deCo8hMNpDqDnvT+c0XpJ5cUa6mqYq89bho2Ifze4URTqRkcwR399hWoTrTkbZ/XJYDgP6rc7pRgffEQ==
download@^8.0.0:
download@8.0.0:
version "8.0.0"
resolved "https://registry.yarnpkg.com/download/-/download-8.0.0.tgz#afc0b309730811731aae9f5371c9f46be73e51b1"
integrity sha512-ASRY5QhDk7FK+XrQtQyvhpDKanLluEEQtWl/J7Lxuf/b+i8RYh997QeXvL85xitrmRKVlx9c7eTrcRdq2GS4eA==
@ -1551,7 +1551,7 @@ isurl@^1.0.0-alpha5:
has-to-string-tag-x "^1.2.0"
is-object "^1.0.1"
joi@^17.6.0:
joi@17.6.0:
version "17.6.0"
resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2"
integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "1.3.4-alpha.1",
"@budibase/frontend-core": "1.3.4-alpha.1",
"@budibase/string-templates": "1.3.4-alpha.1",
"@budibase/bbui": "1.3.15-alpha.0",
"@budibase/frontend-core": "1.3.15-alpha.0",
"@budibase/string-templates": "1.3.15-alpha.0",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View file

@ -125,7 +125,9 @@
// Empty components are those which accept children but do not have any.
// Empty states can be shown for these components, but can be disabled
// in the component manifest.
$: empty = interactive && !children.length && hasChildren
$: empty =
(interactive && !children.length && hasChildren) ||
hasMissingRequiredSettings
$: emptyState = empty && showEmptyState
// Enrich component settings

View file

@ -2,28 +2,25 @@
import { getContext } from "svelte"
import { builderStore } from "stores"
const { styleable } = getContext("sdk")
const component = getContext("component")
$: requiredSetting = $component.missingRequiredSettings?.[0]
</script>
{#if $builderStore.inBuilder && requiredSetting}
<div use:styleable={$component.styles}>
<div class="component-placeholder">
<span>
Add the <mark>{requiredSetting.label}</mark> setting to start using your
component -
</span>
<span
class="spectrum-Link"
on:click={() => {
builderStore.actions.highlightSetting(requiredSetting.key)
}}
>
Show me
</span>
</div>
<div class="component-placeholder">
<span>
Add the <mark>{requiredSetting.label}</mark> setting to start using your component
-
</span>
<span
class="spectrum-Link"
on:click={() => {
builderStore.actions.highlightSetting(requiredSetting.key)
}}
>
Show me
</span>
</div>
{/if}

View file

@ -1,7 +1,6 @@
<script>
import { getContext } from "svelte"
import { ProgressCircle, Pagination } from "@budibase/bbui"
import Placeholder from "./Placeholder.svelte"
import { fetchData, LuceneUtils } from "@budibase/frontend-core"
export let dataSource
@ -133,11 +132,7 @@
<ProgressCircle />
</div>
{:else}
{#if $component.emptyState}
<Placeholder />
{:else}
<slot />
{/if}
<slot />
{#if paginate && $fetch.supportsPagination}
<div class="pagination">
<Pagination

File diff suppressed because it is too large.

View file

@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "1.3.4-alpha.1",
"@budibase/bbui": "1.3.15-alpha.0",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View file

@ -5,12 +5,22 @@ export const buildPluginEndpoints = API => ({
*/
uploadPlugin: async data => {
return await API.post({
url: "/api/plugin/upload",
url: `/api/plugin/upload`,
body: data,
json: false,
})
},
/**
* Creates a plugin from URL, Github or NPM
*/
createPlugin: async data => {
return await API.post({
url: `/api/plugin`,
body: data,
})
},
/**
* Gets a list of all plugins
*/
@ -19,4 +29,16 @@ export const buildPluginEndpoints = API => ({
url: "/api/plugin",
})
},
/**
* Deletes a plugin.
* @param pluginId the ID of the plugin to delete
*/
deletePlugin: async pluginId => {
return await API.delete({
url: `/api/plugin/${pluginId}`,
})
},
})

View file

@ -80,6 +80,19 @@ const cleanupQuery = query => {
return query
}
/**
* Removes a numeric prefix on field names designed to give fields uniqueness
*/
const removeKeyNumbering = key => {
if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) {
const parts = key.split(":")
parts.shift()
return parts.join(":")
} else {
return key
}
}
/**
* Builds a lucene JSON query from the filter structure generated in the builder
* @param filter the builder filter structure
@ -194,7 +207,7 @@ export const runLuceneQuery = (docs, query) => {
const filters = Object.entries(query[type] || {})
for (let i = 0; i < filters.length; i++) {
const [key, testValue] = filters[i]
const docValue = Helpers.deepGet(doc, key)
const docValue = Helpers.deepGet(doc, removeKeyNumbering(key))
if (failFn(docValue, testValue)) {
return false
}
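
The behaviour of removeKeyNumbering in isolation. The inputs are made up; note that the regex matches a digit-and-colon anywhere in the key, not only at the start:

const removeKeyNumbering = key => {
  if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) {
    const parts = key.split(":")
    parts.shift()
    return parts.join(":")
  } else {
    return key
  }
}

console.log(removeKeyNumbering("1:name"))  // "name"  – numeric prefix stripped
console.log(removeKeyNumbering("12:city")) // "city"
console.log(removeKeyNumbering("name"))    // "name"  – no prefix, unchanged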

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.3.4-alpha.1",
"version": "1.3.15-alpha.0",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -77,11 +77,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "1.3.4-alpha.1",
"@budibase/client": "1.3.4-alpha.1",
"@budibase/pro": "1.3.4-alpha.1",
"@budibase/string-templates": "1.3.4-alpha.1",
"@budibase/types": "1.3.4-alpha.1",
"@budibase/backend-core": "1.3.15-alpha.0",
"@budibase/client": "1.3.15-alpha.0",
"@budibase/pro": "1.3.15-alpha.0",
"@budibase/string-templates": "1.3.15-alpha.0",
"@budibase/types": "1.3.15-alpha.0",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
@ -95,7 +95,7 @@
"bcryptjs": "2.4.3",
"bull": "4.8.5",
"chmodr": "1.2.0",
"chokidar": "^3.5.3",
"chokidar": "3.5.3",
"csvtojson": "2.0.10",
"curlconverter": "3.21.0",
"dotenv": "8.2.0",
@ -142,7 +142,7 @@
"socket.io": "^4.5.1",
"svelte": "3.49.0",
"swagger-parser": "10.0.3",
"tar": "^6.1.11",
"tar": "6.1.11",
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"validate.js": "0.13.1",

View file

@ -553,11 +553,7 @@ export const sync = async (ctx: any, next: any) => {
})
let error
try {
await replication.replicate({
filter: function (doc: any) {
return doc._id !== DocumentType.APP_METADATA
},
})
await replication.replicate(replication.appReplicateOpts())
} catch (err) {
error = err
} finally {

View file

@ -1,11 +1,12 @@
const { streamBackup } = require("../../utilities/fileSystem")
const { events, context } = require("@budibase/backend-core")
const { DocumentType } = require("../../db/utils")
const { isQsTrue } = require("../../utilities")
exports.exportAppDump = async function (ctx) {
let { appId, excludeRows } = ctx.query
const appName = decodeURI(ctx.query.appname)
excludeRows = excludeRows === "true"
excludeRows = isQsTrue(excludeRows)
const backupIdentifier = `${appName}-export-${new Date().getTime()}.txt`
ctx.attachment(backupIdentifier)
ctx.body = await streamBackup(appId, excludeRows)

View file

@ -15,6 +15,7 @@ import {
getAppId,
getAppDB,
getProdAppDB,
getDevAppDB,
} from "@budibase/backend-core/context"
import { quotas } from "@budibase/pro"
import { events } from "@budibase/backend-core"
@ -110,17 +111,29 @@ async function deployApp(deployment: any) {
target: productionAppId,
}
replication = new Replication(config)
const devDb = getDevAppDB()
console.log("Compacting development DB")
await devDb.compact()
console.log("Replication object created")
await replication.replicate()
await replication.replicate(replication.appReplicateOpts())
console.log("replication complete.. replacing app meta doc")
// app metadata is excluded as it is likely to be in conflict
// replicate the app metadata document manually
const db = getProdAppDB()
const appDoc = await db.get(DocumentType.APP_METADATA)
const appDoc = await devDb.get(DocumentType.APP_METADATA)
try {
const prodAppDoc = await db.get(DocumentType.APP_METADATA)
appDoc._rev = prodAppDoc._rev
} catch (err) {
delete appDoc._rev
}
// switch to production app ID
deployment.appUrl = appDoc.url
appDoc.appId = productionAppId
appDoc.instance._id = productionAppId
// remove automation errors if they exist
delete appDoc.automationErrors
await db.put(appDoc)
await appCache.invalidateAppMetadata(productionAppId)
console.log("New app doc written successfully.")

View file

@ -1,114 +0,0 @@
import { ObjectStoreBuckets } from "../../constants"
import { extractPluginTarball, loadJSFile } from "../../utilities/fileSystem"
import { getGlobalDB } from "@budibase/backend-core/tenancy"
import { generatePluginID, getPluginParams } from "../../db/utils"
import { uploadDirectory } from "@budibase/backend-core/objectStore"
import { PluginType, FileType } from "@budibase/types"
import { ClientAppSocket } from "../../app"
import env from "../../environment"
export async function getPlugins(type?: PluginType) {
const db = getGlobalDB()
const response = await db.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
const plugins = response.rows.map((row: any) => row.doc)
if (type) {
return plugins.filter((plugin: any) => plugin.schema?.type === type)
} else {
return plugins
}
}
export async function upload(ctx: any) {
const plugins: FileType[] =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
try {
let docs = []
// can do single or multiple plugins
for (let plugin of plugins) {
const doc = await processPlugin(plugin)
docs.push(doc)
}
ctx.body = {
message: "Plugin(s) uploaded successfully",
plugins: docs,
}
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
}
export async function fetch(ctx: any) {
ctx.body = await getPlugins()
}
export async function destroy(ctx: any) {}
export async function processPlugin(plugin: FileType) {
if (!env.SELF_HOSTED) {
throw new Error("Plugins not supported outside of self-host.")
}
const db = getGlobalDB()
const { metadata, directory } = await extractPluginTarball(plugin)
const version = metadata.package.version,
name = metadata.package.name,
description = metadata.package.description,
hash = metadata.schema.hash
// first open the tarball into tmp directory
const bucketPath = `${name}/`
const files = await uploadDirectory(
ObjectStoreBuckets.PLUGINS,
directory,
bucketPath
)
const jsFile = files.find((file: any) => file.name.endsWith(".js"))
if (!jsFile) {
throw new Error(`Plugin missing .js file.`)
}
// validate the JS for a datasource
if (metadata.schema.type === PluginType.DATASOURCE) {
const js = loadJSFile(directory, jsFile.name)
// TODO: this isn't safe - but we need full node environment
// in future we should do this in a thread for safety
try {
eval(js)
} catch (err: any) {
const message = err?.message ? err.message : JSON.stringify(err)
throw new Error(`JS invalid: ${message}`)
}
}
const jsFileName = jsFile.name
const pluginId = generatePluginID(name)
// overwrite existing docs entirely if they exist
let rev
try {
const existing = await db.get(pluginId)
rev = existing._rev
} catch (err) {
rev = undefined
}
const doc = {
_id: pluginId,
_rev: rev,
...metadata,
name,
version,
hash,
description,
jsUrl: `${bucketPath}${jsFileName}`,
}
const response = await db.put(doc)
ClientAppSocket.emit("plugin-update", { name, hash })
return {
...doc,
_rev: response.rev,
}
}

View file

@ -0,0 +1,15 @@
import {
createTempFolder,
getPluginMetadata,
extractTarball,
} from "../../../utilities/fileSystem"
export async function fileUpload(file: { name: string; path: string }) {
if (!file.name.endsWith(".tar.gz")) {
throw new Error("Plugin must be compressed into a gzipped tarball.")
}
const path = createTempFolder(file.name.split(".tar.gz")[0])
await extractTarball(file.path, path)
return await getPluginMetadata(path)
}

View file

@ -0,0 +1,75 @@
import { getPluginMetadata } from "../../../utilities/fileSystem"
import fetch from "node-fetch"
import { downloadUnzipTarball } from "./utils"
export async function request(
url: string,
headers: { [key: string]: string },
err: string
) {
const response = await fetch(url, { headers })
if (response.status >= 300) {
const respErr = await response.text()
throw new Error(`Error: ${err} - ${respErr}`)
}
return response.json()
}
export async function githubUpload(url: string, name = "", token = "") {
let githubUrl = url
if (!githubUrl.includes("https://github.com/")) {
throw new Error("The plugin origin must be from Github")
}
if (url.includes(".git")) {
githubUrl = url.replace(".git", "")
}
const githubApiUrl = githubUrl.replace(
"https://github.com/",
"https://api.github.com/repos/"
)
const headers: any = token ? { Authorization: `Bearer ${token}` } : {}
const pluginDetails = await request(
githubApiUrl,
headers,
"Repository not found"
)
const pluginName = pluginDetails.name || name
const pluginLatestReleaseUrl = pluginDetails?.["releases_url"]
? pluginDetails?.["releases_url"].replace("{/id}", "/latest")
: undefined
if (!pluginLatestReleaseUrl) {
throw new Error("Github release not found")
}
const pluginReleaseDetails = await request(
pluginLatestReleaseUrl,
headers,
"Github latest release not found"
)
const pluginReleaseTarballAsset = pluginReleaseDetails?.assets?.find(
(x: any) => x?.["content_type"] === "application/gzip"
)
const pluginLastReleaseTarballUrl =
pluginReleaseTarballAsset?.["browser_download_url"]
if (!pluginLastReleaseTarballUrl) {
throw new Error("Github latest release url not found")
}
try {
const path = await downloadUnzipTarball(
pluginLastReleaseTarballUrl,
pluginName,
headers
)
return await getPluginMetadata(path)
} catch (err: any) {
let errMsg = err?.message || err
if (errMsg === "unexpected response Not Found") {
errMsg = "Github release tarball not found"
}
throw new Error(errMsg)
}
}
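A hypothetical call to the uploader above, assuming a repository whose latest GitHub release has a gzipped tarball attached; the URL and token are placeholders:
// resolves the repo via the GitHub API, downloads the latest release's .tar.gz
// asset into a temp folder and returns the parsed plugin metadata
const { metadata, directory } = await githubUpload(
  "https://github.com/example-org/example-budibase-plugin.git",
  "", // optional fallback name
  process.env.GITHUB_TOKEN // only required for private repositories
)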

View file

@ -0,0 +1,206 @@
import { ObjectStoreBuckets } from "../../../constants"
import { loadJSFile } from "../../../utilities/fileSystem"
import { npmUpload, urlUpload, githubUpload, fileUpload } from "./uploaders"
import { getGlobalDB } from "@budibase/backend-core/tenancy"
import { validate } from "@budibase/backend-core/plugins"
import { generatePluginID, getPluginParams } from "../../../db/utils"
import {
uploadDirectory,
deleteFolder,
} from "@budibase/backend-core/objectStore"
import { PluginType, FileType, PluginSource } from "@budibase/types"
import env from "../../../environment"
import { ClientAppSocket } from "../../../app"
export async function getPlugins(type?: PluginType) {
const db = getGlobalDB()
const response = await db.allDocs(
getPluginParams(null, {
include_docs: true,
})
)
const plugins = response.rows.map((row: any) => row.doc)
if (type) {
return plugins.filter((plugin: any) => plugin.schema?.type === type)
} else {
return plugins
}
}
export async function upload(ctx: any) {
const plugins: FileType[] =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
try {
let docs = []
// can do single or multiple plugins
for (let plugin of plugins) {
const doc = await processPlugin(plugin, PluginSource.FILE)
docs.push(doc)
}
ctx.body = {
message: "Plugin(s) uploaded successfully",
plugins: docs,
}
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
}
export async function create(ctx: any) {
const { source, url, headers, githubToken } = ctx.request.body
if (!env.SELF_HOSTED) {
ctx.throw(400, "Plugins not supported outside of self-host.")
}
try {
let metadata
let directory
// Generate a random fallback name; it is also needed when building the plugin URL
let name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000)
switch (source) {
case PluginSource.NPM:
const { metadata: metadataNpm, directory: directoryNpm } =
await npmUpload(url, name)
metadata = metadataNpm
directory = directoryNpm
break
case PluginSource.GITHUB:
const { metadata: metadataGithub, directory: directoryGithub } =
await githubUpload(url, name, githubToken)
metadata = metadataGithub
directory = directoryGithub
break
case PluginSource.URL:
const headersObj = headers || {}
const { metadata: metadataUrl, directory: directoryUrl } =
await urlUpload(url, name, headersObj)
metadata = metadataUrl
directory = directoryUrl
break
}
validate(metadata?.schema)
const doc = await storePlugin(metadata, directory, source)
ctx.body = {
message: "Plugin uploaded successfully",
plugins: [doc],
}
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
ctx.status = 200
}
export async function fetch(ctx: any) {
ctx.body = await getPlugins()
}
export async function destroy(ctx: any) {
const db = getGlobalDB()
const { pluginId } = ctx.params
try {
const plugin = await db.get(pluginId)
const bucketPath = `${plugin.name}/`
await deleteFolder(ObjectStoreBuckets.PLUGINS, bucketPath)
await db.remove(pluginId, plugin._rev)
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to delete plugin: ${errMsg}`)
}
ctx.message = `Plugin ${ctx.params.pluginId} deleted.`
ctx.status = 200
}
export async function storePlugin(
metadata: any,
directory: any,
source?: string
) {
const db = getGlobalDB()
const version = metadata.package.version,
name = metadata.package.name,
description = metadata.package.description,
hash = metadata.schema.hash
// upload the plugin files to the object store under a folder named after the plugin
const bucketPath = `${name}/`
const files = await uploadDirectory(
ObjectStoreBuckets.PLUGINS,
directory,
bucketPath
)
const jsFile = files.find((file: any) => file.name.endsWith(".js"))
if (!jsFile) {
throw new Error(`Plugin missing .js file.`)
}
// validate the JS for a datasource
if (metadata.schema.type === PluginType.DATASOURCE) {
const js = loadJSFile(directory, jsFile.name)
// TODO: this isn't safe - but we need full node environment
// in future we should do this in a thread for safety
try {
eval(js)
} catch (err: any) {
const message = err?.message ? err.message : JSON.stringify(err)
throw new Error(`JS invalid: ${message}`)
}
}
const jsFileName = jsFile.name
const pluginId = generatePluginID(name)
// overwrite existing docs entirely if they exist
let rev
try {
const existing = await db.get(pluginId)
rev = existing._rev
} catch (err) {
rev = undefined
}
let doc = {
_id: pluginId,
_rev: rev,
...metadata,
name,
version,
hash,
description,
jsUrl: `${bucketPath}${jsFileName}`,
}
if (source) {
doc = {
...doc,
source,
}
}
const response = await db.put(doc)
ClientAppSocket.emit("plugin-update", { name, hash })
return {
...doc,
_rev: response.rev,
}
}
export async function processPlugin(plugin: FileType, source?: string) {
if (!env.SELF_HOSTED) {
throw new Error("Plugins not supported outside of self-host.")
}
const { metadata, directory } = await fileUpload(plugin)
return await storePlugin(metadata, directory, source)
}
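For context, the three request shapes the new create handler accepts, written as calls against the frontend API client; the URLs are illustrative and the exact source strings are whatever the PluginSource enum in @budibase/types defines (assumed here):
// NPM package page or registry URL
await API.createPlugin({ source: "NPM", url: "https://www.npmjs.com/package/example-plugin" })
// GitHub repository whose latest release attaches a .tar.gz asset
await API.createPlugin({ source: "GITHUB", url: "https://github.com/example-org/example-plugin", githubToken: token })
// direct tarball URL, optionally with auth headers
await API.createPlugin({ source: "URL", url: "https://example.com/example-plugin.tar.gz", headers: { Authorization: `Bearer ${token}` } })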

View file

@ -0,0 +1,56 @@
import {
getPluginMetadata,
findFileRec,
extractTarball,
deleteFolderFileSystem,
} from "../../../utilities/fileSystem"
import fetch from "node-fetch"
import { join } from "path"
import { downloadUnzipTarball } from "./utils"
export async function npmUpload(url: string, name: string, headers = {}) {
let npmTarballUrl = url
let pluginName = name
if (
!npmTarballUrl.includes("https://www.npmjs.com") &&
!npmTarballUrl.includes("https://registry.npmjs.org")
) {
throw new Error("The plugin origin must be from NPM")
}
if (!npmTarballUrl.includes(".tgz")) {
const npmPackageUrl = url.replace(
"https://www.npmjs.com/package/",
"https://registry.npmjs.org/"
)
const response = await fetch(npmPackageUrl)
if (response.status !== 200) {
throw new Error("NPM Package not found")
}
let npmDetails = await response.json()
pluginName = npmDetails.name
const npmVersion = npmDetails["dist-tags"].latest
npmTarballUrl = npmDetails?.versions?.[npmVersion]?.dist?.tarball
if (!npmTarballUrl) {
throw new Error("NPM tarball url not found")
}
}
const path = await downloadUnzipTarball(npmTarballUrl, pluginName, headers)
const tarballPluginFile = findFileRec(path, ".tar.gz")
if (!tarballPluginFile) {
throw new Error("Tarball plugin file not found")
}
try {
await extractTarball(tarballPluginFile, path)
deleteFolderFileSystem(join(path, "package"))
} catch (err: any) {
throw new Error(err)
}
return await getPluginMetadata(path)
}
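A sketch of the URL handling above with a made-up package: the package page is rewritten to the registry, the latest version's tarball is resolved, and the nested plugin tarball inside the npm package is extracted:
// "https://www.npmjs.com/package/example-plugin"
//   -> "https://registry.npmjs.org/example-plugin"
//   -> npmDetails.versions[npmDetails["dist-tags"].latest].dist.tarball
const { metadata, directory } = await npmUpload(
  "https://www.npmjs.com/package/example-plugin",
  "example-plugin"
)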

View file

@ -0,0 +1,4 @@
export { fileUpload } from "./file"
export { githubUpload } from "./github"
export { npmUpload } from "./npm"
export { urlUpload } from "./url"

View file

@ -0,0 +1,12 @@
import { downloadUnzipTarball } from "./utils"
import { getPluginMetadata } from "../../../utilities/fileSystem"
export async function urlUpload(url: string, name = "", headers = {}) {
if (!url.includes(".tar.gz")) {
throw new Error("Plugin must be compressed into a gzipped tarball.")
}
const path = await downloadUnzipTarball(url, name, headers)
return await getPluginMetadata(path)
}

View file

@ -0,0 +1,19 @@
import {
createTempFolder,
downloadTarballDirect,
} from "../../../utilities/fileSystem"
export async function downloadUnzipTarball(
url: string,
name: string,
headers = {}
) {
try {
const path = createTempFolder(name)
await downloadTarballDirect(url, path, headers)
return path
} catch (e: any) {
throw new Error(e.message)
}
}

View file

@ -534,7 +534,7 @@ module External {
})
// this is the response from knex if no rows found
const rows = !response[0].read ? response : []
const storeTo = isMany ? field.throughFrom || linkPrimaryKey : manyKey
const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName
related[storeTo] = { rows, isMany, tableId }
}
return related

View file

@ -7,6 +7,7 @@ const router = new Router()
router
.post("/api/plugin/upload", authorized(BUILDER), controller.upload)
.post("/api/plugin", authorized(BUILDER), controller.create)
.get("/api/plugin", authorized(BUILDER), controller.fetch)
.delete("/api/plugin/:pluginId", authorized(BUILDER), controller.destroy)

View file

@ -56,7 +56,7 @@ router
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
controller.deleteObjects
)
.get("/preview", authorized(BUILDER), controller.serveBuilderPreview)
.get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview)
.get("/:appId/:path*", controller.serveApp)
.get("/app/:appUrl/:path*", controller.serveApp)
.post(

View file

@ -294,7 +294,7 @@ describe("/queries", () => {
"url": "string",
"value": "string"
})
expect(res.body.rows[0].url).toContain("doctype html")
expect(res.body.rows[0].url).toContain("doctype%20html")
})
it("check that it automatically retries on fail with cached dynamics", async () => {
@ -379,7 +379,7 @@ describe("/queries", () => {
"queryHdr": userDetails.firstName,
"secondHdr" : "1234"
})
expect(res.body.rows[0].url).toEqual("http://www.google.com?email=" + userDetails.email)
expect(res.body.rows[0].url).toEqual("http://www.google.com?email=" + userDetails.email.replace("@", "%40"))
})
it("should bind the current user to query parameters", async () => {
@ -396,7 +396,7 @@ describe("/queries", () => {
"testParam" : "1234"
})
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=" + userDetails.email +
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=" + userDetails.email.replace("@", "%40") +
"&testName=" + userDetails.firstName + "&testParam=1234")
})

View file

@ -150,14 +150,14 @@ describe("/static", () => {
})
})
describe("/preview", () => {
describe("/app/preview", () => {
beforeEach(() => {
jest.clearAllMocks()
})
it("should serve the builder preview", async () => {
const headers = config.defaultHeaders()
const res = await request.get(`/preview`).set(headers).expect(200)
const res = await request.get(`/app/preview`).set(headers).expect(200)
expect(res.body.appId).toBe(config.appId)
expect(res.body.builderPreview).toBe(true)

View file

@ -1,16 +1,19 @@
const { processEvent } = require("./utils")
const { queue, shutdown } = require("./bullboard")
const { TRIGGER_DEFINITIONS } = require("./triggers")
const { TRIGGER_DEFINITIONS, rebootTrigger } = require("./triggers")
const { ACTION_DEFINITIONS } = require("./actions")
/**
* This module is built purely to kick off the worker farm and manage the inputs/outputs
*/
exports.init = function () {
exports.init = async function () {
// this promise will not complete
return queue.process(async job => {
const promise = queue.process(async job => {
await processEvent(job)
})
// on init we need to trigger any reboot automations
await rebootTrigger()
return promise
}
exports.getQueues = () => {

View file

@ -9,6 +9,7 @@ const { checkTestFlag } = require("../utilities/redis")
const utils = require("./utils")
const env = require("../environment")
const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
const { getAllApps } = require("@budibase/backend-core/db")
const TRIGGER_DEFINITIONS = definitions
const JOB_OPTS = {
@ -16,24 +17,27 @@ const JOB_OPTS = {
removeOnFail: true,
}
async function getAllAutomations() {
const db = getAppDB()
let automations = await db.allDocs(
getAutomationParams(null, { include_docs: true })
)
return automations.rows.map(row => row.doc)
}
async function queueRelevantRowAutomations(event, eventType) {
if (event.appId == null) {
throw `No appId specified for ${eventType} - check event emitters.`
}
doInAppContext(event.appId, async () => {
const db = getAppDB()
let automations = await db.allDocs(
getAutomationParams(null, { include_docs: true })
)
let automations = await getAllAutomations()
// filter down to the correct event type
automations = automations.rows
.map(automation => automation.doc)
.filter(automation => {
const trigger = automation.definition.trigger
return trigger && trigger.event === eventType
})
automations = automations.filter(automation => {
const trigger = automation.definition.trigger
return trigger && trigger.event === eventType
})
for (let automation of automations) {
let automationDef = automation.definition
@ -110,4 +114,34 @@ exports.externalTrigger = async function (
}
}
exports.rebootTrigger = async () => {
// reboot cron option is only available on the main thread at
// startup and only usable in self host
if (env.isInThread() || !env.SELF_HOSTED) {
return
}
// iterate through all production apps, find the reboot crons
// and trigger events for them
const appIds = await getAllApps({ dev: false, idsOnly: true })
for (let prodAppId of appIds) {
await doInAppContext(prodAppId, async () => {
let automations = await getAllAutomations()
let rebootEvents = []
for (let automation of automations) {
if (utils.isRebootTrigger(automation)) {
const job = {
automation,
event: {
appId: prodAppId,
timestamp: Date.now(),
},
}
rebootEvents.push(queue.add(job, JOB_OPTS))
}
}
await Promise.all(rebootEvents)
})
}
}
exports.TRIGGER_DEFINITIONS = TRIGGER_DEFINITIONS
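A reboot automation is simply a cron automation whose cron input is the @reboot sentinel; a trimmed sketch of the shape isRebootTrigger matches, with the stepId value assumed from the built-in trigger definitions:
const rebootAutomation = {
  definition: {
    trigger: {
      stepId: "CRON",              // the cron trigger's stepId
      inputs: { cron: "@reboot" }, // sentinel picked up by rebootTrigger() at startup
    },
    steps: [], // actions to run when the self-hosted server boots
  },
}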

View file

@ -17,6 +17,7 @@ import { tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import { Automation } from "@budibase/types"
const REBOOT_CRON = "@reboot"
const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)
@ -109,22 +110,33 @@ export async function clearMetadata() {
await db.bulkDocs(automationMetadata)
}
export function isCronTrigger(auto: Automation) {
return (
auto &&
auto.definition.trigger &&
auto.definition.trigger.stepId === CRON_STEP_ID
)
}
export function isRebootTrigger(auto: Automation) {
const trigger = auto ? auto.definition.trigger : null
return isCronTrigger(auto) && trigger?.inputs.cron === REBOOT_CRON
}
/**
* This function handles checking of any cron jobs that need to be enabled/updated.
 * @param {string} appId The ID of the app in which we are checking for cron triggers
* @param {object|undefined} automation The automation object to be updated.
*/
export async function enableCronTrigger(appId: any, automation: any) {
export async function enableCronTrigger(appId: any, automation: Automation) {
const trigger = automation ? automation.definition.trigger : null
function isCronTrigger(auto: any) {
return (
auto &&
auto.definition.trigger &&
auto.definition.trigger.stepId === CRON_STEP_ID
)
}
// need to create cron job
if (isCronTrigger(automation) && trigger?.inputs.cron) {
if (
isCronTrigger(automation) &&
!isRebootTrigger(automation) &&
trigger?.inputs.cron
) {
// make a job id rather than letting Bull decide, makes it easier to handle on way out
const jobId = `${appId}_cron_${newid()}`
const job: any = await queue.add(

View file

@ -34,6 +34,7 @@ export interface RestConfig {
defaultHeaders: {
[key: string]: any
}
legacyHttpParser: boolean
authConfigs: AuthConfig[]
staticVariables: {
[key: string]: string

View file

@ -1,11 +1,11 @@
import {
DatasourceFieldType,
DatasourcePlus,
Integration,
QueryJson,
QueryType,
Table,
TableSchema,
QueryJson,
DatasourcePlus,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { buildExternalTableId } from "./utils"
@ -286,8 +286,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
async createTable(name?: string) {
try {
await this.connect()
const sheet = await this.client.addSheet({ title: name })
return sheet
return await this.client.addSheet({ title: name })
} catch (err) {
console.error("Error creating new table in google sheets", err)
throw err
@ -375,7 +374,8 @@ class GoogleSheetsIntegration implements DatasourcePlus {
const rows = await sheet.getRows()
const row = rows[query.rowIndex]
if (row) {
const updateValues = query.row
const updateValues =
typeof query.row === "string" ? JSON.parse(query.row) : query.row
for (let key in updateValues) {
row[key] = updateValues[key]
}

View file

@ -92,7 +92,7 @@ class RedisIntegration {
}
async disconnect() {
this.client.disconnect()
return this.client.disconnect()
}
async redisContext(query: Function) {
@ -101,7 +101,7 @@ class RedisIntegration {
} catch (err) {
throw new Error(`Redis error: ${err}`)
} finally {
this.disconnect()
await this.disconnect()
}
}
@ -117,26 +117,34 @@ class RedisIntegration {
async read(query: { key: string }) {
return this.redisContext(async () => {
const response = await this.client.get(query.key)
return response
return await this.client.get(query.key)
})
}
async delete(query: { key: string }) {
return this.redisContext(async () => {
const response = await this.client.del(query.key)
return response
return await this.client.del(query.key)
})
}
async command(query: { json: string }) {
return this.redisContext(async () => {
const commands = query.json.trim().split(" ")
const pipeline = this.client.pipeline([commands])
const result = await pipeline.exec()
return {
response: result[0][1],
// commands split line by line
const commands = query.json.trim().split("\n")
let pipelineCommands = []
// process each command separately
for (let command of commands) {
const tokenised = command.trim().split(" ")
// Pipeline only accepts lower case commands
tokenised[0] = tokenised[0].toLowerCase()
pipelineCommands.push(tokenised)
}
const pipeline = this.client.pipeline(pipelineCommands)
const result = await pipeline.exec()
return result.map((output: string | string[]) => output[1])
})
}
}
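To illustrate the new newline handling with made-up commands: a multi-line query body becomes one lower-cased pipeline entry per line, and command() resolves to one output per command:
// query.json as entered in the builder
const json = 'SET greeting "hello"\nGET greeting\nKEYS *'
// pipeline input built from it:
//   [["set", "greeting", '"hello"'], ["get", "greeting"], ["keys", "*"]]
// and the resolved value has one entry per command, e.g. ["OK", '"hello"', [ ...keys ]]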

View file

@ -14,6 +14,7 @@ import {
BearerAuthConfig,
} from "../definitions/datasource"
import { get } from "lodash"
import qs from "querystring"
const fetch = require("node-fetch")
const { formatBytes } = require("../utilities")
const { performance } = require("perf_hooks")
@ -75,6 +76,12 @@ const SCHEMA: Integration = {
required: false,
default: {},
},
legacyHttpParser: {
display: "Legacy HTTP Support",
type: DatasourceFieldType.BOOLEAN,
required: false,
default: false,
},
},
query: {
create: {
@ -211,7 +218,8 @@ class RestIntegration implements IntegrationBase {
}
}
const main = `${path}?${queryString}`
// make sure the query string is fully encoded
const main = `${path}?${qs.encode(qs.decode(queryString))}`
let complete = main
if (this.config.url && !main.startsWith("http")) {
complete = !this.config.url ? main : `${this.config.url}/${main}`
@ -373,6 +381,11 @@ class RestIntegration implements IntegrationBase {
paginationValues
)
if (this.config.legacyHttpParser) {
// https://github.com/nodejs/node/issues/43798
input.extraHttpOptions = { insecureHTTPParser: true }
}
this.startTimeMs = performance.now()
const url = this.getUrl(path, queryString, pagination, paginationValues)
const response = await fetch(url, input)
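Two small sketches of the new behaviour, with illustrative values: the query-string round trip through Node's querystring module, and a datasource config that turns on the legacy parser (mapped to Node's insecureHTTPParser option) for servers that send malformed headers:
const qs = require("querystring")
// "test=1 2&email=foo@bar.com" -> "test=1%202&email=foo%40bar.com"
const encoded = qs.encode(qs.decode("test=1 2&email=foo@bar.com"))

// REST datasource config sketch
const config = {
  url: "https://legacy.example.com",
  defaultHeaders: {},
  legacyHttpParser: true, // adds { insecureHTTPParser: true } to each request
}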

View file

@ -29,7 +29,7 @@ describe("Redis Integration", () => {
key: "key",
value: "value",
}
const response = await config.integration.create(body)
await config.integration.create(body)
expect(await config.redis.get("key")).toEqual("value")
})
@ -49,7 +49,7 @@ describe("Redis Integration", () => {
expect(await config.redis.get(body.key)).toEqual(null)
})
it("calls the command method with the correct params", async () => {
it("calls the pipeline method with the correct params", async () => {
const body = {
json: "KEYS *",
}
@ -61,7 +61,24 @@ describe("Redis Integration", () => {
await config.integration.command(body)
expect(config.integration.client.pipeline).toHaveBeenCalledWith([
["KEYS", "*"],
["keys", "*"],
])
})
it("calls the pipeline method with several separated commands when there are newlines", async () => {
const body = {
json: 'SET foo "bar"\nGET foo',
}
// ioredis-mock doesn't support pipelines
config.integration.client.pipeline = jest.fn(() => ({
exec: jest.fn(() => [[]]),
}))
await config.integration.command(body)
expect(config.integration.client.pipeline).toHaveBeenCalledWith([
["set", "foo", '"bar"'],
["get", "foo"],
])
})
})

View file

@ -51,7 +51,7 @@ describe("REST Integration", () => {
name: "test",
}),
}
const response = await config.integration.create(query)
await config.integration.create(query)
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1`, {
method: "POST",
body: '{"name":"test"}',
@ -299,7 +299,7 @@ describe("REST Integration", () => {
}
await config.integration.read(query)
expect(fetch).toHaveBeenCalledWith(
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}&`,
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}`,
{
headers: {},
method: "GET",
@ -426,7 +426,7 @@ describe("REST Integration", () => {
}
const res = await config.integration.read(query)
expect(fetch).toHaveBeenCalledWith(
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}&`,
`${BASE_URL}/api?${pageParam}=${pageValue}&${sizeParam}=${sizeValue}`,
{
headers: {},
method: "GET",
@ -536,5 +536,40 @@ describe("REST Integration", () => {
expect(sentData.get(sizeParam)).toEqual(sizeValue.toString())
expect(res.pagination.cursor).toEqual(123)
})
it("should encode query string correctly", async () => {
const query = {
path: "api",
queryString: "test=1 2",
headers: HEADERS,
bodyType: "json",
requestBody: JSON.stringify({
name: "test",
}),
}
await config.integration.create(query)
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/api?test=1%202`, {
method: "POST",
body: '{"name":"test"}',
headers: HEADERS,
})
})
})
describe("Configuration options", () => {
it("Attaches insecureHttpParams when legacy HTTP Parser option is set", async () => {
config = new TestConfiguration({
url: BASE_URL,
legacyHttpParser: true,
})
await config.integration.read({})
expect(fetch).toHaveBeenCalledWith(`${BASE_URL}/?`, {
method: "GET",
headers: {},
extraHttpOptions: {
insecureHTTPParser: true,
},
})
})
})
})

View file

@ -11,3 +11,4 @@ declare module "@budibase/backend-core/encryption"
declare module "@budibase/backend-core/utils"
declare module "@budibase/backend-core/redis"
declare module "@budibase/backend-core/objectStore"
declare module "@budibase/backend-core/plugins"

View file

@ -458,6 +458,9 @@ class Orchestrator {
export function execute(input: AutomationEvent, callback: WorkerCallback) {
const appId = input.data.event.appId
if (!appId) {
throw new Error("Unable to execute, event doesn't contain app ID.")
}
doInAppContext(appId, async () => {
const automationOrchestrator = new Orchestrator(
input.data.automation,
@ -475,6 +478,9 @@ export function execute(input: AutomationEvent, callback: WorkerCallback) {
export const removeStalled = async (input: AutomationEvent) => {
const appId = input.data.event.appId
if (!appId) {
throw new Error("Unable to execute, event doesn't contain app ID.")
}
await doInAppContext(appId, async () => {
const automationOrchestrator = new Orchestrator(
input.data.automation,

View file

@ -15,6 +15,7 @@ const {
streamUpload,
deleteFolder,
downloadTarball,
downloadTarballDirect,
deleteFiles,
} = require("./utilities")
const { updateClientLibrary } = require("./clientLibrary")
@ -136,13 +137,13 @@ exports.defineFilter = excludeRows => {
* data or user relationships.
* @param {string} appId The app to backup
* @param {object} config Config to send to export DB
* @param {boolean} includeRows Flag to state whether the export should include data.
 * @param {boolean} excludeRows Flag to state whether row data should be excluded from the export.
* @returns {*} either a string or a stream of the backup
*/
const backupAppData = async (appId, config, includeRows) => {
const backupAppData = async (appId, config, excludeRows) => {
return await exports.exportDB(appId, {
...config,
filter: exports.defineFilter(includeRows),
filter: exports.defineFilter(excludeRows),
})
}
@ -159,11 +160,11 @@ exports.performBackup = async (appId, backupName) => {
/**
* Streams a backup of the database state for an app
* @param {string} appId The ID of the app which is to be backed up.
* @param {boolean} includeRows Flag to state whether the export should include data.
 * @param {boolean} excludeRows Flag to state whether row data should be excluded from the export.
* @returns {*} a readable stream of the backup which is written in real time
*/
exports.streamBackup = async (appId, includeRows) => {
return await backupAppData(appId, { stream: true }, includeRows)
exports.streamBackup = async (appId, excludeRows) => {
return await backupAppData(appId, { stream: true }, excludeRows)
}
/**
@ -338,31 +339,57 @@ exports.cleanup = appIds => {
}
}
exports.extractPluginTarball = async file => {
if (!file.name.endsWith(".tar.gz")) {
throw new Error("Plugin must be compressed into a gzipped tarball.")
const createTempFolder = item => {
const path = join(budibaseTempDir(), item)
try {
// remove old tmp directories automatically - don't combine
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true, force: true })
}
fs.mkdirSync(path)
} catch (err) {
throw new Error(`Path cannot be created: ${err.message}`)
}
const path = join(budibaseTempDir(), file.name.split(".tar.gz")[0])
// remove old tmp directories automatically - don't combine
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true, force: true })
}
fs.mkdirSync(path)
return path
}
exports.createTempFolder = createTempFolder
const extractTarball = async (fromFilePath, toPath) => {
await tar.extract({
file: file.path,
C: path,
file: fromFilePath,
C: toPath,
})
}
exports.extractTarball = extractTarball
const getPluginMetadata = async path => {
let metadata = {}
try {
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
metadata.schema = JSON.parse(schema)
metadata.package = JSON.parse(pkg)
if (
!metadata.package.name ||
!metadata.package.version ||
!metadata.package.description
) {
throw new Error(
"package.json is missing one of 'name', 'version' or 'description'."
)
}
} catch (err) {
throw new Error("Unable to process schema.json/package.json in plugin.")
throw new Error(
`Unable to process schema.json/package.json in plugin. ${err.message}`
)
}
return { metadata, directory: path }
}
exports.getPluginMetadata = getPluginMetadata
exports.getDatasourcePlugin = async (name, url, hash) => {
if (!fs.existsSync(DATASOURCE_PATH)) {
@ -396,6 +423,38 @@ exports.getDatasourcePlugin = async (name, url, hash) => {
}
}
/**
 * Recursively searches from the start path for the first file whose name ends with the given filter, returning its path
*/
exports.findFileRec = (startPath, filter) => {
if (!fs.existsSync(startPath)) {
return
}
const files = fs.readdirSync(startPath)
for (let i = 0, len = files.length; i < len; i++) {
const filename = join(startPath, files[i])
const stat = fs.lstatSync(filename)
if (stat.isDirectory()) {
const found = exports.findFileRec(filename, filter)
if (found) {
return found
}
} else if (filename.endsWith(filter)) {
return filename
}
}
}
/**
 * Remove a non-empty folder and all of its contents from the file system
*/
exports.deleteFolderFileSystem = path => {
if (!fs.existsSync(path)) {
return
}
fs.rmSync(path, { recursive: true, force: true })
}
/**
* Full function definition for below can be found in the utilities.
*/
@ -403,5 +462,6 @@ exports.upload = upload
exports.retrieve = retrieve
exports.retrieveToTmp = retrieveToTmp
exports.deleteFiles = deleteFiles
exports.downloadTarballDirect = downloadTarballDirect
exports.TOP_LEVEL_PATH = TOP_LEVEL_PATH
exports.NODE_MODULES_PATH = NODE_MODULES_PATH
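Roughly how the new helpers compose in the NPM uploader, with placeholder paths and URLs, and no error handling or cleanup:
const dir = createTempFolder("example-plugin")  // fresh tmp folder for this plugin
await downloadTarballDirect(tarballUrl, dir)    // fetch and unpack the npm tarball
const nested = findFileRec(dir, ".tar.gz")      // plugin build output nested inside the package
if (nested) {
  await extractTarball(nested, dir)             // unpack the actual plugin files
}
const { metadata, directory } = await getPluginMetadata(dir)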

View file

@ -162,3 +162,11 @@ exports.convertBookmark = bookmark => {
}
return bookmark
}
exports.isQsTrue = param => {
if (typeof param === "string") {
return param.toLowerCase() === "true"
} else {
return param === true
}
}
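Illustrative values for the helper above, which accepts both the string form a query parameter arrives in and a real boolean:
isQsTrue("true")    // -> true
isQsTrue("True")    // -> true (case-insensitive)
isQsTrue(true)      // -> true
isQsTrue("false")   // -> false
isQsTrue(undefined) // -> false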

Some files were not shown because too many files have changed in this diff