diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml index f72d1aef03..d71ee6e178 100644 --- a/charts/budibase/templates/app-service-deployment.yaml +++ b/charts/budibase/templates/app-service-deployment.yaml @@ -79,7 +79,11 @@ spec: - name: MINIO_URL value: {{ .Values.services.objectStore.url }} - name: PLUGIN_BUCKET_NAME - value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }} + value: {{ .Values.services.objectStore.pluginBucketName | quote }} + - name: APPS_BUCKET_NAME + value: {{ .Values.services.objectStore.appsBucketName | quote }} + - name: GLOBAL_CLOUD_BUCKET_NAME + value: {{ .Values.services.objectStore.globalBucketName | quote }} - name: PORT value: {{ .Values.services.apps.port | quote }} {{ if .Values.services.worker.publicApiRateLimitPerSecond }} diff --git a/charts/budibase/templates/worker-service-deployment.yaml b/charts/budibase/templates/worker-service-deployment.yaml index b1c6110d95..ffcda1ab72 100644 --- a/charts/budibase/templates/worker-service-deployment.yaml +++ b/charts/budibase/templates/worker-service-deployment.yaml @@ -78,7 +78,11 @@ spec: - name: MINIO_URL value: {{ .Values.services.objectStore.url }} - name: PLUGIN_BUCKET_NAME - value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }} + value: {{ .Values.services.objectStore.pluginBucketName | quote }} + - name: APPS_BUCKET_NAME + value: {{ .Values.services.objectStore.appsBucketName | quote }} + - name: GLOBAL_CLOUD_BUCKET_NAME + value: {{ .Values.services.objectStore.globalBucketName | quote }} - name: PORT value: {{ .Values.services.worker.port | quote }} - name: MULTI_TENANCY diff --git a/hosting/proxy/80-listen-on-ipv6-by-default.sh b/hosting/proxy/80-listen-on-ipv6-by-default.sh new file mode 100644 index 0000000000..1d62732ea1 --- /dev/null +++ b/hosting/proxy/80-listen-on-ipv6-by-default.sh @@ -0,0 +1,24 @@ +#!/bin/sh +# vim:sw=4:ts=4:et + +set -e + +ME=$(basename $0) +NGINX_CONF_FILE="/etc/nginx/nginx.conf" +DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf" + +# check if we have ipv6 available +if [ ! 
-f "/proc/net/if_inet6" ]; then + # ipv6 not available so delete lines from nginx conf + if [ -f "$NGINX_CONF_FILE" ]; then + sed -i '/listen \[::\]/d' $NGINX_CONF_FILE + fi + if [ -f "$DEFAULT_CONF_FILE" ]; then + sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE + fi + echo "$ME: info: ipv6 not available so delete lines from nginx conf" +else + echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf" +fi + +exit 0 \ No newline at end of file diff --git a/hosting/proxy/Dockerfile b/hosting/proxy/Dockerfile index 5fd0dc7d11..68e8134750 100644 --- a/hosting/proxy/Dockerfile +++ b/hosting/proxy/Dockerfile @@ -5,7 +5,12 @@ FROM nginx:latest # override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template -COPY 10-listen-on-ipv6-by-default.sh /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh + +# IPv6 removal needs to happen after envsubst +RUN rm -rf /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh +COPY 80-listen-on-ipv6-by-default.sh /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh +RUN chmod +x /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh + # Error handling COPY error.html /usr/share/nginx/html/error.html diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh index 6770d27ee0..a95c21a98f 100644 --- a/hosting/single/runner.sh +++ b/hosting/single/runner.sh @@ -18,6 +18,7 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME [[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002 [[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001 # export CUSTOM_DOMAIN=budi001.custom.com + # Azure App Service customisations if [[ "${TARGETBUILD}" = "aas" ]]; then DATA_DIR=/home @@ -27,6 +28,13 @@ else DATA_DIR=${DATA_DIR:-/data} fi +# Mount NFS or GCP Filestore if env vars exist for it +if [[ -z ${FILESHARE_IP} && -z ${FILESHARE_NAME} ]]; then + echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}" + mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR} + echo "Mounting completed." 
+fi + if [ -f "${DATA_DIR}/.env" ]; then # Read in the .env file and export the variables for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done diff --git a/lerna.json b/lerna.json index b15124be55..16540f235e 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.0.30-alpha.13", + "version": "2.0.34-alpha.3", "npmClient": "yarn", "packages": [ "packages/*" diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index e6dff805ca..83ed8eb7d6 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/backend-core", - "version": "2.0.30-alpha.13", + "version": "2.0.34-alpha.3", "description": "Budibase backend core libraries used in server and worker", "main": "dist/src/index.js", "types": "dist/src/index.d.ts", @@ -20,12 +20,13 @@ "test:watch": "jest --watchAll" }, "dependencies": { - "@budibase/types": "2.0.30-alpha.13", + "@budibase/types": "2.0.34-alpha.3", "@shopify/jest-koa-mocks": "5.0.1", "@techpass/passport-openidconnect": "0.3.2", "aws-sdk": "2.1030.0", "bcrypt": "5.0.1", "bcryptjs": "2.4.3", + "bull": "4.10.1", "dotenv": "16.0.1", "emitter-listener": "1.1.2", "ioredis": "4.28.0", @@ -63,6 +64,7 @@ }, "devDependencies": { "@types/chance": "1.1.3", + "@types/ioredis": "4.28.0", "@types/jest": "27.5.1", "@types/koa": "2.0.52", "@types/lodash": "4.14.180", diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts index ec6b1604c8..495ba58590 100644 --- a/packages/backend-core/src/cache/writethrough.ts +++ b/packages/backend-core/src/cache/writethrough.ts @@ -1,6 +1,7 @@ import BaseCache from "./base" import { getWritethroughClient } from "../redis/init" import { logWarn } from "../logging" +import PouchDB from "pouchdb" const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null diff --git a/packages/backend-core/src/context/index.ts b/packages/backend-core/src/context/index.ts index 35eeee608b..7efe0e23f7 100644 --- a/packages/backend-core/src/context/index.ts +++ b/packages/backend-core/src/context/index.ts @@ -53,6 +53,9 @@ export const getTenantIDFromAppID = (appId: string) => { if (!appId) { return null } + if (!isMultiTenant()) { + return DEFAULT_TENANT_ID + } const split = appId.split(SEPARATOR) const hasDev = split[1] === DocumentType.DEV if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) { diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/db/constants.ts index a61e8a2af2..446f1f7d01 100644 --- a/packages/backend-core/src/db/constants.ts +++ b/packages/backend-core/src/db/constants.ts @@ -21,6 +21,7 @@ export enum ViewName { ACCOUNT_BY_EMAIL = "account_by_email", PLATFORM_USERS_LOWERCASE = "platform_users_lowercase", USER_BY_GROUP = "by_group_user", + APP_BACKUP_BY_TRIGGER = "by_trigger", } export const DeprecatedViews = { @@ -30,6 +31,10 @@ export const DeprecatedViews = { ], } +export enum InternalTable { + USER_METADATA = "ta_users", +} + export enum DocumentType { USER = "us", GROUP = "gr", @@ -46,9 +51,23 @@ export enum DocumentType { AUTOMATION_LOG = "log_au", ACCOUNT_METADATA = "acc_metadata", PLUGIN = "plg", - TABLE = "ta", DATASOURCE = "datasource", DATASOURCE_PLUS = "datasource_plus", + APP_BACKUP = "backup", + TABLE = "ta", + ROW = "ro", + AUTOMATION = "au", + LINK = "li", + WEBHOOK = "wh", + INSTANCE = "inst", + LAYOUT = "layout", + SCREEN = "screen", + QUERY = "query", + DEPLOYMENTS = "deployments", + METADATA = "metadata", + 
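The context change above means an app ID no longer has to carry an embedded tenant ID when multi tenancy is disabled. A minimal sketch of the intended behaviour, assuming the internal module path and that `DEFAULT_TENANT_ID` is `"default"`:

```ts
import { getTenantIDFromAppID } from "@budibase/backend-core/src/context"

// with MULTI_TENANCY unset, the new guard returns DEFAULT_TENANT_ID
// immediately, so IDs without a tenant segment still resolve
const tenantId = getTenantIDFromAppID("app_dev_a1b2c3d4") // -> "default"
console.log(tenantId)
```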
MEM_VIEW = "view", + USER_FLAG = "flag", + AUTOMATION_METADATA = "meta_au", } export const StaticDatabases = { diff --git a/packages/backend-core/src/db/index.js b/packages/backend-core/src/db/index.js deleted file mode 100644 index aa6f7ebc2c..0000000000 --- a/packages/backend-core/src/db/index.js +++ /dev/null @@ -1,91 +0,0 @@ -const pouch = require("./pouch") -const env = require("../environment") - -const openDbs = [] -let PouchDB -let initialised = false -const dbList = new Set() - -if (env.MEMORY_LEAK_CHECK) { - setInterval(() => { - console.log("--- OPEN DBS ---") - console.log(openDbs) - }, 5000) -} - -const put = - dbPut => - async (doc, options = {}) => { - if (!doc.createdAt) { - doc.createdAt = new Date().toISOString() - } - doc.updatedAt = new Date().toISOString() - return dbPut(doc, options) - } - -const checkInitialised = () => { - if (!initialised) { - throw new Error("init has not been called") - } -} - -exports.init = opts => { - PouchDB = pouch.getPouch(opts) - initialised = true -} - -// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION -// this function is prone to leaks, should only be used -// in situations that using the function doWithDB does not work -exports.dangerousGetDB = (dbName, opts) => { - checkInitialised() - if (env.isTest()) { - dbList.add(dbName) - } - const db = new PouchDB(dbName, opts) - if (env.MEMORY_LEAK_CHECK) { - openDbs.push(db.name) - } - const dbPut = db.put - db.put = put(dbPut) - return db -} - -// use this function if you have called dangerousGetDB - close -// the databases you've opened once finished -exports.closeDB = async db => { - if (!db || env.isTest()) { - return - } - if (env.MEMORY_LEAK_CHECK) { - openDbs.splice(openDbs.indexOf(db.name), 1) - } - try { - // specifically await so that if there is an error, it can be ignored - return await db.close() - } catch (err) { - // ignore error, already closed - } -} - -// we have to use a callback for this so that we can close -// the DB when we're done, without this manual requests would -// need to close the database when done with it to avoid memory leaks -exports.doWithDB = async (dbName, cb, opts = {}) => { - const db = exports.dangerousGetDB(dbName, opts) - // need this to be async so that we can correctly close DB after all - // async operations have been completed - try { - return await cb(db) - } finally { - await exports.closeDB(db) - } -} - -exports.allDbs = () => { - if (!env.isTest()) { - throw new Error("Cannot be used outside test environment.") - } - checkInitialised() - return [...dbList] -} diff --git a/packages/backend-core/src/db/index.ts b/packages/backend-core/src/db/index.ts new file mode 100644 index 0000000000..429cd61fc1 --- /dev/null +++ b/packages/backend-core/src/db/index.ts @@ -0,0 +1,133 @@ +import * as pouch from "./pouch" +import env from "../environment" +import { checkSlashesInUrl } from "../helpers" +import fetch from "node-fetch" +import { PouchOptions, CouchFindOptions } from "@budibase/types" +import PouchDB from "pouchdb" + +const openDbs: string[] = [] +let Pouch: any +let initialised = false +const dbList = new Set() + +if (env.MEMORY_LEAK_CHECK) { + setInterval(() => { + console.log("--- OPEN DBS ---") + console.log(openDbs) + }, 5000) +} + +const put = + (dbPut: any) => + async (doc: any, options = {}) => { + if (!doc.createdAt) { + doc.createdAt = new Date().toISOString() + } + doc.updatedAt = new Date().toISOString() + return dbPut(doc, options) + } + +const checkInitialised = () => { + if (!initialised) { + throw new Error("init has not 
been called") + } +} + +export async function init(opts?: PouchOptions) { + Pouch = pouch.getPouch(opts) + initialised = true +} + +// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION +// this function is prone to leaks, should only be used +// in situations that using the function doWithDB does not work +export function dangerousGetDB(dbName: string, opts?: any): PouchDB.Database { + checkInitialised() + if (env.isTest()) { + dbList.add(dbName) + } + const db = new Pouch(dbName, opts) + if (env.MEMORY_LEAK_CHECK) { + openDbs.push(db.name) + } + const dbPut = db.put + db.put = put(dbPut) + return db +} + +// use this function if you have called dangerousGetDB - close +// the databases you've opened once finished +export async function closeDB(db: PouchDB.Database) { + if (!db || env.isTest()) { + return + } + if (env.MEMORY_LEAK_CHECK) { + openDbs.splice(openDbs.indexOf(db.name), 1) + } + try { + // specifically await so that if there is an error, it can be ignored + return await db.close() + } catch (err) { + // ignore error, already closed + } +} + +// we have to use a callback for this so that we can close +// the DB when we're done, without this manual requests would +// need to close the database when done with it to avoid memory leaks +export async function doWithDB(dbName: string, cb: any, opts = {}) { + const db = dangerousGetDB(dbName, opts) + // need this to be async so that we can correctly close DB after all + // async operations have been completed + try { + return await cb(db) + } finally { + await closeDB(db) + } +} + +export function allDbs() { + if (!env.isTest()) { + throw new Error("Cannot be used outside test environment.") + } + checkInitialised() + return [...dbList] +} + +export async function directCouchQuery( + path: string, + method: string = "GET", + body?: any +) { + let { url, cookie } = pouch.getCouchInfo() + const couchUrl = `${url}/${path}` + const params: any = { + method: method, + headers: { + Authorization: cookie, + }, + } + if (body && method !== "GET") { + params.body = JSON.stringify(body) + params.headers["Content-Type"] = "application/json" + } + const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params) + if (response.status < 300) { + return await response.json() + } else { + throw "Cannot connect to CouchDB instance" + } +} + +export async function directCouchAllDbs(queryString?: string) { + let couchPath = "/_all_dbs" + if (queryString) { + couchPath += `?${queryString}` + } + return await directCouchQuery(couchPath) +} + +export async function directCouchFind(dbName: string, opts: CouchFindOptions) { + const json = await directCouchQuery(`${dbName}/_find`, "POST", opts) + return { rows: json.docs, bookmark: json.bookmark } +} diff --git a/packages/backend-core/src/db/pouch.js b/packages/backend-core/src/db/pouch.ts similarity index 88% rename from packages/backend-core/src/db/pouch.js rename to packages/backend-core/src/db/pouch.ts index 12d7d787e3..1e37da9240 100644 --- a/packages/backend-core/src/db/pouch.js +++ b/packages/backend-core/src/db/pouch.ts @@ -1,7 +1,7 @@ -const PouchDB = require("pouchdb") -const env = require("../environment") +import PouchDB from "pouchdb" +import env from "../environment" -exports.getUrlInfo = (url = env.COUCH_DB_URL) => { +export const getUrlInfo = (url = env.COUCH_DB_URL) => { let cleanUrl, username, password, host if (url) { // Ensure the URL starts with a protocol @@ -44,8 +44,8 @@ exports.getUrlInfo = (url = env.COUCH_DB_URL) => { } } -exports.getCouchInfo = () => { - const urlInfo 
= exports.getUrlInfo() +export const getCouchInfo = () => { + const urlInfo = getUrlInfo() let username let password if (env.COUCH_DB_USERNAME) { @@ -82,11 +82,11 @@ exports.getCouchInfo = () => { * This should be rarely used outside of the main application config. * Exposed for exceptional cases such as in-memory views. */ -exports.getPouch = (opts = {}) => { - let { url, cookie } = exports.getCouchInfo() +export const getPouch = (opts: any = {}) => { + let { url, cookie } = getCouchInfo() let POUCH_DB_DEFAULTS = { prefix: url, - fetch: (url, opts) => { + fetch: (url: string, opts: any) => { // use a specific authorization cookie - be very explicit about how we authenticate opts.headers.set("Authorization", cookie) return PouchDB.fetch(url, opts) @@ -98,6 +98,7 @@ exports.getPouch = (opts = {}) => { PouchDB.plugin(inMemory) POUCH_DB_DEFAULTS = { prefix: undefined, + // @ts-ignore adapter: "memory", } } @@ -105,6 +106,7 @@ exports.getPouch = (opts = {}) => { if (opts.onDisk) { POUCH_DB_DEFAULTS = { prefix: undefined, + // @ts-ignore adapter: "leveldb", } } @@ -112,6 +114,7 @@ exports.getPouch = (opts = {}) => { if (opts.replication) { const replicationStream = require("pouchdb-replication-stream") PouchDB.plugin(replicationStream.plugin) + // @ts-ignore PouchDB.adapter("writableStream", replicationStream.adapters.writableStream) } diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts index 1c4be7e366..c04da5da4f 100644 --- a/packages/backend-core/src/db/utils.ts +++ b/packages/backend-core/src/db/utils.ts @@ -1,14 +1,17 @@ import { newid } from "../hashing" import { DEFAULT_TENANT_ID, Configs } from "../constants" import env from "../environment" -import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants" +import { + SEPARATOR, + DocumentType, + UNICODE_MAX, + ViewName, + InternalTable, +} from "./constants" import { getTenantId, getGlobalDB } from "../context" import { getGlobalDBName } from "./tenancy" -import fetch from "node-fetch" -import { doWithDB, allDbs } from "./index" -import { getCouchInfo } from "./pouch" +import { doWithDB, allDbs, directCouchAllDbs } from "./index" import { getAppMetadata } from "../cache/appMetadata" -import { checkSlashesInUrl } from "../helpers" import { isDevApp, isDevAppID, getProdAppID } from "./conversions" import { APP_PREFIX } from "./constants" import * as events from "../events" @@ -43,8 +46,8 @@ export const generateAppID = (tenantId = null) => { * @returns {object} Parameters which can then be used with an allDocs request. */ export function getDocParams( - docType: any, - docId: any = null, + docType: string, + docId?: string | null, otherProps: any = {} ) { if (docId == null) { @@ -57,6 +60,28 @@ export function getDocParams( } } +/** + * Gets the DB allDocs/query params for retrieving a row. + * @param {string|null} tableId The table in which the rows have been stored. + * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be + * left null to get all the rows in the table. + * @param {object} otherProps Any other properties to add to the request. + * @returns {object} Parameters which can then be used with an allDocs request. + */ +export function getRowParams( + tableId?: string | null, + rowId?: string | null, + otherProps = {} +) { + if (tableId == null) { + return getDocParams(DocumentType.ROW, null, otherProps) + } + + const endOfKey = rowId == null ? 
`${tableId}${SEPARATOR}` : rowId + + return getDocParams(DocumentType.ROW, endOfKey, otherProps) +} + /** * Retrieve the correct index for a view based on default design DB. */ @@ -64,6 +89,17 @@ export function getQueryIndex(viewName: ViewName) { return `database/${viewName}` } +/** + * Gets a new row ID for the specified table. + * @param {string} tableId The table which the row is being created for. + * @param {string|null} id If an ID is to be used then the UUID can be substituted for this. + * @returns {string} The new ID which a row doc can be stored under. + */ +export function generateRowID(tableId: string, id?: string) { + id = id || newid() + return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}` +} + /** * Check if a given ID is that of a table. * @returns {boolean} @@ -131,6 +167,33 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) { } } +/** + * Gets parameters for retrieving users, this is a utility function for the getDocParams function. + */ +export function getUserMetadataParams(userId?: string, otherProps = {}) { + return getRowParams(InternalTable.USER_METADATA, userId, otherProps) +} + +/** + * Generates a new user ID based on the passed in global ID. + * @param {string} globalId The ID of the global user. + * @returns {string} The new user ID which the user doc can be stored under. + */ +export function generateUserMetadataID(globalId: string) { + return generateRowID(InternalTable.USER_METADATA, globalId) +} + +/** + * Breaks up the ID to get the global ID. + */ +export function getGlobalIDFromUserMetadataID(id: string) { + const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}` + if (!id || !id.includes(prefix)) { + return id + } + return id.split(prefix)[1] +} + export function getUsersByAppParams(appId: any, otherProps: any = {}) { const prodAppId = getProdAppID(appId) return { @@ -191,9 +254,9 @@ export function getRoleParams(roleId = null, otherProps = {}) { return getDocParams(DocumentType.ROLE, roleId, otherProps) } -export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) { +export function getStartEndKeyURL(baseKey: any, tenantId = null) { const tenancy = tenantId ? 
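The row and user metadata helpers compose and decompose document keys. A round-trip sketch with a hypothetical global user ID, assuming `SEPARATOR` is `"_"` (so the composed key is `DocumentType.ROW` + `InternalTable.USER_METADATA` + the global ID):

```ts
import {
  generateUserMetadataID,
  getGlobalIDFromUserMetadataID,
} from "@budibase/backend-core/src/db/utils"

// "ro" + "_" + "ta_users" + "_" + "us_1234"
const metadataId = generateUserMetadataID("us_1234")
console.log(metadataId) // -> "ro_ta_users_us_1234"

// strips the row/table prefix back off
console.log(getGlobalIDFromUserMetadataID(metadataId)) // -> "us_1234"
```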
`${SEPARATOR}${tenantId}` : "" - return `${base}?startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"` + return `startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"` } /** @@ -209,22 +272,10 @@ export async function getAllDbs(opts = { efficient: false }) { return allDbs() } let dbs: any[] = [] - let { url, cookie } = getCouchInfo() - async function addDbs(couchUrl: string) { - const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), { - method: "GET", - headers: { - Authorization: cookie, - }, - }) - if (response.status === 200) { - let json = await response.json() - dbs = dbs.concat(json) - } else { - throw "Cannot connect to CouchDB instance" - } + async function addDbs(queryString?: string) { + const json = await directCouchAllDbs(queryString) + dbs = dbs.concat(json) } - let couchUrl = `${url}/_all_dbs` let tenantId = getTenantId() if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) { // just get all DBs when: @@ -232,12 +283,12 @@ export async function getAllDbs(opts = { efficient: false }) { // - default tenant // - apps dbs don't contain tenant id // - non-default tenant dbs are filtered out application side in getAllApps - await addDbs(couchUrl) + await addDbs() } else { // get prod apps - await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId)) + await addDbs(getStartEndKeyURL(DocumentType.APP, tenantId)) // get dev apps - await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId)) + await addDbs(getStartEndKeyURL(DocumentType.APP_DEV, tenantId)) // add global db name dbs.push(getGlobalDBName(tenantId)) } diff --git a/packages/backend-core/src/events/publishers/backup.ts b/packages/backend-core/src/events/publishers/backup.ts new file mode 100644 index 0000000000..0fc81da259 --- /dev/null +++ b/packages/backend-core/src/events/publishers/backup.ts @@ -0,0 +1,12 @@ +import { AppBackup, AppBackupRestoreEvent, Event } from "@budibase/types" +import { publishEvent } from "../events" + +export async function appBackupRestored(backup: AppBackup) { + const properties: AppBackupRestoreEvent = { + appId: backup.appId, + backupName: backup.name!, + backupCreatedAt: backup.timestamp, + } + + await publishEvent(Event.APP_BACKUP_RESTORED, properties) +} diff --git a/packages/backend-core/src/events/publishers/index.ts b/packages/backend-core/src/events/publishers/index.ts index 6fe42c4bda..7306312a8f 100644 --- a/packages/backend-core/src/events/publishers/index.ts +++ b/packages/backend-core/src/events/publishers/index.ts @@ -19,3 +19,4 @@ export * as installation from "./installation" export * as backfill from "./backfill" export * as group from "./group" export * as plugin from "./plugin" +export * as backup from "./backup" diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts index 42cad17620..659a56c051 100644 --- a/packages/backend-core/src/index.ts +++ b/packages/backend-core/src/index.ts @@ -19,6 +19,7 @@ import pino from "./pino" import * as middleware from "./middleware" import plugins from "./plugin" import encryption from "./security/encryption" +import * as queue from "./queue" // mimic the outer package exports import * as db from "./pkg/db" @@ -63,6 +64,7 @@ const core = { ...errorClasses, middleware, encryption, + queue, } export = core diff --git a/packages/backend-core/src/objectStore/index.ts b/packages/backend-core/src/objectStore/index.ts index 17e002cc49..8453c9aee6 100644 --- a/packages/backend-core/src/objectStore/index.ts +++ 
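The new backup publisher feeds the shared events pipeline. A sketch of emitting the restore event, with a hypothetical `AppBackup` document carrying the three fields the publisher reads (`appId`, `name`, `timestamp`); the real type lives in `@budibase/types`:

```ts
import { backup } from "@budibase/backend-core/src/events/publishers"

async function onRestoreFinished() {
  // hypothetical document shape; note the publisher reads name! (assumed set)
  const appBackup: any = {
    appId: "app_1234",
    name: "nightly-backup",
    timestamp: new Date().toISOString(),
  }
  await backup.appBackupRestored(appBackup)
}
```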
b/packages/backend-core/src/objectStore/index.ts @@ -18,11 +18,16 @@ const STATE = { bucketCreationPromises: {}, } +type ListParams = { + ContinuationToken?: string +} + const CONTENT_TYPE_MAP: any = { html: "text/html", css: "text/css", js: "application/javascript", json: "application/json", + gz: "application/gzip", } const STRING_CONTENT_TYPES = [ CONTENT_TYPE_MAP.html, @@ -32,16 +37,16 @@ ] // does normal sanitization and then swaps dev apps to apps -export function sanitizeKey(input: any) { +export function sanitizeKey(input: string) { return sanitize(sanitizeBucket(input)).replace(/\\/g, "/") } // simply handles the dev app to app conversion -export function sanitizeBucket(input: any) { +export function sanitizeBucket(input: string) { return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX) } -function publicPolicy(bucketName: any) { +function publicPolicy(bucketName: string) { return { Version: "2012-10-17", Statement: [ @@ -69,7 +74,7 @@ const PUBLIC_BUCKETS = [ * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage. * @constructor */ -export const ObjectStore = (bucket: any) => { +export const ObjectStore = (bucket: string) => { const config: any = { s3ForcePathStyle: true, signatureVersion: "v4", @@ -93,7 +98,7 @@ * Given an object store and a bucket name this will make sure the bucket exists, * if it does not exist then it will create it. */ -export const makeSureBucketExists = async (client: any, bucketName: any) => { +export const makeSureBucketExists = async (client: any, bucketName: string) => { bucketName = sanitizeBucket(bucketName) try { await client @@ -145,7 +150,7 @@ type, metadata, }: any) => { - const extension = [...filename.split(".")].pop() + const extension = filename.split(".").pop() const fileBytes = fs.readFileSync(path) const objectStore = ObjectStore(bucketName) @@ -168,8 +173,8 @@ * through to the object store. */ export const streamUpload = async ( - bucketName: any, - filename: any, + bucketName: string, + filename: string, stream: any, extra = {} ) => { @@ -202,7 +207,7 @@ * retrieves the contents of a file from the object store, if it is a known content type it * will be converted, otherwise it will be returned as a buffer stream. */ -export const retrieve = async (bucketName: any, filepath: any) => { +export const retrieve = async (bucketName: string, filepath: string) => { const objectStore = ObjectStore(bucketName) const params = { Bucket: sanitizeBucket(bucketName), @@ -217,10 +222,38 @@ } } +export const listAllObjects = async (bucketName: string, path: string) => { + const objectStore = ObjectStore(bucketName) + const list = (params: ListParams = {}) => { + return objectStore + .listObjectsV2({ + ...params, + Bucket: sanitizeBucket(bucketName), + Prefix: sanitizeKey(path), + }) + .promise() + } + let isTruncated = false, + token, + objects: AWS.S3.Types.Object[] = [] + do { + let params: ListParams = {} + if (token) { + params.ContinuationToken = token + } + const response = await list(params) + if (response.Contents) { + objects = objects.concat(response.Contents) + } + isTruncated = !!response.IsTruncated + // advance the token, otherwise truncated listings would loop forever + token = response.NextContinuationToken + } while (isTruncated) + return objects +} + /** * Same as retrieval function but puts to a temporary file.
*/ -export const retrieveToTmp = async (bucketName: any, filepath: any) => { +export const retrieveToTmp = async (bucketName: string, filepath: string) => { bucketName = sanitizeBucket(bucketName) filepath = sanitizeKey(filepath) const data = await retrieve(bucketName, filepath) @@ -229,10 +262,31 @@ export const retrieveToTmp = async (bucketName: any, filepath: any) => { return outputPath } +export const retrieveDirectory = async (bucketName: string, path: string) => { + let writePath = join(budibaseTempDir(), v4()) + fs.mkdirSync(writePath) + const objects = await listAllObjects(bucketName, path) + let fullObjects = await Promise.all( + objects.map(obj => retrieve(bucketName, obj.Key!)) + ) + let count = 0 + for (let obj of objects) { + const filename = obj.Key! + const data = fullObjects[count++] + const possiblePath = filename.split("/") + if (possiblePath.length > 1) { + const dirs = possiblePath.slice(0, possiblePath.length - 1) + fs.mkdirSync(join(writePath, ...dirs), { recursive: true }) + } + fs.writeFileSync(join(writePath, ...possiblePath), data) + } + return writePath +} + /** * Delete a single file. */ -export const deleteFile = async (bucketName: any, filepath: any) => { +export const deleteFile = async (bucketName: string, filepath: string) => { const objectStore = ObjectStore(bucketName) await makeSureBucketExists(objectStore, bucketName) const params = { @@ -242,7 +296,7 @@ export const deleteFile = async (bucketName: any, filepath: any) => { return objectStore.deleteObject(params) } -export const deleteFiles = async (bucketName: any, filepaths: any) => { +export const deleteFiles = async (bucketName: string, filepaths: string[]) => { const objectStore = ObjectStore(bucketName) await makeSureBucketExists(objectStore, bucketName) const params = { @@ -258,8 +312,8 @@ export const deleteFiles = async (bucketName: any, filepaths: any) => { * Delete a path, including everything within. 
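`listAllObjects` walks every `ContinuationToken` page of `listObjectsV2`, and `retrieveDirectory` mirrors a bucket prefix into a fresh directory under the Budibase temp dir. A usage sketch with hypothetical bucket and prefix names:

```ts
import {
  listAllObjects,
  retrieveDirectory,
} from "@budibase/backend-core/src/objectStore"

async function example() {
  // collects objects across every listObjectsV2 page
  const objects = await listAllObjects("app-backups", "app_1234/")
  console.log(objects.map(obj => obj.Key))

  // downloads the same prefix, recreating any nested folders locally
  const localPath = await retrieveDirectory("app-backups", "app_1234/")
  console.log(`downloaded to ${localPath}`)
}
```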
*/ export const deleteFolder = async ( - bucketName: any, - folder: any + bucketName: string, + folder: string ): Promise => { bucketName = sanitizeBucket(bucketName) folder = sanitizeKey(folder) @@ -292,9 +346,9 @@ } export const uploadDirectory = async ( - bucketName: any, - localPath: any, - bucketPath: any + bucketName: string, + localPath: string, + bucketPath: string ) => { bucketName = sanitizeBucket(bucketName) let uploads = [] @@ -326,7 +380,11 @@ await streamPipeline(response.body, zlib.Unzip(), tar.extract(path)) } -export const downloadTarball = async (url: any, bucketName: any, path: any) => { +export const downloadTarball = async ( + url: string, + bucketName: string, + path: string +) => { bucketName = sanitizeBucket(bucketName) path = sanitizeKey(path) const response = await fetch(url) diff --git a/packages/backend-core/src/objectStore/utils.js b/packages/backend-core/src/objectStore/utils.js index 9cf4f5f70e..2d4faf55d1 100644 --- a/packages/backend-core/src/objectStore/utils.js +++ b/packages/backend-core/src/objectStore/utils.js @@ -1,5 +1,6 @@ const { join } = require("path") const { tmpdir } = require("os") +const fs = require("fs") const env = require("../environment") /**************************************************** @@ -16,6 +17,11 @@ exports.ObjectStoreBuckets = { PLUGINS: env.PLUGIN_BUCKET_NAME, } -exports.budibaseTempDir = function () { - return join(tmpdir(), ".budibase") +const bbTmp = join(tmpdir(), ".budibase") +if (!fs.existsSync(bbTmp)) { + fs.mkdirSync(bbTmp) +} + +exports.budibaseTempDir = function () { + return bbTmp } diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts new file mode 100644 index 0000000000..e8323dacb8 --- /dev/null +++ b/packages/backend-core/src/queue/constants.ts @@ -0,0 +1,4 @@ +export enum JobQueue { + AUTOMATION = "automationQueue", + APP_BACKUP = "appBackupQueue", +} diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts new file mode 100644 index 0000000000..80ee7362e4 --- /dev/null +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -0,0 +1,127 @@ +import events from "events" + +/** + * Bull works with a Job wrapper around all messages that contains a lot more information about + * the state of the message, this object constructor implements the same schema of Bull jobs + * for the sake of maintaining API consistency. + * @param {string} queue The name of the queue which the message will be carried on. + * @param {object} message The JSON message which will be passed back to the consumer. + * @returns {Object} A new job which can now be put onto the queue, this is mostly an + * internal structure so that an in memory queue can be easily swapped for a Bull queue. + */ +function newJob(queue: string, message: any) { + return { + timestamp: Date.now(), + queue: queue, + data: message, + } +} + +/** + * This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock. + * It is relatively simple, using an event emitter internally to register when messages are available + * to the consumers - it can support many inputs and many consumers. + */ +class InMemoryQueue { + _name: string + _opts?: any + _messages: any[] + _emitter: events.EventEmitter + /** + * The constructor of the queue, exactly the same as that of Bull's. + * @param {string} name The name of the queue which is being configured.
+ * @param {object|null} opts This is not used by the in memory queue as there is no real use + * case when in memory, but is the same API as Bull + */ + constructor(name: string, opts = null) { + this._name = name + this._opts = opts + this._messages = [] + this._emitter = new events.EventEmitter() + } + + /** + * Same callback API as Bull, each callback passed to this will consume messages as they are + * available. Please note this is a queue service, not a notification service, so each + * consumer will receive different messages. + * @param {function} func The callback function which will return a "Job", the same + * as the Bull API, within this job the property "data" contains the JSON message. Please + * note this is incredibly limited compared to Bull as in reality the Job would contain + * a lot more information about the queue and current status of Bull cluster. + */ + process(func: any) { + this._emitter.on("message", async () => { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + let resp = func(msg) + if (resp.then != null) { + await resp + } + }) + } + + // simply puts a message to the queue and emits to the queue for processing + /** + * Simple function to replicate the add message functionality of Bull, putting + * a new message on the queue. This then emits an event which will be used to + * return the message to a consumer (if one is attached). + * @param {object} msg A message to be transported over the queue, this should be + * a JSON message as this is required by Bull. + * @param {boolean} repeat serves no purpose for the import queue. + */ + // eslint-disable-next-line no-unused-vars + add(msg: any, repeat: boolean) { + if (typeof msg !== "object") { + throw "Queue only supports carrying JSON." + } + this._messages.push(newJob(this._name, msg)) + this._emitter.emit("message") + } + + /** + * replicating the close function from bull, which waits for jobs to finish. + */ + async close() { + return [] + } + + /** + * This removes a cron which has been implemented, this is part of Bull API. + * @param {string} cronJobId The cron which is to be removed. 
+ */ + removeRepeatableByKey(cronJobId: string) { + // TODO: implement for testing + console.log(cronJobId) + } + + /** + * Implemented for tests + */ + getRepeatableJobs() { + return [] + } + + // eslint-disable-next-line no-unused-vars + removeJobs(pattern: string) { + // no-op + } + + /** + * Implemented for tests + */ + async clean() { + return [] + } + + async getJob() { + return {} + } + + on() { + // do nothing + } +} + +export = InMemoryQueue diff --git a/packages/backend-core/src/queue/index.ts b/packages/backend-core/src/queue/index.ts new file mode 100644 index 0000000000..b7d565ba13 --- /dev/null +++ b/packages/backend-core/src/queue/index.ts @@ -0,0 +1,2 @@ +export * from "./queue" +export * from "./constants" diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts new file mode 100644 index 0000000000..e1975b5d06 --- /dev/null +++ b/packages/backend-core/src/queue/listeners.ts @@ -0,0 +1,101 @@ +import { Job, JobId, Queue } from "bull" +import { JobQueue } from "./constants" + +export type StalledFn = (job: Job) => Promise + +export function addListeners( + queue: Queue, + jobQueue: JobQueue, + removeStalledCb?: StalledFn +) { + logging(queue, jobQueue) + if (removeStalledCb) { + handleStalled(queue, removeStalledCb) + } +} + +function handleStalled(queue: Queue, removeStalledCb?: StalledFn) { + queue.on("stalled", async (job: Job) => { + if (removeStalledCb) { + await removeStalledCb(job) + } else if (job.opts.repeat) { + const jobId = job.id + const repeatJobs = await queue.getRepeatableJobs() + for (let repeatJob of repeatJobs) { + if (repeatJob.id === jobId) { + await queue.removeRepeatableByKey(repeatJob.key) + } + } + console.log(`jobId=${jobId} disabled`) + } + }) +} + +function logging(queue: Queue, jobQueue: JobQueue) { + let eventType: string + switch (jobQueue) { + case JobQueue.AUTOMATION: + eventType = "automation-event" + break + case JobQueue.APP_BACKUP: + eventType = "app-backup-event" + break + } + if (process.env.NODE_DEBUG?.includes("bull")) { + queue + .on("error", (error: any) => { + // An error occurred. + console.error(`${eventType}=error error=${JSON.stringify(error)}`) + }) + .on("waiting", (jobId: JobId) => { + // A Job is waiting to be processed as soon as a worker is idling. + console.log(`${eventType}=waiting jobId=${jobId}`) + }) + .on("active", (job: Job, jobPromise: any) => { + // A job has started. You can use `jobPromise.cancel()`` to abort it. + console.log(`${eventType}=active jobId=${job.id}`) + }) + .on("stalled", (job: Job) => { + // A job has been marked as stalled. This is useful for debugging job + // workers that crash or pause the event loop. + console.error( + `${eventType}=stalled jobId=${job.id} job=${JSON.stringify(job)}` + ) + }) + .on("progress", (job: Job, progress: any) => { + // A job's progress was updated! + console.log( + `${eventType}=progress jobId=${job.id} progress=${progress}` + ) + }) + .on("completed", (job: Job, result) => { + // A job successfully completed with a `result`. + console.log(`${eventType}=completed jobId=${job.id} result=${result}`) + }) + .on("failed", (job, err: any) => { + // A job failed with reason `err`! + console.log(`${eventType}=failed jobId=${job.id} error=${err}`) + }) + .on("paused", () => { + // The queue has been paused. + console.log(`${eventType}=paused`) + }) + .on("resumed", (job: Job) => { + // The queue has been resumed. 
+ console.log(`${eventType}=resumed jobId=${job.id}`) + }) + .on("cleaned", (jobs: Job[], type: string) => { + // Old jobs have been cleaned from the queue. `jobs` is an array of cleaned + // jobs, and `type` is the type of jobs cleaned. + console.log(`${eventType}=cleaned length=${jobs.length} type=${type}`) + }) + .on("drained", () => { + // Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed) + console.log(`${eventType}=drained`) + }) + .on("removed", (job: Job) => { + // A job successfully removed. + console.log(`${eventType}=removed jobId=${job.id}`) + }) + } +} diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts new file mode 100644 index 0000000000..b4eeeb31aa --- /dev/null +++ b/packages/backend-core/src/queue/queue.ts @@ -0,0 +1,51 @@ +import env from "../environment" +import { getRedisOptions } from "../redis/utils" +import { JobQueue } from "./constants" +import InMemoryQueue from "./inMemoryQueue" +import BullQueue from "bull" +import { addListeners, StalledFn } from "./listeners" +const { opts: redisOpts, redisProtocolUrl } = getRedisOptions() + +const CLEANUP_PERIOD_MS = 60 * 1000 +let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = [] +let cleanupInterval: NodeJS.Timeout + +async function cleanup() { + for (let queue of QUEUES) { + await queue.clean(CLEANUP_PERIOD_MS, "completed") + } +} + +export function createQueue( + jobQueue: JobQueue, + opts: { removeStalledCb?: StalledFn } = {} +): BullQueue.Queue { + const queueConfig: any = redisProtocolUrl || { redis: redisOpts } + let queue: any + if (!env.isTest()) { + queue = new BullQueue(jobQueue, queueConfig) + } else { + queue = new InMemoryQueue(jobQueue, queueConfig) + } + addListeners(queue, jobQueue, opts?.removeStalledCb) + QUEUES.push(queue) + if (!cleanupInterval) { + cleanupInterval = setInterval(cleanup, CLEANUP_PERIOD_MS) + // fire off an initial cleanup + cleanup().catch(err => { + console.error(`Unable to cleanup automation queue initially - ${err}`) + }) + } + return queue +} + +export async function shutdown() { + if (QUEUES.length) { + clearInterval(cleanupInterval) + for (let queue of QUEUES) { + await queue.close() + } + QUEUES = [] + } + console.log("Queues shutdown") +} diff --git a/packages/backend-core/src/redis/redlock.ts b/packages/backend-core/src/redis/redlock.ts index abb13b2534..586302c9b1 100644 --- a/packages/backend-core/src/redis/redlock.ts +++ b/packages/backend-core/src/redis/redlock.ts @@ -55,7 +55,12 @@ export const doWithLock = async (opts: LockOptions, task: any) => { let lock try { // aquire lock - let name: string = `${tenancy.getTenantId()}_${opts.name}` + let name: string + if (opts.systemLock) { + name = opts.name + } else { + name = `${tenancy.getTenantId()}_${opts.name}` + } if (opts.nameSuffix) { name = name + `_${opts.nameSuffix}` } diff --git a/packages/backend-core/yarn.lock b/packages/backend-core/yarn.lock index 6bc9b63728..d301526ba1 100644 --- a/packages/backend-core/yarn.lock +++ b/packages/backend-core/yarn.lock @@ -543,6 +543,36 @@ semver "^7.3.5" tar "^6.1.11" +"@msgpackr-extract/msgpackr-extract-darwin-arm64@2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-2.1.2.tgz#9571b87be3a3f2c46de05585470bc4f3af2f6f00" + integrity sha512-TyVLn3S/+ikMDsh0gbKv2YydKClN8HaJDDpONlaZR+LVJmsxLFUgA+O7zu59h9+f9gX1aj/ahw9wqa6rosmrYQ== +
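`createQueue` hands back Bull in normal environments and `InMemoryQueue` under test, so consumers code against one surface. A sketch of the intended usage, assuming Redis configuration is picked up from the environment exactly as `createQueue` does:

```ts
import { createQueue, JobQueue } from "@budibase/backend-core/src/queue"

const backupQueue = createQueue(JobQueue.APP_BACKUP)

// same callback contract for Bull and the in-memory mock:
// job.data carries the JSON payload in both implementations
backupQueue.process(async job => {
  console.log(`running backup for app ${job.data.appId}`)
})

async function enqueue() {
  await backupQueue.add({ appId: "app_1234" }) // hypothetical payload shape
}
```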
+"@msgpackr-extract/msgpackr-extract-darwin-x64@2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-2.1.2.tgz#bfbc6936ede2955218f5621a675679a5fe8e6f4c" + integrity sha512-YPXtcVkhmVNoMGlqp81ZHW4dMxK09msWgnxtsDpSiZwTzUBG2N+No2bsr7WMtBKCVJMSD6mbAl7YhKUqkp/Few== + +"@msgpackr-extract/msgpackr-extract-linux-arm64@2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-2.1.2.tgz#22555e28382af2922e7450634c8a2f240bb9eb82" + integrity sha512-vHZ2JiOWF2+DN9lzltGbhtQNzDo8fKFGrf37UJrgqxU0yvtERrzUugnfnX1wmVfFhSsF8OxrfqiNOUc5hko1Zg== + +"@msgpackr-extract/msgpackr-extract-linux-arm@2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-2.1.2.tgz#ffb6ae1beea7ac572b6be6bf2a8e8162ebdd8be7" + integrity sha512-42R4MAFeIeNn+L98qwxAt360bwzX2Kf0ZQkBBucJ2Ircza3asoY4CDbgiu9VWklq8gWJVSJSJBwDI+c/THiWkA== + +"@msgpackr-extract/msgpackr-extract-linux-x64@2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-2.1.2.tgz#7caf62eebbfb1345de40f75e89666b3d4194755f" + integrity sha512-RjRoRxg7Q3kPAdUSC5EUUPlwfMkIVhmaRTIe+cqHbKrGZ4M6TyCA/b5qMaukQ/1CHWrqYY2FbKOAU8Hg0pQFzg== + +"@msgpackr-extract/msgpackr-extract-win32-x64@2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-2.1.2.tgz#f2d8b9ddd8d191205ed26ce54aba3dfc5ae3e7c9" + integrity sha512-rIZVR48zA8hGkHIK7ED6+ZiXsjRCcAVBJbm8o89OKAMTmEAQ2QvoOxoiu3w2isAaWwzgtQIOFIqHwvZDyLKCvw== + "@shopify/jest-koa-mocks@5.0.1": version "5.0.1" resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-5.0.1.tgz#fba490b6b7985fbb571eb9974897d396a3642e94" @@ -733,6 +763,13 @@ resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-1.8.2.tgz#7315b4c4c54f82d13fa61c228ec5c2ea5cc9e0e1" integrity sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w== +"@types/ioredis@4.28.0": + version "4.28.0" + resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.0.tgz#609b2ea0d91231df2dd7f67dd77436bc72584911" + integrity sha512-HSA/JQivJgV0e+353gvgu6WVoWvGRe0HyHOnAN2AvbVIhUlJBhNnnkP8gEEokrDWrxywrBkwo8NuDZ6TVPL9XA== + dependencies: + "@types/node" "*" + "@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": version "2.0.4" resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" @@ -1497,6 +1534,21 @@ buffer@^5.5.0, buffer@^5.6.0: base64-js "^1.3.1" ieee754 "^1.1.13" +bull@4.10.1: + version "4.10.1" + resolved "https://registry.yarnpkg.com/bull/-/bull-4.10.1.tgz#f14974b6089358b62b495a2cbf838aadc098e43f" + integrity sha512-Fp21tRPb2EaZPVfmM+ONZKVz2RA+to+zGgaTLyCKt3JMSU8OOBqK8143OQrnGuGpsyE5G+9FevFAGhdZZfQP2g== + dependencies: + cron-parser "^4.2.1" + debuglog "^1.0.0" + get-port "^5.1.1" + ioredis "^4.28.5" + lodash "^4.17.21" + msgpackr "^1.5.2" + p-timeout "^3.2.0" + semver "^7.3.2" + uuid "^8.3.0" + cache-content-type@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-content-type/-/cache-content-type-1.0.1.tgz#035cde2b08ee2129f4a8315ea8f00a00dba1453c" @@ -1764,6 +1816,13 @@ core-util-is@~1.0.0: resolved 
"https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +cron-parser@^4.2.1: + version "4.6.0" + resolved "https://registry.yarnpkg.com/cron-parser/-/cron-parser-4.6.0.tgz#404c3fdbff10ae80eef6b709555d577ef2fd2e0d" + integrity sha512-guZNLMGUgg6z4+eGhmHGw7ft+v6OQeuHzd1gcLxCo9Yg/qoxmG3nindp2/uwGCLizEisf2H0ptqeVXeoCpP6FA== + dependencies: + luxon "^3.0.1" + cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" @@ -1837,6 +1896,11 @@ debug@~3.1.0: dependencies: ms "2.0.0" +debuglog@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" + integrity sha512-syBZ+rnAK3EgMsH2aYEOLUW7mZSY9Gb+0wUMCFsZvcmiz+HigA0LOcq/HoQqVuGG+EKykunc7QG2bzrponfaSw== + decimal.js@^10.2.1: version "10.3.1" resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" @@ -2318,6 +2382,11 @@ get-package-type@^0.1.0: resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== +get-port@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" + integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== + get-stream@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" @@ -2652,6 +2721,23 @@ ioredis@4.28.0: redis-parser "^3.0.0" standard-as-callback "^2.1.0" +ioredis@^4.28.5: + version "4.28.5" + resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.5.tgz#5c149e6a8d76a7f8fa8a504ffc85b7d5b6797f9f" + integrity sha512-3GYo0GJtLqgNXj4YhrisLaNNvWSNwSS2wS4OELGfGxH8I69+XfNdnmV1AyN+ZqMh0i7eX+SWjrwFKDBDgfBC1A== + dependencies: + cluster-key-slot "^1.1.0" + debug "^4.3.1" + denque "^1.1.0" + lodash.defaults "^4.2.0" + lodash.flatten "^4.4.0" + lodash.isarguments "^3.1.0" + p-map "^2.1.0" + redis-commands "1.7.0" + redis-errors "^1.2.0" + redis-parser "^3.0.0" + standard-as-callback "^2.1.0" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -3725,6 +3811,11 @@ ltgt@2.2.1, ltgt@^2.1.2, ltgt@~2.2.0: resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5" integrity sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA== +luxon@^3.0.1: + version "3.0.4" + resolved "https://registry.yarnpkg.com/luxon/-/luxon-3.0.4.tgz#d179e4e9f05e092241e7044f64aaa54796b03929" + integrity sha512-aV48rGUwP/Vydn8HT+5cdr26YYQiUZ42NM6ToMoaGKwYfWbfLeRkEu1wXWMHBZT6+KyLfcbbtVcoQFCbbPjKlw== + make-dir@^3.0.0, make-dir@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" @@ -3872,6 +3963,27 @@ ms@^2.1.1, ms@^2.1.3: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== 
+msgpackr-extract@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/msgpackr-extract/-/msgpackr-extract-2.1.2.tgz#56272030f3e163e1b51964ef8b1cd5e7240c03ed" + integrity sha512-cmrmERQFb19NX2JABOGtrKdHMyI6RUyceaPBQ2iRz9GnDkjBWFjNJC0jyyoOfZl2U/LZE3tQCCQc4dlRyA8mcA== + dependencies: + node-gyp-build-optional-packages "5.0.3" + optionalDependencies: + "@msgpackr-extract/msgpackr-extract-darwin-arm64" "2.1.2" + "@msgpackr-extract/msgpackr-extract-darwin-x64" "2.1.2" + "@msgpackr-extract/msgpackr-extract-linux-arm" "2.1.2" + "@msgpackr-extract/msgpackr-extract-linux-arm64" "2.1.2" + "@msgpackr-extract/msgpackr-extract-linux-x64" "2.1.2" + "@msgpackr-extract/msgpackr-extract-win32-x64" "2.1.2" + +msgpackr@^1.5.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.7.2.tgz#68d6debf5999d6b61abb6e7046a689991ebf7261" + integrity sha512-mWScyHTtG6TjivXX9vfIy2nBtRupaiAj0HQ2mtmpmYujAmqZmaaEVPaSZ1NKLMvicaMLFzEaMk0ManxMRg8rMQ== + optionalDependencies: + msgpackr-extract "^2.1.2" + napi-macros@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" @@ -3919,6 +4031,11 @@ node-forge@^0.7.1: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac" integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw== +node-gyp-build-optional-packages@5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.0.3.tgz#92a89d400352c44ad3975010368072b41ad66c17" + integrity sha512-k75jcVzk5wnnc/FMxsf4udAoTEUv2jY3ycfdSd3yWu6Cnd1oee6/CfZJApyscA4FJOmdoixWwiwOyf16RzD5JA== + node-gyp-build@~4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" @@ -4075,6 +4192,11 @@ p-cancelable@^1.0.0: resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow== + p-limit@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" @@ -4094,6 +4216,13 @@ p-map@^2.1.0: resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== +p-timeout@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe" + integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg== + dependencies: + p-finally "^1.0.0" + p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" @@ -5360,7 +5489,7 @@ uuid@8.1.0: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.1.0.tgz#6f1536eb43249f473abc6bd58ff983da1ca30d8d" integrity sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg== -uuid@8.3.2, 
uuid@^8.3.2: +uuid@8.3.2, uuid@^8.3.0, uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== diff --git a/packages/bbui/package.json b/packages/bbui/package.json index 6e252dfbfa..37650dda37 100644 --- a/packages/bbui/package.json +++ b/packages/bbui/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/bbui", "description": "A UI solution used in the different Budibase projects.", - "version": "2.0.30-alpha.13", + "version": "2.0.34-alpha.3", "license": "MPL-2.0", "svelte": "src/index.js", "module": "dist/bbui.es.js", @@ -38,7 +38,7 @@ ], "dependencies": { "@adobe/spectrum-css-workflow-icons": "^1.2.1", - "@budibase/string-templates": "2.0.30-alpha.13", + "@budibase/string-templates": "2.0.34-alpha.3", "@spectrum-css/actionbutton": "^1.0.1", "@spectrum-css/actiongroup": "^1.0.1", "@spectrum-css/avatar": "^3.0.2", diff --git a/packages/bbui/src/Form/Core/DatePicker.svelte b/packages/bbui/src/Form/Core/DatePicker.svelte index 1a7ab59818..9e7d44dbc3 100644 --- a/packages/bbui/src/Form/Core/DatePicker.svelte +++ b/packages/bbui/src/Form/Core/DatePicker.svelte @@ -17,7 +17,7 @@ export let timeOnly = false export let ignoreTimezones = false export let time24hr = false - + export let range = false const dispatch = createEventDispatcher() const flatpickrId = `${uuid()}-wrapper` let open = false @@ -41,6 +41,7 @@ time_24hr: time24hr || false, altFormat: timeOnly ? "H:i" : enableTime ? "F j Y, H:i" : "F j, Y", wrap: true, + mode: range ? "range" : null, appendTo, disableMobile: "true", onReady: () => { @@ -64,7 +65,6 @@ if (newValue) { newValue = newValue.toISOString() } - // If time only set date component to 2000-01-01 if (timeOnly) { // Classic flackpickr causing issues. @@ -95,7 +95,11 @@ .slice(0, -1) } - dispatch("change", newValue) + if (range) { + dispatch("change", event.detail) + } else { + dispatch("change", newValue) + } } const clearDateOnBackspace = event => { @@ -160,7 +164,7 @@ {#key redrawOptions} { - value = e.detail + if (range) { + // Flatpickr can't take two dates and work out what to display; it needs to be provided a string + // like "Date1 to Date2".
Hence passing in that specifically from the array + value = e?.detail[1] + } else { + value = e.detail + } dispatch("change", e.detail) } @@ -34,6 +40,7 @@ {time24hr} {appendTo} {ignoreTimezones} + {range} on:change={onChange} /> diff --git a/packages/bbui/src/Table/CellRenderer.svelte b/packages/bbui/src/Table/CellRenderer.svelte index 246323244a..5004401d91 100644 --- a/packages/bbui/src/Table/CellRenderer.svelte +++ b/packages/bbui/src/Table/CellRenderer.svelte @@ -56,6 +56,7 @@ {schema} value={cellValue} on:clickrelationship + on:buttonclick > diff --git a/packages/bbui/src/Table/Table.svelte b/packages/bbui/src/Table/Table.svelte index 01a2ca4835..7745c3c407 100644 --- a/packages/bbui/src/Table/Table.svelte +++ b/packages/bbui/src/Table/Table.svelte @@ -387,6 +387,7 @@ schema={schema[field]} value={deepGet(row, field)} on:clickrelationship + on:buttonclick > diff --git a/packages/builder/assets/backups-default.png b/packages/builder/assets/backups-default.png new file mode 100644 index 0000000000..6e37cbb6c7 Binary files /dev/null and b/packages/builder/assets/backups-default.png differ diff --git a/packages/builder/package.json b/packages/builder/package.json index 7846e6b6b8..a1d217f0a7 100644 --- a/packages/builder/package.json +++ b/packages/builder/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/builder", - "version": "2.0.30-alpha.13", + "version": "2.0.34-alpha.3", "license": "GPL-3.0", "private": true, "scripts": { @@ -71,10 +71,10 @@ } }, "dependencies": { - "@budibase/bbui": "2.0.30-alpha.13", - "@budibase/client": "2.0.30-alpha.13", - "@budibase/frontend-core": "2.0.30-alpha.13", - "@budibase/string-templates": "2.0.30-alpha.13", + "@budibase/bbui": "2.0.34-alpha.3", + "@budibase/client": "2.0.34-alpha.3", + "@budibase/frontend-core": "2.0.34-alpha.3", + "@budibase/string-templates": "2.0.34-alpha.3", "@sentry/browser": "5.19.1", "@spectrum-css/page": "^3.0.1", "@spectrum-css/vars": "^3.0.1", diff --git a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js index 848dd4405a..fc8b1b8427 100644 --- a/packages/builder/src/builderStore/store/frontend.js +++ b/packages/builder/src/builderStore/store/frontend.js @@ -182,7 +182,70 @@ export const getFrontendStore = () => { return state }) }, + validate: screen => { + // Recursive function to find any illegal children in component trees + const findIllegalChild = ( + component, + illegalChildren = [], + legalDirectChildren = [] + ) => { + const type = component._component + if (illegalChildren.includes(type)) { + return type + } + if ( + legalDirectChildren.length && + !legalDirectChildren.includes(type) + ) { + return type + } + if (!component?._children?.length) { + return + } + + const definition = store.actions.components.getDefinition( + component._component + ) + + // Reset whitelist for direct children + legalDirectChildren = [] + if (definition?.legalDirectChildren?.length) { + legalDirectChildren = definition.legalDirectChildren.map(x => { + return `@budibase/standard-components/${x}` + }) + } + + // Append blacklisted components and remove duplicates + if (definition?.illegalChildren?.length) { + const blacklist = definition.illegalChildren.map(x => { + return `@budibase/standard-components/${x}` + }) + illegalChildren = [...new Set([...illegalChildren, ...blacklist])] + } + + // Recurse on all children + for (let child of component._children) { + const illegalChild = findIllegalChild( + child, + illegalChildren, + legalDirectChildren + ) + if (illegalChild) { + 
diff --git a/packages/builder/assets/backups-default.png b/packages/builder/assets/backups-default.png
new file mode 100644
index 0000000000..6e37cbb6c7
Binary files /dev/null and b/packages/builder/assets/backups-default.png differ
diff --git a/packages/builder/package.json b/packages/builder/package.json
index 7846e6b6b8..a1d217f0a7 100644
--- a/packages/builder/package.json
+++ b/packages/builder/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "2.0.30-alpha.13",
+  "version": "2.0.34-alpha.3",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {
@@ -71,10 +71,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "2.0.30-alpha.13",
-    "@budibase/client": "2.0.30-alpha.13",
-    "@budibase/frontend-core": "2.0.30-alpha.13",
-    "@budibase/string-templates": "2.0.30-alpha.13",
+    "@budibase/bbui": "2.0.34-alpha.3",
+    "@budibase/client": "2.0.34-alpha.3",
+    "@budibase/frontend-core": "2.0.34-alpha.3",
+    "@budibase/string-templates": "2.0.34-alpha.3",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
diff --git a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js
index 848dd4405a..fc8b1b8427 100644
--- a/packages/builder/src/builderStore/store/frontend.js
+++ b/packages/builder/src/builderStore/store/frontend.js
@@ -182,7 +182,70 @@
       return state
     })
   },
+  validate: screen => {
+    // Recursive function to find any illegal children in component trees
+    const findIllegalChild = (
+      component,
+      illegalChildren = [],
+      legalDirectChildren = []
+    ) => {
+      const type = component._component
+      if (illegalChildren.includes(type)) {
+        return type
+      }
+      if (
+        legalDirectChildren.length &&
+        !legalDirectChildren.includes(type)
+      ) {
+        return type
+      }
+      if (!component?._children?.length) {
+        return
+      }
+
+      const definition = store.actions.components.getDefinition(
+        component._component
+      )
+
+      // Reset whitelist for direct children
+      legalDirectChildren = []
+      if (definition?.legalDirectChildren?.length) {
+        legalDirectChildren = definition.legalDirectChildren.map(x => {
+          return `@budibase/standard-components/${x}`
+        })
+      }
+
+      // Append blacklisted components and remove duplicates
+      if (definition?.illegalChildren?.length) {
+        const blacklist = definition.illegalChildren.map(x => {
+          return `@budibase/standard-components/${x}`
+        })
+        illegalChildren = [...new Set([...illegalChildren, ...blacklist])]
+      }
+
+      // Recurse on all children
+      for (let child of component._children) {
+        const illegalChild = findIllegalChild(
+          child,
+          illegalChildren,
+          legalDirectChildren
+        )
+        if (illegalChild) {
+          return illegalChild
+        }
+      }
+    }
+
+    // Validate the entire tree and throw an error if an illegal child is
+    // found anywhere
+    const illegalChild = findIllegalChild(screen.props)
+    if (illegalChild) {
+      const def = store.actions.components.getDefinition(illegalChild)
+      throw `You can't place a ${def.name} here`
+    }
+  },
   save: async screen => {
+    store.actions.screens.validate(screen)
     const state = get(store)
     const creatingNewScreen = screen._id === undefined
     const savedScreen = await API.saveScreen(screen)
@@ -445,7 +508,11 @@
     return {
       _id: Helpers.uuid(),
       _component: definition.component,
-      _styles: { normal: {}, hover: {}, active: {} },
+      _styles: {
+        normal: {},
+        hover: {},
+        active: {},
+      },
       _instanceName: `New ${definition.friendlyName || definition.name}`,
       ...cloneDeep(props),
       ...extras,
@@ -533,12 +600,11 @@
   },
   patch: async (patchFn, componentId, screenId) => {
     // Use selected component by default
-    if (!componentId && !screenId) {
+    if (!componentId || !screenId) {
       const state = get(store)
-      componentId = state.selectedComponentId
-      screenId = state.selectedScreenId
+      componentId = componentId || state.selectedComponentId
+      screenId = screenId || state.selectedScreenId
     }
-    // Invalid if only a screen or component ID provided
     if (!componentId || !screenId || !patchFn) {
       return
     }
@@ -601,16 +667,14 @@
     })

     // Select the parent if cutting
-    if (cut) {
+    if (cut && selectParent) {
       const screen = get(selectedScreen)
       const parent = findComponentParent(screen?.props, component._id)
       if (parent) {
-        if (selectParent) {
-          store.update(state => {
-            state.selectedComponentId = parent._id
-            return state
-          })
-        }
+        store.update(state => {
+          state.selectedComponentId = parent._id
+          return state
+        })
       }
     }
   },
@@ -621,16 +685,24 @@
     }
     let newComponentId

+    // Remove the copied component if cutting, regardless of whether pasting works
+    let componentToPaste = cloneDeep(state.componentToPaste)
+    if (componentToPaste.isCut) {
+      store.update(state => {
+        delete state.componentToPaste
+        return state
+      })
+    }
+
     // Patch screen
     const patch = screen => {
       // Get up to date ref to target
       targetComponent = findComponent(screen.props, targetComponent._id)
       if (!targetComponent) {
-        return
+        return false
       }
-      const cut = state.componentToPaste.isCut
-      const originalId = state.componentToPaste._id
-      let componentToPaste = cloneDeep(state.componentToPaste)
+      const cut = componentToPaste.isCut
+      const originalId = componentToPaste._id
       delete componentToPaste.isCut

       // Make new component unique if copying
@@ -685,11 +757,8 @@
     const targetScreenId = targetScreen?._id || state.selectedScreenId
     await store.actions.screens.patch(patch, targetScreenId)

+    // Select the new component
     store.update(state => {
-      // Remove copied component if cutting
-      if (state.componentToPaste.isCut) {
-        delete state.componentToPaste
-      }
       state.selectedScreenId = targetScreenId
       state.selectedComponentId = newComponentId
       return state
     })
@@ -893,6 +962,15 @@
       }
     })
   },
+  updateStyles: async (styles, id) => {
+    const patchFn = component => {
+      component._styles.normal = {
+        ...component._styles.normal,
+        ...styles,
+      }
+    }
+    await store.actions.components.patch(patchFn, id)
+  },
   updateCustomStyle: async style => {
     await store.actions.components.patch(component => {
       component._styles.custom = style
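
A standalone sketch of the blacklist propagation the new validate action performs, with getDefinition stubbed and the legalDirectChildren whitelist omitted for brevity; run with node to see the illegal component bubble up:

// Stubbed definition lookup: only a form component, which bans nested
// forms anywhere beneath it
const definitions = {
  "@budibase/standard-components/form": { illegalChildren: ["form"] },
}
const getDefinition = type => definitions[type]

const findIllegalChild = (component, illegalChildren = []) => {
  const type = component._component
  if (illegalChildren.includes(type)) {
    return type
  }
  const definition = getDefinition(type)
  // Append blacklisted components and remove duplicates, as the store
  // action does, so the ban applies to the whole subtree below
  if (definition?.illegalChildren?.length) {
    const blacklist = definition.illegalChildren.map(
      x => `@budibase/standard-components/${x}`
    )
    illegalChildren = [...new Set([...illegalChildren, ...blacklist])]
  }
  for (let child of component._children || []) {
    const illegalChild = findIllegalChild(child, illegalChildren)
    if (illegalChild) {
      return illegalChild
    }
  }
}

// A form nested two levels inside another form is still caught
const screen = {
  _component: "@budibase/standard-components/form",
  _children: [
    {
      _component: "@budibase/standard-components/container",
      _children: [{ _component: "@budibase/standard-components/form" }],
    },
  ],
}
console.log(findIllegalChild(screen)) // -> "@budibase/standard-components/form"
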
diff --git a/packages/builder/src/components/backend/DataTable/Table.svelte b/packages/builder/src/components/backend/DataTable/Table.svelte
index 85b271dee8..adc35a333d 100644
--- a/packages/builder/src/components/backend/DataTable/Table.svelte
+++ b/packages/builder/src/components/backend/DataTable/Table.svelte
@@ -8,6 +8,7 @@
   import CreateEditRow from "./modals/CreateEditRow.svelte"
   import CreateEditUser from "./modals/CreateEditUser.svelte"
   import CreateEditColumn from "./modals/CreateEditColumn.svelte"
+  import { cloneDeep } from "lodash/fp"
   import {
     TableNames,
     UNEDITABLE_USER_FIELDS,
@@ -110,7 +111,7 @@
   }

   const editColumn = field => {
-    editableColumn = schema?.[field]
+    editableColumn = cloneDeep(schema?.[field])
     if (editableColumn) {
       editColumnModal.show()
     }
diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
index b7249ad60c..dce6df6d0d 100644
--- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
@@ -488,7 +488,7 @@
       ]}
       getOptionLabel={option => option.label}
       getOptionValue={option => option.value}
-      tooltip="Dynamic formula are calculated when retrieved, but cannot be filtered,
+      tooltip="Dynamic formula are calculated when retrieved, but cannot be filtered or
       sorted by, while static formula are calculated when the row is saved."
     />
   {/if}
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte
index 27358df0be..1417de6dab 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte
@@ -10,10 +10,14 @@
   import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
   import { capitalise } from "helpers"
   import { IntegrationTypes } from "constants/backend"
+  import { createValidationStore } from "helpers/validation/yup"
+  import { createEventDispatcher } from "svelte"

   export let datasource
   export let schema
   export let creating

+  const validation = createValidationStore()
+  const dispatch = createEventDispatcher()

   function filter([key, value]) {
     if (!value) {
@@ -31,6 +35,17 @@
     .filter(el => filter(el))
     .map(([key]) => key)

+  // Set up the validation for each required field
+  $: configKeys.forEach(key => {
+    if (schema[key].required) {
+      validation.addValidatorType(key, schema[key].type, schema[key].required)
+    }
+  })
+  // Run the validation whenever the config changes
+  $: validation.check(config)
+  // Dispatch the validation result
+  $: dispatch("valid", $validation.valid)
+
   let addButton

   function getDisplayName(key) {
@@ -79,6 +94,7 @@
           type={schema[configKey].type}
           on:change
           bind:value={config[configKey]}
+          error={$validation.errors[configKey]}
         />
       {:else}
@@ -88,6 +104,7 @@
           type={schema[configKey].type}
           on:change
           bind:value={config[configKey]}
+          error={$validation.errors[configKey]}
         />
       {/if}
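
The validation helper's contract as implied by its usage above: a Svelte store exposing valid and errors, plus addValidatorType and check. This stand-in is inferred for illustration and is not the actual helpers/validation/yup implementation:

import { writable } from "svelte/store"

// Inferred stand-in for createValidationStore: addValidatorType registers
// a per-key rule, check(config) recomputes errors, and subscribers see
// { valid, errors }
export const createValidationStore = () => {
  const validators = {}
  const state = writable({ valid: true, errors: {} })

  const addValidatorType = (key, type, required) => {
    validators[key] = { type, required }
  }

  const check = config => {
    const errors = {}
    for (const [key, rule] of Object.entries(validators)) {
      if (rule.required && !config?.[key]) {
        errors[key] = `${key} is required`
      }
    }
    state.set({ valid: Object.keys(errors).length === 0, errors })
  }

  return { subscribe: state.subscribe, addValidatorType, check }
}
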
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte
index c8a5bc96eb..edbe55178f 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte
@@ -13,6 +13,7 @@
   // kill the reference so the input isn't saved
   let datasource = cloneDeep(integration)
   let skipFetch = false
+  let isValid = false

   $: name =
     IntegrationNames[datasource.type] || datasource.name || datasource.type
@@ -53,6 +54,7 @@
     return true
   }}
   size="L"
+  disabled={!isValid}
 >
   (isValid = e.detail)}
   />
diff --git a/packages/builder/src/components/design/settings/componentSettings.js b/packages/builder/src/components/design/settings/componentSettings.js
index c3b81968f4..441993fe1c 100644
--- a/packages/builder/src/components/design/settings/componentSettings.js
+++ b/packages/builder/src/components/design/settings/componentSettings.js
@@ -7,6 +7,7 @@
 import TableSelect from "./controls/TableSelect.svelte"
 import ColorPicker from "./controls/ColorPicker.svelte"
 import { IconSelect } from "./controls/IconSelect"
 import FieldSelect from "./controls/FieldSelect.svelte"
+import SortableFieldSelect from "./controls/SortableFieldSelect.svelte"
 import MultiFieldSelect from "./controls/MultiFieldSelect.svelte"
 import SearchFieldSelect from "./controls/SearchFieldSelect.svelte"
 import SchemaSelect from "./controls/SchemaSelect.svelte"
@@ -41,6 +42,7 @@
   filter: FilterEditor,
   url: URLSelect,
   columns: ColumnEditor,
+  "field/sortable": SortableFieldSelect,
   "field/string": FormFieldSelect,
   "field/number": FormFieldSelect,
   "field/options": FormFieldSelect,
diff --git a/packages/builder/src/components/design/settings/controls/SortableFieldSelect.svelte b/packages/builder/src/components/design/settings/controls/SortableFieldSelect.svelte
new file mode 100644
index 0000000000..21ed68ce68
--- /dev/null
+++ b/packages/builder/src/components/design/settings/controls/SortableFieldSelect.svelte
@@ -0,0 +1,47 @@
+
+
+
+
+
diff --git a/packages/builder/src/components/portal/overview/backups/AppSizeRenderer.svelte b/packages/builder/src/components/portal/overview/backups/AppSizeRenderer.svelte
new file mode 100644
index 0000000000..c103399f5b
--- /dev/null
+++ b/packages/builder/src/components/portal/overview/backups/AppSizeRenderer.svelte
@@ -0,0 +1,41 @@
+
+
+
+ {#if automations != null && screens != null && datasources != null} +
+ +
{datasources || 0}
+
+
+ +
{screens || 0}
+
+
+ +
{automations || 0}
+
+ {/if} +
+
+
diff --git a/packages/builder/src/components/portal/overview/backups/BackupsTab.svelte b/packages/builder/src/components/portal/overview/backups/BackupsTab.svelte
new file mode 100644
index 0000000000..9a9dc3c5c0
--- /dev/null
+++ b/packages/builder/src/components/portal/overview/backups/BackupsTab.svelte
@@ -0,0 +1,345 @@
+
+
+ {#if !$licensing.backupsEnabled} + + +
+ Backups
+
+ Pro plan
+
+
+ Back up your apps and restore them to their previous state.
+ {#if !$auth.accountPortalAccess && !$licensing.groupsEnabled && $admin.cloud}
+ Contact your account holder to upgrade your plan.
+ {/if}
+
+ +
+ {#if $auth.accountPortalAccess}
+
+ {/if}
+
+
+
+
+ {:else if backupData?.length === 0 && !loaded && !filterOpt && !startDate} + +
+ BackupsDefault
+
+
+ You have no backups yet
+ You can manually back up your app any time
+
+ +
+
+
+
+ {:else if loaded} + +