
Merge branch 'develop' of github.com:Budibase/budibase into side-panel

Andrew Kingston 2022-11-09 16:57:24 +00:00
commit ce4f8ef5f9
127 changed files with 3691 additions and 694 deletions

View file

@ -84,6 +84,8 @@ spec:
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}
@ -156,6 +158,8 @@ spec:
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
image: budibase/apps:{{ .Values.globals.appVersion }}
imagePullPolicy: Always

View file

@ -83,6 +83,8 @@ spec:
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY
@ -145,6 +147,8 @@ spec:
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always

View file

@ -98,6 +98,7 @@ globals:
# if createSecrets is set to false, you can hard-code your secrets here
internalApiKey: ""
jwtSecret: ""
cdnUrl: ""
smtp:
enabled: false

View file

@ -66,6 +66,15 @@ http {
proxy_set_header Connection "";
}
location /api/backups/ {
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_pass http://app-service;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /api/ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;

View file

@ -51,11 +51,11 @@ http {
proxy_buffering off;
set $csp_default "default-src 'self'";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
set $csp_frame "frame-src 'self' https:";
set $csp_img "img-src http: https: data: blob:";
@ -116,6 +116,15 @@ http {
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/backups/ {
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_pass http://app-service;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
@ -171,11 +180,13 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://$minio:9000;
}

View file

@ -1,5 +1,5 @@
{
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,7 +20,7 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "2.0.34-alpha.1",
"@budibase/types": "2.1.22-alpha.0",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",

View file

@ -24,10 +24,15 @@ import {
} from "./middleware"
import { invalidateUser } from "./cache/user"
import { User } from "@budibase/types"
import { logAlert } from "./logging"
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
if (jwt.options.secretOrKey) {
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
} else {
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
}
passport.serializeUser((user: User, done: any) => done(null, user))

View file

@ -6,6 +6,7 @@ import { baseGlobalDBName } from "../db/tenancy"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKey } from "./constants"
import PouchDB from "pouchdb"
import {
updateUsing,
closeWithUsing,
@ -22,16 +23,15 @@ export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
let TEST_APP_ID: string | null = null
export const closeTenancy = async () => {
let db
try {
if (env.USE_COUCH) {
db = getGlobalDB()
const db = getGlobalDB()
await closeDB(db)
}
} catch (err) {
// no DB found - skip closing
return
}
await closeDB(db)
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKey.TENANT_ID, null)
cls.setOnContext(ContextKey.GLOBAL_DB, null)

View file

@ -4,6 +4,7 @@ import * as events from "./events"
import * as migrations from "./migrations"
import * as users from "./users"
import * as roles from "./security/roles"
import * as permissions from "./security/permissions"
import * as accounts from "./cloud/accounts"
import * as installation from "./installation"
import env from "./environment"
@ -65,6 +66,7 @@ const core = {
middleware,
encryption,
queue,
permissions,
}
export = core

View file

@ -152,6 +152,7 @@ export = (
return next()
}
} catch (err: any) {
console.error("Auth Error", err?.message || err)
// invalid token, clear the cookie
if (err && err.name === "JsonWebTokenError") {
clearCookie(ctx, Cookies.Auth)

View file

@ -22,7 +22,19 @@ type ListParams = {
ContinuationToken?: string
}
type UploadParams = {
bucket: string
filename: string
path: string
type?: string
// can be undefined, we will remove it
metadata?: {
[key: string]: string | undefined
}
}
const CONTENT_TYPE_MAP: any = {
txt: "text/plain",
html: "text/html",
css: "text/css",
js: "application/javascript",
@ -149,20 +161,32 @@ export const upload = async ({
path,
type,
metadata,
}: any) => {
}: UploadParams) => {
const extension = filename.split(".").pop()
const fileBytes = fs.readFileSync(path)
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
let contentType = type
if (!contentType) {
contentType = extension
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
}
const config: any = {
// windows file paths need to be converted to forward slashes for s3
Key: sanitizeKey(filename),
Body: fileBytes,
ContentType: type || CONTENT_TYPE_MAP[extension.toLowerCase()],
ContentType: contentType,
}
if (metadata) {
if (metadata && typeof metadata === "object") {
// remove any nullish keys from the metadata object, as these may be considered invalid
for (let key of Object.keys(metadata)) {
if (!metadata[key] || typeof metadata[key] !== "string") {
delete metadata[key]
}
}
config.Metadata = metadata
}
return objectStore.upload(config).promise()
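
The upload change above tightens the parameter type and strips nullish metadata entries before handing the config to S3. A minimal standalone sketch of that clean-up step, not taken from the commit (the sanitizeMetadata name is illustrative):

```ts
// Illustrative sketch: S3 rejects undefined metadata values, so drop any
// key whose value is not a non-empty string before attaching it to the upload.
type S3Metadata = { [key: string]: string | undefined }

function sanitizeMetadata(
  metadata?: S3Metadata
): { [key: string]: string } | undefined {
  if (!metadata || typeof metadata !== "object") {
    return undefined
  }
  const clean: { [key: string]: string } = {}
  for (const key of Object.keys(metadata)) {
    const value = metadata[key]
    if (value && typeof value === "string") {
      clean[key] = value
    }
  }
  return Object.keys(clean).length ? clean : undefined
}

// e.g. sanitizeMetadata({ appId: "app_123", owner: undefined }) -> { appId: "app_123" }
```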

View file

@ -1,5 +1,6 @@
const { join } = require("path")
const { tmpdir } = require("os")
const fs = require("fs")
const env = require("../environment")
/****************************************************
@ -16,6 +17,11 @@ exports.ObjectStoreBuckets = {
PLUGINS: env.PLUGIN_BUCKET_NAME,
}
exports.budibaseTempDir = function () {
return join(tmpdir(), ".budibase")
const bbTmp = join(tmpdir(), ".budibase")
if (!fs.existsSync(bbTmp)) {
fs.mkdirSync(bbTmp)
}
exports.budibaseTempDir = function () {
return bbTmp
}

View file

@ -1,4 +1,5 @@
import events from "events"
import { timeout } from "../../utils"
/**
* Bull works with a Job wrapper around all messages that contains a lot more information about
@ -27,6 +28,7 @@ class InMemoryQueue {
_opts?: any
_messages: any[]
_emitter: EventEmitter
_runCount: number
/**
* The constructor the queue, exactly the same as that of Bulls.
* @param {string} name The name of the queue which is being configured.
@ -38,6 +40,7 @@ class InMemoryQueue {
this._opts = opts
this._messages = []
this._emitter = new events.EventEmitter()
this._runCount = 0
}
/**
@ -59,6 +62,7 @@ class InMemoryQueue {
if (resp.then != null) {
await resp
}
this._runCount++
})
}
@ -122,6 +126,15 @@ class InMemoryQueue {
on() {
// do nothing
}
async waitForCompletion() {
const currentCount = this._runCount
let increased = false
do {
await timeout(50)
increased = this._runCount > currentCount
} while (!increased)
}
}
export = InMemoryQueue
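
The new waitForCompletion above simply polls the _runCount counter that the processor increments. A hypothetical test-side usage, assuming the Bull-style process/add methods this class mimics (queue name and payload are made up):

```ts
// Hypothetical usage sketch: block a test until the in-memory queue has
// processed at least one more job before asserting on side effects.
const queue = new InMemoryQueue("automationQueue")

queue.process(async (job: any) => {
  // handle job.data here
})

async function runAndWait() {
  queue.add({ appId: "app_123" })
  // resolves once _runCount has increased past its value at call time
  await queue.waitForCompletion()
}
```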

View file

@ -1,21 +1,27 @@
const { cloneDeep } = require("lodash/fp")
const { BUILTIN_PERMISSION_IDS, PermissionLevels } = require("./permissions")
const {
import { BUILTIN_PERMISSION_IDS, PermissionLevels } from "./permissions"
import {
generateRoleID,
getRoleParams,
DocumentType,
SEPARATOR,
} = require("../db/utils")
const { getAppDB } = require("../context")
const { doWithDB } = require("../db")
} from "../db/utils"
import { getAppDB } from "../context"
import { doWithDB } from "../db"
import { Screen, Role as RoleDoc } from "@budibase/types"
const { cloneDeep } = require("lodash/fp")
const BUILTIN_IDS = {
export const BUILTIN_ROLE_IDS = {
ADMIN: "ADMIN",
POWER: "POWER",
BASIC: "BASIC",
PUBLIC: "PUBLIC",
}
const BUILTIN_IDS = {
...BUILTIN_ROLE_IDS,
BUILDER: "BUILDER",
}
// exclude internal roles like builder
const EXTERNAL_BUILTIN_ROLE_IDS = [
BUILTIN_IDS.ADMIN,
@ -24,19 +30,26 @@ const EXTERNAL_BUILTIN_ROLE_IDS = [
BUILTIN_IDS.PUBLIC,
]
function Role(id, name) {
this._id = id
this.name = name
}
export class Role {
_id: string
name: string
permissionId?: string
inherits?: string
Role.prototype.addPermission = function (permissionId) {
this.permissionId = permissionId
return this
}
constructor(id: string, name: string) {
this._id = id
this.name = name
}
Role.prototype.addInheritance = function (inherits) {
this.inherits = inherits
return this
addPermission(permissionId: string) {
this.permissionId = permissionId
return this
}
addInheritance(inherits: string) {
this.inherits = inherits
return this
}
}
const BUILTIN_ROLES = {
@ -57,27 +70,30 @@ const BUILTIN_ROLES = {
),
}
exports.getBuiltinRoles = () => {
export function getBuiltinRoles() {
return cloneDeep(BUILTIN_ROLES)
}
exports.BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
role => role._id
)
exports.BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
role => role.name
)
function isBuiltin(role) {
return exports.BUILTIN_ROLE_ID_ARRAY.some(builtin => role.includes(builtin))
export function isBuiltin(role?: string) {
return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
}
/**
* Works through the inheritance ranks to see how far up the builtin stack this ID is.
*/
exports.builtinRoleToNumber = id => {
const builtins = exports.getBuiltinRoles()
export function builtinRoleToNumber(id?: string) {
if (!id) {
return 0
}
const builtins = getBuiltinRoles()
const MAX = Object.values(builtins).length + 1
if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
return MAX
@ -97,14 +113,14 @@ exports.builtinRoleToNumber = id => {
/**
* Converts any role to a number, but has to be async to get the roles from db.
*/
exports.roleToNumber = async id => {
if (exports.isBuiltin(id)) {
return exports.builtinRoleToNumber(id)
export async function roleToNumber(id?: string) {
if (isBuiltin(id)) {
return builtinRoleToNumber(id)
}
const hierarchy = await exports.getUserRoleHierarchy(id)
const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[]
for (let role of hierarchy) {
if (isBuiltin(role.inherits)) {
return exports.builtinRoleToNumber(role.inherits) + 1
if (isBuiltin(role?.inherits)) {
return builtinRoleToNumber(role.inherits) + 1
}
}
return 0
@ -113,15 +129,14 @@ exports.roleToNumber = async id => {
/**
* Returns whichever builtin roleID is lower.
*/
exports.lowerBuiltinRoleID = (roleId1, roleId2) => {
export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string) {
if (!roleId1) {
return roleId2
}
if (!roleId2) {
return roleId1
}
return exports.builtinRoleToNumber(roleId1) >
exports.builtinRoleToNumber(roleId2)
return builtinRoleToNumber(roleId1) > builtinRoleToNumber(roleId2)
? roleId2
: roleId1
}
@ -132,11 +147,11 @@ exports.lowerBuiltinRoleID = (roleId1, roleId2) => {
* @param {string|null} roleId The level ID to lookup.
* @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
*/
exports.getRole = async roleId => {
export async function getRole(roleId?: string) {
if (!roleId) {
return null
}
let role = {}
let role: any = {}
// built in roles mostly come from the in-code implementation,
// but can be extended by a doc stored about them (e.g. permissions)
if (isBuiltin(roleId)) {
@ -146,10 +161,10 @@ exports.getRole = async roleId => {
}
try {
const db = getAppDB()
const dbRole = await db.get(exports.getDBRoleID(roleId))
const dbRole = await db.get(getDBRoleID(roleId))
role = Object.assign(role, dbRole)
// finalise the ID
role._id = exports.getExternalRoleID(role._id)
role._id = getExternalRoleID(role._id)
} catch (err) {
// only throw an error if there is no role at all
if (Object.keys(role).length === 0) {
@ -162,12 +177,12 @@ exports.getRole = async roleId => {
/**
* Simple function to get all the roles based on the top level user role ID.
*/
async function getAllUserRoles(userRoleId) {
async function getAllUserRoles(userRoleId?: string): Promise<RoleDoc[]> {
// admins have access to all roles
if (userRoleId === BUILTIN_IDS.ADMIN) {
return exports.getAllRoles()
return getAllRoles()
}
let currentRole = await exports.getRole(userRoleId)
let currentRole = await getRole(userRoleId)
let roles = currentRole ? [currentRole] : []
let roleIds = [userRoleId]
// get all the inherited roles
@ -177,7 +192,7 @@ async function getAllUserRoles(userRoleId) {
roleIds.indexOf(currentRole.inherits) === -1
) {
roleIds.push(currentRole.inherits)
currentRole = await exports.getRole(currentRole.inherits)
currentRole = await getRole(currentRole.inherits)
roles.push(currentRole)
}
return roles
@ -191,7 +206,10 @@ async function getAllUserRoles(userRoleId) {
* @returns {Promise<string[]|object[]>} returns an ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level.
*/
exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => {
export async function getUserRoleHierarchy(
userRoleId?: string,
opts = { idOnly: true }
) {
// special case, if they don't have a role then they are a public user
const roles = await getAllUserRoles(userRoleId)
return opts.idOnly ? roles.map(role => role._id) : roles
@ -200,9 +218,12 @@ exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => {
// this function checks that the provided permissions are in an array format
// some templates/older apps will use a simple string instead of array for roles
// convert the string to an array using the theory that write is higher than read
exports.checkForRoleResourceArray = (rolePerms, resourceId) => {
export function checkForRoleResourceArray(
rolePerms: { [key: string]: string[] },
resourceId: string
) {
if (rolePerms && !Array.isArray(rolePerms[resourceId])) {
const permLevel = rolePerms[resourceId]
const permLevel = rolePerms[resourceId] as any
rolePerms[resourceId] = [permLevel]
if (permLevel === PermissionLevels.WRITE) {
rolePerms[resourceId].push(PermissionLevels.READ)
@ -215,7 +236,7 @@ exports.checkForRoleResourceArray = (rolePerms, resourceId) => {
* Given an app ID this will retrieve all of the roles that are currently within that app.
* @return {Promise<object[]>} An array of the role objects that were found.
*/
exports.getAllRoles = async appId => {
export async function getAllRoles(appId?: string) {
if (appId) {
return doWithDB(appId, internal)
} else {
@ -227,30 +248,30 @@ exports.getAllRoles = async appId => {
}
return internal(appDB)
}
async function internal(db) {
let roles = []
async function internal(db: any) {
let roles: RoleDoc[] = []
if (db) {
const body = await db.allDocs(
getRoleParams(null, {
include_docs: true,
})
)
roles = body.rows.map(row => row.doc)
roles = body.rows.map((row: any) => row.doc)
}
const builtinRoles = exports.getBuiltinRoles()
const builtinRoles = getBuiltinRoles()
// need to combine builtin with any DB record of them (for sake of permissions)
for (let builtinRoleId of EXTERNAL_BUILTIN_ROLE_IDS) {
const builtinRole = builtinRoles[builtinRoleId]
const dbBuiltin = roles.filter(
dbRole => exports.getExternalRoleID(dbRole._id) === builtinRoleId
dbRole => getExternalRoleID(dbRole._id) === builtinRoleId
)[0]
if (dbBuiltin == null) {
roles.push(builtinRole || builtinRoles.BASIC)
} else {
// remove role and all back after combining with the builtin
roles = roles.filter(role => role._id !== dbBuiltin._id)
dbBuiltin._id = exports.getExternalRoleID(dbBuiltin._id)
dbBuiltin._id = getExternalRoleID(dbBuiltin._id)
roles.push(Object.assign(builtinRole, dbBuiltin))
}
}
@ -260,7 +281,7 @@ exports.getAllRoles = async appId => {
continue
}
for (let resourceId of Object.keys(role.permissions)) {
role.permissions = exports.checkForRoleResourceArray(
role.permissions = checkForRoleResourceArray(
role.permissions,
resourceId
)
@ -277,11 +298,11 @@ exports.getAllRoles = async appId => {
* @param subResourceId The sub resource being requested
* @return {Promise<{permissions}|Object>} returns the permissions required to access.
*/
exports.getRequiredResourceRole = async (
permLevel,
{ resourceId, subResourceId }
) => {
const roles = await exports.getAllRoles()
export async function getRequiredResourceRole(
permLevel: string,
{ resourceId, subResourceId }: { resourceId?: string; subResourceId?: string }
) {
const roles = await getAllRoles()
let main = [],
sub = []
for (let role of roles) {
@ -289,8 +310,8 @@ exports.getRequiredResourceRole = async (
if (!role.permissions) {
continue
}
const mainRes = role.permissions[resourceId]
const subRes = role.permissions[subResourceId]
const mainRes = resourceId ? role.permissions[resourceId] : undefined
const subRes = subResourceId ? role.permissions[subResourceId] : undefined
if (mainRes && mainRes.indexOf(permLevel) !== -1) {
main.push(role._id)
} else if (subRes && subRes.indexOf(permLevel) !== -1) {
@ -301,12 +322,13 @@ exports.getRequiredResourceRole = async (
return main.concat(sub)
}
class AccessController {
export class AccessController {
userHierarchies: { [key: string]: string[] }
constructor() {
this.userHierarchies = {}
}
async hasAccess(tryingRoleId, userRoleId) {
async hasAccess(tryingRoleId?: string, userRoleId?: string) {
// special cases, the screen has no role, the roles are the same or the user
// is currently in the builder
if (
@ -318,16 +340,18 @@ class AccessController {
) {
return true
}
let roleIds = this.userHierarchies[userRoleId]
if (!roleIds) {
roleIds = await exports.getUserRoleHierarchy(userRoleId)
let roleIds = userRoleId ? this.userHierarchies[userRoleId] : null
if (!roleIds && userRoleId) {
roleIds = (await getUserRoleHierarchy(userRoleId, {
idOnly: true,
})) as string[]
this.userHierarchies[userRoleId] = roleIds
}
return roleIds.indexOf(tryingRoleId) !== -1
return roleIds?.indexOf(tryingRoleId) !== -1
}
async checkScreensAccess(screens, userRoleId) {
async checkScreensAccess(screens: Screen[], userRoleId: string) {
let accessibleScreens = []
// don't want to handle this with Promise.all as this would mean all custom roles would be
// retrieved at same time, it is likely a custom role will be re-used and therefore want
@ -341,8 +365,8 @@ class AccessController {
return accessibleScreens
}
async checkScreenAccess(screen, userRoleId) {
const roleId = screen && screen.routing ? screen.routing.roleId : null
async checkScreenAccess(screen: Screen, userRoleId: string) {
const roleId = screen && screen.routing ? screen.routing.roleId : undefined
if (await this.hasAccess(roleId, userRoleId)) {
return screen
}
@ -353,8 +377,8 @@ class AccessController {
/**
* Adds the "role_" for builtin role IDs which are to be written to the DB (for permissions).
*/
exports.getDBRoleID = roleId => {
if (roleId.startsWith(DocumentType.ROLE)) {
export function getDBRoleID(roleId?: string) {
if (roleId?.startsWith(DocumentType.ROLE)) {
return roleId
}
return generateRoleID(roleId)
@ -363,15 +387,10 @@ exports.getDBRoleID = roleId => {
/**
* Remove the "role_" from builtin role IDs that have been written to the DB (for permissions).
*/
exports.getExternalRoleID = roleId => {
// for built in roles we want to remove the DB role ID element (role_)
if (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) {
export function getExternalRoleID(roleId?: string) {
// for built-in roles we want to remove the DB role ID element (role_)
if (roleId?.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) {
return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]
}
return roleId
}
exports.AccessController = AccessController
exports.BUILTIN_ROLE_IDS = BUILTIN_IDS
exports.isBuiltin = isBuiltin
exports.Role = Role
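
The conversion above keeps the old prototype-style chaining on the new Role class. For illustration only (the role ID, name and permission ID below are placeholders, not from the commit):

```ts
// Illustrative sketch: defining a custom role with the class-based API.
const readonlyRole = new Role("READONLY", "Readonly")
  .addPermission("read_only") // placeholder permission ID
  .addInheritance(BUILTIN_ROLE_IDS.PUBLIC)
```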

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "2.0.34-alpha.1",
"@budibase/string-templates": "2.1.22-alpha.0",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

View file

@ -139,7 +139,7 @@
{#if open}
<div
use:clickOutside={handleOutsideClick}
transition:fly={{ y: -20, duration: 200 }}
transition:fly|local={{ y: -20, duration: 200 }}
class="spectrum-Popover spectrum-Popover--bottom spectrum-Picker-popover is-open"
class:spectrum-Popover--align-right={alignRight}
>

View file

@ -41,7 +41,7 @@
time_24hr: time24hr || false,
altFormat: timeOnly ? "H:i" : enableTime ? "F j Y, H:i" : "F j, Y",
wrap: true,
mode: range ? "range" : null,
mode: range ? "range" : "single",
appendTo,
disableMobile: "true",
onReady: () => {
@ -66,7 +66,7 @@
newValue = newValue.toISOString()
}
// If time only set date component to 2000-01-01
else if (timeOnly) {
if (timeOnly) {
// Classic flackpickr causing issues.
// When selecting a value for the first time for a "time only" field,
// the time is always offset by 1 hour for some reason (regardless of time

View file

@ -43,6 +43,7 @@
let selectedImageIdx = 0
let fileDragged = false
let selectedUrl
let fileInput
$: selectedImage = value?.[selectedImageIdx] ?? null
$: fileCount = value?.length ?? 0
$: isImage =
@ -102,6 +103,7 @@
await deleteAttachments(
value.filter((x, idx) => idx === selectedImageIdx).map(item => item.key)
)
fileInput.value = ""
}
selectedImageIdx = 0
}
@ -234,6 +236,7 @@
type="file"
multiple
accept={extensions}
bind:this={fileInput}
on:change={handleFile}
/>
<svg

View file

@ -102,6 +102,13 @@
}
return value
}
const handleOutsideClick = event => {
if (open) {
event.stopPropagation()
open = false
}
}
</script>
<div
@ -151,7 +158,7 @@
{disabled}
class:is-open={open}
aria-haspopup="listbox"
on:mousedown={onClick}
on:click={onClick}
>
<span class="spectrum-Picker-label">
<div>
@ -168,7 +175,7 @@
</button>
{#if open}
<div
use:clickOutside={() => (open = false)}
use:clickOutside={handleOutsideClick}
transition:fly|local={{ y: -20, duration: 200 }}
class="spectrum-Popover spectrum-Popover--bottom spectrum-Picker-popover is-open"
>

View file

@ -16,8 +16,10 @@
export let autoWidth = false
const dispatch = createEventDispatcher()
$: selectedLookupMap = getSelectedLookupMap(value)
$: optionLookupMap = getOptionLookupMap(options)
$: fieldText = getFieldText(value, optionLookupMap, placeholder)
$: isOptionSelected = optionValue => selectedLookupMap[optionValue] === true
$: toggleOption = makeToggleOption(selectedLookupMap, value)

View file

@ -61,6 +61,7 @@
const onPickPrimary = newValue => {
dispatch("pickprimary", newValue)
primaryOpen = false
dispatch("closed")
}
const onClearPrimary = () => {
@ -92,6 +93,7 @@
if (primaryOpen) {
event.stopPropagation()
primaryOpen = false
dispatch("closed")
}
}

View file

@ -128,5 +128,6 @@
on:blur
on:focus
on:keyup
on:closed
/>
</Field>

File diff suppressed because it is too large

View file

@ -2,7 +2,7 @@ import filterTests from "../support/filterTests"
const interact = require('../support/interact')
filterTests(['all'], () => {
context("Add Multi-Option Datatype", () => {
xcontext("Add Multi-Option Datatype", () => {
before(() => {
cy.login()
cy.createTestApp()

View file

@ -2,7 +2,7 @@ import filterTests from "../support/filterTests"
const interact = require('../support/interact')
filterTests(['all'], () => {
context("Add Radio Buttons", () => {
xcontext("Add Radio Buttons", () => {
before(() => {
cy.login()
cy.createTestApp()

View file

@ -1,7 +1,7 @@
import filterTests from "../../support/filterTests"
filterTests(["all"], () => {
context("MySQL Datasource Testing", () => {
xcontext("MySQL Datasource Testing", () => {
if (Cypress.env("TEST_ENV")) {
before(() => {
cy.login()

View file

@ -2,7 +2,7 @@ import filterTests from "../support/filterTests"
const interact = require("../support/interact")
filterTests(["all"], () => {
context("Rename an App", () => {
xcontext("Rename an App", () => {
beforeEach(() => {
cy.login()
cy.createTestApp()

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -71,10 +71,10 @@
}
},
"dependencies": {
"@budibase/bbui": "2.0.34-alpha.1",
"@budibase/client": "2.0.34-alpha.1",
"@budibase/frontend-core": "2.0.34-alpha.1",
"@budibase/string-templates": "2.0.34-alpha.1",
"@budibase/bbui": "2.1.22-alpha.0",
"@budibase/client": "2.1.22-alpha.0",
"@budibase/frontend-core": "2.1.22-alpha.0",
"@budibase/string-templates": "2.1.22-alpha.0",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View file

@ -245,7 +245,10 @@ export const getFrontendStore = () => {
}
},
save: async screen => {
store.actions.screens.validate(screen)
/*
Temporarily disabled to accomodate migration issues.
store.actions.screens.validate(screen)
*/
const state = get(store)
const creatingNewScreen = screen._id === undefined
const savedScreen = await API.saveScreen(screen)

View file

@ -7,7 +7,7 @@
export let webhookModal
</script>
<div class="title">
<div class="nav">
<Tabs selected="Automations">
<Tab title="Automations">
<AutomationList />
@ -27,12 +27,15 @@
top: var(--spacing-l);
right: var(--spacing-xl);
}
.title {
.nav {
overflow-y: auto;
background: var(--background);
display: flex;
flex-direction: column;
justify-content: flex-start;
align-items: stretch;
position: relative;
border-right: var(--border-light);
padding-bottom: 60px;
}
</style>

View file

@ -120,7 +120,7 @@
allSteps[idx]?.stepId === ActionStepID.LOOP &&
allSteps.find(x => x.blockToLoop === block.id)
// If the previous block was a loop block, decerement the index so the following
// If the previous block was a loop block, decrement the index so the following
// steps are in the correct order
if (wasLoopBlock) {
loopBlockCount++

View file

@ -14,6 +14,12 @@
export let block
export let isTestModal
$: parsedBindings = bindings.map(binding => {
let clone = Object.assign({}, binding)
clone.icon = "ShareAndroid"
return clone
})
let table
let schemaFields
@ -79,6 +85,10 @@
return [value]
}
if (type === "json") {
return value.value
}
return value
}
@ -109,7 +119,7 @@
{isTestModal}
{field}
{schema}
{bindings}
bindings={parsedBindings}
{value}
{onChange}
/>
@ -124,7 +134,7 @@
on:change={e => onChange(e, field, schema.type)}
label={field}
type="string"
{bindings}
bindings={parsedBindings}
fillWidth={true}
allowJS={true}
updateOnChange={false}

View file

@ -5,11 +5,13 @@
DatePicker,
Multiselect,
TextArea,
Label,
} from "@budibase/bbui"
import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
import Editor from "components/integration/QueryEditor.svelte"
export let onChange
export let field
@ -18,6 +20,12 @@
export let bindings
export let isTestModal
$: parsedBindings = bindings.map(binding => {
let clone = Object.assign({}, binding)
clone.icon = "ShareAndroid"
return clone
})
function schemaHasOptions(schema) {
return !!schema.constraints?.inclusion?.length
}
@ -50,6 +58,20 @@
/>
{:else if schema.type === "longform"}
<TextArea label={field} bind:value={value[field]} />
{:else if schema.type === "json"}
<span>
<Label>{field}</Label>
<Editor
editorHeight="150"
mode="json"
on:change={e => {
if (e.detail?.value !== value[field]) {
onChange(e, field, schema.type)
}
}}
value={value[field]}
/>
</span>
{:else if schema.type === "link"}
<LinkedRowSelector bind:linkedRows={value[field]} {schema} />
{:else if schema.type === "string" || schema.type === "number"}
@ -60,7 +82,7 @@
on:change={e => onChange(e, field)}
label={field}
type="string"
{bindings}
bindings={parsedBindings}
fillWidth={true}
allowJS={true}
updateOnChange={false}

View file

@ -70,7 +70,11 @@
.map(([key, error]) => ({ dataPath: key, message: error }))
.flat()
}
} else if (error.status === 400) {
} else if (error.status === 400 && response?.validationErrors) {
errors = Object.keys(response.validationErrors).map(field => ({
message: `${field} ${response.validationErrors[field][0]}`,
}))
} else {
errors = [{ message: response?.message || "Unknown error" }]
}
} else {

View file

@ -44,7 +44,10 @@
// run the validation whenever the config changes
$: validation.check(config)
// dispatch the validation result
$: dispatch("valid", $validation.valid)
$: dispatch(
"valid",
Object.values($validation.errors).filter(val => val != null).length === 0
)
let addButton

View file

@ -247,7 +247,7 @@
return
}
hoverTarget = {
title: binding.display?.name || binding.fieldSchema.name,
title: binding.display?.name || binding.fieldSchema?.name,
description: binding.description,
}
popover.show()

View file

@ -305,6 +305,9 @@
getOptionLabel={x => x}
getOptionValue={x => x}
value={rule.value}
on:change={e => {
rule.value = e.detail
}}
/>
{:else if rule.type === "boolean"}
<Select

View file

@ -132,7 +132,20 @@
config={integrationInfo.extra}
/>
{/if}
<BindingBuilder bind:queryBindings={query.parameters} bindable={false} />
{#key query.parameters}
<BindingBuilder
queryBindings={query.parameters}
bindable={false}
on:change={e => {
query.parameters = e.detail.map(binding => {
return {
name: binding.name,
default: binding.value,
}
})
}}
/>
{/key}
{/if}
</div>
{#if shouldShowQueryConfig}

View file

@ -44,14 +44,7 @@
valuePlaceholder="Default"
bindings={[...userBindings]}
bindingDrawerLeft="260px"
on:change={e => {
queryBindings = e.detail.map(binding => {
return {
name: binding.name,
default: binding.value,
}
})
}}
on:change
/>
</div>
</Layout>

View file

@ -10,7 +10,7 @@
} from "@budibase/bbui"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import CreateRestoreModal from "./CreateRestoreModal.svelte"
import { createEventDispatcher } from "svelte"
import { createEventDispatcher, onMount } from "svelte"
export let row
@ -49,6 +49,10 @@
async function downloadExport() {
window.open(`/api/apps/${row.appId}/backups/${row._id}/file`, "_blank")
}
onMount(() => {
name = row.name
})
</script>
<div class="cell">
@ -62,7 +66,7 @@
<MenuItem on:click={deleteDialog.show} icon="Delete">Delete</MenuItem>
<MenuItem on:click={downloadExport} icon="Download">Download</MenuItem>
{/if}
<MenuItem on:click={updateDialog.show} icon="Edit">Update</MenuItem>
<MenuItem on:click={updateDialog.show} icon="Edit">Rename</MenuItem>
</ActionMenu>
</div>
@ -77,7 +81,7 @@
title="Confirm Deletion"
>
Are you sure you wish to delete the backup
<i>{row.name}</i>
<i>{row.name}?</i>
This action cannot be undone.
</ConfirmDialog>
@ -100,7 +104,7 @@
title="Update Backup"
warning={false}
>
<Input onlabel="Backup name" placeholder={row.name} bind:value={name} />
<Input onlabel="Backup name" bind:value={name} />
</ConfirmDialog>
<style>

View file

@ -21,13 +21,14 @@
import AppSizeRenderer from "./AppSizeRenderer.svelte"
import CreateBackupModal from "./CreateBackupModal.svelte"
import ActionsRenderer from "./ActionsRenderer.svelte"
import DateRenderer from "./DateRenderer.svelte"
import DateRenderer from "components/common/renderers/DateTimeRenderer.svelte"
import UserRenderer from "./UserRenderer.svelte"
import StatusRenderer from "./StatusRenderer.svelte"
import TypeRenderer from "./TypeRenderer.svelte"
import NameRenderer from "./NameRenderer.svelte"
import BackupsDefault from "assets/backups-default.png"
import { BackupTrigger, BackupType } from "constants/backend/backups"
import { onMount } from "svelte"
export let app
let backupData = null
@ -36,50 +37,61 @@
let filterOpt = null
let startDate = null
let endDate = null
let filters = getFilters()
let loaded = false
let filters = [
{
label: "Manual backup",
value: { type: BackupType.BACKUP, trigger: BackupTrigger.MANUAL },
},
{
label: "Published backup",
value: { type: BackupType.BACKUP, trigger: BackupTrigger.PUBLISH },
},
{
label: "Scheduled backup",
value: { type: BackupType.BACKUP, trigger: BackupTrigger.SCHEDULED },
},
{
label: "Pre-restore backup",
value: { type: BackupType.BACKUP, trigger: BackupTrigger.RESTORING },
},
{
label: "Manual restore",
value: { type: BackupType.RESTORE, trigger: BackupTrigger.MANUAL },
},
]
$: page = $pageInfo.page
$: fetchBackups(filterOpt, page, startDate, endDate)
function getFilters() {
const options = []
let types = ["backup"]
let triggers = ["manual", "publish", "scheduled", "restoring"]
for (let type of types) {
for (let trigger of triggers) {
let label = `${trigger} ${type}`
label = label.charAt(0).toUpperCase() + label?.slice(1)
options.push({ label, value: { type, trigger } })
}
}
options.push({
label: `Manual restore`,
value: { type: "restore", trigger: "manual" },
})
return options
}
const schema = {
let schema = {
type: {
displayName: "Type",
width: "auto",
},
createdAt: {
displayName: "Date",
width: "auto",
},
name: {
displayName: "Name",
width: "auto",
},
appSize: {
displayName: "App size",
width: "auto",
},
createdBy: {
displayName: "User",
width: "auto",
},
status: {
displayName: "Status",
width: "auto",
},
actions: {
displayName: null,
width: "5%",
},
}
@ -90,6 +102,7 @@
{ column: "createdBy", component: UserRenderer },
{ column: "status", component: StatusRenderer },
{ column: "type", component: TypeRenderer },
{ column: "name", component: NameRenderer },
]
function flattenBackups(backups) {
@ -154,6 +167,7 @@
onMount(() => {
fetchBackups(filterOpt, page, startDate, endDate)
loaded = true
})
</script>
@ -169,7 +183,7 @@
</div>
<div>
<Body>
Backup your apps and restore them to their previous state.
Back up your apps and restore them to their previous state.
{#if !$auth.accountPortalAccess && !$licensing.groupsEnabled && $admin.cloud}
Contact your account holder to upgrade your plan.
{/if}
@ -195,12 +209,32 @@
window.open("https://budibase.com/pricing/", "_blank")
}}
>
View Plans
View plans
</Button>
</div>
</Layout>
</Page>
{:else if backupData?.length > 0}
{:else if backupData?.length === 0 && !loaded && !filterOpt && !startDate}
<Page wide={false}>
<div class="align">
<img
width="220px"
height="130px"
src={BackupsDefault}
alt="BackupsDefault"
/>
<Layout gap="S">
<Heading>You have no backups yet</Heading>
<div class="opacity">
<Body size="S">You can manually backup your app any time</Body>
</div>
<div class="padding">
<Button on:click={modal.show} cta>Create Backup</Button>
</div>
</Layout>
</div>
</Page>
{:else if loaded}
<Layout noPadding gap="M" alignContent="start">
<div class="search">
<div class="select">
@ -232,9 +266,10 @@
>
</div>
</div>
<div>
<div class="table">
<Table
{schema}
disableSorting
allowSelectRows={false}
allowEditColumns={false}
allowEditRows={false}
@ -255,26 +290,6 @@
</div>
</div>
</Layout>
{:else if backupData?.length === 0}
<Page wide={false}>
<div class="align">
<img
width="200px"
height="120px"
src={BackupsDefault}
alt="BackupsDefault"
/>
<Layout gap="S">
<Heading>You have no backups yet</Heading>
<div class="opacity">
<Body size="S">You can manually backup your app any time</Body>
</div>
<div class="padding">
<Button on:click={modal.show} cta>Create Backup</Button>
</div>
</Layout>
</div>
</Page>
{/if}
</div>
@ -299,7 +314,7 @@
}
.select {
flex-basis: 150px;
flex-basis: 100px;
}
.pagination {
@ -333,4 +348,8 @@
display: flex;
gap: var(--spacing-m);
}
.table {
overflow-x: scroll;
}
</style>

View file

@ -13,6 +13,7 @@
<ModalContent
onConfirm={() => createManualBackup(name)}
title="Create new backup"
diabled={!name}
confirmText="Create"
><Input label="Backup name" bind:value={name} /></ModalContent
>

View file

@ -12,7 +12,7 @@
<ModalContent
onConfirm={() => confirm(name)}
title="Backup your current version"
title="Back up your current version"
confirmText="Confirm Restore"
disabled={!name}
>

View file

@ -1,22 +0,0 @@
<script>
import DateTimeRenderer from "components/common/renderers/DateTimeRenderer.svelte"
import dayjs from "dayjs"
import relativeTime from "dayjs/plugin/relativeTime"
dayjs.extend(relativeTime)
export let value
$: timeSince = dayjs(value).fromNow()
</script>
<div class="cell">
{timeSince} - <DateTimeRenderer {value} />
</div>
<style>
.cell {
display: flex;
flex-direction: row;
gap: var(--spacing-m);
align-items: center;
}
</style>

View file

@ -0,0 +1,8 @@
<script>
import { truncate } from "lodash"
export let value
$: truncatedValue = truncate(value, { length: 12 })
</script>
{truncatedValue}

View file

@ -1,13 +1,29 @@
<script>
import { BackupTrigger } from "constants/backend/backups"
export let row
$: baseTrig = row?.trigger || "manual"
$: trigger = row?.trigger || "manual"
$: type = row?.type || "backup"
$: trigger = baseTrig.charAt(0).toUpperCase() + baseTrig.slice(1)
function printTrigger(trig) {
let final = "undefined"
switch (trig) {
case BackupTrigger.PUBLISH:
final = "published"
break
case BackupTrigger.RESTORING:
final = "pre-restore"
break
default:
final = trig
break
}
return final.charAt(0).toUpperCase() + final.slice(1)
}
</script>
<div class="cell">
{trigger}
{printTrigger(trigger)}
{type}
</div>

View file

@ -0,0 +1,11 @@
export const BackupTrigger = {
MANUAL: "manual",
PUBLISH: "publish",
RESTORING: "restoring",
SCHEDULED: "scheduled",
}
export const BackupType = {
BACKUP: "backup",
RESTORE: "restore",
}

View file

@ -28,15 +28,13 @@ export const createValidationStore = () => {
let propertyValidator
switch (type) {
case "number":
propertyValidator = number().transform(value =>
isNaN(value) ? undefined : value
)
propertyValidator = number().nullable()
break
case "email":
propertyValidator = string().email()
propertyValidator = string().email().nullable()
break
default:
propertyValidator = string()
propertyValidator = string().nullable()
}
if (required) {
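
The switch to nullable validators above stops empty optional fields failing yup's type check before the required rule runs. A minimal sketch of the same idea in isolation (yup import assumed):

```ts
import { number, string } from "yup"

// Nullable base schemas: null/empty values pass unless .required() is added.
const optionalNumber = number().nullable()
const optionalEmail = string().email().nullable()
```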

View file

@ -383,10 +383,5 @@
.user-dropdown {
flex: 0 1 0;
}
/* Reduce BBUI page padding */
.content :global(> *) {
padding: calc(var(--spacing-xl) * 1.5) !important;
}
}
</style>

View file

@ -5,9 +5,16 @@
const dispatch = createEventDispatcher()
let filter = null
$: filteredGroups = !filter
? $groups
: $groups.filter(group =>
group.name?.toLowerCase().includes(filter.toLowerCase())
)
$: optionSections = {
groups: {
data: $groups,
data: filteredGroups,
getLabel: group => group.name,
getValue: group => group._id,
getIcon: group => group.icon,
@ -15,21 +22,28 @@
},
}
$: appData = [{ id: "", role: "" }]
$: onChange = selected => {
const { detail } = selected
if (!detail) return
if (!detail || Object.keys(detail).length == 0) {
dispatch("change", null)
return
}
const groupSelected = $groups.find(x => x._id === detail)
const appIds = groupSelected?.apps || null
dispatch("change", appIds)
const appRoleIds = groupSelected?.roles
? Object.keys(groupSelected?.roles)
: []
dispatch("change", appRoleIds)
}
</script>
<PickerDropdown
autocomplete
bind:searchTerm={filter}
primaryOptions={optionSections}
placeholder={"Filter by access"}
on:pickprimary={onChange}
on:closed={() => {
filter = null
}}
/>

View file

@ -20,7 +20,14 @@
import { store, automationStore } from "builderStore"
import { API } from "api"
import { onMount } from "svelte"
import { apps, auth, admin, templates, licensing } from "stores/portal"
import {
apps,
auth,
admin,
templates,
licensing,
groups,
} from "stores/portal"
import { goto } from "@roxi/routify"
import AppRow from "components/start/AppRow.svelte"
import { AppStatus } from "constants"
@ -59,10 +66,15 @@
$: enrichedApps = enrichApps($apps, $auth.user, sortBy)
$: filteredApps = enrichedApps.filter(
app =>
app?.name?.toLowerCase().includes(searchTerm.toLowerCase()) &&
(accessFilterList !== null ? accessFilterList.includes(app?.appId) : true)
(searchTerm
? app?.name?.toLowerCase().includes(searchTerm.toLowerCase())
: true) &&
(accessFilterList !== null
? accessFilterList?.includes(
`${app?.type}_${app?.tenantId}_${app?.appId}`
)
: true)
)
$: lockedApps = filteredApps.filter(app => app?.lockedYou || app?.lockedOther)
$: unlocked = lockedApps?.length === 0
$: automationErrors = getAutomationErrors(enrichedApps)
@ -155,11 +167,13 @@
const autoCreateApp = async () => {
try {
// Auto name app if has same name
let appName = template.key
const templateKey = template.key.split("/")[1]
let appName = templateKey.replace(/-/g, " ")
const appsWithSameName = $apps.filter(app =>
app.name?.startsWith(appName)
)
appName = `${appName}-${appsWithSameName.length + 1}`
appName = `${appName} ${appsWithSameName.length + 1}`
// Create form data to create app
let data = new FormData()
@ -231,6 +245,10 @@
// always load latest
await licensing.init()
if ($licensing.groupsEnabled) {
await groups.actions.init()
}
if ($templates?.length === 0) {
notifications.error(
"There was a problem loading quick start templates."

View file

@ -391,11 +391,7 @@
gap: var(--spacing-l);
}
}
@media (max-width: 640px) {
.overview-wrap :global(.content > *) {
padding: calc(var(--spacing-xl) * 1.5) !important;
}
}
.app-title {
display: flex;
gap: var(--spacing-m);

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@ -26,9 +26,9 @@
"outputPath": "build"
},
"dependencies": {
"@budibase/backend-core": "2.0.34-alpha.1",
"@budibase/string-templates": "2.0.34-alpha.1",
"@budibase/types": "2.0.34-alpha.1",
"@budibase/backend-core": "2.1.22-alpha.0",
"@budibase/string-templates": "2.1.22-alpha.0",
"@budibase/types": "2.1.22-alpha.0",
"axios": "0.21.2",
"chalk": "4.1.0",
"cli-progress": "3.11.2",

View file

@ -4,7 +4,7 @@ const fs = require("fs")
const { join } = require("path")
const { getAllDbs } = require("../core/db")
const tar = require("tar")
const { progressBar } = require("../utils")
const { progressBar, httpCall } = require("../utils")
const {
TEMP_DIR,
COUCH_DIR,
@ -86,6 +86,15 @@ async function importBackup(opts) {
bar.stop()
console.log("MinIO Import")
await importObjects()
// finish by letting the system know that a restore has occurred
try {
await httpCall(
`http://localhost:${config.MAIN_PORT}/api/system/restored`,
"POST"
)
} catch (err) {
// ignore error - it will be an older system
}
console.log("Import complete")
fs.rmSync(TEMP_DIR, { recursive: true })
}

View file

@ -16,16 +16,21 @@ exports.exportObjects = async () => {
const path = join(TEMP_DIR, MINIO_DIR)
fs.mkdirSync(path)
let fullList = []
let errorCount = 0
for (let bucket of bucketList) {
const client = ObjectStore(bucket)
try {
await client.headBucket().promise()
} catch (err) {
errorCount++
continue
}
const list = await client.listObjectsV2().promise()
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
}
if (errorCount === bucketList.length) {
throw new Error("Unable to access MinIO/S3 - check environment config.")
}
const bar = progressBar(fullList.length)
let count = 0
for (let object of fullList) {

View file

@ -2,17 +2,19 @@ const dotenv = require("dotenv")
const fs = require("fs")
const { string } = require("../questions")
const { getPouch } = require("../core/db")
const { env: environment } = require("@budibase/backend-core")
exports.DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/"
exports.DEFAULT_MINIO = "http://localhost:10000/"
exports.TEMP_DIR = ".temp"
exports.COUCH_DIR = "couchdb"
exports.MINIO_DIR = "minio"
const REQUIRED = [
{ value: "MAIN_PORT", default: "10000" },
{ value: "COUCH_DB_URL", default: exports.DEFAULT_COUCH },
{ value: "MINIO_URL", default: exports.DEFAULT_MINIO },
{
value: "COUCH_DB_URL",
default: "http://budibase:budibase@localhost:10000/db/",
},
{ value: "MINIO_URL", default: "http://localhost:10000" },
{ value: "MINIO_ACCESS_KEY" },
{ value: "MINIO_SECRET_KEY" },
]
@ -27,7 +29,7 @@ exports.checkURLs = config => {
] = `http://${username}:${password}@localhost:${mainPort}/db/`
}
if (!config["MINIO_URL"]) {
config["MINIO_URL"] = exports.DEFAULT_MINIO
config["MINIO_URL"] = `http://localhost:${mainPort}/`
}
return config
}
@ -65,6 +67,10 @@ exports.getConfig = async (envFile = true) => {
} else {
config = await exports.askQuestions()
}
// fill out environment
for (let key of Object.keys(config)) {
environment._set(key, config[key])
}
return config
}

View file

@ -1,6 +1,7 @@
#!/usr/bin/env node
require("./prebuilds")
require("./environment")
const json = require("../package.json")
const { getCommands } = require("./options")
const { Command } = require("commander")
const { getHelpDescription } = require("./utils")
@ -10,7 +11,7 @@ async function init() {
const program = new Command()
.addHelpCommand("help", getHelpDescription("Help with Budibase commands."))
.helpOption(false)
program.helpOption()
.version(json.version)
// add commands
for (let command of getCommands()) {
command.configure(program)

View file

@ -23,6 +23,14 @@ exports.downloadFile = async (url, filePath) => {
})
}
exports.httpCall = async (url, method) => {
const response = await axios({
url,
method,
})
return response.data
}
exports.getHelpDescription = string => {
return chalk.cyan(string)
}

View file

@ -5037,45 +5037,6 @@
}
]
},
"grid": {
"name": "Grid (Beta)",
"icon": "ViewGrid",
"hasChildren": true,
"styles": [
"size"
],
"illegalChildren": ["section", "grid"],
"legalDirectChildren": [
"container",
"tableblock",
"cardsblock",
"repeaterblock",
"formblock"
],
"size": {
"width": 800,
"height": 400
},
"showEmptyState": false,
"settings": [
{
"type": "number",
"label": "Rows",
"key": "rows",
"defaultValue": 12,
"min": 1,
"max": 32
},
{
"type": "number",
"label": "Columns",
"key": "cols",
"defaultValue": 12,
"min": 1,
"max": 32
}
]
},
"formblock": {
"name": "Form Block",
"icon": "Form",

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "2.0.34-alpha.1",
"@budibase/frontend-core": "2.0.34-alpha.1",
"@budibase/string-templates": "2.0.34-alpha.1",
"@budibase/bbui": "2.1.22-alpha.0",
"@budibase/frontend-core": "2.1.22-alpha.0",
"@budibase/string-templates": "2.1.22-alpha.0",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View file

@ -85,13 +85,8 @@
valueType: "Binding",
},
]
// If we're using an "update" form, use the real data provider. If we're
// using a create form, we just want a fake array so that our repeater
// will actually render the form, but data doesn't matter.
$: dataProvider =
actionType !== "Create"
? `{{ literal ${safe(providerId)} }}`
: { rows: [{}] }
$: dataProvider = `{{ literal ${safe(providerId)} }}`
$: renderDeleteButton = showDeleteButton && actionType === "Update"
$: renderSaveButton = showSaveButton && actionType !== "View"
$: renderButtons = renderDeleteButton || renderSaveButton

View file

@ -32,6 +32,7 @@
validation,
formStep
)
$: schemaType = fieldSchema?.type !== "formula" ? fieldSchema?.type : "string"
// Focus label when editing

View file

@ -128,6 +128,23 @@
return fields.find(field => get(field).name === name)
}
const getDefault = (defaultValue, schema, type) => {
// Remove any values not present in the field schema
// Convert any values supplied to string
if (Array.isArray(defaultValue) && type == "array" && schema) {
return defaultValue.reduce((acc, entry) => {
let processedOption = String(entry)
let schemaOptions = schema.constraints.inclusion
if (schemaOptions.indexOf(processedOption) > -1) {
acc.push(processedOption)
}
return acc
}, [])
} else {
return defaultValue
}
}
const formApi = {
registerField: (
field,
@ -143,6 +160,7 @@
// Create validation function based on field schema
const schemaConstraints = schema?.[field]?.constraints
const validator = disableValidation
? null
: createValidatorFromConstraints(
@ -152,8 +170,10 @@
table
)
const parsedDefault = getDefault(defaultValue, schema?.[field], type)
// If we've already registered this field then keep some existing state
let initialValue = Helpers.deepGet(initialValues, field) ?? defaultValue
let initialValue = Helpers.deepGet(initialValues, field) ?? parsedDefault
let initialError = null
let fieldId = `id-${Helpers.uuid()}`
const existingField = getField(field)
@ -186,11 +206,11 @@
error: initialError,
disabled:
disabled || fieldDisabled || (isAutoColumn && !editAutoColumns),
defaultValue,
defaultValue: parsedDefault,
validator,
lastUpdate: Date.now(),
},
fieldApi: makeFieldApi(field, defaultValue),
fieldApi: makeFieldApi(field, parsedDefault),
fieldSchema: schema?.[field] ?? {},
})
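
The getDefault addition above filters array default values against the field schema's inclusion constraint and coerces them to strings. A standalone sketch of that filtering, not taken from the commit (names are illustrative):

```ts
interface FieldSchema {
  constraints: { inclusion: string[] }
}

// Keep only the default options that actually exist in the schema, as strings.
function getArrayDefault(defaultValue: unknown, schema: FieldSchema): string[] {
  if (!Array.isArray(defaultValue)) {
    return []
  }
  return defaultValue
    .map(entry => String(entry))
    .filter(option => schema.constraints.inclusion.includes(option))
}

// getArrayDefault(["Red", "Pink"], { constraints: { inclusion: ["Red", "Blue"] } }) -> ["Red"]
```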

View file

@ -225,7 +225,10 @@ const changeFormStepHandler = async (action, context) => {
}
const closeScreenModalHandler = action => {
let { url } = action.parameters
let url
if (action?.parameters) {
url = action.parameters.url
}
// Emit this as a window event, so parent screens which are iframing us in
// can close the modal
window.parent.postMessage({ type: "close-screen-modal", url })

File diff suppressed because it is too large

View file

@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "2.0.34-alpha.1",
"@budibase/bbui": "2.1.22-alpha.0",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View file

@ -158,7 +158,7 @@ export const buildUserEndpoints = API => ({
userInfo: {
admin: user.admin ? { global: true } : undefined,
builder: user.admin || user.builder ? { global: true } : undefined,
groups: user.groups,
userGroups: user.groups,
},
})),
})

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/sdk",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"description": "Budibase Public API SDK",
"author": "Budibase",
"license": "MPL-2.0",

View file

@ -37,6 +37,20 @@ module AwsMock {
Contents: {},
})
)
// @ts-ignore
this.createBucket = jest.fn(
response({
Contents: {},
})
)
// @ts-ignore
this.deleteObjects = jest.fn(
response({
Contents: {},
})
)
}
aws.DynamoDB = { DocumentClient }

View file

@ -30,11 +30,21 @@ module FetchMock {
}
if (url.includes("/api/global")) {
return json({
const user = {
email: "test@test.com",
_id: "us_test@test.com",
status: "active",
})
roles: {},
builder: {
global: false,
},
admin: {
global: false,
},
}
return url.endsWith("/users") && opts.method === "GET"
? json([user])
: json(user)
}
// mocked data based on url
else if (url.includes("api/apps")) {

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "2.0.34-alpha.1",
"version": "2.1.22-alpha.0",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -77,11 +77,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "2.0.34-alpha.1",
"@budibase/client": "2.0.34-alpha.1",
"@budibase/pro": "2.0.34-alpha.1",
"@budibase/string-templates": "2.0.34-alpha.1",
"@budibase/types": "2.0.34-alpha.1",
"@budibase/backend-core": "2.1.22-alpha.0",
"@budibase/client": "2.1.22-alpha.0",
"@budibase/pro": "2.1.22-alpha.0",
"@budibase/string-templates": "2.1.22-alpha.0",
"@budibase/types": "2.1.22-alpha.0",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",

View file

@ -5,6 +5,8 @@ import { isQsTrue } from "../../utilities"
export async function exportAppDump(ctx: any) {
let { appId, excludeRows } = ctx.query
// remove the 120 second limit for the request
ctx.req.setTimeout(0)
const appName = decodeURI(ctx.query.appname)
excludeRows = isQsTrue(excludeRows)
const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`

View file

@ -282,9 +282,11 @@ module External {
const linkTablePrimary = linkTable.primary[0]
// one to many
if (isOneSide(field)) {
newRow[field.foreignKey || linkTablePrimary] = breakRowIdField(
row[key][0]
)[0]
let id = row[key][0]
if (typeof row[key] === "string") {
id = decodeURIComponent(row[key]).match(/\[(.*?)\]/)?.[1]
}
newRow[field.foreignKey || linkTablePrimary] = breakRowIdField(id)[0]
}
// many to many
else if (field.through) {
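
To illustrate the decoding step above in isolation: a URI-encoded relationship value such as "%5B5%5D" ("[5]" once decoded) reduces to the raw primary key before being passed to breakRowIdField. A standalone sketch with a placeholder value:

const encoded = "%5B5%5D"                     // placeholder; "[5]" once URI-decoded
const decoded = decodeURIComponent(encoded)   // "[5]"
const id = decoded.match(/\[(.*?)\]/)?.[1]    // "5"
console.log(id)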

View file

@ -37,6 +37,9 @@ export async function patch(ctx: any): Promise<any> {
datasourceId: tableId,
}
)
if (!row) {
ctx.throw(404, "Row not found")
}
ctx.status = 200
ctx.eventEmitter &&
ctx.eventEmitter.emitRow(`row:update`, appId, row, table)
@ -55,52 +58,35 @@ export const save = async (ctx: any) => {
if (body && body._id) {
return patch(ctx)
}
try {
const { row, table } = await quotas.addRow(() =>
quotas.addQuery(() => pickApi(tableId).save(ctx), {
datasourceId: tableId,
})
)
ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
ctx.message = `${table.name} saved successfully`
ctx.body = row
} catch (err) {
ctx.throw(400, err)
}
}
export async function fetchView(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetchView(ctx), {
const { row, table } = await quotas.addRow(() =>
quotas.addQuery(() => pickApi(tableId).save(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
)
ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
ctx.message = `${table.name} saved successfully`
ctx.body = row
}
export async function fetchView(ctx: any) {
const tableId = getTableId(ctx)
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetchView(ctx), {
datasourceId: tableId,
})
}
export async function fetch(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetch(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
ctx.body = await quotas.addQuery(() => pickApi(tableId).fetch(ctx), {
datasourceId: tableId,
})
}
export async function find(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).find(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
ctx.body = await quotas.addQuery(() => pickApi(tableId).find(ctx), {
datasourceId: tableId,
})
}
export async function destroy(ctx: any) {
@ -137,46 +123,30 @@ export async function destroy(ctx: any) {
export async function search(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.status = 200
ctx.body = await quotas.addQuery(() => pickApi(tableId).search(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
ctx.status = 200
ctx.body = await quotas.addQuery(() => pickApi(tableId).search(ctx), {
datasourceId: tableId,
})
}
export async function validate(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).validate(ctx)
} catch (err) {
ctx.throw(400, err)
}
ctx.body = await pickApi(tableId).validate(ctx)
}
export async function fetchEnrichedRow(ctx: any) {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(
() => pickApi(tableId).fetchEnrichedRow(ctx),
{
datasourceId: tableId,
}
)
} catch (err) {
ctx.throw(400, err)
}
ctx.body = await quotas.addQuery(
() => pickApi(tableId).fetchEnrichedRow(ctx),
{
datasourceId: tableId,
}
)
}
export const exportRows = async (ctx: any) => {
const tableId = getTableId(ctx)
try {
ctx.body = await quotas.addQuery(() => pickApi(tableId).exportRows(ctx), {
datasourceId: tableId,
})
} catch (err) {
ctx.throw(400, err)
}
ctx.body = await quotas.addQuery(() => pickApi(tableId).exportRows(ctx), {
datasourceId: tableId,
})
}

View file

@ -118,7 +118,7 @@ exports.patch = async ctx => {
})
if (!validateResult.valid) {
throw { validation: validateResult.errors }
ctx.throw(400, { validation: validateResult.errors })
}
// returned row is cleaned and prepared for writing to DB

View file

@ -82,10 +82,20 @@ exports.validate = async ({ tableId, row, table }) => {
// non required MultiSelect creates an empty array, which should not throw errors
errors[fieldName] = [`${fieldName} is required`]
}
} else if (type === FieldTypes.JSON && typeof row[fieldName] === "string") {
} else if (
(type === FieldTypes.ATTACHMENT || type === FieldTypes.JSON) &&
typeof row[fieldName] === "string"
) {
// this should only happen if there is an error
try {
JSON.parse(row[fieldName])
const json = JSON.parse(row[fieldName])
if (type === FieldTypes.ATTACHMENT) {
if (Array.isArray(json)) {
row[fieldName] = json
} else {
errors[fieldName] = [`Must be an array`]
}
}
} catch (err) {
errors[fieldName] = [`Contains invalid JSON`]
}
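
A worked illustration of the rule above: a stringified attachment value only passes validation when it parses to an array, in which case it is written back onto the row. Field names and values here are illustrative only:

const row: Record<string, any> = {
  photos: '[{"key":"a.png"}]',   // stringified array - accepted and parsed in place
  badge: '{"key":"b.png"}',      // stringified object - rejected
}
const errors: Record<string, string[]> = {}
for (const fieldName of ["photos", "badge"]) {
  try {
    const json = JSON.parse(row[fieldName])
    if (Array.isArray(json)) {
      row[fieldName] = json
    } else {
      errors[fieldName] = ["Must be an array"]
    }
  } catch (err) {
    errors[fieldName] = ["Contains invalid JSON"]
  }
}
console.log(errors) // { badge: ["Must be an array"] }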

View file

@ -5,7 +5,7 @@ require("svelte/register")
const send = require("koa-send")
const { resolve, join } = require("../../../utilities/centralPath")
const uuid = require("uuid")
const { ObjectStoreBuckets, ATTACHMENT_DIR } = require("../../../constants")
const { ObjectStoreBuckets } = require("../../../constants")
const { processString } = require("@budibase/string-templates")
const {
loadHandlebarsFile,
@ -90,7 +90,7 @@ export const uploadFile = async function (ctx: any) {
return prepareUpload({
file,
s3Key: `${ctx.appId}/${ATTACHMENT_DIR}/${processedFileName}`,
s3Key: `${ctx.appId}/attachments/${processedFileName}`,
bucket: ObjectStoreBuckets.APPS,
})
})
@ -111,6 +111,8 @@ export const serveApp = async function (ctx: any) {
const App = require("./templates/BudibaseApp.svelte").default
const plugins = enrichPluginURLs(appInfo.usedPlugins)
const { head, html, css } = App.render({
metaImage:
"https://res.cloudinary.com/daog6scxm/image/upload/v1666109324/meta-images/budibase-meta-image_uukc1m.png",
title: appInfo.name,
production: env.isProd(),
appId,

View file

@ -1,6 +1,8 @@
<script>
export let title = ""
export let favicon = ""
export let metaImage = ""
export let url = ""
export let clientLibPath
export let usedPlugins
@ -12,6 +14,15 @@
name="viewport"
content="width=device-width, initial-scale=1.0, viewport-fit=cover"
/>
<!-- Opengraph Meta Tags -->
<meta name="twitter:card" content="summary_large_image" />
<meta name="twitter:site" content="@budibase" />
<meta name="twitter:image" content={metaImage} />
<meta name="twitter:title" content="{title} - built with Budibase" />
<meta property="og:site_name" content="Budibase" />
<meta property="og:title" content="{title} - built with Budibase" />
<meta property="og:type" content="website" />
<meta property="og:image" content={metaImage} />
<title>{title}</title>
<link rel="icon" type="image/png" href={favicon} />

View file

@ -1,66 +1,51 @@
const { generateWebhookID, getWebhookParams } = require("../../db/utils")
import { getWebhookParams } from "../../db/utils"
import triggers from "../../automations/triggers"
import { db as dbCore, context } from "@budibase/backend-core"
import {
Webhook,
WebhookActionType,
BBContext,
Automation,
} from "@budibase/types"
import sdk from "../../sdk"
const toJsonSchema = require("to-json-schema")
const validate = require("jsonschema").validate
const { WebhookType } = require("../../constants")
const triggers = require("../../automations/triggers")
const { getProdAppID } = require("@budibase/backend-core/db")
const { getAppDB, updateAppId } = require("@budibase/backend-core/context")
const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema"
function Webhook(name, type, target) {
this.live = true
this.name = name
this.action = {
type,
target,
}
}
exports.Webhook = Webhook
exports.fetch = async ctx => {
const db = getAppDB()
export async function fetch(ctx: BBContext) {
const db = context.getAppDB()
const response = await db.allDocs(
getWebhookParams(null, {
include_docs: true,
})
)
ctx.body = response.rows.map(row => row.doc)
ctx.body = response.rows.map((row: any) => row.doc)
}
exports.save = async ctx => {
const db = getAppDB()
const webhook = ctx.request.body
webhook.appId = ctx.appId
// check that the webhook exists
if (webhook._id) {
await db.get(webhook._id)
} else {
webhook._id = generateWebhookID()
}
const response = await db.put(webhook)
webhook._rev = response.rev
export async function save(ctx: BBContext) {
const webhook = await sdk.automations.webhook.save(ctx.request.body)
ctx.body = {
message: "Webhook created successfully",
webhook,
}
}
exports.destroy = async ctx => {
const db = getAppDB()
ctx.body = await db.remove(ctx.params.id, ctx.params.rev)
export async function destroy(ctx: BBContext) {
ctx.body = await sdk.automations.webhook.destroy(
ctx.params.id,
ctx.params.rev
)
}
exports.buildSchema = async ctx => {
await updateAppId(ctx.params.instance)
const db = getAppDB()
const webhook = await db.get(ctx.params.id)
export async function buildSchema(ctx: BBContext) {
await context.updateAppId(ctx.params.instance)
const db = context.getAppDB()
const webhook = (await db.get(ctx.params.id)) as Webhook
webhook.bodySchema = toJsonSchema(ctx.request.body)
// update the automation outputs
if (webhook.action.type === WebhookType.AUTOMATION) {
let automation = await db.get(webhook.action.target)
if (webhook.action.type === WebhookActionType.AUTOMATION) {
let automation = (await db.get(webhook.action.target)) as Automation
const autoOutputs = automation.definition.trigger.schema.outputs
let properties = webhook.bodySchema.properties
// reset webhook outputs
@ -78,18 +63,18 @@ exports.buildSchema = async ctx => {
ctx.body = await db.put(webhook)
}
exports.trigger = async ctx => {
const prodAppId = getProdAppID(ctx.params.instance)
await updateAppId(prodAppId)
export async function trigger(ctx: BBContext) {
const prodAppId = dbCore.getProdAppID(ctx.params.instance)
await context.updateAppId(prodAppId)
try {
const db = getAppDB()
const webhook = await db.get(ctx.params.id)
const db = context.getAppDB()
const webhook = (await db.get(ctx.params.id)) as Webhook
// validate against the schema
if (webhook.bodySchema) {
validate(ctx.request.body, webhook.bodySchema)
}
const target = await db.get(webhook.action.target)
if (webhook.action.type === WebhookType.AUTOMATION) {
if (webhook.action.type === WebhookActionType.AUTOMATION) {
// trigger with both the pure request and then expand it
// in case the user has produced a schema to bind to
await triggers.externalTrigger(target, {
@ -102,7 +87,7 @@ exports.trigger = async ctx => {
ctx.body = {
message: "Webhook trigger fired successfully",
}
} catch (err) {
} catch (err: any) {
if (err.status === 404) {
ctx.status = 200
ctx.body = {

View file

@ -1,10 +1,10 @@
const { joiValidator } = require("@budibase/backend-core/auth")
const { DataSourceOperation } = require("../../../constants")
const { WebhookType } = require("../../../constants")
const {
BUILTIN_PERMISSION_IDS,
PermissionLevels,
} = require("@budibase/backend-core/permissions")
const { WebhookActionType } = require("@budibase/types")
const Joi = require("joi")
const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
@ -126,7 +126,7 @@ exports.webhookValidator = () => {
name: Joi.string().required(),
bodySchema: Joi.object().optional(),
action: Joi.object({
type: Joi.string().required().valid(WebhookType.AUTOMATION),
type: Joi.string().required().valid(WebhookActionType.AUTOMATION),
target: Joi.string().required(),
}).required(),
}).unknown(true))

View file

@ -1,9 +1,10 @@
const Router = require("@koa/router")
const controller = require("../controllers/webhook")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("@budibase/backend-core/permissions")
const { webhookValidator } = require("./utils/validators")
import Router from "@koa/router"
import * as controller from "../controllers/webhook"
import authorized from "../../middleware/authorized"
import { permissions } from "@budibase/backend-core"
import { webhookValidator } from "./utils/validators"
const BUILDER = permissions.BUILDER
const router = new Router()
router
@ -23,4 +24,4 @@ router
// this shouldn't have authorisation, right now it's always public
.post("/api/webhooks/trigger/:instance/:id", controller.trigger)
module.exports = router
export default router

View file

@ -15,30 +15,16 @@ db.init()
const Koa = require("koa")
const destroyable = require("server-destroy")
const koaBody = require("koa-body")
const pino = require("koa-pino-logger")
const http = require("http")
const api = require("./api")
const eventEmitter = require("./events")
const automations = require("./automations/index")
const Sentry = require("@sentry/node")
const fileSystem = require("./utilities/fileSystem")
const bullboard = require("./automations/bullboard")
const { logAlert } = require("@budibase/backend-core/logging")
const { pinoSettings } = require("@budibase/backend-core")
const { Thread } = require("./threads")
const fs = require("fs")
import redis from "./utilities/redis"
import * as migrations from "./migrations"
import { events, installation, tenancy } from "@budibase/backend-core"
import {
createAdminUser,
generateApiKey,
getChecklist,
} from "./utilities/workerRequests"
import { watch } from "./watch"
import { events } from "@budibase/backend-core"
import { initialise as initialiseWebsockets } from "./websocket"
import sdk from "./sdk"
import * as pro from "@budibase/pro"
import { startup } from "./startup"
const app = new Koa()
@ -54,19 +40,6 @@ app.use(
})
)
app.use(pino(pinoSettings()))
if (!env.isTest()) {
const plugin = bullboard.init()
app.use(plugin)
}
app.context.eventEmitter = eventEmitter
app.context.auth = {}
// api routes
app.use(api.router.routes())
if (env.isProd()) {
env._set("NODE_ENV", "production")
Sentry.init()
@ -104,86 +77,8 @@ server.on("close", async () => {
}
})
const initPro = async () => {
await pro.init({
backups: {
processing: {
exportAppFn: sdk.backups.exportApp,
importAppFn: sdk.backups.importApp,
statsFn: sdk.backups.calculateBackupStats,
},
},
})
}
module.exports = server.listen(env.PORT || 0, async () => {
console.log(`Budibase running on ${JSON.stringify(server.address())}`)
env._set("PORT", server.address().port)
eventEmitter.emitPort(env.PORT)
fileSystem.init()
await redis.init()
// run migrations on startup if not done via http
// not recommended in a clustered environment
if (!env.HTTP_MIGRATIONS && !env.isTest()) {
try {
await migrations.migrate()
} catch (e) {
logAlert("Error performing migrations. Exiting.", e)
shutdown()
}
}
// check and create admin user if required
if (
env.SELF_HOSTED &&
!env.MULTI_TENANCY &&
env.BB_ADMIN_USER_EMAIL &&
env.BB_ADMIN_USER_PASSWORD
) {
const checklist = await getChecklist()
if (!checklist?.adminUser?.checked) {
try {
const tenantId = tenancy.getTenantId()
const user = await createAdminUser(
env.BB_ADMIN_USER_EMAIL,
env.BB_ADMIN_USER_PASSWORD,
tenantId
)
// Need to set up an API key for automated integration tests
if (env.isTest()) {
await generateApiKey(user._id)
}
console.log(
"Admin account automatically created for",
env.BB_ADMIN_USER_EMAIL
)
} catch (e) {
logAlert("Error creating initial admin user. Exiting.", e)
shutdown()
}
}
}
// monitor plugin directory if required
if (
env.SELF_HOSTED &&
!env.MULTI_TENANCY &&
env.PLUGINS_DIR &&
fs.existsSync(env.PLUGINS_DIR)
) {
watch()
}
// check for version updates
await installation.checkInstallVersion()
// done last - these will never complete
let promises = []
promises.push(automations.init())
promises.push(initPro())
await Promise.all(promises)
await startup(app, server)
})
const shutdown = () => {

View file

@ -1,5 +1,4 @@
const {
findHBSBlocks,
decodeJSBinding,
isJSBinding,
encodeJSBinding,
@ -82,24 +81,34 @@ exports.getError = err => {
}
exports.substituteLoopStep = (hbsString, substitute) => {
let blocks = []
let checkForJS = isJSBinding(hbsString)
let substitutedHbsString = ""
let open = checkForJS ? `$("` : "{{"
let closed = checkForJS ? `")` : "}}"
if (checkForJS) {
hbsString = decodeJSBinding(hbsString)
blocks.push(hbsString)
} else {
blocks = findHBSBlocks(hbsString)
}
for (let block of blocks) {
block = block.replace(/loop/, substitute)
if (checkForJS) {
hbsString = encodeJSBinding(block)
} else {
hbsString = block
let pointer = 0,
openPointer = 0,
closedPointer = 0
while (pointer < hbsString.length) {
openPointer = hbsString.indexOf(open, pointer)
closedPointer = hbsString.indexOf(closed, pointer) + 2
if (openPointer < 0 || closedPointer < 0) {
substitutedHbsString += hbsString.substring(pointer)
break
}
let before = hbsString.substring(pointer, openPointer)
let block = hbsString
.substring(openPointer, closedPointer)
.replace(/loop/, substitute)
substitutedHbsString += before + block
pointer = closedPointer
}
return hbsString
if (checkForJS) {
substitutedHbsString = encodeJSBinding(substitutedHbsString)
}
return substitutedHbsString
}
exports.stringSplit = value => {

View file

@ -3,6 +3,7 @@ const { BullAdapter } = require("@bull-board/api/bullAdapter")
const { KoaAdapter } = require("@bull-board/koa")
const { queue } = require("@budibase/backend-core")
const automation = require("../threads/automation")
const { backups } = require("@budibase/pro")
let automationQueue = queue.createQueue(
queue.JobQueue.AUTOMATION,
@ -11,9 +12,13 @@ let automationQueue = queue.createQueue(
const PATH_PREFIX = "/bulladmin"
exports.init = () => {
exports.init = async () => {
// Set up queues for bull board admin
const backupQueue = await backups.getBackupQueue()
const queues = [automationQueue]
if (backupQueue) {
queues.push(backupQueue)
}
const adapters = []
const serverAdapter = new KoaAdapter()
for (let queue of queues) {

View file

@ -0,0 +1,17 @@
const automationUtils = require("../automationUtils")
describe("automationUtils", () => {
test("substituteLoopStep should allow multiple loop binding substitutes", () => {
expect(automationUtils.substituteLoopStep(
`{{ loop.currentItem._id }} {{ loop.currentItem._id }} {{ loop.currentItem._id }}`,
"step.2"))
.toBe(`{{ step.2.currentItem._id }} {{ step.2.currentItem._id }} {{ step.2.currentItem._id }}`)
})
test("substituteLoopStep should handle not subsituting outside of curly braces", () => {
expect(automationUtils.substituteLoopStep(
`loop {{ loop.currentItem._id }}loop loop{{ loop.currentItem._id }}loop`,
"step.2"))
.toBe(`loop {{ step.2.currentItem._id }}loop loop{{ step.2.currentItem._id }}loop`)
})
})

View file

@ -1,10 +1,9 @@
import { Thread, ThreadType } from "../threads"
import { definitions } from "./triggerInfo"
import * as webhooks from "../api/controllers/webhook"
import { automationQueue } from "./bullboard"
import newid from "../db/newid"
import { updateEntityMetadata } from "../utilities"
import { MetadataTypes, WebhookType } from "../constants"
import { MetadataTypes } from "../constants"
import { getProdAppID, doWithDB } from "@budibase/backend-core/db"
import { getAutomationMetadataParams } from "../db/utils"
import { cloneDeep } from "lodash/fp"
@ -15,7 +14,8 @@ import {
} from "@budibase/backend-core/context"
import { context } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import { Automation } from "@budibase/types"
import { Automation, WebhookActionType } from "@budibase/types"
import sdk from "../sdk"
const REBOOT_CRON = "@reboot"
const WH_STEP_ID = definitions.WEBHOOK.stepId
@ -197,16 +197,12 @@ export async function checkForWebhooks({ oldAuto, newAuto }: any) {
let db = getAppDB()
// need to get the webhook to get the rev
const webhook = await db.get(oldTrigger.webhookId)
const ctx = {
appId,
params: { id: webhook._id, rev: webhook._rev },
}
// might be updating - reset the inputs to remove the URLs
if (newTrigger) {
delete newTrigger.webhookId
newTrigger.inputs = {}
}
await webhooks.destroy(ctx)
await sdk.automations.webhook.destroy(webhook._id, webhook._rev)
} catch (err) {
// don't worry about not being able to delete, if it doesn't exist all good
}
@ -216,18 +212,14 @@ export async function checkForWebhooks({ oldAuto, newAuto }: any) {
(!isWebhookTrigger(oldAuto) || triggerChanged) &&
isWebhookTrigger(newAuto)
) {
const ctx: any = {
appId,
request: {
body: new webhooks.Webhook(
"Automation webhook",
WebhookType.AUTOMATION,
newAuto._id
),
},
}
await webhooks.save(ctx)
const id = ctx.body.webhook._id
const webhook = await sdk.automations.webhook.save(
sdk.automations.webhook.newDoc(
"Automation webhook",
WebhookActionType.AUTOMATION,
newAuto._id
)
)
const id = webhook._id
newTrigger.webhookId = id
// the app ID has to be development for this endpoint
// it can only be used when building the app

View file

@ -196,10 +196,6 @@ exports.BuildSchemaErrors = {
INVALID_COLUMN: "invalid_column",
}
exports.WebhookType = {
AUTOMATION: "automation",
}
exports.AutomationErrors = {
INCORRECT_TYPE: "INCORRECT_TYPE",
MAX_ITERATIONS: "MAX_ITERATIONS_REACHED",

View file

@ -1,9 +1,4 @@
import {
Automation,
AutomationResults,
AutomationStep,
Document,
} from "@budibase/types"
import { AutomationResults, AutomationStep, Document } from "@budibase/types"
export enum LoopStepType {
ARRAY = "Array",

View file

@ -221,6 +221,7 @@ export interface components {
*/
type?:
| "string"
| "barcodeqr"
| "longform"
| "options"
| "number"
@ -326,6 +327,7 @@ export interface components {
*/
type?:
| "string"
| "barcodeqr"
| "longform"
| "options"
| "number"
@ -433,6 +435,7 @@ export interface components {
*/
type?:
| "string"
| "barcodeqr"
| "longform"
| "options"
| "number"

View file

@ -46,6 +46,7 @@ module.exports = {
AWS_REGION: process.env.AWS_REGION,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
CDN_URL: process.env.CDN_URL || "https://cdn.budi.live",
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,

View file

@ -5,10 +5,13 @@ import {
IntegrationBase,
} from "@budibase/types"
const { Client } = require("@elastic/elasticsearch")
import { Client, ClientOptions } from "@elastic/elasticsearch"
interface ElasticsearchConfig {
url: string
ssl?: boolean
ca?: string
rejectUnauthorized?: boolean
}
const SCHEMA: Integration = {
@ -23,6 +26,21 @@ const SCHEMA: Integration = {
required: true,
default: "http://localhost:9200",
},
ssl: {
type: DatasourceFieldType.BOOLEAN,
default: false,
required: false,
},
rejectUnauthorized: {
type: DatasourceFieldType.BOOLEAN,
default: true,
required: false,
},
ca: {
type: DatasourceFieldType.LONGFORM,
default: false,
required: false,
},
},
query: {
create: {
@ -81,7 +99,19 @@ class ElasticSearchIntegration implements IntegrationBase {
constructor(config: ElasticsearchConfig) {
this.config = config
this.client = new Client({ node: config.url })
const clientConfig: ClientOptions = {
node: this.config.url,
}
if (this.config.ssl) {
clientConfig.ssl = {
rejectUnauthorized: this.config.rejectUnauthorized,
ca: this.config.ca || undefined,
}
}
this.client = new Client(clientConfig)
}
async create(query: { index: string; json: object }) {

View file

@ -1,5 +1,12 @@
import { Integration, QueryType, IntegrationBase } from "@budibase/types"
import {
Integration,
QueryType,
IntegrationBase,
DatasourceFieldType,
} from "@budibase/types"
const AWS = require("aws-sdk")
const csv = require("csvtojson")
interface S3Config {
region: string
@ -40,13 +47,103 @@ const SCHEMA: Integration = {
},
},
query: {
create: {
type: QueryType.FIELDS,
fields: {
bucket: {
display: "New Bucket",
type: DatasourceFieldType.STRING,
required: true,
},
location: {
required: true,
default: "us-east-1",
type: DatasourceFieldType.STRING,
},
grantFullControl: {
display: "Grant full control",
type: DatasourceFieldType.STRING,
},
grantRead: {
display: "Grant read",
type: DatasourceFieldType.STRING,
},
grantReadAcp: {
display: "Grant read ACP",
type: DatasourceFieldType.STRING,
},
grantWrite: {
display: "Grant write",
type: DatasourceFieldType.STRING,
},
grantWriteAcp: {
display: "Grant write ACP",
type: DatasourceFieldType.STRING,
},
},
},
read: {
type: QueryType.FIELDS,
fields: {
bucket: {
type: "string",
type: DatasourceFieldType.STRING,
required: true,
},
delimiter: {
type: DatasourceFieldType.STRING,
},
marker: {
type: DatasourceFieldType.STRING,
},
maxKeys: {
type: DatasourceFieldType.NUMBER,
display: "Max Keys",
},
prefix: {
type: DatasourceFieldType.STRING,
},
},
},
readCsv: {
displayName: "Read CSV",
type: QueryType.FIELDS,
fields: {
bucket: {
type: DatasourceFieldType.STRING,
required: true,
},
key: {
type: DatasourceFieldType.STRING,
required: true,
},
},
},
delete: {
type: QueryType.FIELDS,
fields: {
bucket: {
type: DatasourceFieldType.STRING,
required: true,
},
delete: {
type: DatasourceFieldType.JSON,
required: true,
},
},
},
},
extra: {
acl: {
required: false,
displayName: "ACL",
type: DatasourceFieldType.LIST,
data: {
create: [
"private",
"public-read",
"public-read-write",
"authenticated-read",
],
},
},
},
@ -67,14 +164,93 @@ class S3Integration implements IntegrationBase {
this.client = new AWS.S3(this.config)
}
async read(query: { bucket: string }) {
async create(query: {
bucket: string
location: string
grantFullControl: string
grantRead: string
grantReadAcp: string
grantWrite: string
grantWriteAcp: string
extra: {
acl: string
}
}) {
let params: any = {
Bucket: query.bucket,
ACL: query.extra?.acl,
GrantFullControl: query.grantFullControl,
GrantRead: query.grantRead,
GrantReadACP: query.grantReadAcp,
GrantWrite: query.grantWrite,
GrantWriteACP: query.grantWriteAcp,
}
if (query.location) {
params["CreateBucketConfiguration"] = {
LocationConstraint: query.location,
}
}
return await this.client.createBucket(params).promise()
}
async read(query: {
bucket: string
delimiter: string
expectedBucketOwner: string
marker: string
maxKeys: number
prefix: string
}) {
const response = await this.client
.listObjects({
Bucket: query.bucket,
Delimiter: query.delimiter,
Marker: query.marker,
MaxKeys: query.maxKeys,
Prefix: query.prefix,
})
.promise()
return response.Contents
}
async readCsv(query: { bucket: string; key: string }) {
const stream = this.client
.getObject({
Bucket: query.bucket,
Key: query.key,
})
.createReadStream()
let csvError = false
return new Promise((resolve, reject) => {
stream.on("error", (err: Error) => {
reject(err)
})
const response = csv()
.fromStream(stream)
.on("error", () => {
csvError = true
})
stream.on("finish", () => {
resolve(response)
})
}).catch(err => {
if (csvError) {
throw new Error("Could not read CSV")
} else {
throw err
}
})
}
async delete(query: { bucket: string; delete: string }) {
return await this.client
.deleteObjects({
Bucket: query.bucket,
Delete: JSON.parse(query.delete),
})
.promise()
}
}
export default {

View file

@ -18,11 +18,95 @@ describe("S3 Integration", () => {
})
it("calls the read method with the correct params", async () => {
const response = await config.integration.read({
await config.integration.read({
bucket: "test",
delimiter: "/",
marker: "file.txt",
maxKeys: 999,
prefix: "directory/",
})
expect(config.integration.client.listObjects).toHaveBeenCalledWith({
Bucket: "test",
Delimiter: "/",
Marker: "file.txt",
MaxKeys: 999,
Prefix: "directory/",
})
})
it("calls the create method with the correct params", async () => {
await config.integration.create({
bucket: "test",
location: "af-south-1",
grantFullControl: "me",
grantRead: "him",
grantReadAcp: "her",
grantWrite: "she",
grantWriteAcp: "he",
objectLockEnabledForBucket: true,
extra: {
acl: "private",
},
})
expect(config.integration.client.createBucket).toHaveBeenCalledWith({
Bucket: "test",
CreateBucketConfiguration: {
LocationConstraint: "af-south-1",
},
GrantFullControl: "me",
GrantRead: "him",
GrantReadACP: "her",
GrantWrite: "she",
GrantWriteACP: "he",
ACL: "private",
})
})
it("does not add undefined location constraint when calling the create method", async () => {
await config.integration.create({
bucket: "test",
})
expect(config.integration.client.createBucket).toHaveBeenCalledWith({
Bucket: "test",
GrantFullControl: undefined,
GrantRead: undefined,
GrantReadACP: undefined,
GrantWrite: undefined,
GrantWriteACP: undefined,
ACL: undefined,
})
})
it("calls the delete method with the correct params ", async () => {
await config.integration.delete({
bucket: "test",
delete: `{
"Objects": [
{
"Key": "HappyFace.jpg",
"VersionId": "2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b"
},
{
"Key": "HappyFace.jpg",
"VersionId": "yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd"
}
]
}`,
})
expect(config.integration.client.deleteObjects).toHaveBeenCalledWith({
Bucket: "test",
Delete: {
Objects: [
{
Key: "HappyFace.jpg",
VersionId: "2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b",
},
{
Key: "HappyFace.jpg",
VersionId: "yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd",
},
],
},
})
})
})
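
The new readCsv query is the one path not exercised by these tests. A minimal standalone sketch of the same stream-and-parse pattern, assuming the bucket and key exist and point at a CSV object (error handling is omitted for brevity):

const AWS = require("aws-sdk")
const csv = require("csvtojson")

async function readCsvFromS3(bucket: string, key: string) {
  const s3 = new AWS.S3()
  const stream = s3.getObject({ Bucket: bucket, Key: key }).createReadStream()
  // csvtojson's converter is thenable and resolves with the parsed rows
  return await csv().fromStream(stream)
}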

View file

@ -1,13 +1,11 @@
const TestConfig = require("../../../tests/utilities/TestConfiguration")
const syncApps = jest.fn()
const syncRows = jest.fn()
const syncPlugins = jest.fn()
jest.mock("../usageQuotas/syncApps", () => ({ run: syncApps }) )
jest.mock("../usageQuotas/syncRows", () => ({ run: syncRows }) )
jest.mock("../usageQuotas/syncPlugins", () => ({ run: syncPlugins }) )
const TestConfig = require("../../../tests/utilities/TestConfiguration")
const migration = require("../syncQuotas")
describe("run", () => {

View file

@ -1,11 +1,13 @@
jest.mock("@budibase/backend-core/db", () => ({
...jest.requireActual("@budibase/backend-core/db"),
createNewUserEmailView: jest.fn(),
}))
const coreDb = require("@budibase/backend-core/db")
const TestConfig = require("../../../tests/utilities/TestConfiguration")
const { TENANT_ID } = require("../../../tests/utilities/structures")
const { getGlobalDB, doInTenant } = require("@budibase/backend-core/tenancy")
// mock email view creation
const coreDb = require("@budibase/backend-core/db")
const createNewUserEmailView = jest.fn()
coreDb.createNewUserEmailView = createNewUserEmailView
const migration = require("../userEmailViewCasing")
@ -22,7 +24,7 @@ describe("run", () => {
await doInTenant(TENANT_ID, async () => {
const globalDb = getGlobalDB()
await migration.run(globalDb)
expect(createNewUserEmailView).toHaveBeenCalledTimes(1)
expect(coreDb.createNewUserEmailView).toHaveBeenCalledTimes(1)
})
})
})

View file

@ -0,0 +1,5 @@
import * as webhook from "./webhook"
export default {
webhook,
}

View file

@ -0,0 +1,43 @@
import { Webhook, WebhookActionType } from "@budibase/types"
import { db as dbCore, context } from "@budibase/backend-core"
import { generateWebhookID } from "../../../db/utils"
function isWebhookID(id: string) {
return id.startsWith(dbCore.DocumentType.WEBHOOK)
}
export function newDoc(
name: string,
type: WebhookActionType,
target: string
): Webhook {
return {
live: true,
name,
action: {
type,
target,
},
}
}
export async function save(webhook: Webhook) {
const db = context.getAppDB()
// check that the webhook exists
if (webhook._id && isWebhookID(webhook._id)) {
await db.get(webhook._id)
} else {
webhook._id = generateWebhookID()
}
const response = await db.put(webhook)
webhook._rev = response.rev
return webhook
}
export async function destroy(id: string, rev: string) {
const db = context.getAppDB()
if (!id || !isWebhookID(id)) {
throw new Error("Provided webhook ID is not valid.")
}
return await db.remove(id, rev)
}

View file

@ -1,20 +1,25 @@
import { db as dbCore } from "@budibase/backend-core"
import { TABLE_ROW_PREFIX } from "../../../db/utils"
import { getAutomationParams, TABLE_ROW_PREFIX } from "../../../db/utils"
import { budibaseTempDir } from "../../../utilities/budibaseDir"
import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
import {
uploadDirectory,
upload,
uploadDirectory,
} from "../../../utilities/fileSystem/utilities"
import { downloadTemplate } from "../../../utilities/fileSystem"
import { ObjectStoreBuckets, FieldTypes } from "../../../constants"
import { FieldTypes, ObjectStoreBuckets } from "../../../constants"
import { join } from "path"
import fs from "fs"
import sdk from "../../"
import { CouchFindOptions, RowAttachment } from "@budibase/types"
import {
Automation,
AutomationTriggerStepId,
CouchFindOptions,
RowAttachment,
} from "@budibase/types"
import PouchDB from "pouchdb"
const uuid = require("uuid/v4")
const tar = require("tar")
import PouchDB from "pouchdb"
type TemplateType = {
file?: {
@ -81,12 +86,43 @@ async function updateAttachmentColumns(
}
}
async function updateAutomations(prodAppId: string, db: PouchDB.Database) {
const automations = (
await db.allDocs(
getAutomationParams(null, {
include_docs: true,
})
)
).rows.map(row => row.doc) as Automation[]
const devAppId = dbCore.getDevAppID(prodAppId)
let toSave: Automation[] = []
for (let automation of automations) {
const oldDevAppId = automation.appId,
oldProdAppId = dbCore.getProdAppID(automation.appId)
if (
automation.definition.trigger.stepId === AutomationTriggerStepId.WEBHOOK
) {
const old = automation.definition.trigger.inputs
automation.definition.trigger.inputs = {
schemaUrl: old.schemaUrl.replace(oldDevAppId, devAppId),
triggerUrl: old.triggerUrl.replace(oldProdAppId, prodAppId),
}
}
automation.appId = devAppId
toSave.push(automation)
}
await db.bulkDocs(toSave)
}
/**
* This function manages temporary template files which are stored by Koa.
* @param {Object} template The template object retrieved from the Koa context object.
* @returns {Object} Returns a fs read stream which can be loaded into the database.
*/
async function getTemplateStream(template: TemplateType) {
if (template.file && template.file.type !== "text/plain") {
throw new Error("Cannot import a non-text based file.")
}
if (template.file) {
return fs.createReadStream(template.file.path)
} else if (template.key) {
@ -123,7 +159,7 @@ export async function importApp(
) {
let prodAppId = dbCore.getProdAppID(appId)
let dbStream: any
const isTar = template.file && template.file.type === "application/gzip"
const isTar = template.file && template?.file?.type?.endsWith("gzip")
const isDirectory =
template.file && fs.lstatSync(template.file.path).isDirectory()
if (template.file && (isTar || isDirectory)) {
@ -165,5 +201,6 @@ export async function importApp(
throw "Error loading database dump from template."
}
await updateAttachmentColumns(prodAppId, db)
await updateAutomations(prodAppId, db)
return ok
}
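
As a worked illustration of the URL rewrite in updateAutomations above: the trigger route shown earlier is /api/webhooks/trigger/:instance/:id, so swapping in the imported app's ID is a plain string replacement. The IDs below are placeholders:

const oldProdAppId = "app_aaaaaaaa"   // placeholder IDs
const prodAppId = "app_bbbbbbbb"
const old = { triggerUrl: "/api/webhooks/trigger/app_aaaaaaaa/wh_123" }
const triggerUrl = old.triggerUrl.replace(oldProdAppId, prodAppId)
console.log(triggerUrl) // "/api/webhooks/trigger/app_bbbbbbbb/wh_123"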

View file

@ -1,9 +1,11 @@
import { default as backups } from "./app/backups"
import { default as tables } from "./app/tables"
import { default as automations } from "./app/automations"
const sdk = {
backups,
tables,
automations,
}
// default export for TS

Some files were not shown because too many files have changed in this diff