Merge remote-tracking branch 'origin/develop' into feat/user-groups-tab

Peter Clement 2022-07-19 11:23:31 +01:00
commit c5b9be60c7
106 changed files with 5004 additions and 15655 deletions

@ -1,11 +1,8 @@
name: Deploy Budibase Single Container Image to DockerHub
on:
push:
branches:
- "omnibus-action"
- "develop"
- "master"
- "main"
workflow_dispatch:
env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
BRANCH: ${{ github.event.pull_request.head.ref }}
@ -40,7 +37,7 @@ jobs:
- name: Run Yarn Lint
run: yarn lint
- name: Run Yarn Build
run: yarn build
run: yarn build:docker:pre
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
@ -60,3 +57,12 @@ jobs:
platforms: linux/amd64,linux/arm64
tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-aas,budibase/budibase-aas:v${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile

@ -68,7 +68,7 @@ jobs:
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
RELEASE_VERSION_TYPE: ${{ github.event.inputs.version }}
RELEASE_VERSION_TYPE: ${{ github.event.inputs.versioning }}
run: |
# setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default
git config --global user.name "Budibase Release Bot"

@ -135,13 +135,18 @@ You can learn more about the Budibase API at the following places:
## 🏁 Get started
<a href="https://docs.budibase.com/docs/hosting-methods"><img src="https://res.cloudinary.com/daog6scxm/image/upload/v1634808888/logo/deploy_npl9za.png" /></a>
Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean.
Or use Budibase Cloud if you don't need to self-host, and would like to get started quickly.
### [Get started with self-hosting Budibase](https://docs.budibase.com/docs/hosting-methods)
- [Docker - single ARM compatible image](https://docs.budibase.com/docs/docker)
- [Docker Compose](https://docs.budibase.com/docs/docker-compose)
- [Kubernetes](https://docs.budibase.com/docs/kubernetes-k8s)
- [Digital Ocean](https://docs.budibase.com/docs/digitalocean)
- [Portainer](https://docs.budibase.com/docs/portainer)
### [Get started with Budibase Cloud](https://budibase.com)

@ -151,6 +151,10 @@ spec:
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{ if .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.imagePullSecrets | nindent 6 }}
{{ end }}
restartPolicy: Always
serviceAccountName: ""
status: {}

@ -68,6 +68,10 @@ spec:
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{ if .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.imagePullSecrets | nindent 6 }}
{{ end }}
restartPolicy: Always
serviceAccountName: ""
volumes:
@ -75,4 +79,4 @@ spec:
persistentVolumeClaim:
claimName: minio-data
status: {}
{{- end }}
{{- end }}

@ -40,6 +40,10 @@ spec:
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{ if .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.imagePullSecrets | nindent 6 }}
{{ end }}
restartPolicy: Always
serviceAccountName: ""
volumes:

@ -47,6 +47,10 @@ spec:
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{ if .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.imagePullSecrets | nindent 6 }}
{{ end }}
restartPolicy: Always
serviceAccountName: ""
volumes:
@ -54,4 +58,4 @@ spec:
persistentVolumeClaim:
claimName: redis-data
status: {}
{{- end }}
{{- end }}

@ -145,6 +145,10 @@ spec:
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{ if .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml .Values.imagePullSecrets | nindent 6 }}
{{ end }}
restartPolicy: Always
serviceAccountName: ""
status: {}

@ -11,10 +11,11 @@ services:
- minio_data:/data
ports:
- "${MINIO_PORT}:9000"
- "9001:9001"
environment:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
command: server /data
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s

@ -63,7 +63,7 @@ services:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
MINIO_BROWSER: "off"
command: server /data
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s

@ -3,15 +3,15 @@
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
mkdir -p /home/budibase/{minio,couchdb}
mkdir -p /home/budibase/couchdb/data
chown -R couchdb:couchdb /home/budibase/couchdb/
mkdir -p /home/{search,minio,couch}
mkdir -p /home/couch/{dbs,views}
chown -R couchdb:couchdb /home/couch/
apt update
apt-get install -y openssh-server
sed -i 's#dir=/opt/couchdb/data/search#dir=/home/budibase/couchdb/data/search#' /opt/clouseau/clouseau.ini
sed -i 's#/minio/minio server /minio &#/minio/minio server /home/budibase/minio &#' /runner.sh
sed -i 's#database_dir = ./data#database_dir = /home/budibase/couchdb/data#' /opt/couchdb/etc/default.ini
sed -i 's#view_index_dir = ./data#view_index_dir = /home/budibase/couchdb/data#' /opt/couchdb/etc/default.ini
sed -i 's#dir=/opt/couchdb/data/search#dir=/home/search#' /opt/clouseau/clouseau.ini
sed -i 's#/minio/minio server /minio &#/minio/minio server /home/minio &#' /runner.sh
sed -i 's#database_dir = ./data#database_dir = /home/couch/dbs#' /opt/couchdb/etc/default.ini
sed -i 's#view_index_dir = ./data#view_index_dir = /home/couch/views#' /opt/couchdb/etc/default.ini
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
/etc/init.d/ssh restart
fi

@ -122,8 +122,7 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
VOLUME /opt/couchdb/data
VOLUME /minio
VOLUME /data
# setup letsencrypt certificate
RUN apt-get install -y certbot python3-certbot-nginx

@ -24,8 +24,8 @@ if [ ! -f "/data/.env" ]; then
fi
# make these directories in runner, in case of mount
mkdir -p /data/couch/dbs /data/couch/views
chown couchdb:couchdb /data/couch /data/couch/dbs /data/couch/views
mkdir -p /data/couch/{dbs,views} /home/couch/{dbs,views}
chown -R couchdb:couchdb /data/couch /home/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server /data/minio &

@ -1,5 +1,5 @@
{
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"npmClient": "yarn",
"packages": [
"packages/*"

@ -54,6 +54,7 @@
"test:e2e:ci:notify": "lerna run cy:ci:notify",
"build:specs": "lerna run specs",
"build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:pre": "lerna run build && lerna run predocker",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:proxy:compose": "node scripts/proxy/generateProxyConfig compose && npm run build:docker:proxy",
"build:docker:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod && npm run build:docker:proxy",
@ -65,7 +66,7 @@
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single": "lerna run build && lerna run predocker && npm run build:docker:single:image",
"build:docker:single": "npm run build:docker:pre && npm run build:docker:single:image",
"build:docs": "lerna run build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run env:multi:enable",
@ -84,4 +85,4 @@
"install:pro": "bash scripts/pro/install.sh",
"dep:clean": "yarn clean && yarn bootstrap"
}
}
}

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,7 +20,7 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "^1.0.220-alpha.4",
"@budibase/types": "^1.1.15-alpha.2",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
@ -59,10 +59,10 @@
]
},
"devDependencies": {
"@budibase/types": "^1.0.219",
"@shopify/jest-koa-mocks": "3.1.5",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
"@types/pouchdb": "6.4.0",

@ -0,0 +1,17 @@
export enum ContextKeys {
TENANT_ID = "tenantId",
GLOBAL_DB = "globalDb",
APP_ID = "appId",
IDENTITY = "identity",
// whatever the request app DB was
CURRENT_DB = "currentDb",
// get the prod app DB from the request
PROD_DB = "prodDb",
// get the dev app DB from the request
DEV_DB = "devDb",
DB_OPTS = "dbOpts",
// check if something else is using the context, don't close DB
TENANCY_IN_USE = "tenancyInUse",
APP_IN_USE = "appInUse",
IDENTITY_IN_USE = "identityInUse",
}
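These keys name the slots used on the async-local (CLS) storage that the context module is built on. A minimal sketch of how a slot is written and read, using the cls helper the same way the modules below do (the tenant value is illustrative):

import cls from "./FunctionContext"
import { ContextKeys } from "./constants"

// run a task inside a fresh context, store a value on it, then read it back
cls.run(async () => {
  cls.setOnContext(ContextKeys.TENANT_ID, "my-tenant")
  const tenantId = cls.getFromContext(ContextKeys.TENANT_ID) // "my-tenant"
  console.log(tenantId)
})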

@ -1,354 +0,0 @@
const env = require("../environment")
const { SEPARATOR, DocumentTypes } = require("../db/constants")
const { DEFAULT_TENANT_ID } = require("../constants")
const cls = require("./FunctionContext")
const { dangerousGetDB, closeDB } = require("../db")
const { getProdAppID, getDevelopmentAppID } = require("../db/conversions")
const { baseGlobalDBName } = require("../tenancy/utils")
const { isEqual } = require("lodash")
// some test cases call functions directly, need to
// store an app ID to pretend there is a context
let TEST_APP_ID = null
const ContextKeys = {
TENANT_ID: "tenantId",
GLOBAL_DB: "globalDb",
APP_ID: "appId",
IDENTITY: "identity",
// whatever the request app DB was
CURRENT_DB: "currentDb",
// get the prod app DB from the request
PROD_DB: "prodDb",
// get the dev app DB from the request
DEV_DB: "devDb",
DB_OPTS: "dbOpts",
// check if something else is using the context, don't close DB
IN_USE: "inUse",
}
exports.DEFAULT_TENANT_ID = DEFAULT_TENANT_ID
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
async function closeAppDBs() {
const dbKeys = [
ContextKeys.CURRENT_DB,
ContextKeys.PROD_DB,
ContextKeys.DEV_DB,
]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
continue
}
await closeDB(db)
// clear the DB from context, in case someone tries to use it again
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKeys.APP_ID)) {
cls.setOnContext(ContextKeys.APP_ID, null)
}
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
cls.setOnContext(ContextKeys.DB_OPTS, null)
}
}
exports.closeTenancy = async () => {
if (env.USE_COUCH) {
await closeDB(exports.getGlobalDB())
}
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKeys.TENANT_ID, null)
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
}
exports.isDefaultTenant = () => {
return exports.getTenantId() === exports.DEFAULT_TENANT_ID
}
exports.isMultiTenant = () => {
return env.MULTI_TENANCY
}
// used for automations, API endpoints should always be in context already
exports.doInTenant = (tenantId, task, { forceNew } = {}) => {
// the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the tenant id
if (!opts.existing) {
exports.updateTenantId(tenantId)
}
try {
// invoke the task
return await task()
} finally {
const using = cls.getFromContext(ContextKeys.IN_USE)
if (!using || using <= 1) {
await exports.closeTenancy()
} else {
cls.setOnContext(using - 1)
}
}
}
const using = cls.getFromContext(ContextKeys.IN_USE)
if (
!forceNew &&
using &&
cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
) {
cls.setOnContext(ContextKeys.IN_USE, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(ContextKeys.IN_USE, 1)
return internal()
})
}
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
*/
exports.getTenantIDFromAppID = appId => {
if (!appId) {
return null
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentTypes.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return null
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
const setAppTenantId = appId => {
const appTenantId =
exports.getTenantIDFromAppID(appId) || exports.DEFAULT_TENANT_ID
exports.updateTenantId(appTenantId)
}
exports.doInAppContext = (appId, task, { forceNew } = {}) => {
if (!appId) {
throw new Error("appId is required")
}
const identity = exports.getIdentity()
// the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the app tenant id
if (!opts.existing) {
setAppTenantId(appId)
}
// set the app ID
cls.setOnContext(ContextKeys.APP_ID, appId)
// preserve the identity
exports.setIdentity(identity)
try {
// invoke the task
return await task()
} finally {
const using = cls.getFromContext(ContextKeys.IN_USE)
if (!using || using <= 1) {
await closeAppDBs()
} else {
cls.setOnContext(using - 1)
}
}
}
const using = cls.getFromContext(ContextKeys.IN_USE)
if (!forceNew && using && cls.getFromContext(ContextKeys.APP_ID) === appId) {
cls.setOnContext(ContextKeys.IN_USE, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(ContextKeys.IN_USE, 1)
return internal()
})
}
}
exports.doInIdentityContext = (identity, task) => {
if (!identity) {
throw new Error("identity is required")
}
async function internal(opts = { existing: false }) {
if (!opts.existing) {
cls.setOnContext(ContextKeys.IDENTITY, identity)
// set the tenant so that doInTenant will preserve identity
if (identity.tenantId) {
exports.updateTenantId(identity.tenantId)
}
}
try {
// invoke the task
return await task()
} finally {
const using = cls.getFromContext(ContextKeys.IN_USE)
if (!using || using <= 1) {
exports.setIdentity(null)
} else {
cls.setOnContext(using - 1)
}
}
}
const existing = cls.getFromContext(ContextKeys.IDENTITY)
const using = cls.getFromContext(ContextKeys.IN_USE)
if (using && existing && existing._id === identity._id) {
cls.setOnContext(ContextKeys.IN_USE, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(ContextKeys.IN_USE, 1)
return internal({ existing: false })
})
}
}
exports.setIdentity = identity => {
cls.setOnContext(ContextKeys.IDENTITY, identity)
}
exports.getIdentity = () => {
try {
return cls.getFromContext(ContextKeys.IDENTITY)
} catch (e) {
// do nothing - identity is not in context
}
}
exports.updateTenantId = tenantId => {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
if (env.USE_COUCH) {
exports.setGlobalDB(tenantId)
}
}
exports.updateAppId = async appId => {
try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKeys.APP_ID, appId)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
} else {
throw err
}
}
}
exports.setGlobalDB = tenantId => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
return db
}
exports.getGlobalDB = () => {
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
return db
}
exports.isTenantIdSet = () => {
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
return !!tenantId
}
exports.getTenantId = () => {
if (!exports.isMultiTenant()) {
return exports.DEFAULT_TENANT_ID
}
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
exports.getAppId = () => {
const foundId = cls.getFromContext(ContextKeys.APP_ID)
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
function getContextDB(key, opts) {
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key)
if (db && isEqual(opts, storedOpts)) {
return db
}
const appId = exports.getAppId()
let toUseAppId
switch (key) {
case ContextKeys.CURRENT_DB:
toUseAppId = appId
break
case ContextKeys.PROD_DB:
toUseAppId = getProdAppID(appId)
break
case ContextKeys.DEV_DB:
toUseAppId = getDevelopmentAppID(appId)
break
}
db = dangerousGetDB(toUseAppId, opts)
try {
cls.setOnContext(key, db)
if (opts) {
cls.setOnContext(dbOptsKey, opts)
}
} catch (err) {
if (!env.isTest()) {
throw err
}
}
return db
}
/**
* Opens the app database based on whatever the request
* contained, dev or prod.
*/
exports.getAppDB = (opts = null) => {
return getContextDB(ContextKeys.CURRENT_DB, opts)
}
/**
* This specifically gets the prod app ID, if the request
* contained a development app ID, this will open the prod one.
*/
exports.getProdAppDB = (opts = null) => {
return getContextDB(ContextKeys.PROD_DB, opts)
}
/**
* This specifically gets the dev app ID, if the request
* contained a prod app ID, this will open the dev one.
*/
exports.getDevAppDB = (opts = null) => {
return getContextDB(ContextKeys.DEV_DB, opts)
}

@ -0,0 +1,247 @@
import env from "../environment"
import { SEPARATOR, DocumentTypes } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../tenancy/utils"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKeys } from "./constants"
import {
updateUsing,
closeWithUsing,
setAppTenantId,
setIdentity,
closeAppDBs,
getContextDB,
} from "./utils"
export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
// some test cases call functions directly, need to
// store an app ID to pretend there is a context
let TEST_APP_ID: string | null = null
export const closeTenancy = async () => {
let db
try {
if (env.USE_COUCH) {
db = getGlobalDB()
}
} catch (err) {
// no DB found - skip closing
return
}
await closeDB(db)
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKeys.TENANT_ID, null)
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
}
// export const isDefaultTenant = () => {
// return getTenantId() === DEFAULT_TENANT_ID
// }
export const isMultiTenant = () => {
return env.MULTI_TENANCY
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
*/
export const getTenantIDFromAppID = (appId: string) => {
if (!appId) {
return null
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentTypes.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return null
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
// used for automations, API endpoints should always be in context already
export const doInTenant = (tenantId: string | null, task: any) => {
// the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the tenant id + global db if this is a new context
if (!opts.existing) {
updateTenantId(tenantId)
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.TENANCY_IN_USE, () => {
return closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
return updateUsing(ContextKeys.TENANCY_IN_USE, existing, internal)
}
export const doInAppContext = (appId: string, task: any) => {
if (!appId) {
throw new Error("appId is required")
}
const identity = getIdentity()
// the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the app tenant id
if (!opts.existing) {
setAppTenantId(appId)
}
// set the app ID
cls.setOnContext(ContextKeys.APP_ID, appId)
// preserve the identity
if (identity) {
setIdentity(identity)
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.APP_IN_USE, async () => {
await closeAppDBs()
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.APP_ID) === appId
return updateUsing(ContextKeys.APP_IN_USE, existing, internal)
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
if (!identity) {
throw new Error("identity is required")
}
async function internal(opts = { existing: false }) {
if (!opts.existing) {
cls.setOnContext(ContextKeys.IDENTITY, identity)
// set the tenant so that doInTenant will preserve identity
if (identity.tenantId) {
updateTenantId(identity.tenantId)
}
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.IDENTITY_IN_USE, async () => {
setIdentity(null)
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.IDENTITY)
return updateUsing(ContextKeys.IDENTITY_IN_USE, existing, internal)
}
export const getIdentity = (): IdentityContext | undefined => {
try {
return cls.getFromContext(ContextKeys.IDENTITY)
} catch (e) {
// do nothing - identity is not in context
}
}
export const updateTenantId = (tenantId: string | null) => {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
if (env.USE_COUCH) {
setGlobalDB(tenantId)
}
}
export const updateAppId = async (appId: string) => {
try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKeys.APP_ID, appId)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
} else {
throw err
}
}
}
export const setGlobalDB = (tenantId: string | null) => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
return db
}
export const getGlobalDB = () => {
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
return db
}
export const isTenantIdSet = () => {
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
return !!tenantId
}
export const getTenantId = () => {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
export const getAppId = () => {
const foundId = cls.getFromContext(ContextKeys.APP_ID)
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
/**
* Opens the app database based on whatever the request
* contained, dev or prod.
*/
export const getAppDB = (opts?: any) => {
return getContextDB(ContextKeys.CURRENT_DB, opts)
}
/**
* This specifically gets the prod app ID, if the request
* contained a development app ID, this will open the prod one.
*/
export const getProdAppDB = (opts?: any) => {
return getContextDB(ContextKeys.PROD_DB, opts)
}
/**
* This specifically gets the dev app ID, if the request
* contained a prod app ID, this will open the dev one.
*/
export const getDevAppDB = (opts?: any) => {
return getContextDB(ContextKeys.DEV_DB, opts)
}
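A quick illustration of getTenantIDFromAppID above; the "_" separator and the app ID shapes are assumptions made for the example, not values taken from this commit:

import { getTenantIDFromAppID } from "./index"

getTenantIDFromAppID("app_dev_acme_uuid") // "acme" (dev app ID with a tenant segment)
getTenantIDFromAppID("app_acme_uuid")     // "acme" (prod app ID with a tenant segment)
getTenantIDFromAppID("app_uuid")          // null, no tenant segment in the ID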

@ -0,0 +1,148 @@
import "../../../tests/utilities/TestConfiguration"
import * as context from ".."
import { DEFAULT_TENANT_ID } from "../../constants"
import env from "../../environment"
// must use require to spy index file exports due to known issue in jest
const dbUtils = require("../../db")
jest.spyOn(dbUtils, "closeDB")
jest.spyOn(dbUtils, "dangerousGetDB")
describe("context", () => {
beforeEach(() => {
jest.clearAllMocks()
})
describe("doInTenant", () => {
describe("single-tenancy", () => {
it("defaults to the default tenant", () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe(DEFAULT_TENANT_ID)
})
it("defaults to the default tenant db", async () => {
await context.doInTenant(DEFAULT_TENANT_ID, () => {
const db = context.getGlobalDB()
expect(db.name).toBe("global-db")
})
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
})
describe("multi-tenancy", () => {
beforeEach(() => {
env._set("MULTI_TENANCY", 1)
})
it("fails when no tenant id is set", () => {
const test = () => {
let error
try {
context.getTenantId()
} catch (e: any) {
error = e
}
expect(error.message).toBe("Tenant id not found")
}
// test under no tenancy
test()
// test after tenancy has been accessed to ensure cleanup
context.doInTenant("test", () => {})
test()
})
it("fails when no tenant db is set", () => {
const test = () => {
let error
try {
context.getGlobalDB()
} catch (e: any) {
error = e
}
expect(error.message).toBe("Global DB not found")
}
// test under no tenancy
test()
// test after tenancy has been accessed to ensure cleanup
context.doInTenant("test", () => {})
test()
})
it("sets tenant id", () => {
context.doInTenant("test", () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe("test")
})
})
it("initialises the tenant db", async () => {
await context.doInTenant("test", () => {
const db = context.getGlobalDB()
expect(db.name).toBe("test_global-db")
})
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
it("sets the tenant id when nested with same tenant id", async () => {
await context.doInTenant("test", async () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe("test")
await context.doInTenant("test", async () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe("test")
await context.doInTenant("test", () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe("test")
})
})
})
})
it("initialises the tenant db when nested with same tenant id", async () => {
await context.doInTenant("test", async () => {
const db = context.getGlobalDB()
expect(db.name).toBe("test_global-db")
await context.doInTenant("test", async () => {
const db = context.getGlobalDB()
expect(db.name).toBe("test_global-db")
await context.doInTenant("test", () => {
const db = context.getGlobalDB()
expect(db.name).toBe("test_global-db")
})
})
})
// only 1 db is opened and closed
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
it("sets different tenant id inside another context", () => {
context.doInTenant("test", () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe("test")
context.doInTenant("nested", () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe("nested")
context.doInTenant("double-nested", () => {
const tenantId = context.getTenantId()
expect(tenantId).toBe("double-nested")
})
})
})
})
})
})
})

@ -0,0 +1,113 @@
import {
DEFAULT_TENANT_ID,
getAppId,
getTenantIDFromAppID,
updateTenantId,
} from "./index"
import cls from "./FunctionContext"
import { IdentityContext } from "@budibase/types"
import { ContextKeys } from "./constants"
import { dangerousGetDB, closeDB } from "../db"
import { isEqual } from "lodash"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
import env from "../environment"
export async function updateUsing(
usingKey: string,
existing: boolean,
internal: (opts: { existing: boolean }) => Promise<any>
) {
const using = cls.getFromContext(usingKey)
if (using && existing) {
cls.setOnContext(usingKey, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(usingKey, 1)
return internal({ existing: false })
})
}
}
export async function closeWithUsing(
usingKey: string,
closeFn: () => Promise<any>
) {
const using = cls.getFromContext(usingKey)
if (!using || using <= 1) {
await closeFn()
} else {
cls.setOnContext(usingKey, using - 1)
}
}
export const setAppTenantId = (appId: string) => {
const appTenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID
updateTenantId(appTenantId)
}
export const setIdentity = (identity: IdentityContext | null) => {
cls.setOnContext(ContextKeys.IDENTITY, identity)
}
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
export async function closeAppDBs() {
const dbKeys = [
ContextKeys.CURRENT_DB,
ContextKeys.PROD_DB,
ContextKeys.DEV_DB,
]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
continue
}
await closeDB(db)
// clear the DB from context, in case someone tries to use it again
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKeys.APP_ID)) {
cls.setOnContext(ContextKeys.APP_ID, null)
}
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
cls.setOnContext(ContextKeys.DB_OPTS, null)
}
}
export function getContextDB(key: string, opts: any) {
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key)
if (db && isEqual(opts, storedOpts)) {
return db
}
const appId = getAppId()
let toUseAppId
switch (key) {
case ContextKeys.CURRENT_DB:
toUseAppId = appId
break
case ContextKeys.PROD_DB:
toUseAppId = getProdAppID(appId)
break
case ContextKeys.DEV_DB:
toUseAppId = getDevelopmentAppID(appId)
break
}
db = dangerousGetDB(toUseAppId, opts)
try {
cls.setOnContext(key, db)
if (opts) {
cls.setOnContext(dbOptsKey, opts)
}
} catch (err) {
if (!env.isTest()) {
throw err
}
}
return db
}
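A short sketch of the reference counting these helpers implement, assuming the exports shown earlier in this diff: nested calls for the same tenant bump the *_IN_USE counter instead of opening a second connection, and the close only runs once the outermost call finishes.

import { doInTenant, getGlobalDB } from "./index"

await doInTenant("acme", async () => {
  const outer = getGlobalDB()
  await doInTenant("acme", async () => {
    const inner = getGlobalDB() // same handle; TENANCY_IN_USE was incremented
    console.log(inner === outer) // true: the tests above assert one open and one close
  })
}) // the global DB is closed here, once the usage count drops back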

@ -11,8 +11,8 @@ export enum AutomationViewModes {
}
export enum ViewNames {
USER_BY_EMAIL = "by_email",
USER_BY_APP = "by_app",
USER_BY_EMAIL = "by_email2",
BY_API_KEY = "by_api_key",
USER_BY_BUILDERS = "by_builders",
LINK = "by_link",
@ -20,6 +20,13 @@ export enum ViewNames {
AUTOMATION_LOGS = "automation_logs",
}
export const DeprecatedViews = {
[ViewNames.USER_BY_EMAIL]: [
// removed due to inaccuracy in view doc filter logic
"by_email",
],
}
export enum DocumentTypes {
USER = "us",
GROUP = "gr",

@ -1,10 +1,18 @@
const pouch = require("./pouch")
const env = require("../environment")
const openDbs = []
let PouchDB
let initialised = false
const dbList = new Set()
if (env.MEMORY_LEAK_CHECK) {
setInterval(() => {
console.log("--- OPEN DBS ---")
console.log(openDbs)
}, 5000)
}
const put =
dbPut =>
async (doc, options = {}) => {
@ -35,6 +43,9 @@ exports.dangerousGetDB = (dbName, opts) => {
dbList.add(dbName)
}
const db = new PouchDB(dbName, opts)
if (env.MEMORY_LEAK_CHECK) {
openDbs.push(db.name)
}
const dbPut = db.put
db.put = put(dbPut)
return db
@ -46,6 +57,9 @@ exports.closeDB = async db => {
if (!db || env.isTest()) {
return
}
if (env.MEMORY_LEAK_CHECK) {
openDbs.splice(openDbs.indexOf(db.name), 1)
}
try {
// specifically await so that if there is an error, it can be ignored
return await db.close()

@ -102,6 +102,13 @@ exports.getPouch = (opts = {}) => {
}
}
if (opts.onDisk) {
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "leveldb",
}
}
if (opts.replication) {
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)

@ -1,20 +1,42 @@
const { DocumentTypes, ViewNames, SEPARATOR } = require("./constants")
const {
DocumentTypes,
ViewNames,
DeprecatedViews,
SEPARATOR,
} = require("./utils")
const { getGlobalDB } = require("../tenancy")
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: "_design/database",
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
exports.createUserEmailView = async () => {
async function removeDeprecated(db, viewName) {
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get(DESIGN_DB)
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
exports.createNewUserEmailView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
@ -22,7 +44,7 @@ exports.createUserEmailView = async () => {
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.USER}")) {
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
@ -108,7 +130,7 @@ exports.createUserBuildersView = async () => {
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewNames.USER_BY_EMAIL]: exports.createUserEmailView,
[ViewNames.USER_BY_EMAIL]: exports.createNewUserEmailView,
[ViewNames.BY_API_KEY]: exports.createApiKeyView,
[ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
[ViewNames.USER_BY_APP]: exports.createUserAppView,
@ -126,6 +148,7 @@ exports.queryGlobalView = async (viewName, params, db = null) => {
} catch (err) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return exports.queryGlobalView(viewName, params)
} else {
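The separator added to the map function's prefix check above matters because DocumentTypes.USER is just "us"; without it, any ID that merely starts with those letters would be indexed. A quick illustration (the "_" separator and the "usage_..." ID are assumptions for the example):

// old filter: any ID beginning "us" matched
"us_abc123".startsWith("us")       // true, a user document
"usage_abc".startsWith("us")       // true, a false positive
// new filter: the separator is required
"usage_abc".startsWith("us" + "_") // false, correctly excluded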

@ -54,6 +54,7 @@ const env = {
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
SERVICE: process.env.SERVICE || "budibase",
MEMORY_LEAK_CHECK: process.env.MEMORY_LEAK_CHECK || false,
DEPLOYMENT_ENVIRONMENT:
process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
_set(key: any, value: any) {

@ -2,7 +2,7 @@ import PostHog from "posthog-node"
import { Event, Identity, Group, BaseEvent } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"
import context from "../../context"
import * as context from "../../context"
const pkg = require("../../../package.json")
export default class PosthogProcessor implements EventProcessor {

@ -9,7 +9,7 @@ import {
getGlobalDBName,
getTenantId,
} from "../tenancy"
import context from "../context"
import * as context from "../context"
import { DEFINITIONS } from "."
import {
Migration,

@ -75,9 +75,11 @@ export const ObjectStore = (bucket: any) => {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
params: {
}
if (bucket) {
config.params = {
Bucket: sanitizeBucket(bucket),
},
}
}
if (env.MINIO_URL) {
config.endpoint = env.MINIO_URL
@ -292,6 +294,7 @@ export const uploadDirectory = async (
}
}
await Promise.all(uploads)
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {

@ -764,6 +764,11 @@
"@types/koa-compose" "*"
"@types/node" "*"
"@types/lodash@4.14.180":
version "4.14.180"
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.180.tgz#4ab7c9ddfc92ec4a887886483bc14c79fb380670"
integrity sha512-XOKXa1KIxtNXgASAnwj7cnttJxS4fksBRywK/9LzRV5YxrF80BXZIGeQSuoESQ/VkUj30Ae0+YcuHc15wJCB2g==
"@types/mime@^1":
version "1.3.2"
resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "^1.0.220-alpha.4",
"@budibase/string-templates": "^1.1.15-alpha.2",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

File diff suppressed because it is too large

@ -100,24 +100,18 @@ filterTests(['smoke', 'all'], () => {
})
it("should create the first application from scratch, using the users first name as the default app name", () => {
cy.visit(`${Cypress.config().baseUrl}/builder`)
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000 })
cy.updateUserInformation("Ted", "Userman")
cy.createApp("", false)
cy.visit(`${Cypress.config().baseUrl}/builder`)
cy.applicationInAppTable("Teds app")
cy.deleteApp("Teds app")
//Accommodate names that end in 'S'
// Accommodate names that end in 'S'
cy.updateUserInformation("Chris", "Userman")
cy.createApp("", false)
cy.visit(`${Cypress.config().baseUrl}/builder`)
cy.applicationInAppTable("Chris app")
cy.deleteApp("Chris app")

@ -4,7 +4,7 @@ Cypress.on("uncaught:exception", () => {
// ACCOUNTS & USERS
Cypress.Commands.add("login", (email, password) => {
cy.visit(`${Cypress.config().baseUrl}/builder`)
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000 })
cy.wait(2000)
cy.url().then(url => {
if (url.includes("builder/admin")) {
@ -139,7 +139,9 @@ Cypress.Commands.add("createApp", (name, addDefaultTable) => {
cy.get(`[data-cy="create-app-btn"]`, { timeout: 5000 }).click({ force: true })
// If apps already exist
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`)
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`, {
timeout: 5000,
})
.its("body")
.then(val => {
if (val.length > 0) {
@ -223,9 +225,11 @@ Cypress.Commands.add("deleteApp", name => {
})
Cypress.Commands.add("deleteAllApps", () => {
cy.visit(`${Cypress.config().baseUrl}/builder`)
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000 })
cy.wait(500)
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`)
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`, {
timeout: 5000,
})
.its("body")
.then(val => {
for (let i = 0; i < val.length; i++) {
@ -377,7 +381,7 @@ Cypress.Commands.add("searchForApplication", appName => {
// Assumes there are no others
Cypress.Commands.add("applicationInAppTable", appName => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 10000 })
cy.get(".appTable", { timeout: 2000 }).within(() => {
cy.get(".appTable", { timeout: 5000 }).within(() => {
cy.get(".title").contains(appName).should("exist")
})
})

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -69,10 +69,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^1.0.220-alpha.4",
"@budibase/client": "^1.0.220-alpha.4",
"@budibase/frontend-core": "^1.0.220-alpha.4",
"@budibase/string-templates": "^1.0.220-alpha.4",
"@budibase/bbui": "^1.1.15-alpha.2",
"@budibase/client": "^1.1.15-alpha.2",
"@budibase/frontend-core": "^1.1.15-alpha.2",
"@budibase/string-templates": "^1.1.15-alpha.2",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
@ -113,7 +113,7 @@
"rollup": "^2.44.0",
"rollup-plugin-copy": "^3.4.0",
"start-server-and-test": "^1.12.1",
"svelte": "^3.48.0",
"svelte": "^3.49.0",
"svelte-jester": "^1.3.2",
"ts-node": "^10.4.0",
"tsconfig-paths": "4.0.0",

@ -52,8 +52,8 @@ export default class IntercomClient {
* @param {Object} user - user to identify
* @returns Intercom global object
*/
show(user = {}) {
if (!this.initialised || !user?.admin) return
show(user = {}, enabled) {
if (!this.initialised || !enabled) return
return window.Intercom("boot", {
app_id: this.token,

@ -12,6 +12,7 @@
notifications,
Modal,
} from "@budibase/bbui"
import { ActionStepID } from "constants/backend/automations"
export let automation
let testDataModal
@ -82,7 +83,7 @@
in:fly|local={{ x: 500, duration: 500 }}
out:fly|local={{ x: 500, duration: 500 }}
>
{#if block.stepId !== "LOOP"}
{#if block.stepId !== ActionStepID.LOOP}
<FlowItem {testDataModal} {block} />
{/if}
</div>

@ -10,11 +10,15 @@
Select,
ActionButton,
notifications,
Label,
} from "@budibase/bbui"
import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
import ActionModal from "./ActionModal.svelte"
import FlowItemHeader from "./FlowItemHeader.svelte"
import RoleSelect from "components/design/settings/controls/RoleSelect.svelte"
import { ActionStepID, TriggerStepID } from "constants/backend/automations"
import { permissions } from "stores/backend"
export let block
export let testDataModal
@ -23,9 +27,12 @@
let actionModal
let blockComplete
let showLooping = false
let role
$: automationId = $automationStore.selectedAutomation?.automation._id
$: showBindingPicker =
block.stepId === "CREATE_ROW" || block.stepId === "UPDATE_ROW"
block.stepId === ActionStepID.CREATE_ROW ||
block.stepId === ActionStepID.UPDATE_ROW
$: isTrigger = block.type === "TRIGGER"
@ -45,6 +52,32 @@
x => x.blockToLoop === block.id
)
$: setPermissions(role)
$: getPermissions(automationId)
async function setPermissions(role) {
if (!role || !automationId) {
return
}
await permissions.save({
level: "execute",
role,
resource: automationId,
})
}
async function getPermissions(automationId) {
if (!automationId) {
return
}
const perms = await permissions.forResource(automationId)
if (!perms["execute"]) {
role = "BASIC"
} else {
role = perms["execute"]
}
}
async function removeLooping() {
loopingSelected = false
let loopBlock =
@ -205,6 +238,10 @@
</div>
{/if}
{#if block.stepId === TriggerStepID.APP}
<Label>Role</Label>
<RoleSelect bind:value={role} />
{/if}
<AutomationBlockSetup
schemaProperties={Object.entries(block.schema.inputs.properties)}
{block}

@ -96,7 +96,7 @@
onSelect(block)
}}
>
<Icon name={blockComplete ? "ChevronUp" : "ChevronDown"} />
<Icon hoverable name={blockComplete ? "ChevronUp" : "ChevronDown"} />
</div>
</div>
</div>

@ -1,6 +1,7 @@
<script>
import { Icon, Divider, Tabs, Tab, TextArea, Label } from "@budibase/bbui"
import FlowItemHeader from "./FlowChart/FlowItemHeader.svelte"
import { ActionStepID } from "constants/backend/automations"
export let automation
export let testResults
@ -10,7 +11,7 @@
let blocks
function prepTestResults(results) {
return results?.steps.filter(x => x.stepId !== "LOOP" || [])
return results?.steps.filter(x => x.stepId !== ActionStepID.LOOP || [])
}
function textArea(results, message) {
@ -30,7 +31,7 @@
}
blocks = blocks
.concat(automation.definition.steps || [])
.filter(x => x.stepId !== "LOOP")
.filter(x => x.stepId !== ActionStepID.LOOP)
} else if (filteredResults) {
blocks = filteredResults || []
// make sure there is an ID for each block being displayed
@ -45,7 +46,7 @@
<div class="container">
{#each blocks as block, idx}
<div class="block" style={width ? `width: ${width}` : ""}>
{#if block.stepId !== "LOOP"}
{#if block.stepId !== ActionStepID.LOOP}
<FlowItemHeader
showTestStatus={true}
bind:showParameters
@ -67,27 +68,20 @@
{/if}
<div class="tabs">
<Tabs quiet noPadding selected="Input">
<Tabs noHorizPadding selected="Input">
<Tab title="Input">
<div style="padding: 10px 10px 10px 10px;">
<TextArea
minHeight="80px"
disabled
value={textArea(filteredResults?.[idx]?.inputs, "No input")}
/>
</div></Tab
>
<TextArea
minHeight="80px"
disabled
value={textArea(filteredResults?.[idx]?.inputs, "No input")}
/>
</Tab>
<Tab title="Output">
<div style="padding: 10px 10px 10px 10px;">
<TextArea
minHeight="100px"
disabled
value={textArea(
filteredResults?.[idx]?.outputs,
"No output"
)}
/>
</div>
<TextArea
minHeight="100px"
disabled
value={textArea(filteredResults?.[idx]?.outputs, "No output")}
/>
</Tab>
</Tabs>
</div>
@ -113,6 +107,7 @@
align-items: stretch;
position: relative;
flex: 1 1 auto;
padding: 0 var(--spacing-xl) var(--spacing-xl) var(--spacing-xl);
}
.block {

@ -2,6 +2,7 @@
import { Icon, Divider } from "@budibase/bbui"
import TestDisplay from "./TestDisplay.svelte"
import { automationStore } from "builderStore"
import { ActionStepID } from "constants/backend/automations"
export let automation
export let testResults
@ -16,7 +17,7 @@
}
blocks = blocks
.concat(automation.definition.steps || [])
.filter(x => x.stepId !== "LOOP")
.filter(x => x.stepId !== ActionStepID.LOOP)
} else if (testResults) {
blocks = testResults.steps || []
}

@ -11,6 +11,7 @@
Body,
Icon,
} from "@budibase/bbui"
import { TriggerStepID } from "constants/backend/automations"
let name
let selectedTrigger
@ -35,7 +36,7 @@
)
automationStore.actions.addBlockToAutomation(newBlock)
if (triggerVal.stepId === "WEBHOOK") {
if (triggerVal.stepId === TriggerStepID.WEBHOOK) {
webhookModal.show
}

@ -30,6 +30,7 @@
import { LuceneUtils } from "@budibase/frontend-core"
import { getSchemaForTable } from "builderStore/dataBinding"
import { Utils } from "@budibase/frontend-core"
import { TriggerStepID, ActionStepID } from "constants/backend/automations"
export let block
export let testData
@ -54,12 +55,13 @@
$: schema = getSchemaForTable(tableId, { searchableSchema: true }).schema
$: schemaFields = Object.values(schema || {})
$: queryLimit = tableId?.includes("datasource") ? "∞" : "1000"
$: isTrigger = block?.type === "TRIGGER"
const onChange = Utils.sequential(async (e, key) => {
try {
if (isTestModal) {
// Special case for webhook, as it requires a body, but the schema already brings back the body's contents
if (stepId === "WEBHOOK") {
if (stepId === TriggerStepID.WEBHOOK) {
automationStore.actions.addTestDataToAutomation({
body: {
[key]: e.detail,
@ -100,9 +102,9 @@
// Extract all outputs from all previous steps as available bindings
let bindings = []
for (let idx = 0; idx < blockIdx; idx++) {
let wasLoopBlock = allSteps[idx]?.stepId === "LOOP"
let wasLoopBlock = allSteps[idx]?.stepId === ActionStepID.LOOP
let isLoopBlock =
allSteps[idx]?.stepId === "LOOP" &&
allSteps[idx]?.stepId === ActionStepID.LOOP &&
allSteps.find(x => x.blockToLoop === block.id)
// If the previous block was a loop block, decrement the index so the following
@ -261,6 +263,7 @@
/>
{:else if value.customType === "table"}
<TableSelector
{isTrigger}
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
@ -343,7 +346,7 @@
<CreateWebhookModal />
</Modal>
{#if stepId === "WEBHOOK"}
{#if stepId === TriggerStepID.WEBHOOK}
<Button secondary on:click={() => webhookModal.show()}>Set Up Webhook</Button>
{/if}

@ -1,5 +1,5 @@
<script>
import { Button, Select, Input } from "@budibase/bbui"
import { Button, Select, Input, Label } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
const dispatch = createEventDispatcher()
@ -9,6 +9,7 @@
dispatch("change", e.detail)
}
let touched = false
let presets = false
const CRON_EXPRESSIONS = [
@ -36,8 +37,10 @@
</script>
<div class="block-field">
<Input on:change={onChange} {value} />
<Input on:change={onChange} {value} on:blur={() => (touched = true)} />
{#if touched && !value}
<Label><div class="error">Please specify a CRON expression</div></Label>
{/if}
<div class="presets">
<Button on:click={() => (presets = !presets)}
>{presets ? "Hide" : "Show"} Presets</Button
@ -62,4 +65,8 @@
.block-field {
padding-top: var(--spacing-s);
}
.error {
padding-top: var(--spacing-xs);
color: var(--spectrum-global-color-red-500);
}
</style>

@ -2,10 +2,16 @@
import { tables } from "stores/backend"
import { Select } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import { TableNames } from "constants"
const dispatch = createEventDispatcher()
export let value
export let isTrigger
$: filteredTables = $tables.list.filter(table => {
return !isTrigger || table._id !== TableNames.USERS
})
const onChange = e => {
value = e.detail
@ -16,7 +22,7 @@
<Select
on:change={onChange}
bind:value
options={$tables.list}
options={filteredTables}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
/>

@ -3,6 +3,7 @@
import { ModalContent } from "@budibase/bbui"
import { onMount } from "svelte"
import WebhookDisplay from "../automation/Shared/WebhookDisplay.svelte"
import { TriggerStepID } from "constants/backend/automations"
let webhookUrls = []
@ -11,7 +12,7 @@
onMount(() => {
webhookUrls = automations.map(automation => {
const trigger = automation.definition.trigger
if (trigger?.stepId === "WEBHOOK" && trigger.inputs) {
if (trigger?.stepId === TriggerStepID.WEBHOOK && trigger.inputs) {
return {
type: "Automation",
name: automation.name,

@ -69,7 +69,14 @@
{#if !hideIcon}
<div class="icon-wrapper" class:highlight={updateAvailable}>
<Icon name="Refresh" hoverable on:click={updateModal.show} />
<Icon
name="Refresh"
hoverable
on:click={updateModal.show}
tooltip={updateAvailable
? "An update is available"
: "No updates are available"}
/>
</div>
{/if}
<Modal bind:this={updateModal}>

@ -6,8 +6,8 @@
Button,
Layout,
DrawerContent,
ActionMenu,
MenuItem,
ActionButton,
Search,
} from "@budibase/bbui"
import { getAvailableActions } from "./index"
import { generate } from "shortid"
@ -22,8 +22,24 @@
export let actions
export let bindings = []
$: showAvailableActions = !actions?.length
let actionQuery
$: parsedQuery =
typeof actionQuery === "string" ? actionQuery.toLowerCase().trim() : ""
let selectedAction = actions?.length ? actions[0] : null
$: mappedActionTypes = actionTypes.reduce((acc, action) => {
let parsedName = action.name.toLowerCase().trim()
if (parsedQuery.length && parsedName.indexOf(parsedQuery) < 0) {
return acc
}
acc[action.type] = acc[action.type] || []
acc[action.type].push(action)
return acc
}, {})
// These are ephemeral bindings which only exist while executing actions
$: buttonContextBindings = getButtonContextBindings(
$currentAsset,
@ -61,7 +77,12 @@
actions = actions
}
const addAction = actionType => () => {
const toggleActionList = () => {
actionQuery = null
showAvailableActions = !showAvailableActions
}
const addAction = actionType => {
const newAction = {
parameters: {},
[EVENT_TYPE_KEY]: actionType.name,
@ -78,6 +99,11 @@
selectedAction = action
}
const onAddAction = actionType => {
addAction(actionType)
toggleActionList()
}
function handleDndConsider(e) {
actions = e.detail.items
}
@ -88,7 +114,39 @@
<DrawerContent>
<Layout noPadding gap="S" slot="sidebar">
{#if actions && actions.length > 0}
{#if showAvailableActions || !actions?.length}
<div class="actions-list">
{#if actions?.length > 0}
<div>
<ActionButton
secondary
icon={"ArrowLeft"}
on:click={toggleActionList}
>
Back
</ActionButton>
</div>
{/if}
<div class="search-wrap">
<Search placeholder="Search" bind:value={actionQuery} />
</div>
{#each Object.entries(mappedActionTypes) as [categoryId, category], idx}
<div class="heading" class:top-entry={idx === 0}>{categoryId}</div>
<ul>
{#each category as actionType}
<li on:click={onAddAction(actionType)}>
<span class="action-name">{actionType.name}</span>
</li>
{/each}
</ul>
{/each}
</div>
{/if}
{#if actions && actions.length > 0 && !showAvailableActions}
<div>
<Button secondary on:click={toggleActionList}>Add Action</Button>
</div>
<div
class="actions"
use:dndzone={{
@ -120,17 +178,9 @@
{/each}
</div>
{/if}
<ActionMenu>
<Button slot="control" secondary>Add Action</Button>
{#each actionTypes as actionType}
<MenuItem on:click={addAction(actionType)}>
{actionType.name}
</MenuItem>
{/each}
</ActionMenu>
</Layout>
<Layout noPadding>
{#if selectedActionComponent}
{#if selectedActionComponent && !showAvailableActions}
{#key selectedAction.id}
<div class="selected-action-container">
<svelte:component
@ -152,13 +202,10 @@
align-items: stretch;
gap: var(--spacing-s);
}
.action-header {
color: var(--spectrum-global-color-gray-700);
flex: 1 1 auto;
}
.action-container {
background-color: var(--background);
padding: var(--spacing-s) var(--spacing-m);
@ -182,4 +229,55 @@
.action-container.selected .action-header {
color: var(--spectrum-global-color-gray-900);
}
.actions-list > * {
padding-bottom: var(--spectrum-global-dimension-static-size-200);
}
.actions-list .heading {
padding-bottom: var(--spectrum-global-dimension-static-size-100);
padding-top: var(--spectrum-global-dimension-static-size-50);
}
.actions-list .heading.top-entry {
padding-top: 0px;
}
ul {
list-style: none;
padding: 0;
margin: 0;
}
li {
font-size: var(--font-size-s);
padding: var(--spacing-m);
border-radius: 4px;
background-color: var(--spectrum-global-color-gray-200);
transition: background-color 130ms ease-in-out, color 130ms ease-in-out,
border-color 130ms ease-in-out;
word-wrap: break-word;
}
li:not(:last-of-type) {
margin-bottom: var(--spacing-s);
}
li :global(*) {
transition: color 130ms ease-in-out;
}
li:hover {
color: var(--spectrum-global-color-gray-900);
background-color: var(--spectrum-global-color-gray-50);
cursor: pointer;
}
.action-name {
font-weight: 600;
text-transform: capitalize;
}
.heading {
font-size: var(--font-size-s);
font-weight: 600;
text-transform: uppercase;
color: var(--spectrum-global-color-gray-600);
}
</style>

@ -69,9 +69,16 @@
notifications.error("Error creating automation")
}
}
$: actionCount = value?.length
$: actionText = `${actionCount || "No"} action${
actionCount !== 1 ? "s" : ""
} set`
</script>
<div class="action-count">{actionText}</div>
<ActionButton on:click={openDrawer}>Define actions</ActionButton>
<Drawer bind:this={drawer} title={"Actions"}>
<svelte:fragment slot="description">
Define what actions to run.
@ -85,3 +92,10 @@
{key}
/>
</Drawer>
<style>
.action-count {
padding-bottom: var(--spacing-s);
font-weight: 600;
}
</style>

@ -2,6 +2,7 @@
import { Select, Label, Input, Checkbox } from "@budibase/bbui"
import { automationStore } from "builderStore"
import SaveFields from "./SaveFields.svelte"
import { TriggerStepID } from "constants/backend/automations"
export let parameters = {}
export let bindings = []
@ -16,7 +17,7 @@
: AUTOMATION_STATUS.NEW
$: automations = $automationStore.automations
.filter(a => a.definition.trigger?.stepId === "APP")
.filter(a => a.definition.trigger?.stepId === TriggerStepID.APP)
.map(automation => {
const schema = Object.entries(
automation.definition.trigger.inputs.fields || {}

@ -2,6 +2,7 @@
"actions": [
{
"name": "Save Row",
"type": "data",
"component": "SaveRow",
"context": [
{
@ -12,6 +13,7 @@
},
{
"name": "Duplicate Row",
"type": "data",
"component": "DuplicateRow",
"context": [
{
@ -22,14 +24,17 @@
},
{
"name": "Delete Row",
"type": "data",
"component": "DeleteRow"
},
{
"name": "Navigate To",
"type": "application",
"component": "NavigateTo"
},
{
"name": "Execute Query",
"type": "data",
"component": "ExecuteQuery",
"context": [
{
@ -40,43 +45,53 @@
},
{
"name": "Trigger Automation",
"type": "application",
"component": "TriggerAutomation"
},
{
"name": "Update Field Value",
"type": "form",
"component": "UpdateFieldValue"
},
{
"name": "Validate Form",
"type": "form",
"component": "ValidateForm"
},
{
"name": "Change Form Step",
"type": "form",
"component": "ChangeFormStep"
},
{
"name": "Clear Form",
"type": "form",
"component": "ClearForm"
},
{
"name": "Log Out",
"type": "application",
"component": "LogOut"
},
{
"name": "Close Screen Modal",
"type": "application",
"component": "CloseScreenModal"
},
{
"name": "Refresh Data Provider",
"type": "data",
"component": "RefreshDataProvider"
},
{
"name": "Update State",
"type": "data",
"component": "UpdateState",
"dependsOnFeature": "state"
},
{
"name": "Upload File to S3",
"type": "data",
"component": "S3Upload",
"context": [
{
@ -87,12 +102,14 @@
},
{
"name": "Export Data",
"type": "data",
"component": "ExportData"
},
{
"name": "Continue if / Stop if",
"type": "logic",
"component": "ContinueIf",
"dependsOnFeature": "continueIfAction"
}
]
}
}

@ -25,6 +25,7 @@
export let otherSources
export let showAllQueries
export let bindings = []
export let showDataProviders = true
const dispatch = createEventDispatcher()
const arrayTypes = ["attachment", "array"]
@ -258,7 +259,7 @@
{/each}
</ul>
{/if}
{#if dataProviders?.length}
{#if showDataProviders && dataProviders?.length}
<Divider size="S" />
<div class="title">
<Heading size="XS">Data Providers</Heading>

@ -4,4 +4,10 @@
const otherSources = [{ name: "Custom", label: "Custom" }]
</script>
<DataSourceSelect on:change {...$$props} showAllQueries={true} {otherSources} />
<DataSourceSelect
on:change
{...$$props}
showAllQueries={true}
showDataProviders={false}
{otherSources}
/>

@ -23,7 +23,7 @@
<ActionButton noPadding size="S" icon="Close" quiet on:click={close} />
</div>
</div>
<Layout paddingX="XL" gap="S">
<Layout paddingY="XL" paddingX="XL" gap="S">
<div class="icon">
<Icon name="Clock" />
<DateTimeRenderer value={history.createdAt} />
@ -71,7 +71,6 @@
}
.bottom {
margin-top: var(--spacing-m);
border-top: var(--border-light);
padding-top: calc(var(--spacing-xl) * 2);
padding-bottom: calc(var(--spacing-xl) * 2);

View file

@ -119,7 +119,7 @@
</script>
<div class="root" class:panelOpen={showPanel}>
<Layout paddingX="XL" gap="S" alignContent="start">
<Layout noPadding gap="M" alignContent="start">
<div class="search">
<div class="select">
<Select
@ -147,16 +147,28 @@
</div>
</div>
{#if runHistory}
<Table
on:click={viewDetails}
schema={runHistorySchema}
allowSelectRows={false}
allowEditColumns={false}
allowEditRows={false}
data={runHistory}
{customRenderers}
placeholderText="No history found"
/>
<div>
<Table
on:click={viewDetails}
schema={runHistorySchema}
allowSelectRows={false}
allowEditColumns={false}
allowEditRows={false}
data={runHistory}
{customRenderers}
placeholderText="No history found"
border={false}
/>
<div class="pagination">
<Pagination
page={$pageInfo.pageNumber}
hasPrevPage={$pageInfo.loading ? false : $pageInfo.hasPrevPage}
hasNextPage={$pageInfo.loading ? false : $pageInfo.hasNextPage}
goToPrevPage={pageInfo.prevPage}
goToNextPage={pageInfo.nextPage}
/>
</div>
</div>
{/if}
</Layout>
<div class="panel" class:panelShow={showPanel}>
@ -169,26 +181,19 @@
/>
</div>
</div>
<div class="pagination">
<Pagination
page={$pageInfo.pageNumber}
hasPrevPage={$pageInfo.loading ? false : $pageInfo.hasPrevPage}
hasNextPage={$pageInfo.loading ? false : $pageInfo.hasNextPage}
goToPrevPage={pageInfo.prevPage}
goToNextPage={pageInfo.nextPage}
/>
</div>
<style>
.root {
display: grid;
grid-template-columns: 1fr;
height: 100%;
padding: var(--spectrum-alias-grid-gutter-medium)
var(--spectrum-alias-grid-gutter-large);
}
.search {
display: flex;
gap: var(--spacing-l);
gap: var(--spacing-xl);
width: 100%;
align-items: flex-end;
}
@ -198,15 +203,15 @@
}
.pagination {
position: absolute;
bottom: 0;
margin-bottom: var(--spacing-xl);
margin-left: var(--spacing-l);
display: flex;
flex-direction: row;
justify-content: flex-end;
margin-top: var(--spacing-xl);
}
.panel {
display: none;
background-color: var(--background);
margin-top: calc(-1 * var(--spectrum-alias-grid-gutter-medium));
}
.panelShow {

View file

@ -0,0 +1,28 @@
export const TriggerStepID = {
ROW_SAVED: "ROW_SAVED",
ROW_UPDATED: "ROW_UPDATED",
ROW_DELETED: "ROW_DELETED",
WEBHOOK: "WEBHOOK",
APP: "APP",
CRON: "CRON",
}
export const ActionStepID = {
SEND_EMAIL_SMTP: "SEND_EMAIL_SMTP",
CREATE_ROW: "CREATE_ROW",
UPDATE_ROW: "UPDATE_ROW",
DELETE_ROW: "DELETE_ROW",
OUTGOING_WEBHOOK: "OUTGOING_WEBHOOK",
EXECUTE_SCRIPT: "EXECUTE_SCRIPT",
EXECUTE_QUERY: "EXECUTE_QUERY",
SERVER_LOG: "SERVER_LOG",
DELAY: "DELAY",
FILTER: "FILTER",
QUERY_ROWS: "QUERY_ROWS",
LOOP: "LOOP",
// these used to be lowercase step IDs, maintain for backwards compat
discord: "discord",
slack: "slack",
zapier: "zapier",
integromat: "integromat",
}

View file

@ -3,6 +3,7 @@
import { roles, flags } from "stores/backend"
import { Icon, Tabs, Tab, Heading, notifications } from "@budibase/bbui"
import RevertModal from "components/deploy/RevertModal.svelte"
import VersionModal from "components/deploy/VersionModal.svelte"
import DeployNavigation from "components/deploy/DeployNavigation.svelte"
import { API } from "api"
import { isActive, goto, layout, redirect } from "@roxi/routify"
@ -107,6 +108,7 @@
</Tabs>
</div>
<div class="toprightnav">
<VersionModal />
<RevertModal />
<Icon
name="Visibility"

View file

@ -28,12 +28,15 @@
}
drawer.hide()
}
$: conditionCount = componentInstance?._conditions?.length
$: conditionText = `${conditionCount || "No"} condition${
conditionCount !== 1 ? "s" : ""
} set`
</script>
<DetailSummary
name={`Conditions${componentInstance?._conditions ? " *" : ""}`}
collapsible={false}
>
<DetailSummary name={"Conditions"} collapsible={false}>
<div class="conditionCount">{conditionText}</div>
<div>
<ActionButton on:click={openDrawer}>Configure conditions</ActionButton>
</div>
@ -45,3 +48,10 @@
<Button cta slot="buttons" on:click={() => save()}>Save</Button>
<ConditionalUIDrawer slot="body" bind:conditions={tempValue} {bindings} />
</Drawer>
<style>
.conditionCount {
font-weight: 600;
margin-top: -5px;
}
</style>

View file

@ -208,11 +208,6 @@
<span class="overview-wrap">
<Page wide noPadding>
{#await promise}
<span class="page-header">
<ActionButton secondary icon={"ArrowLeft"} on:click={backToAppList}>
Back
</ActionButton>
</span>
<div class="loading">
<ProgressCircle size="XL" />
</div>
@ -404,7 +399,7 @@
line-height: 1em;
margin-bottom: var(--spacing-s);
}
.tab-wrap :global(.spectrum-Tabs) {
.tab-wrap :global(> .spectrum-Tabs) {
padding-left: var(--spectrum-alias-grid-gutter-large);
padding-right: var(--spectrum-alias-grid-gutter-large);
}

View file

@ -58,16 +58,16 @@
</Layout>
</span>
<span class="version-section">
<Layout gap="XS" paddingY="XXL" paddingX="">
<Layout gap="XS" noPadding>
<Heading size="S">App version</Heading>
<Divider />
<Body>
{#if updateAvailable}
<p class="version-status">
<Body>
The app is currently using version
<strong>{$store.version}</strong>
but version <strong>{clientPackage.version}</strong> is available.
</p>
</Body>
{:else}
<p class="version-status">
The app is currently using version

View file

@ -90,8 +90,8 @@ export function createQueriesStore() {
    // Create a basic schema from the unique fields returned by the server,
    // defaulting any field without a known type to string
const schema = {}
for (let field of result.schemaFields) {
schema[field] = "string"
for (let [field, type] of Object.entries(result.schemaFields)) {
schema[field] = type || "string"
}
return { ...result, schema, rows: result.rows || [] }
},

View file

@ -58,17 +58,20 @@ export function createAuthStore() {
.activate()
.then(() => {
analytics.identify(user._id)
analytics.showChat({
email: user.email,
created_at: (user.createdAt || Date.now()) / 1000,
name: user.account?.name,
user_id: user._id,
tenant: user.tenantId,
admin: user?.admin?.global,
builder: user?.builder?.global,
"Company size": user.account?.size,
"Job role": user.account?.profession,
})
analytics.showChat(
{
email: user.email,
created_at: (user.createdAt || Date.now()) / 1000,
name: user.account?.name,
user_id: user._id,
tenant: user.tenantId,
admin: user?.admin?.global,
builder: user?.builder?.global,
"Company size": user.account?.size,
"Job role": user.account?.profession,
},
!!user?.account
)
})
.catch(() => {
// This request may fail due to browser extensions blocking requests

View file

@ -5732,10 +5732,10 @@ svelte-portal@0.1.0:
resolved "https://registry.yarnpkg.com/svelte-portal/-/svelte-portal-0.1.0.tgz#cc2821cc84b05ed5814e0218dcdfcbebc53c1742"
integrity sha512-kef+ksXVKun224mRxat+DdO4C+cGHla+fEcZfnBAvoZocwiaceOfhf5azHYOPXSSB1igWVFTEOF3CDENPnuWxg==
svelte@^3.48.0:
version "3.48.0"
resolved "https://registry.yarnpkg.com/svelte/-/svelte-3.48.0.tgz#f98c866d45e155bad8e1e88f15f9c03cd28753d3"
integrity sha512-fN2YRm/bGumvjUpu6yI3BpvZnpIm9I6A7HR4oUNYd7ggYyIwSA/BX7DJ+UXXffLp6XNcUijyLvttbPVCYa/3xQ==
svelte@^3.49.0:
version "3.49.0"
resolved "https://registry.yarnpkg.com/svelte/-/svelte-3.49.0.tgz#5baee3c672306de1070c3b7888fc2204e36a4029"
integrity sha512-+lmjic1pApJWDfPCpUUTc1m8azDqYCG1JN9YEngrx/hUyIcFJo6VZhj0A1Ai0wqoHcEIuQy+e9tk+4uDgdtsFA==
symbol-tree@^3.2.4:
version "3.2.4"

View file

@ -4,3 +4,4 @@ nginx.conf
build/
docker-error.log
envoy.yaml
*.tar.gz

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@ -9,28 +9,43 @@
"author": "Budibase",
"license": "GPL-3.0",
"scripts": {
"build": "pkg . --out-path build"
"prebuild": "rm -rf prebuilds 2> /dev/null && cp -r node_modules/leveldown/prebuilds prebuilds",
"build": "yarn prebuild && renamer --find .node --replace .fake 'prebuilds/**' && pkg . --out-path build && yarn postbuild",
"postbuild": "rm -rf prebuilds 2> /dev/null"
},
"pkg": {
"targets": [
"node14-linux",
"node14-win",
"node14-macos"
"node16-linux",
"node16-win",
"node16-macos"
],
"assets": [
"node_modules/@budibase/backend-core/dist/**/*",
"prebuilds/**/*"
],
"outputPath": "build"
},
"dependencies": {
"axios": "^0.21.1",
"chalk": "^4.1.0",
"commander": "^7.1.0",
"docker-compose": "^0.23.6",
"inquirer": "^8.0.0",
"lookpath": "^1.1.0",
"pkg": "^5.3.0",
"@budibase/backend-core": "^1.1.15-alpha.1",
"axios": "0.21.1",
"chalk": "4.1.0",
"cli-progress": "3.11.2",
"commander": "7.1.0",
"docker-compose": "0.23.6",
"dotenv": "16.0.1",
"inquirer": "8.0.0",
"lookpath": "1.1.0",
"node-fetch": "2",
"pkg": "5.7.0",
"posthog-node": "1.0.7",
"randomstring": "^1.1.5"
"pouchdb": "7.3.0",
"pouchdb-replication-stream": "1.2.9",
"randomstring": "1.1.5",
"tar": "6.1.11"
},
"devDependencies": {
"eslint": "^7.20.0"
"copyfiles": "^2.4.1",
"eslint": "^7.20.0",
"renamer": "^4.0.0"
}
}

View file

@ -0,0 +1,121 @@
const Command = require("../structures/Command")
const { CommandWords } = require("../constants")
const fs = require("fs")
const { join } = require("path")
const { getAllDbs } = require("../core/db")
const tar = require("tar")
const { progressBar } = require("../utils")
const {
TEMP_DIR,
COUCH_DIR,
MINIO_DIR,
getConfig,
replication,
getPouches,
} = require("./utils")
const { exportObjects, importObjects } = require("./objectStore")
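// Replicates every CouchDB database and each MinIO bucket into a local temp
// directory, then packages both into a single gzipped tarball.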
async function exportBackup(opts) {
const envFile = opts.env || undefined
let filename = opts["export"] || opts
if (typeof filename !== "string") {
filename = `backup-${new Date().toISOString()}.tar.gz`
}
const config = await getConfig(envFile)
const dbList = await getAllDbs(config["COUCH_DB_URL"])
const { Remote, Local } = getPouches(config)
if (fs.existsSync(TEMP_DIR)) {
fs.rmSync(TEMP_DIR, { recursive: true })
}
const couchDir = join(TEMP_DIR, COUCH_DIR)
fs.mkdirSync(TEMP_DIR)
fs.mkdirSync(couchDir)
console.log("CouchDB Export")
const bar = progressBar(dbList.length)
let count = 0
for (let db of dbList) {
bar.update(++count)
const remote = new Remote(db)
const local = new Local(join(TEMP_DIR, COUCH_DIR, db))
await replication(remote, local)
}
bar.stop()
console.log("S3 Export")
await exportObjects()
tar.create(
{
sync: true,
gzip: true,
file: filename,
cwd: join(TEMP_DIR),
},
[COUCH_DIR, MINIO_DIR]
)
fs.rmSync(TEMP_DIR, { recursive: true })
console.log(`Generated export file - ${filename}`)
}
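// Extracts a backup tarball into the temp directory and replicates its
// CouchDB databases and MinIO objects into the target installation.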
async function importBackup(opts) {
const envFile = opts.env || undefined
const filename = opts["import"] || opts
const config = await getConfig(envFile)
if (!filename || !fs.existsSync(filename)) {
console.error("Cannot import without specifying a valid file to import")
process.exit(-1)
}
if (fs.existsSync(TEMP_DIR)) {
fs.rmSync(TEMP_DIR, { recursive: true })
}
fs.mkdirSync(TEMP_DIR)
tar.extract({
sync: true,
cwd: join(TEMP_DIR),
file: filename,
})
const { Remote, Local } = getPouches(config)
const dbList = fs.readdirSync(join(TEMP_DIR, COUCH_DIR))
console.log("CouchDB Import")
const bar = progressBar(dbList.length)
let count = 0
for (let db of dbList) {
bar.update(++count)
const remote = new Remote(db)
const local = new Local(join(TEMP_DIR, COUCH_DIR, db))
await replication(local, remote)
}
bar.stop()
console.log("MinIO Import")
await importObjects()
console.log("Import complete")
fs.rmSync(TEMP_DIR, { recursive: true })
}
async function pickOne(opts) {
if (opts["import"]) {
return importBackup(opts)
} else if (opts["export"]) {
return exportBackup(opts)
}
}
const command = new Command(`${CommandWords.BACKUPS}`)
.addHelp(
"Allows building backups of Budibase, as well as importing a backup to a new instance."
)
.addSubOption(
"--export [filename]",
"Export a backup from an existing Budibase installation.",
exportBackup
)
.addSubOption(
"--import [filename]",
"Import a backup to a new Budibase installation.",
importBackup
)
.addSubOption(
"--env [envFile]",
"Provide an environment variable file to configure the CLI.",
pickOne
)
exports.command = command

View file

@ -0,0 +1,63 @@
const {
ObjectStoreBuckets,
ObjectStore,
retrieve,
uploadDirectory,
makeSureBucketExists,
} = require("@budibase/backend-core/objectStore")
const fs = require("fs")
const { join } = require("path")
const { TEMP_DIR, MINIO_DIR } = require("./utils")
const { progressBar } = require("../utils")
const bucketList = Object.values(ObjectStoreBuckets)
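// Downloads every object from each existing bucket into the temp minio
// directory, preserving key paths so they can be re-uploaded on import.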
exports.exportObjects = async () => {
const path = join(TEMP_DIR, MINIO_DIR)
fs.mkdirSync(path)
let fullList = []
for (let bucket of bucketList) {
const client = ObjectStore(bucket)
try {
await client.headBucket().promise()
} catch (err) {
continue
}
const list = await client.listObjectsV2().promise()
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
}
const bar = progressBar(fullList.length)
let count = 0
for (let object of fullList) {
const filename = object.Key
const data = await retrieve(object.bucket, filename)
const possiblePath = filename.split("/")
if (possiblePath.length > 1) {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
}
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
bar.update(++count)
}
bar.stop()
}
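// Re-uploads each bucket directory from an extracted backup, creating any
// missing buckets first.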
exports.importObjects = async () => {
const path = join(TEMP_DIR, MINIO_DIR)
const buckets = fs.readdirSync(path)
let total = 0
buckets.forEach(bucket => {
const files = fs.readdirSync(join(path, bucket))
total += files.length
})
const bar = progressBar(total)
let count = 0
for (let bucket of buckets) {
const client = ObjectStore(bucket)
await makeSureBucketExists(client, bucket)
const files = await uploadDirectory(bucket, join(path, bucket), "/")
count += files.length
bar.update(count)
}
bar.stop()
}

View file

@ -0,0 +1,88 @@
const dotenv = require("dotenv")
const fs = require("fs")
const { string } = require("../questions")
const { getPouch } = require("../core/db")
exports.DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/"
exports.DEFAULT_MINIO = "http://localhost:10000/"
exports.TEMP_DIR = ".temp"
exports.COUCH_DIR = "couchdb"
exports.MINIO_DIR = "minio"
const REQUIRED = [
{ value: "MAIN_PORT", default: "10000" },
{ value: "COUCH_DB_URL", default: exports.DEFAULT_COUCH },
{ value: "MINIO_URL", default: exports.DEFAULT_MINIO },
{ value: "MINIO_ACCESS_KEY" },
{ value: "MINIO_SECRET_KEY" },
]
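// Fills in COUCH_DB_URL (from the main port and CouchDB credentials) and
// MINIO_URL (default) when they are not set explicitly.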
exports.checkURLs = config => {
const mainPort = config["MAIN_PORT"],
username = config["COUCH_DB_USER"],
password = config["COUCH_DB_PASSWORD"]
if (!config["COUCH_DB_URL"] && mainPort && username && password) {
config[
"COUCH_DB_URL"
] = `http://${username}:${password}@localhost:${mainPort}/db/`
}
if (!config["MINIO_URL"]) {
config["MINIO_URL"] = exports.DEFAULT_MINIO
}
return config
}
exports.askQuestions = async () => {
console.log(
"*** NOTE: use a .env file to load these parameters repeatedly ***"
)
let config = {}
for (let property of REQUIRED) {
config[property.value] = await string(property.value, property.default)
}
return config
}
exports.loadEnvironment = path => {
if (!fs.existsSync(path)) {
throw "Unable to file specified .env file"
}
const env = fs.readFileSync(path, "utf8")
const config = exports.checkURLs(dotenv.parse(env))
for (let required of REQUIRED) {
if (!config[required.value]) {
throw `Cannot find "${required.value}" property in .env file`
}
}
return config
}
// true is the default value passed by commander
exports.getConfig = async (envFile = true) => {
let config
if (envFile !== true) {
config = exports.loadEnvironment(envFile)
} else {
config = await exports.askQuestions()
}
return config
}
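// Wraps a one-off PouchDB replication in a promise so callers can await it.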
exports.replication = (from, to) => {
return new Promise((resolve, reject) => {
from.replicate
.to(to)
.on("complete", () => {
resolve()
})
.on("error", err => {
reject(err)
})
})
}
exports.getPouches = config => {
const Remote = getPouch(config["COUCH_DB_URL"])
const Local = getPouch()
return { Remote, Local }
}

View file

@ -1,4 +1,5 @@
exports.CommandWords = {
BACKUPS: "backups",
HOSTING: "hosting",
ANALYTICS: "analytics",
HELP: "help",

View file

@ -0,0 +1,38 @@
const PouchDB = require("pouchdb")
const { checkSlashesInUrl } = require("../utils")
const fetch = require("node-fetch")
/**
* Fully qualified URL including username and password, or nothing for local
*/
exports.getPouch = (url = undefined) => {
let POUCH_DB_DEFAULTS = {}
if (!url) {
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "leveldb",
}
} else {
POUCH_DB_DEFAULTS = {
prefix: url,
}
}
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
return PouchDB.defaults(POUCH_DB_DEFAULTS)
}
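// Lists every database on the CouchDB server via the _all_dbs endpoint.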
exports.getAllDbs = async url => {
const response = await fetch(
checkSlashesInUrl(encodeURI(`${url}/_all_dbs`)),
{
method: "GET",
}
)
if (response.status === 200) {
return await response.json()
} else {
throw "Cannot connect to CouchDB instance"
}
}

View file

@ -1,4 +1,5 @@
#!/usr/bin/env node
require("./prebuilds")
const { getCommands } = require("./options")
const { Command } = require("commander")
const { getHelpDescription } = require("./utils")

View file

@ -1,6 +1,7 @@
const analytics = require("./analytics")
const hosting = require("./hosting")
const backups = require("./backups")
exports.getCommands = () => {
return [hosting.command, analytics.command]
return [hosting.command, analytics.command, backups.command]
}

View file

@ -0,0 +1,34 @@
const os = require("os")
const { join } = require("path")
const fs = require("fs")
const PREBUILDS = "prebuilds"
const ARCH = `${os.platform()}-${os.arch()}`
const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH)
checkForBinaries()
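// Copies the bundled native prebuilds (renamed back from .fake to .node)
// next to the pkg binary so leveldown can load them at runtime.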
function checkForBinaries() {
const readDir = join(__filename, "..", "..", PREBUILDS, ARCH)
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
return
}
const natives = fs.readdirSync(readDir)
if (fs.existsSync(readDir)) {
fs.mkdirSync(PREBUILD_DIR, { recursive: true })
for (let native of natives) {
const filename = `${native.split(".fake")[0]}.node`
fs.cpSync(join(readDir, native), join(PREBUILD_DIR, filename))
}
}
}
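// Removes the copied prebuilds when the CLI process exits.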
function cleanup() {
if (fs.existsSync(PREBUILD_DIR)) {
fs.rmSync(PREBUILD_DIR, { recursive: true })
}
}
const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"]
events.forEach(event => {
process.on(event, cleanup)
})

View file

@ -39,8 +39,10 @@ class Command {
let executed = false
for (let opt of thisCmd.opts) {
const lookup = opt.command.split(" ")[0].replace("--", "")
if (options[lookup]) {
await opt.func(options[lookup])
if (!executed && options[lookup]) {
const input =
Object.keys(options).length > 1 ? options : options[lookup]
await opt.func(input)
executed = true
}
}

View file

@ -2,6 +2,7 @@ const chalk = require("chalk")
const fs = require("fs")
const axios = require("axios")
const path = require("path")
const progress = require("cli-progress")
exports.downloadFile = async (url, filePath) => {
filePath = path.resolve(filePath)
@ -56,3 +57,13 @@ exports.parseEnv = env => {
}
return result
}
exports.progressBar = total => {
const bar = new progress.SingleBar({}, progress.Presets.shades_classic)
bar.start(total, 0)
return bar
}
exports.checkSlashesInUrl = url => {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
}

File diff suppressed because it is too large

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^1.0.220-alpha.4",
"@budibase/frontend-core": "^1.0.220-alpha.4",
"@budibase/string-templates": "^1.0.220-alpha.4",
"@budibase/bbui": "^1.1.15-alpha.2",
"@budibase/frontend-core": "^1.1.15-alpha.2",
"@budibase/string-templates": "^1.1.15-alpha.2",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",
@ -39,7 +39,7 @@
"sanitize-html": "^2.7.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",
"svelte": "^3.38.2",
"svelte": "^3.49.0",
"svelte-apexcharts": "^1.0.2",
"svelte-flatpickr": "^3.1.0",
"svelte-spa-router": "^3.0.5"

View file

@ -323,6 +323,9 @@
position: relative;
padding: 32px;
}
.main.size--max {
padding: 0;
}
.layout--none .main {
padding: 0;
}
@ -465,6 +468,9 @@
.mobile:not(.layout--none) .main {
padding: 16px;
}
.mobile .main.size--max {
padding: 0;
}
/* Transform links into drawer */
.mobile .links {

View file

@ -98,7 +98,7 @@
})
}
})
return enrichedColumns.slice(0, 3)
return enrichedColumns.slice(0, 5)
}
// Builds a full details page URL for the card title

View file

@ -89,7 +89,7 @@
})
}
})
return enrichedColumns.slice(0, 3)
return enrichedColumns.slice(0, 5)
}
// Load the datasource schema so we can determine column types

View file

@ -1446,10 +1446,10 @@ svelte-spa-router@^3.0.5:
dependencies:
regexparam "2.0.0"
svelte@^3.38.2:
version "3.46.4"
resolved "https://registry.yarnpkg.com/svelte/-/svelte-3.46.4.tgz#0c46bc4a3e20a2617a1b7dc43a722f9d6c084a38"
integrity sha512-qKJzw6DpA33CIa+C/rGp4AUdSfii0DOTCzj/2YpSKKayw5WGSS624Et9L1nU1k2OVRS9vaENQXp2CVZNU+xvIg==
svelte@^3.49.0:
version "3.49.0"
resolved "https://registry.yarnpkg.com/svelte/-/svelte-3.49.0.tgz#5baee3c672306de1070c3b7888fc2204e36a4029"
integrity sha512-+lmjic1pApJWDfPCpUUTc1m8azDqYCG1JN9YEngrx/hUyIcFJo6VZhj0A1Ai0wqoHcEIuQy+e9tk+4uDgdtsFA==
svg.draggable.js@^2.2.2:
version "2.2.2"
@ -1536,7 +1536,7 @@ timsort@^0.3.0:
util-deprecate@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
wrap-ansi@^7.0.0:
version "7.0.0"

View file

@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "^1.0.220-alpha.4",
"@budibase/bbui": "^1.1.15-alpha.2",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

File diff suppressed because it is too large

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -77,10 +77,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "^1.0.220-alpha.4",
"@budibase/client": "^1.0.220-alpha.4",
"@budibase/pro": "1.0.220-alpha.4",
"@budibase/string-templates": "^1.0.220-alpha.4",
"@budibase/backend-core": "^1.1.15-alpha.2",
"@budibase/client": "^1.1.15-alpha.2",
"@budibase/pro": "1.1.15-alpha.2",
"@budibase/string-templates": "^1.1.15-alpha.2",
"@budibase/types": "^1.1.15-alpha.2",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
@ -136,7 +137,7 @@
"redis": "4",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
"svelte": "3.44.1",
"svelte": "3.49.0",
"swagger-parser": "10.0.3",
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
@ -151,7 +152,6 @@
"@babel/core": "7.17.4",
"@babel/preset-env": "7.16.11",
"@budibase/standard-components": "^0.9.139",
"@budibase/types": "^1.0.220-alpha.4",
"@jest/test-sequencer": "24.9.0",
"@types/apidoc": "0.50.0",
"@types/bson": "4.2.0",

View file

@ -22,9 +22,6 @@ const {
BUILTIN_ROLE_IDS,
AccessController,
} = require("@budibase/backend-core/roles")
import { BASE_LAYOUTS } from "../../constants/layouts"
import { cloneDeep } from "lodash/fp"
const { processObject } = require("@budibase/string-templates")
const { CacheKeys, bustCache } = require("@budibase/backend-core/cache")
const {
getAllApps,
@ -45,13 +42,8 @@ const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy")
import { syncGlobalUsers } from "./user"
const { app: appCache } = require("@budibase/backend-core/cache")
import { cleanupAutomations } from "../../automations/utils"
import { context } from "@budibase/backend-core"
import { checkAppMetadata } from "../../automations/logging"
const {
getAppDB,
getProdAppDB,
updateAppId,
doInAppContext,
} = require("@budibase/backend-core/context")
import { getUniqueRows } from "../../utilities/usageQuota/rows"
import { quotas } from "@budibase/pro"
import { errors, events, migrations } from "@budibase/backend-core"
@ -61,7 +53,7 @@ const URL_REGEX_SLASH = /\/|\\/g
// utility function, need to do away with this
async function getLayouts() {
const db = getAppDB()
const db = context.getAppDB()
return (
await db.allDocs(
getLayoutParams(null, {
@ -72,7 +64,7 @@ async function getLayouts() {
}
async function getScreens() {
const db = getAppDB()
const db = context.getAppDB()
return (
await db.allDocs(
getScreenParams(null, {
@ -135,9 +127,9 @@ async function createInstance(template: any) {
const tenantId = isMultiTenant() ? getTenantId() : null
const baseAppId = generateAppID(tenantId)
const appId = generateDevAppID(baseAppId)
await updateAppId(appId)
await context.updateAppId(appId)
const db = getAppDB()
const db = context.getAppDB()
await db.put({
_id: "_design/database",
// view collation information, read before writing any complex views:
@ -213,7 +205,7 @@ export const fetchAppDefinition = async (ctx: any) => {
}
export const fetchAppPackage = async (ctx: any) => {
const db = getAppDB()
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const layouts = await getLayouts()
let screens = await getScreens()
@ -252,7 +244,7 @@ const performAppCreate = async (ctx: any) => {
const instance = await createInstance(instanceConfig)
const appId = instance._id
const db = getAppDB()
const db = context.getAppDB()
let _rev
try {
// if template there will be an existing doc
@ -390,7 +382,7 @@ export const update = async (ctx: any) => {
export const updateClient = async (ctx: any) => {
// Get current app version
const db = getAppDB()
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const currentVersion = application.version
@ -414,7 +406,7 @@ export const updateClient = async (ctx: any) => {
export const revertClient = async (ctx: any) => {
// Check app can be reverted
const db = getAppDB()
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
if (!application.revertableVersion) {
ctx.throw(400, "There is no version to revert to")
@ -446,7 +438,7 @@ const destroyApp = async (ctx: any) => {
appId = getProdAppID(appId)
}
const db = isUnpublish ? getProdAppDB() : getAppDB()
const db = isUnpublish ? context.getProdAppDB() : context.getAppDB()
const app = await db.get(DocumentTypes.APP_METADATA)
const result = await db.destroy()
@ -514,7 +506,7 @@ export const sync = async (ctx: any, next: any) => {
try {
// specific case, want to make sure setup is skipped
const prodDb = getProdAppDB({ skip_setup: true })
const prodDb = context.getProdAppDB({ skip_setup: true })
const info = await prodDb.info()
if (info.error) throw info.error
} catch (err) {
@ -556,8 +548,8 @@ export const sync = async (ctx: any, next: any) => {
}
const updateAppPackage = async (appPackage: any, appId: any) => {
return doInAppContext(appId, async () => {
const db = getAppDB()
return context.doInAppContext(appId, async () => {
const db = context.getAppDB()
const application = await db.get(DocumentTypes.APP_METADATA)
const newAppPackage = { ...application, ...appPackage }

View file

@ -1,5 +1,5 @@
import { generateQueryID, getQueryParams, isProdAppID } from "../../../db/utils"
import { BaseQueryVerbs } from "../../../constants"
import { BaseQueryVerbs, FieldTypes } from "../../../constants"
import { Thread, ThreadType } from "../../../threads"
import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
@ -154,10 +154,37 @@ export async function preview(ctx: any) {
},
})
const { rows, keys, info, extra } = await quotas.addQuery(runFn)
const schemaFields: any = {}
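  // Infer a basic type for each field from the first returned row,
  // defaulting to string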
if (rows?.length > 0) {
for (let key of [...new Set(keys)] as string[]) {
const field = rows[0][key]
let type = typeof field,
fieldType = FieldTypes.STRING
if (field)
switch (type) {
case "boolean":
              fieldType = FieldTypes.BOOLEAN
break
case "object":
if (field instanceof Date) {
fieldType = FieldTypes.DATETIME
} else if (Array.isArray(field)) {
fieldType = FieldTypes.ARRAY
} else {
fieldType = FieldTypes.JSON
}
break
case "number":
fieldType = FieldTypes.NUMBER
break
}
schemaFields[key] = fieldType
}
}
await events.query.previewed(datasource, query)
ctx.body = {
rows,
schemaFields: [...new Set(keys)],
schemaFields,
info,
extra,
}

View file

@ -37,9 +37,10 @@ describe("/permission", () => {
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toBeDefined()
expect(res.body.length).toEqual(2)
expect(res.body.length).toEqual(3)
expect(res.body).toContain("read")
expect(res.body).toContain("write")
expect(res.body).toContain("execute")
})
})

View file

@ -215,7 +215,10 @@ describe("/queries", () => {
.expect("Content-Type", /json/)
.expect(200)
// these responses come from the mock
expect(res.body.schemaFields).toEqual(["a", "b"])
expect(res.body.schemaFields).toEqual({
"a": "string",
"b": "number",
})
expect(res.body.rows.length).toEqual(1)
expect(events.query.previewed).toBeCalledTimes(1)
expect(events.query.previewed).toBeCalledWith(datasource, query)
@ -289,7 +292,11 @@ describe("/queries", () => {
queryString: "test={{ variable2 }}",
})
// these responses come from the mock
expect(res.body.schemaFields).toEqual(["url", "opts", "value"])
expect(res.body.schemaFields).toEqual({
"opts": "json",
"url": "string",
"value": "string",
})
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=1")
})
@ -299,7 +306,11 @@ describe("/queries", () => {
path: "www.google.com",
queryString: "test={{ variable3 }}",
})
expect(res.body.schemaFields).toEqual(["url", "opts", "value"])
expect(res.body.schemaFields).toEqual({
"opts": "json",
"url": "string",
"value": "string"
})
expect(res.body.rows[0].url).toContain("doctype html")
})
@ -318,7 +329,11 @@ describe("/queries", () => {
path: "www.failonce.com",
queryString: "test={{ variable3 }}",
})
expect(res.body.schemaFields).toEqual(["fails", "url", "opts"])
expect(res.body.schemaFields).toEqual({
"fails": "number",
"opts": "json",
"url": "string"
})
expect(res.body.rows[0].fails).toEqual(1)
})

View file

@ -46,26 +46,26 @@ describe("/rows", () => {
describe("save, load, update", () => {
it("returns a success message when the row is created", async () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await request
.post(`/api/${row.tableId}/rows`)
.send(row)
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
expect(res.res.statusMessage).toEqual(`${table.name} saved successfully`)
expect(res.body.name).toEqual("Test Contact")
expect(res.body._rev).toBeDefined()
await assertRowUsage(rowUsage + 1)
await assertQueryUsage(queryUsage + 1)
// const rowUsage = await getRowUsage()
// const queryUsage = await getQueryUsage()
//
// const res = await request
// .post(`/api/${row.tableId}/rows`)
// .send(row)
// .set(config.defaultHeaders())
// .expect('Content-Type', /json/)
// .expect(200)
// expect(res.res.statusMessage).toEqual(`${table.name} saved successfully`)
// expect(res.body.name).toEqual("Test Contact")
// expect(res.body._rev).toBeDefined()
// await assertRowUsage(rowUsage + 1)
// await assertQueryUsage(queryUsage + 1)
})
it("updates a row successfully", async () => {
const existing = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
// const rowUsage = await getRowUsage()
// const queryUsage = await getQueryUsage()
const res = await request
.post(`/api/${table._id}/rows`)
@ -78,11 +78,11 @@ describe("/rows", () => {
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
expect(res.res.statusMessage).toEqual(`${table.name} updated successfully.`)
expect(res.body.name).toEqual("Updated Name")
await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage + 1)
// await assertRowUsage(rowUsage)
// await assertQueryUsage(queryUsage + 1)
})
it("should load a row", async () => {

View file

@ -20,7 +20,6 @@ import redis from "./utilities/redis"
import * as migrations from "./migrations"
import { events, installation, tenancy } from "@budibase/backend-core"
import { createAdminUser, getChecklist } from "./utilities/workerRequests"
import { tenantSucceeded } from "@budibase/backend-core/dist/src/events/publishers/backfill"
const app = new Koa()

View file

@ -97,7 +97,7 @@ export async function enableCronTrigger(appId: any, automation: any) {
)
}
// need to create cron job
if (isCronTrigger(automation)) {
if (isCronTrigger(automation) && trigger?.inputs.cron) {
// make a job id rather than letting Bull decide, makes it easier to handle on way out
const jobId = `${appId}_cron_${newid()}`
const job: any = await queue.add(

View file

@ -53,7 +53,7 @@ const INTEGRATIONS = {
}
// optionally add oracle integration if the oracle binary can be installed
if (!(process.arch === "arm64" && process.platform === "darwin")) {
if (process.arch && !process.arch.startsWith("arm")) {
const oracle = require("./oracle")
DEFINITIONS[SourceNames.ORACLE] = oracle.schema
INTEGRATIONS[SourceNames.ORACLE] = oracle.integration

View file

@ -4,15 +4,13 @@ const { getGlobalDB, doInTenant } = require("@budibase/backend-core/tenancy")
// mock email view creation
const coreDb = require("@budibase/backend-core/db")
const createUserEmailView = jest.fn()
coreDb.createUserEmailView = createUserEmailView
const createNewUserEmailView = jest.fn()
coreDb.createNewUserEmailView = createNewUserEmailView
const migration = require("../userEmailViewCasing")
describe("run", () => {
doInTenant(TENANT_ID, () => {
let config = new TestConfig(false)
const globalDb = getGlobalDB()
beforeEach(async () => {
await config.init()
@ -21,8 +19,10 @@ describe("run", () => {
afterAll(config.end)
it("runs successfully", async () => {
await migration.run(globalDb)
expect(createUserEmailView).toHaveBeenCalledTimes(1)
await doInTenant(TENANT_ID, async () => {
const globalDb = getGlobalDB()
await migration.run(globalDb)
expect(createNewUserEmailView).toHaveBeenCalledTimes(1)
})
})
})
})

View file

@ -1,4 +1,4 @@
const { createUserEmailView } = require("@budibase/backend-core/db")
const { createNewUserEmailView } = require("@budibase/backend-core/db")
/**
* Date:
@ -9,5 +9,5 @@ const { createUserEmailView } = require("@budibase/backend-core/db")
*/
export const run = async (db: any) => {
await createUserEmailView(db)
await createNewUserEmailView(db)
}

View file

@ -106,21 +106,31 @@ class TestConfiguration {
// UTILS
async _req(config, params, controlFunc) {
async _req(body, params, controlFunc, opts = { prodApp: false }) {
// create a fake request ctx
const request = {}
// set the app id
let appId
if (opts.prodApp) {
appId = this.prodAppId
} else {
appId = this.appId
}
request.appId = appId
// fake cookies, we don't need them
request.cookies = { set: () => {}, get: () => {} }
request.config = { jwtSecret: env.JWT_SECRET }
request.appId = this.appId
request.user = { appId: this.appId, tenantId: TENANT_ID }
request.user = { appId, tenantId: TENANT_ID }
request.query = {}
request.request = {
body: config,
body,
}
return this.doInContext(this.appId, async () => {
if (params) {
request.params = params
}
if (params) {
request.params = params
}
return this.doInContext(appId, async () => {
await controlFunc(request)
return request.body
})
@ -323,7 +333,6 @@ class TestConfiguration {
// create production app
this.prodApp = await this.deploy()
this.prodAppId = this.prodApp.appId
this.allApps.push(this.prodApp)
this.allApps.push(this.app)
@ -334,11 +343,13 @@ class TestConfiguration {
async deploy() {
await this._req(null, null, controllers.deploy.deployApp)
const prodAppId = this.getAppId().replace("_dev", "")
this.prodAppId = prodAppId
return context.doInAppContext(prodAppId, async () => {
const appPackage = await this._req(
null,
{ appId: prodAppId },
controllers.app.fetchAppPackage
controllers.app.fetchAppPackage,
{ prodApp: true }
)
return appPackage.application
})

View file

@ -13,6 +13,7 @@ const { DocumentTypes } = require("../db/utils")
const CURRENTLY_SUPPORTED_LEVELS = [
PermissionLevels.WRITE,
PermissionLevels.READ,
PermissionLevels.EXECUTE,
]
exports.getPermissionType = resourceId => {

File diff suppressed because it is too large

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/types",
"version": "1.0.220-alpha.4",
"version": "1.1.15-alpha.2",
"description": "Budibase types",
"main": "dist/index.js",
"types": "dist/index.d.ts",

View file

@ -4,6 +4,7 @@ import { IdentityType } from "./events/identification"
export interface BaseContext {
_id: string
type: IdentityType
tenantId?: string
}
export interface AccountUserContext extends BaseContext {
@ -13,6 +14,7 @@ export interface AccountUserContext extends BaseContext {
export interface UserContext extends BaseContext, User {
_id: string
tenantId: string
account?: Account
}

Some files were not shown because too many files have changed in this diff