
Merge branch 'develop' of github.com:Budibase/budibase into cheeks-lab-day-grid

This commit is contained in:
Andrew Kingston 2022-09-20 17:23:52 +01:00
commit 2c3f02347e
496 changed files with 20139 additions and 8917 deletions

24
.github/ISSUE_TEMPLATE/epic.md vendored Normal file
View file

@ -0,0 +1,24 @@
---
name: Epic
about: Plan a new project
title: ''
labels: epic
assignees: ''
---
## Description
Brief summary of what this Epic is, whether it's a larger project, goal, or user story. Describe the job to be done and which persona this Epic is mainly for, or, if there are multiple, break it down by user and job story.
## Spec
Link to confluence spec
## Teams and Stakeholders
Describe who needs to be kept up to date about this Epic, included in discussions, or updated along the way. Stakeholders can be in Product/Engineering as well as in other teams, such as Customer Success, that might want to keep customers updated on the Epic project.
## Workflow
- [ ] Spec Created and pasted above
- [ ] Product Review
- [ ] Designs created
- [ ] Individual Tasks created and assigned to Epic

View file

@ -59,3 +59,9 @@ jobs:
with:
install: false
command: yarn test:e2e:ci
- name: QA Core Integration Tests
run: |
cd qa-core
yarn
yarn api:test:ci

5
.gitignore vendored
View file

@ -63,6 +63,7 @@ typings/
# dotenv environment variables file
.env
!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
@ -102,4 +103,6 @@ packages/builder/cypress/reports
stats.html
# TypeScript cache
*.tsbuildinfo
*.tsbuildinfo
budibase-component
budibase-datasource

View file

@ -8,4 +8,4 @@ packages/server/client
packages/server/src/definitions/openapi.ts
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js

View file

@ -4,7 +4,7 @@
"singleQuote": false,
"trailingComma": "es5",
"arrowParens": "avoid",
"jsxBracketSameLine": false,
"bracketSameLine": false,
"plugins": ["prettier-plugin-svelte"],
"svelteSortOrder": "options-scripts-markup-styles"
}

View file

@ -65,7 +65,7 @@ Budibase is open-source - licensed as GPL v3. This should fill you with confiden
<br /><br />
### Load data or start from scratch
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no datasources. [Request new datasources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">

View file

@ -124,11 +124,31 @@ spec:
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
value: { { .Values.globals.bbAdminUserEmail | quote } }
value: {{ .Values.globals.bbAdminUserEmail | quote }}
{{ end }}
{{ if .Values.globals.bbAdminUserPassword }}
- name: BB_ADMIN_USER_PASSWORD
value: { { .Values.globals.bbAdminUserPassword | quote } }
value: {{ .Values.globals.bbAdminUserPassword | quote }}
{{ end }}
{{ if .Values.globals.pluginsDir }}
- name: PLUGINS_DIR
value: {{ .Values.globals.pluginsDir | quote }}
{{ end }}
{{ if .Values.services.apps.nodeDebug }}
- name: NODE_DEBUG
value: {{ .Values.services.apps.nodeDebug | quote }}
{{ end }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
@ -142,7 +162,10 @@ spec:
name: bbapps
ports:
- containerPort: {{ .Values.services.apps.port }}
resources: {}
{{ with .Values.services.apps.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View file

@ -38,7 +38,10 @@ spec:
image: redgeoff/replicate-couchdb-cluster
imagePullPolicy: Always
name: couchdb-backup
resources: {}
{{ with .Values.services.couchdb.backup.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View file

@ -56,7 +56,10 @@ spec:
name: minio-service
ports:
- containerPort: {{ .Values.services.objectStore.port }}
resources: {}
{{ with .Values.services.objectStore.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: minio-data

View file

@ -30,7 +30,10 @@ spec:
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
resources: {}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
{{- with .Values.affinity }}
affinity:

View file

@ -35,7 +35,10 @@ spec:
name: redis-service
ports:
- containerPort: {{ .Values.services.redis.port }}
resources: {}
{{ with .Values.services.redis.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: redis-data

View file

@ -27,6 +27,8 @@ spec:
spec:
containers:
- env:
- name: BUDIBASE_ENVIRONMENT
value: {{ .Values.globals.budibaseEnv }}
- name: DEPLOYMENT_ENVIRONMENT
value: "kubernetes"
- name: CLUSTER_PORT
@ -125,6 +127,19 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
livenessProbe:
@ -136,7 +151,10 @@ spec:
name: bbworker
ports:
- containerPort: {{ .Values.services.worker.port }}
resources: {}
{{ with .Values.services.worker.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View file

@ -60,19 +60,6 @@ ingress:
port:
number: 10000
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
@ -114,6 +101,10 @@ globals:
smtp:
enabled: false
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
services:
budibaseVersion: latest
dns: cluster.local
@ -121,15 +112,19 @@ services:
proxy:
port: 10000
replicaCount: 1
resources: {}
apps:
port: 4002
replicaCount: 1
logLevel: info
resources: {}
# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003
replicaCount: 1
resources: {}
couchdb:
enabled: true
@ -143,6 +138,7 @@ services:
target: ""
# backup interval in seconds
interval: ""
resources: {}
redis:
enabled: true # disable if using external redis
@ -156,6 +152,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
objectStore:
minio: true
@ -172,6 +169,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
# Override values in couchDB subchart
couchdb:
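As a quick orientation on the new Helm knobs above: the following is a minimal, illustrative values override, not part of this commit, showing how the APM globals and per-service resources might be set. The resource figures simply reuse those from the removed default comment, and the APM endpoint and token are placeholders.
globals:
  elasticApmEnabled: "true"
  elasticApmSecretToken: "placeholder-token"      # placeholder value
  elasticApmServerUrl: "http://apm-server:8200"   # placeholder endpoint
services:
  apps:
    # nodeDebug: "http"                           # example NODE_DEBUG value
    resources:
      requests:
        cpu: 100m
        memory: 128Mi
      limits:
        cpu: 100m
        memory: 128Mi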

View file

@ -348,7 +348,7 @@ export interface paths {
}
}
responses: {
/** Returns the created table, including the ID which has been generated for it. This can be internal or external data sources. */
/** Returns the created table, including the ID which has been generated for it. This can be internal or external datasources. */
200: {
content: {
"application/json": components["schemas"]["tableOutput"]
@ -959,7 +959,7 @@ export interface components {
query: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
@ -983,7 +983,7 @@ export interface components {
data: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]

View file

@ -11,8 +11,8 @@
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
"node-fetch": "^3.2.2",
"node-sass": "^7.0.1",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
}
}

View file

@ -2020,10 +2020,10 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.2.tgz#16d33fbe32ca7c6ca1ca8ba5dfea1dd885c59f04"
integrity sha512-Cwhq1JFIoon15wcIkFzubVNFE5GvXGV82pKf4knXXjvGmn7RJKcypeuqcVNZMGDZsAFWyIRya/anwAJr7TWJ7w==
node-fetch@^3.2.10:
version "3.2.10"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8"
integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"

View file

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View file

@ -25,9 +25,12 @@ services:
REDIS_PASSWORD: ${REDIS_PASSWORD}
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
PLUGINS_DIR: ${PLUGINS_DIR}
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
restart: unless-stopped
@ -78,6 +81,7 @@ services:
image: budibase/proxy
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
- PROXY_RATE_LIMIT_API_PER_SECOND=20
depends_on:
- minio-service
- worker-service
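A small sketch of how the new PLUGINS_DIR wiring above might be used, assuming the container path /plugins and a placeholder host directory; the service name is omitted in this hunk, so only the relevant fragment of the apps service is shown.
    environment:
      PLUGINS_DIR: /plugins               # container-side path (assumed)
    volumes:
      - /some/path/to/plugins:/plugins    # placeholder host path, as in the commented example above
Since the compose file reads PLUGINS_DIR from the environment, setting it in hosting/.env and uncommenting the volume mount should achieve the same result.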

View file

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View file

@ -15,7 +15,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -62,10 +65,6 @@ http {
proxy_pass http://{{ address }}:4001;
}
location /preview {
proxy_pass http://{{ address }}:4001;
}
location /builder {
proxy_pass http://{{ address }}:3000;
rewrite ^/builder(.*)$ /builder/$1 break;
@ -81,6 +80,20 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
location /vite/ {
proxy_pass http://{{ address }}:3000;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://{{ address }}:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View file

@ -11,7 +11,7 @@ events {
http {
# rate limiting
limit_req_status 429;
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=${PROXY_RATE_LIMIT_API_PER_SECOND}r/s;
limit_req_zone $binary_remote_addr zone=webhooks:10m rate=${PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND}r/s;
include /etc/nginx/mime.types;
@ -33,7 +33,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -85,10 +88,6 @@ http {
proxy_pass http://$apps:4002;
}
location /preview {
proxy_pass http://$apps:4002;
}
location = / {
proxy_pass http://$apps:4002;
}
@ -159,6 +158,15 @@ http {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View file

@ -10,4 +10,5 @@ COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
COPY error.html /usr/share/nginx/html/error.html
# Default environment
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20

View file

@ -4,17 +4,20 @@ echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR=/home
mkdir -p $DATA_DIR/{search,minio,couchdb}
mkdir -p $DATA_DIR/couchdb/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couchdb/
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

View file

@ -29,23 +29,8 @@ ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
ENV \
APP_PORT=4001 \
ARCHITECTURE=amd \
BUDIBASE_ENVIRONMENT=PRODUCTION \
CLUSTER_PORT=80 \
# CUSTOM_DOMAIN=budi001.custom.com \
DATA_DIR=/data \
DEPLOYMENT_ENVIRONMENT=docker \
MINIO_URL=http://localhost:9000 \
POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU \
REDIS_URL=localhost:6379 \
SELF_HOSTED=1 \
TARGETBUILD=$TARGETBUILD \
WORKER_PORT=4002 \
WORKER_URL=http://localhost:4002 \
APPS_URL=http://localhost:4001
# ENV CUSTOM_DOMAIN=budi001.custom.com \
# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overridden by the user; they will be written
# to the .env file in the /data directory for use later on
@ -117,6 +102,8 @@ RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
ADD hosting/single/ssh/sshd_config /etc/
ADD hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# cleanup cache
@ -124,6 +111,8 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
# setup letsencrypt certificate

View file

@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = DATA_DIR/couchdb/dbs
view_index_dir = DATA_DIR/couchdb/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views

View file

@ -66,6 +66,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View file

@ -1,6 +1,21 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL")
# Check that the env vars set in the Dockerfile have come through; AAS seems to drop them
[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
@ -10,9 +25,10 @@ else
fi
if [ -f "${DATA_DIR}/.env" ]; then
export $(cat ${DATA_DIR}/.env | xargs)
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
fi
# first randomise any unset environment variables
# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
@ -30,16 +46,23 @@ if [ ! -f "${DATA_DIR}/.env" ]; then
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
for ENV_VAR in "${DOCKER_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
ln -s ${DATA_DIR}/.env /app/.env
ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in the runner, in case of a mount
mkdir -p ${DATA_DIR}/couchdb/{dbs,views}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couchdb
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server ${DATA_DIR}/minio &

View file

@ -0,0 +1,8 @@
#!/bin/sh
ssh-keygen -A
#prepare run dir
if [ ! -d "/var/run/sshd" ]; then
mkdir -p /var/run/sshd
fi

View file

@ -0,0 +1,12 @@
Port 2222
ListenAddress 0.0.0.0
LoginGraceTime 180
X11Forwarding yes
Ciphers aes128-cbc,3des-cbc,aes256-cbc,aes128-ctr,aes192-ctr,aes256-ctr
MACs hmac-sha1,hmac-sha1-96
StrictModes yes
SyslogFacility DAEMON
PasswordAuthentication yes
PermitEmptyPasswords no
PermitRootLogin yes
Subsystem sftp internal-sftp

View file

@ -1,5 +1,5 @@
{
"version": "1.2.44-alpha.8",
"version": "1.4.3-alpha.1",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -13,6 +13,7 @@
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "3.14.1",
"madge": "^5.0.1",
"prettier": "^2.3.1",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
@ -25,6 +26,7 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
@ -45,8 +47,8 @@
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix:eslint": "eslint --fix packages qa-core",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.2.44-alpha.8",
"version": "1.4.3-alpha.1",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,10 +20,12 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "1.2.44-alpha.8",
"@budibase/types": "1.4.3-alpha.1",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
@ -60,7 +62,6 @@
]
},
"devDependencies": {
"@shopify/jest-koa-mocks": "3.1.5",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",

View file

@ -0,0 +1,3 @@
module.exports = {
...require("./src/plugin"),
}

View file

@ -1,11 +1,11 @@
const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
const { getGlobalDB } = require("./tenancy")
import { getGlobalDB } from "./tenancy"
const refresh = require("passport-oauth2-refresh")
const { Configs } = require("./constants")
const { getScopedConfig } = require("./db/utils")
const {
import { Configs } from "./constants"
import { getScopedConfig } from "./db/utils"
import {
jwt,
local,
authenticated,
@ -13,7 +13,6 @@ const {
oidc,
auditLog,
tenancy,
appTenancy,
authError,
ssoCallbackUrl,
csrf,
@ -22,32 +21,36 @@ const {
builderOnly,
builderOrAdmin,
joiValidator,
} = require("./middleware")
const { invalidateUser } = require("./cache/user")
} from "./middleware"
import { invalidateUser } from "./cache/user"
import { User } from "@budibase/types"
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
passport.serializeUser((user, done) => done(null, user))
passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user, done) => {
passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
const user = await db.get(user._id)
return done(null, user)
const dbUser = await db.get(user._id)
return done(null, dbUser)
} catch (err) {
console.error(`User not found`, err)
return done(null, false, { message: "User not found" })
}
})
async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
async function refreshOIDCAccessToken(
db: any,
chosenConfig: any,
refreshToken: string
) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
let enrichedConfig
let strategy
let enrichedConfig: any
let strategy: any
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@ -70,22 +73,28 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
refresh.requestNewAccessToken(
Configs.OIDC,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshGoogleAccessToken(db, config, refreshToken) {
async function refreshGoogleAccessToken(
db: any,
config: any,
refreshToken: any
) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error("Error constructing OIDC refresh strategy", err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)
}
refresh.use(strategy)
@ -94,14 +103,18 @@ async function refreshGoogleAccessToken(db, config, refreshToken) {
refresh.requestNewAccessToken(
Configs.GOOGLE,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshOAuthToken(refreshToken, configType, configId) {
async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
@ -113,7 +126,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
let refreshResponse
if (configType === Configs.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
@ -134,7 +147,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
return refreshResponse
}
async function updateUserOAuth(userId, oAuthConfig) {
async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -162,14 +175,13 @@ async function updateUserOAuth(userId, oAuthConfig) {
}
}
module.exports = {
export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
buildAppTenancyMiddleware: appTenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,

View file

@ -7,6 +7,7 @@ exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}

View file

@ -2,7 +2,7 @@ import env from "../environment"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../tenancy/utils"
import { baseGlobalDBName } from "../db/tenancy"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKey } from "./constants"

View file

@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { DocumentType } from "./constants"
class Replication {
source: any
@ -53,6 +54,14 @@ class Replication {
return this.replication
}
appReplicateOpts() {
return {
filter: (doc: any) => {
return doc._id !== DocumentType.APP_METADATA
},
}
}
/**
* Rollback the target DB back to the state of the source DB
*/
@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}

View file

@ -18,6 +18,8 @@ export enum ViewName {
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
ACCOUNT_BY_EMAIL = "account_by_email",
PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
}
export const DeprecatedViews = {
@ -41,6 +43,8 @@ export enum DocumentType {
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
PLUGIN = "plg",
}
export const StaticDatabases = {

View file

@ -0,0 +1,22 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { StaticDatabases, SEPARATOR } from "./constants"
import { getTenantId } from "../context"
export const getGlobalDBName = (tenantId?: string) => {
// the tenant ID can be set externally, for example by the user API
// when new tenants are being created
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export const baseGlobalDBName = (tenantId: string | undefined | null) => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}

View file

@ -2,7 +2,8 @@ import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
import { getTenantId, getGlobalDB } from "../context"
import { getGlobalDBName } from "./tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
@ -15,6 +16,7 @@ import * as events from "../events"
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"
export * from "./tenancy"
/**
* Generates a new app ID.
@ -254,7 +256,16 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
return appDbNames
const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
switch (dev) {
case true:
return devAppIds
case false:
return prodAppIds
default:
return appDbNames
}
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
@ -357,6 +368,21 @@ export const generateDevInfoID = (userId: any) => {
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
/**
* Generates a new plugin ID - to be used in the global DB.
* @returns {string} The new plugin ID which a plugin metadata document can be stored under.
*/
export const generatePluginID = (name: string) => {
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
}
/**
* Gets parameters for retrieving plugins; this is a utility function for the getDocParams function.
*/
export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
}
/**
* Returns the most granular configuration document from the DB based on the type, workspace and userID passed.
* @param {Object} db - db instance to query

View file

@ -1,158 +0,0 @@
const {
DocumentType,
ViewName,
DeprecatedViews,
SEPARATOR,
} = require("./utils")
const { getGlobalDB } = require("../tenancy")
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
async function removeDeprecated(db, viewName) {
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get(DESIGN_DB)
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
exports.createNewUserEmailView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_EMAIL]: view,
}
await db.put(designDoc)
}
exports.createUserAppView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_APP]: view,
}
await db.put(designDoc)
}
exports.createApiKeyView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.BY_API_KEY]: view,
}
await db.put(designDoc)
}
exports.createUserBuildersView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
[ViewName.BY_API_KEY]: exports.createApiKeyView,
[ViewName.USER_BY_BUILDERS]: exports.createUserBuildersView,
[ViewName.USER_BY_APP]: exports.createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB()
}
try {
let response = (await db.query(`database/${viewName}`, params)).rows
response = response.map(resp =>
params.include_docs ? resp.doc : resp.value
)
return response.length <= 1 ? response[0] : response
} catch (err) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return exports.queryGlobalView(viewName, params)
} else {
throw err
}
}
}

View file

@ -0,0 +1,261 @@
import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
import { getGlobalDB } from "../context"
import PouchDB from "pouchdb"
import { StaticDatabases } from "./constants"
import { doWithDB } from "./"
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
interface DesignDocument {
views: any
}
async function removeDeprecated(db: PouchDB.Database, viewName: ViewName) {
// @ts-ignore
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get<DesignDocument>(DESIGN_DB)
// @ts-ignore
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
export const createNewUserEmailView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_EMAIL]: view,
}
await db.put(designDoc)
}
export const createAccountEmailView = async () => {
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
let designDoc
try {
designDoc = await db.get<DesignDocument>(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.ACCOUNT_BY_EMAIL]: view,
}
await db.put(designDoc)
}
)
}
export const createUserAppView = async () => {
const db = getGlobalDB() as PouchDB.Database
let designDoc
try {
designDoc = await db.get<DesignDocument>("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_APP]: view,
}
await db.put(designDoc)
}
export const createApiKeyView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.BY_API_KEY]: view,
}
await db.put(designDoc)
}
export const createUserBuildersView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
export const createPlatformUserView = async () => {
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
let designDoc
try {
designDoc = await db.get<DesignDocument>(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc.tenantId) {
emit(doc._id.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.PLATFORM_USERS_LOWERCASE]: view,
}
await db.put(designDoc)
}
)
}
export interface QueryViewOptions {
arrayResponse?: boolean
}
export const queryView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db: PouchDB.Database,
CreateFuncByName: any,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
try {
let response = await db.query<T, T>(`database/${viewName}`, params)
const rows = response.rows
const docs = rows.map(row => (params.include_docs ? row.doc : row.value))
// if arrayResponse has been requested, always return array regardless of length
if (opts?.arrayResponse) {
return docs
} else {
// return the single document if there is only one
return docs.length <= 1 ? docs[0] : docs
}
} catch (err: any) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return queryView(viewName, params, db, CreateFuncByName, opts)
} else {
throw err
}
}
}
export const queryPlatformView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName = {
[ViewName.ACCOUNT_BY_EMAIL]: createAccountEmailView,
[ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
}
return doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
return queryView(viewName, params, db, CreateFuncByName, opts)
}
)
}
export const queryGlobalView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db?: PouchDB.Database,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName = {
[ViewName.USER_BY_EMAIL]: createNewUserEmailView,
[ViewName.BY_API_KEY]: createApiKeyView,
[ViewName.USER_BY_BUILDERS]: createUserBuildersView,
[ViewName.USER_BY_APP]: createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB() as PouchDB.Database
}
return queryView(viewName, params, db, CreateFuncByName, opts)
}

View file

@ -19,6 +19,7 @@ if (!LOADED && isDev() && !isTest()) {
const env = {
isTest,
isDev,
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
@ -36,7 +37,7 @@ const env = {
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL:
process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "",
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
@ -50,6 +51,7 @@ const env = {
GLOBAL_BUCKET_NAME: process.env.GLOBAL_BUCKET_NAME || "global",
GLOBAL_CLOUD_BUCKET_NAME:
process.env.GLOBAL_CLOUD_BUCKET_NAME || "prod-budi-tenant-uploads",
PLUGIN_BUCKET_NAME: process.env.PLUGIN_BUCKET_NAME || "plugins",
USE_COUCH: process.env.USE_COUCH || true,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,

View file

@ -1,11 +0,0 @@
class BudibaseError extends Error {
constructor(message, code, type) {
super(message)
this.code = code
this.type = type
}
}
module.exports = {
BudibaseError,
}

View file

@ -0,0 +1,10 @@
export class BudibaseError extends Error {
code: string
type: string
constructor(message: string, code: string, type: string) {
super(message)
this.code = code
this.type = type
}
}

View file

@ -1,11 +0,0 @@
const { BudibaseError } = require("./base")
class GenericError extends BudibaseError {
constructor(message, code, type) {
super(message, code, type ? type : "generic")
}
}
module.exports = {
GenericError,
}

View file

@ -0,0 +1,7 @@
import { BudibaseError } from "./base"
export class GenericError extends BudibaseError {
constructor(message: string, code: string, type: string) {
super(message, code, type ? type : "generic")
}
}

View file

@ -1,12 +0,0 @@
const { GenericError } = require("./generic")
class HTTPError extends GenericError {
constructor(message, httpStatus, code = "http", type = "generic") {
super(message, code, type)
this.status = httpStatus
}
}
module.exports = {
HTTPError,
}

View file

@ -0,0 +1,15 @@
import { GenericError } from "./generic"
export class HTTPError extends GenericError {
status: number
constructor(
message: string,
httpStatus: number,
code = "http",
type = "generic"
) {
super(message, code, type)
this.status = httpStatus
}
}

View file

@ -1,5 +1,6 @@
const http = require("./http")
const licensing = require("./licensing")
import { HTTPError } from "./http"
import { UsageLimitError, FeatureDisabledError } from "./licensing"
import * as licensing from "./licensing"
const codes = {
...licensing.codes,
@ -11,7 +12,7 @@ const context = {
...licensing.context,
}
const getPublicError = err => {
const getPublicError = (err: any) => {
let error
if (err.code || err.type) {
// add generic error information
@ -32,13 +33,15 @@ const getPublicError = err => {
return error
}
module.exports = {
const pkg = {
codes,
types,
errors: {
UsageLimitError: licensing.UsageLimitError,
FeatureDisabledError: licensing.FeatureDisabledError,
HTTPError: http.HTTPError,
UsageLimitError,
FeatureDisabledError,
HTTPError,
},
getPublicError,
}
export = pkg

View file

@ -1,43 +0,0 @@
const { HTTPError } = require("./http")
const type = "license_error"
const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
const context = {
[codes.USAGE_LIMIT_EXCEEDED]: err => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: err => {
return {
featureName: err.featureName,
}
},
}
class UsageLimitError extends HTTPError {
constructor(message, limitName) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
class FeatureDisabledError extends HTTPError {
constructor(message, featureName) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}
module.exports = {
type,
codes,
context,
UsageLimitError,
FeatureDisabledError,
}

View file

@ -0,0 +1,39 @@
import { HTTPError } from "./http"
export const type = "license_error"
export const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
export const context = {
[codes.USAGE_LIMIT_EXCEEDED]: (err: any) => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: (err: any) => {
return {
featureName: err.featureName,
}
},
}
export class UsageLimitError extends HTTPError {
limitName: string
constructor(message: string, limitName: string) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
export class FeatureDisabledError extends HTTPError {
featureName: string
constructor(message: string, featureName: string) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}

View file

@ -8,4 +8,5 @@ import { processors } from "./processors"
export const shutdown = () => {
processors.shutdown()
console.log("Events shutdown")
}

View file

@ -5,8 +5,15 @@ import {
DatasourceCreatedEvent,
DatasourceUpdatedEvent,
DatasourceDeletedEvent,
SourceName,
} from "@budibase/types"
function isCustom(datasource: Datasource) {
const sources = Object.values(SourceName)
// if not in the base source list, then it must be custom
return !sources.includes(datasource.source)
}
export async function created(
datasource: Datasource,
timestamp?: string | number
@ -14,6 +21,7 @@ export async function created(
const properties: DatasourceCreatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)
}
@ -22,6 +30,7 @@ export async function updated(datasource: Datasource) {
const properties: DatasourceUpdatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_UPDATED, properties)
}
@ -30,6 +39,7 @@ export async function deleted(datasource: Datasource) {
const properties: DatasourceDeletedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_DELETED, properties)
}

View file

@ -18,3 +18,4 @@ export * as view from "./view"
export * as installation from "./installation"
export * as backfill from "./backfill"
export * as group from "./group"
export * as plugin from "./plugin"

View file

@ -0,0 +1,41 @@
import { publishEvent } from "../events"
import {
Event,
Plugin,
PluginDeletedEvent,
PluginImportedEvent,
PluginInitEvent,
} from "@budibase/types"
export async function init(plugin: Plugin) {
const properties: PluginInitEvent = {
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_INIT, properties)
}
export async function imported(plugin: Plugin) {
const properties: PluginImportedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
source: plugin.source,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_IMPORTED, properties)
}
export async function deleted(plugin: Plugin) {
const properties: PluginDeletedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_DELETED, properties)
}

View file

@ -31,20 +31,26 @@ const TENANT_FEATURE_FLAGS = getFeatureFlags()
exports.isEnabled = featureFlag => {
const tenantId = tenancy.getTenantId()
return (
TENANT_FEATURE_FLAGS &&
TENANT_FEATURE_FLAGS[tenantId] &&
TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
)
const flags = exports.getTenantFeatureFlags(tenantId)
return flags.includes(featureFlag)
}
exports.getTenantFeatureFlags = tenantId => {
if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
return TENANT_FEATURE_FLAGS[tenantId]
const flags = []
if (TENANT_FEATURE_FLAGS) {
const globalFlags = TENANT_FEATURE_FLAGS["*"]
const tenantFlags = TENANT_FEATURE_FLAGS[tenantId]
if (globalFlags) {
flags.push(...globalFlags)
}
if (tenantFlags) {
flags.push(...tenantFlags)
}
}
return []
return flags
}
exports.FeatureFlag = {

View file

@ -1,5 +1,5 @@
const bcrypt = require("bcrypt")
const env = require("./environment")
const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
const { v4 } = require("uuid")
const SALT_ROUNDS = env.SALT_ROUNDS || 10

View file

@ -1,5 +1,4 @@
import errors from "./errors"
const errorClasses = errors.errors
import * as events from "./events"
import * as migrations from "./migrations"
@ -15,8 +14,10 @@ import deprovisioning from "./context/deprovision"
import auth from "./auth"
import constants from "./constants"
import * as dbConstants from "./db/constants"
import logging from "./logging"
import * as logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
import plugins from "./plugin"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -55,8 +56,10 @@ const core = {
errors,
logging,
roles,
plugins,
...pino,
...errorClasses,
middleware,
}
export = core

View file

@ -65,7 +65,7 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
* The tenancy modules should not be used here and it should be assumed that the tenancy context
* has not yet been populated.
*/
module.exports = (
export = (
noAuthPatterns = [],
opts: { publicAllowed: boolean; populateUser?: Function } = {
publicAllowed: false,
@ -106,6 +106,7 @@ module.exports = (
user = await getUser(userId, session.tenantId)
}
user.csrfToken = session.csrfToken
if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use
await updateSessionTTL(session)

View file

@ -13,7 +13,8 @@ const adminOnly = require("./adminOnly")
const builderOrAdmin = require("./builderOrAdmin")
const builderOnly = require("./builderOnly")
const joiValidator = require("./joi-validator")
module.exports = {
const pkg = {
google,
oidc,
jwt,
@ -33,3 +34,5 @@ module.exports = {
builderOrAdmin,
joiValidator,
}
export = pkg

View file

@ -13,10 +13,13 @@ function validate(schema, property) {
params = ctx.request[property]
}
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
// not all schemas have the append property e.g. array schemas
if (schema.append) {
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
}
const { error } = schema.validate(params)
if (error) {

View file

@ -17,14 +17,6 @@ export const DEFINITIONS: MigrationDefinition[] = [
type: MigrationType.APP,
name: MigrationName.APP_URLS,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.DEVELOPER_QUOTA,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.PUBLISHED_APP_QUOTA,
},
{
type: MigrationType.APP,
name: MigrationName.EVENT_APP_BACKFILL,

View file

@ -3,12 +3,8 @@ import { doWithDB } from "../db"
import { DocumentType, StaticDatabases } from "../db/constants"
import { getAllApps } from "../db/utils"
import environment from "../environment"
import {
doInTenant,
getTenantIds,
getGlobalDBName,
getTenantId,
} from "../tenancy"
import { doInTenant, getTenantIds, getTenantId } from "../tenancy"
import { getGlobalDBName } from "../db/tenancy"
import * as context from "../context"
import { DEFINITIONS } from "."
import {

View file

@ -57,7 +57,11 @@ function publicPolicy(bucketName: any) {
}
}
const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]
/**
* Gets a connection to the object store using the S3 SDK.
@ -66,15 +70,13 @@ const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
* @constructor
*/
export const ObjectStore = (bucket: any) => {
AWS.config.update({
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
})
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
@ -174,6 +176,14 @@ export const streamUpload = async (
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
// Set content type for certain known extensions
if (filename?.endsWith(".js")) {
extra = {
...extra,
ContentType: "application/javascript",
}
}
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
@ -297,9 +307,13 @@ export const uploadDirectory = async (
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {
exports.downloadTarballDirect = async (
url: string,
path: string,
headers = {}
) => {
path = sanitizeKey(path)
const response = await fetch(url)
const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}

View file

@ -8,6 +8,7 @@ exports.ObjectStoreBuckets = {
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
GLOBAL: env.GLOBAL_BUCKET_NAME,
GLOBAL_CLOUD: env.GLOBAL_CLOUD_BUCKET_NAME,
PLUGINS: env.PLUGIN_BUCKET_NAME,
}
exports.budibaseTempDir = function () {

View file

@ -0,0 +1,7 @@
import * as utils from "./utils"
const pkg = {
...utils,
}
export = pkg

View file

@ -0,0 +1,94 @@
const {
DatasourceFieldType,
QueryType,
PluginType,
} = require("@budibase/types")
const joi = require("joi")
const DATASOURCE_TYPES = [
"Relational",
"Non-relational",
"Spreadsheet",
"Object store",
"Graph",
"API",
]
function runJoi(validator, schema) {
const { error } = validator.validate(schema)
if (error) {
throw error
}
}
function validateComponent(schema) {
const validator = joi.object({
type: joi.string().allow("component").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi
.object({
name: joi.string().required(),
settings: joi.array().items(joi.object().unknown(true)).required(),
})
.unknown(true),
})
runJoi(validator, schema)
}
function validateDatasource(schema) {
const fieldValidator = joi.object({
type: joi
.string()
.allow(...Object.values(DatasourceFieldType))
.required(),
required: joi.boolean().required(),
default: joi.any(),
display: joi.string(),
})
const queryValidator = joi
.object({
type: joi.string().allow(...Object.values(QueryType)),
fields: joi.object().pattern(joi.string(), fieldValidator),
})
.required()
const validator = joi.object({
type: joi.string().allow("datasource").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi.object({
docs: joi.string(),
friendlyName: joi.string().required(),
type: joi.string().allow(...DATASOURCE_TYPES),
description: joi.string().required(),
datasource: joi.object().pattern(joi.string(), fieldValidator).required(),
query: joi
.object({
create: queryValidator,
read: queryValidator,
update: queryValidator,
delete: queryValidator,
})
.unknown(true)
.required(),
}),
})
runJoi(validator, schema)
}
exports.validate = schema => {
switch (schema?.type) {
case PluginType.COMPONENT:
validateComponent(schema)
break
case PluginType.DATASOURCE:
validateDatasource(schema)
break
default:
throw new Error(`Unknown plugin type - check schema.json: ${schema.type}`)
}
}
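
For reference, a minimal sketch of a component schema that the validator above should accept; the field values and the relative import path are illustrative assumptions, not taken from the diff.

// Sketch: validating a minimal component plugin schema (values are examples)
import { validate } from "./validation"

const componentSchema = {
  type: "component",
  metadata: { author: "example" },
  schema: {
    name: "my-component",
    settings: [{ type: "text", key: "label", label: "Label" }],
  },
}

// throws a joi ValidationError if the schema is malformed
validate(componentSchema)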

View file

@ -2,18 +2,12 @@ const redis = require("../redis/init")
const { v4: uuidv4 } = require("uuid")
const { logWarn } = require("../logging")
const env = require("../environment")
interface Session {
key: string
userId: string
sessionId: string
lastAccessedAt: string
createdAt: string
csrfToken?: string
value: string
}
type SessionKey = { key: string }[]
import {
Session,
ScannedSession,
SessionKey,
CreateSession,
} from "@budibase/types"
// a week in seconds
const EXPIRY_SECONDS = 86400 * 7
@ -22,14 +16,14 @@ function makeSessionID(userId: string, sessionId: string) {
return `${userId}/${sessionId}`
}
export async function getSessionsForUser(userId: string) {
export async function getSessionsForUser(userId: string): Promise<Session[]> {
if (!userId) {
console.trace("Cannot get sessions for undefined userId")
return []
}
const client = await redis.getSessionClient()
const sessions = await client.scan(userId)
return sessions.map((session: Session) => session.value)
const sessions: ScannedSession[] = await client.scan(userId)
return sessions.map(session => session.value)
}
export async function invalidateSessions(
@ -39,33 +33,32 @@ export async function invalidateSessions(
try {
const reason = opts?.reason || "unknown"
let sessionIds: string[] = opts.sessionIds || []
let sessions: SessionKey
let sessionKeys: SessionKey[]
// If no sessionIds, get all the sessions for the user
if (sessionIds.length === 0) {
sessions = await getSessionsForUser(userId)
sessions.forEach(
(session: any) =>
(session.key = makeSessionID(session.userId, session.sessionId))
)
const sessions = await getSessionsForUser(userId)
sessionKeys = sessions.map(session => ({
key: makeSessionID(session.userId, session.sessionId),
}))
} else {
// use the passed array of sessionIds
sessionIds = Array.isArray(sessionIds) ? sessionIds : [sessionIds]
sessions = sessionIds.map((sessionId: string) => ({
sessionKeys = sessionIds.map(sessionId => ({
key: makeSessionID(userId, sessionId),
}))
}
if (sessions && sessions.length > 0) {
if (sessionKeys && sessionKeys.length > 0) {
const client = await redis.getSessionClient()
const promises = []
for (let session of sessions) {
promises.push(client.delete(session.key))
for (let sessionKey of sessionKeys) {
promises.push(client.delete(sessionKey.key))
}
if (!env.isTest()) {
logWarn(
`Invalidating sessions for ${userId} (reason: ${reason}) - ${sessions
.map(session => session.key)
`Invalidating sessions for ${userId} (reason: ${reason}) - ${sessionKeys
.map(sessionKey => sessionKey.key)
.join(", ")}`
)
}
@ -76,22 +69,26 @@ export async function invalidateSessions(
}
}
export async function createASession(userId: string, session: Session) {
export async function createASession(
userId: string,
createSession: CreateSession
) {
// invalidate all other sessions
await invalidateSessions(userId, { reason: "creation" })
const client = await redis.getSessionClient()
const sessionId = session.sessionId
if (!session.csrfToken) {
session.csrfToken = uuidv4()
}
session = {
...session,
const sessionId = createSession.sessionId
const csrfToken = createSession.csrfToken ? createSession.csrfToken : uuidv4()
const key = makeSessionID(userId, sessionId)
const session: Session = {
...createSession,
csrfToken,
createdAt: new Date().toISOString(),
lastAccessedAt: new Date().toISOString(),
userId,
}
await client.store(makeSessionID(userId, sessionId), session, EXPIRY_SECONDS)
await client.store(key, session, EXPIRY_SECONDS)
}
export async function updateSessionTTL(session: Session) {
@ -106,7 +103,10 @@ export async function endSession(userId: string, sessionId: string) {
await client.delete(makeSessionID(userId, sessionId))
}
export async function getSession(userId: string, sessionId: string) {
export async function getSession(
userId: string,
sessionId: string
): Promise<Session> {
if (!userId || !sessionId) {
throw new Error(`Invalid session details - ${userId} - ${sessionId}`)
}
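
A minimal usage sketch for the reworked session helpers, assuming the relative import path and that CreateSession only needs the fields visible above (sessionId, optional csrfToken); any additional required fields in @budibase/types would have to be supplied as well.

// Sketch: create a session then read it back (import path assumed)
import { createASession, getSession } from "./sessions"

async function startSession(userId: string, sessionId: string) {
  // csrfToken is optional - a uuid is generated when it is omitted
  await createASession(userId, { sessionId })
  const session = await getSession(userId, sessionId)
  return session.csrfToken
}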

View file

@ -1,6 +1,7 @@
import { doWithDB } from "../db"
import { StaticDatabases } from "../db/constants"
import { baseGlobalDBName } from "./utils"
import { queryPlatformView } from "../db/views"
import { StaticDatabases, ViewName } from "../db/constants"
import { getGlobalDBName } from "../db/tenancy"
import {
getTenantId,
DEFAULT_TENANT_ID,
@ -8,6 +9,7 @@ import {
getTenantIDFromAppID,
} from "../context"
import env from "../environment"
import { PlatformUser } from "@budibase/types"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@ -87,15 +89,6 @@ export const tryAddTenant = async (
})
}
export const getGlobalDBName = (tenantId?: string) => {
// tenant ID can be set externally, for example user API where
// new tenants are being created, this may be the case
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export const doWithGlobalDB = (tenantId: string, cb: any) => {
return doWithDB(getGlobalDBName(tenantId), cb)
}
@ -116,14 +109,16 @@ export const lookupTenantId = async (userId: string) => {
}
// lookup, could be email or userId, either will return a doc
export const getTenantUser = async (identifier: string) => {
return doWithDB(PLATFORM_INFO_DB, async (db: any) => {
try {
return await db.get(identifier)
} catch (err) {
return null
}
})
export const getTenantUser = async (
identifier: string
): Promise<PlatformUser | null> => {
// use the view here to allow finding anyone regardless of casing
// Use lowercase to ensure email login is case insensitive
const response = queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {
keys: [identifier.toLowerCase()],
include_docs: true,
}) as Promise<PlatformUser>
return response
}
export const isUserInAppTenant = (appId: string, user: any) => {
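
A sketch of the behavioural change in getTenantUser, assuming the relative import path: because the lookup now goes through the lowercase platform-users view, the same document is returned regardless of the casing of the email supplied at login.

// Sketch: email lookups are now case-insensitive (import path assumed)
import { getTenantUser } from "./tenancy"

async function findPlatformUser(emailOrId: string) {
  // "User@Example.com" and "user@example.com" resolve to the same view row
  const user = await getTenantUser(emailOrId)
  return user ? user._id : null
}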

View file

@ -1,12 +0,0 @@
const { DEFAULT_TENANT_ID } = require("../constants")
const { StaticDatabases, SEPARATOR } = require("../db/constants")
exports.baseGlobalDBName = tenantId => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}

View file

@ -1,30 +1,39 @@
const {
import {
ViewName,
getUsersByAppParams,
getProdAppID,
generateAppUserID,
} = require("./db/utils")
const { queryGlobalView } = require("./db/views")
const { UNICODE_MAX } = require("./db/constants")
} from "./db/utils"
import { queryGlobalView } from "./db/views"
import { UNICODE_MAX } from "./db/constants"
import { User } from "@budibase/types"
/**
* Given an email address this will use a view to search through
* all the users to find one with this email address.
* @param {string} email the email to lookup the user by.
* @return {Promise<object|null>}
*/
exports.getGlobalUserByEmail = async email => {
export const getGlobalUserByEmail = async (
email: String
): Promise<User | undefined> => {
if (email == null) {
throw "Must supply an email address to view"
}
return await queryGlobalView(ViewName.USER_BY_EMAIL, {
const response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {
key: email.toLowerCase(),
include_docs: true,
})
if (Array.isArray(response)) {
// shouldn't be able to happen, but need to handle just in case
throw new Error(`Multiple users found with email address: ${email}`)
}
return response
}
exports.searchGlobalUsersByApp = async (appId, opts) => {
export const searchGlobalUsersByApp = async (appId: any, opts: any) => {
if (typeof appId !== "string") {
throw new Error("Must provide a string based app ID")
}
@ -39,24 +48,24 @@ exports.searchGlobalUsersByApp = async (appId, opts) => {
return Array.isArray(response) ? response : [response]
}
exports.getGlobalUserByAppPage = (appId, user) => {
export const getGlobalUserByAppPage = (appId: string, user: User) => {
if (!user) {
return
}
return generateAppUserID(getProdAppID(appId), user._id)
return generateAppUserID(getProdAppID(appId), user._id!)
}
/**
* Performs a starts with search on the global email view.
*/
exports.searchGlobalUsersByEmail = async (email, opts) => {
export const searchGlobalUsersByEmail = async (email: string, opts: any) => {
if (typeof email !== "string") {
throw new Error("Must provide a string to search by")
}
const lcEmail = email.toLowerCase()
// handle if passing up startkey for pagination
const startkey = opts && opts.startkey ? opts.startkey : lcEmail
let response = await queryGlobalView(ViewName.USER_BY_EMAIL, {
let response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {
...opts,
startkey,
endkey: `${lcEmail}${UNICODE_MAX}`,

View file

@ -42,6 +42,18 @@ async function resolveAppUrl(ctx) {
return app && app.appId ? app.appId : undefined
}
exports.isServingApp = ctx => {
// dev app
if (ctx.path.startsWith(`/${APP_PREFIX}`)) {
return true
}
// prod app
if (ctx.path.startsWith(PROD_APP_PREFIX)) {
return true
}
return false
}
/**
* Given a request tries to find the appId, which can be located in various places
* @param {object} ctx The main request body to look through.

View file

@ -0,0 +1,7 @@
export const getAccount = jest.fn()
export const getAccountByTenantId = jest.fn()
jest.mock("../../../src/cloud/accounts", () => ({
getAccount,
getAccountByTenantId,
}))

View file

@ -1,2 +0,0 @@
exports.MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
exports.MOCK_DATE_TIMESTAMP = 1577836800000

View file

@ -0,0 +1,2 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000

View file

@ -1,9 +0,0 @@
const posthog = require("./posthog")
const events = require("./events")
const date = require("./date")
module.exports = {
posthog,
date,
events,
}

View file

@ -0,0 +1,4 @@
import "./posthog"
import "./events"
export * as accounts from "./accounts"
export * as date from "./date"

View file

@ -543,13 +543,13 @@
semver "^7.3.5"
tar "^6.1.11"
"@shopify/jest-koa-mocks@3.1.5":
version "3.1.5"
resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-3.1.5.tgz#11f77ccfbcaf35cf5ee2c6108a286e61e6bea084"
integrity sha512-gQ3/7ELerv00TWO37AGFX5mT9CsFCS+3/UbKMuoIlKEU0QH2OX8BV9WBf/EKw7adCDNlxss0lqV6J8kf5pgr4A==
"@shopify/jest-koa-mocks@5.0.1":
version "5.0.1"
resolved "https://registry.yarnpkg.com/@shopify/jest-koa-mocks/-/jest-koa-mocks-5.0.1.tgz#fba490b6b7985fbb571eb9974897d396a3642e94"
integrity sha512-4YskS9q8+TEHNoyopmuoy2XyhInyqeOl7CF5ShJs19sm6m0EA/jGGvgf/osv2PeTfuf42/L2G9CzWUSg49yTSg==
dependencies:
koa "^2.13.4"
node-mocks-http "^1.5.8"
node-mocks-http "^1.11.0"
"@sideway/address@^4.1.3":
version "4.1.4"
@ -1377,6 +1377,11 @@ bcrypt@5.0.1:
"@mapbox/node-pre-gyp" "^1.0.0"
node-addon-api "^3.1.0"
bcryptjs@2.4.3:
version "2.4.3"
resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb"
integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==
binary-extensions@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
@ -3914,7 +3919,7 @@ node-int64@^0.4.0:
resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b"
integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==
node-mocks-http@^1.5.8:
node-mocks-http@^1.11.0:
version "1.11.0"
resolved "https://registry.yarnpkg.com/node-mocks-http/-/node-mocks-http-1.11.0.tgz#defc0febf6b935f08245397d47534a8de592996e"
integrity sha512-jS/WzSOcKbOeGrcgKbenZeNhxUNnP36Yw11+hL4TTxQXErGfqYZ+MaYNNvhaTiGIJlzNSqgQkk9j8dSu1YWSuw==

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.2.44-alpha.8",
"version": "1.4.3-alpha.1",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "1.2.44-alpha.8",
"@budibase/string-templates": "1.4.3-alpha.1",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",

View file

@ -1,4 +1,4 @@
export default function positionDropdown(element, { anchor, align }) {
export default function positionDropdown(element, { anchor, align, maxWidth }) {
let positionSide = "top"
let maxHeight = 0
let dimensions = getDimensions(anchor)
@ -34,13 +34,24 @@ export default function positionDropdown(element, { anchor, align }) {
}
function calcLeftPosition() {
return align === "right"
? dimensions.left + dimensions.width - dimensions.containerWidth
: dimensions.left
let left
if (align == "right") {
left = dimensions.left + dimensions.width - dimensions.containerWidth
} else if (align == "right-side") {
left = dimensions.left + dimensions.width
} else {
left = dimensions.left
}
return left
}
element.style.position = "absolute"
element.style.zIndex = "9999"
if (maxWidth) {
element.style.maxWidth = `${maxWidth}px`
}
element.style.minWidth = `${dimensions.width}px`
element.style.maxHeight = `${maxHeight.toFixed(0)}px`
element.style.transformOrigin = `center ${positionSide}`
@ -54,10 +65,8 @@ export default function positionDropdown(element, { anchor, align }) {
element.style.left = `${calcLeftPosition(dimensions).toFixed(0)}px`
})
})
resizeObserver.observe(anchor)
resizeObserver.observe(element)
return {
destroy() {
resizeObserver.disconnect()
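
An illustrative call for the extended action, sketched outside Svelte's use: syntax; the element lookups and pixel value are placeholders. maxWidth caps the popover width, and "right-side" positions it beside the anchor instead of below it.

// Sketch: positioning a popover beside its anchor with a width cap (path assumed)
import positionDropdown from "./Actions/position_dropdown"

const anchor = document.getElementById("menu-button") as HTMLElement
const popover = document.getElementById("popover") as HTMLElement

// align now also accepts "right-side"; maxWidth (px) is optional
const { destroy } = positionDropdown(popover, {
  anchor,
  align: "right-side",
  maxWidth: 400,
})

// later, when the popover is removed
destroy()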

View file

@ -4,22 +4,32 @@
import { banner } from "../Stores/banner"
import Banner from "./Banner.svelte"
import { fly } from "svelte/transition"
import TooltipWrapper from "../Tooltip/TooltipWrapper.svelte"
</script>
<Portal target=".banner-container">
<div class="banner">
{#if $banner.message}
{#each $banner.messages as message}
<div transition:fly={{ y: -30 }}>
<Banner
type={$banner.type}
extraButtonText={$banner.extraButtonText}
extraButtonAction={$banner.extraButtonAction}
on:change={$banner.onChange}
type={message.type}
extraButtonText={message.extraButtonText}
extraButtonAction={message.extraButtonAction}
on:change={() => {
if (message.onChange) {
message.onChange()
}
}}
showCloseButton={typeof message.showCloseButton === "boolean"
? message.showCloseButton
: true}
>
{$banner.message}
<TooltipWrapper tooltip={message.tooltip} disabled={false}>
{message.message}
</TooltipWrapper>
</Banner>
</div>
{/if}
{/each}
</div>
</Portal>

View file

@ -78,7 +78,7 @@
bottom: 0;
background: var(--background);
border-top: var(--border-light);
z-index: 2;
z-index: 3;
}
.fillWidth {

View file

@ -67,6 +67,13 @@
// If time only set date component to 2000-01-01
if (timeOnly) {
// Classic flatpickr issue:
// When selecting a value for the first time for a "time only" field,
// the time is always offset by 1 hour for some reason (regardless of time
// zone), so we need to correct it.
if (!value && newValue) {
newValue = new Date(dates[0].getTime() + 60 * 60 * 1000).toISOString()
}
newValue = `2000-01-01T${newValue.split("T")[1]}`
}

View file

@ -139,7 +139,13 @@
<div class="title">
<div class="filename">
{#if selectedUrl}
<Link href={selectedUrl}>{selectedImage.name}</Link>
<Link
target="_blank"
download={selectedImage.name}
href={selectedUrl}
>
{selectedImage.name}
</Link>
{:else}
{selectedImage.name}
{/if}

View file

@ -10,6 +10,7 @@
export let disabled = false
export let getOptionLabel = option => option
export let getOptionValue = option => option
export let getOptionTitle = option => option
const dispatch = createEventDispatcher()
const onChange = e => dispatch("change", e.target.value)
@ -19,7 +20,7 @@
{#if options && Array.isArray(options)}
{#each options as option}
<div
title={getOptionLabel(option)}
title={getOptionTitle(option)}
class="spectrum-Radio spectrum-FieldGroup-item spectrum-Radio--emphasized"
class:is-invalid={!!error}
>

View file

@ -12,6 +12,7 @@
export let direction = "vertical"
export let getOptionLabel = option => extractProperty(option, "label")
export let getOptionValue = option => extractProperty(option, "value")
export let getOptionTitle = option => extractProperty(option, "label")
const dispatch = createEventDispatcher()
const onChange = e => {
@ -35,6 +36,7 @@
{direction}
{getOptionLabel}
{getOptionValue}
{getOptionTitle}
on:change={onChange}
/>
</Field>

View file

@ -6,6 +6,7 @@
export let header = ""
export let message = ""
export let onConfirm = undefined
export let buttonText = ""
$: icon = selectIcon(type)
// if newlines used, convert them to different elements
@ -39,13 +40,16 @@
<div class="spectrum-InLineAlert-content">{splitMsg}</div>
{/each}
{#if onConfirm}
<div class="spectrum-InLineAlert-footer">
<Button secondary on:click={onConfirm}>OK</Button>
<div class="spectrum-InLineAlert-footer button">
<Button secondary on:click={onConfirm}>{buttonText || "OK"}</Button>
</div>
{/if}
</div>
<style>
.button {
margin-top: 10px;
}
.spectrum-InLineAlert {
--spectrum-semantic-negative-border-color: #e34850;
--spectrum-semantic-positive-border-color: #2d9d78;

View file

@ -4,10 +4,15 @@
export let size = "M"
export let tooltip = ""
export let muted
</script>
<TooltipWrapper {tooltip} {size}>
<label for="" class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}>
<label
class:muted
for=""
class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}
>
<slot />
</label>
</TooltipWrapper>
@ -17,4 +22,8 @@
padding: 0;
white-space: nowrap;
}
.muted {
opacity: 0.5;
}
</style>

View file

@ -8,12 +8,14 @@
export let secondary = false
export let overBackground = false
export let target
export let download
</script>
<a
on:click
{href}
{target}
{download}
class:spectrum-Link--primary={primary}
class:spectrum-Link--secondary={secondary}
class:spectrum-Link--overBackground={overBackground}

View file

@ -24,7 +24,6 @@
export let secondaryAction = undefined
export let secondaryButtonWarning = false
export let dataCy = null
const { hide, cancel } = getContext(Context.Modal)
let loading = false
$: confirmDisabled = disabled || loading
@ -88,12 +87,11 @@
<section class="spectrum-Dialog-content content-grid">
<slot />
</section>
{#if showCancelButton || showConfirmButton}
{#if showCancelButton || showConfirmButton || $$slots.footer}
<div
class="spectrum-ButtonGroup spectrum-Dialog-buttonGroup spectrum-Dialog-buttonGroup--noFooter"
>
<slot name="footer" />
{#if showSecondaryButton && secondaryButtonText && secondaryAction}
<div class="secondary-action">
<Button

View file

@ -11,6 +11,7 @@
export let align = "right"
export let portalTarget
export let dataCy
export let maxWidth
export let direction = "bottom"
export let showTip = false
@ -45,7 +46,7 @@
<Portal target={portalTarget}>
<div
tabindex="0"
use:positionDropdown={{ anchor, align }}
use:positionDropdown={{ anchor, align, maxWidth }}
use:clickOutside={hide}
on:keydown={handleEscape}
class={"spectrum-Popover is-open " + (tooltipClasses || "")}

View file

@ -8,6 +8,8 @@
export let duration = 1000
export let width = false
export let sideLabel = false
export let hidePercentage = true
export let color // red, green, default = blue
export let size = "M"
@ -37,7 +39,7 @@
<slot />
</div>
{/if}
{#if value || value === 0}
{#if !hidePercentage && (value || value === 0)}
<div
class="spectrum-FieldLabel spectrum-ProgressBar-percentage spectrum-FieldLabel--size{size}"
>
@ -47,8 +49,19 @@
<div class="spectrum-ProgressBar-track">
<div
class="spectrum-ProgressBar-fill"
class:color-green={color === "green"}
class:color-red={color === "red"}
style={value || value === 0 ? `width: ${$progress}%` : ""}
/>
</div>
<div class="spectrum-ProgressBar-label" hidden="" />
</div>
<style>
.color-green {
background: #009562;
}
.color-red {
background: #dd2019;
}
</style>

View file

@ -1,6 +1,7 @@
<script>
import { getContext } from "svelte"
const multilevel = getContext("sidenav-type")
import Badge from "../Badge/Badge.svelte"
export let href = ""
export let external = false
export let heading = ""
@ -8,6 +9,7 @@
export let selected = false
export let disabled = false
export let dataCy
export let badge = ""
</script>
<li
@ -38,10 +40,22 @@
</svg>
{/if}
<slot />
{#if badge}
<div class="badge">
<Badge active size="S">{badge}</Badge>
</div>
{/if}
</a>
{#if multilevel && $$slots.subnav}
<ul class="spectrum-SideNav">
<slot name="subnav" />
</ul>
{/if}
</li>
<style>
.badge {
margin-left: 10px;
}
</style>

View file

@ -1,7 +1,14 @@
import { writable } from "svelte/store"
export const BANNER_TYPES = {
INFO: "info",
NEGATIVE: "negative",
}
export function createBannerStore() {
const DEFAULT_CONFIG = {}
const DEFAULT_CONFIG = {
messages: [],
}
const banner = writable(DEFAULT_CONFIG)
@ -20,17 +27,38 @@ export function createBannerStore() {
const showStatus = async () => {
const config = {
message: "Some systems are experiencing issues",
type: "negative",
type: BANNER_TYPES.NEGATIVE,
extraButtonText: "View Status",
extraButtonAction: () => window.open("https://status.budibase.com/"),
}
await show(config)
await queue([config])
}
const queue = async entries => {
const priority = {
[BANNER_TYPES.NEGATIVE]: 0,
[BANNER_TYPES.INFO]: 1,
}
banner.update(store => {
const sorted = [...store.messages, ...entries].sort((a, b) => {
if (priority[a.type] == priority[b.type]) {
return 0
}
return priority[a.type] < priority[b.type] ? -1 : 1
})
return {
...store,
messages: sorted,
}
})
}
return {
subscribe: banner.subscribe,
showStatus,
show,
queue,
}
}
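
A sketch of queueing multiple banners with the new store API; the imports match the bbui exports added in this diff, while the message contents are examples only. Negative messages are sorted ahead of info messages.

// Sketch: queue two banners - the negative one is shown first
import { banner, BANNER_TYPES } from "@budibase/bbui"

async function warnAboutPlugins() {
  await banner.queue([
    {
      type: BANNER_TYPES.INFO,
      message: "A new version of Budibase is available",
      extraButtonText: "Update",
      extraButtonAction: () => window.open("https://budibase.com"),
    },
    {
      type: BANNER_TYPES.NEGATIVE,
      message: "Plugin upload failed",
      showCloseButton: true,
    },
  ])
}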

View file

@ -15,14 +15,24 @@
{#each attachments as attachment}
{#if isImage(attachment.extension)}
<Link quiet target="_blank" href={attachment.url}>
<Link
quiet
target="_blank"
download={attachment.name}
href={attachment.url}
>
<div class="center" title={attachment.name}>
<img src={attachment.url} alt={attachment.extension} />
</div>
</Link>
{:else}
<div class="file" title={attachment.name}>
<Link quiet target="_blank" href={attachment.url}>
<Link
quiet
target="_blank"
download={attachment.name}
href={attachment.url}
>
{attachment.extension}
</Link>
</div>

View file

@ -10,6 +10,7 @@
export let noHorizPadding = false
export let quiet = false
export let emphasized = false
export let onTop = false
export let size = "M"
let thisSelected = undefined
@ -75,6 +76,7 @@
bind:this={container}
class:spectrum-Tabs--quiet={quiet}
class:noHorizPadding
class:onTop
class:spectrum-Tabs--vertical={vertical}
class:spectrum-Tabs--horizontal={!vertical}
class="spectrum-Tabs spectrum-Tabs--size{size}"
@ -122,4 +124,7 @@
.noPadding {
margin: 0;
}
.onTop {
z-index: 100;
}
</style>

View file

@ -4,6 +4,7 @@
export let tooltip = ""
export let size = "M"
export let disabled = true
let showTooltip = false
</script>
@ -19,7 +20,7 @@
on:mouseleave={() => (showTooltip = false)}
on:focus
>
<Icon name="InfoOutline" size="S" disabled={true} />
<Icon name="InfoOutline" size="S" {disabled} />
</div>
{#if showTooltip}
<div class="tooltip">
@ -47,14 +48,13 @@
display: flex;
justify-content: center;
top: 15px;
z-index: 100;
z-index: 200;
width: 160px;
}
.icon {
transform: scale(0.75);
}
.icon-small {
margin-top: -2px;
margin-bottom: -5px;
margin-bottom: -2px;
}
</style>

View file

@ -34,6 +34,7 @@ export { default as Layout } from "./Layout/Layout.svelte"
export { default as Page } from "./Layout/Page.svelte"
export { default as Link } from "./Link/Link.svelte"
export { default as Tooltip } from "./Tooltip/Tooltip.svelte"
export { default as TooltipWrapper } from "./Tooltip/TooltipWrapper.svelte"
export { default as Menu } from "./Menu/Menu.svelte"
export { default as MenuSection } from "./Menu/Section.svelte"
export { default as MenuSeparator } from "./Menu/Separator.svelte"
@ -94,7 +95,7 @@ export { default as clickOutside } from "./Actions/click_outside"
// Stores
export { notifications, createNotificationStore } from "./Stores/notifications"
export { banner } from "./Stores/banner"
export { banner, BANNER_TYPES } from "./Stores/banner"
// Helpers
export * as Helpers from "./helpers"

Some files were not shown because too many files have changed in this diff.