diff --git a/.all-contributorsrc b/.all-contributorsrc index 53705907c2..3a416f917e 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -162,6 +162,7 @@ "translation" ] }, + { "login": "mslourens", "name": "Maurits Lourens", "avatar_url": "https://avatars.githubusercontent.com/u/1907152?v=4", diff --git a/.github/workflows/README.md b/.github/workflows/README.md index c33665c964..f77323d85a 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -119,6 +119,8 @@ This job is responsible for deploying to our production, cloud kubernetes enviro ## Pro +| **NOTE**: When developing for both pro / budibase repositories, your branch names need to match, or else the correct pro doesn't get run within your CI job. + ### Installing Pro The pro package is always installed from source in our CI jobs. @@ -132,7 +134,7 @@ This is done to prevent pro needing to be published prior to CI runs in budiabse - backend-core lives in the monorepo, so it can't be released independently to be used in pro - therefore the only option is to pull pro from source and release it as a part of the monorepo release, as if it were a mono package -The install is performed using the same steps as local development, via the `yarn bootstrap` command, see the [Contributing Guide#Pro](../CONTRIBUTING.md#pro) +The install is performed using the same steps as local development, via the `yarn bootstrap` command, see the [Contributing Guide#Pro](../../docs/CONTRIBUTING.md#pro) The branch to install pro from can vary depending on ref of the commit that triggered the budibase CI job. This is done to enable branches which have changes in both the monorepo and the pro repo to have their CI pass successfully. 
diff --git a/.github/workflows/deploy-release.yml b/.github/workflows/deploy-release.yml index 0fb8a5fea0..024b97597f 100644 --- a/.github/workflows/deploy-release.yml +++ b/.github/workflows/deploy-release.yml @@ -68,6 +68,13 @@ jobs: ] env: KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}' + + - name: Re roll the services + uses: actions-hub/kubectl@master + env: + KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG }} + with: + args: rollout restart deployment proxy-service -n budibase && kubectl rollout restart deployment app-service -n budibase && kubectl rollout restart deployment worker-service -n budibase - name: Discord Webhook Action uses: tsickert/discord-webhook@v4.0.0 diff --git a/.github/workflows/release-develop.yml b/.github/workflows/release-develop.yml index 631308d945..067d0eb0dd 100644 --- a/.github/workflows/release-develop.yml +++ b/.github/workflows/release-develop.yml @@ -18,8 +18,9 @@ on: workflow_dispatch: env: - # Posthog token used by ui at build time - POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F + # Posthog token used by ui at build time + # disable unless needed for testing + # POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }} PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} FEATURE_PREVIEW_URL: https://budirelease.live @@ -120,6 +121,13 @@ jobs: env: KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}' + - name: Re roll the services + uses: actions-hub/kubectl@master + env: + KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG }} + with: + args: rollout restart deployment proxy-service -n budibase && kubectl rollout restart deployment app-service -n budibase && kubectl rollout restart deployment worker-service -n budibase + - name: Discord Webhook Action uses: tsickert/discord-webhook@v4.0.0 with: diff --git a/.github/workflows/release-selfhost.yml b/.github/workflows/release-selfhost.yml index fc2b7b0cca..da064f3e32 100644 --- 
a/.github/workflows/release-selfhost.yml +++ b/.github/workflows/release-selfhost.yml @@ -3,24 +3,37 @@ name: Budibase Release Selfhost on: workflow_dispatch: +env: + BRANCH: ${{ github.event.pull_request.head.ref }} + BASE_BRANCH: ${{ github.event.pull_request.base.ref}} + jobs: release: runs-on: ubuntu-latest steps: + - name: Fail if branch is not master + if: github.ref != 'refs/heads/master' + run: | + echo "Ref is not master, you must run this job from master." + exit 1 + - uses: actions/checkout@v2 with: node-version: 14.x fetch_depth: 0 + - name: Get the latest budibase release version + id: version + run: | + release_version=$(cat lerna.json | jq -r '.version') + echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV + - name: Tag and release Docker images (Self Host) run: | docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - # Get latest release version - release_version=$(cat lerna.json | jq -r '.version') - echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV - release_tag=v$release_version + release_tag=v${{ env.RELEASE_VERSION }} # Pull apps and worker images docker pull budibase/apps:$release_tag @@ -40,13 +53,15 @@ jobs: DOCKER_USER: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }} SELFHOST_TAG: latest - - - name: Build CLI executables + + - name: Install Pro + run: yarn install:pro $BRANCH $BASE_BRANCH + + - name: Bootstrap and build (CLI) run: | - pushd packages/cli yarn + yarn bootstrap yarn build - popd - name: Build OpenAPI spec run: | @@ -93,4 +108,4 @@ jobs: with: webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }} content: "Self Host Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Self Host." 
- embed-title: ${{ env.RELEASE_VERSION }} \ No newline at end of file + embed-title: ${{ env.RELEASE_VERSION }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 348b600f90..961082e1ef 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -29,7 +29,7 @@ on: env: # Posthog token used by ui at build time - POSTHOG_TOKEN: phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS + POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }} SENTRY_DSN: ${{ secrets.SENTRY_DSN }} PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }} diff --git a/README.md b/README.md index ae149f7347..1dec1737da 100644 --- a/README.md +++ b/README.md @@ -169,7 +169,7 @@ If you have a question or would like to talk with other Budibase users and join ## ❗ Code of conduct -Budibase is dedicated to providing a welcoming, diverse, and harrassment-free experience for everyone. We expect everyone in the Budibase community to abide by our [**Code of Conduct**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md). Please read it. +Budibase is dedicated to providing a welcoming, diverse, and harrassment-free experience for everyone. We expect everyone in the Budibase community to abide by our [**Code of Conduct**](https://github.com/Budibase/budibase/blob/HEAD/docs/CODE_OF_CONDUCT.md). Please read it.
diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml index 2734202fff..404e92c70f 100644 --- a/charts/budibase/values.yaml +++ b/charts/budibase/values.yaml @@ -91,7 +91,7 @@ globals: budibaseEnv: PRODUCTION enableAnalytics: "1" sentryDSN: "" - posthogToken: "phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS" + posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU" logLevel: info selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 531ed05749..fb0848596c 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -4,10 +4,10 @@ From opening a bug report to creating a pull request: every contribution is appr ## Table of contents -- [Quick start](#quick-start) -- [Status](#status) -- [What's included](#whats-included) -- [Bugs and feature requests](#bugs-and-feature-requests) +- [Where to start](#not-sure-where-to-start) +- [Contributor Licence Agreement](#contributor-license-agreement-cla) +- [Glossary of Terms](#glossary-of-terms) +- [Contributing to Budibase](#contributing-to-budibase) ## Not Sure Where to Start? @@ -32,6 +32,9 @@ All contributors must sign an [Individual Contributor License Agreement](https:/ If contributing on behalf of your company, your company must sign a [Corporate Contributor License Agreement](https://github.com/budibase/budibase/blob/next/.github/cla/corporate-cla.md). If so, please contact us via community@budibase.com. +If for any reason, your first contribution is in a PR created by other contributor, please just add a comment to the PR +with the following text to agree our CLA: "I have read the CLA Document and I hereby sign the CLA". + ## Glossary of Terms To understand the budibase API, it can be helpful to understand the top level entities that make up Budibase. 
@@ -162,7 +165,10 @@ When you are running locally, budibase stores data on disk using docker volumes. ### Development Modes -A combination of environment variables controls the mode budibase runs in. +A combination of environment variables controls the mode budibase runs in. + +| **NOTE**: You need to clean your browser cookies when you change between different modes. + Yarn commands can be used to mimic the different modes as described in the sections below: #### Self Hosted @@ -189,7 +195,7 @@ To enable this mode, use: yarn mode:account ``` ### CI - An overview of the CI pipelines can be found [here](./workflows/README.md) + An overview of the CI pipelines can be found [here](../.github/workflows/README.md) ### Pro diff --git a/docs/DEV-SETUP-MACOSX.md b/docs/DEV-SETUP-MACOSX.md index 5606fd0d10..c5990e58da 100644 --- a/docs/DEV-SETUP-MACOSX.md +++ b/docs/DEV-SETUP-MACOSX.md @@ -4,6 +4,11 @@ Install instructions [here](https://brew.sh/) +| **NOTE**: If you are working on a M1 Apple Silicon which is running Z shell, you could need to add +`eval $(/opt/homebrew/bin/brew shellenv)` line to your `.zshrc`. This will make your zsh to find the apps you install +through brew. + + ### Install Node Budibase requires a recent version of node (14+): @@ -51,4 +56,7 @@ So this command will actually run the application in dev mode. It creates .env f The dev version will be available on port 10000 i.e. 
-http://127.0.0.1:10000/builder/admin \ No newline at end of file +http://127.0.0.1:10000/builder/admin + +| **NOTE**: If you are working on a M1 Apple Silicon, you will need to uncomment `# platform: linux/amd64` line in +[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml) \ No newline at end of file diff --git a/hosting/docker-compose.yaml b/hosting/docker-compose.yaml index f669f9261d..7d3e6960dc 100644 --- a/hosting/docker-compose.yaml +++ b/hosting/docker-compose.yaml @@ -76,6 +76,8 @@ services: - "${MAIN_PORT}:10000" container_name: bbproxy image: budibase/proxy + environment: + - PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10 depends_on: - minio-service - worker-service diff --git a/hosting/nginx.prod.conf.hbs b/hosting/nginx.prod.conf.hbs index ac35a2020d..4213626309 100644 --- a/hosting/nginx.prod.conf.hbs +++ b/hosting/nginx.prod.conf.hbs @@ -9,7 +9,11 @@ events { } http { + # rate limiting + limit_req_status 429; limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s; + limit_req_zone $binary_remote_addr zone=webhooks:10m rate=${PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND}r/s; + include /etc/nginx/mime.types; default_type application/octet-stream; proxy_set_header Host $host; @@ -126,6 +130,25 @@ http { proxy_pass http://$apps:4002; } + location /api/webhooks/ { + # calls to webhooks are rate limited + limit_req zone=webhooks nodelay; + + # Rest of configuration copied from /api/ location above + # 120s timeout on API requests + proxy_read_timeout 120s; + proxy_connect_timeout 120s; + proxy_send_timeout 120s; + + proxy_http_version 1.1; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + + proxy_pass http://$apps:4002; + } + location /db/ { proxy_pass http://$couchdb:5984; rewrite ^/db/(.*)$ /$1 break; diff --git a/hosting/proxy/Dockerfile b/hosting/proxy/Dockerfile index a2b17d3333..d9b33e3e9a 
100644 --- a/hosting/proxy/Dockerfile +++ b/hosting/proxy/Dockerfile @@ -1,3 +1,13 @@ FROM nginx:latest -COPY .generated-nginx.prod.conf /etc/nginx/nginx.conf -COPY error.html /usr/share/nginx/html/error.html \ No newline at end of file + +# nginx.conf +# use the default nginx behaviour for *.template files which are processed with envsubst +# override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d +ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx +COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template + +# Error handling +COPY error.html /usr/share/nginx/html/error.html + +# Default environment +ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10 \ No newline at end of file diff --git a/hosting/scripts/build-target-paths.sh b/hosting/scripts/build-target-paths.sh index 4c165d12e7..ee314c1ce4 100644 --- a/hosting/scripts/build-target-paths.sh +++ b/hosting/scripts/build-target-paths.sh @@ -3,15 +3,18 @@ echo ${TARGETBUILD} > /buildtarget.txt if [[ "${TARGETBUILD}" = "aas" ]]; then # Azure AppService uses /home for persisent data & SSH on port 2222 - mkdir -p /home/{search,minio,couch} - mkdir -p /home/couch/{dbs,views} - chown -R couchdb:couchdb /home/couch/ + DATA_DIR=/home + mkdir -p $DATA_DIR/{search,minio,couchdb} + mkdir -p $DATA_DIR/couchdb/{dbs,views} + chown -R couchdb:couchdb $DATA_DIR/couchdb/ apt update apt-get install -y openssh-server - sed -i 's#dir=/opt/couchdb/data/search#dir=/home/search#' /opt/clouseau/clouseau.ini - sed -i 's#/minio/minio server /minio &#/minio/minio server /home/minio &#' /runner.sh - sed -i 's#database_dir = ./data#database_dir = /home/couch/dbs#' /opt/couchdb/etc/default.ini - sed -i 's#view_index_dir = ./data#view_index_dir = /home/couch/views#' /opt/couchdb/etc/default.ini sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config /etc/init.d/ssh restart -fi + sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini + sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini +else + sed -i 
"s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini + sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini + +fi \ No newline at end of file diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile index b5bf17adde..476a6e5e94 100644 --- a/hosting/single/Dockerfile +++ b/hosting/single/Dockerfile @@ -20,10 +20,10 @@ RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh FROM couchdb:3.2.1 # TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64 -ARG TARGETARCH amd64 +ARG TARGETARCH=amd64 #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service) # e.g. docker build --build-arg TARGETBUILD=aas .... -ARG TARGETBUILD single +ARG TARGETBUILD=single ENV TARGETBUILD $TARGETBUILD COPY --from=build /app /app @@ -35,9 +35,10 @@ ENV \ BUDIBASE_ENVIRONMENT=PRODUCTION \ CLUSTER_PORT=80 \ # CUSTOM_DOMAIN=budi001.custom.com \ + DATA_DIR=/data \ DEPLOYMENT_ENVIRONMENT=docker \ MINIO_URL=http://localhost:9000 \ - POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS \ + POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU \ REDIS_URL=localhost:6379 \ SELF_HOSTED=1 \ TARGETBUILD=$TARGETBUILD \ @@ -114,6 +115,7 @@ RUN chmod +x ./healthcheck.sh ADD hosting/scripts/build-target-paths.sh . 
RUN chmod +x ./build-target-paths.sh +# Script below sets the path for storing data based on $DATA_DIR # For Azure App Service install SSH & point data locations to /home RUN /build-target-paths.sh diff --git a/hosting/single/clouseau/clouseau.ini b/hosting/single/clouseau/clouseau.ini index 78e43744e5..578a5acafa 100644 --- a/hosting/single/clouseau/clouseau.ini +++ b/hosting/single/clouseau/clouseau.ini @@ -7,7 +7,7 @@ name=clouseau@127.0.0.1 cookie=monster ; the path where you would like to store the search index files -dir=/data/search +dir=DATA_DIR/search ; the number of search indexes that can be open simultaneously max_indexes_open=500 diff --git a/hosting/single/couch/local.ini b/hosting/single/couch/local.ini index 72872a60e1..35f0383dfc 100644 --- a/hosting/single/couch/local.ini +++ b/hosting/single/couch/local.ini @@ -1,5 +1,5 @@ ; CouchDB Configuration Settings [couchdb] -database_dir = /data/couch/dbs -view_index_dir = /data/couch/views +database_dir = DATA_DIR/couchdb/dbs +view_index_dir = DATA_DIR/couchdb/views diff --git a/hosting/single/healthcheck.sh b/hosting/single/healthcheck.sh index b92cd153a3..592b3e94fa 100644 --- a/hosting/single/healthcheck.sh +++ b/hosting/single/healthcheck.sh @@ -3,6 +3,11 @@ healthy=true if [ -f "/data/.env" ]; then export $(cat /data/.env | xargs) +elif [ -f "/home/.env" ]; then + export $(cat /home/.env | xargs) +else + echo "No .env file found" + healthy=false fi if [[ $(curl -Lfk -s -w "%{http_code}\n" http://localhost/ -o /dev/null) -ne 200 ]]; then diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh index 9abb2fd093..09387343ba 100644 --- a/hosting/single/runner.sh +++ b/hosting/single/runner.sh @@ -1,7 +1,16 @@ #!/bin/bash -declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD") -if [ -f "/data/.env" ]; then - export $(cat /data/.env | xargs) +declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" 
"MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD") + +# Azure App Service customisations +if [[ "${TARGETBUILD}" = "aas" ]]; then + DATA_DIR=/home + /etc/init.d/ssh start +else + DATA_DIR=${DATA_DIR:-/data} +fi + +if [ -f "${DATA_DIR}/.env" ]; then + export $(cat ${DATA_DIR}/.env | xargs) fi # first randomise any unset environment variables for ENV_VAR in "${ENV_VARS[@]}" @@ -14,21 +23,26 @@ done if [[ -z "${COUCH_DB_URL}" ]]; then export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984 fi -if [ ! -f "/data/.env" ]; then - touch /data/.env +if [ ! -f "${DATA_DIR}/.env" ]; then + touch ${DATA_DIR}/.env for ENV_VAR in "${ENV_VARS[@]}" do temp=$(eval "echo \$$ENV_VAR") - echo "$ENV_VAR=$temp" >> /data/.env + echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env done + echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env fi +export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984 + # make these directories in runner, incase of mount -mkdir -p /data/couch/{dbs,views} /home/couch/{dbs,views} -chown -R couchdb:couchdb /data/couch /home/couch +mkdir -p ${DATA_DIR}/couchdb/{dbs,views} +mkdir -p ${DATA_DIR}/minio +mkdir -p ${DATA_DIR}/search +chown -R couchdb:couchdb ${DATA_DIR}/couchdb redis-server --requirepass $REDIS_PASSWORD & /opt/clouseau/bin/clouseau & -/minio/minio server /data/minio & +/minio/minio server ${DATA_DIR}/minio & /docker-entrypoint.sh /opt/couchdb/bin/couchdb & /etc/init.d/nginx restart if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then diff --git a/i18n/README.es.md b/i18n/README.es.md index 7245dc8656..21eb8caef7 100644 --- a/i18n/README.es.md +++ b/i18n/README.es.md @@ -8,10 +8,11 @@

- Construye herramientas empresariales personalizadas en cuestión de minutos y en su propia infraestructura. + Construye herramientas empresariales personalizadas en cuestión de minutos y en tu propia infraestructura.

- Budibase es una plataforma de código bajo de código abierto, que ayuda a desarrolladores y profesionales de TI a crear, automatizar y enviar aplicaciones empresariales personalizadas en cuestión de minutos y en su propia infraestructura + Budibase es una plataforma low code de código abierto, que ayuda a desarrolladores y profesionales de TI a crear y +automatizar aplicaciones personalizadas en cuestión de minutos

@@ -20,7 +21,7 @@

- + Budibase design ui

@@ -30,9 +31,6 @@ GitHub release (latest by date) - - Discord - Follow @budibase @@ -43,130 +41,213 @@

- Sign-up + Comenzar con Budibase en la nube · - Docs + Comenzar con Docker, K8s, DO · - Feature request + Documentaciones · - Report a bug + Pedir una funcionalidad · - Support: Discussions - & - Discord + Reportar un error + · + Support: Comunidad

+

+## ✨ Caracteristicas -## ✨ Features -When other platforms chose the closed source route, we decided to go open source. When other platforms chose cloud builders, we decided a local builder offered the better developer experience. We like to do things differently at Budibase. +### Construir aplicaciones reales +Con Budibase podras construir aplicaciones de pagina unica de gran rendimiento. Ademas, puedes hacerlas con un diseño +adaptativo para darles a tus usuarios una gran experiencia. +

-- **Build and ship real software.** Unlike other platforms, with Budibase you build and ship single page applications. Budibase applications have performance baked in and can be designed responsively, providing your users with a great experience. +### Codigo abierto y ampliable +Budibase es de codigo abierto con licencia GPL v3. Puedes ampliarlo o modificarlo para adaptarlo a tus necesidades y preferencias. -- **Open source and extensable.** Budibase is open-source. The builder is licensed AGPL v3, the server is GPL v3, and the client is MPL. This should fill you with confidence that Budibase will always be around. You can also code against Budibase or fork it and make changes as you please, providing a developer-friendly experience. +De esta manera proveemos una buena experiencia para el desarrollador asi como establecemos la confianza de que Budibase siempre estara funcional. +

-- **Load data or start from scratch.** Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, mySQL, Airtable, Google Sheets, S3, DyanmoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas). +### Cargar informacion o empezar desde cero +Budibase permite importar datos desde multiples fuentes, entre las que estan incluidas: MondoDB, CouchDB, PostgreSQL, MySQL, +Airtable, S3, DynamoDB o API REST. -- **Design and build apps with powerful pre-made components.** Budibase comes out of the box with beautifully designed, powerful components which you can use like building blocks to build your UI. We also expose a lot of your favourite CSS styling options so you can go that extra creative mile. [Request new components](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas). - -- **Automate processes, integrate with other tools, and connect to webhooks.** Save time by automating manual processes and workflows. From connecting to webhooks, to automating emails, simply tell Budibase what to do and let it work for you. You can easily [create new automations for Budibase here](https://github.com/Budibase/automations) or [request new integrations here](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas). - -- **Cloud hosting and self-hosting.** Users can self-host (see below), or host their apps with Budibase. Currently, our cloud hosting offering is limited to the free tier but we aim to change this in the future. For heavy usage, we advise users to self-host. +O si lo prefieres, con Budibase puedes empezar desde cero y construir tus propias aplicaciones +sin necesidad de herramientas externas. +[Sugerir fuente de datos](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).

- Budibase design ui + Budibase data

+

+### Diseña y construye aplicaciones con componentes profesionales prediseñados -## ⌛ Status -- [x] Alpha: We are demoing Budibase to users and receiving feedback -- [x] Private Beta: We are testing Budibase with a closed set of customers -- [x] Public Beta: Anyone can [sign-up and use Budibase](https://portal.budi.live/signup). -- [ ] Official Launch +Budibase incorpora componentes profesionales prediseñados que podras usar de manera facil e intuitiva +como bloques de construccion para la interfaz de tu aplicacion. -Watch "releases" of this repo to get notified of major updates, and give the star button a click whilst you're there. +Tambien mostramos gran parte del CSS para que puedas adaptar los componentes a tus diseños. +[Sugerir componente](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).

- + Budibase design

+

-### Stargazers over time +### Procesos automatizados, integra tu aplicacion con otras herramientas y conectala a eventos webhook + +Ahorra tiempo automatizando flujos de trabajo y procesos manuales. Podras desde conectar eventos webhook hasta automatizar emails, +simplemente dile a Budibase que hacer y deja que el haga el trabajo por ti. +[Crear nuevos procesos automatizados](https://github.com/Budibase/automations) o [Sugerir proceso automatizado](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas). + +

+ Budibase automations +

+

+ +### Tus herramientas favoritas + +Budibase integra un gran numero de herramientas que te permitiran construir tus aplicaciones ajustandose a tus preferencias. + +

+ Budibase integrations +

+

+ +### Un paraiso para administradores + +Puedes albergar Budibase en tu propia infraestructura y gestionar globalmente usuarios, incorporaciones, SMTP, aplicaciones, +grupos, diseños de temas, etc. + +Tambien puedes gestionar los usuarios y grupos, o delegar en personas asignadas para ello, desde nuestra aplicacion sin +mucho esfuerzo. + +Budibase is made to scale. With Budibase, you can self-host on your own infrastructure and globally manage users, onboarding, SMTP, apps, groups, theming and more. You can also provide users/groups with an app portal and disseminate user-management to the group manager. + +- Video Promocional: https://youtu.be/xoljVpty_Kw + +
+ +--- + +
+ + +## Budibase API Publica + +Como todo lo que construimos en Budibase, nuestra nueva API publica es facil de usar, flexible e introduce nueva ampliacion +del sistema. Budibase API ofrece: +- Uso de Budibase como backend +- Interoperabilidad + +#### Documentacion + +Puedes aprender mas acerca de Budibase API en los siguientes documentos: +- [Documentacion general](https://docs.budibase.com/docs/public-api) : Como obtener tu clave para la API, usar Insomnia y Postman +- [API Interactiva](https://docs.budibase.com/reference/post_applications) : Aprende como trabajar con la API + +#### Guias + +- [Construye una aplicacion con Budibase y Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/) + 

+ Budibase data +

+

+ +


+ +## 🏁 Comenzar con Budibase + +Puedes alojar Budibase en tu propia infraestructura con Docker, Kubernetes o Digital Ocean; o usa Budibase en la nube si +quieres empezar a crear tus aplicaciones rapidamente y sin ningun tipo de preocupacion. + +### [Comenzar con Budibase self-hosting](https://docs.budibase.com/docs/hosting-methods) + +- [Docker - single ARM compatible image](https://docs.budibase.com/docs/docker) +- [Docker Compose](https://docs.budibase.com/docs/docker-compose) +- [Kubernetes](https://docs.budibase.com/docs/kubernetes-k8s) +- [Digital Ocean](https://docs.budibase.com/docs/digitalocean) +- [Portainer](https://docs.budibase.com/docs/portainer) + + +### [Comenzar con Budibase en la nube](https://budibase.com) + +

+ +## 🎓 Aprende a usar Budibase + +Aqui tienes la [documentacion de Budibase](https://docs.budibase.com/docs). +
+ + +

+ +## 💬 Comunidad + +Te invitamos a que te unas a nuestra comunidad de Budibase, alli podras hacer las preguntas que quieras, ayudar a otras +personas o tener una charla entretenida con otros usuarios de Budibase. +[Acceder a la comunidad de Budibase](https://github.com/Budibase/budibase/discussions) +


+ + +## ❗ Codigo de conducta + +Budibase presta especial atencion en acoger a personas de toda diversidad y ofrecer un entorno de respeto mutuo. Asi mismo +esperamos lo mismo de nuestra comunidad, por favor lee el +[**Codigo de conducta**](https://github.com/Budibase/budibase/blob/HEAD/docs/CODE_OF_CONDUCT.md). 
+ +

+ + +## 🙌 Contribuir en Budibase + +Desde comunicar un bug a solventar un error en el codigo, toda contribucion es apreciada y bienvenida. Si estas planeando +implementar una nueva funcionalidad o realizar un cambio en la API, por favor crea un [nuevo mensaje aqui](https://github.com/Budibase/budibase/issues), +de esta manera nos encargaremos que tu trabajo no sea en vano. + +Aqui tienes instrucciones de como configurar tu entorno Budibase para [Debian](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-DEBIAN.md) +y [MacOSX](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-MACOSX.md) + +### No estas seguro por donde empezar? +Un buen lugar para empezar a contribuir con nosotros es [aqui](https://github.com/Budibase/budibase/projects/22). + +### Organizacion del repositorio + +Budibase es un repositorio unico gestionado por Lerna. Lerna construye y publica los paquetes de Budibase sincronizandolos +cada vez que se realiza un cambio. A rasgos generales, estos son los paquetes que conforman Budibase: + +- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - contiene el codigo del builder de la parte cliente, esta es una aplicacion svelte. + +- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - Este modulo se ejecuta en el browser y es el responsable de leer definiciones JSON y crear aplicaciones web en el momento. + +- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - La parte servidor de Budibase. Esta aplicacion Koa es responsable de suministrar lo necesario al builder para asi generar las aplicaciones Budibase. Tambien provee una API para interaccionar con la base de datos y el almacenamiento de ficheros. + +Para mas informacion, por favor lee el siguiente documento [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md) + 

+ + +## 📝 Licencia + +Budibase es open-source, licenciado como [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html). El cliente y las librerias +de componentes estan licenciadas como [MPL](https://directory.fsf.org/wiki/License:MPL-2.0) - de esta manera, puedes licenciar +como tu quieras las aplicaciones que construyas. + +

+ +## ⭐ Historia de nuestros Stargazers [![Stargazers over time](https://starchart.cc/Budibase/budibase.svg)](https://starchart.cc/Budibase/budibase) -If you are having issues between updates of the builder, please use the guide [here](https://github.com/Budibase/budibase/blob/HEAD/.github/CONTRIBUTING.md#troubleshooting) to clear down your environment. +Si estas teniendo problemas con el builder despues de actualizar, por favor [lee esta guia](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md#troubleshooting) para limpiar tu entorno. 

-## 🏁 Getting Started with Budibase +## Contribuidores ✨ -The Budibase builder runs in Electron, on Mac, PC and Linux. Follow the steps below to get started: -- [ ] [Sign-up to Budibase](https://portal.budi.live/signup) -- [ ] Create a username and password -- [ ] Copy your API key -- [ ] Download Budibase -- [ ] Open Budibase and enter your API key - -[Here is a guided tutorial](https://docs.budibase.com/tutorial/tutorial-signing-up) if you need extra help. - - -## 🤖 Self-hosting - -Budibase wants to make sure anyone can use the tools we develop and we know a lot of people need to be able to host the apps they make on their own systems - that is why we've decided to try and make self hosting as easy as possible! - -Currently, you can host your apps using Docker or Digital Ocean. The documentation for self-hosting can be found [here](https://docs.budibase.com/docs/hosting-methods). - -[![Deploy to DO](https://www.deploytodo.com/do-btn-blue.svg)](https://cloud.digitalocean.com/droplets/new?onboarding_origin=marketplace&i=09038e&fleetUuid=bb04f9c8-1de8-4687-b2ae-1d5177a0535b&appId=77729671&type=applications&size=s-4vcpu-8gb®ion=nyc1&refcode=0caaa6085a82&image=budibase-20-04) - - -## 🎓 Learning Budibase - -The Budibase [documentation lives here](https://docs.budibase.com). - -You can also follow a quick tutorial on [how to build a CRM with Budibase](https://docs.budibase.com/tutorial/tutorial-introduction) - - -## Roadmap - -Checkout our [Public Roadmap](https://github.com/Budibase/budibase/projects/10). If you would like to discuss some of the items on the roadmap, please feel to reach out on [Discord](https://discord.gg/rCYayfe), or via [Github discussions](https://github.com/Budibase/budibase/discussions) - - -## ❗ Code of Conduct - -Budibase is dedicated to providing a welcoming, diverse, and harrassment-free experience for everyone. 
We expect everyone in the Budibase community to abide by our [**Code of Conduct**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md). Please read it. - -## 🙌 Contributing to Budibase - -From opening a bug report to creating a pull request: every contribution is appreciated and welcomed. If you're planning to implement a new feature or change the API please create an issue first. This way we can ensure your work is not in vain. - -### Not Sure Where to Start? -A good place to start contributing, is the [First time issues project](https://github.com/Budibase/budibase/projects/22). - -### How the repository is organized -Budibase is a monorepo managed by lerna. Lerna manages the building and publishing of the budibase packages. At a high level, here are the packages that make up Budibase. - -- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - contains code for the budibase builder client side svelte application. - -- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - A module that runs in the browser responsible for reading JSON definition and creating living, breathing web apps from it. - -- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - The budibase server. This Koa app is responsible for serving the JS for the builder and budibase apps, as well as providing the API for interaction with the database and file system. - -For more information, see [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/.github/CONTRIBUTING.md) - -## 📝 License - -Budibase is open-source. The builder is licensed [AGPL v3](https://www.gnu.org/licenses/agpl-3.0.en.html), the server is licensed [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html), and the client is licensed [MPL](https://directory.fsf.org/wiki/License:MPL-2.0). 
- -## 💬 Get in touch - -If you have a question or would like to talk with other Budibase users, please hop over to [Github discussions](https://github.com/Budibase/budibase/discussions) or join our Discord server: - -[Discord chatroom](https://discord.gg/rCYayfe) - -![Discord Shield](https://discordapp.com/api/guilds/733030666647765003/widget.png?style=shield) - - -## Contributors ✨ - -Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): +Queremos prestar un especial agradecimiento a nuestra maravillosa gente ([emoji key](https://allcontributors.org/docs/en/emoji-key)): @@ -179,14 +260,18 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
Michael Shanks

📖 💻 ⚠️
Kevin Åberg Kultalahti

📖 💻 ⚠️
Joe

📖 💻 🖋 🎨 -
Conor_Mack

💻 ⚠️ +
Rory Powell

💻 📖 ⚠️ +
Peter Clement

💻 📖 ⚠️ +
Conor_Mack

💻 ⚠️
pngwn

💻 ⚠️
HugoLd

💻
victoriasloan

💻
yashank09

💻
SOVLOOKUP

💻 +
seoulaja

🌍 +
Maurits Lourens

⚠️ 💻 @@ -195,4 +280,5 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d -This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! +Este proyecto sigue las especificaciones de [all-contributors](https://github.com/all-contributors/all-contributors). +Todo tipo de contribuciones son agradecidas! diff --git a/lerna.json b/lerna.json index e30ac928ad..9fedf86f90 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "npmClient": "yarn", "packages": [ "packages/*" diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index f9b8cdcd0b..27246f1828 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/backend-core", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "description": "Budibase backend core libraries used in server and worker", "main": "dist/src/index.js", "types": "dist/src/index.d.ts", @@ -20,13 +20,14 @@ "test:watch": "jest --watchAll" }, "dependencies": { - "@budibase/types": "1.1.33-alpha.1", + "@budibase/types": "1.2.41-alpha.0", "@techpass/passport-openidconnect": "0.3.2", "aws-sdk": "2.1030.0", "bcrypt": "5.0.1", "dotenv": "16.0.1", "emitter-listener": "1.1.2", "ioredis": "4.28.0", + "joi": "17.6.0", "jsonwebtoken": "8.5.1", "koa-passport": "4.1.4", "lodash": "4.17.21", diff --git a/packages/backend-core/src/auth.js b/packages/backend-core/src/auth.js index 9ae29a3cbd..d39b8426fb 100644 --- a/packages/backend-core/src/auth.js +++ b/packages/backend-core/src/auth.js @@ -19,6 +19,8 @@ const { csrf, internalApi, adminOnly, + builderOnly, + builderOrAdmin, joiValidator, } = require("./middleware") @@ -176,5 +178,7 @@ module.exports = { updateUserOAuth, ssoCallbackUrl, adminOnly, + builderOnly, + builderOrAdmin, joiValidator, } diff --git 
a/packages/backend-core/src/cache/appMetadata.js b/packages/backend-core/src/cache/appMetadata.js index b0d9481cbd..a7ff0d2fc1 100644 --- a/packages/backend-core/src/cache/appMetadata.js +++ b/packages/backend-core/src/cache/appMetadata.js @@ -1,6 +1,6 @@ const redis = require("../redis/init") const { doWithDB } = require("../db") -const { DocumentTypes } = require("../db/constants") +const { DocumentType } = require("../db/constants") const AppState = { INVALID: "invalid", @@ -14,7 +14,7 @@ const populateFromDB = async appId => { return doWithDB( appId, db => { - return db.get(DocumentTypes.APP_METADATA) + return db.get(DocumentType.APP_METADATA) }, { skip_setup: true } ) diff --git a/packages/backend-core/src/cache/generic.js b/packages/backend-core/src/cache/generic.js index e2f3915339..26ef0c6bb0 100644 --- a/packages/backend-core/src/cache/generic.js +++ b/packages/backend-core/src/cache/generic.js @@ -9,6 +9,7 @@ exports.CacheKeys = { UNIQUE_TENANT_ID: "uniqueTenantId", EVENTS: "events", BACKFILL_METADATA: "backfillMetadata", + EVENTS_RATE_LIMIT: "eventsRateLimit", } exports.TTL = { diff --git a/packages/backend-core/src/context/constants.ts b/packages/backend-core/src/context/constants.ts index ef8dcd7821..937ad8f248 100644 --- a/packages/backend-core/src/context/constants.ts +++ b/packages/backend-core/src/context/constants.ts @@ -1,4 +1,4 @@ -export enum ContextKeys { +export enum ContextKey { TENANT_ID = "tenantId", GLOBAL_DB = "globalDb", APP_ID = "appId", diff --git a/packages/backend-core/src/context/index.ts b/packages/backend-core/src/context/index.ts index 1e430f01de..78ce764d55 100644 --- a/packages/backend-core/src/context/index.ts +++ b/packages/backend-core/src/context/index.ts @@ -1,11 +1,11 @@ import env from "../environment" -import { SEPARATOR, DocumentTypes } from "../db/constants" +import { SEPARATOR, DocumentType } from "../db/constants" import cls from "./FunctionContext" import { dangerousGetDB, closeDB } from "../db" import { 
baseGlobalDBName } from "../tenancy/utils" import { IdentityContext } from "@budibase/types" import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants" -import { ContextKeys } from "./constants" +import { ContextKey } from "./constants" import { updateUsing, closeWithUsing, @@ -33,8 +33,8 @@ export const closeTenancy = async () => { } await closeDB(db) // clear from context now that database is closed/task is finished - cls.setOnContext(ContextKeys.TENANT_ID, null) - cls.setOnContext(ContextKeys.GLOBAL_DB, null) + cls.setOnContext(ContextKey.TENANT_ID, null) + cls.setOnContext(ContextKey.GLOBAL_DB, null) } // export const isDefaultTenant = () => { @@ -54,7 +54,7 @@ export const getTenantIDFromAppID = (appId: string) => { return null } const split = appId.split(SEPARATOR) - const hasDev = split[1] === DocumentTypes.DEV + const hasDev = split[1] === DocumentType.DEV if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) { return null } @@ -83,14 +83,14 @@ export const doInTenant = (tenantId: string | null, task: any) => { // invoke the task return await task() } finally { - await closeWithUsing(ContextKeys.TENANCY_IN_USE, () => { + await closeWithUsing(ContextKey.TENANCY_IN_USE, () => { return closeTenancy() }) } } - const existing = cls.getFromContext(ContextKeys.TENANT_ID) === tenantId - return updateUsing(ContextKeys.TENANCY_IN_USE, existing, internal) + const existing = cls.getFromContext(ContextKey.TENANT_ID) === tenantId + return updateUsing(ContextKey.TENANCY_IN_USE, existing, internal) } export const doInAppContext = (appId: string, task: any) => { @@ -108,7 +108,7 @@ export const doInAppContext = (appId: string, task: any) => { setAppTenantId(appId) } // set the app ID - cls.setOnContext(ContextKeys.APP_ID, appId) + cls.setOnContext(ContextKey.APP_ID, appId) // preserve the identity if (identity) { @@ -118,14 +118,14 @@ export const doInAppContext = (appId: string, task: any) => { // invoke the task return await task() } finally { 
- await closeWithUsing(ContextKeys.APP_IN_USE, async () => { + await closeWithUsing(ContextKey.APP_IN_USE, async () => { await closeAppDBs() await closeTenancy() }) } } - const existing = cls.getFromContext(ContextKeys.APP_ID) === appId - return updateUsing(ContextKeys.APP_IN_USE, existing, internal) + const existing = cls.getFromContext(ContextKey.APP_ID) === appId + return updateUsing(ContextKey.APP_IN_USE, existing, internal) } export const doInIdentityContext = (identity: IdentityContext, task: any) => { @@ -135,7 +135,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => { async function internal(opts = { existing: false }) { if (!opts.existing) { - cls.setOnContext(ContextKeys.IDENTITY, identity) + cls.setOnContext(ContextKey.IDENTITY, identity) // set the tenant so that doInTenant will preserve identity if (identity.tenantId) { updateTenantId(identity.tenantId) @@ -146,27 +146,27 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => { // invoke the task return await task() } finally { - await closeWithUsing(ContextKeys.IDENTITY_IN_USE, async () => { + await closeWithUsing(ContextKey.IDENTITY_IN_USE, async () => { setIdentity(null) await closeTenancy() }) } } - const existing = cls.getFromContext(ContextKeys.IDENTITY) - return updateUsing(ContextKeys.IDENTITY_IN_USE, existing, internal) + const existing = cls.getFromContext(ContextKey.IDENTITY) + return updateUsing(ContextKey.IDENTITY_IN_USE, existing, internal) } export const getIdentity = (): IdentityContext | undefined => { try { - return cls.getFromContext(ContextKeys.IDENTITY) + return cls.getFromContext(ContextKey.IDENTITY) } catch (e) { // do nothing - identity is not in context } } export const updateTenantId = (tenantId: string | null) => { - cls.setOnContext(ContextKeys.TENANT_ID, tenantId) + cls.setOnContext(ContextKey.TENANT_ID, tenantId) if (env.USE_COUCH) { setGlobalDB(tenantId) } @@ -176,7 +176,7 @@ export const updateAppId = async (appId: 
string) => { try { // have to close first, before removing the databases from context await closeAppDBs() - cls.setOnContext(ContextKeys.APP_ID, appId) + cls.setOnContext(ContextKey.APP_ID, appId) } catch (err) { if (env.isTest()) { TEST_APP_ID = appId @@ -189,12 +189,12 @@ export const updateAppId = async (appId: string) => { export const setGlobalDB = (tenantId: string | null) => { const dbName = baseGlobalDBName(tenantId) const db = dangerousGetDB(dbName) - cls.setOnContext(ContextKeys.GLOBAL_DB, db) + cls.setOnContext(ContextKey.GLOBAL_DB, db) return db } export const getGlobalDB = () => { - const db = cls.getFromContext(ContextKeys.GLOBAL_DB) + const db = cls.getFromContext(ContextKey.GLOBAL_DB) if (!db) { throw new Error("Global DB not found") } @@ -202,7 +202,7 @@ export const getGlobalDB = () => { } export const isTenantIdSet = () => { - const tenantId = cls.getFromContext(ContextKeys.TENANT_ID) + const tenantId = cls.getFromContext(ContextKey.TENANT_ID) return !!tenantId } @@ -210,7 +210,7 @@ export const getTenantId = () => { if (!isMultiTenant()) { return DEFAULT_TENANT_ID } - const tenantId = cls.getFromContext(ContextKeys.TENANT_ID) + const tenantId = cls.getFromContext(ContextKey.TENANT_ID) if (!tenantId) { throw new Error("Tenant id not found") } @@ -218,7 +218,7 @@ export const getTenantId = () => { } export const getAppId = () => { - const foundId = cls.getFromContext(ContextKeys.APP_ID) + const foundId = cls.getFromContext(ContextKey.APP_ID) if (!foundId && env.isTest() && TEST_APP_ID) { return TEST_APP_ID } else { @@ -231,7 +231,7 @@ export const getAppId = () => { * contained, dev or prod. */ export const getAppDB = (opts?: any) => { - return getContextDB(ContextKeys.CURRENT_DB, opts) + return getContextDB(ContextKey.CURRENT_DB, opts) } /** @@ -239,7 +239,7 @@ export const getAppDB = (opts?: any) => { * contained a development app ID, this will open the prod one. 
*/ export const getProdAppDB = (opts?: any) => { - return getContextDB(ContextKeys.PROD_DB, opts) + return getContextDB(ContextKey.PROD_DB, opts) } /** @@ -247,5 +247,5 @@ export const getProdAppDB = (opts?: any) => { * contained a prod app ID, this will open the dev one. */ export const getDevAppDB = (opts?: any) => { - return getContextDB(ContextKeys.DEV_DB, opts) + return getContextDB(ContextKey.DEV_DB, opts) } diff --git a/packages/backend-core/src/context/utils.ts b/packages/backend-core/src/context/utils.ts index 62693f18e8..6e7100b594 100644 --- a/packages/backend-core/src/context/utils.ts +++ b/packages/backend-core/src/context/utils.ts @@ -6,7 +6,7 @@ import { } from "./index" import cls from "./FunctionContext" import { IdentityContext } from "@budibase/types" -import { ContextKeys } from "./constants" +import { ContextKey } from "./constants" import { dangerousGetDB, closeDB } from "../db" import { isEqual } from "lodash" import { getDevelopmentAppID, getProdAppID } from "../db/conversions" @@ -47,17 +47,13 @@ export const setAppTenantId = (appId: string) => { } export const setIdentity = (identity: IdentityContext | null) => { - cls.setOnContext(ContextKeys.IDENTITY, identity) + cls.setOnContext(ContextKey.IDENTITY, identity) } // this function makes sure the PouchDB objects are closed and // fully deleted when finished - this protects against memory leaks export async function closeAppDBs() { - const dbKeys = [ - ContextKeys.CURRENT_DB, - ContextKeys.PROD_DB, - ContextKeys.DEV_DB, - ] + const dbKeys = [ContextKey.CURRENT_DB, ContextKey.PROD_DB, ContextKey.DEV_DB] for (let dbKey of dbKeys) { const db = cls.getFromContext(dbKey) if (!db) { @@ -68,16 +64,16 @@ export async function closeAppDBs() { cls.setOnContext(dbKey, null) } // clear the app ID now that the databases are closed - if (cls.getFromContext(ContextKeys.APP_ID)) { - cls.setOnContext(ContextKeys.APP_ID, null) + if (cls.getFromContext(ContextKey.APP_ID)) { + 
cls.setOnContext(ContextKey.APP_ID, null) } - if (cls.getFromContext(ContextKeys.DB_OPTS)) { - cls.setOnContext(ContextKeys.DB_OPTS, null) + if (cls.getFromContext(ContextKey.DB_OPTS)) { + cls.setOnContext(ContextKey.DB_OPTS, null) } } export function getContextDB(key: string, opts: any) { - const dbOptsKey = `${key}${ContextKeys.DB_OPTS}` + const dbOptsKey = `${key}${ContextKey.DB_OPTS}` let storedOpts = cls.getFromContext(dbOptsKey) let db = cls.getFromContext(key) if (db && isEqual(opts, storedOpts)) { @@ -88,13 +84,13 @@ export function getContextDB(key: string, opts: any) { let toUseAppId switch (key) { - case ContextKeys.CURRENT_DB: + case ContextKey.CURRENT_DB: toUseAppId = appId break - case ContextKeys.PROD_DB: + case ContextKey.PROD_DB: toUseAppId = getProdAppID(appId) break - case ContextKeys.DEV_DB: + case ContextKey.DEV_DB: toUseAppId = getDevelopmentAppID(appId) break } diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/db/constants.ts index 9c6be25424..460476da24 100644 --- a/packages/backend-core/src/db/constants.ts +++ b/packages/backend-core/src/db/constants.ts @@ -4,13 +4,13 @@ export const UNICODE_MAX = "\ufff0" /** * Can be used to create a few different forms of querying a view. 
*/ -export enum AutomationViewModes { +export enum AutomationViewMode { ALL = "all", AUTOMATION = "automation", STATUS = "status", } -export enum ViewNames { +export enum ViewName { USER_BY_APP = "by_app", USER_BY_EMAIL = "by_email2", BY_API_KEY = "by_api_key", @@ -21,13 +21,13 @@ export enum ViewNames { } export const DeprecatedViews = { - [ViewNames.USER_BY_EMAIL]: [ + [ViewName.USER_BY_EMAIL]: [ // removed due to inaccuracy in view doc filter logic "by_email", ], } -export enum DocumentTypes { +export enum DocumentType { USER = "us", GROUP = "gr", WORKSPACE = "workspace", @@ -62,6 +62,6 @@ export const StaticDatabases = { }, } -export const APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR -export const APP_DEV = exports.DocumentTypes.APP_DEV + exports.SEPARATOR +export const APP_PREFIX = DocumentType.APP + SEPARATOR +export const APP_DEV = DocumentType.APP_DEV + SEPARATOR export const APP_DEV_PREFIX = APP_DEV diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts index 8ab6fa6e98..321ebd7f58 100644 --- a/packages/backend-core/src/db/utils.ts +++ b/packages/backend-core/src/db/utils.ts @@ -1,7 +1,7 @@ import { newid } from "../hashing" import { DEFAULT_TENANT_ID, Configs } from "../constants" import env from "../environment" -import { SEPARATOR, DocumentTypes, UNICODE_MAX, ViewNames } from "./constants" +import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants" import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy" import fetch from "node-fetch" import { doWithDB, allDbs } from "./index" @@ -58,7 +58,7 @@ export function getDocParams( /** * Retrieve the correct index for a view based on default design DB. 
*/ -export function getQueryIndex(viewName: ViewNames) { +export function getQueryIndex(viewName: ViewName) { return `database/${viewName}` } @@ -67,7 +67,7 @@ export function getQueryIndex(viewName: ViewNames) { * @returns {string} The new workspace ID which the workspace doc can be stored under. */ export function generateWorkspaceID() { - return `${DocumentTypes.WORKSPACE}${SEPARATOR}${newid()}` + return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}` } /** @@ -76,8 +76,8 @@ export function generateWorkspaceID() { export function getWorkspaceParams(id = "", otherProps = {}) { return { ...otherProps, - startkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}`, - endkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`, + startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`, + endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`, } } @@ -86,7 +86,7 @@ export function getWorkspaceParams(id = "", otherProps = {}) { * @returns {string} The new user ID which the user doc can be stored under. */ export function generateGlobalUserID(id?: any) { - return `${DocumentTypes.USER}${SEPARATOR}${id || newid()}` + return `${DocumentType.USER}${SEPARATOR}${id || newid()}` } /** @@ -102,8 +102,8 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) { // need to include this incase pagination startkey: startkey ? startkey - : `${DocumentTypes.USER}${SEPARATOR}${globalId}`, - endkey: `${DocumentTypes.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`, + : `${DocumentType.USER}${SEPARATOR}${globalId}`, + endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`, } } @@ -121,7 +121,7 @@ export function getUsersByAppParams(appId: any, otherProps: any = {}) { * @param ownerId The owner/user of the template, this could be global or a workspace level. 
*/ export function generateTemplateID(ownerId: any) { - return `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}` + return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}` } export function generateAppUserID(prodAppId: string, userId: string) { @@ -143,7 +143,7 @@ export function getTemplateParams( if (templateId) { final = templateId } else { - final = `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}` + final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}` } return { ...otherProps, @@ -157,14 +157,14 @@ export function getTemplateParams( * @returns {string} The new role ID which the role doc can be stored under. */ export function generateRoleID(id: any) { - return `${DocumentTypes.ROLE}${SEPARATOR}${id || newid()}` + return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}` } /** * Gets parameters for retrieving a role, this is a utility function for the getDocParams function. */ export function getRoleParams(roleId = null, otherProps = {}) { - return getDocParams(DocumentTypes.ROLE, roleId, otherProps) + return getDocParams(DocumentType.ROLE, roleId, otherProps) } export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) { @@ -211,9 +211,9 @@ export async function getAllDbs(opts = { efficient: false }) { await addDbs(couchUrl) } else { // get prod apps - await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId)) + await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId)) // get dev apps - await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP_DEV, tenantId)) + await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId)) // add global db name dbs.push(getGlobalDBName(tenantId)) } @@ -233,14 +233,18 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) { } let dbs = await getAllDbs({ efficient }) const appDbNames = dbs.filter((dbName: any) => { + if (env.isTest() && !dbName) { + return false + } + const split = 
dbName.split(SEPARATOR) // it is an app, check the tenantId - if (split[0] === DocumentTypes.APP) { + if (split[0] === DocumentType.APP) { // tenantId is always right before the UUID const possibleTenantId = split[split.length - 2] const noTenantId = - split.length === 2 || possibleTenantId === DocumentTypes.DEV + split.length === 2 || possibleTenantId === DocumentType.DEV return ( (tenantId === DEFAULT_TENANT_ID && noTenantId) || @@ -326,7 +330,7 @@ export async function dbExists(dbName: any) { export const generateConfigID = ({ type, workspace, user }: any) => { const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR) - return `${DocumentTypes.CONFIG}${SEPARATOR}${scope}` + return `${DocumentType.CONFIG}${SEPARATOR}${scope}` } /** @@ -340,8 +344,8 @@ export const getConfigParams = ( return { ...otherProps, - startkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`, - endkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`, + startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`, + endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`, } } @@ -350,7 +354,7 @@ export const getConfigParams = ( * @returns {string} The new dev info ID which info for dev (like api key) can be stored under. 
*/ export const generateDevInfoID = (userId: any) => { - return `${DocumentTypes.DEV_INFO}${SEPARATOR}${userId}` + return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}` } /** diff --git a/packages/backend-core/src/db/views.js b/packages/backend-core/src/db/views.js index baf1807ca5..3a45611a8f 100644 --- a/packages/backend-core/src/db/views.js +++ b/packages/backend-core/src/db/views.js @@ -1,6 +1,6 @@ const { - DocumentTypes, - ViewNames, + DocumentType, + ViewName, DeprecatedViews, SEPARATOR, } = require("./utils") @@ -44,14 +44,14 @@ exports.createNewUserEmailView = async () => { const view = { // if using variables in a map function need to inject them before use map: `function(doc) { - if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}")) { + if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) { emit(doc.email.toLowerCase(), doc._id) } }`, } designDoc.views = { ...designDoc.views, - [ViewNames.USER_BY_EMAIL]: view, + [ViewName.USER_BY_EMAIL]: view, } await db.put(designDoc) } @@ -68,7 +68,7 @@ exports.createUserAppView = async () => { const view = { // if using variables in a map function need to inject them before use map: `function(doc) { - if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}") && doc.roles) { + if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) { for (let prodAppId of Object.keys(doc.roles)) { let emitted = prodAppId + "${SEPARATOR}" + doc._id emit(emitted, null) @@ -78,7 +78,7 @@ exports.createUserAppView = async () => { } designDoc.views = { ...designDoc.views, - [ViewNames.USER_BY_APP]: view, + [ViewName.USER_BY_APP]: view, } await db.put(designDoc) } @@ -93,14 +93,14 @@ exports.createApiKeyView = async () => { } const view = { map: `function(doc) { - if (doc._id.startsWith("${DocumentTypes.DEV_INFO}") && doc.apiKey) { + if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) { emit(doc.apiKey, doc.userId) } }`, } designDoc.views = { ...designDoc.views, - [ViewNames.BY_API_KEY]: view, 
+ [ViewName.BY_API_KEY]: view, } await db.put(designDoc) } @@ -123,17 +123,17 @@ exports.createUserBuildersView = async () => { } designDoc.views = { ...designDoc.views, - [ViewNames.USER_BY_BUILDERS]: view, + [ViewName.USER_BY_BUILDERS]: view, } await db.put(designDoc) } exports.queryGlobalView = async (viewName, params, db = null) => { const CreateFuncByName = { - [ViewNames.USER_BY_EMAIL]: exports.createNewUserEmailView, - [ViewNames.BY_API_KEY]: exports.createApiKeyView, - [ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView, - [ViewNames.USER_BY_APP]: exports.createUserAppView, + [ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView, + [ViewName.BY_API_KEY]: exports.createApiKeyView, + [ViewName.USER_BY_BUILDERS]: exports.createUserBuildersView, + [ViewName.USER_BY_APP]: exports.createUserAppView, } // can pass DB in if working with something specific if (!db) { diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 37804b31a6..0348d921ab 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -55,6 +55,8 @@ const env = { DEFAULT_LICENSE: process.env.DEFAULT_LICENSE, SERVICE: process.env.SERVICE || "budibase", MEMORY_LEAK_CHECK: process.env.MEMORY_LEAK_CHECK || false, + LOG_LEVEL: process.env.LOG_LEVEL, + SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD, DEPLOYMENT_ENVIRONMENT: process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose", _set(key: any, value: any) { diff --git a/packages/backend-core/src/events/processors/AnalyticsProcessor.ts b/packages/backend-core/src/events/processors/AnalyticsProcessor.ts index 2ee7a02afa..f9d7547120 100644 --- a/packages/backend-core/src/events/processors/AnalyticsProcessor.ts +++ b/packages/backend-core/src/events/processors/AnalyticsProcessor.ts @@ -2,7 +2,7 @@ import { Event, Identity, Group, IdentityType } from "@budibase/types" import { EventProcessor } from "./types" import env from "../../environment" 
import * as analytics from "../analytics" -import PosthogProcessor from "./PosthogProcessor" +import PosthogProcessor from "./posthog" /** * Events that are always captured. @@ -32,7 +32,7 @@ export default class AnalyticsProcessor implements EventProcessor { return } if (this.posthog) { - this.posthog.processEvent(event, identity, properties, timestamp) + await this.posthog.processEvent(event, identity, properties, timestamp) } } @@ -45,14 +45,14 @@ export default class AnalyticsProcessor implements EventProcessor { return } if (this.posthog) { - this.posthog.identify(identity, timestamp) + await this.posthog.identify(identity, timestamp) } } async identifyGroup(group: Group, timestamp?: string | number) { // Group indentifications (tenant and installation) always on if (this.posthog) { - this.posthog.identifyGroup(group, timestamp) + await this.posthog.identifyGroup(group, timestamp) } } diff --git a/packages/backend-core/src/events/processors/PosthogProcessor.ts b/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts similarity index 72% rename from packages/backend-core/src/events/processors/PosthogProcessor.ts rename to packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts index eb12db1dc4..593e5ff082 100644 --- a/packages/backend-core/src/events/processors/PosthogProcessor.ts +++ b/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts @@ -1,9 +1,26 @@ import PostHog from "posthog-node" import { Event, Identity, Group, BaseEvent } from "@budibase/types" -import { EventProcessor } from "./types" -import env from "../../environment" -import * as context from "../../context" -const pkg = require("../../../package.json") +import { EventProcessor } from "../types" +import env from "../../../environment" +import * as context from "../../../context" +import * as rateLimiting from "./rateLimiting" +const pkg = require("../../../../package.json") + +const EXCLUDED_EVENTS: Event[] = [ + Event.USER_UPDATED, + 
Event.EMAIL_SMTP_UPDATED, + Event.AUTH_SSO_UPDATED, + Event.APP_UPDATED, + Event.ROLE_UPDATED, + Event.DATASOURCE_UPDATED, + Event.QUERY_UPDATED, + Event.TABLE_UPDATED, + Event.VIEW_UPDATED, + Event.VIEW_FILTER_UPDATED, + Event.VIEW_CALCULATION_UPDATED, + Event.AUTOMATION_TRIGGER_UPDATED, + Event.USER_GROUP_UPDATED, +] export default class PosthogProcessor implements EventProcessor { posthog: PostHog @@ -21,6 +38,15 @@ export default class PosthogProcessor implements EventProcessor { properties: BaseEvent, timestamp?: string | number ): Promise { + // don't send excluded events + if (EXCLUDED_EVENTS.includes(event)) { + return + } + + if (await rateLimiting.limited(event)) { + return + } + properties.version = pkg.version properties.service = env.SERVICE properties.environment = identity.environment diff --git a/packages/backend-core/src/events/processors/posthog/index.ts b/packages/backend-core/src/events/processors/posthog/index.ts new file mode 100644 index 0000000000..dceb10d2cd --- /dev/null +++ b/packages/backend-core/src/events/processors/posthog/index.ts @@ -0,0 +1,2 @@ +import PosthogProcessor from "./PosthogProcessor" +export default PosthogProcessor diff --git a/packages/backend-core/src/events/processors/posthog/rateLimiting.ts b/packages/backend-core/src/events/processors/posthog/rateLimiting.ts new file mode 100644 index 0000000000..9c7b7876d6 --- /dev/null +++ b/packages/backend-core/src/events/processors/posthog/rateLimiting.ts @@ -0,0 +1,106 @@ +import { Event } from "@budibase/types" +import { CacheKeys, TTL } from "../../../cache/generic" +import * as cache from "../../../cache/generic" +import * as context from "../../../context" + +type RateLimitedEvent = + | Event.SERVED_BUILDER + | Event.SERVED_APP_PREVIEW + | Event.SERVED_APP + +const isRateLimited = (event: Event): event is RateLimitedEvent => { + return ( + event === Event.SERVED_BUILDER || + event === Event.SERVED_APP_PREVIEW || + event === Event.SERVED_APP + ) +} + +const isPerApp = 
(event: RateLimitedEvent) => { + return event === Event.SERVED_APP_PREVIEW || event === Event.SERVED_APP +} + +interface EventProperties { + timestamp: number +} + +enum RateLimit { + CALENDAR_DAY = "calendarDay", +} + +const RATE_LIMITS = { + [Event.SERVED_APP]: RateLimit.CALENDAR_DAY, + [Event.SERVED_APP_PREVIEW]: RateLimit.CALENDAR_DAY, + [Event.SERVED_BUILDER]: RateLimit.CALENDAR_DAY, +} + +/** + * Check if this event should be sent right now + * Return false to signal the event SHOULD be sent + * Return true to signal the event should NOT be sent + */ +export const limited = async (event: Event): Promise => { + // not a rate limited event -- send + if (!isRateLimited(event)) { + return false + } + + const cachedEvent = await readEvent(event) + if (cachedEvent) { + const timestamp = new Date(cachedEvent.timestamp) + const limit = RATE_LIMITS[event] + switch (limit) { + case RateLimit.CALENDAR_DAY: { + // get midnight at the start of the next day for the timestamp + timestamp.setDate(timestamp.getDate() + 1) + timestamp.setHours(0, 0, 0, 0) + + // if we have passed the threshold into the next day + if (Date.now() > timestamp.getTime()) { + // update the timestamp in the event -- send + await recordEvent(event, { timestamp: Date.now() }) + return false + } else { + // still within the limited period -- don't send + return true + } + } + } + } else { + // no event present i.e. 
expired -- send + await recordEvent(event, { timestamp: Date.now() }) + return false + } +} + +const eventKey = (event: RateLimitedEvent) => { + let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}` + if (isPerApp(event)) { + key = key + ":" + context.getAppId() + } + return key +} + +const readEvent = async ( + event: RateLimitedEvent +): Promise => { + const key = eventKey(event) + const result = await cache.get(key) + return result as EventProperties +} + +const recordEvent = async ( + event: RateLimitedEvent, + properties: EventProperties +) => { + const key = eventKey(event) + const limit = RATE_LIMITS[event] + let ttl + switch (limit) { + case RateLimit.CALENDAR_DAY: { + ttl = TTL.ONE_DAY + } + } + + await cache.store(key, properties, ttl) +} diff --git a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts new file mode 100644 index 0000000000..d14b697966 --- /dev/null +++ b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts @@ -0,0 +1,145 @@ +import "../../../../../tests/utilities/TestConfiguration" +import PosthogProcessor from "../PosthogProcessor" +import { Event, IdentityType, Hosting } from "@budibase/types" +const tk = require("timekeeper") +import * as cache from "../../../../cache/generic" +import { CacheKeys } from "../../../../cache/generic" +import * as context from "../../../../context" + +const newIdentity = () => { + return { + id: "test", + type: IdentityType.USER, + hosting: Hosting.SELF, + environment: "test", + } +} + +describe("PosthogProcessor", () => { + beforeEach(async () => { + jest.clearAllMocks() + await cache.bustCache( + `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` + ) + }) + + describe("processEvent", () => { + it("processes event", async () => { + const processor = new PosthogProcessor("test") + + const identity = newIdentity() + const properties = {} + + await 
processor.processEvent(Event.APP_CREATED, identity, properties) + + expect(processor.posthog.capture).toHaveBeenCalledTimes(1) + }) + + it("honours exclusions", async () => { + const processor = new PosthogProcessor("test") + + const identity = newIdentity() + const properties = {} + + await processor.processEvent(Event.AUTH_SSO_UPDATED, identity, properties) + expect(processor.posthog.capture).toHaveBeenCalledTimes(0) + }) + + describe("rate limiting", () => { + it("sends daily event once in same day", async () => { + const processor = new PosthogProcessor("test") + const identity = newIdentity() + const properties = {} + + tk.freeze(new Date(2022, 0, 1, 14, 0)) + await processor.processEvent(Event.SERVED_BUILDER, identity, properties) + // go forward one hour + tk.freeze(new Date(2022, 0, 1, 15, 0)) + await processor.processEvent(Event.SERVED_BUILDER, identity, properties) + + expect(processor.posthog.capture).toHaveBeenCalledTimes(1) + }) + + it("sends daily event once per unique day", async () => { + const processor = new PosthogProcessor("test") + const identity = newIdentity() + const properties = {} + + tk.freeze(new Date(2022, 0, 1, 14, 0)) + await processor.processEvent(Event.SERVED_BUILDER, identity, properties) + // go forward into next day + tk.freeze(new Date(2022, 0, 2, 9, 0)) + await processor.processEvent(Event.SERVED_BUILDER, identity, properties) + // go forward into next day + tk.freeze(new Date(2022, 0, 3, 5, 0)) + await processor.processEvent(Event.SERVED_BUILDER, identity, properties) + // go forward one hour + tk.freeze(new Date(2022, 0, 3, 6, 0)) + await processor.processEvent(Event.SERVED_BUILDER, identity, properties) + + expect(processor.posthog.capture).toHaveBeenCalledTimes(3) + }) + + it("sends event again after cache expires", async () => { + const processor = new PosthogProcessor("test") + const identity = newIdentity() + const properties = {} + + tk.freeze(new Date(2022, 0, 1, 14, 0)) + await 
processor.processEvent(Event.SERVED_BUILDER, identity, properties) + + await cache.bustCache( + `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` + ) + + tk.freeze(new Date(2022, 0, 1, 14, 0)) + await processor.processEvent(Event.SERVED_BUILDER, identity, properties) + + expect(processor.posthog.capture).toHaveBeenCalledTimes(2) + }) + + it("sends per app events once per day per app", async () => { + const processor = new PosthogProcessor("test") + const identity = newIdentity() + const properties = {} + + const runAppEvents = async (appId: string) => { + await context.doInAppContext(appId, async () => { + tk.freeze(new Date(2022, 0, 1, 14, 0)) + await processor.processEvent(Event.SERVED_APP, identity, properties) + await processor.processEvent( + Event.SERVED_APP_PREVIEW, + identity, + properties + ) + + // go forward one hour - should be ignored + tk.freeze(new Date(2022, 0, 1, 15, 0)) + await processor.processEvent(Event.SERVED_APP, identity, properties) + await processor.processEvent( + Event.SERVED_APP_PREVIEW, + identity, + properties + ) + + // go forward into next day + tk.freeze(new Date(2022, 0, 2, 9, 0)) + + await processor.processEvent(Event.SERVED_APP, identity, properties) + await processor.processEvent( + Event.SERVED_APP_PREVIEW, + identity, + properties + ) + }) + } + + await runAppEvents("app_1") + expect(processor.posthog.capture).toHaveBeenCalledTimes(4) + + await runAppEvents("app_2") + expect(processor.posthog.capture).toHaveBeenCalledTimes(8) + }) + }) + }) +}) diff --git a/packages/backend-core/src/events/publishers/license.ts b/packages/backend-core/src/events/publishers/license.ts index 44dafd84ce..1adc71652e 100644 --- a/packages/backend-core/src/events/publishers/license.ts +++ b/packages/backend-core/src/events/publishers/license.ts @@ -20,12 +20,6 @@ export async function downgraded(license: License) { await publishEvent(Event.LICENSE_DOWNGRADED, properties) } -// TODO -export async function updated(license: License) { - const 
properties: LicenseUpdatedEvent = {} - await publishEvent(Event.LICENSE_UPDATED, properties) -} - // TODO export async function activated(license: License) { const properties: LicenseActivatedEvent = {} diff --git a/packages/backend-core/src/events/publishers/serve.ts b/packages/backend-core/src/events/publishers/serve.ts index 13afede029..128e0b9b11 100644 --- a/packages/backend-core/src/events/publishers/serve.ts +++ b/packages/backend-core/src/events/publishers/serve.ts @@ -7,22 +7,26 @@ import { AppServedEvent, } from "@budibase/types" -export async function servedBuilder() { - const properties: BuilderServedEvent = {} +export async function servedBuilder(timezone: string) { + const properties: BuilderServedEvent = { + timezone, + } await publishEvent(Event.SERVED_BUILDER, properties) } -export async function servedApp(app: App) { +export async function servedApp(app: App, timezone: string) { const properties: AppServedEvent = { appVersion: app.version, + timezone, } await publishEvent(Event.SERVED_APP, properties) } -export async function servedAppPreview(app: App) { +export async function servedAppPreview(app: App, timezone: string) { const properties: AppPreviewServedEvent = { appId: app.appId, appVersion: app.version, + timezone, } await publishEvent(Event.SERVED_APP_PREVIEW, properties) } diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts index 35777ae817..6d2e8dcd10 100644 --- a/packages/backend-core/src/index.ts +++ b/packages/backend-core/src/index.ts @@ -1,4 +1,5 @@ import errors from "./errors" + const errorClasses = errors.errors import * as events from "./events" import * as migrations from "./migrations" @@ -9,12 +10,13 @@ import * as installation from "./installation" import env from "./environment" import tenancy from "./tenancy" import featureFlags from "./featureFlags" -import sessions from "./security/sessions" +import * as sessions from "./security/sessions" import deprovisioning from "./context/deprovision" 
import auth from "./auth" import constants from "./constants" import * as dbConstants from "./db/constants" import logging from "./logging" +import pino from "./pino" // mimic the outer package exports import * as db from "./pkg/db" @@ -53,6 +55,7 @@ const core = { errors, logging, roles, + ...pino, ...errorClasses, } diff --git a/packages/backend-core/src/middleware/authenticated.js b/packages/backend-core/src/middleware/authenticated.ts similarity index 60% rename from packages/backend-core/src/middleware/authenticated.js rename to packages/backend-core/src/middleware/authenticated.ts index d86af773c3..b51ead46b9 100644 --- a/packages/backend-core/src/middleware/authenticated.js +++ b/packages/backend-core/src/middleware/authenticated.ts @@ -1,28 +1,39 @@ -const { Cookies, Headers } = require("../constants") -const { getCookie, clearCookie, openJwt } = require("../utils") -const { getUser } = require("../cache/user") -const { getSession, updateSessionTTL } = require("../security/sessions") -const { buildMatcherRegex, matches } = require("./matchers") -const env = require("../environment") -const { SEPARATOR } = require("../db/constants") -const { ViewNames } = require("../db/utils") -const { queryGlobalView } = require("../db/views") -const { getGlobalDB, doInTenant } = require("../tenancy") -const { decrypt } = require("../security/encryption") +import { Cookies, Headers } from "../constants" +import { getCookie, clearCookie, openJwt } from "../utils" +import { getUser } from "../cache/user" +import { getSession, updateSessionTTL } from "../security/sessions" +import { buildMatcherRegex, matches } from "./matchers" +import { SEPARATOR } from "../db/constants" +import { ViewName } from "../db/utils" +import { queryGlobalView } from "../db/views" +import { getGlobalDB, doInTenant } from "../tenancy" +import { decrypt } from "../security/encryption" const identity = require("../context/identity") +const env = require("../environment") -function finalise( - ctx, - { 
authenticated, user, internal, version, publicEndpoint } = {} -) { - ctx.publicEndpoint = publicEndpoint || false - ctx.isAuthenticated = authenticated || false - ctx.user = user - ctx.internal = internal || false - ctx.version = version +const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000 + +interface FinaliseOpts { + authenticated?: boolean + internal?: boolean + publicEndpoint?: boolean + version?: string + user?: any } -async function checkApiKey(apiKey, populateUser) { +function timeMinusOneMinute() { + return new Date(Date.now() - ONE_MINUTE).toISOString() +} + +function finalise(ctx: any, opts: FinaliseOpts = {}) { + ctx.publicEndpoint = opts.publicEndpoint || false + ctx.isAuthenticated = opts.authenticated || false + ctx.user = opts.user + ctx.internal = opts.internal || false + ctx.version = opts.version +} + +async function checkApiKey(apiKey: string, populateUser?: Function) { if (apiKey === env.INTERNAL_API_KEY) { return { valid: true } } @@ -32,7 +43,7 @@ async function checkApiKey(apiKey, populateUser) { const db = getGlobalDB() // api key is encrypted in the database const userId = await queryGlobalView( - ViewNames.BY_API_KEY, + ViewName.BY_API_KEY, { key: apiKey, }, @@ -56,10 +67,12 @@ async function checkApiKey(apiKey, populateUser) { */ module.exports = ( noAuthPatterns = [], - opts = { publicAllowed: false, populateUser: null } + opts: { publicAllowed: boolean; populateUser?: Function } = { + publicAllowed: false, + } ) => { const noAuthOptions = noAuthPatterns ? 
buildMatcherRegex(noAuthPatterns) : [] - return async (ctx, next) => { + return async (ctx: any, next: any) => { let publicEndpoint = false const version = ctx.request.headers[Headers.API_VER] // the path is not authenticated @@ -71,45 +84,40 @@ module.exports = ( // check the actual user is authenticated first, try header or cookie const headerToken = ctx.request.headers[Headers.TOKEN] const authCookie = getCookie(ctx, Cookies.Auth) || openJwt(headerToken) + const apiKey = ctx.request.headers[Headers.API_KEY] + const tenantId = ctx.request.headers[Headers.TENANT_ID] let authenticated = false, user = null, internal = false - if (authCookie) { - let error = null + if (authCookie && !apiKey) { const sessionId = authCookie.sessionId const userId = authCookie.userId - - const session = await getSession(userId, sessionId) - if (!session) { - error = "No session found" - } else { - try { - if (opts && opts.populateUser) { - user = await getUser( - userId, - session.tenantId, - opts.populateUser(ctx) - ) - } else { - user = await getUser(userId, session.tenantId) - } - user.csrfToken = session.csrfToken - authenticated = true - } catch (err) { - error = err + let session + try { + // getting session handles error checking (if session exists etc) + session = await getSession(userId, sessionId) + if (opts && opts.populateUser) { + user = await getUser( + userId, + session.tenantId, + opts.populateUser(ctx) + ) + } else { + user = await getUser(userId, session.tenantId) } - } - if (error) { - console.error("Auth Error", error) + user.csrfToken = session.csrfToken + if (session?.lastAccessedAt < timeMinusOneMinute()) { + // make sure we denote that the session is still in use + await updateSessionTTL(session) + } + authenticated = true + } catch (err: any) { + authenticated = false + console.error("Auth Error", err?.message || err) // remove the cookie as the user does not exist anymore clearCookie(ctx, Cookies.Auth) - } else { - // make sure we denote that the session is 
still in use - await updateSessionTTL(session) } } - const apiKey = ctx.request.headers[Headers.API_KEY] - const tenantId = ctx.request.headers[Headers.TENANT_ID] // this is an internal request, no user made it if (!authenticated && apiKey) { const populateUser = opts.populateUser ? opts.populateUser(ctx) : null @@ -142,7 +150,7 @@ module.exports = ( } else { return next() } - } catch (err) { + } catch (err: any) { // invalid token, clear the cookie if (err && err.name === "JsonWebTokenError") { clearCookie(ctx, Cookies.Auth) diff --git a/packages/worker/src/middleware/builderOnly.js b/packages/backend-core/src/middleware/builderOnly.js similarity index 100% rename from packages/worker/src/middleware/builderOnly.js rename to packages/backend-core/src/middleware/builderOnly.js diff --git a/packages/worker/src/middleware/builderOrAdmin.js b/packages/backend-core/src/middleware/builderOrAdmin.js similarity index 100% rename from packages/worker/src/middleware/builderOrAdmin.js rename to packages/backend-core/src/middleware/builderOrAdmin.js diff --git a/packages/backend-core/src/middleware/index.js b/packages/backend-core/src/middleware/index.js index 9d94bf5763..7e7b8a2931 100644 --- a/packages/backend-core/src/middleware/index.js +++ b/packages/backend-core/src/middleware/index.js @@ -10,6 +10,8 @@ const internalApi = require("./internalApi") const datasourceGoogle = require("./passport/datasource/google") const csrf = require("./csrf") const adminOnly = require("./adminOnly") +const builderOrAdmin = require("./builderOrAdmin") +const builderOnly = require("./builderOnly") const joiValidator = require("./joi-validator") module.exports = { google, @@ -27,5 +29,7 @@ module.exports = { }, csrf, adminOnly, + builderOnly, + builderOrAdmin, joiValidator, } diff --git a/packages/backend-core/src/middleware/joi-validator.js b/packages/backend-core/src/middleware/joi-validator.js index 1686b0e727..748ccebd89 100644 --- a/packages/backend-core/src/middleware/joi-validator.js 
+++ b/packages/backend-core/src/middleware/joi-validator.js @@ -1,3 +1,5 @@ +const Joi = require("joi") + function validate(schema, property) { // Return a Koa middleware function return (ctx, next) => { @@ -10,6 +12,12 @@ function validate(schema, property) { } else if (ctx.request[property] != null) { params = ctx.request[property] } + + schema = schema.append({ + createdAt: Joi.any().optional(), + updatedAt: Joi.any().optional(), + }) + const { error } = schema.validate(params) if (error) { ctx.throw(400, `Invalid ${property} - ${error.message}`) diff --git a/packages/backend-core/src/migrations/definitions.ts b/packages/backend-core/src/migrations/definitions.ts index 745c8718c9..34ec0f0cad 100644 --- a/packages/backend-core/src/migrations/definitions.ts +++ b/packages/backend-core/src/migrations/definitions.ts @@ -37,4 +37,8 @@ export const DEFINITIONS: MigrationDefinition[] = [ type: MigrationType.INSTALLATION, name: MigrationName.EVENT_INSTALLATION_BACKFILL, }, + { + type: MigrationType.GLOBAL, + name: MigrationName.GLOBAL_INFO_SYNC_USERS, + }, ] diff --git a/packages/backend-core/src/migrations/migrations.ts b/packages/backend-core/src/migrations/migrations.ts index 2e4ef0da76..ca238ff80e 100644 --- a/packages/backend-core/src/migrations/migrations.ts +++ b/packages/backend-core/src/migrations/migrations.ts @@ -1,6 +1,6 @@ import { DEFAULT_TENANT_ID } from "../constants" import { doWithDB } from "../db" -import { DocumentTypes, StaticDatabases } from "../db/constants" +import { DocumentType, StaticDatabases } from "../db/constants" import { getAllApps } from "../db/utils" import environment from "../environment" import { @@ -21,10 +21,10 @@ import { export const getMigrationsDoc = async (db: any) => { // get the migrations doc try { - return await db.get(DocumentTypes.MIGRATIONS) + return await db.get(DocumentType.MIGRATIONS) } catch (err: any) { if (err.status && err.status === 404) { - return { _id: DocumentTypes.MIGRATIONS } + return { _id: 
DocumentType.MIGRATIONS } } else { console.error(err) throw err diff --git a/packages/backend-core/src/pino.js b/packages/backend-core/src/pino.js new file mode 100644 index 0000000000..69962b3841 --- /dev/null +++ b/packages/backend-core/src/pino.js @@ -0,0 +1,11 @@ +const env = require("./environment") + +exports.pinoSettings = () => ({ + prettyPrint: { + levelFirst: true, + }, + level: env.LOG_LEVEL || "error", + autoLogging: { + ignore: req => req.url.includes("/health"), + }, +}) diff --git a/packages/backend-core/src/security/roles.js b/packages/backend-core/src/security/roles.js index 44dc4f2d3e..983aebf676 100644 --- a/packages/backend-core/src/security/roles.js +++ b/packages/backend-core/src/security/roles.js @@ -3,7 +3,7 @@ const { BUILTIN_PERMISSION_IDS, PermissionLevels } = require("./permissions") const { generateRoleID, getRoleParams, - DocumentTypes, + DocumentType, SEPARATOR, } = require("../db/utils") const { getAppDB } = require("../context") @@ -203,15 +203,24 @@ exports.getAllRoles = async appId => { if (appId) { return doWithDB(appId, internal) } else { - return internal(getAppDB()) + let appDB + try { + appDB = getAppDB() + } catch (error) { + // We don't have any apps, so we'll just use the built-in roles + } + return internal(appDB) } async function internal(db) { - const body = await db.allDocs( - getRoleParams(null, { - include_docs: true, - }) - ) - let roles = body.rows.map(row => row.doc) + let roles = [] + if (db) { + const body = await db.allDocs( + getRoleParams(null, { + include_docs: true, + }) + ) + roles = body.rows.map(row => row.doc) + } const builtinRoles = exports.getBuiltinRoles() // need to combine builtin with any DB record of them (for sake of permissions) @@ -329,7 +338,7 @@ class AccessController { * Adds the "role_" for builtin role IDs which are to be written to the DB (for permissions). 
*/ exports.getDBRoleID = roleId => { - if (roleId.startsWith(DocumentTypes.ROLE)) { + if (roleId.startsWith(DocumentType.ROLE)) { return roleId } return generateRoleID(roleId) @@ -340,8 +349,8 @@ exports.getDBRoleID = roleId => { */ exports.getExternalRoleID = roleId => { // for built in roles we want to remove the DB role ID element (role_) - if (roleId.startsWith(DocumentTypes.ROLE) && isBuiltin(roleId)) { - return roleId.split(`${DocumentTypes.ROLE}${SEPARATOR}`)[1] + if (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) { + return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1] } return roleId } diff --git a/packages/backend-core/src/security/sessions.js b/packages/backend-core/src/security/sessions.js deleted file mode 100644 index 8874b47469..0000000000 --- a/packages/backend-core/src/security/sessions.js +++ /dev/null @@ -1,95 +0,0 @@ -const redis = require("../redis/init") -const { v4: uuidv4 } = require("uuid") - -// a week in seconds -const EXPIRY_SECONDS = 86400 * 7 - -async function getSessionsForUser(userId) { - const client = await redis.getSessionClient() - const sessions = await client.scan(userId) - return sessions.map(session => session.value) -} - -function makeSessionID(userId, sessionId) { - return `${userId}/${sessionId}` -} - -async function invalidateSessions(userId, sessionIds = null) { - try { - let sessions = [] - - // If no sessionIds, get all the sessions for the user - if (!sessionIds) { - sessions = await getSessionsForUser(userId) - sessions.forEach( - session => - (session.key = makeSessionID(session.userId, session.sessionId)) - ) - } else { - // use the passed array of sessionIds - sessions = Array.isArray(sessionIds) ? 
sessionIds : [sessionIds] - sessions = sessions.map(sessionId => ({ - key: makeSessionID(userId, sessionId), - })) - } - - const client = await redis.getSessionClient() - const promises = [] - for (let session of sessions) { - promises.push(client.delete(session.key)) - } - await Promise.all(promises) - } catch (err) { - console.error(`Error invalidating sessions: ${err}`) - } -} - -exports.createASession = async (userId, session) => { - // invalidate all other sessions - await invalidateSessions(userId) - - const client = await redis.getSessionClient() - const sessionId = session.sessionId - if (!session.csrfToken) { - session.csrfToken = uuidv4() - } - session = { - createdAt: new Date().toISOString(), - lastAccessedAt: new Date().toISOString(), - ...session, - userId, - } - await client.store(makeSessionID(userId, sessionId), session, EXPIRY_SECONDS) -} - -exports.updateSessionTTL = async session => { - const client = await redis.getSessionClient() - const key = makeSessionID(session.userId, session.sessionId) - session.lastAccessedAt = new Date().toISOString() - await client.store(key, session, EXPIRY_SECONDS) -} - -exports.endSession = async (userId, sessionId) => { - const client = await redis.getSessionClient() - await client.delete(makeSessionID(userId, sessionId)) -} - -exports.getSession = async (userId, sessionId) => { - try { - const client = await redis.getSessionClient() - return client.get(makeSessionID(userId, sessionId)) - } catch (err) { - // if can't get session don't error, just don't return anything - console.error(err) - return null - } -} - -exports.getAllSessions = async () => { - const client = await redis.getSessionClient() - const sessions = await client.scan() - return sessions.map(session => session.value) -} - -exports.getUserSessions = getSessionsForUser -exports.invalidateSessions = invalidateSessions diff --git a/packages/backend-core/src/security/sessions.ts b/packages/backend-core/src/security/sessions.ts new file mode 100644 
index 0000000000..284adbcd1f --- /dev/null +++ b/packages/backend-core/src/security/sessions.ts @@ -0,0 +1,119 @@ +const redis = require("../redis/init") +const { v4: uuidv4 } = require("uuid") +const { logWarn } = require("../logging") +const env = require("../environment") + +interface Session { + key: string + userId: string + sessionId: string + lastAccessedAt: string + createdAt: string + csrfToken?: string + value: string +} + +type SessionKey = { key: string }[] + +// a week in seconds +const EXPIRY_SECONDS = 86400 * 7 + +function makeSessionID(userId: string, sessionId: string) { + return `${userId}/${sessionId}` +} + +export async function getSessionsForUser(userId: string) { + if (!userId) { + console.trace("Cannot get sessions for undefined userId") + return [] + } + const client = await redis.getSessionClient() + const sessions = await client.scan(userId) + return sessions.map((session: Session) => session.value) +} + +export async function invalidateSessions( + userId: string, + opts: { sessionIds?: string[]; reason?: string } = {} +) { + try { + const reason = opts?.reason || "unknown" + let sessionIds: string[] = opts.sessionIds || [] + let sessions: SessionKey + + // If no sessionIds, get all the sessions for the user + if (sessionIds.length === 0) { + sessions = await getSessionsForUser(userId) + sessions.forEach( + (session: any) => + (session.key = makeSessionID(session.userId, session.sessionId)) + ) + } else { + // use the passed array of sessionIds + sessionIds = Array.isArray(sessionIds) ? 
sessionIds : [sessionIds] + sessions = sessionIds.map((sessionId: string) => ({ + key: makeSessionID(userId, sessionId), + })) + } + + if (sessions && sessions.length > 0) { + const client = await redis.getSessionClient() + const promises = [] + for (let session of sessions) { + promises.push(client.delete(session.key)) + } + if (!env.isTest()) { + logWarn( + `Invalidating sessions for ${userId} (reason: ${reason}) - ${sessions + .map(session => session.key) + .join(", ")}` + ) + } + await Promise.all(promises) + } + } catch (err) { + console.error(`Error invalidating sessions: ${err}`) + } +} + +export async function createASession(userId: string, session: Session) { + // invalidate all other sessions + await invalidateSessions(userId, { reason: "creation" }) + + const client = await redis.getSessionClient() + const sessionId = session.sessionId + if (!session.csrfToken) { + session.csrfToken = uuidv4() + } + session = { + ...session, + createdAt: new Date().toISOString(), + lastAccessedAt: new Date().toISOString(), + userId, + } + await client.store(makeSessionID(userId, sessionId), session, EXPIRY_SECONDS) +} + +export async function updateSessionTTL(session: Session) { + const client = await redis.getSessionClient() + const key = makeSessionID(session.userId, session.sessionId) + session.lastAccessedAt = new Date().toISOString() + await client.store(key, session, EXPIRY_SECONDS) +} + +export async function endSession(userId: string, sessionId: string) { + const client = await redis.getSessionClient() + await client.delete(makeSessionID(userId, sessionId)) +} + +export async function getSession(userId: string, sessionId: string) { + if (!userId || !sessionId) { + throw new Error(`Invalid session details - ${userId} - ${sessionId}`) + } + const client = await redis.getSessionClient() + const session = await client.get(makeSessionID(userId, sessionId)) + if (!session) { + throw new Error(`Session not found - ${userId} - ${sessionId}`) + } + return session +} diff 
--git a/packages/backend-core/src/security/tests/sessions.spec.ts b/packages/backend-core/src/security/tests/sessions.spec.ts new file mode 100644 index 0000000000..7f01bdcdb7 --- /dev/null +++ b/packages/backend-core/src/security/tests/sessions.spec.ts @@ -0,0 +1,12 @@ +import * as sessions from "../sessions" + +describe("sessions", () => { + describe("getSessionsForUser", () => { + it("returns empty when user is undefined", async () => { + // @ts-ignore - allow the undefined to be passed + const results = await sessions.getSessionsForUser(undefined) + + expect(results).toStrictEqual([]) + }) + }) +}) diff --git a/packages/backend-core/src/users.js b/packages/backend-core/src/users.js index 34d546a8bb..de5ce238c1 100644 --- a/packages/backend-core/src/users.js +++ b/packages/backend-core/src/users.js @@ -1,5 +1,5 @@ const { - ViewNames, + ViewName, getUsersByAppParams, getProdAppID, generateAppUserID, @@ -18,7 +18,7 @@ exports.getGlobalUserByEmail = async email => { throw "Must supply an email address to view" } - return await queryGlobalView(ViewNames.USER_BY_EMAIL, { + return await queryGlobalView(ViewName.USER_BY_EMAIL, { key: email.toLowerCase(), include_docs: true, }) @@ -32,7 +32,7 @@ exports.searchGlobalUsersByApp = async (appId, opts) => { include_docs: true, }) params.startkey = opts && opts.startkey ? opts.startkey : params.startkey - let response = await queryGlobalView(ViewNames.USER_BY_APP, params) + let response = await queryGlobalView(ViewName.USER_BY_APP, params) if (!response) { response = [] } @@ -56,7 +56,7 @@ exports.searchGlobalUsersByEmail = async (email, opts) => { const lcEmail = email.toLowerCase() // handle if passing up startkey for pagination const startkey = opts && opts.startkey ? 
opts.startkey : lcEmail - let response = await queryGlobalView(ViewNames.USER_BY_EMAIL, { + let response = await queryGlobalView(ViewName.USER_BY_EMAIL, { ...opts, startkey, endkey: `${lcEmail}${UNICODE_MAX}`, diff --git a/packages/backend-core/src/utils.js b/packages/backend-core/src/utils.js index cf32539c58..0587267e9a 100644 --- a/packages/backend-core/src/utils.js +++ b/packages/backend-core/src/utils.js @@ -1,20 +1,18 @@ -const { - DocumentTypes, - SEPARATOR, - ViewNames, - getAllApps, -} = require("./db/utils") +const { DocumentType, SEPARATOR, ViewName, getAllApps } = require("./db/utils") const jwt = require("jsonwebtoken") const { options } = require("./middleware/passport/jwt") const { queryGlobalView } = require("./db/views") const { Headers, Cookies, MAX_VALID_DATE } = require("./constants") const env = require("./environment") const userCache = require("./cache/user") -const { getUserSessions, invalidateSessions } = require("./security/sessions") +const { + getSessionsForUser, + invalidateSessions, +} = require("./security/sessions") const events = require("./events") const tenancy = require("./tenancy") -const APP_PREFIX = DocumentTypes.APP + SEPARATOR +const APP_PREFIX = DocumentType.APP + SEPARATOR const PROD_APP_PREFIX = "/app/" function confirmAppId(possibleAppId) { @@ -151,7 +149,7 @@ exports.isClient = ctx => { } const getBuilders = async () => { - const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, { + const builders = await queryGlobalView(ViewName.USER_BY_BUILDERS, { include_docs: false, }) @@ -178,7 +176,7 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => { if (!ctx) throw new Error("Koa context must be supplied to logout.") const currentSession = exports.getCookie(ctx, Cookies.Auth) - let sessions = await getUserSessions(userId) + let sessions = await getSessionsForUser(userId) if (keepActiveSession) { sessions = sessions.filter( @@ -190,10 +188,8 @@ exports.platformLogout = async ({ ctx, userId, 
keepActiveSession }) => { exports.clearCookie(ctx, Cookies.CurrentApp) } - await invalidateSessions( - userId, - sessions.map(({ sessionId }) => sessionId) - ) + const sessionIds = sessions.map(({ sessionId }) => sessionId) + await invalidateSessions(userId, { sessionIds, reason: "logout" }) await events.auth.logout() await userCache.invalidateUser(userId) } diff --git a/packages/backend-core/tests/utilities/mocks/index.js b/packages/backend-core/tests/utilities/mocks/index.js index 3dd5c854c0..6aa1c4a54f 100644 --- a/packages/backend-core/tests/utilities/mocks/index.js +++ b/packages/backend-core/tests/utilities/mocks/index.js @@ -1,7 +1,9 @@ +const posthog = require("./posthog") const events = require("./events") const date = require("./date") module.exports = { + posthog, date, events, } diff --git a/packages/backend-core/tests/utilities/mocks/posthog.ts b/packages/backend-core/tests/utilities/mocks/posthog.ts new file mode 100644 index 0000000000..e9cc653ccc --- /dev/null +++ b/packages/backend-core/tests/utilities/mocks/posthog.ts @@ -0,0 +1,7 @@ +jest.mock("posthog-node", () => { + return jest.fn().mockImplementation(() => { + return { + capture: jest.fn(), + } + }) +}) diff --git a/packages/backend-core/yarn.lock b/packages/backend-core/yarn.lock index e1f38a798f..9f71691f44 100644 --- a/packages/backend-core/yarn.lock +++ b/packages/backend-core/yarn.lock @@ -291,6 +291,18 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@hapi/hoek@^9.0.0": + version "9.3.0" + resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb" + integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ== + +"@hapi/topo@^5.0.0": + version "5.1.0" + resolved 
"https://registry.yarnpkg.com/@hapi/topo/-/topo-5.1.0.tgz#dc448e332c6c6e37a4dc02fd84ba8d44b9afb012" + integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg== + dependencies: + "@hapi/hoek" "^9.0.0" + "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" @@ -539,6 +551,23 @@ koa "^2.13.4" node-mocks-http "^1.5.8" +"@sideway/address@^4.1.3": + version "4.1.4" + resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0" + integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== + dependencies: + "@hapi/hoek" "^9.0.0" + +"@sideway/formula@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.0.tgz#fe158aee32e6bd5de85044be615bc08478a0a13c" + integrity sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg== + +"@sideway/pinpoint@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df" + integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== + "@sindresorhus/is@^0.14.0": version "0.14.0" resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" @@ -3193,6 +3222,17 @@ jmespath@0.15.0: resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217" integrity sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w== +joi@17.6.0: + version "17.6.0" + resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2" + integrity 
sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw== + dependencies: + "@hapi/hoek" "^9.0.0" + "@hapi/topo" "^5.0.0" + "@sideway/address" "^4.1.3" + "@sideway/formula" "^3.0.0" + "@sideway/pinpoint" "^2.0.0" + join-component@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/join-component/-/join-component-1.1.0.tgz#b8417b750661a392bee2c2537c68b2a9d4977cd5" diff --git a/packages/bbui/package.json b/packages/bbui/package.json index 2020b8c0ee..79234a65f8 100644 --- a/packages/bbui/package.json +++ b/packages/bbui/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/bbui", "description": "A UI solution used in the different Budibase projects.", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "license": "MPL-2.0", "svelte": "src/index.js", "module": "dist/bbui.es.js", @@ -38,7 +38,7 @@ ], "dependencies": { "@adobe/spectrum-css-workflow-icons": "^1.2.1", - "@budibase/string-templates": "1.1.33-alpha.1", + "@budibase/string-templates": "1.2.41-alpha.0", "@spectrum-css/actionbutton": "^1.0.1", "@spectrum-css/actiongroup": "^1.0.1", "@spectrum-css/avatar": "^3.0.2", diff --git a/packages/bbui/src/Form/Core/DatePicker.svelte b/packages/bbui/src/Form/Core/DatePicker.svelte index 39a7d9d626..d75350d8e8 100644 --- a/packages/bbui/src/Form/Core/DatePicker.svelte +++ b/packages/bbui/src/Form/Core/DatePicker.svelte @@ -16,6 +16,7 @@ export let appendTo = undefined export let timeOnly = false export let ignoreTimezones = false + export let time24hr = false const dispatch = createEventDispatcher() const flatpickrId = `${uuid()}-wrapper` @@ -37,6 +38,7 @@ enableTime: timeOnly || enableTime || false, noCalendar: timeOnly || false, altInput: true, + time_24hr: time24hr || false, altFormat: timeOnly ? "H:i" : enableTime ? 
"F j Y, H:i" : "F j, Y", wrap: true, appendTo, @@ -49,6 +51,12 @@ }, } + $: redrawOptions = { + timeOnly, + enableTime, + time24hr, + } + const handleChange = event => { const [dates] = event.detail const noTimezone = enableTime && !timeOnly && ignoreTimezones @@ -142,7 +150,7 @@ } -{#key timeOnly} +{#key redrawOptions} { - if (value?.length) { + if (Array.isArray(value) && value.length > 0) { if (!map) { return "" } @@ -36,7 +36,7 @@ const getSelectedLookupMap = value => { let map = {} - if (value?.length) { + if (Array.isArray(value) && value.length > 0) { value.forEach(option => { if (option) { map[option] = true diff --git a/packages/bbui/src/Form/Core/PickerDropdown.svelte b/packages/bbui/src/Form/Core/PickerDropdown.svelte index fbe43717ba..28cb2b2a4e 100644 --- a/packages/bbui/src/Form/Core/PickerDropdown.svelte +++ b/packages/bbui/src/Form/Core/PickerDropdown.svelte @@ -15,7 +15,6 @@ export let id = null export let placeholder = "Choose an option or type" export let disabled = false - export let readonly = false export let updateOnChange = true export let error = null export let secondaryOptions = [] @@ -35,6 +34,7 @@ export let isOptionSelected = () => false export let isPlaceholder = false export let placeholderOption = null + export let showClearIcon = true const dispatch = createEventDispatcher() let primaryOpen = false @@ -50,17 +50,11 @@ } const updateValue = newValue => { - if (readonly) { - return - } dispatch("change", newValue) } const onClickSecondary = () => { dispatch("click") - if (readonly) { - return - } secondaryOpen = true } @@ -80,24 +74,15 @@ } const onBlur = event => { - if (readonly) { - return - } focus = false updateValue(event.target.value) } const onInput = event => { - if (readonly || !updateOnChange) { - return - } updateValue(event.target.value) } const updateValueOnEnter = event => { - if (readonly) { - return - } if (event.key === "Enter") { updateValue(event.target.value) } @@ -140,11 +125,12 @@ value={primaryLabel || ""} 
placeholder={placeholder || ""} {disabled} - {readonly} + readonly class="spectrum-Textfield-input spectrum-InputGroup-input" class:labelPadding={iconData} + class:open={primaryOpen} /> - {#if primaryValue} + {#if primaryValue && showClearIcon} + {/if} {#if showConfirmButton} diff --git a/packages/bbui/src/Popover/Popover.svelte b/packages/bbui/src/Popover/Popover.svelte index 1017ef71fc..756e5e6a09 100644 --- a/packages/bbui/src/Popover/Popover.svelte +++ b/packages/bbui/src/Popover/Popover.svelte @@ -63,7 +63,7 @@ diff --git a/packages/builder/src/components/common/bindings/utils.js b/packages/builder/src/components/common/bindings/utils.js index 42a3f11677..c7b40604ad 100644 --- a/packages/builder/src/components/common/bindings/utils.js +++ b/packages/builder/src/components/common/bindings/utils.js @@ -18,10 +18,14 @@ export function addHBSBinding(value, caretPos, binding) { return value } -export function addJSBinding(value, caretPos, binding) { +export function addJSBinding(value, caretPos, binding, { helper } = {}) { binding = typeof binding === "string" ? binding : binding.path value = value == null ? 
"" : value - binding = `$("${binding}")` + if (!helper) { + binding = `$("${binding}")` + } else { + binding = `helper.${binding}()` + } if (caretPos.start) { value = value.substring(0, caretPos.start) + diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ExecuteQuery.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ExecuteQuery.svelte index 462ee71cbe..6b9efa76d3 100644 --- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ExecuteQuery.svelte +++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ExecuteQuery.svelte @@ -3,6 +3,7 @@ import { datasources, integrations, queries } from "stores/backend" import BindingBuilder from "components/integration/QueryBindingBuilder.svelte" import IntegrationQueryEditor from "components/integration/index.svelte" + import { BUDIBASE_DATASOURCE_ID } from "constants/backend" export let parameters export let bindings = [] @@ -11,6 +12,10 @@ $: datasource = $datasources.list.find( ds => ds._id === parameters.datasourceId ) + // Executequery must exclude budibase datasource + $: executeQueryDatasources = $datasources.list.filter( + x => x._id !== BUDIBASE_DATASOURCE_ID + ) function fetchQueryDefinition(query) { const source = $datasources.list.find( @@ -24,7 +29,7 @@ onFieldChange(filter, e.detail)} - placeholder="Column" - /> - - {#if filter.valueType === "Binding"} - (filter.value = event.detail)} + onFieldChange(filter, e.detail)} + placeholder="Column" /> - {:else if ["string", "longform", "number", "formula"].includes(filter.type)} - - {:else if ["options", "array"].includes(filter.type)} - onOperatorChange(filter, e.detail)} + placeholder={null} /> - {:else if filter.type === "boolean"} - - {:else if filter.type === "datetime"} - (filter.value = event.detail)} + /> + {:else if ["string", "longform", "number", "formula"].includes(filter.type)} + + {:else if filter.type === 
"array" || (filter.type === "options" && filter.operator === "oneOf")} + + {:else if filter.type === "options"} + + {:else if filter.type === "boolean"} + + {:else if filter.type === "datetime"} + + {:else} + + {/if} + duplicateFilter(filter.id)} /> - {:else} - - {/if} - duplicateFilter(filter.id)} - /> - removeFilter(filter.id)} - /> - {/each} + removeFilter(filter.id)} + /> + {/each} + {/if} -
+
@@ -202,4 +250,14 @@ align-items: center; grid-template-columns: 1fr 120px 120px 1fr auto auto; } + + .filter-label { + margin-bottom: var(--spacing-s); + } + + .bottom { + display: flex; + justify-content: space-between; + align-items: center; + } diff --git a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte index 2cb35a9cf5..ea54afc0ee 100644 --- a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte +++ b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte @@ -8,21 +8,73 @@ import FilterDrawer from "./FilterDrawer.svelte" import { currentAsset } from "builderStore" + const QUERY_START_REGEX = /\d[0-9]*:/g const dispatch = createEventDispatcher() export let value = [] export let componentInstance export let bindings = [] - let drawer - let tempValue = value || [] + let drawer, + toSaveFilters = null, + allOr, + initialAllOr + $: initialFilters = correctFilters(value || []) $: dataSource = getDatasourceForProvider($currentAsset, componentInstance) $: schema = getSchemaForDatasource($currentAsset, dataSource)?.schema $: schemaFields = Object.values(schema || {}) - const saveFilter = async () => { - dispatch("change", tempValue) + function addNumbering(filters) { + let count = 1 + for (let value of filters) { + if (value.field && value.field?.match(QUERY_START_REGEX) == null) { + value.field = `${count++}:${value.field}` + } + } + return filters + } + + function correctFilters(filters) { + const corrected = [] + for (let filter of filters) { + let field = filter.field + if (filter.operator === "allOr") { + initialAllOr = allOr = true + continue + } + if ( + typeof filter.field === "string" && + filter.field.match(QUERY_START_REGEX) != null + ) { + const parts = field.split(":") + const number = parts[0] + // it's the new format, remove number + if 
(!isNaN(parseInt(number))) { + parts.shift() + field = parts.join(":") + } + } + corrected.push({ + ...filter, + field, + }) + } + return corrected + } + + async function saveFilter() { + if (!toSaveFilters && allOr !== initialAllOr) { + toSaveFilters = initialFilters + } + const filters = toSaveFilters?.filter(filter => filter.operator !== "allOr") + if (allOr && filters) { + filters.push({ operator: "allOr" }) + } + // only save if anything was updated + if (filters) { + dispatch("change", addNumbering(filters)) + } notifications.success("Filters saved.") drawer.hide() } @@ -33,8 +85,12 @@ { + toSaveFilters = event.detail + }} /> diff --git a/packages/builder/src/components/start/AppRow.svelte b/packages/builder/src/components/start/AppRow.svelte index 49f99c9f77..91920073bb 100644 --- a/packages/builder/src/components/start/AppRow.svelte +++ b/packages/builder/src/components/start/AppRow.svelte @@ -30,7 +30,7 @@ {/if}
- +
diff --git a/packages/builder/src/constants/backend/index.js b/packages/builder/src/constants/backend/index.js index d8d09d592f..28ce35d9f7 100644 --- a/packages/builder/src/constants/backend/index.js +++ b/packages/builder/src/constants/backend/index.js @@ -163,6 +163,8 @@ export const SWITCHABLE_TYPES = [ ...ALLOWABLE_NUMBER_TYPES, ] +export const BUDIBASE_DATASOURCE_ID = "bb_internal" + export const IntegrationTypes = { POSTGRES: "POSTGRES", MONGODB: "MONGODB", diff --git a/packages/builder/src/pages/builder/_layout.svelte b/packages/builder/src/pages/builder/_layout.svelte index cb760cd165..2e8ea2ef0a 100644 --- a/packages/builder/src/pages/builder/_layout.svelte +++ b/packages/builder/src/pages/builder/_layout.svelte @@ -3,6 +3,7 @@ import { admin, auth } from "stores/portal" import { onMount } from "svelte" import { CookieUtils, Constants } from "@budibase/frontend-core" + import { API } from "api" let loaded = false @@ -53,6 +54,9 @@ await auth.setOrganisation(urlTenantId) } } + async function analyticsPing() { + await API.analyticsPing({ source: "builder" }) + } onMount(async () => { try { @@ -73,6 +77,9 @@ // being logged in } loaded = true + + // lastly + await analyticsPing() }) $: { diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/rest/[query]/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/rest/[query]/index.svelte index 5ccc173318..b0a93f8eec 100644 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/rest/[query]/index.svelte +++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/rest/[query]/index.svelte @@ -55,13 +55,16 @@ let saveId, url let response, schema, enabledHeaders let authConfigId - let dynamicVariables, addVariableModal, varBinding + let dynamicVariables, addVariableModal, varBinding, globalDynamicBindings let restBindings = getRestBindings() $: 
staticVariables = datasource?.config?.staticVariables || {} $: customRequestBindings = toBindingsArray(requestBindings, "Binding") - $: dynamicRequestBindings = toBindingsArray(dynamicVariables, "Dynamic") + $: globalDynamicRequestBindings = toBindingsArray( + globalDynamicBindings, + "Dynamic" + ) $: dataSourceStaticBindings = toBindingsArray( staticVariables, "Datasource.Static" @@ -70,7 +73,7 @@ $: mergedBindings = [ ...restBindings, ...customRequestBindings, - ...dynamicRequestBindings, + ...globalDynamicRequestBindings, ...dataSourceStaticBindings, ] @@ -231,11 +234,11 @@ ] // convert dynamic variables list to simple key/val object - const getDynamicVariables = (datasource, queryId) => { + const getDynamicVariables = (datasource, queryId, matchFn) => { const variablesList = datasource?.config?.dynamicVariables if (variablesList && variablesList.length > 0) { const filtered = queryId - ? variablesList.filter(variable => variable.queryId === queryId) + ? variablesList.filter(variable => matchFn(variable, queryId)) : variablesList return filtered.reduce( (acc, next) => ({ ...acc, [next.name]: next.value }), @@ -367,12 +370,21 @@ if (query && !query.fields.pagination) { query.fields.pagination = {} } - dynamicVariables = getDynamicVariables(datasource, query._id) + dynamicVariables = getDynamicVariables( + datasource, + query._id, + (variable, queryId) => variable.queryId === queryId + ) + globalDynamicBindings = getDynamicVariables( + datasource, + query._id, + (variable, queryId) => variable.queryId !== queryId + ) prettifyQueryRequestBody( query, requestBindings, - dynamicVariables, + globalDynamicBindings, staticVariables, restBindings ) @@ -437,7 +449,7 @@ valuePlaceholder="Default" bindings={[ ...restBindings, - ...dynamicRequestBindings, + ...globalDynamicRequestBindings, ...dataSourceStaticBindings, ]} bindingDrawerLeft="260px" diff --git a/packages/builder/src/pages/builder/invite/index.svelte b/packages/builder/src/pages/builder/invite/index.svelte index 
8ac35de07f..ffc2b09e85 100644 --- a/packages/builder/src/pages/builder/invite/index.svelte +++ b/packages/builder/src/pages/builder/invite/index.svelte @@ -1,13 +1,16 @@
- logo + logo - Accept Invitation + Invitation to {company} - Please enter a password to set up your user. + Please enter a password to get started. @@ -46,7 +57,7 @@ } .container { margin: 0 auto; - width: 260px; + width: 300px; display: flex; flex-direction: column; justify-content: flex-start; diff --git a/packages/builder/src/pages/builder/portal/apps/_components/AcessFilter.svelte b/packages/builder/src/pages/builder/portal/apps/_components/AcessFilter.svelte index d0662e7b41..5d2a381187 100644 --- a/packages/builder/src/pages/builder/portal/apps/_components/AcessFilter.svelte +++ b/packages/builder/src/pages/builder/portal/apps/_components/AcessFilter.svelte @@ -22,7 +22,7 @@ if (!detail) return const groupSelected = $groups.find(x => x._id === detail) - const appIds = groupSelected?.apps.map(x => x.appId) || null + const appIds = groupSelected?.apps || null dispatch("change", appIds) } diff --git a/packages/builder/src/pages/builder/portal/apps/index.svelte b/packages/builder/src/pages/builder/portal/apps/index.svelte index 0d05e170e0..13d23f6a51 100644 --- a/packages/builder/src/pages/builder/portal/apps/index.svelte +++ b/packages/builder/src/pages/builder/portal/apps/index.svelte @@ -20,14 +20,13 @@ import { store, automationStore } from "builderStore" import { API } from "api" import { onMount } from "svelte" - import { apps, auth, admin, templates, groups } from "stores/portal" + import { apps, auth, admin, templates } from "stores/portal" import download from "downloadjs" import { goto } from "@roxi/routify" import AppRow from "components/start/AppRow.svelte" import { AppStatus } from "constants" import Logo from "assets/bb-space-man.svg" import AccessFilter from "./_components/AcessFilter.svelte" - import { Constants } from "@budibase/frontend-core" let sortBy = "name" let template @@ -69,10 +68,6 @@ $: unlocked = lockedApps?.length === 0 $: automationErrors = getAutomationErrors(enrichedApps) - $: hasGroupsLicense = 
$auth.user?.license.features.includes( - Constants.Features.USER_GROUPS - ) - const enrichApps = (apps, user, sortBy) => { const enrichedApps = apps.map(app => ({ ...app, @@ -360,7 +355,7 @@ {/if}
- {#if hasGroupsLicense && $groups.length} + {#if $auth.groupsEnabled} {/if} -
-
- - -
- {#if userId !== $auth.user._id} -
- - +
+
+ + +
+
+ + +
+ + {#if userId !== $auth.user._id} +
+ + @@ -95,11 +92,11 @@ options={Constants.BuilderRoleDescriptions} /> - {#if hasGroupsLicense} + {#if $auth.groupsEnabled} option.name} getOptionValue={option => option._id} @@ -122,14 +119,12 @@ label { font-family: var(--font-sans); - cursor: pointer; font-weight: 600; box-sizing: border-box; overflow: hidden; border-radius: var(--border-radius-s); color: var(--ink); padding: var(--spacing-m) var(--spacing-l); - transition: all 0.2s ease 0s; display: inline-flex; text-rendering: optimizeLegibility; min-width: auto; @@ -141,10 +136,15 @@ align-items: center; justify-content: center; width: 100%; - background-color: var(--grey-2); - font-size: var(--font-size-xs); + background: var(--spectrum-global-color-gray-200); + font-size: 12px; line-height: normal; border: var(--border-transparent); + transition: background-color 130ms ease-out; + } + label:hover { + background: var(--spectrum-global-color-gray-300); + cursor: pointer; } input[type="file"] { diff --git a/packages/builder/src/pages/builder/portal/manage/users/_components/PasswordModal.svelte b/packages/builder/src/pages/builder/portal/manage/users/_components/PasswordModal.svelte index 01dac8c222..02501f2de0 100644 --- a/packages/builder/src/pages/builder/portal/manage/users/_components/PasswordModal.svelte +++ b/packages/builder/src/pages/builder/portal/manage/users/_components/PasswordModal.svelte @@ -49,10 +49,10 @@ cancelText="Cancel" showCloseIcon={false} > - All your new users can be accessed through the autogenerated passwords. - Make not of these passwords or download the csv + + All your new users can be accessed through the autogenerated passwords. Take + note of these passwords or download the CSV file. +
diff --git a/packages/builder/src/pages/builder/portal/manage/users/_components/RoleTableRenderer.svelte b/packages/builder/src/pages/builder/portal/manage/users/_components/RoleTableRenderer.svelte index 4f481d374c..fe7acee6c4 100644 --- a/packages/builder/src/pages/builder/portal/manage/users/_components/RoleTableRenderer.svelte +++ b/packages/builder/src/pages/builder/portal/manage/users/_components/RoleTableRenderer.svelte @@ -3,14 +3,20 @@ import { Constants } from "@budibase/frontend-core" export let row - $: value = - Constants.BbRoles.find(x => x.value === users.getUserRole(row))?.label || - "Not Available" + + const TooltipMap = { + appUser: "Only has access to published apps", + developer: "Access to the app builder", + admin: "Full access", + } + + $: role = Constants.BudibaseRoleOptions.find( + x => x.value === users.getUserRole(row) + ) + $: value = role?.label || "Not available" + $: tooltip = TooltipMap[role?.value] || "" -
+
{value}
- - diff --git a/packages/builder/src/pages/builder/portal/manage/users/index.svelte b/packages/builder/src/pages/builder/portal/manage/users/index.svelte index d18881d1bb..73cf5e26fa 100644 --- a/packages/builder/src/pages/builder/portal/manage/users/index.svelte +++ b/packages/builder/src/pages/builder/portal/manage/users/index.svelte @@ -8,11 +8,10 @@ Layout, Modal, ModalContent, - Icon, + Search, notifications, Pagination, - Search, - Label, + Divider, } from "@budibase/bbui" import AddUserModal from "./_components/AddUserModal.svelte" import { users, groups, auth } from "stores/portal" @@ -20,68 +19,42 @@ import DeleteRowsButton from "components/backend/DataTable/buttons/DeleteRowsButton.svelte" import GroupsTableRenderer from "./_components/GroupsTableRenderer.svelte" import AppsTableRenderer from "./_components/AppsTableRenderer.svelte" - import NameTableRenderer from "./_components/NameTableRenderer.svelte" import RoleTableRenderer from "./_components/RoleTableRenderer.svelte" import { goto } from "@roxi/routify" import OnboardingTypeModal from "./_components/OnboardingTypeModal.svelte" import PasswordModal from "./_components/PasswordModal.svelte" import ImportUsersModal from "./_components/ImportUsersModal.svelte" import { createPaginationStore } from "helpers/pagination" + import { get } from "svelte/store" import { Constants } from "@budibase/frontend-core" - const accessTypes = [ - { - icon: "User", - description: "App user - Only has access to published apps", - }, - { - icon: "Hammer", - description: "Developer - Access to the app builder", - }, - { - icon: "Draw", - description: "Admin - Full access", - }, - ] - - //let email let enrichedUsers = [] let createUserModal, inviteConfirmationModal, onboardingTypeModal, passwordModal, importUsersModal - let pageInfo = createPaginationStore() let prevEmail = undefined, searchEmail = undefined - let selectedRows = [] let customRenderers = [ { column: "userGroups", component: GroupsTableRenderer }, { column: 
"apps", component: AppsTableRenderer }, - { column: "name", component: NameTableRenderer }, { column: "role", component: RoleTableRenderer }, ] - $: hasGroupsLicense = $auth.user?.license.features.includes( - Constants.Features.USER_GROUPS - ) - $: schema = { - name: {}, email: {}, role: { sortable: false, }, - ...(hasGroupsLicense && { - userGroups: { sortable: false, displayName: "User groups" }, + ...($auth.groupsEnabled && { + userGroups: { sortable: false, displayName: "Groups" }, }), apps: {}, } - $: userData = [] - $: page = $pageInfo.page $: fetchUsers(page, searchEmail) $: { @@ -104,6 +77,7 @@ } }) } + const showOnboardingTypeModal = async addUsersData => { userData = await removingDuplicities(addUsersData) if (!userData?.users?.length) return @@ -112,13 +86,13 @@ } async function createUserFlow() { - let emails = userData?.users?.map(x => x.email) || [] + const payload = userData?.users?.map(user => ({ + email: user.email, + builder: user.role === Constants.BudibaseRoles.Developer, + admin: user.role === Constants.BudibaseRoles.Admin, + })) try { - const res = await users.invite({ - emails: emails, - builder: false, - admin: false, - }) + const res = await users.invite(payload) notifications.success(res.message) inviteConfirmationModal.show() } catch (error) { @@ -198,6 +172,10 @@ const deleteRows = async () => { try { let ids = selectedRows.map(user => user._id) + if (ids.includes(get(auth).user._id)) { + notifications.error("You cannot delete yourself") + return + } await users.bulkDelete(ids) notifications.success(`Successfully deleted ${selectedRows.length} rows`) selectedRows = [] @@ -227,23 +205,13 @@ } - + Users Add users and control who gets access to your published apps - -
- {#each accessTypes as type} -
- -
- {type.description} -
-
- {/each} -
- + +
- - -
- - -
- {#if selectedRows.length > 0} - - {/if} + Import users +
- $goto(`./${detail._id}`)} - {schema} - bind:selectedRows - data={enrichedUsers} - allowEditColumns={false} - allowEditRows={false} - allowSelectRows={true} - showHeaderBorder={false} - {customRenderers} - /> - +
$goto(`./${detail._id}`)} + {schema} + bind:selectedRows + data={enrichedUsers} + allowEditColumns={false} + allowEditRows={false} + allowSelectRows={true} + showHeaderBorder={false} + {customRenderers} + /> + @@ -320,28 +296,22 @@ display: flex; flex-direction: row; justify-content: flex-end; - margin-top: var(--spacing-xl); } - .field { + .controls { display: flex; - align-items: center; flex-direction: row; - grid-gap: var(--spacing-m); - margin-left: auto; + justify-content: space-between; + align-items: center; } - - .field > :global(*) + :global(*) { - margin-left: var(--spacing-m); - } - - .access-description { + .controls-right { display: flex; - margin-top: var(--spacing-xl); - opacity: 0.8; + flex-direction: row; + justify-content: flex-end; + align-items: center; + gap: var(--spacing-xl); } - - .access-text { - margin-left: var(--spacing-m); + .controls-right :global(.spectrum-Search) { + width: 200px; } diff --git a/packages/builder/src/pages/builder/portal/overview/_components/AccessTab.svelte b/packages/builder/src/pages/builder/portal/overview/_components/AccessTab.svelte index 565dfc7aa2..5e327a8743 100644 --- a/packages/builder/src/pages/builder/portal/overview/_components/AccessTab.svelte +++ b/packages/builder/src/pages/builder/portal/overview/_components/AccessTab.svelte @@ -17,10 +17,10 @@ import { users, groups, apps, auth } from "stores/portal" import AssignmentModal from "./AssignmentModal.svelte" import { createPaginationStore } from "helpers/pagination" - import { Constants } from "@budibase/frontend-core" import { roles } from "stores/backend" export let app + let assignmentModal let appGroups = [] let appUsers = [] @@ -28,14 +28,9 @@ search = undefined let pageInfo = createPaginationStore() let fixedAppId + $: page = $pageInfo.page - - $: hasGroupsLicense = $auth.user?.license.features.includes( - Constants.Features.USER_GROUPS - ) - $: fixedAppId = apps.getProdAppID(app.devId) - $: appGroups = $groups.filter(x => { return 
x.apps.includes(app.appId) }) @@ -161,7 +156,7 @@ > - {#if hasGroupsLicense && appGroups.length} + {#if $auth.groupsEnabled && appGroups.length} {#each appGroups as group} { + return !group.apps.find(appId => { + return appId === app.appId + }) + }) + $: valid = + appData?.length && !appData?.some(x => !x.id?.length || !x.role?.length) + $: optionSections = { + ...($auth.groupsEnabled && + filteredGroups.length && { + ["User groups"]: { + data: filteredGroups, + getLabel: group => group.name, + getValue: group => group._id, + getIcon: group => group.icon, + getColour: group => group.color, + }, + }), + users: { + data: availableUsers, + getLabel: user => user.email, + getValue: user => user._id, + getIcon: user => user.icon, + getColour: user => user.color, + }, + } + + const getAvailableUsers = (allUsers, appUsers, newUsers) => { + return (allUsers.data || []).filter(user => { + // Filter out assigned users + if (appUsers.find(x => x._id === user._id)) { + return false + } + + // Filter out new users which are going to be assigned + return !newUsers.find(x => x.id === user._id) + }) + } + async function fetchUsers(page, search) { if ($pageInfo.loading) { return @@ -39,36 +82,13 @@ } } - $: filteredGroups = $groups.filter(group => { - return !group.apps.find(appId => { - return appId === app.appId - }) - }) - - $: optionSections = { - ...(filteredGroups.length && { - groups: { - data: filteredGroups, - getLabel: group => group.name, - getValue: group => group._id, - getIcon: group => group.icon, - getColour: group => group.color, - }, - }), - users: { - data: $users.data.filter(u => !appUsers.find(x => x._id === u._id)), - getLabel: user => user.email, - getValue: user => user._id, - getIcon: user => user.icon, - getColour: user => user.color, - }, - } - - $: appData = [{ id: "", role: "" }] - function addNewInput() { appData = [...appData, { id: "", role: "" }] } + + const removeItem = index => { + appData = appData.filter((x, idx) => idx !== index) + } 
addData(appData)} showCloseIcon={false} + disabled={!valid} > - - {#each appData as input, index} - group.name} - getPrimaryOptionValue={group => group.name} - getPrimaryOptionIcon={group => group.icon} - getPrimaryOptionColour={group => group.colour} - getSecondaryOptionLabel={role => role.name} - getSecondaryOptionValue={role => role._id} - getSecondaryOptionColour={role => RoleUtils.getRoleColour(role._id)} - /> - {/each} - + {#if appData?.length} + + {#each appData as input, index} +
+
+ group.name} + getPrimaryOptionValue={group => group.name} + getPrimaryOptionIcon={group => group.icon} + getPrimaryOptionColour={group => group.colour} + getSecondaryOptionLabel={role => role.name} + getSecondaryOptionValue={role => role._id} + getSecondaryOptionColour={role => + RoleUtils.getRoleColour(role._id)} + /> +
+
+ removeItem(index)} + /> +
+
+ {/each} +
+ {/if}
Add email
+ + diff --git a/packages/builder/src/stores/portal/auth.js b/packages/builder/src/stores/portal/auth.js index d6f4fc140f..8ac19ab785 100644 --- a/packages/builder/src/stores/portal/auth.js +++ b/packages/builder/src/stores/portal/auth.js @@ -2,6 +2,8 @@ import { derived, writable, get } from "svelte/store" import { API } from "api" import { admin } from "stores/portal" import analytics from "analytics" +import { FEATURE_FLAGS } from "helpers/featureFlags" +import { Constants } from "@budibase/frontend-core" export function createAuthStore() { const auth = writable({ @@ -10,11 +12,13 @@ export function createAuthStore() { tenantSet: false, loaded: false, postLogout: false, + groupsEnabled: false, }) const store = derived(auth, $store => { let initials = null let isAdmin = false let isBuilder = false + let groupsEnabled = false if ($store.user) { const user = $store.user if (user.firstName) { @@ -29,6 +33,9 @@ export function createAuthStore() { } isAdmin = !!user.admin?.global isBuilder = !!user.builder?.global + groupsEnabled = + user?.license.features.includes(Constants.Features.USER_GROUPS) && + user?.featureFlags.includes(FEATURE_FLAGS.USER_GROUPS) } return { user: $store.user, @@ -39,6 +46,7 @@ export function createAuthStore() { initials, isAdmin, isBuilder, + groupsEnabled, } }) diff --git a/packages/builder/src/stores/portal/users.js b/packages/builder/src/stores/portal/users.js index 490d1bc9f6..7fc3704e98 100644 --- a/packages/builder/src/stores/portal/users.js +++ b/packages/builder/src/stores/portal/users.js @@ -26,12 +26,8 @@ export function createUsersStore() { return await API.getUsers() } - async function invite({ emails, builder, admin }) { - return API.inviteUsers({ - emails, - builder, - admin, - }) + async function invite(payload) { + return API.inviteUsers(payload) } async function acceptInvite(inviteCode, password) { return API.acceptInvite({ diff --git a/packages/cli/.gitignore b/packages/cli/.gitignore index efef4f97c8..655ef7b624 100644 --- 
a/packages/cli/.gitignore +++ b/packages/cli/.gitignore @@ -5,3 +5,4 @@ build/ docker-error.log envoy.yaml *.tar.gz +prebuilds/ diff --git a/packages/cli/package.json b/packages/cli/package.json index 0a38a26a52..230d28b8d5 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/cli", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "description": "Budibase CLI, for developers, self hosting and migrations.", "main": "src/index.js", "bin": { @@ -26,7 +26,7 @@ "outputPath": "build" }, "dependencies": { - "@budibase/backend-core": "1.1.32-alpha.6", + "@budibase/backend-core": "1.2.41-alpha.0", "axios": "0.21.2", "chalk": "4.1.0", "cli-progress": "3.11.2", diff --git a/packages/cli/yarn.lock b/packages/cli/yarn.lock index 86d4d40e78..19e76101ad 100644 --- a/packages/cli/yarn.lock +++ b/packages/cli/yarn.lock @@ -48,6 +48,44 @@ "@babel/helper-validator-identifier" "^7.16.7" to-fast-properties "^2.0.0" +"@budibase/backend-core@1.1.32-alpha.6": + version "1.1.32-alpha.6" + resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.1.32-alpha.6.tgz#e9dc1a1989a2a6952f5ce002fcdfef66625f3de8" + integrity sha512-8oT6veeSmymuJfnu1jAkDAWD4fLj5W0KxNq6GlC+eMWWDZloDF4fMWDpuYTFBeinq1z1GeSFXc9Ak6u+1Z7LtQ== + dependencies: + "@budibase/types" "1.1.32-alpha.6" + "@techpass/passport-openidconnect" "0.3.2" + aws-sdk "2.1030.0" + bcrypt "5.0.1" + dotenv "16.0.1" + emitter-listener "1.1.2" + ioredis "4.28.0" + jsonwebtoken "8.5.1" + koa-passport "4.1.4" + lodash "4.17.21" + lodash.isarguments "3.1.0" + node-fetch "2.6.7" + passport-google-auth "1.0.2" + passport-google-oauth "2.0.0" + passport-jwt "4.0.0" + passport-local "1.0.0" + passport-oauth2-refresh "^2.1.0" + posthog-node "1.3.0" + pouchdb "7.3.0" + pouchdb-find "7.2.2" + pouchdb-replication-stream "1.2.9" + redlock "4.2.0" + sanitize-s3-objectkey "0.0.1" + semver "7.3.7" + tar-fs "2.1.1" + uuid "8.3.2" + zlib "1.0.5" + 
+"@budibase/types@1.1.32-alpha.6": + version "1.1.32-alpha.6" + resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.1.32-alpha.6.tgz#95d8d73c7ed6ebc22ff26a44365127a478e19409" + integrity sha512-AKKxrzVqGtcSzZZ2fP6i2Vgv6ICN9NEEE1dmzRk9AImZS+XKQ9VgVpdE+4gHgFK7L0gBYAsiaoEpCbbrI/+NoQ== + "@eslint/eslintrc@^0.4.3": version "0.4.3" resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz" @@ -77,6 +115,21 @@ resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== +"@mapbox/node-pre-gyp@^1.0.0": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.9.tgz#09a8781a3a036151cdebbe8719d6f8b25d4058bc" + integrity sha512-aDF3S3rK9Q2gey/WAttUlISduDItz5BU3306M9Eyv6/oS40aMprnopshtlKTykxRNIBEZuRMaZAnbrQ4QtKGyw== + dependencies: + detect-libc "^2.0.0" + https-proxy-agent "^5.0.0" + make-dir "^3.1.0" + node-fetch "^2.6.7" + nopt "^5.0.0" + npmlog "^5.0.1" + rimraf "^3.0.2" + semver "^7.3.5" + tar "^6.1.11" + "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" @@ -98,6 +151,22 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@techpass/passport-openidconnect@0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@techpass/passport-openidconnect/-/passport-openidconnect-0.3.2.tgz#f8fd5d97256286665dbf26dac92431f977ab1e63" + integrity sha512-fnCtEiexXSHA029B//hJcCJlLJrT3lhpNCyA0rnz58Qttz0BLGCVv6yMT8HmOnGThH6vcDOVwdgKM3kbCQtEhw== + dependencies: + base64url "^3.0.1" + oauth "^0.9.15" + passport-strategy "^1.0.0" + request "^2.88.0" + webfinger "^0.4.2" + +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity 
sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== + abort-controller@3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz" @@ -144,7 +213,7 @@ agent-base@6: dependencies: debug "4" -ajv@^6.10.0, ajv@^6.12.4: +ajv@^6.10.0, ajv@^6.12.3, ajv@^6.12.4: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -205,6 +274,19 @@ aproba@^1.0.3: resolved "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== +"aproba@^1.0.3 || ^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc" + integrity sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ== + +are-we-there-yet@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz#372e0e7bd279d8e94c653aaa1f67200884bf3e1c" + integrity sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw== + dependencies: + delegates "^1.0.0" + readable-stream "^3.6.0" + are-we-there-yet@~1.1.2: version "1.1.7" resolved "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz" @@ -250,16 +332,65 @@ array-uniq@1.0.2: resolved "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.2.tgz" integrity sha1-X8w3OSB3VyPP1k1lxkvvU7+eum0= +asn1@~0.2.3: + version "0.2.6" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" + integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + 
version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== + astral-regex@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz" integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== +async@~2.1.4: + version "2.1.5" + resolved "https://registry.yarnpkg.com/async/-/async-2.1.5.tgz#e587c68580994ac67fc56ff86d3ac56bdbe810bc" + integrity sha512-+g/Ncjbx0JSq2Mk03WQkyKvNh5q9Qvyo/RIqIqnmC5feJY70PNl2ESwZU2BhAB+AZPkHNzzyC2Dq2AS5VnTKhQ== + dependencies: + lodash "^4.14.0" + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + at-least-node@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz" integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== +aws-sdk@2.1030.0: + version "2.1030.0" + resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82" + integrity sha512-to0STOb8DsSGuSsUb/WCbg/UFnMGfIYavnJH5ZlRCHzvCFjTyR+vfE8ku+qIZvfFM4+5MNTQC/Oxfun2X/TuyA== + dependencies: + buffer "4.9.2" + events "1.1.1" + ieee754 "1.1.13" + jmespath "0.15.0" + querystring "0.2.0" + sax "1.2.1" + url "0.10.3" + uuid "3.3.2" + xml2js "0.4.19" + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== + +aws4@^1.8.0: + version "1.11.0" + resolved 
"https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" + integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== + axios-retry@^3.1.9: version "3.2.4" resolved "https://registry.npmjs.org/axios-retry/-/axios-retry-3.2.4.tgz" @@ -275,6 +406,13 @@ axios@0.21.2: dependencies: follow-redirects "^1.14.0" +axios@0.24.0: + version "0.24.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6" + integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA== + dependencies: + follow-redirects "^1.14.4" + axios@^0.21.1: version "0.21.4" resolved "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz" @@ -287,11 +425,31 @@ balanced-match@^1.0.0: resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base64-js@^1.3.1: +base64-js@^1.0.2, base64-js@^1.3.1: version "1.5.1" resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== +base64url@3.x.x, base64url@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/base64url/-/base64url-3.0.1.tgz#6399d572e2bc3f90a9a8b22d5dbb0a32d33f788d" + integrity sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== + +bcrypt-pbkdf@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== + dependencies: + tweetnacl "^0.14.3" + +bcrypt@5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-5.0.1.tgz#f1a2c20f208e2ccdceea4433df0c8b2c54ecdf71" + 
integrity sha512-9BTgmrhZM2t1bNuDtrtIMVSmmxZBrJ71n8Wg+YgdjHuIWYF7SjjmCPZFB+/5i/o/PIeRpwVJR3P+NrpIItUjqw== + dependencies: + "@mapbox/node-pre-gyp" "^1.0.0" + node-addon-api "^3.1.0" + bl@^4.0.3: version "4.1.0" resolved "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz" @@ -301,6 +459,11 @@ bl@^4.0.3: inherits "^2.0.4" readable-stream "^3.4.0" +bluebird@^3.7.2: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" @@ -316,11 +479,30 @@ braces@^3.0.1: dependencies: fill-range "^7.0.1" +buffer-equal-constant-time@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== + +buffer-from@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" + integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + buffer-from@1.1.2: version "1.1.2" resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== +buffer@4.9.2: + version "4.9.2" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" + integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== + dependencies: + base64-js "^1.0.2" + ieee754 "^1.1.4" + isarray "^1.0.0" + buffer@^5.5.0, buffer@^5.6.0: version "5.7.1" resolved 
"https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz" @@ -334,6 +516,11 @@ callsites@^3.0.0: resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== + chalk@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" @@ -412,6 +599,11 @@ clone-buffer@1.0.0: resolved "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz" integrity sha512-KLLTJWrvwIP+OPfMn0x2PheDEP20RPUcGXj/ERegTgdmPEZylALQldygiqrPPu8P45uNuPs7ckmReLY6v/iA5g== +cluster-key-slot@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz#30474b2a981fb12172695833052bc0d01336d10d" + integrity sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw== + code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz" @@ -441,6 +633,18 @@ color-name@~1.1.4: resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +color-support@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" + integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== + +combined-stream@^1.0.6, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity 
sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + command-line-args@^5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/command-line-args/-/command-line-args-5.2.1.tgz#c44c32e437a57d7c51157696893c5909e9cec42e" @@ -476,7 +680,7 @@ concat-map@0.0.1: resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -console-control-strings@^1.0.0, console-control-strings@~1.1.0: +console-control-strings@^1.0.0, console-control-strings@^1.1.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ== @@ -494,6 +698,11 @@ copyfiles@^2.4.1: untildify "^4.0.0" yargs "^16.1.0" +core-util-is@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== + core-util-is@~1.0.0: version "1.0.3" resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" @@ -518,6 +727,13 @@ current-module-paths@^1.1.0: resolved "https://registry.yarnpkg.com/current-module-paths/-/current-module-paths-1.1.0.tgz#5d5bf214281d80aea264e642f028e672098238f6" integrity sha512-HGhLUszcgprjKmzvQoCQda8iEWsQn3sWVzPdttyJVR5cjfVDYcoyozQA5D1YXgab9v84SPMpSuD+YrPX6i1IMQ== +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g== + dependencies: + assert-plus "^1.0.0" + debug@4, debug@^4.0.1, debug@^4.1.1: version "4.3.2" 
resolved "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz" @@ -525,6 +741,13 @@ debug@4, debug@^4.0.1, debug@^4.1.1: dependencies: ms "2.1.2" +debug@^4.3.1: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + decompress-response@^4.2.0: version "4.2.1" resolved "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz" @@ -550,16 +773,31 @@ deferred-leveldown@~5.3.0: abstract-leveldown "~6.2.1" inherits "^2.0.3" +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + delegates@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= +denque@^1.1.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/denque/-/denque-1.5.1.tgz#07f670e29c9a78f8faecb2566a1e2c11929c5cbf" + integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw== + detect-libc@^1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz" integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= +detect-libc@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd" + integrity sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w== + dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" @@ -589,6 +827,28 @@ double-ended-queue@2.1.0-0: resolved "https://registry.npmjs.org/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz" 
integrity sha512-+BNfZ+deCo8hMNpDqDnvT+c0XpJ5cUa6mqYq89bho2Ifze4URTqRkcwR399hWoTrTkbZ/XJYDgP6rc7pRgffEQ== +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw== + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +ecdsa-sig-formatter@1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" + integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== + dependencies: + safe-buffer "^5.0.1" + +emitter-listener@1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/emitter-listener/-/emitter-listener-1.1.2.tgz#56b140e8f6992375b3d7cb2cab1cc7432d9632e8" + integrity sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ== + dependencies: + shimmer "^1.2.0" + emoji-regex@^8.0.0: version "8.0.0" resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" @@ -778,11 +1038,21 @@ event-target-shim@^5.0.0: resolved "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz" integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== +events@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924" + integrity sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw== + expand-template@^2.0.3: version "2.0.3" resolved "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz" integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== +extend@~3.0.2: + version "3.0.2" + resolved 
"https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + external-editor@^3.0.3: version "3.1.0" resolved "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz" @@ -792,6 +1062,16 @@ external-editor@^3.0.3: iconv-lite "^0.4.24" tmp "^0.0.33" +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== + +extsprintf@^1.2.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" + integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" @@ -830,6 +1110,13 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fetch-cookie@0.10.1: + version "0.10.1" + resolved "https://registry.yarnpkg.com/fetch-cookie/-/fetch-cookie-0.10.1.tgz#5ea88f3d36950543c87997c27ae2aeafb4b5c4d4" + integrity sha512-beB+VEd4cNeVG1PY+ee74+PkuCQnik78pgLi5Ah/7qdUfov8IctU0vLUbBT8/10Ma5GMBeI4wtxhGrEfKNYs2g== + dependencies: + tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0" + fetch-cookie@0.11.0: version "0.11.0" resolved "https://registry.npmjs.org/fetch-cookie/-/fetch-cookie-0.11.0.tgz" @@ -886,11 +1173,25 @@ flatted@^3.1.0: resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.4.tgz" integrity sha512-8/sOawo8tJ4QOBX8YlQBMxL8+RLZfxMQOif9o0KUKTNTjMYElWPE0r/m5VNFxTRd0NSw8qSy8dajrwX4RYI1Hw== -follow-redirects@^1.14.0: +follow-redirects@^1.14.0, follow-redirects@^1.14.4: version "1.15.1" resolved 
"https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + from2@^2.3.0: version "2.3.0" resolved "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz" @@ -936,6 +1237,21 @@ functional-red-black-tree@^1.0.1: resolved "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz" integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= +gauge@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-3.0.2.tgz#03bf4441c044383908bcfa0656ad91803259b395" + integrity sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q== + dependencies: + aproba "^1.0.3 || ^2.0.0" + color-support "^1.1.2" + console-control-strings "^1.0.0" + has-unicode "^2.0.1" + object-assign "^4.1.1" + signal-exit "^3.0.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + wide-align "^1.1.2" + gauge@~2.7.3: version "2.7.4" resolved "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz" @@ -955,6 +1271,13 @@ get-caller-file@^2.0.5: resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== +getpass@^0.1.1: + version "0.1.7" + 
resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng== + dependencies: + assert-plus "^1.0.0" + github-from-package@0.0.0: version "0.0.0" resolved "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz" @@ -1017,11 +1340,60 @@ globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" +google-auth-library@~0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-0.10.0.tgz#6e15babee85fd1dd14d8d128a295b6838d52136e" + integrity sha512-KM54Y9GhdAzfXUHmWEoYmaOykSLuMG7W4HvVLYqyogxOyE6px8oSS8W13ngqW0oDGZ915GFW3V6OM6+qcdvPOA== + dependencies: + gtoken "^1.2.1" + jws "^3.1.4" + lodash.noop "^3.0.1" + request "^2.74.0" + +google-p12-pem@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-0.1.2.tgz#33c46ab021aa734fa0332b3960a9a3ffcb2f3177" + integrity sha512-puhMlJ2+E/rgvxWaqgN/nC7x623OAE8MR9vBUqxF0inCE7HoVfCHvTeQ9+BR+rj9KM0fIg6XV6tmbt7XHHssoQ== + dependencies: + node-forge "^0.7.1" + +googleapis@^16.0.0: + version "16.1.0" + resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576" + integrity sha512-5czmF7xkIlJKc1+/+5tltrI1skoR3HKtkDOld9rk+DOucTpZRjOhCoJzoSjxB3M8rP2tEb1VIr1TPyzR3V2PUQ== + dependencies: + async "~2.1.4" + google-auth-library "~0.10.0" + string-template "~1.0.0" + graceful-fs@^4.1.6, graceful-fs@^4.2.0: version "4.2.8" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz" integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== +gtoken@^1.2.1: + version "1.2.3" + resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-1.2.3.tgz#5509571b8afd4322e124cf66cf68115284c476d8" + integrity 
sha512-wQAJflfoqSgMWrSBk9Fg86q+sd6s7y6uJhIvvIPz++RElGlMtEqsdAR2oWwZ/WTEtp7P9xFbJRrT976oRgzJ/w== + dependencies: + google-p12-pem "^0.1.0" + jws "^3.0.0" + mime "^1.4.1" + request "^2.72.0" + +har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q== + +har-validator@~5.1.3: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + has-flag@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" @@ -1032,7 +1404,7 @@ has-flag@^4.0.0: resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== -has-unicode@^2.0.0: +has-unicode@^2.0.0, has-unicode@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ== @@ -1044,6 +1416,15 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ== + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + https-proxy-agent@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz" @@ -1059,7 +1440,12 @@ iconv-lite@^0.4.24: 
dependencies: safer-buffer ">= 2.1.2 < 3" -ieee754@^1.1.13: +ieee754@1.1.13: + version "1.1.13" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" + integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== + +ieee754@^1.1.13, ieee754@^1.1.4: version "1.2.1" resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== @@ -1147,6 +1533,23 @@ into-stream@^6.0.0: from2 "^2.3.0" p-is-promise "^3.0.0" +ioredis@4.28.0: + version "4.28.0" + resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.0.tgz#5a2be3f37ff2075e2332f280eaeb02ab4d9ff0d3" + integrity sha512-I+zkeeWp3XFgPT2CtJKxvaF5FjGBGt4yGYljRjQecdQKteThuAsKqffeF1lgHVlYnuNeozRbPOCDNZ7tDWPeig== + dependencies: + cluster-key-slot "^1.1.0" + debug "^4.3.1" + denque "^1.1.0" + lodash.defaults "^4.2.0" + lodash.flatten "^4.4.0" + lodash.isarguments "^3.1.0" + p-map "^2.1.0" + redis-commands "1.7.0" + redis-errors "^1.2.0" + redis-parser "^3.0.0" + standard-as-callback "^2.1.0" + is-buffer@~1.1.6: version "1.1.6" resolved "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" @@ -1193,12 +1596,17 @@ is-retry-allowed@^2.2.0: resolved "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz" integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg== +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + isarray@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== -isarray@~1.0.0: 
+isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= @@ -1208,6 +1616,16 @@ isexe@^2.0.0: resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= +isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== + +jmespath@0.15.0: + version "0.15.0" + resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217" + integrity sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w== + join-component@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/join-component/-/join-component-1.1.0.tgz" @@ -1226,6 +1644,11 @@ js-yaml@^3.13.1: argparse "^1.0.7" esprima "^4.0.0" +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== + json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" @@ -1236,12 +1659,17 @@ json-schema-traverse@^1.0.0: resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== +json-schema@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" resolved 
"https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= -json-stringify-safe@^5.0.1: +json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== @@ -1255,6 +1683,56 @@ jsonfile@^6.0.1: optionalDependencies: graceful-fs "^4.1.6" +jsonwebtoken@8.5.1, jsonwebtoken@^8.2.0: + version "8.5.1" + resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" + integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== + dependencies: + jws "^3.2.2" + lodash.includes "^4.3.0" + lodash.isboolean "^3.0.3" + lodash.isinteger "^4.0.4" + lodash.isnumber "^3.0.3" + lodash.isplainobject "^4.0.6" + lodash.isstring "^4.0.1" + lodash.once "^4.0.0" + ms "^2.1.1" + semver "^5.6.0" + +jsprim@^1.2.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.2.tgz#712c65533a15c878ba59e9ed5f0e26d5b77c5feb" + integrity sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw== + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.4.0" + verror "1.10.0" + +jwa@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" + integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer "^5.0.1" + +jws@^3.0.0, jws@^3.1.4, jws@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + integrity 
sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== + dependencies: + jwa "^1.4.1" + safe-buffer "^5.0.1" + +koa-passport@4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/koa-passport/-/koa-passport-4.1.4.tgz#5f1665c1c2a37ace79af9f970b770885ca30ccfa" + integrity sha512-dJBCkl4X+zdYxbI2V2OtoGy0PUenpvp2ZLLWObc8UJhsId0iQpTFT8RVcuA0709AL2txGwRHnSPoT1bYNGa6Kg== + dependencies: + passport "^0.4.0" + level-codec@9.0.2, level-codec@^9.0.0: version "9.0.2" resolved "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz" @@ -1379,11 +1857,66 @@ lodash.camelcase@^4.3.0: resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== +lodash.defaults@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" + integrity sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ== + +lodash.flatten@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" + integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g== + +lodash.includes@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" + integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== + +lodash.isarguments@3.1.0, lodash.isarguments@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" + integrity 
sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg== + +lodash.isboolean@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" + integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== + +lodash.isinteger@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" + integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== + +lodash.isnumber@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" + integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.isstring@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== + lodash.merge@^4.6.2: version "4.6.2" resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== +lodash.noop@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/lodash.noop/-/lodash.noop-3.0.1.tgz#38188f4d650a3a474258439b96ec45b32617133c" + integrity 
sha512-TmYdmu/pebrdTIBDK/FDx9Bmfzs9x0sZG6QIJuMDTqEPfeciLcN13ij+cOd0i9vwJfBtbG9UQ+C7MkXgYxrIJg== + +lodash.once@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" + integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== + lodash.pick@^4.0.0: version "4.4.0" resolved "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz" @@ -1394,7 +1927,7 @@ lodash.truncate@^4.4.2: resolved "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz" integrity sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM= -lodash@^4.17.21: +lodash@4.17.21, lodash@^4.14.0, lodash@^4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -1416,6 +1949,13 @@ ltgt@2.2.1, ltgt@^2.1.2: resolved "https://registry.npmjs.org/ltgt/-/ltgt-2.2.1.tgz" integrity sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA== +make-dir@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + md5@^2.3.0: version "2.3.0" resolved "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz" @@ -1438,6 +1978,23 @@ micromatch@^4.0.4: braces "^3.0.1" picomatch "^2.2.3" +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@~2.1.19: + version "2.1.35" + resolved 
"https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@^1.4.1: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" @@ -1497,7 +2054,7 @@ ms@2.1.2: resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@^2.1.3: +ms@^2.1.1, ms@^2.1.3: version "2.1.3" resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -1547,13 +2104,28 @@ node-abi@^2.21.0: dependencies: semver "^5.4.1" -node-fetch@2, node-fetch@2.6.7, node-fetch@^2.6.6: +node-addon-api@^3.1.0: + version "3.2.1" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161" + integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A== + +node-fetch@2, node-fetch@2.6.7, node-fetch@^2.6.6, node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" +node-fetch@2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd" + integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA== + +node-forge@^0.7.1: + 
version "0.7.6" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac" + integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw== + node-gyp-build@~4.1.0: version "4.1.1" resolved "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.1.1.tgz" @@ -1567,6 +2139,13 @@ noms@0.0.0: inherits "^2.0.1" readable-stream "~1.0.31" +nopt@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-5.0.0.tgz#530942bb58a512fccafe53fe210f13a25355dc88" + integrity sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ== + dependencies: + abbrev "1" + npmlog@^4.0.1: version "4.1.2" resolved "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz" @@ -1577,12 +2156,32 @@ npmlog@^4.0.1: gauge "~2.7.3" set-blocking "~2.0.0" +npmlog@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-5.0.1.tgz#f06678e80e29419ad67ab964e0fa69959c1eb8b0" + integrity sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw== + dependencies: + are-we-there-yet "^2.0.0" + console-control-strings "^1.1.0" + gauge "^3.0.0" + set-blocking "^2.0.0" + number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= -object-assign@^4.1.0: +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + +oauth@0.9.x, oauth@^0.9.15: + version "0.9.15" + resolved "https://registry.yarnpkg.com/oauth/-/oauth-0.9.15.tgz#bd1fefaf686c96b75475aed5196412ff60cfb9c1" + integrity sha512-a5ERWK1kh38ExDEfoO6qUHJb32rd7aYmPHuyCu3Fta/cnICvYmgd2uhuKXvPD+PXB+gCEYYEaQdIRAjCOwAKNA== + +object-assign@^4.1.0, 
object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== @@ -1635,6 +2234,11 @@ p-is-promise@^3.0.0: resolved "https://registry.npmjs.org/p-is-promise/-/p-is-promise-3.0.0.tgz" integrity sha512-Wo8VsW4IRQSKVXsJCn7TomUaVtyfjVDn3nUP7kE967BQk0CwFpdbZs0X0uk5sW9mkBa9eNM7hCMaG93WUAwxYQ== +p-map@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" + integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== + parent-module@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" @@ -1642,6 +2246,89 @@ parent-module@^1.0.0: dependencies: callsites "^3.0.0" +passport-google-auth@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/passport-google-auth/-/passport-google-auth-1.0.2.tgz#8b300b5aa442ef433de1d832ed3112877d0b2938" + integrity sha512-cfAqna6jZLyMEwUdd4PIwAh2mQKQVEDAaRIaom1pG6h4x4Gwjllf/Jflt3TkR1Sen5Rkvr3l7kSXCWE1EKkh8g== + dependencies: + googleapis "^16.0.0" + passport-strategy "1.x" + +passport-google-oauth1@1.x.x: + version "1.0.0" + resolved "https://registry.yarnpkg.com/passport-google-oauth1/-/passport-google-oauth1-1.0.0.tgz#af74a803df51ec646f66a44d82282be6f108e0cc" + integrity sha512-qpCEhuflJgYrdg5zZIpAq/K3gTqa1CtHjbubsEsidIdpBPLkEVq6tB1I8kBNcH89RdSiYbnKpCBXAZXX/dtx1Q== + dependencies: + passport-oauth1 "1.x.x" + +passport-google-oauth20@2.x.x: + version "2.0.0" + resolved "https://registry.yarnpkg.com/passport-google-oauth20/-/passport-google-oauth20-2.0.0.tgz#0d241b2d21ebd3dc7f2b60669ec4d587e3a674ef" + integrity sha512-KSk6IJ15RoxuGq7D1UKK/8qKhNfzbLeLrG3gkLZ7p4A6DBCcv7xpyQwuXtWdpyR0+E0mwkpjY1VfPOhxQrKzdQ== + dependencies: + passport-oauth2 "1.x.x" + 
+passport-google-oauth@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/passport-google-oauth/-/passport-google-oauth-2.0.0.tgz#f6eb4bc96dd6c16ec0ecfdf4e05ec48ca54d4dae" + integrity sha512-JKxZpBx6wBQXX1/a1s7VmdBgwOugohH+IxCy84aPTZNq/iIPX6u7Mqov1zY7MKRz3niFPol0KJz8zPLBoHKtYA== + dependencies: + passport-google-oauth1 "1.x.x" + passport-google-oauth20 "2.x.x" + +passport-jwt@4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/passport-jwt/-/passport-jwt-4.0.0.tgz#7f0be7ba942e28b9f5d22c2ebbb8ce96ef7cf065" + integrity sha512-BwC0n2GP/1hMVjR4QpnvqA61TxenUMlmfNjYNgK0ZAs0HK4SOQkHcSv4L328blNTLtHq7DbmvyNJiH+bn6C5Mg== + dependencies: + jsonwebtoken "^8.2.0" + passport-strategy "^1.0.0" + +passport-local@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/passport-local/-/passport-local-1.0.0.tgz#1fe63268c92e75606626437e3b906662c15ba6ee" + integrity sha512-9wCE6qKznvf9mQYYbgJ3sVOHmCWoUNMVFoZzNoznmISbhnNNPhN9xfY3sLmScHMetEJeoY7CXwfhCe7argfQow== + dependencies: + passport-strategy "1.x.x" + +passport-oauth1@1.x.x: + version "1.2.0" + resolved "https://registry.yarnpkg.com/passport-oauth1/-/passport-oauth1-1.2.0.tgz#5229d431781bf5b265bec86ce9a9cce58a756cf9" + integrity sha512-Sv2YWodC6jN12M/OXwmR4BIXeeIHjjbwYTQw4kS6tHK4zYzSEpxBgSJJnknBjICA5cj0ju3FSnG1XmHgIhYnLg== + dependencies: + oauth "0.9.x" + passport-strategy "1.x.x" + utils-merge "1.x.x" + +passport-oauth2-refresh@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/passport-oauth2-refresh/-/passport-oauth2-refresh-2.1.0.tgz#c31cd133826383f5539d16ad8ab4f35ca73ce4a4" + integrity sha512-4ML7ooCESCqiTgdDBzNUFTBcPR8zQq9iM6eppEUGMMvLdsjqRL93jKwWm4Az3OJcI+Q2eIVyI8sVRcPFvxcF/A== + +passport-oauth2@1.x.x: + version "1.6.1" + resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.1.tgz#c5aee8f849ce8bd436c7f81d904a3cd1666f181b" + integrity sha512-ZbV43Hq9d/SBSYQ22GOiglFsjsD1YY/qdiptA+8ej+9C1dL1TVB+mBE5kDH/D4AJo50+2i8f4bx0vg4/yDDZCQ== + dependencies: + 
base64url "3.x.x" + oauth "0.9.x" + passport-strategy "1.x.x" + uid2 "0.0.x" + utils-merge "1.x.x" + +passport-strategy@1.x, passport-strategy@1.x.x, passport-strategy@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" + integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA== + +passport@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270" + integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg== + dependencies: + passport-strategy "1.x.x" + pause "0.0.1" + path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" @@ -1662,6 +2349,16 @@ path-type@^4.0.0: resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== +pause@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/pause/-/pause-0.0.1.tgz#1d408b3fdb76923b9543d96fb4c9dfd535d9cb5d" + integrity sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg== + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + picomatch@^2.2.3: version "2.3.0" resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz" @@ -1715,6 +2412,20 @@ posthog-node@1.0.7: remove-trailing-slash "^0.1.1" uuid "^8.3.2" +posthog-node@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-1.3.0.tgz#804ed2f213a2f05253f798bf9569d55a9cad94f7" + integrity 
sha512-2+VhqiY/rKIqKIXyvemBFHbeijHE25sP7eKltnqcFqAssUE6+sX6vusN9A4luzToOqHQkUZexiCKxvuGagh7JA== + dependencies: + axios "0.24.0" + axios-retry "^3.1.9" + component-type "^1.2.1" + join-component "^1.1.0" + md5 "^2.3.0" + ms "^2.1.3" + remove-trailing-slash "^0.1.1" + uuid "^8.3.2" + pouch-stream@^0.4.0: version "0.4.1" resolved "https://registry.npmjs.org/pouch-stream/-/pouch-stream-0.4.1.tgz" @@ -1723,6 +2434,84 @@ pouch-stream@^0.4.0: inherits "^2.0.1" readable-stream "^1.0.27-1" +pouchdb-abstract-mapreduce@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-abstract-mapreduce/-/pouchdb-abstract-mapreduce-7.2.2.tgz#dd1b10a83f8d24361dce9aaaab054614b39f766f" + integrity sha512-7HWN/2yV2JkwMnGnlp84lGvFtnm0Q55NiBUdbBcaT810+clCGKvhssBCrXnmwShD1SXTwT83aszsgiSfW+SnBA== + dependencies: + pouchdb-binary-utils "7.2.2" + pouchdb-collate "7.2.2" + pouchdb-collections "7.2.2" + pouchdb-errors "7.2.2" + pouchdb-fetch "7.2.2" + pouchdb-mapreduce-utils "7.2.2" + pouchdb-md5 "7.2.2" + pouchdb-utils "7.2.2" + +pouchdb-binary-utils@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-binary-utils/-/pouchdb-binary-utils-7.2.2.tgz#0690b348052c543b1e67f032f47092ca82bcb10e" + integrity sha512-shacxlmyHbUrNfE6FGYpfyAJx7Q0m91lDdEAaPoKZM3SzAmbtB1i+OaDNtYFztXjJl16yeudkDb3xOeokVL3Qw== + dependencies: + buffer-from "1.1.1" + +pouchdb-collate@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-collate/-/pouchdb-collate-7.2.2.tgz#fc261f5ef837c437e3445fb0abc3f125d982c37c" + integrity sha512-/SMY9GGasslknivWlCVwXMRMnQ8myKHs4WryQ5535nq1Wj/ehpqWloMwxEQGvZE1Sda3LOm7/5HwLTcB8Our+w== + +pouchdb-collections@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-collections/-/pouchdb-collections-7.2.2.tgz#aeed77f33322429e3f59d59ea233b48ff0e68572" + integrity sha512-6O9zyAYlp3UdtfneiMYuOCWdUCQNo2bgdjvNsMSacQX+3g8WvIoFQCYJjZZCpTttQGb+MHeRMr8m2U95lhJTew== + +pouchdb-errors@7.2.2: + version "7.2.2" + resolved 
"https://registry.yarnpkg.com/pouchdb-errors/-/pouchdb-errors-7.2.2.tgz#80d811d65c766c9d20b755c6e6cc123f8c3c4792" + integrity sha512-6GQsiWc+7uPfgEHeavG+7wuzH3JZW29Dnrvz8eVbDFE50kVFxNDVm3EkYHskvo5isG7/IkOx7PV7RPTA3keG3g== + dependencies: + inherits "2.0.4" + +pouchdb-fetch@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-fetch/-/pouchdb-fetch-7.2.2.tgz#492791236d60c899d7e9973f9aca0d7b9cc02230" + integrity sha512-lUHmaG6U3zjdMkh8Vob9GvEiRGwJfXKE02aZfjiVQgew+9SLkuOxNw3y2q4d1B6mBd273y1k2Lm0IAziRNxQnA== + dependencies: + abort-controller "3.0.0" + fetch-cookie "0.10.1" + node-fetch "2.6.0" + +pouchdb-find@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-find/-/pouchdb-find-7.2.2.tgz#1227afdd761812d508fe0794b3e904518a721089" + integrity sha512-BmFeFVQ0kHmDehvJxNZl9OmIztCjPlZlVSdpijuFbk/Fi1EFPU1BAv3kLC+6DhZuOqU/BCoaUBY9sn66pPY2ag== + dependencies: + pouchdb-abstract-mapreduce "7.2.2" + pouchdb-collate "7.2.2" + pouchdb-errors "7.2.2" + pouchdb-fetch "7.2.2" + pouchdb-md5 "7.2.2" + pouchdb-selector-core "7.2.2" + pouchdb-utils "7.2.2" + +pouchdb-mapreduce-utils@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-mapreduce-utils/-/pouchdb-mapreduce-utils-7.2.2.tgz#13a46a3cc2a3f3b8e24861da26966904f2963146" + integrity sha512-rAllb73hIkU8rU2LJNbzlcj91KuulpwQu804/F6xF3fhZKC/4JQMClahk+N/+VATkpmLxp1zWmvmgdlwVU4HtQ== + dependencies: + argsarray "0.0.1" + inherits "2.0.4" + pouchdb-collections "7.2.2" + pouchdb-utils "7.2.2" + +pouchdb-md5@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-md5/-/pouchdb-md5-7.2.2.tgz#415401acc5a844112d765bd1fb4e5d9f38fb0838" + integrity sha512-c/RvLp2oSh8PLAWU5vFBnp6ejJABIdKqboZwRRUrWcfGDf+oyX8RgmJFlYlzMMOh4XQLUT1IoaDV8cwlsuryZw== + dependencies: + pouchdb-binary-utils "7.2.2" + spark-md5 "3.0.1" + pouchdb-promise@^6.0.4: version "6.4.3" resolved "https://registry.npmjs.org/pouchdb-promise/-/pouchdb-promise-6.4.3.tgz" @@ -1743,6 +2532,28 @@ 
pouchdb-replication-stream@1.2.9: pouchdb-promise "^6.0.4" through2 "^2.0.0" +pouchdb-selector-core@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-selector-core/-/pouchdb-selector-core-7.2.2.tgz#264d7436a8c8ac3801f39960e79875ef7f3879a0" + integrity sha512-XYKCNv9oiNmSXV5+CgR9pkEkTFqxQGWplnVhO3W9P154H08lU0ZoNH02+uf+NjZ2kjse7Q1fxV4r401LEcGMMg== + dependencies: + pouchdb-collate "7.2.2" + pouchdb-utils "7.2.2" + +pouchdb-utils@7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/pouchdb-utils/-/pouchdb-utils-7.2.2.tgz#c17c4788f1d052b0daf4ef8797bbc4aaa3945aa4" + integrity sha512-XmeM5ioB4KCfyB2MGZXu1Bb2xkElNwF1qG+zVFbQsKQij0zvepdOUfGuWvLRHxTOmt4muIuSOmWZObZa3NOgzQ== + dependencies: + argsarray "0.0.1" + clone-buffer "1.0.0" + immediate "3.3.0" + inherits "2.0.4" + pouchdb-collections "7.2.2" + pouchdb-errors "7.2.2" + pouchdb-md5 "7.2.2" + uuid "8.1.0" + pouchdb@7.3.0: version "7.3.0" resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-7.3.0.tgz#440fbef12dfd8f9002320802528665e883a3b7f8" @@ -1818,6 +2629,11 @@ prr@~1.0.1: resolved "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz" integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== +psl@^1.1.28: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + psl@^1.1.33: version "1.8.0" resolved "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz" @@ -1831,11 +2647,26 @@ pump@^3.0.0: end-of-stream "^1.1.0" once "^1.3.1" +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + integrity sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw== + punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved 
"https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +qs@~6.5.2: + version "6.5.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" + integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== + +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g== + queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" @@ -1905,6 +2736,30 @@ readable-stream@~1.0.31: isarray "0.0.1" string_decoder "~0.10.x" +redis-commands@1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.7.0.tgz#15a6fea2d58281e27b1cd1acfb4b293e278c3a89" + integrity sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ== + +redis-errors@^1.0.0, redis-errors@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/redis-errors/-/redis-errors-1.2.0.tgz#eb62d2adb15e4eaf4610c04afe1529384250abad" + integrity sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w== + +redis-parser@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/redis-parser/-/redis-parser-3.0.0.tgz#b66d828cdcafe6b4b8a428a7def4c6bcac31c8b4" + integrity sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A== + dependencies: + redis-errors "^1.0.0" + +redlock@4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/redlock/-/redlock-4.2.0.tgz#c26590768559afd5fff76aa1133c94b411ff4f5f" + integrity 
sha512-j+oQlG+dOwcetUt2WJWttu4CZVeRzUrcVcISFmEmfyuwCVSJ93rDT7YSgg7H7rnxwoRyk/jU46kycVka5tW7jA== + dependencies: + bluebird "^3.7.2" + reduce-flatten@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/reduce-flatten/-/reduce-flatten-2.0.0.tgz#734fd84e65f375d7ca4465c69798c25c9d10ae27" @@ -1943,6 +2798,32 @@ renamer@^4.0.0: stream-read-all "^3.0.1" typical "^7.1.1" +request@^2.72.0, request@^2.74.0, request@^2.88.0: + version "2.88.2" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" + integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.5.0" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + require-directory@^2.1.1: version "2.1.1" resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" @@ -2006,7 +2887,7 @@ rxjs@^6.6.6: dependencies: tslib "^1.9.0" -safe-buffer@^5.0.1, safe-buffer@~5.2.0: +safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -2016,16 +2897,43 @@ safe-buffer@~5.1.0, safe-buffer@~5.1.1: resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -"safer-buffer@>= 2.1.2 < 3": +"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version 
"2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -semver@^5.4.1: +sanitize-s3-objectkey@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/sanitize-s3-objectkey/-/sanitize-s3-objectkey-0.0.1.tgz#efa9887cd45275b40234fb4bb12fc5754fe64e7e" + integrity sha512-ZTk7aqLxy4sD40GWcYWoLfbe05XLmkKvh6vGKe13ADlei24xlezcvjgKy1qRArlaIbIMYaqK7PCalvZtulZlaQ== + +sax@1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" + integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA== + +sax@>=0.1.1, sax@>=0.6.0: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +semver@7.3.7: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +semver@^5.4.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== +semver@^6.0.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + semver@^7.2.1, semver@^7.3.5: version "7.3.5" resolved "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz" @@ -2033,7 +2941,7 @@ semver@^7.2.1, 
semver@^7.3.5: dependencies: lru-cache "^6.0.0" -set-blocking@~2.0.0: +set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== @@ -2050,6 +2958,11 @@ shebang-regex@^3.0.0: resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== +shimmer@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/shimmer/-/shimmer-1.2.1.tgz#610859f7de327b587efebf501fb43117f9aff337" + integrity sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw== + signal-exit@^3.0.0, signal-exit@^3.0.2: version "3.0.5" resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.5.tgz" @@ -2088,6 +3001,11 @@ source-map@~0.6.1: resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== +spark-md5@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.1.tgz#83a0e255734f2ab4e5c466e5a2cfc9ba2aa2124d" + integrity sha512-0tF3AGSD1ppQeuffsLDIOWlKUd3lS92tFxcsrh5Pe3ZphhnoK+oXIBTzOAThZCiuINZLvpiLH/1VS1/ANEJVig== + spark-md5@3.0.2: version "3.0.2" resolved "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz" @@ -2105,6 +3023,31 @@ sprintf-js@~1.0.2: resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= +sshpk@^1.7.0: + version "1.17.0" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" + integrity sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ== + dependencies: + asn1 "~0.2.3" + 
assert-plus "^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +standard-as-callback@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/standard-as-callback/-/standard-as-callback-2.1.0.tgz#8953fc05359868a77b5b9739a665c5977bb7df45" + integrity sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A== + +step@0.0.x: + version "0.0.6" + resolved "https://registry.yarnpkg.com/step/-/step-0.0.6.tgz#143e7849a5d7d3f4a088fe29af94915216eeede2" + integrity sha512-qSSeQinUJk2w38vUFobjFoE307GqsozMC8VisOCkJLpklvKPT0ptPHwWOrENoag8rgLudvTkfP3bancwP93/Jw== + stream-meter@^1.0.4: version "1.0.4" resolved "https://registry.npmjs.org/stream-meter/-/stream-meter-1.0.4.tgz" @@ -2117,6 +3060,11 @@ stream-read-all@^3.0.1: resolved "https://registry.yarnpkg.com/stream-read-all/-/stream-read-all-3.0.1.tgz#60762ae45e61d93ba0978cda7f3913790052ad96" integrity sha512-EWZT9XOceBPlVJRrYcykW8jyRSZYbkb/0ZK36uLEmoWVO5gxBOnntNTseNzfREsqxqdfEGQrD8SXQ3QWbBmq8A== +string-template@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96" + integrity sha512-SLqR3GBUXuoPP5MmYtD7ompvXiG87QjT6lzOszyXjTM86Uu7At7vNnt2xgyTLq5o9T4IxTYFyGxcULqpsmsfdg== + string-width@^1.0.1: version "1.0.2" resolved "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz" @@ -2218,7 +3166,7 @@ table@^6.0.9: string-width "^4.2.3" strip-ansi "^6.0.1" -tar-fs@^2.0.0, tar-fs@^2.1.1: +tar-fs@2.1.1, tar-fs@^2.0.0, tar-fs@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== @@ -2239,7 +3187,7 @@ tar-stream@^2.1.4: inherits "^2.0.3" readable-stream "^3.1.1" -tar@6.1.11: +tar@6.1.11, tar@^6.1.11: 
version "6.1.11" resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA== @@ -2305,6 +3253,14 @@ to-regex-range@^5.0.1: punycode "^2.1.1" universalify "^0.1.2" +tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + tr46@~0.0.3: version "0.0.3" resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz" @@ -2322,6 +3278,11 @@ tunnel-agent@^0.6.0: dependencies: safe-buffer "^5.0.1" +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== + type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz" @@ -2361,6 +3322,11 @@ typical@^7.1.1: resolved "https://registry.yarnpkg.com/typical/-/typical-7.1.1.tgz#ba177ab7ab103b78534463ffa4c0c9754523ac1f" integrity sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA== +uid2@0.0.x: + version "0.0.4" + resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.4.tgz#033f3b1d5d32505f5ce5f888b9f3b667123c0a44" + integrity sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA== + universalify@^0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz" @@ -2383,26 +3349,71 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" +url@0.10.3: + version "0.10.3" + resolved 
"https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64" + integrity sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ== + dependencies: + punycode "1.3.2" + querystring "0.2.0" + util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= +utils-merge@1.x.x: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" + integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== + +uuid@8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.1.0.tgz#6f1536eb43249f473abc6bd58ff983da1ca30d8d" + integrity sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg== + uuid@8.3.2, uuid@^8.3.2: version "8.3.2" resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== +uuid@^3.3.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + v8-compile-cache@^2.0.3: version "2.3.0" resolved "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz" integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== +verror@1.10.0: + version "1.10.0" + resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + vuvuzela@1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/vuvuzela/-/vuvuzela-1.0.3.tgz" integrity sha512-Tm7jR1xTzBbPW+6y1tknKiEhz04Wf/1iZkcTJjSFcpNko43+dFW6+OOeQe9taJIug3NdfUAjFKgUSyQrIKaDvQ== +webfinger@^0.4.2: + version "0.4.2" + resolved "https://registry.yarnpkg.com/webfinger/-/webfinger-0.4.2.tgz#3477a6d97799461896039fcffc650b73468ee76d" + integrity sha512-PvvQ/k74HkC3q5G7bGu4VYeKDt3ePZMzT5qFPtEnOL8eyIU1/06OtDn9X5vlkQ23BlegA3eN89rDLiYUife3xQ== + dependencies: + step "0.0.x" + xml2js "0.1.x" + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" @@ -2423,7 +3434,7 @@ which@^2.0.1: dependencies: isexe "^2.0.0" -wide-align@^1.1.0: +wide-align@^1.1.0, wide-align@^1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3" integrity sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg== @@ -2464,6 +3475,26 @@ write-stream@~0.4.3: dependencies: readable-stream "~0.0.2" +xml2js@0.1.x: + version "0.1.14" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c" + integrity sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA== + dependencies: + sax ">=0.1.1" + +xml2js@0.4.19: + version "0.4.19" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7" + integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q== + dependencies: + sax ">=0.6.0" + xmlbuilder "~9.0.1" + +xmlbuilder@~9.0.1: 
+ version "9.0.7" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d" + integrity sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ== + xtend@^4.0.2, xtend@~4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" @@ -2496,3 +3527,8 @@ yargs@^16.1.0, yargs@^16.2.0: string-width "^4.2.0" y18n "^5.0.5" yargs-parser "^20.2.2" + +zlib@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/zlib/-/zlib-1.0.5.tgz#6e7c972fc371c645a6afb03ab14769def114fcc0" + integrity sha512-40fpE2II+Cd3k8HWTWONfeKE2jL+P42iWJ1zzps5W51qcTsOUKM5Q5m2PFb0CLxlmFAaUuUdJGc3OfZy947v0w== diff --git a/packages/client/manifest.json b/packages/client/manifest.json index cfed5b65f7..14f02cbe5e 100644 --- a/packages/client/manifest.json +++ b/packages/client/manifest.json @@ -1466,10 +1466,11 @@ }, { "type": "select", - "label": "Colours", + "label": "Colors", "key": "palette", "defaultValue": "Palette 1", "options": [ + "Custom", "Palette 1", "Palette 2", "Palette 3", @@ -1482,6 +1483,51 @@ "Palette 10" ] }, + { + "type": "color", + "label": "C1", + "key": "c1", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C2", + "key": "c2", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C3", + "key": "c3", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C4", + "key": "c4", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C5", + "key": "c5", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, { "type": "boolean", "label": "Stacked", @@ -1581,10 +1627,11 @@ }, { "type": "select", - "label": "Colours", + "label": "Colors", "key": "palette", "defaultValue": "Palette 1", "options": [ + "Custom", "Palette 1", "Palette 
2", "Palette 3", @@ -1597,6 +1644,51 @@ "Palette 10" ] }, + { + "type": "color", + "label": "C1", + "key": "c1", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C2", + "key": "c2", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C3", + "key": "c3", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C4", + "key": "c4", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C5", + "key": "c5", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, { "type": "select", "label": "Curve", @@ -1695,10 +1787,11 @@ }, { "type": "select", - "label": "Colours", + "label": "Colors", "key": "palette", "defaultValue": "Palette 1", "options": [ + "Custom", "Palette 1", "Palette 2", "Palette 3", @@ -1711,6 +1804,51 @@ "Palette 10" ] }, + { + "type": "color", + "label": "C1", + "key": "c1", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C2", + "key": "c2", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C3", + "key": "c3", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C4", + "key": "c4", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C5", + "key": "c5", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, { "type": "select", "label": "Curve", @@ -1800,10 +1938,11 @@ }, { "type": "select", - "label": "Colours", + "label": "Colors", "key": "palette", "defaultValue": "Palette 1", "options": [ + "Custom", "Palette 1", "Palette 2", "Palette 3", @@ -1816,6 +1955,51 @@ "Palette 10" ] }, + { + "type": "color", + "label": "C1", + "key": "c1", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + 
"type": "color", + "label": "C2", + "key": "c2", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C3", + "key": "c3", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C4", + "key": "c4", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C5", + "key": "c5", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, { "type": "boolean", "label": "Data Labels", @@ -1882,10 +2066,11 @@ }, { "type": "select", - "label": "Colours", + "label": "Colors", "key": "palette", "defaultValue": "Palette 1", "options": [ + "Custom", "Palette 1", "Palette 2", "Palette 3", @@ -1898,6 +2083,51 @@ "Palette 10" ] }, + { + "type": "color", + "label": "C1", + "key": "c1", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C2", + "key": "c2", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C3", + "key": "c3", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C4", + "key": "c4", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, + { + "type": "color", + "label": "C5", + "key": "c5", + "dependsOn": { + "setting": "palette", + "value": "Custom" + } + }, { "type": "boolean", "label": "Data Labels", @@ -2875,6 +3105,12 @@ "key": "timeOnly", "defaultValue": false }, + { + "type": "boolean", + "label": "24-Hour time", + "key": "time24hr", + "defaultValue": false + }, { "type": "boolean", "label": "Ignore time zones", diff --git a/packages/client/package.json b/packages/client/package.json index e100176e5d..876bd0782e 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/client", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "license": "MPL-2.0", "module": 
"dist/budibase-client.js", "main": "dist/budibase-client.js", @@ -19,9 +19,9 @@ "dev:builder": "rollup -cw" }, "dependencies": { - "@budibase/bbui": "1.1.33-alpha.1", - "@budibase/frontend-core": "1.1.33-alpha.1", - "@budibase/string-templates": "1.1.33-alpha.1", + "@budibase/bbui": "1.2.41-alpha.0", + "@budibase/frontend-core": "1.2.41-alpha.0", + "@budibase/string-templates": "1.2.41-alpha.0", "@spectrum-css/button": "^3.0.3", "@spectrum-css/card": "^3.0.3", "@spectrum-css/divider": "^1.0.3", diff --git a/packages/client/src/components/ClientApp.svelte b/packages/client/src/components/ClientApp.svelte index 8312fb29d1..fa503c2e23 100644 --- a/packages/client/src/components/ClientApp.svelte +++ b/packages/client/src/components/ClientApp.svelte @@ -83,6 +83,8 @@ dataLoaded = true if (get(builderStore).inBuilder) { builderStore.actions.notifyLoaded() + } else { + builderStore.actions.analyticsPing({ source: "app" }) } }) diff --git a/packages/client/src/components/app/charts/ApexChart.svelte b/packages/client/src/components/app/charts/ApexChart.svelte index 7bb37a587b..87d78bf5a2 100644 --- a/packages/client/src/components/app/charts/ApexChart.svelte +++ b/packages/client/src/components/app/charts/ApexChart.svelte @@ -10,7 +10,9 @@ {#if options} -
+ {#key options.customColor} +
+ {/key} {:else if $builderStore.inBuilder}
diff --git a/packages/client/src/components/app/charts/ApexOptionsBuilder.js b/packages/client/src/components/app/charts/ApexOptionsBuilder.js index ef4083d648..31c5a820f7 100644 --- a/packages/client/src/components/app/charts/ApexOptionsBuilder.js +++ b/packages/client/src/components/app/charts/ApexOptionsBuilder.js @@ -62,8 +62,14 @@ export class ApexOptionsBuilder { return this.setOption(["title", "text"], title) } - color(color) { - return this.setOption(["colors"], [color]) + colors(colors) { + if (!colors) { + delete this.options.colors + this.options["customColor"] = false + return this + } + this.options["customColor"] = true + return this.setOption(["colors"], colors) } width(width) { diff --git a/packages/client/src/components/app/charts/BarChart.svelte b/packages/client/src/components/app/charts/BarChart.svelte index f1e02d8af6..fd8443e2d6 100644 --- a/packages/client/src/components/app/charts/BarChart.svelte +++ b/packages/client/src/components/app/charts/BarChart.svelte @@ -16,6 +16,7 @@ export let stacked export let yAxisUnits export let palette + export let c1, c2, c3, c4, c5 export let horizontal $: options = setUpChart( @@ -33,9 +34,13 @@ stacked, yAxisUnits, palette, - horizontal + horizontal, + c1 && c2 && c3 && c4 && c5 ? [c1, c2, c3, c4, c5] : null, + customColor ) + $: customColor = palette === "Custom" + const setUpChart = ( title, dataProvider, @@ -51,7 +56,9 @@ stacked, yAxisUnits, palette, - horizontal + horizontal, + colors, + customColor ) => { const allCols = [labelColumn, ...(valueColumns || [null])] if ( @@ -85,6 +92,7 @@ .stacked(stacked) .palette(palette) .horizontal(horizontal) + .colors(customColor ? 
colors : null) // Add data let useDates = false diff --git a/packages/client/src/components/app/charts/LineChart.svelte b/packages/client/src/components/app/charts/LineChart.svelte index afb9f14262..7f82a833d2 100644 --- a/packages/client/src/components/app/charts/LineChart.svelte +++ b/packages/client/src/components/app/charts/LineChart.svelte @@ -17,6 +17,7 @@ export let legend export let yAxisUnits export let palette + export let c1, c2, c3, c4, c5 // Area specific props export let area @@ -40,9 +41,13 @@ palette, area, stacked, - gradient + gradient, + c1 && c2 && c3 && c4 && c5 ? [c1, c2, c3, c4, c5] : null, + customColor ) + $: customColor = palette === "Custom" + const setUpChart = ( title, dataProvider, @@ -60,7 +65,9 @@ palette, area, stacked, - gradient + gradient, + colors, + customColor ) => { const allCols = [labelColumn, ...(valueColumns || [null])] if ( @@ -96,6 +103,7 @@ .legend(legend) .yUnits(yAxisUnits) .palette(palette) + .colors(customColor ? colors : null) // Add data let useDates = false diff --git a/packages/client/src/components/app/charts/PieChart.svelte b/packages/client/src/components/app/charts/PieChart.svelte index 38a42ee37b..8cb7317d94 100644 --- a/packages/client/src/components/app/charts/PieChart.svelte +++ b/packages/client/src/components/app/charts/PieChart.svelte @@ -13,6 +13,7 @@ export let legend export let donut export let palette + export let c1, c2, c3, c4, c5 $: options = setUpChart( title, @@ -25,9 +26,13 @@ animate, legend, donut, - palette + palette, + c1 && c2 && c3 && c4 && c5 ? [c1, c2, c3, c4, c5] : null, + customColor ) + $: customColor = palette === "Custom" + const setUpChart = ( title, dataProvider, @@ -39,7 +44,9 @@ animate, legend, donut, - palette + palette, + colors, + customColor ) => { if ( !dataProvider || @@ -70,6 +77,7 @@ .legend(legend) .legendPosition("right") .palette(palette) + .colors(customColor ? 
colors : null) // Add data if valid datasource const series = data.map(row => parseFloat(row[valueColumn])) diff --git a/packages/client/src/components/app/forms/DateTimeField.svelte b/packages/client/src/components/app/forms/DateTimeField.svelte index 4ca766121d..bbbc5a266d 100644 --- a/packages/client/src/components/app/forms/DateTimeField.svelte +++ b/packages/client/src/components/app/forms/DateTimeField.svelte @@ -8,6 +8,7 @@ export let disabled = false export let enableTime = false export let timeOnly = false + export let time24hr = false export let ignoreTimezones = false export let validation export let defaultValue @@ -44,6 +45,7 @@ appendTo={document.getElementById("flatpickr-root")} {enableTime} {timeOnly} + {time24hr} {ignoreTimezones} {placeholder} /> diff --git a/packages/client/src/components/app/forms/RelationshipField.svelte b/packages/client/src/components/app/forms/RelationshipField.svelte index 2c1136bea3..ce2f194786 100644 --- a/packages/client/src/components/app/forms/RelationshipField.svelte +++ b/packages/client/src/components/app/forms/RelationshipField.svelte @@ -55,6 +55,9 @@ if (!values) { return [] } + if (!Array.isArray(values)) { + values = [values] + } return values.map(value => (typeof value === "object" ? 
value._id : value)) } diff --git a/packages/client/src/stores/builder.js b/packages/client/src/stores/builder.js index af81fc33c8..32eb956d52 100644 --- a/packages/client/src/stores/builder.js +++ b/packages/client/src/stores/builder.js @@ -1,4 +1,5 @@ import { writable, get } from "svelte/store" +import { API } from "api" import { devToolsStore } from "./devTools.js" const dispatchEvent = (type, data = {}) => { @@ -48,6 +49,13 @@ const createBuilderStore = () => { notifyLoaded: () => { dispatchEvent("preview-loaded") }, + analyticsPing: async () => { + try { + await API.analyticsPing({ source: "app" }) + } catch (error) { + // Do nothing + } + }, moveComponent: (componentId, destinationComponentId, mode) => { dispatchEvent("move-component", { componentId, diff --git a/packages/frontend-core/package.json b/packages/frontend-core/package.json index ca0fd0d771..f0173e9fb8 100644 --- a/packages/frontend-core/package.json +++ b/packages/frontend-core/package.json @@ -1,12 +1,12 @@ { "name": "@budibase/frontend-core", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "description": "Budibase frontend core libraries used in builder and client", "author": "Budibase", "license": "MPL-2.0", "svelte": "src/index.js", "dependencies": { - "@budibase/bbui": "1.1.33-alpha.1", + "@budibase/bbui": "1.2.41-alpha.0", "lodash": "^4.17.21", "svelte": "^3.46.2" } diff --git a/packages/frontend-core/src/api/analytics.js b/packages/frontend-core/src/api/analytics.js index 5aa35469ae..653943c6ab 100644 --- a/packages/frontend-core/src/api/analytics.js +++ b/packages/frontend-core/src/api/analytics.js @@ -7,4 +7,11 @@ export const buildAnalyticsEndpoints = API => ({ url: "/api/bbtel", }) }, + analyticsPing: async ({ source }) => { + const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone + return await API.post({ + url: "/api/bbtel/ping", + body: { source, timezone }, + }) + }, }) diff --git a/packages/frontend-core/src/api/user.js 
b/packages/frontend-core/src/api/user.js index 17223a80e6..653376aa55 100644 --- a/packages/frontend-core/src/api/user.js +++ b/packages/frontend-core/src/api/user.js @@ -141,20 +141,18 @@ export const buildUserEndpoints = API => ({ /** * Invites multiple users to the current tenant. - * @param email An array of email addresses - * @param builder whether the user should be a global builder - * @param admin whether the user should be a global admin + * @param users An array of users to invite */ - inviteUsers: async ({ emails, builder, admin }) => { + inviteUsers: async users => { return await API.post({ - url: "/api/global/users/inviteMultiple", - body: { - emails, + url: "/api/global/users/multi/invite", + body: users.map(user => ({ + email: user.email, userInfo: { - admin: admin ? { global: true } : undefined, - builder: builder ? { global: true } : undefined, + admin: user.admin ? { global: true } : undefined, + builder: user.admin || user.builder ? { global: true } : undefined, }, - }, + })), }) }, diff --git a/packages/frontend-core/src/constants.js b/packages/frontend-core/src/constants.js index 77765f8d6e..a5b3650b1c 100644 --- a/packages/frontend-core/src/constants.js +++ b/packages/frontend-core/src/constants.js @@ -39,13 +39,17 @@ export const OperatorOptions = { label: "Contains", }, NotContains: { - value: "notEqual", + value: "notContains", label: "Does Not Contain", }, In: { value: "oneOf", label: "Is in", }, + ContainsAny: { + value: "containsAny", + label: "Has any", + }, } // Cookie names @@ -60,25 +64,31 @@ export const TableNames = { USERS: "ta_users", } -export const BbRoles = [ - { label: "App User", value: "appUser" }, - { label: "Developer", value: "developer" }, - { label: "Admin", value: "admin" }, +export const BudibaseRoles = { + AppUser: "appUser", + Developer: "developer", + Admin: "admin", +} + +export const BudibaseRoleOptions = [ + { label: "App User", value: BudibaseRoles.AppUser }, + { label: "Developer", value: 
BudibaseRoles.Developer }, + { label: "Admin", value: BudibaseRoles.Admin }, ] export const BuilderRoleDescriptions = [ { - value: "appUser", + value: BudibaseRoles.AppUser, icon: "User", label: "App user - Only has access to published apps", }, { - value: "developer", + value: BudibaseRoles.Developer, icon: "Hammer", label: "Developer - Access to the app builder", }, { - value: "admin", + value: BudibaseRoles.Admin, icon: "Draw", label: "Admin - Full access", }, diff --git a/packages/frontend-core/src/fetch/DataFetch.js b/packages/frontend-core/src/fetch/DataFetch.js index ecd5313af5..338e6e0405 100644 --- a/packages/frontend-core/src/fetch/DataFetch.js +++ b/packages/frontend-core/src/fetch/DataFetch.js @@ -170,6 +170,7 @@ export default class DataFetch { rows: page.rows, info: page.info, cursors: paginate && page.hasNextPage ? [null, page.cursor] : [null], + error: page.error, })) } @@ -182,7 +183,7 @@ export default class DataFetch { const features = get(this.featureStore) // Get the actual data - let { rows, info, hasNextPage, cursor } = await this.getData() + let { rows, info, hasNextPage, cursor, error } = await this.getData() // If we don't support searching, do a client search if (!features.supportsSearch) { @@ -204,6 +205,7 @@ export default class DataFetch { info, hasNextPage, cursor, + error, } } @@ -345,8 +347,14 @@ export default class DataFetch { return } this.store.update($store => ({ ...$store, loading: true })) - const { rows, info } = await this.getPage() - this.store.update($store => ({ ...$store, rows, info, loading: false })) + const { rows, info, error } = await this.getPage() + this.store.update($store => ({ + ...$store, + rows, + info, + loading: false, + error, + })) } /** @@ -386,7 +394,7 @@ export default class DataFetch { cursor: nextCursor, pageNumber: $store.pageNumber + 1, })) - const { rows, info, hasNextPage, cursor } = await this.getPage() + const { rows, info, hasNextPage, cursor, error } = await this.getPage() // Update state 
this.store.update($store => { @@ -400,6 +408,7 @@ export default class DataFetch { info, cursors, loading: false, + error, } }) } @@ -421,7 +430,7 @@ export default class DataFetch { cursor: prevCursor, pageNumber: $store.pageNumber - 1, })) - const { rows, info } = await this.getPage() + const { rows, info, error } = await this.getPage() // Update state this.store.update($store => { @@ -430,6 +439,7 @@ export default class DataFetch { rows, info, loading: false, + error, } }) } diff --git a/packages/frontend-core/src/fetch/TableFetch.js b/packages/frontend-core/src/fetch/TableFetch.js index cf0e124020..a13b1bd186 100644 --- a/packages/frontend-core/src/fetch/TableFetch.js +++ b/packages/frontend-core/src/fetch/TableFetch.js @@ -37,6 +37,7 @@ export default class TableFetch extends DataFetch { return { rows: [], hasNextPage: false, + error, } } } diff --git a/packages/frontend-core/src/utils/lucene.js b/packages/frontend-core/src/utils/lucene.js index b6699628d1..6c0c2035d3 100644 --- a/packages/frontend-core/src/utils/lucene.js +++ b/packages/frontend-core/src/utils/lucene.js @@ -32,9 +32,9 @@ export const getValidOperatorsForType = type => { } else if (type === "number") { return numOps } else if (type === "options") { - return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] + return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In] } else if (type === "array") { - return [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty] + return [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty, Op.ContainsAny] } else if (type === "boolean") { return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] } else if (type === "longform") { @@ -96,6 +96,7 @@ export const buildLuceneQuery = filter => { contains: {}, notContains: {}, oneOf: {}, + containsAny: {}, } if (Array.isArray(filter)) { filter.forEach(expression => { @@ -103,6 +104,10 @@ export const buildLuceneQuery = filter => { const isHbs = typeof value === "string" && value.match(HBS_REGEX)?.length > 0 // Parse all 
values into correct types + if (operator === "allOr") { + query.allOr = true + return + } if (type === "datetime") { // Ensure date value is a valid date and parse into correct format if (!value) { @@ -124,6 +129,13 @@ export const buildLuceneQuery = filter => { if (type === "boolean") { value = `${value}`?.toLowerCase() === "true" } + if ( + ["contains", "notContains", "containsAny"].includes(operator) && + type === "array" && + typeof value === "string" + ) { + value = value.split(",") + } if (operator.startsWith("range")) { const minint = SqlNumberTypeRangeMap[externalType]?.min || Number.MIN_SAFE_INTEGER @@ -240,6 +252,18 @@ export const runLuceneQuery = (docs, query) => { return !testValue?.includes(docValue) }) + const containsAny = match("containsAny", (docValue, testValue) => { + return !docValue?.includes(...testValue) + }) + + const contains = match("contains", (docValue, testValue) => { + return !testValue?.every(item => docValue?.includes(item)) + }) + + const notContains = match("notContains", (docValue, testValue) => { + return testValue?.every(item => docValue?.includes(item)) + }) + // Match a document against all criteria const docMatch = doc => { return ( @@ -250,7 +274,10 @@ export const runLuceneQuery = (docs, query) => { notEqualMatch(doc) && emptyMatch(doc) && notEmptyMatch(doc) && - oneOf(doc) + oneOf(doc) && + contains(doc) && + containsAny(doc) && + notContains(doc) ) } diff --git a/packages/server/Dockerfile b/packages/server/Dockerfile index 1d15dd501f..bd01b6f9ff 100644 --- a/packages/server/Dockerfile +++ b/packages/server/Dockerfile @@ -11,7 +11,7 @@ ENV PORT=4001 ENV COUCH_DB_URL=https://couchdb.budi.live:5984 ENV BUDIBASE_ENVIRONMENT=PRODUCTION ENV SERVICE=app-service -ENV POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS +ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU # copy files and install dependencies COPY . 
./ diff --git a/packages/server/__mocks__/mongodb.ts b/packages/server/__mocks__/mongodb.ts index 2a03dc7a7b..92ec89227f 100644 --- a/packages/server/__mocks__/mongodb.ts +++ b/packages/server/__mocks__/mongodb.ts @@ -8,6 +8,7 @@ module MongoMock { this.insertMany = jest.fn(() => ({ toArray: () => [] })) this.find = jest.fn(() => ({ toArray: () => [] })) this.findOne = jest.fn() + this.findOneAndUpdate = jest.fn() this.count = jest.fn() this.deleteOne = jest.fn() this.deleteMany = jest.fn(() => ({ toArray: () => [] })) @@ -19,6 +20,7 @@ module MongoMock { find: this.find, insertMany: this.insertMany, findOne: this.findOne, + findOneAndUpdate: this.findOneAndUpdate, count: this.count, deleteOne: this.deleteOne, deleteMany: this.deleteMany, @@ -31,5 +33,7 @@ module MongoMock { }) } + mongodb.ObjectID = require("mongodb").ObjectID + module.exports = mongodb } diff --git a/packages/server/__mocks__/node-fetch.ts b/packages/server/__mocks__/node-fetch.ts index 1a7015fa52..dfffa7eb58 100644 --- a/packages/server/__mocks__/node-fetch.ts +++ b/packages/server/__mocks__/node-fetch.ts @@ -57,12 +57,19 @@ module FetchMock { 404 ) } else if (url.includes("_search")) { + const body = opts.body + const parts = body.split("tableId:") + let tableId + if (parts && parts[1]) { + tableId = parts[1].split('"')[0] + } return json({ rows: [ { doc: { _id: "test", - tableId: opts.body.split("tableId:")[1].split('"')[0], + tableId: tableId, + query: opts.body, }, }, ], diff --git a/packages/server/package.json b/packages/server/package.json index 8ff18e1d24..53b68f1a03 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/server", "email": "hi@budibase.com", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "description": "Budibase Web Server", "main": "src/index.ts", "repository": { @@ -77,11 +77,11 @@ "license": "GPL-3.0", "dependencies": { "@apidevtools/swagger-parser": "10.0.3", - "@budibase/backend-core": 
"1.1.33-alpha.1", - "@budibase/client": "1.1.33-alpha.1", - "@budibase/pro": "1.1.33-alpha.1", - "@budibase/string-templates": "1.1.33-alpha.1", - "@budibase/types": "1.1.33-alpha.1", + "@budibase/backend-core": "1.2.41-alpha.0", + "@budibase/client": "1.2.41-alpha.0", + "@budibase/pro": "1.2.41-alpha.0", + "@budibase/string-templates": "1.2.41-alpha.0", + "@budibase/types": "1.2.41-alpha.0", "@bull-board/api": "3.7.0", "@bull-board/koa": "3.9.4", "@elastic/elasticsearch": "7.10.0", @@ -106,7 +106,7 @@ "google-auth-library": "7.12.0", "google-spreadsheet": "3.2.0", "jimp": "0.16.1", - "joi": "17.2.1", + "joi": "17.6.0", "js-yaml": "4.1.0", "jsonschema": "1.4.0", "knex": "0.95.15", diff --git a/packages/server/src/api/controllers/analytics.ts b/packages/server/src/api/controllers/analytics.ts index efb9115e54..6c80593d5d 100644 --- a/packages/server/src/api/controllers/analytics.ts +++ b/packages/server/src/api/controllers/analytics.ts @@ -1,4 +1,7 @@ import { events } from "@budibase/backend-core" +import { AnalyticsPingRequest, PingSource } from "@budibase/types" +import { DocumentType, isDevAppID } from "../../db/utils" +import { context } from "@budibase/backend-core" export const isEnabled = async (ctx: any) => { const enabled = await events.analytics.enabled() @@ -6,3 +9,27 @@ export const isEnabled = async (ctx: any) => { enabled, } } + +export const ping = async (ctx: any) => { + const body = ctx.request.body as AnalyticsPingRequest + switch (body.source) { + case PingSource.APP: { + const db = context.getAppDB({ skip_setup: true }) + const appInfo = await db.get(DocumentType.APP_METADATA) + let appId = context.getAppId() + + if (isDevAppID(appId)) { + await events.serve.servedAppPreview(appInfo, body.timezone) + } else { + await events.serve.servedApp(appInfo, body.timezone) + } + break + } + case PingSource.BUILDER: { + await events.serve.servedBuilder(body.timezone) + break + } + } + + ctx.status = 200 +} diff --git 
a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts index 57a18e7df3..ae4a60d78d 100644 --- a/packages/server/src/api/controllers/application.ts +++ b/packages/server/src/api/controllers/application.ts @@ -15,7 +15,7 @@ import { getLayoutParams, getScreenParams, generateDevAppID, - DocumentTypes, + DocumentType, AppStatus, } from "../../db/utils" const { @@ -206,7 +206,7 @@ export const fetchAppDefinition = async (ctx: any) => { export const fetchAppPackage = async (ctx: any) => { const db = context.getAppDB() - const application = await db.get(DocumentTypes.APP_METADATA) + const application = await db.get(DocumentType.APP_METADATA) const layouts = await getLayouts() let screens = await getScreens() @@ -248,13 +248,13 @@ const performAppCreate = async (ctx: any) => { let _rev try { // if template there will be an existing doc - const existing = await db.get(DocumentTypes.APP_METADATA) + const existing = await db.get(DocumentType.APP_METADATA) _rev = existing._rev } catch (err) { // nothing to do } const newApplication: App = { - _id: DocumentTypes.APP_METADATA, + _id: DocumentType.APP_METADATA, _rev, appId: instance._id, type: "app", @@ -383,7 +383,7 @@ export const update = async (ctx: any) => { export const updateClient = async (ctx: any) => { // Get current app version const db = context.getAppDB() - const application = await db.get(DocumentTypes.APP_METADATA) + const application = await db.get(DocumentType.APP_METADATA) const currentVersion = application.version // Update client library and manifest @@ -407,7 +407,7 @@ export const updateClient = async (ctx: any) => { export const revertClient = async (ctx: any) => { // Check app can be reverted const db = context.getAppDB() - const application = await db.get(DocumentTypes.APP_METADATA) + const application = await db.get(DocumentType.APP_METADATA) if (!application.revertableVersion) { ctx.throw(400, "There is no version to revert to") } @@ -439,7 +439,7 @@ 
const destroyApp = async (ctx: any) => { } const db = isUnpublish ? context.getProdAppDB() : context.getAppDB() - const app = await db.get(DocumentTypes.APP_METADATA) + const app = await db.get(DocumentType.APP_METADATA) const result = await db.destroy() if (isUnpublish) { @@ -526,7 +526,7 @@ export const sync = async (ctx: any, next: any) => { try { await replication.replicate({ filter: function (doc: any) { - return doc._id !== DocumentTypes.APP_METADATA + return doc._id !== DocumentType.APP_METADATA }, }) } catch (err) { @@ -550,7 +550,7 @@ export const sync = async (ctx: any, next: any) => { const updateAppPackage = async (appPackage: any, appId: any) => { return context.doInAppContext(appId, async () => { const db = context.getAppDB() - const application = await db.get(DocumentTypes.APP_METADATA) + const application = await db.get(DocumentType.APP_METADATA) const newAppPackage = { ...application, ...appPackage } if (appPackage._rev !== application._rev) { diff --git a/packages/server/src/api/controllers/automation.js b/packages/server/src/api/controllers/automation.js index 775fe03b0f..2190adc3b9 100644 --- a/packages/server/src/api/controllers/automation.js +++ b/packages/server/src/api/controllers/automation.js @@ -3,7 +3,7 @@ const triggers = require("../../automations/triggers") const { getAutomationParams, generateAutomationID, - DocumentTypes, + DocumentType, } = require("../../db/utils") const { checkForWebhooks, @@ -201,7 +201,7 @@ exports.clearLogError = async function (ctx) { const { automationId, appId } = ctx.request.body await doInAppContext(appId, async () => { const db = getProdAppDB() - const metadata = await db.get(DocumentTypes.APP_METADATA) + const metadata = await db.get(DocumentType.APP_METADATA) if (!automationId) { delete metadata.automationErrors } else if ( diff --git a/packages/server/src/api/controllers/backup.js b/packages/server/src/api/controllers/backup.js index f92b3fb256..a31ab134db 100644 --- 
a/packages/server/src/api/controllers/backup.js +++ b/packages/server/src/api/controllers/backup.js @@ -1,6 +1,6 @@ const { streamBackup } = require("../../utilities/fileSystem") const { events, context } = require("@budibase/backend-core") -const { DocumentTypes } = require("../../db/utils") +const { DocumentType } = require("../../db/utils") exports.exportAppDump = async function (ctx) { let { appId, excludeRows } = ctx.query @@ -12,7 +12,7 @@ exports.exportAppDump = async function (ctx) { await context.doInAppContext(appId, async () => { const appDb = context.getAppDB() - const app = await appDb.get(DocumentTypes.APP_METADATA) + const app = await appDb.get(DocumentType.APP_METADATA) await events.app.exported(app) }) } diff --git a/packages/server/src/api/controllers/cloud.js b/packages/server/src/api/controllers/cloud.js index 38804f4d4a..7be8b14bcd 100644 --- a/packages/server/src/api/controllers/cloud.js +++ b/packages/server/src/api/controllers/cloud.js @@ -11,7 +11,7 @@ const { getGlobalDB, } = require("@budibase/backend-core/tenancy") const { create } = require("./application") -const { getDocParams, DocumentTypes, isDevAppID } = require("../../db/utils") +const { getDocParams, DocumentType, isDevAppID } = require("../../db/utils") async function createApp(appName, appImport) { const ctx = { @@ -31,7 +31,7 @@ exports.exportApps = async ctx => { } const apps = await getAllApps({ all: true }) const globalDBString = await exportDB(getGlobalDBName(), { - filter: doc => !doc._id.startsWith(DocumentTypes.USER), + filter: doc => !doc._id.startsWith(DocumentType.USER), }) let allDBs = { global: globalDBString, @@ -97,7 +97,7 @@ exports.importApps = async ctx => { } // if there are any users make sure to remove them - let users = await getAllDocType(globalDb, DocumentTypes.USER) + let users = await getAllDocType(globalDb, DocumentType.USER) let userDeletionPromises = [] for (let user of users) { userDeletionPromises.push(globalDb.remove(user._id, user._rev)) diff 
--git a/packages/server/src/api/controllers/component.js b/packages/server/src/api/controllers/component.js index 2d0aaea23a..dec5aebb7c 100644 --- a/packages/server/src/api/controllers/component.js +++ b/packages/server/src/api/controllers/component.js @@ -1,10 +1,10 @@ -const { DocumentTypes } = require("../../db/utils") +const { DocumentType } = require("../../db/utils") const { getComponentLibraryManifest } = require("../../utilities/fileSystem") const { getAppDB } = require("@budibase/backend-core/context") exports.fetchAppComponentDefinitions = async function (ctx) { const db = getAppDB() - const app = await db.get(DocumentTypes.APP_METADATA) + const app = await db.get(DocumentType.APP_METADATA) let componentManifests = await Promise.all( app.componentLibraries.map(async library => { diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js index 379b719f4f..8b9b765a5f 100644 --- a/packages/server/src/api/controllers/datasource.js +++ b/packages/server/src/api/controllers/datasource.js @@ -2,7 +2,7 @@ const { generateDatasourceID, getDatasourceParams, getQueryParams, - DocumentTypes, + DocumentType, BudibaseInternalDB, getTableParams, } = require("../../db/utils") @@ -132,7 +132,7 @@ exports.save = async function (ctx) { const datasource = { _id: generateDatasourceID({ plus }), - type: plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE, + type: plus ? 
DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE, ...ctx.request.body.datasource, } diff --git a/packages/server/src/api/controllers/deploy/index.ts b/packages/server/src/api/controllers/deploy/index.ts index e2b66fc936..d63e167dfb 100644 --- a/packages/server/src/api/controllers/deploy/index.ts +++ b/packages/server/src/api/controllers/deploy/index.ts @@ -4,7 +4,7 @@ import { getProdAppID, getDevelopmentAppID, } from "@budibase/backend-core/db" -import { DocumentTypes, getAutomationParams } from "../../../db/utils" +import { DocumentType, getAutomationParams } from "../../../db/utils" import { disableAllCrons, enableCronTrigger, @@ -52,9 +52,9 @@ async function storeDeploymentHistory(deployment: any) { let deploymentDoc try { // theres only one deployment doc per app database - deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) + deploymentDoc = await db.get(DocumentType.DEPLOYMENTS) } catch (err) { - deploymentDoc = { _id: DocumentTypes.DEPLOYMENTS, history: {} } + deploymentDoc = { _id: DocumentType.DEPLOYMENTS, history: {} } } const deploymentId = deploymentJSON._id @@ -115,7 +115,7 @@ async function deployApp(deployment: any) { await replication.replicate() console.log("replication complete.. 
replacing app meta doc") const db = getProdAppDB() - const appDoc = await db.get(DocumentTypes.APP_METADATA) + const appDoc = await db.get(DocumentType.APP_METADATA) deployment.appUrl = appDoc.url @@ -146,7 +146,7 @@ async function deployApp(deployment: any) { export async function fetchDeployments(ctx: any) { try { const db = getAppDB() - const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) + const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS) const { updated, deployments } = await checkAllDeployments(deploymentDoc) if (updated) { await db.put(deployments) @@ -160,7 +160,7 @@ export async function fetchDeployments(ctx: any) { export async function deploymentProgress(ctx: any) { try { const db = getAppDB() - const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) + const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS) ctx.body = deploymentDoc[ctx.params.deploymentId] } catch (err) { ctx.throw( @@ -173,7 +173,7 @@ export async function deploymentProgress(ctx: any) { const isFirstDeploy = async () => { try { const db = getProdAppDB() - await db.get(DocumentTypes.APP_METADATA) + await db.get(DocumentType.APP_METADATA) } catch (e: any) { if (e.status === 404) { return true diff --git a/packages/server/src/api/controllers/dev.js b/packages/server/src/api/controllers/dev.js index 3c5835ac21..8438175ca8 100644 --- a/packages/server/src/api/controllers/dev.js +++ b/packages/server/src/api/controllers/dev.js @@ -4,7 +4,7 @@ const { checkSlashesInUrl } = require("../../utilities") const { request } = require("../../utilities/workerRequests") const { clearLock } = require("../../utilities/redis") const { Replication, getProdAppID } = require("@budibase/backend-core/db") -const { DocumentTypes } = require("../../db/utils") +const { DocumentType } = require("../../db/utils") const { app: appCache } = require("@budibase/backend-core/cache") const { getProdAppDB, getAppDB } = require("@budibase/backend-core/context") const { events } = 
require("@budibase/backend-core") @@ -87,7 +87,7 @@ exports.revert = async ctx => { if (info.error) { throw info.error } - const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) + const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS) if ( !deploymentDoc.history || Object.keys(deploymentDoc.history).length === 0 @@ -110,7 +110,7 @@ exports.revert = async ctx => { // update appID in reverted app to be dev version again const db = getAppDB() - const appDoc = await db.get(DocumentTypes.APP_METADATA) + const appDoc = await db.get(DocumentType.APP_METADATA) appDoc.appId = appId appDoc.instance._id = appId await db.put(appDoc) diff --git a/packages/server/src/api/controllers/integration.js b/packages/server/src/api/controllers/integration.js index 28748541c4..cc9efac93a 100644 --- a/packages/server/src/api/controllers/integration.js +++ b/packages/server/src/api/controllers/integration.js @@ -1,6 +1,6 @@ const { cloneDeep } = require("lodash") const { definitions } = require("../../integrations") -const { SourceNames } = require("../../definitions/datasource") +const { SourceName } = require("@budibase/types") const googlesheets = require("../../integrations/googlesheets") const { featureFlags } = require("@budibase/backend-core") @@ -10,7 +10,7 @@ exports.fetch = async function (ctx) { // for google sheets integration google verification if (featureFlags.isEnabled(featureFlags.FeatureFlag.GOOGLE_SHEETS)) { - defs[SourceNames.GOOGLE_SHEETS] = googlesheets.schema + defs[SourceName.GOOGLE_SHEETS] = googlesheets.schema } ctx.body = defs diff --git a/packages/server/src/api/controllers/public/utils.ts b/packages/server/src/api/controllers/public/utils.ts index 8a7d7c4f4e..d86eced9ba 100644 --- a/packages/server/src/api/controllers/public/utils.ts +++ b/packages/server/src/api/controllers/public/utils.ts @@ -1,6 +1,6 @@ const { getAppDB } = require("@budibase/backend-core/context") import { isExternalTable } from "../../../integrations/utils" -import { 
APP_PREFIX, DocumentTypes } from "../../../db/utils" +import { APP_PREFIX, DocumentType } from "../../../db/utils" export async function addRev( body: { _id?: string; _rev?: string }, @@ -11,7 +11,7 @@ export async function addRev( } let id = body._id if (body._id.startsWith(APP_PREFIX)) { - id = DocumentTypes.APP_METADATA + id = DocumentType.APP_METADATA } const db = getAppDB() const dbDoc = await db.get(id) diff --git a/packages/server/src/api/controllers/query/import/sources/openapi2.ts b/packages/server/src/api/controllers/query/import/sources/openapi2.ts index e2dcec7613..230647475e 100644 --- a/packages/server/src/api/controllers/query/import/sources/openapi2.ts +++ b/packages/server/src/api/controllers/query/import/sources/openapi2.ts @@ -1,5 +1,5 @@ import { ImportInfo } from "./base" -import { Query, QueryParameter } from "../../../../../definitions/datasource" +import { Query, QueryParameter } from "@budibase/types" import { OpenAPIV2 } from "openapi-types" import { OpenAPISource } from "./base/openapi" import { URL } from "url" diff --git a/packages/server/src/api/controllers/query/import/sources/openapi3.ts b/packages/server/src/api/controllers/query/import/sources/openapi3.ts index f08f21e495..f86f684c32 100644 --- a/packages/server/src/api/controllers/query/import/sources/openapi3.ts +++ b/packages/server/src/api/controllers/query/import/sources/openapi3.ts @@ -1,5 +1,5 @@ import { ImportInfo } from "./base" -import { Query, QueryParameter } from "../../../../../definitions/datasource" +import { Query, QueryParameter } from "@budibase/types" import { OpenAPIV3 } from "openapi-types" import { OpenAPISource } from "./base/openapi" import { URL } from "url" diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 7983044f66..a1aecdb0f2 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ 
-1,18 +1,16 @@ import { - FilterTypes, - IncludeRelationships, + FilterType, + IncludeRelationship, Operation, PaginationJson, RelationshipsJson, SearchFilters, SortJson, -} from "../../../definitions/datasource" -import { Datasource, FieldSchema, Row, Table, -} from "../../../definitions/common" +} from "@budibase/types" import { breakRowIdField, generateRowIdField, @@ -128,7 +126,7 @@ module External { if ( typeof filter !== "object" || Object.keys(filter).length === 0 || - key === FilterTypes.ONE_OF + key === FilterType.ONE_OF ) { continue } @@ -634,7 +632,7 @@ module External { */ buildFields( table: Table, - includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE + includeRelations: IncludeRelationship = IncludeRelationship.INCLUDE ) { function extractRealFields(table: Table, existing: string[] = []) { return Object.entries(table.schema) diff --git a/packages/server/src/api/controllers/row/external.js b/packages/server/src/api/controllers/row/external.js index b1c322b8b6..c9f6aa2f78 100644 --- a/packages/server/src/api/controllers/row/external.js +++ b/packages/server/src/api/controllers/row/external.js @@ -128,25 +128,35 @@ exports.search = async ctx => { [params.sort]: direction, } } - const rows = await handleRequest(DataSourceOperation.READ, tableId, { - filters: query, - sort, - paginate: paginateObj, - }) - let hasNextPage = false - if (paginate && rows.length === limit) { - const nextRows = await handleRequest(DataSourceOperation.READ, tableId, { + try { + const rows = await handleRequest(DataSourceOperation.READ, tableId, { filters: query, sort, - paginate: { - limit: 1, - page: bookmark * limit + 1, - }, + paginate: paginateObj, }) - hasNextPage = nextRows.length > 0 + let hasNextPage = false + if (paginate && rows.length === limit) { + const nextRows = await handleRequest(DataSourceOperation.READ, tableId, { + filters: query, + sort, + paginate: { + limit: 1, + page: bookmark * limit + 1, + }, + }) + hasNextPage = nextRows.length > 0 + 
} + // need wrapper object for bookmarks etc when paginating + return { rows, hasNextPage, bookmark: bookmark + 1 } + } catch (err) { + if (err.message && err.message.includes("does not exist")) { + throw new Error( + `Table updated externally, please re-fetch - ${err.message}` + ) + } else { + throw err + } } - // need wrapper object for bookmarks etc when paginating - return { rows, hasNextPage, bookmark: bookmark + 1 } } exports.validate = async () => { diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js index b2b932a69a..086e1d9ce4 100644 --- a/packages/server/src/api/controllers/row/internal.js +++ b/packages/server/src/api/controllers/row/internal.js @@ -3,7 +3,7 @@ const { generateRowID, getRowParams, getTableIDFromRowID, - DocumentTypes, + DocumentType, InternalTables, } = require("../../../db/utils") const { dangerousGetDB } = require("@budibase/backend-core/db") @@ -183,7 +183,7 @@ exports.fetchView = async ctx => { const viewName = ctx.params.viewName // if this is a table view being looked for just transfer to that - if (viewName.startsWith(DocumentTypes.TABLE)) { + if (viewName.startsWith(DocumentType.TABLE)) { ctx.params.tableId = viewName return exports.fetch(ctx) } diff --git a/packages/server/src/api/controllers/row/internalSearch.js b/packages/server/src/api/controllers/row/internalSearch.js index 8a04fc2bd0..3cf60fbcc0 100644 --- a/packages/server/src/api/controllers/row/internalSearch.js +++ b/packages/server/src/api/controllers/row/internalSearch.js @@ -1,4 +1,5 @@ const { SearchIndexes } = require("../../../db/utils") +const { removeKeyNumbering } = require("./utils") const fetch = require("node-fetch") const { getCouchInfo } = require("@budibase/backend-core/db") const { getAppId } = require("@budibase/backend-core/context") @@ -20,6 +21,8 @@ class QueryBuilder { notEmpty: {}, oneOf: {}, contains: {}, + notContains: {}, + containsAny: {}, ...base, } this.limit = 50 @@ -125,6 
+128,16 @@ class QueryBuilder { return this } + addNotContains(key, value) { + this.query.notContains[key] = value + return this + } + + addContainsAny(key, value) { + this.query.containsAny[key] = value + return this + } + /** * Preprocesses a value before going into a lucene search. * Transforms strings to lowercase and wraps strings and bools in quotes. @@ -170,11 +183,29 @@ class QueryBuilder { return `${key}:${builder.preprocess(value, allPreProcessingOpts)}` } - const contains = (key, value) => { - if (!value && value !== 0) { + const contains = (key, value, mode = "AND") => { + if (Array.isArray(value) && value.length === 0) { return null } - return `${key}:${builder.preprocess(value, { escape: true })}` + if (!Array.isArray(value)) { + return `${key}:${value}` + } + let statement = `${builder.preprocess(value[0], { escape: true })}` + for (let i = 1; i < value.length; i++) { + statement += ` ${mode} ${builder.preprocess(value[i], { + escape: true, + })}` + } + return `${key}:(${statement})` + } + + const notContains = (key, value) => { + const allPrefix = allOr === "" ? 
"*:* AND" : "" + return allPrefix + "NOT " + contains(key, value) + } + + const containsAny = (key, value) => { + return contains(key, value, "OR") } const oneOf = (key, value) => { @@ -197,6 +228,8 @@ class QueryBuilder { function build(structure, queryFn) { for (let [key, value] of Object.entries(structure)) { + // check for new format - remove numbering if needed + key = removeKeyNumbering(key) key = builder.preprocess(key.replace(/ /g, "_"), { escape: true, }) @@ -275,6 +308,12 @@ class QueryBuilder { if (this.query.contains) { build(this.query.contains, contains) } + if (this.query.notContains) { + build(this.query.notContains, notContains) + } + if (this.query.containsAny) { + build(this.query.containsAny, containsAny) + } // make sure table ID is always added as an AND if (tableId) { query = `(${query})` @@ -310,6 +349,9 @@ class QueryBuilder { } } +// exported for unit testing +exports.QueryBuilder = QueryBuilder + /** * Executes a lucene search query. * @param url The query URL @@ -423,6 +465,7 @@ exports.paginatedSearch = async (query, params) => { // Try fetching 1 row in the next page to see if another page of results // exists or not const nextResults = await search + .setTable(params.tableId) .setBookmark(searchResults.bookmark) .setLimit(1) .run() diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js index 5da7ca331e..4c837e7630 100644 --- a/packages/server/src/api/controllers/row/utils.js +++ b/packages/server/src/api/controllers/row/utils.js @@ -3,8 +3,9 @@ const { cloneDeep } = require("lodash/fp") const { InternalTables } = require("../../../db/utils") const userController = require("../user") const { FieldTypes } = require("../../../constants") -const { makeExternalQuery } = require("../../../integrations/base/utils") const { getAppDB } = require("@budibase/backend-core/context") +const { makeExternalQuery } = require("../../../integrations/base/query") +const { removeKeyNumbering } = 
require("../../../integrations/base/utils") validateJs.extend(validateJs.validators.datetime, { parse: function (value) { @@ -16,6 +17,8 @@ validateJs.extend(validateJs.validators.datetime, { }, }) +exports.removeKeyNumbering = removeKeyNumbering + exports.getDatasourceAndQuery = async json => { const datasourceId = json.endpoint.datasourceId const db = getAppDB() diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.ts similarity index 85% rename from packages/server/src/api/controllers/static/index.js rename to packages/server/src/api/controllers/static/index.ts index 86bce89b4a..7aeea98adc 100644 --- a/packages/server/src/api/controllers/static/index.js +++ b/packages/server/src/api/controllers/static/index.ts @@ -14,18 +14,17 @@ const env = require("../../../environment") const { clientLibraryPath } = require("../../../utilities") const { upload } = require("../../../utilities/fileSystem") const { attachmentsRelativeURL } = require("../../../utilities") -const { DocumentTypes, isDevAppID } = require("../../../db/utils") +const { DocumentType } = require("../../../db/utils") const { getAppDB, getAppId } = require("@budibase/backend-core/context") const { setCookie, clearCookie } = require("@budibase/backend-core/utils") const AWS = require("aws-sdk") -const { events } = require("@budibase/backend-core") const fs = require("fs") const { downloadTarballDirect, } = require("../../../utilities/fileSystem/utilities") -async function prepareUpload({ s3Key, bucket, metadata, file }) { +async function prepareUpload({ s3Key, bucket, metadata, file }: any) { const response = await upload({ bucket, metadata, @@ -44,7 +43,7 @@ async function prepareUpload({ s3Key, bucket, metadata, file }) { } } -exports.toggleBetaUiFeature = async function (ctx) { +export const toggleBetaUiFeature = async function (ctx: any) { const cookieName = `beta:${ctx.params.feature}` if (ctx.cookies.get(cookieName)) { @@ -72,21 +71,18 @@ 
exports.toggleBetaUiFeature = async function (ctx) { } } -exports.serveBuilder = async function (ctx) { +export const serveBuilder = async function (ctx: any) { const builderPath = resolve(TOP_LEVEL_PATH, "builder") await send(ctx, ctx.file, { root: builderPath }) - if (!ctx.file.includes("assets/")) { - await events.serve.servedBuilder() - } } -exports.uploadFile = async function (ctx) { +export const uploadFile = async function (ctx: any) { let files = ctx.request.files.file.length > 1 ? Array.from(ctx.request.files.file) : [ctx.request.files.file] - const uploads = files.map(async file => { + const uploads = files.map(async (file: any) => { const fileExtension = [...file.name.split(".")].pop() // filenames converted to UUIDs so they are unique const processedFileName = `${uuid.v4()}.${fileExtension}` @@ -101,9 +97,9 @@ exports.uploadFile = async function (ctx) { ctx.body = await Promise.all(uploads) } -exports.serveApp = async function (ctx) { +export const serveApp = async function (ctx: any) { const db = getAppDB({ skip_setup: true }) - const appInfo = await db.get(DocumentTypes.APP_METADATA) + const appInfo = await db.get(DocumentType.APP_METADATA) let appId = getAppId() if (!env.isJest()) { @@ -126,21 +122,15 @@ exports.serveApp = async function (ctx) { // just return the app info for jest to assert on ctx.body = appInfo } - - if (isDevAppID(appInfo.appId)) { - await events.serve.servedAppPreview(appInfo) - } else { - await events.serve.servedApp(appInfo) - } } -exports.serveClientLibrary = async function (ctx) { +export const serveClientLibrary = async function (ctx: any) { return send(ctx, "budibase-client.js", { root: join(NODE_MODULES_PATH, "@budibase", "client", "dist"), }) } -exports.getSignedUploadURL = async function (ctx) { +export const getSignedUploadURL = async function (ctx: any) { const database = getAppDB() // Ensure datasource is valid diff --git a/packages/server/src/api/controllers/table/external.js 
b/packages/server/src/api/controllers/table/external.js index 34319c5bff..d919e9dad7 100644 --- a/packages/server/src/api/controllers/table/external.js +++ b/packages/server/src/api/controllers/table/external.js @@ -14,7 +14,7 @@ const { FieldTypes, RelationshipTypes, } = require("../../../constants") -const { makeExternalQuery } = require("../../../integrations/base/utils") +const { makeExternalQuery } = require("../../../integrations/base/query") const { cloneDeep } = require("lodash/fp") const csvParser = require("../../../utilities/csvParser") const { handleRequest } = require("../row/external") diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js index 5fc479adfb..b2c3a84c59 100644 --- a/packages/server/src/api/controllers/view/index.js +++ b/packages/server/src/api/controllers/view/index.js @@ -7,7 +7,7 @@ const { getTable } = require("../table/utils") const { FieldTypes } = require("../../../constants") const { getAppDB } = require("@budibase/backend-core/context") const { events } = require("@budibase/backend-core") -const { DocumentTypes } = require("../../../db/utils") +const { DocumentType } = require("../../../db/utils") const { cloneDeep, isEqual } = require("lodash") exports.fetch = async ctx => { @@ -181,7 +181,7 @@ exports.exportView = async ctx => { ctx.attachment(filename) ctx.body = apiFileReturn(exporter(headers, rows)) - if (viewName.startsWith(DocumentTypes.TABLE)) { + if (viewName.startsWith(DocumentType.TABLE)) { await events.table.exported(table, format) } else { await events.view.exported(table, format) diff --git a/packages/server/src/api/controllers/view/utils.js b/packages/server/src/api/controllers/view/utils.js index 5bddbf345c..7ce7d324e4 100644 --- a/packages/server/src/api/controllers/view/utils.js +++ b/packages/server/src/api/controllers/view/utils.js @@ -1,8 +1,8 @@ const { - ViewNames, + ViewName, generateMemoryViewID, getMemoryViewParams, - DocumentTypes, + 
DocumentType, SEPARATOR, } = require("../../../db/utils") const env = require("../../../environment") @@ -16,12 +16,21 @@ exports.getView = async viewName => { return designDoc.views[viewName] } else { // This is a table view, don't read the view from the DB - if (viewName.startsWith(DocumentTypes.TABLE + SEPARATOR)) { + if (viewName.startsWith(DocumentType.TABLE + SEPARATOR)) { return null } - const viewDoc = await db.get(generateMemoryViewID(viewName)) - return viewDoc.view + try { + const viewDoc = await db.get(generateMemoryViewID(viewName)) + return viewDoc.view + } catch (err) { + // Return null when PouchDB doesn't found the view + if (err.status === 404) { + return null + } + + throw err + } } } @@ -32,7 +41,7 @@ exports.getViews = async () => { const designDoc = await db.get("_design/database") for (let name of Object.keys(designDoc.views)) { // Only return custom views, not built ins - if (Object.values(ViewNames).indexOf(name) !== -1) { + if (Object.values(ViewName).indexOf(name) !== -1) { continue } response.push({ diff --git a/packages/server/src/api/routes/analytics.js b/packages/server/src/api/routes/analytics.js index 1f58ea04e7..d13ace12d1 100644 --- a/packages/server/src/api/routes/analytics.js +++ b/packages/server/src/api/routes/analytics.js @@ -4,5 +4,6 @@ const controller = require("../controllers/analytics") const router = Router() router.get("/api/bbtel", controller.isEnabled) +router.post("/api/bbtel/ping", controller.ping) module.exports = router diff --git a/packages/server/src/api/routes/tests/analytics.spec.js b/packages/server/src/api/routes/tests/analytics.spec.js new file mode 100644 index 0000000000..73d5810d7f --- /dev/null +++ b/packages/server/src/api/routes/tests/analytics.spec.js @@ -0,0 +1,59 @@ +const setup = require("./utilities") +const { events, constants, db } = require("@budibase/backend-core") + +describe("/static", () => { + let request = setup.getRequest() + let config = setup.getConfig() + let app + + const timezone 
= "Europe/London" + + afterAll(setup.afterAll) + + beforeEach(async () => { + app = await config.init() + jest.clearAllMocks() + }) + + describe("/ping", () => { + it("should ping from builder", async () => { + await request + .post("/api/bbtel/ping") + .send({source: "builder", timezone}) + .set(config.defaultHeaders()) + .expect(200) + + expect(events.serve.servedBuilder).toBeCalledTimes(1) + expect(events.serve.servedBuilder).toBeCalledWith(timezone) + expect(events.serve.servedApp).not.toBeCalled() + expect(events.serve.servedAppPreview).not.toBeCalled() + }) + + it("should ping from app preview", async () => { + await request + .post("/api/bbtel/ping") + .send({source: "app", timezone}) + .set(config.defaultHeaders()) + .expect(200) + + expect(events.serve.servedAppPreview).toBeCalledTimes(1) + expect(events.serve.servedAppPreview).toBeCalledWith(config.getApp(), timezone) + expect(events.serve.servedApp).not.toBeCalled() + }) + + it("should ping from app", async () => { + const headers = config.defaultHeaders() + headers[constants.Headers.APP_ID] = config.prodAppId + + await request + .post("/api/bbtel/ping") + .send({source: "app", timezone}) + .set(headers) + .expect(200) + + expect(events.serve.servedApp).toBeCalledTimes(1) + expect(events.serve.servedApp).toBeCalledWith(config.getProdApp(), timezone) + expect(events.serve.servedAppPreview).not.toBeCalled() + }) + }) +}) diff --git a/packages/server/src/api/routes/tests/internalSearch.spec.js b/packages/server/src/api/routes/tests/internalSearch.spec.js new file mode 100644 index 0000000000..3b478eda17 --- /dev/null +++ b/packages/server/src/api/routes/tests/internalSearch.spec.js @@ -0,0 +1,176 @@ +const search = require("../../controllers/row/internalSearch") +// this will be mocked out for _search endpoint +const fetch = require("node-fetch") +const PARAMS = { + tableId: "ta_12345679abcdef", + version: "1", + bookmark: null, + sort: null, + sortOrder: "ascending", + sortType: "string", +} + +function 
checkLucene(resp, expected, params = PARAMS) { + const query = resp.rows[0].query + const json = JSON.parse(query) + if (PARAMS.sort) { + expect(json.sort).toBe(`${PARAMS.sort}<${PARAMS.sortType}>`) + } + if (PARAMS.bookmark) { + expect(json.bookmark).toBe(PARAMS.bookmark) + } + expect(json.include_docs).toBe(true) + expect(json.q).toBe(`(${expected}) AND tableId:"${params.tableId}"`) + expect(json.limit).toBe(params.limit || 50) +} + +describe("internal search", () => { + it("default query", async () => { + const response = await search.paginatedSearch({ + }, PARAMS) + checkLucene(response, `*:*`) + }) + + it("test equal query", async () => { + const response = await search.paginatedSearch({ + equal: { + "column": "1", + } + }, PARAMS) + checkLucene(response, `*:* AND column:"1"`) + }) + + it("test notEqual query", async () => { + const response = await search.paginatedSearch({ + notEqual: { + "column": "1", + } + }, PARAMS) + checkLucene(response, `*:* AND !column:"1"`) + }) + + it("test OR query", async () => { + const response = await search.paginatedSearch({ + allOr: true, + equal: { + "column": "2", + }, + notEqual: { + "column": "1", + } + }, PARAMS) + checkLucene(response, `column:"2" OR !column:"1"`) + }) + + it("test AND query", async () => { + const response = await search.paginatedSearch({ + equal: { + "column": "2", + }, + notEqual: { + "column": "1", + } + }, PARAMS) + checkLucene(response, `*:* AND column:"2" AND !column:"1"`) + }) + + it("test pagination query", async () => { + const updatedParams = { + ...PARAMS, + limit: 100, + bookmark: "awd", + sort: "column", + } + const response = await search.paginatedSearch({ + string: { + "column": "2", + }, + }, updatedParams) + checkLucene(response, `*:* AND column:2*`, updatedParams) + }) + + it("test range query", async () => { + const response = await search.paginatedSearch({ + range: { + "column": { low: 1, high: 2 }, + }, + }, PARAMS) + checkLucene(response, `*:* AND column:[1 TO 2]`, PARAMS) + }) + 
+ it("test empty query", async () => { + const response = await search.paginatedSearch({ + empty: { + "column": "", + }, + }, PARAMS) + checkLucene(response, `*:* AND !column:["" TO *]`, PARAMS) + }) + + it("test notEmpty query", async () => { + const response = await search.paginatedSearch({ + notEmpty: { + "column": "", + }, + }, PARAMS) + checkLucene(response, `*:* AND column:["" TO *]`, PARAMS) + }) + + it("test oneOf query", async () => { + const response = await search.paginatedSearch({ + oneOf: { + "column": ["a", "b"], + }, + }, PARAMS) + checkLucene(response, `*:* AND column:("a" OR "b")`, PARAMS) + }) + + it("test contains query", async () => { + const response = await search.paginatedSearch({ + contains: { + "column": "a", + "colArr": [1, 2, 3], + }, + }, PARAMS) + checkLucene(response, `*:* AND column:a AND colArr:(1 AND 2 AND 3)`, PARAMS) + }) + + it("test multiple of same column", async () => { + const response = await search.paginatedSearch({ + allOr: true, + equal: { + "1:column": "a", + "2:column": "b", + "3:column": "c", + }, + }, PARAMS) + checkLucene(response, `column:"a" OR column:"b" OR column:"c"`, PARAMS) + }) + + it("check a weird case for lucene building", async () => { + const response = await search.paginatedSearch({ + equal: { + "1:1:column": "a", + }, + }, PARAMS) + checkLucene(response, `*:* AND 1\\:column:"a"`, PARAMS) + }) + + it("test containsAny query", async () => { + const response = await search.paginatedSearch({ + containsAny: { + "column": ["a", "b", "c"] + }, + }, PARAMS) + checkLucene(response, `*:* AND column:(a OR b OR c)`, PARAMS) + }) + + it("test notContains query", async () => { + const response = await search.paginatedSearch({ + notContains: { + "column": ["a", "b", "c"] + }, + }, PARAMS) + checkLucene(response, `*:* AND NOT column:(a AND b AND c)`, PARAMS) + }) +}) \ No newline at end of file diff --git a/packages/server/src/api/routes/tests/static.spec.js b/packages/server/src/api/routes/tests/static.spec.js index 
f0a1e6621c..37176f5cf5 100644 --- a/packages/server/src/api/routes/tests/static.spec.js +++ b/packages/server/src/api/routes/tests/static.spec.js @@ -36,7 +36,6 @@ describe("/static", () => { .expect(200) expect(res.text).toContain("Budibase") - expect(events.serve.servedBuilder).toBeCalledTimes(1) }) }) @@ -56,9 +55,6 @@ describe("/static", () => { .expect(200) expect(res.body.appId).toBe(config.prodAppId) - expect(events.serve.servedApp).toBeCalledTimes(1) - expect(events.serve.servedApp).toBeCalledWith(res.body) - expect(events.serve.servedAppPreview).not.toBeCalled() }) it("should serve the app by url", async () => { @@ -71,9 +67,6 @@ describe("/static", () => { .expect(200) expect(res.body.appId).toBe(config.prodAppId) - expect(events.serve.servedApp).toBeCalledTimes(1) - expect(events.serve.servedApp).toBeCalledWith(res.body) - expect(events.serve.servedAppPreview).not.toBeCalled() }) it("should serve the app preview by id", async () => { @@ -83,9 +76,6 @@ describe("/static", () => { .expect(200) expect(res.body.appId).toBe(config.appId) - expect(events.serve.servedAppPreview).toBeCalledTimes(1) - expect(events.serve.servedAppPreview).toBeCalledWith(res.body) - expect(events.serve.servedApp).not.toBeCalled() }) }) diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts index 32951cc47e..62301d57ca 100644 --- a/packages/server/src/app.ts +++ b/packages/server/src/app.ts @@ -15,6 +15,7 @@ const Sentry = require("@sentry/node") const fileSystem = require("./utilities/fileSystem") const bullboard = require("./automations/bullboard") const { logAlert } = require("@budibase/backend-core/logging") +const { pinoSettings } = require("@budibase/backend-core") const { Thread } = require("./threads") import redis from "./utilities/redis" import * as migrations from "./migrations" @@ -35,14 +36,7 @@ app.use( }) ) -app.use( - pino({ - prettyPrint: { - levelFirst: true, - }, - level: env.LOG_LEVEL || "error", - }) -) +app.use(pino(pinoSettings())) if 
(!env.isTest()) { const plugin = bullboard.init() diff --git a/packages/server/src/automations/logging/index.ts b/packages/server/src/automations/logging/index.ts index f3c149d5ea..80779f0846 100644 --- a/packages/server/src/automations/logging/index.ts +++ b/packages/server/src/automations/logging/index.ts @@ -23,6 +23,9 @@ export async function checkAppMetadata(apps: App[]) { for (let [key, errors] of Object.entries(metadata.automationErrors)) { const updated = [] for (let error of errors) { + if (!error) { + continue + } const startDate = error.split(dbUtils.SEPARATOR)[2] if (startDate > maxStartDate) { updated.push(error) diff --git a/packages/server/src/automations/steps/queryRows.js b/packages/server/src/automations/steps/queryRows.js index 58e7313dd2..b02f31b1ec 100644 --- a/packages/server/src/automations/steps/queryRows.js +++ b/packages/server/src/automations/steps/queryRows.js @@ -14,6 +14,16 @@ const SortOrdersPretty = { [SortOrders.DESCENDING]: "Descending", } +const EmptyFilterOptions = { + RETURN_ALL: "all", + RETURN_NONE: "none", +} + +const EmptyFilterOptionsPretty = { + [EmptyFilterOptions.RETURN_ALL]: "Return all table rows", + [EmptyFilterOptions.RETURN_NONE]: "Return no rows", +} + exports.definition = { description: "Query rows from the database", icon: "Search", @@ -52,6 +62,12 @@ exports.definition = { title: "Limit", customType: "queryLimit", }, + onEmptyFilter: { + pretty: Object.values(EmptyFilterOptionsPretty), + enum: Object.values(EmptyFilterOptions), + type: "string", + title: "When Filter Empty", + }, }, required: ["tableId"], }, @@ -103,6 +119,10 @@ function typeCoercion(filters, table) { return filters } +const hasNullFilters = filters => + filters.length === 0 || + filters.some(filter => filter.value === null || filter.value === "") + exports.run = async function ({ inputs, appId }) { const { tableId, filters, sortColumn, sortOrder, limit } = inputs const table = await getTable(appId, tableId) @@ -127,9 +147,21 @@ exports.run = 
async function ({ inputs, appId }) { version: "1", }) try { - await rowController.search(ctx) + let rows + + if ( + inputs.onEmptyFilter === EmptyFilterOptions.RETURN_NONE && + inputs["filters-def"] && + hasNullFilters(inputs["filters-def"]) + ) { + rows = [] + } else { + await rowController.search(ctx) + rows = ctx.body ? ctx.body.rows : [] + } + return { - rows: ctx.body ? ctx.body.rows : [], + rows, success: ctx.status === 200, } } catch (err) { diff --git a/packages/server/src/automations/tests/queryRows.spec.js b/packages/server/src/automations/tests/queryRows.spec.js index ec966302a8..1ce7460806 100644 --- a/packages/server/src/automations/tests/queryRows.spec.js +++ b/packages/server/src/automations/tests/queryRows.spec.js @@ -16,7 +16,7 @@ describe("Test a query step automation", () => { let table let config = setup.getConfig() - beforeEach(async () => { + beforeAll(async () => { await config.init() table = await config.createTable() const row = { @@ -48,4 +48,70 @@ describe("Test a query step automation", () => { expect(res.rows.length).toBe(2) expect(res.rows[0].name).toBe(NAME) }) + + it("Returns all rows when onEmptyFilter has no value and no filters are passed", async () => { + const inputs = { + tableId: table._id, + filters: {}, + sortColumn: "name", + sortOrder: "ascending", + limit: 10, + } + const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs) + expect(res.success).toBe(true) + expect(res.rows).toBeDefined() + expect(res.rows.length).toBe(2) + expect(res.rows[0].name).toBe(NAME) + }) + + it("Returns no rows when onEmptyFilter is RETURN_NONE and theres no filters", async () => { + const inputs = { + tableId: table._id, + filters: {}, + "filters-def": [], + sortColumn: "name", + sortOrder: "ascending", + limit: 10, + onEmptyFilter: "none", + } + const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs) + expect(res.success).toBe(false) + expect(res.rows).toBeDefined() + expect(res.rows.length).toBe(0) + }) + + 
it("Returns no rows when onEmptyFilters RETURN_NONE and a filter is passed with a null value", async () => { + const inputs = { + tableId: table._id, + onEmptyFilter: "none", + filters: {}, + "filters-def": [ + { + value: null + } + ], + sortColumn: "name", + sortOrder: "ascending", + limit: 10, + } + const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs) + expect(res.success).toBe(false) + expect(res.rows).toBeDefined() + expect(res.rows.length).toBe(0) + }) + + it("Returns rows when onEmptyFilter is RETURN_ALL and no filter is passed", async () => { + const inputs = { + tableId: table._id, + onEmptyFilter: "all", + filters: {}, + sortColumn: "name", + sortOrder: "ascending", + limit: 10, + } + const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs) + expect(res.success).toBe(true) + expect(res.rows).toBeDefined() + expect(res.rows.length).toBe(2) + }) }) diff --git a/packages/server/src/db/linkedRows/linkUtils.js b/packages/server/src/db/linkedRows/linkUtils.js index 5af4aa919a..5fc393ecf0 100644 --- a/packages/server/src/db/linkedRows/linkUtils.js +++ b/packages/server/src/db/linkedRows/linkUtils.js @@ -1,5 +1,5 @@ const Sentry = require("@sentry/node") -const { ViewNames, getQueryIndex } = require("../utils") +const { ViewName, getQueryIndex } = require("../utils") const { FieldTypes } = require("../../constants") const { createLinkView } = require("../views/staticViews") const { getAppDB } = require("@budibase/backend-core/context") @@ -41,7 +41,7 @@ exports.getLinkDocuments = async function (args) { } params.include_docs = !!includeDocs try { - let linkRows = (await db.query(getQueryIndex(ViewNames.LINK), params)).rows + let linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows // filter to get unique entries const foundIds = [] linkRows = linkRows.filter(link => { diff --git a/packages/server/src/db/utils.js b/packages/server/src/db/utils.js index 8372040723..d23152a663 100644 --- 
a/packages/server/src/db/utils.js +++ b/packages/server/src/db/utils.js @@ -1,6 +1,6 @@ const newid = require("./newid") const { - DocumentTypes: CoreDocTypes, + DocumentType: CoreDocTypes, getRoleParams, generateRoleID, APP_DEV_PREFIX, @@ -12,7 +12,7 @@ const { getDevelopmentAppID, generateAppID, getQueryIndex, - ViewNames, + ViewName, } = require("@budibase/backend-core/db") const UNICODE_MAX = "\ufff0" @@ -23,7 +23,7 @@ const AppStatus = { DEPLOYED: "published", } -const DocumentTypes = { +const DocumentType = { ...CoreDocTypes, TABLE: "ta", ROW: "ro", @@ -66,12 +66,12 @@ exports.APP_PREFIX = APP_PREFIX exports.APP_DEV_PREFIX = APP_DEV_PREFIX exports.isDevAppID = isDevAppID exports.isProdAppID = isProdAppID -exports.USER_METDATA_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}` -exports.LINK_USER_METADATA_PREFIX = `${DocumentTypes.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}` -exports.TABLE_ROW_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${DocumentTypes.TABLE}` -exports.ViewNames = ViewNames +exports.USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}` +exports.LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}` +exports.TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}` +exports.ViewName = ViewName exports.InternalTables = InternalTables -exports.DocumentTypes = DocumentTypes +exports.DocumentType = DocumentType exports.SEPARATOR = SEPARATOR exports.UNICODE_MAX = UNICODE_MAX exports.SearchIndexes = SearchIndexes @@ -114,7 +114,7 @@ exports.getDocParams = getDocParams * Gets parameters for retrieving tables, this is a utility function for the getDocParams function. 
*/ exports.getTableParams = (tableId = null, otherProps = {}) => { - return getDocParams(DocumentTypes.TABLE, tableId, otherProps) + return getDocParams(DocumentType.TABLE, tableId, otherProps) } /** @@ -122,7 +122,7 @@ exports.getTableParams = (tableId = null, otherProps = {}) => { * @returns {string} The new table ID which the table doc can be stored under. */ exports.generateTableID = () => { - return `${DocumentTypes.TABLE}${SEPARATOR}${newid()}` + return `${DocumentType.TABLE}${SEPARATOR}${newid()}` } /** @@ -135,12 +135,12 @@ exports.generateTableID = () => { */ exports.getRowParams = (tableId = null, rowId = null, otherProps = {}) => { if (tableId == null) { - return getDocParams(DocumentTypes.ROW, null, otherProps) + return getDocParams(DocumentType.ROW, null, otherProps) } const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId - return getDocParams(DocumentTypes.ROW, endOfKey, otherProps) + return getDocParams(DocumentType.ROW, endOfKey, otherProps) } /** @@ -150,9 +150,9 @@ exports.getRowParams = (tableId = null, rowId = null, otherProps = {}) => { */ exports.getTableIDFromRowID = rowId => { const components = rowId - .split(DocumentTypes.TABLE + SEPARATOR)[1] + .split(DocumentType.TABLE + SEPARATOR)[1] .split(SEPARATOR) - return `${DocumentTypes.TABLE}${SEPARATOR}${components[0]}` + return `${DocumentType.TABLE}${SEPARATOR}${components[0]}` } /** @@ -163,7 +163,7 @@ exports.getTableIDFromRowID = rowId => { */ exports.generateRowID = (tableId, id = null) => { id = id || newid() - return `${DocumentTypes.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}` + return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}` } /** @@ -186,7 +186,7 @@ exports.generateUserMetadataID = globalId => { * Breaks up the ID to get the global ID. 
*/ exports.getGlobalIDFromUserMetadataID = id => { - const prefix = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}` + const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}` if (!id || !id.includes(prefix)) { return id } @@ -197,7 +197,7 @@ exports.getGlobalIDFromUserMetadataID = id => { * Gets parameters for retrieving automations, this is a utility function for the getDocParams function. */ exports.getAutomationParams = (automationId = null, otherProps = {}) => { - return getDocParams(DocumentTypes.AUTOMATION, automationId, otherProps) + return getDocParams(DocumentType.AUTOMATION, automationId, otherProps) } /** @@ -205,7 +205,7 @@ exports.getAutomationParams = (automationId = null, otherProps = {}) => { * @returns {string} The new automation ID which the automation doc can be stored under. */ exports.generateAutomationID = () => { - return `${DocumentTypes.AUTOMATION}${SEPARATOR}${newid()}` + return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}` } /** @@ -230,14 +230,14 @@ exports.generateLinkID = ( const tables = `${SEPARATOR}${tableId1}${SEPARATOR}${tableId2}` const rows = `${SEPARATOR}${rowId1}${SEPARATOR}${rowId2}` const fields = `${SEPARATOR}${fieldName1}${SEPARATOR}${fieldName2}` - return `${DocumentTypes.LINK}${tables}${rows}${fields}` + return `${DocumentType.LINK}${tables}${rows}${fields}` } /** * Gets parameters for retrieving link docs, this is a utility function for the getDocParams function. */ exports.getLinkParams = (otherProps = {}) => { - return getDocParams(DocumentTypes.LINK, null, otherProps) + return getDocParams(DocumentType.LINK, null, otherProps) } /** @@ -245,14 +245,14 @@ exports.getLinkParams = (otherProps = {}) => { * @returns {string} The new layout ID which the layout doc can be stored under. 
*/ exports.generateLayoutID = id => { - return `${DocumentTypes.LAYOUT}${SEPARATOR}${id || newid()}` + return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}` } /** * Gets parameters for retrieving layout, this is a utility function for the getDocParams function. */ exports.getLayoutParams = (layoutId = null, otherProps = {}) => { - return getDocParams(DocumentTypes.LAYOUT, layoutId, otherProps) + return getDocParams(DocumentType.LAYOUT, layoutId, otherProps) } /** @@ -260,14 +260,14 @@ exports.getLayoutParams = (layoutId = null, otherProps = {}) => { * @returns {string} The new screen ID which the screen doc can be stored under. */ exports.generateScreenID = () => { - return `${DocumentTypes.SCREEN}${SEPARATOR}${newid()}` + return `${DocumentType.SCREEN}${SEPARATOR}${newid()}` } /** * Gets parameters for retrieving screens, this is a utility function for the getDocParams function. */ exports.getScreenParams = (screenId = null, otherProps = {}) => { - return getDocParams(DocumentTypes.SCREEN, screenId, otherProps) + return getDocParams(DocumentType.SCREEN, screenId, otherProps) } /** @@ -275,14 +275,14 @@ exports.getScreenParams = (screenId = null, otherProps = {}) => { * @returns {string} The new webhook ID which the webhook doc can be stored under. */ exports.generateWebhookID = () => { - return `${DocumentTypes.WEBHOOK}${SEPARATOR}${newid()}` + return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}` } /** * Gets parameters for retrieving a webhook, this is a utility function for the getDocParams function. */ exports.getWebhookParams = (webhookId = null, otherProps = {}) => { - return getDocParams(DocumentTypes.WEBHOOK, webhookId, otherProps) + return getDocParams(DocumentType.WEBHOOK, webhookId, otherProps) } /** @@ -291,7 +291,7 @@ exports.getWebhookParams = (webhookId = null, otherProps = {}) => { */ exports.generateDatasourceID = ({ plus = false } = {}) => { return `${ - plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE + plus ? 
DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE }${SEPARATOR}${newid()}` } @@ -299,7 +299,7 @@ exports.generateDatasourceID = ({ plus = false } = {}) => { * Gets parameters for retrieving a datasource, this is a utility function for the getDocParams function. */ exports.getDatasourceParams = (datasourceId = null, otherProps = {}) => { - return getDocParams(DocumentTypes.DATASOURCE, datasourceId, otherProps) + return getDocParams(DocumentType.DATASOURCE, datasourceId, otherProps) } /** @@ -308,7 +308,7 @@ exports.getDatasourceParams = (datasourceId = null, otherProps = {}) => { */ exports.generateQueryID = datasourceId => { return `${ - DocumentTypes.QUERY + DocumentType.QUERY }${SEPARATOR}${datasourceId}${SEPARATOR}${newid()}` } @@ -317,14 +317,14 @@ exports.generateQueryID = datasourceId => { * automations etc. */ exports.generateAutomationMetadataID = automationId => { - return `${DocumentTypes.AUTOMATION_METADATA}${SEPARATOR}${automationId}` + return `${DocumentType.AUTOMATION_METADATA}${SEPARATOR}${automationId}` } /** * Retrieve all automation metadata in an app database. */ exports.getAutomationMetadataParams = (otherProps = {}) => { - return getDocParams(DocumentTypes.AUTOMATION_METADATA, null, otherProps) + return getDocParams(DocumentType.AUTOMATION_METADATA, null, otherProps) } /** @@ -332,11 +332,11 @@ exports.getAutomationMetadataParams = (otherProps = {}) => { */ exports.getQueryParams = (datasourceId = null, otherProps = {}) => { if (datasourceId == null) { - return getDocParams(DocumentTypes.QUERY, null, otherProps) + return getDocParams(DocumentType.QUERY, null, otherProps) } return getDocParams( - DocumentTypes.QUERY, + DocumentType.QUERY, `${datasourceId}${SEPARATOR}`, otherProps ) @@ -347,11 +347,11 @@ exports.getQueryParams = (datasourceId = null, otherProps = {}) => { * @returns {string} The ID of the flag document that was generated. 
*/ exports.generateUserFlagID = userId => { - return `${DocumentTypes.USER_FLAG}${SEPARATOR}${userId}` + return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}` } exports.generateMetadataID = (type, entityId) => { - return `${DocumentTypes.METADATA}${SEPARATOR}${type}${SEPARATOR}${entityId}` + return `${DocumentType.METADATA}${SEPARATOR}${type}${SEPARATOR}${entityId}` } exports.getMetadataParams = (type, entityId = null, otherProps = {}) => { @@ -359,15 +359,15 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => { if (entityId != null) { docId += entityId } - return getDocParams(DocumentTypes.METADATA, docId, otherProps) + return getDocParams(DocumentType.METADATA, docId, otherProps) } exports.generateMemoryViewID = viewName => { - return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}` + return `${DocumentType.MEM_VIEW}${SEPARATOR}${viewName}` } exports.getMemoryViewParams = (otherProps = {}) => { - return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps) + return getDocParams(DocumentType.MEM_VIEW, null, otherProps) } /** diff --git a/packages/server/src/db/views/staticViews.js b/packages/server/src/db/views/staticViews.js index 5cfae746df..d715c02968 100644 --- a/packages/server/src/db/views/staticViews.js +++ b/packages/server/src/db/views/staticViews.js @@ -1,11 +1,6 @@ const { getAppDB } = require("@budibase/backend-core/context") -const { - DocumentTypes, - SEPARATOR, - ViewNames, - SearchIndexes, -} = require("../utils") -const SCREEN_PREFIX = DocumentTypes.SCREEN + SEPARATOR +const { DocumentType, SEPARATOR, ViewName, SearchIndexes } = require("../utils") +const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR /************************************************** * INFORMATION * @@ -53,7 +48,7 @@ exports.createLinkView = async () => { } designDoc.views = { ...designDoc.views, - [ViewNames.LINK]: view, + [ViewName.LINK]: view, } await db.put(designDoc) } @@ -74,7 +69,7 @@ exports.createRoutingView = async () => { } designDoc.views 
= { ...designDoc.views, - [ViewNames.ROUTING]: view, + [ViewName.ROUTING]: view, } await db.put(designDoc) } diff --git a/packages/server/src/definitions/automations.ts b/packages/server/src/definitions/automations.ts index e1ac690bf1..ed1455c049 100644 --- a/packages/server/src/definitions/automations.ts +++ b/packages/server/src/definitions/automations.ts @@ -5,14 +5,14 @@ import { Document, } from "@budibase/types" -export enum LoopStepTypes { +export enum LoopStepType { ARRAY = "Array", STRING = "String", } export interface LoopStep extends AutomationStep { inputs: { - option: LoopStepTypes + option: LoopStepType [key: string]: any } } diff --git a/packages/server/src/definitions/common.ts b/packages/server/src/definitions/common.ts index f7e5a9a049..33eefefdc8 100644 --- a/packages/server/src/definitions/common.ts +++ b/packages/server/src/definitions/common.ts @@ -1,70 +1,19 @@ -export { Query, Datasource } from "./datasource" +import { Document } from "@budibase/types" +export { + Query, + Datasource, + FieldSchema, + TableSchema, + Table, + Document, + Row, +} from "@budibase/types" -export interface Base { - _id?: string - _rev?: string -} - -export interface Application extends Base { +export interface Application extends Document { + _id: string appId?: string } -export interface FieldSchema { - // TODO: replace with field types enum when done - type: string - externalType?: string - fieldName?: string - name: string - tableId?: string - relationshipType?: string - through?: string - foreignKey?: string - autocolumn?: boolean - subtype?: string - throughFrom?: string - throughTo?: string - formula?: string - formulaType?: string - main?: boolean - ignoreTimezones?: boolean - meta?: { - toTable: string - toKey: string - } - constraints?: { - type?: string - email?: boolean - inclusion?: string[] - length?: { - minimum?: string | number - maximum?: string | number - } - presence?: boolean - } -} - -export interface TableSchema { - [key: string]: 
FieldSchema -} - -export interface Table extends Base { - type?: string - views?: {} - name: string - primary?: string[] - schema: TableSchema - primaryDisplay?: string - sourceId?: string - relatedFormula?: string[] - constrained?: string[] -} - -export interface Row extends Base { - type?: string - tableId?: string - [key: string]: any -} - interface JsonSchemaField { properties: { [key: string]: { @@ -94,7 +43,7 @@ export interface AutomationStep { type: string } -export interface Automation extends Base { +export interface Automation extends Document { name: string type: string appId?: string diff --git a/packages/server/src/definitions/datasource.ts b/packages/server/src/definitions/datasource.ts index 9752fc947a..f44706c696 100644 --- a/packages/server/src/definitions/datasource.ts +++ b/packages/server/src/definitions/datasource.ts @@ -1,214 +1,13 @@ -import { Row, Table, Base } from "./common" - -export enum Operation { - CREATE = "CREATE", - READ = "READ", - UPDATE = "UPDATE", - DELETE = "DELETE", - BULK_CREATE = "BULK_CREATE", - CREATE_TABLE = "CREATE_TABLE", - UPDATE_TABLE = "UPDATE_TABLE", - DELETE_TABLE = "DELETE_TABLE", -} - -export enum SortDirection { - ASCENDING = "ASCENDING", - DESCENDING = "DESCENDING", -} - -export enum QueryTypes { - SQL = "sql", - JSON = "json", - FIELDS = "fields", -} - -export enum DatasourceFieldTypes { - STRING = "string", - LONGFORM = "longForm", - BOOLEAN = "boolean", - NUMBER = "number", - PASSWORD = "password", - LIST = "list", - OBJECT = "object", - JSON = "json", - FILE = "file", -} - -export enum SourceNames { - POSTGRES = "POSTGRES", - DYNAMODB = "DYNAMODB", - MONGODB = "MONGODB", - ELASTICSEARCH = "ELASTICSEARCH", - COUCHDB = "COUCHDB", - SQL_SERVER = "SQL_SERVER", - S3 = "S3", - AIRTABLE = "AIRTABLE", - MYSQL = "MYSQL", - ARANGODB = "ARANGODB", - REST = "REST", - ORACLE = "ORACLE", - GOOGLE_SHEETS = "GOOGLE_SHEETS", - FIRESTORE = "FIRESTORE", - REDIS = "REDIS", - SNOWFLAKE = "SNOWFLAKE", -} - -export enum 
IncludeRelationships { - INCLUDE = 1, - EXCLUDE = 0, -} - -export enum FilterTypes { - STRING = "string", - FUZZY = "fuzzy", - RANGE = "range", - EQUAL = "equal", - NOT_EQUAL = "notEqual", - EMPTY = "empty", - NOT_EMPTY = "notEmpty", - ONE_OF = "oneOf", -} - -export interface QueryDefinition { - type: QueryTypes - displayName?: string - readable?: boolean - customisable?: boolean - fields?: object - urlDisplay?: boolean -} - -export interface ExtraQueryConfig { - [key: string]: { - displayName: string - type: string - required: boolean - data?: object - } -} - -export interface Integration { - docs: string - plus?: boolean - auth?: { type: string } - relationships?: boolean - description: string - friendlyName: string - type?: string - datasource: {} - query: { - [key: string]: QueryDefinition - } - extra?: ExtraQueryConfig -} - -export interface SearchFilters { - allOr?: boolean - string?: { - [key: string]: string - } - fuzzy?: { - [key: string]: string - } - range?: { - [key: string]: { - high: number | string - low: number | string - } - } - equal?: { - [key: string]: any - } - notEqual?: { - [key: string]: any - } - empty?: { - [key: string]: any - } - notEmpty?: { - [key: string]: any - } - oneOf?: { - [key: string]: any[] - } - contains?: { - [key: string]: any - } -} - -export interface SortJson { - [key: string]: SortDirection -} - -export interface PaginationJson { - limit: number - page?: string | number -} - -export interface RenameColumn { - old: string - updated: string -} - -export interface RelationshipsJson { - through?: string - from?: string - to?: string - fromPrimary?: string - toPrimary?: string - tableName: string - column: string -} - -export interface QueryJson { - endpoint: { - datasourceId: string - entityId: string - operation: Operation - schema?: string - } - resource: { - fields: string[] - } - filters?: SearchFilters - sort?: SortJson - paginate?: PaginationJson - body?: Row | Row[] - table?: Table - meta?: { - table?: Table - 
tables?: Record - renamed: RenameColumn - } - extra?: { - idFilter?: SearchFilters - } - relationships?: RelationshipsJson[] -} - -export interface SqlQuery { - sql: string - bindings?: string[] -} +/******************************************** + * This file contains structures which are * + * internal to the server and don't need to * + * be exposed for use by other services. * + ********************************************/ export interface QueryOptions { disableReturning?: boolean } -export interface Datasource extends Base { - type: string - name: string - source: SourceNames - // the config is defined by the schema - config: { - [key: string]: string | number | boolean - } - plus: boolean - entities?: { - [key: string]: Table - } -} - export enum AuthType { BASIC = "basic", BEARER = "bearer", @@ -230,25 +29,6 @@ export interface BearerAuthConfig { token: string } -export interface QueryParameter { - name: string - default: string -} - -export interface RestQueryFields { - path: string - queryString?: string - headers: { [key: string]: any } - disabledHeaders: { [key: string]: any } - requestBody: any - bodyType: string - json: object - method: string - authConfigId: string - pagination: PaginationConfig | null - paginationValues: PaginationValues | null -} - export interface RestConfig { url: string defaultHeaders: { @@ -266,28 +46,3 @@ export interface RestConfig { } ] } - -export interface PaginationConfig { - type: string - location: string - pageParam: string - sizeParam: string | null - responseParam: string | null -} - -export interface PaginationValues { - page: string | number | null - limit: number | null -} - -export interface Query { - _id?: string - datasourceId: string - name: string - parameters: QueryParameter[] - fields: RestQueryFields | any - transformer: string | null - schema: any - readable: boolean - queryVerb: string -} diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js index 99d099f8d5..c2e2815e00 
100644 --- a/packages/server/src/environment.js +++ b/packages/server/src/environment.js @@ -63,6 +63,7 @@ module.exports = { DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL, TEMPLATE_REPOSITORY: process.env.TEMPLATE_REPOSITORY || "app", DISABLE_AUTO_PROD_APP_SYNC: process.env.DISABLE_AUTO_PROD_APP_SYNC, + SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD, // minor SALT_ROUNDS: process.env.SALT_ROUNDS, LOGGER: process.env.LOGGER, diff --git a/packages/server/src/integrations/airtable.ts b/packages/server/src/integrations/airtable.ts index ec49bf5e40..e4c941b21c 100644 --- a/packages/server/src/integrations/airtable.ts +++ b/packages/server/src/integrations/airtable.ts @@ -1,9 +1,9 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, -} from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" + DatasourceFieldType, + QueryType, + IntegrationBase, +} from "@budibase/types" module AirtableModule { const Airtable = require("airtable") @@ -21,56 +21,61 @@ module AirtableModule { type: "Spreadsheet", datasource: { apiKey: { - type: DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, default: "enter api key", required: true, }, base: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "mybase", required: true, }, }, query: { create: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, read: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, view: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, numRecords: { - type: DatasourceFieldTypes.NUMBER, + type: DatasourceFieldType.NUMBER, default: 10, }, }, }, update: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, 
customisable: true, fields: { id: { - type: DatasourceFieldTypes.STRING, + display: "Record ID", + type: DatasourceFieldType.STRING, + required: true, + }, + table: { + type: DatasourceFieldType.STRING, required: true, }, }, }, delete: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, }, } diff --git a/packages/server/src/integrations/arangodb.ts b/packages/server/src/integrations/arangodb.ts index e6e2c0db35..968197474a 100644 --- a/packages/server/src/integrations/arangodb.ts +++ b/packages/server/src/integrations/arangodb.ts @@ -1,9 +1,9 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, -} from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" + DatasourceFieldType, + QueryType, + IntegrationBase, +} from "@budibase/types" module ArangoModule { const { Database, aql } = require("arangojs") @@ -24,35 +24,35 @@ module ArangoModule { "ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. 
", datasource: { url: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "http://localhost:8529", required: true, }, username: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "root", required: true, }, password: { - type: DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, required: true, }, databaseName: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "_system", required: true, }, collection: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, query: { read: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, create: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, }, } diff --git a/packages/server/src/integrations/base/IntegrationBase.ts b/packages/server/src/integrations/base/IntegrationBase.ts deleted file mode 100644 index bfda4fbd4d..0000000000 --- a/packages/server/src/integrations/base/IntegrationBase.ts +++ /dev/null @@ -1,6 +0,0 @@ -export interface IntegrationBase { - create?(query: any): Promise - read?(query: any): Promise - update?(query: any): Promise - delete?(query: any): Promise -} diff --git a/packages/server/src/integrations/base/datasourcePlus.ts b/packages/server/src/integrations/base/datasourcePlus.ts deleted file mode 100644 index f55dcf0f9a..0000000000 --- a/packages/server/src/integrations/base/datasourcePlus.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Table } from "../../definitions/common" -import { IntegrationBase } from "./IntegrationBase" - -export interface DatasourcePlus extends IntegrationBase { - tables: Record - schemaErrors: Record - - // if the datasource supports the use of bindings directly (to protect against SQL injection) - // this returns the format of the identifier - getBindingIdentifier(): string - getStringConcat(parts: string[]): string - buildSchema(datasourceId: string, entities: Record): any -} diff --git 
a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts new file mode 100644 index 0000000000..1f3ed3dd74 --- /dev/null +++ b/packages/server/src/integrations/base/query.ts @@ -0,0 +1,16 @@ +import { QueryJson, Datasource } from "@budibase/types" +const { integrations } = require("../index") + +export async function makeExternalQuery( + datasource: Datasource, + json: QueryJson +) { + const Integration = integrations[datasource.source] + // query is the opinionated function + if (Integration.prototype.query) { + const integration = new Integration(datasource.config) + return integration.query(json) + } else { + throw "Datasource does not support query." + } +} diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 750564c6ff..f18e9d1d98 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -2,14 +2,15 @@ import { Knex, knex } from "knex" import { Operation, QueryJson, - QueryOptions, RelationshipsJson, SearchFilters, SortDirection, -} from "../../definitions/datasource" -import { isIsoDateString, SqlClients } from "../utils" +} from "@budibase/types" +import { QueryOptions } from "../../definitions/datasource" +import { isIsoDateString, SqlClient } from "../utils" import SqlTableQueryBuilder from "./sqlTable" import environment from "../../environment" +import { removeKeyNumbering } from "./utils" const envLimit = environment.SQL_MAX_ROWS ? 
parseInt(environment.SQL_MAX_ROWS) @@ -27,14 +28,14 @@ function likeKey(client: string, key: string): string { } let start: string, end: string switch (client) { - case SqlClients.MY_SQL: + case SqlClient.MY_SQL: start = end = "`" break - case SqlClients.ORACLE: - case SqlClients.POSTGRES: + case SqlClient.ORACLE: + case SqlClient.POSTGRES: start = end = '"' break - case SqlClients.MS_SQL: + case SqlClient.MS_SQL: start = "[" end = "]" break @@ -102,7 +103,7 @@ function generateSelectStatement( if ( columnName && schema?.[columnName] && - knex.client.config.client === SqlClients.POSTGRES + knex.client.config.client === SqlClient.POSTGRES ) { const externalType = schema[columnName].externalType if (externalType?.includes("money")) { @@ -133,12 +134,13 @@ class InternalBuilder { fn: (key: string, value: any) => void ) { for (let [key, value] of Object.entries(structure)) { - const isRelationshipField = key.includes(".") + const updatedKey = removeKeyNumbering(key) + const isRelationshipField = updatedKey.includes(".") if (!opts.relationship && !isRelationshipField) { - fn(`${opts.tableName}.${key}`, value) + fn(`${opts.tableName}.${updatedKey}`, value) } if (opts.relationship && isRelationshipField) { - fn(key, value) + fn(updatedKey, value) } } } @@ -146,7 +148,7 @@ class InternalBuilder { const like = (key: string, value: any) => { const fnc = allOr ? "orWhere" : "where" // postgres supports ilike, nothing else does - if (this.client === SqlClients.POSTGRES) { + if (this.client === SqlClient.POSTGRES) { query = query[fnc](key, "ilike", `%${value}%`) } else { const rawFnc = `${fnc}Raw` @@ -157,6 +159,61 @@ class InternalBuilder { } } + const contains = (mode: object, any: boolean = false) => { + const fnc = allOr ? "orWhere" : "where" + const rawFnc = `${fnc}Raw` + const not = mode === filters?.notContains ? 
"NOT " : "" + function stringifyArray(value: Array, quoteStyle = '"'): string { + for (let i in value) { + if (typeof value[i] === "string") { + value[i] = `${quoteStyle}${value[i]}${quoteStyle}` + } + } + return `[${value.join(",")}]` + } + if (this.client === SqlClient.POSTGRES) { + iterate(mode, (key: string, value: Array) => { + const wrap = any ? "" : "'" + const containsOp = any ? "\\?| array" : "@>" + const fieldNames = key.split(/\./g) + const tableName = fieldNames[0] + const columnName = fieldNames[1] + // @ts-ignore + query = query[rawFnc]( + `${not}"${tableName}"."${columnName}"::jsonb ${containsOp} ${wrap}${stringifyArray( + value, + any ? "'" : '"' + )}${wrap}` + ) + }) + } else if (this.client === SqlClient.MY_SQL) { + const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS" + iterate(mode, (key: string, value: Array) => { + // @ts-ignore + query = query[rawFnc]( + `${not}${jsonFnc}(${key}, '${stringifyArray(value)}')` + ) + }) + } else { + const andOr = mode === filters?.containsAny ? " OR " : " AND " + iterate(mode, (key: string, value: Array) => { + let statement = "" + for (let i in value) { + if (typeof value[i] === "string") { + value[i] = `%"${value[i]}"%` + } else { + value[i] = `%${value[i]}%` + } + statement += + (statement ? andOr : "") + + `LOWER(${likeKey(this.client, key)}) LIKE ?` + } + // @ts-ignore + query = query[rawFnc](`${not}(${statement})`, value) + }) + } + } + if (!filters) { return query } @@ -173,7 +230,7 @@ class InternalBuilder { iterate(filters.string, (key, value) => { const fnc = allOr ? "orWhere" : "where" // postgres supports ilike, nothing else does - if (this.client === SqlClients.POSTGRES) { + if (this.client === SqlClient.POSTGRES) { query = query[fnc](key, "ilike", `${value}%`) } else { const rawFnc = `${fnc}Raw` @@ -227,32 +284,13 @@ class InternalBuilder { }) } if (filters.contains) { - const fnc = allOr ? 
"orWhere" : "where" - const rawFnc = `${fnc}Raw` - if (this.client === SqlClients.POSTGRES) { - iterate(filters.contains, (key: string, value: any) => { - const fieldNames = key.split(/\./g) - const tableName = fieldNames[0] - const columnName = fieldNames[1] - if (typeof value === "string") { - value = `"${value}"` - } - // @ts-ignore - query = query[rawFnc]( - `"${tableName}"."${columnName}"::jsonb @> '[${value}]'` - ) - }) - } else if (this.client === SqlClients.MY_SQL) { - iterate(filters.contains, (key: string, value: any) => { - if (typeof value === "string") { - value = `"${value}"` - } - // @ts-ignore - query = query[rawFnc](`JSON_CONTAINS(${key}, '${value}')`) - }) - } else { - iterate(filters.contains, like) - } + contains(filters.contains) + } + if (filters.notContains) { + contains(filters.notContains) + } + if (filters.containsAny) { + contains(filters.containsAny, true) } return query } @@ -265,7 +303,7 @@ class InternalBuilder { const direction = value === SortDirection.ASCENDING ? 
"asc" : "desc" query = query.orderBy(`${table?.name}.${key}`, direction) } - } else if (this.client === SqlClients.MS_SQL && paginate?.limit) { + } else if (this.client === SqlClient.MS_SQL && paginate?.limit) { // @ts-ignore query = query.orderBy(`${table?.name}.${table?.primary[0]}`) } @@ -414,7 +452,7 @@ class InternalBuilder { [tableName]: query, }).select(selectStatement) // have to add after as well (this breaks MS-SQL) - if (this.client !== SqlClients.MS_SQL) { + if (this.client !== SqlClient.MS_SQL) { preQuery = this.addSorting(preQuery, json) } // handle joins @@ -565,9 +603,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { // same as delete, manage returning if (operation === Operation.CREATE || operation === Operation.UPDATE) { let id - if (sqlClient === SqlClients.MS_SQL) { + if (sqlClient === SqlClient.MS_SQL) { id = results?.[0].id - } else if (sqlClient === SqlClients.MY_SQL) { + } else if (sqlClient === SqlClient.MY_SQL) { id = results?.insertId } row = processFn( @@ -582,4 +620,3 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { } export default SqlQueryBuilder -module.exports = SqlQueryBuilder diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts index 71f9c4aa64..4b715e5f3a 100644 --- a/packages/server/src/integrations/base/sqlTable.ts +++ b/packages/server/src/integrations/base/sqlTable.ts @@ -1,10 +1,5 @@ import { Knex, knex } from "knex" -import { Table } from "../../definitions/common" -import { - Operation, - QueryJson, - RenameColumn, -} from "../../definitions/datasource" +import { Operation, QueryJson, RenameColumn, Table } from "@budibase/types" import { breakExternalTableId } from "../utils" import SchemaBuilder = Knex.SchemaBuilder import CreateTableBuilder = Knex.CreateTableBuilder diff --git a/packages/server/src/integrations/base/utils.ts b/packages/server/src/integrations/base/utils.ts index 086912b920..54efdb91a0 100644 --- 
a/packages/server/src/integrations/base/utils.ts +++ b/packages/server/src/integrations/base/utils.ts @@ -1,22 +1,12 @@ -import { QueryJson } from "../../definitions/datasource" -import { Datasource } from "../../definitions/common" +const QUERY_START_REGEX = /\d[0-9]*:/g -module DatasourceUtils { - const { integrations } = require("../index") - - export async function makeExternalQuery( - datasource: Datasource, - json: QueryJson - ) { - const Integration = integrations[datasource.source] - // query is the opinionated function - if (Integration.prototype.query) { - const integration = new Integration(datasource.config) - return integration.query(json) - } else { - throw "Datasource does not support query." - } +export function removeKeyNumbering(key: any): string { + if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) { + const parts = key.split(":") + // remove the number + parts.shift() + return parts.join(":") + } else { + return key } - - module.exports.makeExternalQuery = makeExternalQuery } diff --git a/packages/server/src/integrations/couchdb.ts b/packages/server/src/integrations/couchdb.ts index 6d33658988..c23593dbd4 100644 --- a/packages/server/src/integrations/couchdb.ts +++ b/packages/server/src/integrations/couchdb.ts @@ -1,9 +1,9 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, -} from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" + DatasourceFieldType, + QueryType, + IntegrationBase, +} from "@budibase/types" module CouchDBModule { const PouchDB = require("pouchdb") @@ -21,30 +21,30 @@ module CouchDBModule { "Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.", datasource: { url: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, default: "http://localhost:5984", }, database: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, query: { create: { - type: 
QueryTypes.JSON, + type: QueryType.JSON, }, read: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, update: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, delete: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { id: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, diff --git a/packages/server/src/integrations/dynamodb.ts b/packages/server/src/integrations/dynamodb.ts index f06bba02e9..5321da4791 100644 --- a/packages/server/src/integrations/dynamodb.ts +++ b/packages/server/src/integrations/dynamodb.ts @@ -1,9 +1,9 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, -} from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" + DatasourceFieldType, + QueryType, + IntegrationBase, +} from "@budibase/types" module DynamoModule { const AWS = require("aws-sdk") @@ -24,101 +24,101 @@ module DynamoModule { type: "Non-relational", datasource: { region: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, default: "us-east-1", }, accessKeyId: { - type: DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, required: true, }, secretAccessKey: { - type: DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, required: true, }, endpoint: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: false, default: "https://dynamodb.us-east-1.amazonaws.com", }, }, query: { create: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, read: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, readable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, index: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, }, }, }, scan: { - 
type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, readable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, index: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, }, }, }, describe: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, readable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, get: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, readable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, update: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, delete: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, fields: { table: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, diff --git a/packages/server/src/integrations/elasticsearch.ts b/packages/server/src/integrations/elasticsearch.ts index eb27966df0..5c61545ecd 100644 --- a/packages/server/src/integrations/elasticsearch.ts +++ b/packages/server/src/integrations/elasticsearch.ts @@ -1,9 +1,9 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, -} from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" + DatasourceFieldType, + QueryType, + IntegrationBase, +} from "@budibase/types" module ElasticsearchModule { const { Client } = require("@elastic/elasticsearch") @@ -20,55 +20,55 @@ module ElasticsearchModule { type: "Non-relational", datasource: { url: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, default: "http://localhost:9200", }, }, query: { create: { - type: QueryTypes.FIELDS, + type: 
QueryType.FIELDS, customisable: true, fields: { index: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, read: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, fields: { index: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, update: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, customisable: true, fields: { id: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, index: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, delete: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { index: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, id: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, diff --git a/packages/server/src/integrations/firebase.ts b/packages/server/src/integrations/firebase.ts index c0f5d3c798..1f82e9dafe 100644 --- a/packages/server/src/integrations/firebase.ts +++ b/packages/server/src/integrations/firebase.ts @@ -1,9 +1,9 @@ import { - DatasourceFieldTypes, + DatasourceFieldType, Integration, - QueryTypes, -} from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" + QueryType, + IntegrationBase, +} from "@budibase/types" import { Firestore, WhereFilterOp } from "@google-cloud/firestore" module Firebase { @@ -21,46 +21,46 @@ module Firebase { "Cloud Firestore is a flexible, scalable database for mobile, web, and server development from Firebase and Google Cloud.", datasource: { email: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, privateKey: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, projectId: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, 
query: { create: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, read: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, update: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, delete: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, }, extra: { collection: { displayName: "Collection", - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, filterField: { displayName: "Filter field", - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: false, }, filter: { displayName: "Filter comparison", - type: DatasourceFieldTypes.LIST, + type: DatasourceFieldType.LIST, required: false, data: { read: [ @@ -79,7 +79,7 @@ module Firebase { }, filterValue: { displayName: "Filter value", - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: false, }, }, diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index d2562ddccf..129ab485cd 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -1,12 +1,13 @@ import { - DatasourceFieldTypes, + DatasourceFieldType, Integration, + QueryType, + Table, + TableSchema, QueryJson, - QueryTypes, -} from "../definitions/datasource" + DatasourcePlus, +} from "@budibase/types" import { OAuth2Client } from "google-auth-library" -import { DatasourcePlus } from "./base/datasourcePlus" -import { Table, TableSchema } from "../definitions/common" import { buildExternalTableId } from "./utils" import { DataSourceOperation, FieldTypes } from "../constants" import { GoogleSpreadsheet } from "google-spreadsheet" @@ -53,59 +54,59 @@ module GoogleSheetsModule { datasource: { spreadsheetId: { display: "Google Sheet URL", - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, query: { create: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { sheet: { - type: 
DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, row: { - type: QueryTypes.JSON, + type: QueryType.JSON, required: true, }, }, }, read: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { sheet: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, update: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { sheet: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, rowIndex: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, row: { - type: QueryTypes.JSON, + type: QueryType.JSON, required: true, }, }, }, delete: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { sheet: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, rowIndex: { - type: DatasourceFieldTypes.NUMBER, + type: DatasourceFieldType.NUMBER, required: true, }, }, diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts index 6edc65f08c..fd2ce75391 100644 --- a/packages/server/src/integrations/index.ts +++ b/packages/server/src/integrations/index.ts @@ -13,54 +13,54 @@ const googlesheets = require("./googlesheets") const firebase = require("./firebase") const redis = require("./redis") const snowflake = require("./snowflake") -const { SourceNames } = require("../definitions/datasource") +const { SourceName } = require("@budibase/types") const environment = require("../environment") const DEFINITIONS = { - [SourceNames.POSTGRES]: postgres.schema, - [SourceNames.DYNAMODB]: dynamodb.schema, - [SourceNames.MONGODB]: mongodb.schema, - [SourceNames.ELASTICSEARCH]: elasticsearch.schema, - [SourceNames.COUCHDB]: couchdb.schema, - [SourceNames.SQL_SERVER]: sqlServer.schema, - [SourceNames.S3]: s3.schema, - [SourceNames.AIRTABLE]: airtable.schema, - [SourceNames.MYSQL]: mysql.schema, - [SourceNames.ARANGODB]: arangodb.schema, - 
[SourceNames.REST]: rest.schema, - [SourceNames.FIRESTORE]: firebase.schema, - [SourceNames.REDIS]: redis.schema, - [SourceNames.SNOWFLAKE]: snowflake.schema, + [SourceName.POSTGRES]: postgres.schema, + [SourceName.DYNAMODB]: dynamodb.schema, + [SourceName.MONGODB]: mongodb.schema, + [SourceName.ELASTICSEARCH]: elasticsearch.schema, + [SourceName.COUCHDB]: couchdb.schema, + [SourceName.SQL_SERVER]: sqlServer.schema, + [SourceName.S3]: s3.schema, + [SourceName.AIRTABLE]: airtable.schema, + [SourceName.MYSQL]: mysql.schema, + [SourceName.ARANGODB]: arangodb.schema, + [SourceName.REST]: rest.schema, + [SourceName.FIRESTORE]: firebase.schema, + [SourceName.REDIS]: redis.schema, + [SourceName.SNOWFLAKE]: snowflake.schema, } const INTEGRATIONS = { - [SourceNames.POSTGRES]: postgres.integration, - [SourceNames.DYNAMODB]: dynamodb.integration, - [SourceNames.MONGODB]: mongodb.integration, - [SourceNames.ELASTICSEARCH]: elasticsearch.integration, - [SourceNames.COUCHDB]: couchdb.integration, - [SourceNames.SQL_SERVER]: sqlServer.integration, - [SourceNames.S3]: s3.integration, - [SourceNames.AIRTABLE]: airtable.integration, - [SourceNames.MYSQL]: mysql.integration, - [SourceNames.ARANGODB]: arangodb.integration, - [SourceNames.REST]: rest.integration, - [SourceNames.FIRESTORE]: firebase.integration, - [SourceNames.GOOGLE_SHEETS]: googlesheets.integration, - [SourceNames.REDIS]: redis.integration, - [SourceNames.FIREBASE]: firebase.integration, - [SourceNames.SNOWFLAKE]: snowflake.integration, + [SourceName.POSTGRES]: postgres.integration, + [SourceName.DYNAMODB]: dynamodb.integration, + [SourceName.MONGODB]: mongodb.integration, + [SourceName.ELASTICSEARCH]: elasticsearch.integration, + [SourceName.COUCHDB]: couchdb.integration, + [SourceName.SQL_SERVER]: sqlServer.integration, + [SourceName.S3]: s3.integration, + [SourceName.AIRTABLE]: airtable.integration, + [SourceName.MYSQL]: mysql.integration, + [SourceName.ARANGODB]: arangodb.integration, + [SourceName.REST]: 
rest.integration, + [SourceName.FIRESTORE]: firebase.integration, + [SourceName.GOOGLE_SHEETS]: googlesheets.integration, + [SourceName.REDIS]: redis.integration, + [SourceName.FIREBASE]: firebase.integration, + [SourceName.SNOWFLAKE]: snowflake.integration, } // optionally add oracle integration if the oracle binary can be installed if (process.arch && !process.arch.startsWith("arm")) { const oracle = require("./oracle") - DEFINITIONS[SourceNames.ORACLE] = oracle.schema - INTEGRATIONS[SourceNames.ORACLE] = oracle.integration + DEFINITIONS[SourceName.ORACLE] = oracle.schema + INTEGRATIONS[SourceName.ORACLE] = oracle.integration } if (environment.SELF_HOSTED) { - DEFINITIONS[SourceNames.GOOGLE_SHEETS] = googlesheets.schema + DEFINITIONS[SourceName.GOOGLE_SHEETS] = googlesheets.schema } module.exports = { diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index 1e5664748d..6103fd90ce 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -1,24 +1,25 @@ import { - DatasourceFieldTypes, + DatasourceFieldType, Integration, Operation, + Table, + TableSchema, QueryJson, - QueryTypes, + QueryType, SqlQuery, -} from "../definitions/datasource" + DatasourcePlus, +} from "@budibase/types" import { getSqlQuery, buildExternalTableId, convertSqlType, finaliseExternalTables, - SqlClients, + SqlClient, } from "./utils" -import { DatasourcePlus } from "./base/datasourcePlus" -import { Table, TableSchema } from "../definitions/common" +import Sql from "./base/sql" module MSSQLModule { const sqlServer = require("mssql") - const Sql = require("./base/sql") const DEFAULT_SCHEMA = "dbo" interface MSSQLConfig { @@ -47,48 +48,48 @@ module MSSQLModule { type: "Relational", datasource: { user: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, default: "localhost", }, password: { - type: 
DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, required: true, }, server: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "localhost", }, port: { - type: DatasourceFieldTypes.NUMBER, + type: DatasourceFieldType.NUMBER, required: false, default: 1433, }, database: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "root", }, schema: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: DEFAULT_SCHEMA, }, encrypt: { - type: DatasourceFieldTypes.BOOLEAN, + type: DatasourceFieldType.BOOLEAN, default: true, }, }, query: { create: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, read: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, update: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, delete: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, }, } @@ -96,7 +97,8 @@ module MSSQLModule { class SqlServerIntegration extends Sql implements DatasourcePlus { private readonly config: MSSQLConfig private index: number = 0 - static pool: any + private readonly pool: any + private client: any public tables: Record = {} public schemaErrors: Record = {} @@ -111,7 +113,7 @@ module MSSQLModule { "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'" constructor(config: MSSQLConfig) { - super(SqlClients.MS_SQL) + super(SqlClient.MS_SQL) this.config = config const clientCfg = { ...this.config, diff --git a/packages/server/src/integrations/mongodb.ts b/packages/server/src/integrations/mongodb.ts index 802696ff40..4f2a901259 100644 --- a/packages/server/src/integrations/mongodb.ts +++ b/packages/server/src/integrations/mongodb.ts @@ -1,9 +1,9 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, -} from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" + DatasourceFieldType, + QueryType, + IntegrationBase, +} from "@budibase/types" import { MongoClient, ObjectID, @@ -29,38 +29,38 @@ module 
MongoDBModule { "MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.", datasource: { connectionString: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, default: "mongodb://localhost:27017", }, db: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, query: { create: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, read: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, update: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, delete: { - type: QueryTypes.JSON, + type: QueryType.JSON, }, }, extra: { collection: { displayName: "Collection", - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, actionTypes: { displayName: "Action Types", - type: DatasourceFieldTypes.LIST, + type: DatasourceFieldType.LIST, required: true, data: { read: ["find", "findOne", "findOneAndUpdate", "count", "distinct"], @@ -92,12 +92,15 @@ module MongoDBModule { if (json[field] instanceof Object) { json[field] = self.createObjectIds(json[field]) } - if (field === "_id" && typeof json[field] === "string") { - const id = json["_id"].match( + if ( + (field === "_id" || field?.startsWith("$")) && + typeof json[field] === "string" + ) { + const id = json[field].match( /(?<=objectid\(['"]).*(?=['"]\))/gi )?.[0] if (id) { - json["_id"] = ObjectID.createFromHexString(id) + json[field] = ObjectID.createFromHexString(id) } } } @@ -114,10 +117,31 @@ module MongoDBModule { } parseQueryParams(params: string, mode: string) { - let queryParams = params.split(/(?<=}),[\n\s]*(?={)/g) - let group1 = queryParams[0] ? JSON.parse(queryParams[0]) : {} - let group2 = queryParams[1] ? JSON.parse(queryParams[1]) : {} - let group3 = queryParams[2] ? 
JSON.parse(queryParams[2]) : {} + let queryParams = [] + let openCount = 0 + let inQuotes = false + let i = 0 + let startIndex = 0 + for (let c of params) { + if (c === '"' && i > 0 && params[i - 1] !== "\\") { + inQuotes = !inQuotes + } + if (c === "{" && !inQuotes) { + openCount++ + if (openCount === 1) { + startIndex = i + } + } else if (c === "}" && !inQuotes) { + if (openCount === 1) { + queryParams.push(JSON.parse(params.substring(startIndex, i + 1))) + } + openCount-- + } + i++ + } + let group1 = queryParams[0] ?? {} + let group2 = queryParams[1] ?? {} + let group3 = queryParams[2] ?? {} if (mode === "update") { return { filter: group1, @@ -176,7 +200,10 @@ module MongoDBModule { return await collection.findOne(json) } case "findOneAndUpdate": { - let findAndUpdateJson = json as { + if (typeof query.json === "string") { + json = this.parseQueryParams(query.json, "update") + } + let findAndUpdateJson = this.createObjectIds(json) as { filter: FilterQuery update: UpdateQuery options: FindOneAndUpdateOption diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index 1fdab66701..466f2e6494 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -1,25 +1,26 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, + DatasourceFieldType, + QueryType, QueryJson, SqlQuery, -} from "../definitions/datasource" -import { Table, TableSchema } from "../definitions/common" + Table, + TableSchema, + DatasourcePlus, +} from "@budibase/types" import { getSqlQuery, - SqlClients, + SqlClient, buildExternalTableId, convertSqlType, finaliseExternalTables, } from "./utils" -import { DatasourcePlus } from "./base/datasourcePlus" import dayjs from "dayjs" const { NUMBER_REGEX } = require("../utilities") +import Sql from "./base/sql" module MySQLModule { const mysql = require("mysql2/promise") - const Sql = require("./base/sql") interface MySQLConfig { host: string @@ -41,51 +42,51 @@ 
module MySQLModule { "MySQL Database Service is a fully managed database service to deploy cloud-native applications. ", datasource: { host: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "localhost", required: true, }, port: { - type: DatasourceFieldTypes.NUMBER, + type: DatasourceFieldType.NUMBER, default: 3306, required: false, }, user: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "root", required: true, }, password: { - type: DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, default: "root", required: true, }, database: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, ssl: { - type: DatasourceFieldTypes.OBJECT, + type: DatasourceFieldType.OBJECT, required: false, }, rejectUnauthorized: { - type: DatasourceFieldTypes.BOOLEAN, + type: DatasourceFieldType.BOOLEAN, default: true, required: false, }, }, query: { create: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, read: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, update: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, delete: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, }, } @@ -119,7 +120,7 @@ module MySQLModule { public schemaErrors: Record = {} constructor(config: MySQLConfig) { - super(SqlClients.MY_SQL) + super(SqlClient.MY_SQL) this.config = config if (config.ssl && Object.keys(config.ssl).length === 0) { delete config.ssl diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index ab742d9009..fa8a3837b2 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -1,17 +1,19 @@ import { - DatasourceFieldTypes, + DatasourceFieldType, Integration, Operation, QueryJson, - QueryTypes, + QueryType, SqlQuery, -} from "../definitions/datasource" + Table, + DatasourcePlus, +} from "@budibase/types" import { buildExternalTableId, convertSqlType, 
finaliseExternalTables, getSqlQuery, - SqlClients, + SqlClient, } from "./utils" import oracledb, { BindParameters, @@ -21,8 +23,6 @@ import oracledb, { Result, } from "oracledb" import Sql from "./base/sql" -import { Table } from "../definitions/common" -import { DatasourcePlus } from "./base/datasourcePlus" import { FieldTypes } from "../constants" module OracleModule { @@ -45,40 +45,40 @@ module OracleModule { "Oracle Database is an object-relational database management system developed by Oracle Corporation", datasource: { host: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "localhost", required: true, }, port: { - type: DatasourceFieldTypes.NUMBER, + type: DatasourceFieldType.NUMBER, required: true, default: 1521, }, database: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, user: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, password: { - type: DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, required: true, }, }, query: { create: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, read: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, update: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, delete: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, }, } @@ -172,7 +172,7 @@ module OracleModule { OR cons.status IS NULL) ` constructor(config: OracleConfig) { - super(SqlClients.ORACLE) + super(SqlClient.ORACLE) this.config = config } diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index 7cc01bdc79..316311e193 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -1,23 +1,23 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, + DatasourceFieldType, + QueryType, QueryJson, SqlQuery, -} from "../definitions/datasource" -import { Table } from "../definitions/common" + Table, + 
DatasourcePlus, +} from "@budibase/types" import { getSqlQuery, buildExternalTableId, convertSqlType, finaliseExternalTables, - SqlClients, + SqlClient, } from "./utils" -import { DatasourcePlus } from "./base/datasourcePlus" +import Sql from "./base/sql" module PostgresModule { const { Client, types } = require("pg") - const Sql = require("./base/sql") const { escapeDangerousCharacters } = require("../utilities") // Return "date" and "timestamp" types as plain strings. @@ -52,63 +52,63 @@ module PostgresModule { "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.", datasource: { host: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "localhost", required: true, }, port: { - type: DatasourceFieldTypes.NUMBER, + type: DatasourceFieldType.NUMBER, required: true, default: 5432, }, database: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "postgres", required: true, }, user: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "root", required: true, }, password: { - type: DatasourceFieldTypes.PASSWORD, + type: DatasourceFieldType.PASSWORD, default: "root", required: true, }, schema: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "public", required: true, }, ssl: { - type: DatasourceFieldTypes.BOOLEAN, + type: DatasourceFieldType.BOOLEAN, default: false, required: false, }, rejectUnauthorized: { - type: DatasourceFieldTypes.BOOLEAN, + type: DatasourceFieldType.BOOLEAN, default: false, required: false, }, ca: { - type: DatasourceFieldTypes.LONGFORM, + type: DatasourceFieldType.LONGFORM, default: false, required: false, }, }, query: { create: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, read: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, update: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, delete: { - type: 
QueryTypes.SQL, + type: QueryType.SQL, }, }, } @@ -117,6 +117,7 @@ module PostgresModule { private readonly client: any private readonly config: PostgresConfig private index: number = 1 + private open: boolean public tables: Record = {} public schemaErrors: Record = {} @@ -133,7 +134,7 @@ module PostgresModule { ` constructor(config: PostgresConfig) { - super(SqlClients.POSTGRES) + super(SqlClient.POSTGRES) this.config = config let newConfig = { diff --git a/packages/server/src/integrations/queries/sql.ts b/packages/server/src/integrations/queries/sql.ts index 271a414d44..6e66114ec2 100644 --- a/packages/server/src/integrations/queries/sql.ts +++ b/packages/server/src/integrations/queries/sql.ts @@ -1,6 +1,5 @@ import { findHBSBlocks, processStringSync } from "@budibase/string-templates" -import { Integration } from "../../definitions/datasource" -import { DatasourcePlus } from "../base/datasourcePlus" +import { DatasourcePlus } from "@budibase/types" const CONST_CHAR_REGEX = new RegExp("'[^']*'", "g") diff --git a/packages/server/src/integrations/redis.ts b/packages/server/src/integrations/redis.ts index a1a9804cc0..e8aa13560c 100644 --- a/packages/server/src/integrations/redis.ts +++ b/packages/server/src/integrations/redis.ts @@ -1,8 +1,4 @@ -import { - DatasourceFieldTypes, - Integration, - QueryTypes, -} from "../definitions/datasource" +import { DatasourceFieldType, Integration, QueryType } from "@budibase/types" import Redis from "ioredis" module RedisModule { @@ -40,36 +36,36 @@ module RedisModule { }, query: { create: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { key: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, value: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, ttl: { - type: DatasourceFieldTypes.NUMBER, + type: DatasourceFieldType.NUMBER, }, }, }, read: { readable: true, - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { key: 
{ - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, }, delete: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { key: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, required: true, }, }, @@ -77,7 +73,7 @@ module RedisModule { command: { readable: true, displayName: "Redis Command", - type: QueryTypes.JSON, + type: QueryType.JSON, }, }, } diff --git a/packages/server/src/integrations/rest.ts b/packages/server/src/integrations/rest.ts index 9cc8e1a841..284d2a921a 100644 --- a/packages/server/src/integrations/rest.ts +++ b/packages/server/src/integrations/rest.ts @@ -1,16 +1,18 @@ import { Integration, - DatasourceFieldTypes, - QueryTypes, - RestConfig, - RestQueryFields as RestQuery, + DatasourceFieldType, + QueryType, PaginationConfig, + IntegrationBase, + PaginationValues, + RestQueryFields as RestQuery, +} from "@budibase/types" +import { + RestConfig, AuthType, BasicAuthConfig, BearerAuthConfig, - PaginationValues, } from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" import { get } from "lodash" const BodyTypes = { @@ -24,27 +26,27 @@ const BodyTypes = { const coreFields = { path: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, display: "URL", }, queryString: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, }, headers: { - type: DatasourceFieldTypes.OBJECT, + type: DatasourceFieldType.OBJECT, }, enabledHeaders: { - type: DatasourceFieldTypes.OBJECT, + type: DatasourceFieldType.OBJECT, }, requestBody: { - type: DatasourceFieldTypes.JSON, + type: DatasourceFieldType.JSON, }, bodyType: { - type: DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, enum: Object.values(BodyTypes), }, pagination: { - type: DatasourceFieldTypes.OBJECT, + type: DatasourceFieldType.OBJECT, }, } @@ -67,13 +69,13 @@ module RestModule { type: "API", datasource: { url: { - type: 
DatasourceFieldTypes.STRING, + type: DatasourceFieldType.STRING, default: "", required: false, deprecated: true, }, defaultHeaders: { - type: DatasourceFieldTypes.OBJECT, + type: DatasourceFieldType.OBJECT, required: false, default: {}, }, @@ -82,30 +84,30 @@ module RestModule { create: { readable: true, displayName: "POST", - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: coreFields, }, read: { displayName: "GET", readable: true, - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: coreFields, }, update: { displayName: "PUT", readable: true, - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: coreFields, }, patch: { displayName: "PATCH", readable: true, - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: coreFields, }, delete: { displayName: "DELETE", - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: coreFields, }, }, diff --git a/packages/server/src/integrations/s3.ts b/packages/server/src/integrations/s3.ts index 12371e703f..e8da696424 100644 --- a/packages/server/src/integrations/s3.ts +++ b/packages/server/src/integrations/s3.ts @@ -1,5 +1,4 @@ -import { Integration, QueryTypes } from "../definitions/datasource" -import { IntegrationBase } from "./base/IntegrationBase" +import { Integration, QueryType, IntegrationBase } from "@budibase/types" module S3Module { const AWS = require("aws-sdk") @@ -44,7 +43,7 @@ module S3Module { }, query: { read: { - type: QueryTypes.FIELDS, + type: QueryType.FIELDS, fields: { bucket: { type: "string", diff --git a/packages/server/src/integrations/snowflake.ts b/packages/server/src/integrations/snowflake.ts index 5e04e858e5..7155d27b66 100644 --- a/packages/server/src/integrations/snowflake.ts +++ b/packages/server/src/integrations/snowflake.ts @@ -1,4 +1,4 @@ -import { Integration, QueryTypes, SqlQuery } from "../definitions/datasource" +import { Integration, QueryType, SqlQuery } from "@budibase/types" import { Snowflake } from "snowflake-promise" module SnowflakeModule { @@ 
-45,16 +45,16 @@ module SnowflakeModule { }, query: { create: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, read: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, update: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, delete: { - type: QueryTypes.SQL, + type: QueryType.SQL, }, }, } diff --git a/packages/server/src/integrations/tests/airtable.spec.js b/packages/server/src/integrations/tests/airtable.spec.js index 4769430ded..df676a1397 100644 --- a/packages/server/src/integrations/tests/airtable.spec.js +++ b/packages/server/src/integrations/tests/airtable.spec.js @@ -48,7 +48,7 @@ describe("Airtable Integration", () => { it("calls the update method with the correct params", async () => { const response = await config.integration.update({ - table: "test", + table: "table", id: "123", json: { name: "test" diff --git a/packages/server/src/integrations/tests/mongo.spec.js b/packages/server/src/integrations/tests/mongo.spec.js index b0a49521ec..9687723528 100644 --- a/packages/server/src/integrations/tests/mongo.spec.js +++ b/packages/server/src/integrations/tests/mongo.spec.js @@ -102,4 +102,222 @@ describe("MongoDB Integration", () => { expect(error).toBeDefined() restore() }) + + it("creates ObjectIds if the _id fields contains a match on ObjectId", async () => { + const query = { + json: { + filter: { + _id: "ObjectId('ACBD12345678ABCD12345678')", + name: "ObjectId('name')" + }, + update: { + _id: "ObjectId('FFFF12345678ABCD12345678')", + name: "ObjectId('updatedName')", + }, + options: { + upsert: false, + }, + }, + extra: { collection: "testCollection", actionTypes: "updateOne" }, + } + await config.integration.update(query) + expect(config.integration.client.updateOne).toHaveBeenCalled() + + const args = config.integration.client.updateOne.mock.calls[0] + expect(args[0]).toEqual({ + _id: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), + name: "ObjectId('name')", + }) + expect(args[1]).toEqual({ + _id: 
mongo.ObjectID.createFromHexString("FFFF12345678ABCD12345678"), + name: "ObjectId('updatedName')", + }) + expect(args[2]).toEqual({ + upsert: false + }) + }) + + it("creates ObjectIds if the $ operator fields contains a match on ObjectId", async () => { + const query = { + json: { + filter: { + _id: { + $eq: "ObjectId('ACBD12345678ABCD12345678')", + } + }, + update: { + $set: { + _id: "ObjectId('FFFF12345678ABCD12345678')", + }, + }, + options: { + upsert: true, + }, + }, + extra: { collection: "testCollection", actionTypes: "updateOne" }, + } + await config.integration.update(query) + expect(config.integration.client.updateOne).toHaveBeenCalled() + + const args = config.integration.client.updateOne.mock.calls[0] + expect(args[0]).toEqual({ + _id: { + $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), + } + }) + expect(args[1]).toEqual({ + $set: { + _id: mongo.ObjectID.createFromHexString("FFFF12345678ABCD12345678"), + } + }) + expect(args[2]).toEqual({ + upsert: true + }) + }) + + it("supports findOneAndUpdate", async () => { + const query = { + json: { + filter: { + _id: { + $eq: "ObjectId('ACBD12345678ABCD12345678')", + } + }, + update: { + $set: { + name: "UPDATED", + age: 99 + }, + }, + options: { + upsert: false, + }, + }, + extra: { collection: "testCollection", actionTypes: "findOneAndUpdate" }, + } + await config.integration.read(query) + expect(config.integration.client.findOneAndUpdate).toHaveBeenCalled() + + const args = config.integration.client.findOneAndUpdate.mock.calls[0] + expect(args[0]).toEqual({ + _id: { + $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), + } + }) + expect(args[1]).toEqual({ + $set: { + name: "UPDATED", + age: 99 + } + }) + expect(args[2]).toEqual({ + upsert: false + }) + }) + + it("can parse nested objects with arrays", async () => { + const query = { + json: `{ + "_id": { + "$eq": "ObjectId('ACBD12345678ABCD12345678')" + } + }, + { + "$set": { + "value": { + "data": [ + { "cid": 1 }, + { 
"cid": 2 }, + { "nested": { + "name": "test" + }} + ] + } + } + }, + { + "upsert": true + }`, + extra: { collection: "testCollection", actionTypes: "updateOne" }, + } + await config.integration.update(query) + expect(config.integration.client.updateOne).toHaveBeenCalled() + + const args = config.integration.client.updateOne.mock.calls[0] + expect(args[0]).toEqual({ + _id: { + $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), + } + }) + expect(args[1]).toEqual({ + $set: { + value: { + data: [ + { cid: 1 }, + { cid: 2 }, + { nested: { + name: "test" + }} + ] + }, + }, + }) + expect(args[2]).toEqual({ + upsert: true + }) + }) + + it("ignores braces within strings when parsing nested objects", async () => { + const query = { + json: `{ + "_id": { + "$eq": "ObjectId('ACBD12345678ABCD12345678')" + } + }, + { + "$set": { + "value": { + "data": [ + { "cid": 1 }, + { "cid": 2 }, + { "nested": { + "name": "te}st" + }} + ] + } + } + }, + { + "upsert": true, + "extra": "ad\\"{\\"d" + }`, + extra: { collection: "testCollection", actionTypes: "updateOne" }, + } + await config.integration.update(query) + expect(config.integration.client.updateOne).toHaveBeenCalled() + + const args = config.integration.client.updateOne.mock.calls[0] + expect(args[0]).toEqual({ + _id: { + $eq: mongo.ObjectID.createFromHexString("ACBD12345678ABCD12345678"), + } + }) + expect(args[1]).toEqual({ + $set: { + value: { + data: [ + { cid: 1 }, + { cid: 2 }, + { nested: { + name: "te}st" + }} + ] + }, + }, + }) + expect(args[2]).toEqual({ + upsert: true, + extra: "ad\"{\"d" + }) + }) }) diff --git a/packages/server/src/integrations/tests/sql.spec.js b/packages/server/src/integrations/tests/sql.spec.js index 55c762573a..3cc9f0fb3e 100644 --- a/packages/server/src/integrations/tests/sql.spec.js +++ b/packages/server/src/integrations/tests/sql.spec.js @@ -1,5 +1,5 @@ -const Sql = require("../base/sql") -const { SqlClients } = require("../utils") +const Sql = require("../base/sql").default 
+const { SqlClient } = require("../utils") const TABLE_NAME = "test" @@ -47,7 +47,7 @@ function generateDeleteJson(table = TABLE_NAME, filters = {}) { describe("SQL query builder", () => { const limit = 500 - const client = SqlClients.POSTGRES + const client = SqlClient.POSTGRES let sql beforeEach(() => { @@ -174,7 +174,7 @@ describe("SQL query builder", () => { }) it("should work with MS-SQL", () => { - const query = new Sql(SqlClients.MS_SQL, 10)._query(generateReadJson()) + const query = new Sql(SqlClient.MS_SQL, 10)._query(generateReadJson()) expect(query).toEqual({ bindings: [10], sql: `select * from (select top (@p0) * from [${TABLE_NAME}]) as [${TABLE_NAME}]` @@ -182,7 +182,7 @@ describe("SQL query builder", () => { }) it("should work with MySQL", () => { - const query = new Sql(SqlClients.MY_SQL, 10)._query(generateReadJson()) + const query = new Sql(SqlClient.MY_SQL, 10)._query(generateReadJson()) expect(query).toEqual({ bindings: [10], sql: `select * from (select * from \`${TABLE_NAME}\` limit ?) 
as \`${TABLE_NAME}\`` @@ -240,42 +240,42 @@ describe("SQL query builder", () => { }) }) - it("should use like expression for MS-SQL when filter is contains", () => { - const query = new Sql(SqlClients.MS_SQL, 10)._query(generateReadJson({ + it("should use AND like expression for MS-SQL when filter is contains", () => { + const query = new Sql(SqlClient.MS_SQL, 10)._query(generateReadJson({ filters: { contains: { - age: 20, - name: "John" + age: [20, 25], + name: ["John", "Mary"] } } })) expect(query).toEqual({ - bindings: [10, "%20%", "%John%"], - sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where LOWER(${TABLE_NAME}.age) LIKE @p1 and LOWER(${TABLE_NAME}.name) LIKE @p2) as [${TABLE_NAME}]` + bindings: [10, "%20%", "%25%", `%"John"%`, `%"Mary"%`], + sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER(${TABLE_NAME}.age) LIKE @p1 AND LOWER(${TABLE_NAME}.age) LIKE @p2) and (LOWER(${TABLE_NAME}.name) LIKE @p3 AND LOWER(${TABLE_NAME}.name) LIKE @p4)) as [${TABLE_NAME}]` }) }) it("should use JSON_CONTAINS expression for MySQL when filter is contains", () => { - const query = new Sql(SqlClients.MY_SQL, 10)._query(generateReadJson({ + const query = new Sql(SqlClient.MY_SQL, 10)._query(generateReadJson({ filters: { contains: { - age: 20, - name: "John" + age: [20], + name: ["John"] } } })) expect(query).toEqual({ bindings: [10], - sql: `select * from (select * from \`${TABLE_NAME}\` where JSON_CONTAINS(${TABLE_NAME}.age, '20') and JSON_CONTAINS(${TABLE_NAME}.name, '"John"') limit ?) as \`${TABLE_NAME}\`` + sql: `select * from (select * from \`${TABLE_NAME}\` where JSON_CONTAINS(${TABLE_NAME}.age, '[20]') and JSON_CONTAINS(${TABLE_NAME}.name, '["John"]') limit ?) 
as \`${TABLE_NAME}\`` }) }) it("should use jsonb operator expression for PostgreSQL when filter is contains", () => { - const query = new Sql(SqlClients.POSTGRES, 10)._query(generateReadJson({ + const query = new Sql(SqlClient.POSTGRES, 10)._query(generateReadJson({ filters: { contains: { - age: 20, - name: "John" + age: [20], + name: ["John"] } } })) @@ -284,4 +284,94 @@ describe("SQL query builder", () => { sql: `select * from (select * from \"${TABLE_NAME}\" where \"${TABLE_NAME}\".\"age\"::jsonb @> '[20]' and \"${TABLE_NAME}\".\"name\"::jsonb @> '["John"]' limit $1) as \"${TABLE_NAME}\"` }) }) + + it("should use NOT like expression for MS-SQL when filter is notContains", () => { + const query = new Sql(SqlClient.MS_SQL, 10)._query(generateReadJson({ + filters: { + notContains: { + age: [20], + name: ["John"] + } + } + })) + expect(query).toEqual({ + bindings: [10, "%20%", `%"John"%`], + sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where NOT (LOWER(${TABLE_NAME}.age) LIKE @p1) and NOT (LOWER(${TABLE_NAME}.name) LIKE @p2)) as [${TABLE_NAME}]` + }) + }) + + it("should use NOT JSON_CONTAINS expression for MySQL when filter is notContains", () => { + const query = new Sql(SqlClient.MY_SQL, 10)._query(generateReadJson({ + filters: { + notContains: { + age: [20], + name: ["John"] + } + } + })) + expect(query).toEqual({ + bindings: [10], + sql: `select * from (select * from \`${TABLE_NAME}\` where NOT JSON_CONTAINS(${TABLE_NAME}.age, '[20]') and NOT JSON_CONTAINS(${TABLE_NAME}.name, '["John"]') limit ?) 
as \`${TABLE_NAME}\`` + }) + }) + + it("should use jsonb operator NOT expression for PostgreSQL when filter is notContains", () => { + const query = new Sql(SqlClient.POSTGRES, 10)._query(generateReadJson({ + filters: { + notContains: { + age: [20], + name: ["John"] + } + } + })) + expect(query).toEqual({ + bindings: [10], + sql: `select * from (select * from \"${TABLE_NAME}\" where NOT \"${TABLE_NAME}\".\"age\"::jsonb @> '[20]' and NOT \"${TABLE_NAME}\".\"name\"::jsonb @> '["John"]' limit $1) as \"${TABLE_NAME}\"` + }) + }) + + it("should use OR like expression for MS-SQL when filter is containsAny", () => { + const query = new Sql(SqlClient.MS_SQL, 10)._query(generateReadJson({ + filters: { + containsAny: { + age: [20, 25], + name: ["John", "Mary"] + } + } + })) + expect(query).toEqual({ + bindings: [10, "%20%", "%25%", `%"John"%`, `%"Mary"%`], + sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER(${TABLE_NAME}.age) LIKE @p1 OR LOWER(${TABLE_NAME}.age) LIKE @p2) and (LOWER(${TABLE_NAME}.name) LIKE @p3 OR LOWER(${TABLE_NAME}.name) LIKE @p4)) as [${TABLE_NAME}]` + }) + }) + + it("should use JSON_OVERLAPS expression for MySQL when filter is containsAny", () => { + const query = new Sql(SqlClient.MY_SQL, 10)._query(generateReadJson({ + filters: { + containsAny: { + age: [20, 25], + name: ["John", "Mary"] + } + } + })) + expect(query).toEqual({ + bindings: [10], + sql: `select * from (select * from \`${TABLE_NAME}\` where JSON_OVERLAPS(${TABLE_NAME}.age, '[20,25]') and JSON_OVERLAPS(${TABLE_NAME}.name, '["John","Mary"]') limit ?) 
as \`${TABLE_NAME}\`` + }) + }) + + it("should use ?| operator expression for PostgreSQL when filter is containsAny", () => { + const query = new Sql(SqlClient.POSTGRES, 10)._query(generateReadJson({ + filters: { + containsAny: { + age: [20, 25], + name: ["John", "Mary"] + } + } + })) + expect(query).toEqual({ + bindings: [10], + sql: `select * from (select * from \"${TABLE_NAME}\" where \"${TABLE_NAME}\".\"age\"::jsonb ?| array [20,25] and \"${TABLE_NAME}\".\"name\"::jsonb ?| array ['John','Mary'] limit $1) as \"${TABLE_NAME}\"` + }) + }) }) diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts index 7e4efad84f..287783eec6 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils.ts @@ -1,6 +1,5 @@ -import { SourceNames, SqlQuery } from "../definitions/datasource" -import { Datasource, Table } from "../definitions/common" -import { DocumentTypes, SEPARATOR } from "../db/utils" +import { SourceName, SqlQuery, Datasource, Table } from "@budibase/types" +import { DocumentType, SEPARATOR } from "../db/utils" import { FieldTypes, BuildSchemaErrors, InvalidColumns } from "../constants" const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` @@ -68,7 +67,7 @@ const SQL_TYPE_MAP = { ...SQL_MISC_TYPE_MAP, } -export enum SqlClients { +export enum SqlClient { MS_SQL = "mssql", POSTGRES = "pg", MY_SQL = "mysql2", @@ -76,7 +75,7 @@ export enum SqlClients { } export function isExternalTable(tableId: string) { - return tableId.includes(DocumentTypes.DATASOURCE) + return tableId.includes(DocumentType.DATASOURCE) } export function buildExternalTableId(datasourceId: string, tableName: string) { @@ -169,10 +168,10 @@ export function isSQL(datasource: Datasource): boolean { return false } const SQL = [ - SourceNames.POSTGRES, - SourceNames.SQL_SERVER, - SourceNames.MYSQL, - SourceNames.ORACLE, + SourceName.POSTGRES, + SourceName.SQL_SERVER, + SourceName.MYSQL, + SourceName.ORACLE, ] return 
SQL.indexOf(datasource.source) !== -1 } @@ -224,8 +223,9 @@ function shouldCopySpecialColumn( FieldTypes.ARRAY, FieldTypes.FORMULA, ] + // column has been deleted, remove if (column && !fetchedColumn) { - return true + return false } const fetchedIsNumber = !fetchedColumn || fetchedColumn.type === FieldTypes.NUMBER diff --git a/packages/server/src/middleware/builder.js b/packages/server/src/middleware/builder.js index b63a086d48..12c4b93c75 100644 --- a/packages/server/src/middleware/builder.js +++ b/packages/server/src/middleware/builder.js @@ -6,7 +6,7 @@ const { setDebounce, } = require("../utilities/redis") const { doWithDB } = require("@budibase/backend-core/db") -const { DocumentTypes, getGlobalIDFromUserMetadataID } = require("../db/utils") +const { DocumentType, getGlobalIDFromUserMetadataID } = require("../db/utils") const { PermissionTypes } = require("@budibase/backend-core/permissions") const { app: appCache } = require("@budibase/backend-core/cache") @@ -49,7 +49,7 @@ async function updateAppUpdatedAt(ctx) { return } await doWithDB(appId, async db => { - const metadata = await db.get(DocumentTypes.APP_METADATA) + const metadata = await db.get(DocumentType.APP_METADATA) metadata.updatedAt = new Date().toISOString() metadata.updatedBy = getGlobalIDFromUserMetadataID(ctx.user.userId) diff --git a/packages/server/src/migrations/functions/appUrls.ts b/packages/server/src/migrations/functions/appUrls.ts index 1446fcafc0..1b563bb932 100644 --- a/packages/server/src/migrations/functions/appUrls.ts +++ b/packages/server/src/migrations/functions/appUrls.ts @@ -1,4 +1,4 @@ -const { DocumentTypes } = require("@budibase/backend-core/db") +const { DocumentType } = require("@budibase/backend-core/db") import { getAppUrl } from "../../api/controllers/application" /** @@ -11,7 +11,7 @@ import { getAppUrl } from "../../api/controllers/application" export const run = async (appDb: any) => { let metadata try { - metadata = await appDb.get(DocumentTypes.APP_METADATA) + 
metadata = await appDb.get(DocumentType.APP_METADATA) } catch (e) { // sometimes the metadata document doesn't exist // exit early instead of failing the migration diff --git a/packages/server/src/migrations/functions/backfill/app.ts b/packages/server/src/migrations/functions/backfill/app.ts index 476344e7b0..51a37108b0 100644 --- a/packages/server/src/migrations/functions/backfill/app.ts +++ b/packages/server/src/migrations/functions/backfill/app.ts @@ -62,7 +62,7 @@ export const run = async (appDb: any) => { await events.backfillCache.start(EVENTS) let timestamp: string | number = DEFAULT_TIMESTAMP - const app: App = await appDb.get(dbUtils.DocumentTypes.APP_METADATA) + const app: App = await appDb.get(dbUtils.DocumentType.APP_METADATA) if (app.createdAt) { timestamp = app.createdAt as string } diff --git a/packages/server/src/migrations/functions/backfill/app/queries.ts b/packages/server/src/migrations/functions/backfill/app/queries.ts index b2ea31c19c..e66c7af841 100644 --- a/packages/server/src/migrations/functions/backfill/app/queries.ts +++ b/packages/server/src/migrations/functions/backfill/app/queries.ts @@ -1,6 +1,6 @@ import { events } from "@budibase/backend-core" import { getQueryParams } from "../../../../db/utils" -import { Query, Datasource } from "@budibase/types" +import { Query, Datasource, SourceName } from "@budibase/types" const getQueries = async (appDb: any): Promise => { const response = await appDb.allDocs( @@ -31,8 +31,9 @@ export const backfill = async (appDb: any, timestamp: string | number) => { // and the query has not if (e.status === 404) { datasource = { + type: "unknown", _id: query.datasourceId, - source: "unknown", + source: SourceName.UNKNOWN, } } else { throw e diff --git a/packages/server/src/migrations/functions/backfill/global/users.ts b/packages/server/src/migrations/functions/backfill/global/users.ts index 5f811614b4..05c5f8f56e 100644 --- a/packages/server/src/migrations/functions/backfill/global/users.ts +++ 
b/packages/server/src/migrations/functions/backfill/global/users.ts @@ -4,7 +4,7 @@ import { DEFAULT_TIMESTAMP } from ".." // manually define user doc params - normally server doesn't read users from the db const getUserParams = (props: any) => { - return dbUtils.getDocParams(dbUtils.DocumentTypes.USER, null, props) + return dbUtils.getDocParams(dbUtils.DocumentType.USER, null, props) } export const getUsers = async (globalDb: any): Promise => { diff --git a/packages/server/src/migrations/functions/tests/appUrls.spec.js b/packages/server/src/migrations/functions/tests/appUrls.spec.js index fe5b9aeeae..4551bac160 100644 --- a/packages/server/src/migrations/functions/tests/appUrls.spec.js +++ b/packages/server/src/migrations/functions/tests/appUrls.spec.js @@ -1,4 +1,4 @@ -const { DocumentTypes, doWithDB } = require("@budibase/backend-core/db") +const { DocumentType, doWithDB } = require("@budibase/backend-core/db") const TestConfig = require("../../../tests/utilities/TestConfiguration") const migration = require("../appUrls") @@ -15,11 +15,11 @@ describe("run", () => { it("runs successfully", async () => { const app = await config.createApp("testApp") const metadata = await doWithDB(app.appId, async db => { - const metadataDoc = await db.get(DocumentTypes.APP_METADATA) + const metadataDoc = await db.get( DocumentType.APP_METADATA) delete metadataDoc.url await db.put(metadataDoc) await migration.run(db) - return await db.get(DocumentTypes.APP_METADATA) + return await db.get( DocumentType.APP_METADATA) }) expect(metadata.url).toEqual("/testapp") }) diff --git a/packages/server/src/migrations/tests/index.spec.ts b/packages/server/src/migrations/tests/index.spec.ts index 72bf2ff1a9..e2c2fb5c96 100644 --- a/packages/server/src/migrations/tests/index.spec.ts +++ b/packages/server/src/migrations/tests/index.spec.ts @@ -2,7 +2,7 @@ import { events, migrations, tenancy, - DocumentTypes, + DocumentType, context, db, } from "@budibase/backend-core" @@ -17,7 +17,7 @@ const 
timestamp = mocks.date.MOCK_DATE.toISOString() const clearMigrations = async () => { const dbs = [context.getDevAppDB(), context.getProdAppDB()] for (const db of dbs) { - const doc = await db.get(DocumentTypes.MIGRATIONS) + const doc = await db.get(DocumentType.MIGRATIONS) const newDoc = { _id: doc._id, _rev: doc._rev } await db.put(newDoc) } diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js index da108b6978..d1dbe59def 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.js +++ b/packages/server/src/tests/utilities/TestConfiguration.js @@ -25,6 +25,7 @@ const newid = require("../../db/newid") const context = require("@budibase/backend-core/context") const { generateDevInfoID, SEPARATOR } = require("@budibase/backend-core/db") const { encrypt } = require("@budibase/backend-core/encryption") +const { DocumentType } = require("../../db/utils") const GLOBAL_USER_ID = "us_uuid1" const EMAIL = "babs@babs.com" @@ -53,6 +54,10 @@ class TestConfiguration { return this.app } + getProdApp() { + return this.prodApp + } + getAppId() { return this.appId } @@ -106,19 +111,11 @@ class TestConfiguration { // UTILS - async _req(body, params, controlFunc, opts = { prodApp: false }) { + async _req(body, params, controlFunc) { // create a fake request ctx const request = {} - - // set the app id - let appId - if (opts.prodApp) { - appId = this.prodAppId - } else { - appId = this.appId - } + const appId = this.appId request.appId = appId - // fake cookies, we don't need them request.cookies = { set: () => {}, get: () => {} } request.config = { jwtSecret: env.JWT_SECRET } @@ -344,14 +341,10 @@ class TestConfiguration { await this._req(null, null, controllers.deploy.deployApp) const prodAppId = this.getAppId().replace("_dev", "") this.prodAppId = prodAppId + return context.doInAppContext(prodAppId, async () => { - const appPackage = await this._req( - null, - { appId: prodAppId }, - 
controllers.app.fetchAppPackage, - { prodApp: true } - ) - return appPackage.application + const db = context.getProdAppDB() + return await db.get(DocumentType.APP_METADATA) }) } diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 0b8d2e89bd..d04c49ce79 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -11,7 +11,7 @@ import { storeLog } from "../automations/logging" import { Automation, AutomationStep, AutomationStatus } from "@budibase/types" import { LoopStep, - LoopStepTypes, + LoopStepType, LoopInput, AutomationEvent, TriggerOutput, @@ -35,12 +35,12 @@ function typecastForLooping(loopStep: LoopStep, input: LoopInput) { } try { switch (loopStep.inputs.option) { - case LoopStepTypes.ARRAY: + case LoopStepType.ARRAY: if (typeof input.binding === "string") { return JSON.parse(input.binding) } break - case LoopStepTypes.STRING: + case LoopStepType.STRING: if (Array.isArray(input.binding)) { return input.binding.join(",") } diff --git a/packages/server/src/utilities/routing/index.js b/packages/server/src/utilities/routing/index.js index b68001c3c3..963119130b 100644 --- a/packages/server/src/utilities/routing/index.js +++ b/packages/server/src/utilities/routing/index.js @@ -1,11 +1,11 @@ const { createRoutingView } = require("../../db/views/staticViews") -const { ViewNames, getQueryIndex, UNICODE_MAX } = require("../../db/utils") +const { ViewName, getQueryIndex, UNICODE_MAX } = require("../../db/utils") const { getAppDB } = require("@budibase/backend-core/context") exports.getRoutingInfo = async () => { const db = getAppDB() try { - const allRouting = await db.query(getQueryIndex(ViewNames.ROUTING), { + const allRouting = await db.query(getQueryIndex(ViewName.ROUTING), { startKey: "", endKey: UNICODE_MAX, }) diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js index 36a02eb9b1..e4c364eaf3 
100644 --- a/packages/server/src/utilities/rowProcessor/index.js +++ b/packages/server/src/utilities/rowProcessor/index.js @@ -278,7 +278,7 @@ exports.outputProcessing = async (table, rows, opts = { squash: true }) => { for (let [property, column] of Object.entries(table.schema)) { if (column.type === FieldTypes.ATTACHMENT) { for (let row of enriched) { - if (row[property] == null || row[property].length === 0) { + if (row[property] == null || !Array.isArray(row[property])) { continue } row[property].forEach(attachment => { diff --git a/packages/server/src/utilities/security.js b/packages/server/src/utilities/security.js index a0d9c8d57f..d8133e585b 100644 --- a/packages/server/src/utilities/security.js +++ b/packages/server/src/utilities/security.js @@ -8,7 +8,7 @@ const { lowerBuiltinRoleID, getBuiltinRoles, } = require("@budibase/backend-core/roles") -const { DocumentTypes } = require("../db/utils") +const { DocumentType } = require("../db/utils") const CURRENTLY_SUPPORTED_LEVELS = [ PermissionLevels.WRITE, @@ -17,19 +17,19 @@ const CURRENTLY_SUPPORTED_LEVELS = [ ] exports.getPermissionType = resourceId => { - const docType = Object.values(DocumentTypes).filter(docType => + const docType = Object.values(DocumentType).filter(docType => resourceId.startsWith(docType) )[0] switch (docType) { - case DocumentTypes.TABLE: - case DocumentTypes.ROW: + case DocumentType.TABLE: + case DocumentType.ROW: return PermissionTypes.TABLE - case DocumentTypes.AUTOMATION: + case DocumentType.AUTOMATION: return PermissionTypes.AUTOMATION - case DocumentTypes.WEBHOOK: + case DocumentType.WEBHOOK: return PermissionTypes.WEBHOOK - case DocumentTypes.QUERY: - case DocumentTypes.DATASOURCE: + case DocumentType.QUERY: + case DocumentType.DATASOURCE: return PermissionTypes.QUERY default: // views don't have an ID, will end up here diff --git a/packages/server/yarn.lock b/packages/server/yarn.lock index fa99a3e32b..6217a94e9f 100644 --- a/packages/server/yarn.lock +++ 
b/packages/server/yarn.lock @@ -156,9 +156,9 @@ adal-node "^0.2.2" "@azure/storage-blob@^12.5.0": - version "12.10.0" - resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.10.0.tgz#b92269f45a1765700a900b41ca81a474a6e36ea4" - integrity sha512-FBEPKGnvtQJS8V8Tg1P9obgmVD9AodrIfwtwhBpsjenClhFyugMp3HPJY0tF7rInUB/CivKBCbnQKrUnKxqxzw== + version "12.11.0" + resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.11.0.tgz#2e27902ab293715411ab1f7c8fae422ad0b4b827" + integrity sha512-na+FisoARuaOWaHWpmdtk3FeuTWf2VWamdJ9/TJJzj5ZdXPLC3juoDgFs6XVuJIoK30yuBpyFBEDXVRK4pB7Tg== dependencies: "@azure/abort-controller" "^1.0.0" "@azure/core-http" "^2.0.0" @@ -1094,18 +1094,19 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/backend-core@1.1.33-alpha.1": - version "1.1.33-alpha.1" - resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.1.33-alpha.1.tgz#2c483f71c99f0dfd07e78dfc162be979e97a5a37" - integrity sha512-1oY1GPWQ4ym4m75QJxnCBOmEGc+yuXRGafKpBGXXqpyA7qMP5TSYpEGrrpX3uKrIsLIRsiaqw3oPr0T8aL2LQw== +"@budibase/backend-core@1.2.41-alpha.0": + version "1.2.41-alpha.0" + resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.2.41-alpha.0.tgz#ee3bc032ae89e347b9fb220303a9d040df6f5823" + integrity sha512-x6wJpBBFj6a1pGu2OR1vBPHiA4LvdQpVxr6f1lZa2+nAhuXVtEhrdnvBVzK2hJ52dOTTTGaw2A5tTkbf7861CA== dependencies: - "@budibase/types" "1.1.33-alpha.1" + "@budibase/types" "1.2.41-alpha.0" "@techpass/passport-openidconnect" "0.3.2" aws-sdk "2.1030.0" bcrypt "5.0.1" dotenv "16.0.1" emitter-listener "1.1.2" ioredis "4.28.0" + joi "17.6.0" jsonwebtoken "8.5.1" koa-passport "4.1.4" lodash "4.17.21" @@ -1177,13 +1178,13 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/pro@1.1.33-alpha.1": - version 
"1.1.33-alpha.1" - resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.1.33-alpha.1.tgz#2778f4895b5dec982ef8245d0c36247c6242cd73" - integrity sha512-M0gust58aTyPtpp2CKhFqsmzVYu1ZttW1l6p9tePClhfp4FvkYTy/UsHLV7Pg3NbCLDo3qpCzTpPTYnePtf/vg== +"@budibase/pro@1.2.41-alpha.0": + version "1.2.41-alpha.0" + resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.2.41-alpha.0.tgz#4f27dbeee24ddc58e2a4e75cd90b53543f742722" + integrity sha512-LiKuO7/9GGzf3xfMmNTZQl2r4jmGgqnUf7fA5ub/MRyWYM7ZjPWEKW1V9OAk3vXwv9a+4V6FUxJQ1sDUZ3DT/Q== dependencies: - "@budibase/backend-core" "1.1.33-alpha.1" - "@budibase/types" "1.1.33-alpha.1" + "@budibase/backend-core" "1.2.41-alpha.0" + "@budibase/types" "1.2.41-alpha.0" "@koa/router" "8.0.8" joi "17.6.0" node-fetch "^2.6.1" @@ -1206,10 +1207,10 @@ svelte-apexcharts "^1.0.2" svelte-flatpickr "^3.1.0" -"@budibase/types@1.1.33-alpha.1": - version "1.1.33-alpha.1" - resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.1.33-alpha.1.tgz#8a1253208a618b439d85977c3879b0184b6487e8" - integrity sha512-BEIJqb9WjAck16rkgtGHlnxANhkK8OXbJLBxu+eB4x7F6tSmMI+T6zsBkGE0ehHmqa6YNzY7OldKLc967mAaLA== +"@budibase/types@1.2.41-alpha.0": + version "1.2.41-alpha.0" + resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.2.41-alpha.0.tgz#01071ce6ecef6799b04b1defce2b38e81b615f9f" + integrity sha512-2jkeToXsujYUXb6P2DFTFUOFv56GgImJ4webP8xr5cPGL0/xmhWGMd/lLxLt96RtMQp43UmVWZm1BA7TZw4fJg== "@bull-board/api@3.7.0": version "3.7.0" @@ -1343,33 +1344,16 @@ protobufjs "^6.11.3" yargs "^16.2.0" -"@hapi/address@^4.1.0": - version "4.1.0" - resolved "https://registry.yarnpkg.com/@hapi/address/-/address-4.1.0.tgz#d60c5c0d930e77456fdcde2598e77302e2955e1d" - integrity sha512-SkszZf13HVgGmChdHo/PxchnSaCJ6cetVqLzyciudzZRT0jcOouIF/Q93mgjw8cce+D+4F4C1Z/WrfFN+O3VHQ== - dependencies: - "@hapi/hoek" "^9.0.0" - "@hapi/bourne@^2.0.0": version "2.1.0" resolved 
"https://registry.yarnpkg.com/@hapi/bourne/-/bourne-2.1.0.tgz#66aff77094dc3080bd5df44ec63881f2676eb020" integrity sha512-i1BpaNDVLJdRBEKeJWkVO6tYX6DMFBuwMhSuWqLsY4ufeTKGVuV5rBsUhxPayXqnnWHgXUAmWK16H/ykO5Wj4Q== -"@hapi/formula@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@hapi/formula/-/formula-2.0.0.tgz#edade0619ed58c8e4f164f233cda70211e787128" - integrity sha512-V87P8fv7PI0LH7LiVi8Lkf3x+KCO7pQozXRssAHNXXL9L1K+uyu4XypLXwxqVDKgyQai6qj3/KteNlrqDx4W5A== - "@hapi/hoek@^9.0.0": version "9.3.0" resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb" integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ== -"@hapi/pinpoint@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@hapi/pinpoint/-/pinpoint-2.0.0.tgz#805b40d4dbec04fc116a73089494e00f073de8df" - integrity sha512-vzXR5MY7n4XeIvLpfl3HtE3coZYO4raKXW766R6DZw/6aLqR26iuZ109K7a0NtF2Db0jxqh7xz2AxkUwpUFybw== - "@hapi/topo@^5.0.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@hapi/topo/-/topo-5.1.0.tgz#dc448e332c6c6e37a4dc02fd84ba8d44b9afb012" @@ -1959,29 +1943,24 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/gen-mapping@^0.3.0": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" - integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + version "0.3.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz#cf92a983c83466b8c0ce9124fadeaf09f7c66ea9" + integrity sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg== dependencies: - "@jridgewell/set-array" "^1.0.1" + "@jridgewell/set-array" "^1.0.0" "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" "@jridgewell/resolve-uri@^3.0.3": - version "3.1.0" - resolved 
"https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + version "3.0.7" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe" + integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA== "@jridgewell/set-array@^1.0.0": version "1.1.1" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== -"@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== - "@jridgewell/source-map@^0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" @@ -1991,11 +1970,11 @@ "@jridgewell/trace-mapping" "^0.3.9" "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + version "1.4.13" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c" + integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w== -"@jridgewell/trace-mapping@^0.3.7": +"@jridgewell/trace-mapping@^0.3.7", "@jridgewell/trace-mapping@^0.3.9": version "0.3.13" resolved 
"https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz#dcfe3e95f224c8fe97a87a5235defec999aa92ea" integrity sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w== @@ -2003,14 +1982,6 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" -"@jridgewell/trace-mapping@^0.3.9": - version "0.3.14" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" - integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== - dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jsdevtools/ono@^7.1.3": version "7.1.3" resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796" @@ -2855,7 +2826,7 @@ "@types/bson" "*" "@types/node" "*" -"@types/node-fetch@2.6.1", "@types/node-fetch@^2.5.0": +"@types/node-fetch@2.6.1": version "2.6.1" resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.1.tgz#8f127c50481db65886800ef496f20bbf15518975" integrity sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA== @@ -2863,6 +2834,14 @@ "@types/node" "*" form-data "^3.0.0" +"@types/node-fetch@^2.5.0": + version "2.6.2" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" + integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== + dependencies: + "@types/node" "*" + form-data "^3.0.0" + "@types/node@*", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0": version "17.0.41" resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.41.tgz#1607b2fd3da014ae5d4d1b31bc792a39348dfb9b" @@ -3731,6 +3710,11 @@ atomic-sleep@^1.0.0: resolved 
"https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== +available-typed-arrays@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" + integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== + aws-sdk@2.1030.0: version "2.1030.0" resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82" @@ -3747,9 +3731,9 @@ aws-sdk@2.1030.0: xml2js "0.4.19" aws-sdk@^2.878.0: - version "2.1152.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1152.0.tgz#73e4fb81b3a9c289234b5d6848bcdb854f169bdf" - integrity sha512-Lqwk0bDhm3vzpYb3AAM9VgGHeDpbB8+o7UJnP9R+CO23kJfi/XRpKihAcbyKDD/AUQ+O1LJaUVpvaJYLS9Am7w== + version "2.1174.0" + resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1174.0.tgz#3e2acb1ee29229cc5d97015b2d1a18c41e967979" + integrity sha512-t/Cwbdunmoj3WAI+u+hw/kr6mla1sYCn+VncxxIjkACStA47+ZTsfd7cQfpoVMit5KubkHaJ3SHX4/qvmt0Jfg== dependencies: buffer "4.9.2" events "1.1.1" @@ -3758,6 +3742,7 @@ aws-sdk@^2.878.0: querystring "0.2.0" sax "1.2.1" url "0.10.3" + util "^0.12.4" uuid "8.0.0" xml2js "0.4.19" @@ -5498,7 +5483,7 @@ error-inject@^1.0.0: resolved "https://registry.yarnpkg.com/error-inject/-/error-inject-1.0.0.tgz#e2b3d91b54aed672f309d950d154850fa11d4f37" integrity sha512-JM8N6PytDbmIYm1IhPWlo8vr3NtfjhDY/1MhD/a5b/aad/USE8a0+NsqE9d5n+GVGmuNkPQWm4bFQWv18d8tMg== -es-abstract@^1.17.5, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1: +es-abstract@^1.17.5, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.0, es-abstract@^1.20.1: version "1.20.1" resolved 
"https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814" integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA== @@ -7448,6 +7433,14 @@ is-accessor-descriptor@^1.0.0: dependencies: kind-of "^6.0.0" +is-arguments@^1.0.4: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -7754,6 +7747,17 @@ is-type-of@^1.0.0: is-class-hotfix "~0.0.6" isstream "~0.1.2" +is-typed-array@^1.1.3, is-typed-array@^1.1.9: + version "1.1.9" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.9.tgz#246d77d2871e7d9f5aeb1d54b9f52c71329ece67" + integrity sha512-kfrlnTTn8pZkfpJMUgYD7YZ3qzeJgWUn8XfVYBARc4wnmNOmLbmuuaAs3q5fvB0UJOn6yHAKaGTPM7d6ezoD/A== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + es-abstract "^1.20.0" + for-each "^0.3.3" + has-tostringtag "^1.0.0" + is-typedarray@^1.0.0, is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" @@ -8653,17 +8657,6 @@ jmespath@0.16.0: resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.16.0.tgz#b15b0a85dfd4d930d43e69ed605943c802785076" integrity sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw== -joi@17.2.1: - version "17.2.1" - resolved "https://registry.yarnpkg.com/joi/-/joi-17.2.1.tgz#e5140fdf07e8fecf9bc977c2832d1bdb1e3f2a0a" - integrity sha512-YT3/4Ln+5YRpacdmfEfrrKh50/kkgX3LgBltjqnlMPIYiZ4hxXZuVJcxmsvxsdeHg9soZfE3qXxHC2tMpCCBOA== - 
dependencies: - "@hapi/address" "^4.1.0" - "@hapi/formula" "^2.0.0" - "@hapi/hoek" "^9.0.0" - "@hapi/pinpoint" "^2.0.0" - "@hapi/topo" "^5.0.0" - joi@17.6.0: version "17.6.0" resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2" @@ -9949,11 +9942,16 @@ moment-timezone@^0.5.15, moment-timezone@^0.5.31: dependencies: moment ">= 2.9.0" -"moment@>= 2.9.0", moment@^2.29.3: +"moment@>= 2.9.0": version "2.29.3" resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw== +moment@^2.29.3: + version "2.29.4" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" + integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== + mongodb@3.6.3: version "3.6.3" resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-3.6.3.tgz#eddaed0cc3598474d7a15f0f2a5b04848489fd05" @@ -12287,7 +12285,7 @@ signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3: simple-lru-cache@^0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/simple-lru-cache/-/simple-lru-cache-0.0.2.tgz#d59cc3a193c1a5d0320f84ee732f6e4713e511dd" - integrity sha1-1ZzDoZPBpdAyD4Tucy9uRxPlEd0= + integrity sha512-uEv/AFO0ADI7d99OHDmh1QfYzQk/izT1vCmu/riQfh7qjBVUUgRT87E5s5h7CxWCA/+YoZerykpEthzVrW3LIw== simple-swizzle@^0.2.2: version "0.2.2" @@ -12358,9 +12356,9 @@ snowflake-promise@^4.5.0: snowflake-sdk "^1.6.0" snowflake-sdk@^1.6.0: - version "1.6.10" - resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.10.tgz#c6c4f267edbc50d3c1ef6fcc2651188bb8545dce" - integrity sha512-kguQQSGhmNqZfmN/yZNDaIaMMktTcrTYBjtyx+szJzV69b5F+5b77btpYp+bCFqao69otVM+IPUtb3sugvCVnQ== + version "1.6.11" + resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.11.tgz#2797c816d0d2af6d56180949e1364e53df8a9c13" 
+ integrity sha512-w4oCXjNQ1peAJjhnrwihr+epYw1pSxbe5/+PdxexYb2rzowyOn0RA5PFbir90q/dx0jzM2gvPiHDjnSBEZ1/zA== dependencies: "@azure/storage-blob" "^12.5.0" "@techteamer/ocsp" "1.0.0" @@ -13104,9 +13102,9 @@ terser-webpack-plugin@^5.1.3: terser "^5.7.2" terser@^5.7.2: - version "5.14.2" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" - integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA== + version "5.14.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.0.tgz#eefeec9af5153f55798180ee2617f390bdd285e2" + integrity sha512-JC6qfIEkPBd9j1SMO3Pfn+A6w2kQV54tv+ABQLgZr7dA3k/DL/OBoYSWxzVpZev3J+bUHXfr55L8Mox7AaNo6g== dependencies: "@jridgewell/source-map" "^0.3.2" acorn "^8.5.0" @@ -13746,6 +13744,18 @@ util.promisify@^1.0.0, util.promisify@^1.0.1: has-symbols "^1.0.1" object.getownpropertydescriptors "^2.1.1" +util@^0.12.4: + version "0.12.4" + resolved "https://registry.yarnpkg.com/util/-/util-0.12.4.tgz#66121a31420df8f01ca0c464be15dfa1d1850253" + integrity sha512-bxZ9qtSlGUWSOy9Qa9Xgk11kSslpuZwaxCg4sNIDj6FLucDab2JxnHwyNTCpHMtK1MjoQiWQ6DiUMZYbSrO+Sw== + dependencies: + inherits "^2.0.3" + is-arguments "^1.0.4" + is-generator-function "^1.0.7" + is-typed-array "^1.1.3" + safe-buffer "^5.1.2" + which-typed-array "^1.1.2" + utils-merge@1.x.x: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" @@ -14029,6 +14039,18 @@ which-module@^2.0.0: resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= +which-typed-array@^1.1.2: + version "1.1.8" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.8.tgz#0cfd53401a6f334d90ed1125754a42ed663eb01f" + integrity 
sha512-Jn4e5PItbcAHyLoRDwvPj1ypu27DJbtdYXUa5zsinrUx77Uvfb0cXwwnGMTn7cjUfhhqgVQnVJCwF+7cgU7tpw== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + es-abstract "^1.20.0" + for-each "^0.3.3" + has-tostringtag "^1.0.0" + is-typed-array "^1.1.9" + which@^1.2.9: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" @@ -14071,7 +14093,23 @@ winston-transport@^4.5.0: readable-stream "^3.6.0" triple-beam "^1.3.0" -winston@^3.1.0, winston@^3.3.3: +winston@^3.1.0: + version "3.8.1" + resolved "https://registry.yarnpkg.com/winston/-/winston-3.8.1.tgz#76f15b3478cde170b780234e0c4cf805c5a7fb57" + integrity sha512-r+6YAiCR4uI3N8eQNOg8k3P3PqwAm20cLKlzVD9E66Ch39+LZC+VH1UKf9JemQj2B3QoUHfKD7Poewn0Pr3Y1w== + dependencies: + "@dabh/diagnostics" "^2.0.2" + async "^3.2.3" + is-stream "^2.0.0" + logform "^2.4.0" + one-time "^1.0.0" + readable-stream "^3.4.0" + safe-stable-stringify "^2.3.1" + stack-trace "0.0.x" + triple-beam "^1.3.0" + winston-transport "^4.5.0" + +winston@^3.3.3: version "3.7.2" resolved "https://registry.yarnpkg.com/winston/-/winston-3.7.2.tgz#95b4eeddbec902b3db1424932ac634f887c400b1" integrity sha512-QziIqtojHBoyzUOdQvQiar1DH0Xp9nF1A1y7NVy2DGEsz82SBDtOalS0ulTRGVT14xPX3WRWkCsdcJKqNflKng== diff --git a/packages/string-templates/manifest.json b/packages/string-templates/manifest.json index 748330add2..f8600121f1 100644 --- a/packages/string-templates/manifest.json +++ b/packages/string-templates/manifest.json @@ -74,7 +74,7 @@ "b" ], "numArgs": 2, - "example": "{{ product 10 5 }} -> 50", + "example": "{{ multiply 10 5 }} -> 50", "description": "

Return the product of a times b.

\n" }, "plus": { diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json index 4ea64ed854..586e4d5d39 100644 --- a/packages/string-templates/package.json +++ b/packages/string-templates/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/string-templates", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "description": "Handlebars wrapper for Budibase templating.", "main": "src/index.cjs", "module": "dist/bundle.mjs", diff --git a/packages/string-templates/scripts/gen-collection-info.js b/packages/string-templates/scripts/gen-collection-info.js index 29df10423f..bfc0ec79ac 100644 --- a/packages/string-templates/scripts/gen-collection-info.js +++ b/packages/string-templates/scripts/gen-collection-info.js @@ -108,6 +108,10 @@ function getCommentInfo(file, func) { if (examples.length > 0) { docs.example = examples.join(" ") } + // hacky example fix + if (docs.example && docs.example.includes("product")) { + docs.example = docs.example.replace("product", "multiply") + } docs.description = blocks[0].trim() return docs } @@ -166,7 +170,7 @@ function run() { // convert all markdown to HTML for (let collection of Object.values(outputJSON)) { for (let helper of Object.values(collection)) { - helper.description = marked(helper.description) + helper.description = marked.parse(helper.description) } } fs.writeFileSync(FILENAME, JSON.stringify(outputJSON, null, 2)) diff --git a/packages/string-templates/src/conversion/index.js b/packages/string-templates/src/conversion/index.js new file mode 100644 index 0000000000..bbe0c33942 --- /dev/null +++ b/packages/string-templates/src/conversion/index.js @@ -0,0 +1,129 @@ +const { getHelperList } = require("../helpers") + +function getLayers(fullBlock) { + let layers = [] + while (fullBlock.length) { + const start = fullBlock.lastIndexOf("("), + end = fullBlock.indexOf(")") + let layer + if (start === -1 || end === -1) { + layer = fullBlock.trim() + fullBlock = "" + } else { + const 
untrimmed = fullBlock.substring(start, end + 1) + layer = untrimmed.substring(1, untrimmed.length - 1).trim() + fullBlock = + fullBlock.slice(0, start) + + fullBlock.slice(start + untrimmed.length + 1, fullBlock.length) + } + layers.push(layer) + } + return layers +} + +function getVariable(variableName) { + if (!variableName || typeof variableName !== "string") { + return variableName + } + // it is an array + const arrayOrObject = [",", "{", ":"] + let contains = false + arrayOrObject.forEach(char => { + if (variableName.includes(char)) { + contains = true + } + }) + if (variableName.startsWith("[") && contains) { + return variableName + } + // it is just a number + if (!isNaN(parseFloat(variableName))) { + return variableName + } + if (variableName.startsWith("'") || variableName.startsWith('"')) { + return variableName + } + // extract variable + return `$("${variableName}")` +} + +function buildList(parts, value) { + function build() { + return parts + .map(part => (part.startsWith("helper") ? part : getVariable(part))) + .join(", ") + } + if (!value) { + return parts.length > 1 ? `${build()}` : build() + } else { + return parts.length === 0 ? value : `${value}, ${build()}` + } +} + +function splitBySpace(layer) { + const parts = [] + let started = null, + endChar = null, + last = 0 + function add(str) { + const startsWith = ["]"] + while (startsWith.indexOf(str.substring(0, 1)) !== -1) { + str = str.substring(1, str.length) + } + if (str.length > 0) { + parts.push(str.trim()) + } + } + const continuationChars = ["[", "'", '"'] + for (let index = 0; index < layer.length; index++) { + const char = layer[index] + if (continuationChars.indexOf(char) !== -1 && started == null) { + started = index + endChar = char === "[" ? "]" : char + } else if ( + char === endChar && + started != null && + layer[index + 1] !== "." 
+ ) { + add(layer.substring(started, index + 1)) + started = null + endChar = null + last = index + 1 + } else if (started == null && char === " ") { + add(layer.substring(last, index)) + last = index + } + } + if ( + (!layer.startsWith("[") || parts.length === 0) && + last !== layer.length - 1 + ) { + add(layer.substring(last, layer.length)) + } + return parts +} + +module.exports.convertHBSBlock = (block, blockNumber) => { + const braceLength = block[2] === "{" ? 3 : 2 + block = block.substring(braceLength, block.length - braceLength).trim() + const layers = getLayers(block) + + let value = null + const list = getHelperList() + for (let layer of layers) { + const parts = splitBySpace(layer) + if (value || parts.length > 1) { + // first of layer should always be the helper + const helper = parts.splice(0, 1) + if (list[helper]) { + value = `helpers.${helper}(${buildList(parts, value)})` + } + } + // no helpers + else { + value = getVariable(parts[0]) + } + } + // split by space will remove square brackets + return { variable: `var${blockNumber}`, value } +} diff --git a/packages/string-templates/src/helpers/external.js b/packages/string-templates/src/helpers/external.js index 0fa7f734d0..f461045f71 100644 --- a/packages/string-templates/src/helpers/external.js +++ b/packages/string-templates/src/helpers/external.js @@ -23,6 +23,9 @@ const ADDED_HELPERS = { duration: duration, } +exports.externalCollections = EXTERNAL_FUNCTION_COLLECTIONS +exports.addedHelpers = ADDED_HELPERS + exports.registerAll = handlebars => { for (let [name, helper] of Object.entries(ADDED_HELPERS)) { handlebars.registerHelper(name, helper) diff --git a/packages/string-templates/src/helpers/index.js b/packages/string-templates/src/helpers/index.js index 76a4c5d2ca..f04fa58399 100644 --- a/packages/string-templates/src/helpers/index.js +++ b/packages/string-templates/src/helpers/index.js @@ -7,6 +7,7 @@ const { HelperFunctionBuiltin, LITERAL_MARKER, } = require("./constants") +const { 
getHelperList } = require("./list") const HTML_SWAPS = { "<": "<", @@ -91,3 +92,5 @@ module.exports.unregisterAll = handlebars => { // unregister all imported helpers externalHandlebars.unregisterAll(handlebars) } + +module.exports.getHelperList = getHelperList diff --git a/packages/string-templates/src/helpers/javascript.js b/packages/string-templates/src/helpers/javascript.js index 0173be0b54..951a9f534a 100644 --- a/packages/string-templates/src/helpers/javascript.js +++ b/packages/string-templates/src/helpers/javascript.js @@ -1,6 +1,7 @@ const { atob } = require("../utilities") const { cloneDeep } = require("lodash/fp") const { LITERAL_MARKER } = require("../helpers/constants") +const { getHelperList } = require("./list") // The method of executing JS scripts depends on the bundle being built. // This setter is used in the entrypoint (either index.cjs or index.mjs). @@ -45,6 +46,7 @@ module.exports.processJS = (handlebars, context) => { // app context. const sandboxContext = { $: path => getContextValue(path, cloneDeep(context)), + helpers: getHelperList(), } // Create a sandbox with our context and run the JS diff --git a/packages/string-templates/src/helpers/list.js b/packages/string-templates/src/helpers/list.js new file mode 100644 index 0000000000..a309b9e57f --- /dev/null +++ b/packages/string-templates/src/helpers/list.js @@ -0,0 +1,19 @@ +const externalHandlebars = require("./external") +const helperList = require("@budibase/handlebars-helpers") + +module.exports.getHelperList = () => { + let constructed = [] + for (let collection of externalHandlebars.externalCollections) { + constructed.push(helperList[collection]()) + } + const fullMap = {} + for (let collection of constructed) { + for (let [key, func] of Object.entries(collection)) { + fullMap[key] = func + } + } + for (let key of Object.keys(externalHandlebars.addedHelpers)) { + fullMap[key] = externalHandlebars.addedHelpers[key] + } + return fullMap +} diff --git 
a/packages/string-templates/src/index.cjs b/packages/string-templates/src/index.cjs index d0de680aca..870e14493a 100644 --- a/packages/string-templates/src/index.cjs +++ b/packages/string-templates/src/index.cjs @@ -19,6 +19,7 @@ module.exports.doesContainStrings = templates.doesContainStrings module.exports.doesContainString = templates.doesContainString module.exports.disableEscaping = templates.disableEscaping module.exports.findHBSBlocks = templates.findHBSBlocks +module.exports.convertToJS = templates.convertToJS /** * Use vm2 to run JS scripts in a node env diff --git a/packages/string-templates/src/index.js b/packages/string-templates/src/index.js index f4feceac4b..eae545de14 100644 --- a/packages/string-templates/src/index.js +++ b/packages/string-templates/src/index.js @@ -8,6 +8,7 @@ const { FIND_ANY_HBS_REGEX, findDoubleHbsInstances, } = require("./utilities") +const { convertHBSBlock } = require("./conversion") const hbsInstance = handlebars.create() registerAll(hbsInstance) @@ -342,3 +343,31 @@ module.exports.findHBSBlocks = string => { module.exports.doesContainString = (template, string) => { return exports.doesContainStrings(template, [string]) } + +module.exports.convertToJS = hbs => { + const blocks = exports.findHBSBlocks(hbs) + let js = "return `", + prevBlock = null + const variables = {} + if (blocks.length === 0) { + js += hbs + } + let count = 1 + for (let block of blocks) { + let stringPart = hbs + if (prevBlock) { + stringPart = stringPart.split(prevBlock)[1] + } + stringPart = stringPart.split(block)[0] + prevBlock = block + const { variable, value } = convertHBSBlock(block, count++) + variables[variable] = value + js += `${stringPart.split()}\${${variable}}` + } + let varBlock = "" + for (let [variable, value] of Object.entries(variables)) { + varBlock += `const ${variable} = ${value};\n` + } + js += "`;" + return `${varBlock}${js}` +} diff --git a/packages/string-templates/src/index.mjs b/packages/string-templates/src/index.mjs index 
3d115cdec1..34cb90ea34 100644 --- a/packages/string-templates/src/index.mjs +++ b/packages/string-templates/src/index.mjs @@ -19,6 +19,7 @@ export const doesContainStrings = templates.doesContainStrings export const doesContainString = templates.doesContainString export const disableEscaping = templates.disableEscaping export const findHBSBlocks = templates.findHBSBlocks +export const convertToJS = templates.convertToJS /** * Use polyfilled vm to run JS scripts in a browser Env diff --git a/packages/string-templates/test/hbsToJs.spec.js b/packages/string-templates/test/hbsToJs.spec.js new file mode 100644 index 0000000000..63bd80db81 --- /dev/null +++ b/packages/string-templates/test/hbsToJs.spec.js @@ -0,0 +1,132 @@ +const { + convertToJS +} = require("../src/index.cjs") + +function checkLines(response, lines) { + const toCheck = response.split("\n") + let count = 0 + for (let line of lines) { + expect(toCheck[count++]).toBe(line) + } +} + +describe("Test that the string processing works correctly", () => { + it("should convert string without HBS", () => { + const response = convertToJS("Hello my name is Michael") + expect(response).toBe("return `Hello my name is Michael`;") + }) + + it("basic example with square brackets", () => { + const response = convertToJS("{{ [query] }}") + checkLines(response, [ + "const var1 = $(\"[query]\");", + "return `${var1}`;", + ]) + }) + + it("handle properties", () => { + const response = convertToJS("{{ [query].id }}") + checkLines(response, [ + "const var1 = $(\"[query].id\");", + "return `${var1}`;", + ]) + }) + + it("should convert some basic HBS strings", () => { + const response = convertToJS("Hello {{ name }}, welcome to {{ company }}!") + checkLines(response, [ + "const var1 = $(\"name\");", + "const var2 = $(\"company\");", + "return `Hello ${var1}, welcome to ${var2}`;", + ]) + }) + + it("should handle many square brackets in helpers", () => { + const response = convertToJS("Hello {{ avg [user].[_id] [user].[_rev] }}") 
+ checkLines(response, [ + "const var1 = helpers.avg($(\"[user].[_id]\"), $(\"[user].[_rev]\"));", + "return `Hello ${var1}`;", + ]) + }) + + it("should handle one of the examples (after)", () => { + const response = convertToJS("{{ after [1, 2, 3] 1}}") + checkLines(response, [ + "const var1 = helpers.after([1, 2, 3], 1);", + "return `${var1}`;", + ]) + }) + + it("should handle one of the examples (equalsLength)", () => { + const response = convertToJS("{{equalsLength '[1,2,3]' 3}}") + checkLines(response, [ + "const var1 = helpers.equalsLength('[1,2,3]', 3);", + "return `${var1}`;" + ]) + }) + + it("should handle one of the examples (pluck)", () => { + const response = convertToJS("{{pluck [{ 'name': 'Bob' }] 'name' }}") + checkLines(response, [ + "const var1 = helpers.pluck([{ 'name': 'Bob' }], 'name');", + "return `${var1}`;", + ]) + }) + + it("should handle sorting an array", () => { + const response = convertToJS("{{ sort ['b', 'a', 'c'] }}") + checkLines(response, [ + "const var1 = helpers.sort(['b', 'a', 'c']);", + "return `${var1}`;", + ]) + }) + + it("should handle a helper block", () => { + const response = convertToJS("This is the average: {{ avg array }}") + checkLines(response, [ + "const var1 = helpers.avg($(\"array\"));", + "return `This is the average: ${var1}`;", + ]) + }) + + it("should handle multi-variable helper", () => { + const response = convertToJS("This is the average: {{ join ( avg val1 val2 val3 ) }}") + checkLines(response, [ + "const var1 = helpers.join(helpers.avg($(\"val1\"), $(\"val2\"), $(\"val3\")));", + "return `This is the average: ${var1}`;", + ]) + }) + + it("should handle a complex statement", () => { + const response = convertToJS("This is the average: {{ join ( avg val1 val2 val3 ) val4 }}") + checkLines(response, [ + "const var1 = helpers.join(helpers.avg($(\"val1\"), $(\"val2\"), $(\"val3\")), $(\"val4\"));", + "return `This is the average: ${var1}`;", + ]) + }) + + it("should handle square brackets", () => { + const 
response = convertToJS("This is: {{ [val thing] }}") + checkLines(response, [ + "const var1 = $(\"[val thing]\");", + "return `This is: ${var1}`;", + ]) + }) + + it("should handle square brackets with properties", () => { + const response = convertToJS("{{ [user].[_id] }}") + checkLines(response, [ + "const var1 = $(\"[user].[_id]\");", + "return `${var1}`;", + ]) + }) + + it("should handle multiple complex statements", () => { + const response = convertToJS("average: {{ avg ( abs val1 ) val2 }} add: {{ add 1 2 }}") + checkLines(response, [ + "const var1 = helpers.avg(helpers.abs($(\"val1\")), $(\"val2\"));", + "const var2 = helpers.add(1, 2);", + "return `average: ${var1} add: ${var2}`;", + ]) + }) +}) \ No newline at end of file diff --git a/packages/types/package.json b/packages/types/package.json index 3be14d337c..ed22b489c1 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/types", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "description": "Budibase types", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/packages/types/src/api/index.ts b/packages/types/src/api/index.ts new file mode 100644 index 0000000000..36a400ee16 --- /dev/null +++ b/packages/types/src/api/index.ts @@ -0,0 +1 @@ +export * from "./web" diff --git a/packages/types/src/api/web/.keep b/packages/types/src/api/web/.keep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/packages/types/src/api/web/analytics.ts b/packages/types/src/api/web/analytics.ts new file mode 100644 index 0000000000..018c85ded2 --- /dev/null +++ b/packages/types/src/api/web/analytics.ts @@ -0,0 +1,9 @@ +export enum PingSource { + BUILDER = "builder", + APP = "app", +} + +export interface AnalyticsPingRequest { + source: PingSource + timezone: string +} diff --git a/packages/types/src/api/web/index.ts b/packages/types/src/api/web/index.ts new file mode 100644 index 0000000000..b2258fe18e --- /dev/null +++ 
b/packages/types/src/api/web/index.ts @@ -0,0 +1 @@ +export * from "./analytics" diff --git a/packages/types/src/documents/app/datasource.ts b/packages/types/src/documents/app/datasource.ts index 3a8704a0a9..efdc2ca1bd 100644 --- a/packages/types/src/documents/app/datasource.ts +++ b/packages/types/src/documents/app/datasource.ts @@ -1,5 +1,17 @@ import { Document } from "../document" +import { SourceName } from "../../sdk" +import { Table } from "./table" export interface Datasource extends Document { - source: string + type: string + name?: string + source: SourceName + // the config is defined by the schema + config?: { + [key: string]: string | number | boolean + } + plus?: boolean + entities?: { + [key: string]: Table + } } diff --git a/packages/types/src/documents/app/row.ts b/packages/types/src/documents/app/row.ts index d053d3d938..ee5c0231e7 100644 --- a/packages/types/src/documents/app/row.ts +++ b/packages/types/src/documents/app/row.ts @@ -1,3 +1,23 @@ import { Document } from "../document" -export interface Row extends Document {} +export enum FieldType { + STRING = "string", + LONGFORM = "longform", + OPTIONS = "options", + NUMBER = "number", + BOOLEAN = "boolean", + ARRAY = "array", + DATETIME = "datetime", + ATTACHMENT = "attachment", + LINK = "link", + FORMULA = "formula", + AUTO = "auto", + JSON = "json", + INTERNAL = "internal", +} + +export interface Row extends Document { + type?: string + tableId?: string + [key: string]: any +} diff --git a/packages/types/src/documents/app/table.ts b/packages/types/src/documents/app/table.ts index 4ee7afd03c..72cff4f056 100644 --- a/packages/types/src/documents/app/table.ts +++ b/packages/types/src/documents/app/table.ts @@ -1,6 +1,52 @@ import { Document } from "../document" import { View } from "./view" -export interface Table extends Document { - views: { [key: string]: View } +export interface FieldSchema { + // TODO: replace with field types enum when done + type: string + externalType?: string + 
fieldName?: string + name: string + tableId?: string + relationshipType?: string + through?: string + foreignKey?: string + autocolumn?: boolean + subtype?: string + throughFrom?: string + throughTo?: string + formula?: string + formulaType?: string + main?: boolean + ignoreTimezones?: boolean + meta?: { + toTable: string + toKey: string + } + constraints?: { + type?: string + email?: boolean + inclusion?: string[] + length?: { + minimum?: string | number + maximum?: string | number + } + presence?: boolean + } +} + +export interface TableSchema { + [key: string]: FieldSchema +} + +export interface Table extends Document { + type?: string + views?: { [key: string]: View } + name: string + primary?: string[] + schema: TableSchema + primaryDisplay?: string + sourceId?: string + relatedFormula?: string[] + constrained?: string[] } diff --git a/packages/types/src/documents/index.ts b/packages/types/src/documents/index.ts index 7938052425..4f5b278a4b 100644 --- a/packages/types/src/documents/index.ts +++ b/packages/types/src/documents/index.ts @@ -2,3 +2,4 @@ export * from "./account" export * from "./app" export * from "./global" export * from "./platform" +export * from "./document" diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index b7453c7349..4a645c5266 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -2,3 +2,4 @@ export * from "./documents" export * from "./sdk/events" export * from "./sdk/licensing" export * from "./sdk" +export * from "./api" diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts new file mode 100644 index 0000000000..970613b322 --- /dev/null +++ b/packages/types/src/sdk/datasources.ts @@ -0,0 +1,122 @@ +import { Table } from "../documents" + +export enum Operation { + CREATE = "CREATE", + READ = "READ", + UPDATE = "UPDATE", + DELETE = "DELETE", + BULK_CREATE = "BULK_CREATE", + CREATE_TABLE = "CREATE_TABLE", + UPDATE_TABLE = "UPDATE_TABLE", + DELETE_TABLE = 
"DELETE_TABLE", +} + +export enum SortDirection { + ASCENDING = "ASCENDING", + DESCENDING = "DESCENDING", +} + +export enum QueryType { + SQL = "sql", + JSON = "json", + FIELDS = "fields", +} + +export enum DatasourceFieldType { + STRING = "string", + LONGFORM = "longForm", + BOOLEAN = "boolean", + NUMBER = "number", + PASSWORD = "password", + LIST = "list", + OBJECT = "object", + JSON = "json", + FILE = "file", +} + +export enum SourceName { + POSTGRES = "POSTGRES", + DYNAMODB = "DYNAMODB", + MONGODB = "MONGODB", + ELASTICSEARCH = "ELASTICSEARCH", + COUCHDB = "COUCHDB", + SQL_SERVER = "SQL_SERVER", + S3 = "S3", + AIRTABLE = "AIRTABLE", + MYSQL = "MYSQL", + ARANGODB = "ARANGODB", + REST = "REST", + ORACLE = "ORACLE", + GOOGLE_SHEETS = "GOOGLE_SHEETS", + FIRESTORE = "FIRESTORE", + REDIS = "REDIS", + SNOWFLAKE = "SNOWFLAKE", + UNKNOWN = "unknown", +} + +export enum IncludeRelationship { + INCLUDE = 1, + EXCLUDE = 0, +} + +export enum FilterType { + STRING = "string", + FUZZY = "fuzzy", + RANGE = "range", + EQUAL = "equal", + NOT_EQUAL = "notEqual", + EMPTY = "empty", + NOT_EMPTY = "notEmpty", + ONE_OF = "oneOf", +} + +export interface QueryDefinition { + type: QueryType + displayName?: string + readable?: boolean + customisable?: boolean + fields?: object + urlDisplay?: boolean +} + +export interface ExtraQueryConfig { + [key: string]: { + displayName: string + type: string + required: boolean + data?: object + } +} + +export interface Integration { + docs: string + plus?: boolean + auth?: { type: string } + relationships?: boolean + description: string + friendlyName: string + type?: string + datasource: {} + query: { + [key: string]: QueryDefinition + } + extra?: ExtraQueryConfig +} + +export interface IntegrationBase { + create?(query: any): Promise + read?(query: any): Promise + update?(query: any): Promise + delete?(query: any): Promise +} + +export interface DatasourcePlus extends IntegrationBase { + tables: Record + schemaErrors: Record + + // if the 
datasource supports the use of bindings directly (to protect against SQL injection) + // this returns the format of the identifier + getBindingIdentifier(): string + getStringConcat(parts: string[]): string + buildSchema(datasourceId: string, entities: Record): any +} diff --git a/packages/types/src/sdk/events/event.ts b/packages/types/src/sdk/events/event.ts index f0e023df51..d7086370ec 100644 --- a/packages/types/src/sdk/events/event.ts +++ b/packages/types/src/sdk/events/event.ts @@ -135,7 +135,6 @@ export enum Event { // LICENSE LICENSE_UPGRADED = "license:upgraded", LICENSE_DOWNGRADED = "license:downgraded", - LICENSE_UPDATED = "license:updated", LICENSE_ACTIVATED = "license:activated", // ACCOUNT diff --git a/packages/types/src/sdk/events/serve.ts b/packages/types/src/sdk/events/serve.ts index fe5b723942..cb03c47e05 100644 --- a/packages/types/src/sdk/events/serve.ts +++ b/packages/types/src/sdk/events/serve.ts @@ -1,11 +1,15 @@ import { BaseEvent } from "./event" -export interface BuilderServedEvent extends BaseEvent {} +export interface BuilderServedEvent extends BaseEvent { + timezone: string +} export interface AppServedEvent extends BaseEvent { appVersion: string + timezone: string } export interface AppPreviewServedEvent extends BaseEvent { appVersion: string + timezone: string } diff --git a/packages/types/src/sdk/index.ts b/packages/types/src/sdk/index.ts index 0f2eee6e13..d6ca92f607 100644 --- a/packages/types/src/sdk/index.ts +++ b/packages/types/src/sdk/index.ts @@ -3,3 +3,5 @@ export * from "./context" export * from "./events" export * from "./licensing" export * from "./migrations" +export * from "./datasources" +export * from "./search" diff --git a/packages/types/src/sdk/migrations.ts b/packages/types/src/sdk/migrations.ts index bb32d2e045..23a4d6d097 100644 --- a/packages/types/src/sdk/migrations.ts +++ b/packages/types/src/sdk/migrations.ts @@ -46,6 +46,7 @@ export enum MigrationName { EVENT_APP_BACKFILL = "event_app_backfill", 
EVENT_GLOBAL_BACKFILL = "event_global_backfill", EVENT_INSTALLATION_BACKFILL = "event_installation_backfill", + GLOBAL_INFO_SYNC_USERS = "global_info_sync_users", } export interface MigrationDefinition { diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts new file mode 100644 index 0000000000..954ad42ac9 --- /dev/null +++ b/packages/types/src/sdk/search.ts @@ -0,0 +1,97 @@ +import { Operation, SortDirection } from "./datasources" +import { Row, Table } from "../documents" + +export interface SearchFilters { + allOr?: boolean + string?: { + [key: string]: string + } + fuzzy?: { + [key: string]: string + } + range?: { + [key: string]: { + high: number | string + low: number | string + } + } + equal?: { + [key: string]: any + } + notEqual?: { + [key: string]: any + } + empty?: { + [key: string]: any + } + notEmpty?: { + [key: string]: any + } + oneOf?: { + [key: string]: any[] + } + contains?: { + [key: string]: any[] + } + notContains?: { + [key: string]: any[] + } + containsAny?: { + [key: string]: any[] + } +} + +export interface SortJson { + [key: string]: SortDirection +} + +export interface PaginationJson { + limit: number + page?: string | number +} + +export interface RenameColumn { + old: string + updated: string +} + +export interface RelationshipsJson { + through?: string + from?: string + to?: string + fromPrimary?: string + toPrimary?: string + tableName: string + column: string +} + +export interface QueryJson { + endpoint: { + datasourceId: string + entityId: string + operation: Operation + schema?: string + } + resource: { + fields: string[] + } + filters?: SearchFilters + sort?: SortJson + paginate?: PaginationJson + body?: Row | Row[] + table?: Table + meta?: { + table?: Table + tables?: Record + renamed: RenameColumn + } + extra?: { + idFilter?: SearchFilters + } + relationships?: RelationshipsJson[] +} + +export interface SqlQuery { + sql: string + bindings?: string[] +} diff --git a/packages/worker/Dockerfile 
b/packages/worker/Dockerfile index 607352ad92..883a6c299b 100644 --- a/packages/worker/Dockerfile +++ b/packages/worker/Dockerfile @@ -22,6 +22,6 @@ EXPOSE 4001 ENV NODE_ENV=production ENV CLUSTER_MODE=${CLUSTER_MODE} ENV SERVICE=worker-service -ENV POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS +ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU CMD ["./docker_run.sh"] diff --git a/packages/worker/package.json b/packages/worker/package.json index 27bb340968..ddff762a98 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/worker", "email": "hi@budibase.com", - "version": "1.1.33-alpha.1", + "version": "1.2.41-alpha.0", "description": "Budibase background service", "main": "src/index.ts", "repository": { @@ -35,10 +35,10 @@ "author": "Budibase", "license": "GPL-3.0", "dependencies": { - "@budibase/backend-core": "1.1.33-alpha.1", - "@budibase/pro": "1.1.33-alpha.1", - "@budibase/string-templates": "1.1.33-alpha.1", - "@budibase/types": "1.1.33-alpha.1", + "@budibase/backend-core": "1.2.41-alpha.0", + "@budibase/pro": "1.2.41-alpha.0", + "@budibase/string-templates": "1.2.41-alpha.0", + "@budibase/types": "1.2.41-alpha.0", "@koa/router": "8.0.8", "@sentry/node": "6.17.7", "@techpass/passport-openidconnect": "0.3.2", diff --git a/packages/worker/src/api/controllers/global/roles.js b/packages/worker/src/api/controllers/global/roles.js index 0859fc1a21..800311b6be 100644 --- a/packages/worker/src/api/controllers/global/roles.js +++ b/packages/worker/src/api/controllers/global/roles.js @@ -2,7 +2,7 @@ const { getAllRoles } = require("@budibase/backend-core/roles") const { getAllApps, getProdAppID, - DocumentTypes, + DocumentType, } = require("@budibase/backend-core/db") const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const { user: userCache } = require("@budibase/backend-core/cache") @@ -36,7 +36,7 @@ exports.find = async ctx => { const appId = 
ctx.params.appId await doInAppContext(appId, async () => { const db = getAppDB() - const app = await db.get(DocumentTypes.APP_METADATA) + const app = await db.get(DocumentType.APP_METADATA) ctx.body = { roles: await getAllRoles(), name: app.name, diff --git a/packages/worker/src/api/controllers/global/sessions.js b/packages/worker/src/api/controllers/global/sessions.js deleted file mode 100644 index 4a334037d4..0000000000 --- a/packages/worker/src/api/controllers/global/sessions.js +++ /dev/null @@ -1,37 +0,0 @@ -const { - getAllSessions, - getUserSessions, - invalidateSessions, -} = require("@budibase/backend-core/sessions") - -exports.fetch = async ctx => { - ctx.body = await getAllSessions() -} - -exports.find = async ctx => { - const { userId } = ctx.params - const sessions = await getUserSessions(userId) - ctx.body = sessions.map(session => session.value) -} - -exports.invalidateUser = async ctx => { - const { userId } = ctx.params - await invalidateSessions(userId) - ctx.body = { - message: "User sessions invalidated", - } -} - -exports.selfSessions = async ctx => { - const userId = ctx.user._id - ctx.body = await getUserSessions(userId) -} - -exports.invalidateSession = async ctx => { - const userId = ctx.user._id - const { sessionId } = ctx.params - await invalidateSessions(userId, sessionId) - ctx.body = { - message: "Session invalidated successfully.", - } -} diff --git a/packages/worker/src/api/controllers/global/users.ts b/packages/worker/src/api/controllers/global/users.ts index 17e655edb3..1f9af3514b 100644 --- a/packages/worker/src/api/controllers/global/users.ts +++ b/packages/worker/src/api/controllers/global/users.ts @@ -3,17 +3,18 @@ import { checkInviteCode } from "../../../utilities/redis" import { sendEmail } from "../../../utilities/email" import { users } from "../../../sdk" import env from "../../../environment" -import { User, CloudAccount } from "@budibase/types" +import { CloudAccount, User } from "@budibase/types" import { - events, - 
errors, accounts, - users as usersCore, - tenancy, cache, + errors, + events, + tenancy, + users as usersCore, } from "@budibase/backend-core" import { checkAnyUserExists } from "../../../utilities/users" import { groups as groupUtils } from "@budibase/pro" + const MAX_USERS_UPLOAD_LIMIT = 1000 export const save = async (ctx: any) => { @@ -117,8 +118,7 @@ export const adminUser = async (ctx: any) => { export const countByApp = async (ctx: any) => { const appId = ctx.params.appId try { - const response = await users.countUsersByApp(appId) - ctx.body = response + ctx.body = await users.countUsersByApp(appId) } catch (err: any) { ctx.throw(err.status || 400, err) } @@ -126,6 +126,9 @@ export const countByApp = async (ctx: any) => { export const destroy = async (ctx: any) => { const id = ctx.params.id + if (id === ctx.user._id) { + ctx.throw(400, "Unable to delete self.") + } await users.destroy(id, ctx.user) @@ -136,6 +139,10 @@ export const destroy = async (ctx: any) => { export const bulkDelete = async (ctx: any) => { const { userIds } = ctx.request.body + if (userIds?.indexOf(ctx.user._id) !== -1) { + ctx.throw(400, "Unable to delete self.") + } + try { let usersResponse = await users.bulkDelete(userIds) @@ -207,13 +214,13 @@ export const invite = async (ctx: any) => { } export const inviteMultiple = async (ctx: any) => { - let { emails, userInfo } = ctx.request.body + let users = ctx.request.body let existing = false let existingEmail - for (let email of emails) { - if (await usersCore.getGlobalUserByEmail(email)) { + for (let user of users) { + if (await usersCore.getGlobalUserByEmail(user.email)) { existing = true - existingEmail = email + existingEmail = user.email break } } @@ -221,17 +228,18 @@ export const inviteMultiple = async (ctx: any) => { if (existing) { ctx.throw(400, `${existingEmail} already exists`) } - if (!userInfo) { - userInfo = {} - } - userInfo.tenantId = tenancy.getTenantId() - const opts: any = { - subject: "{{ company }} platform 
invitation", - info: userInfo, - } - for (let i = 0; i < emails.length; i++) { - await sendEmail(emails[i], EmailTemplatePurpose.INVITATION, opts) + for (let i = 0; i < users.length; i++) { + let userInfo = users[i].userInfo + if (!userInfo) { + userInfo = {} + } + userInfo.tenantId = tenancy.getTenantId() + const opts: any = { + subject: "{{ company }} platform invitation", + info: userInfo, + } + await sendEmail(users[i].email, EmailTemplatePurpose.INVITATION, opts) } ctx.body = { diff --git a/packages/worker/src/api/controllers/system/migrations.ts b/packages/worker/src/api/controllers/system/migrations.ts new file mode 100644 index 0000000000..57a5f6261c --- /dev/null +++ b/packages/worker/src/api/controllers/system/migrations.ts @@ -0,0 +1,13 @@ +const { migrate, MIGRATIONS } = require("../../../migrations") + +export const runMigrations = async (ctx: any) => { + const options = ctx.request.body + // don't await as can take a while, just return + migrate(options) + ctx.status = 200 +} + +export const fetchDefinitions = async (ctx: any) => { + ctx.body = MIGRATIONS + ctx.status = 200 +} diff --git a/packages/worker/src/api/index.js b/packages/worker/src/api/index.js index 281d9d097c..ca56e0c5d2 100644 --- a/packages/worker/src/api/index.js +++ b/packages/worker/src/api/index.js @@ -106,7 +106,10 @@ router if (ctx.publicEndpoint) { return next() } - if ((!ctx.isAuthenticated || !ctx.user.budibaseAccess) && !ctx.internal) { + if ( + (!ctx.isAuthenticated || (ctx.user && !ctx.user.budibaseAccess)) && + !ctx.internal + ) { ctx.throw(403, "Unauthorized - no public worker access") } return next() diff --git a/packages/worker/src/api/routes/global/roles.js b/packages/worker/src/api/routes/global/roles.js index 4e27b7d54b..d99e0e5b56 100644 --- a/packages/worker/src/api/routes/global/roles.js +++ b/packages/worker/src/api/routes/global/roles.js @@ -1,12 +1,12 @@ const Router = require("@koa/router") const controller = require("../../controllers/global/roles") -const { 
adminOnly } = require("@budibase/backend-core/auth") +const { builderOrAdmin } = require("@budibase/backend-core/auth") const router = Router() router - .get("/api/global/roles", adminOnly, controller.fetch) - .get("/api/global/roles/:appId", adminOnly, controller.find) - .delete("/api/global/roles/:appId", adminOnly, controller.removeAppRole) + .get("/api/global/roles", builderOrAdmin, controller.fetch) + .get("/api/global/roles/:appId", builderOrAdmin, controller.find) + .delete("/api/global/roles/:appId", builderOrAdmin, controller.removeAppRole) module.exports = router diff --git a/packages/worker/src/api/routes/global/self.js b/packages/worker/src/api/routes/global/self.js index e1af7c2146..1683a94f37 100644 --- a/packages/worker/src/api/routes/global/self.js +++ b/packages/worker/src/api/routes/global/self.js @@ -1,6 +1,6 @@ const Router = require("@koa/router") const controller = require("../../controllers/global/self") -const builderOnly = require("../../../middleware/builderOnly") +const { builderOnly } = require("@budibase/backend-core/auth") const { users } = require("../validation") const router = Router() diff --git a/packages/worker/src/api/routes/global/sessions.js b/packages/worker/src/api/routes/global/sessions.js deleted file mode 100644 index 6ab4ad8e59..0000000000 --- a/packages/worker/src/api/routes/global/sessions.js +++ /dev/null @@ -1,14 +0,0 @@ -const Router = require("@koa/router") -const controller = require("../../controllers/global/sessions") -const { adminOnly } = require("@budibase/backend-core/auth") - -const router = Router() - -router - .get("/api/global/sessions", adminOnly, controller.fetch) - .get("/api/global/sessions/self", controller.selfSessions) - .get("/api/global/sessions/:userId", adminOnly, controller.find) - .delete("/api/global/sessions/:userId", adminOnly, controller.invalidateUser) - .delete("/api/global/sessions/self/:sessionId", controller.invalidateSession) - -module.exports = router diff --git 
a/packages/worker/src/api/routes/global/users.js b/packages/worker/src/api/routes/global/users.js index e62e996443..e0a221a795 100644 --- a/packages/worker/src/api/routes/global/users.js +++ b/packages/worker/src/api/routes/global/users.js @@ -6,7 +6,7 @@ const Joi = require("joi") const cloudRestricted = require("../../../middleware/cloudRestricted") const { users } = require("../validation") const selfController = require("../../controllers/global/self") -const builderOrAdmin = require("../../../middleware/builderOrAdmin") +const { builderOrAdmin } = require("@budibase/backend-core/auth") const router = Router() @@ -32,10 +32,12 @@ function buildInviteValidation() { function buildInviteMultipleValidation() { // prettier-ignore - return joiValidator.body(Joi.object({ - emails: Joi.array().required(), - userInfo: Joi.object().optional(), - }).required()) + return joiValidator.body(Joi.array().required().items( + Joi.object({ + email: Joi.string(), + userInfo: Joi.object().optional(), + }) + )) } function buildInviteAcceptValidation() { @@ -64,7 +66,7 @@ router .post("/api/global/users/search", builderOrAdmin, controller.search) .delete("/api/global/users/:id", adminOnly, controller.destroy) .post("/api/global/users/bulkDelete", adminOnly, controller.bulkDelete) - .get("/api/global/users/count/:appId", adminOnly, controller.countByApp) + .get("/api/global/users/count/:appId", builderOrAdmin, controller.countByApp) .get("/api/global/roles/:appId") .post( "/api/global/users/invite", @@ -79,7 +81,7 @@ router controller.invite ) .post( - "/api/global/users/inviteMultiple", + "/api/global/users/multi/invite", adminOnly, buildInviteMultipleValidation(), controller.inviteMultiple diff --git a/packages/worker/src/api/routes/index.js b/packages/worker/src/api/routes/index.js index 89c67bdf88..550d14a9a3 100644 --- a/packages/worker/src/api/routes/index.js +++ b/packages/worker/src/api/routes/index.js @@ -6,12 +6,12 @@ const templateRoutes = require("./global/templates") 
const emailRoutes = require("./global/email") const authRoutes = require("./global/auth") const roleRoutes = require("./global/roles") -const sessionRoutes = require("./global/sessions") const environmentRoutes = require("./system/environment") const tenantsRoutes = require("./system/tenants") const statusRoutes = require("./system/status") const selfRoutes = require("./global/self") const licenseRoutes = require("./global/license") +const migrationRoutes = require("./system/migrations") let userGroupRoutes = api.groups exports.routes = [ @@ -22,11 +22,11 @@ exports.routes = [ templateRoutes, tenantsRoutes, emailRoutes, - sessionRoutes, roleRoutes, environmentRoutes, statusRoutes, selfRoutes, licenseRoutes, userGroupRoutes, + migrationRoutes, ] diff --git a/packages/worker/src/api/routes/system/migrations.ts b/packages/worker/src/api/routes/system/migrations.ts new file mode 100644 index 0000000000..5dcf90c4de --- /dev/null +++ b/packages/worker/src/api/routes/system/migrations.ts @@ -0,0 +1,19 @@ +import Router from "@koa/router" +import * as migrationsController from "../../controllers/system/migrations" +import { auth } from "@budibase/backend-core" + +const router = new Router() + +router + .post( + "/api/system/migrations/run", + auth.internalApi, + migrationsController.runMigrations + ) + .get( + "/api/system/migrations/definitions", + auth.internalApi, + migrationsController.fetchDefinitions + ) + +export = router diff --git a/packages/worker/src/api/routes/validation/users.ts b/packages/worker/src/api/routes/validation/users.ts index e7ad4cca18..d84ae94ee6 100644 --- a/packages/worker/src/api/routes/validation/users.ts +++ b/packages/worker/src/api/routes/validation/users.ts @@ -1,4 +1,4 @@ -import joiValidator from "../../../middleware/joi-validator" +const { joiValidator } = require("@budibase/backend-core/auth") import Joi from "joi" let schema: any = { diff --git a/packages/worker/src/environment.js b/packages/worker/src/environment.js index 
4c3cab1cab..bb45c1dd78 100644 --- a/packages/worker/src/environment.js +++ b/packages/worker/src/environment.js @@ -61,6 +61,7 @@ module.exports = { SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS, // other CHECKLIST_CACHE_TTL: parseIntSafe(process.env.CHECKLIST_CACHE_TTL) || 3600, + SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD, _set(key, value) { process.env[key] = value module.exports[key] = value diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts index f8031abacb..6fb954a1b5 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -18,7 +18,7 @@ const http = require("http") const api = require("./api") const redis = require("./utilities/redis") const Sentry = require("@sentry/node") -import { events } from "@budibase/backend-core" +import { events, pinoSettings } from "@budibase/backend-core" // this will setup http and https proxies form env variables bootstrap() @@ -30,14 +30,7 @@ app.keys = ["secret", "key"] // set up top level koa middleware app.use(koaBody({ multipart: true })) app.use(koaSession(app)) -app.use( - logger({ - prettyPrint: { - levelFirst: true, - }, - level: env.LOG_LEVEL || "error", - }) -) +app.use(logger(pinoSettings())) // authentication app.use(passport.initialize()) diff --git a/packages/worker/src/middleware/adminOnly.js b/packages/worker/src/middleware/adminOnly.js deleted file mode 100644 index 4bfdf83848..0000000000 --- a/packages/worker/src/middleware/adminOnly.js +++ /dev/null @@ -1,9 +0,0 @@ -module.exports = async (ctx, next) => { - if ( - !ctx.internal && - (!ctx.user || !ctx.user.admin || !ctx.user.admin.global) - ) { - ctx.throw(403, "Admin user only endpoint.") - } - return next() -} diff --git a/packages/worker/src/middleware/joi-validator.js b/packages/worker/src/middleware/joi-validator.js deleted file mode 100644 index 1686b0e727..0000000000 --- a/packages/worker/src/middleware/joi-validator.js +++ /dev/null @@ -1,28 +0,0 @@ -function validate(schema, property) { 
- // Return a Koa middleware function - return (ctx, next) => { - if (!schema) { - return next() - } - let params = null - if (ctx[property] != null) { - params = ctx[property] - } else if (ctx.request[property] != null) { - params = ctx.request[property] - } - const { error } = schema.validate(params) - if (error) { - ctx.throw(400, `Invalid ${property} - ${error.message}`) - return - } - return next() - } -} - -module.exports.body = schema => { - return validate(schema, "body") -} - -module.exports.params = schema => { - return validate(schema, "params") -} diff --git a/packages/worker/src/migrations/functions/globalInfoSyncUsers.ts b/packages/worker/src/migrations/functions/globalInfoSyncUsers.ts new file mode 100644 index 0000000000..cae6c6af51 --- /dev/null +++ b/packages/worker/src/migrations/functions/globalInfoSyncUsers.ts @@ -0,0 +1,20 @@ +import { User } from "@budibase/types" +import * as sdk from "../../sdk" + +/** + * Date: + * Aug 2022 + * + * Description: + * Re-sync the global-db users to the global-info db users + */ +export const run = async (globalDb: any) => { + const users = (await sdk.users.allUsers()) as User[] + const promises = [] + for (let user of users) { + promises.push( + sdk.users.addTenant(user.tenantId, user._id as string, user.email) + ) + } + await Promise.all(promises) +} diff --git a/packages/worker/src/migrations/index.ts b/packages/worker/src/migrations/index.ts new file mode 100644 index 0000000000..6900596216 --- /dev/null +++ b/packages/worker/src/migrations/index.ts @@ -0,0 +1,74 @@ +import { migrations, redis } from "@budibase/backend-core" +import { Migration, MigrationOptions, MigrationName } from "@budibase/types" +import env from "../environment" + +// migration functions +import * as syncUserInfo from "./functions/globalInfoSyncUsers" + +/** + * Populate the migration function and additional configuration from + * the static migration definitions. 
+ */ +export const buildMigrations = () => { + const definitions = migrations.DEFINITIONS + const workerMigrations: Migration[] = [] + + for (const definition of definitions) { + switch (definition.name) { + case MigrationName.GLOBAL_INFO_SYNC_USERS: { + // only needed in cloud + if (!env.SELF_HOSTED) { + workerMigrations.push({ + ...definition, + fn: syncUserInfo.run, + }) + } + break + } + } + } + + return workerMigrations +} + +export const MIGRATIONS = buildMigrations() + +export const migrate = async (options?: MigrationOptions) => { + if (env.SELF_HOSTED) { + await migrateWithLock(options) + } else { + await migrations.runMigrations(MIGRATIONS, options) + } +} + +const migrateWithLock = async (options?: MigrationOptions) => { + // get a new lock client + const redlock = await redis.clients.getMigrationsRedlock() + // lock for 15 minutes + const ttl = 1000 * 60 * 15 + + let migrationLock + + // acquire lock + try { + migrationLock = await redlock.lock("migrations", ttl) + } catch (e: any) { + if (e.name === "LockError") { + return + } else { + throw e + } + } + + // run migrations + try { + await migrations.runMigrations(MIGRATIONS, options) + } finally { + // release lock + try { + await migrationLock.unlock() + } catch (e) { + console.error("unable to release migration lock") + } + } +} diff --git a/packages/worker/src/sdk/users/users.ts b/packages/worker/src/sdk/users/users.ts index ea7f2517e0..58c2decabf 100644 --- a/packages/worker/src/sdk/users/users.ts +++ b/packages/worker/src/sdk/users/users.ts @@ -101,12 +101,11 @@ interface SaveUserOpts { bulkCreate?: boolean } -export const buildUser = async ( +const buildUser = async ( user: any, opts: SaveUserOpts = { hashPassword: true, requirePassword: true, - bulkCreate: false, }, tenantId: string, dbUser?: any @@ -185,15 +184,12 @@ export const save = async ( dbUser = await db.get(_id) } - let builtUser = await buildUser( - user, - { - hashPassword: true, - requirePassword: user.requirePassword, - }, - 
tenantId, - dbUser - ) + let builtUser = await buildUser(user, opts, tenantId, dbUser) + + // make sure we set the _id field for a new user + if (!_id) { + _id = builtUser._id + } try { const putOpts = { @@ -220,7 +216,7 @@ export const save = async ( await addTenant(tenantId, _id, email) await cache.user.invalidateUser(response.id) // let server know to sync user - await apps.syncUserInApps(builtUser._id) + await apps.syncUserInApps(_id) return { _id: response.id, @@ -293,7 +289,6 @@ export const bulkCreate = async ( { hashPassword: true, requirePassword: user.requirePassword, - bulkCreate: false, }, tenantId ) @@ -305,6 +300,9 @@ export const bulkCreate = async ( // Post processing of bulk added users, i.e events and cache operations for (const user of usersToBulkSave) { + // TODO: Refactor to bulk insert users into the info db + // instead of relying on looping tenant creation + await addTenant(tenantId, user._id, user.email) await eventHelpers.handleSaveEvents(user, null) await apps.syncUserInApps(user._id) } @@ -372,6 +370,7 @@ export const bulkDelete = async (userIds: any) => { export const destroy = async (id: string, currentUser: any) => { const db = tenancy.getGlobalDB() const dbUser = await db.get(id) + const userId = dbUser._id as string let groups = dbUser.userGroups if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { @@ -389,7 +388,7 @@ export const destroy = async (id: string, currentUser: any) => { await deprovisioning.removeUserFromInfoDB(dbUser) - await db.remove(dbUser._id, dbUser._rev) + await db.remove(userId, dbUser._rev) if (groups) { await groupUtils.deleteGroupUsers(groups, dbUser) @@ -397,17 +396,18 @@ export const destroy = async (id: string, currentUser: any) => { await eventHelpers.handleDeleteEvents(dbUser) await quotas.removeUser(dbUser) - await cache.user.invalidateUser(dbUser._id) - await sessions.invalidateSessions(dbUser._id) + await cache.user.invalidateUser(userId) + await sessions.invalidateSessions(userId, { reason: 
"deletion" }) // let server know to sync user - await apps.syncUserInApps(dbUser._id) + await apps.syncUserInApps(userId) } const bulkDeleteProcessing = async (dbUser: User) => { + const userId = dbUser._id as string await deprovisioning.removeUserFromInfoDB(dbUser) await eventHelpers.handleDeleteEvents(dbUser) - await cache.user.invalidateUser(dbUser._id) - await sessions.invalidateSessions(dbUser._id) + await cache.user.invalidateUser(userId) + await sessions.invalidateSessions(userId, { reason: "bulk-deletion" }) // let server know to sync user - await apps.syncUserInApps(dbUser._id) + await apps.syncUserInApps(userId) } diff --git a/packages/worker/yarn.lock b/packages/worker/yarn.lock index 93c140605d..b475340214 100644 --- a/packages/worker/yarn.lock +++ b/packages/worker/yarn.lock @@ -291,18 +291,19 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/backend-core@1.1.33-alpha.1": - version "1.1.33-alpha.1" - resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.1.33-alpha.1.tgz#2c483f71c99f0dfd07e78dfc162be979e97a5a37" - integrity sha512-1oY1GPWQ4ym4m75QJxnCBOmEGc+yuXRGafKpBGXXqpyA7qMP5TSYpEGrrpX3uKrIsLIRsiaqw3oPr0T8aL2LQw== +"@budibase/backend-core@1.2.41-alpha.0": + version "1.2.41-alpha.0" + resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.2.41-alpha.0.tgz#ee3bc032ae89e347b9fb220303a9d040df6f5823" + integrity sha512-x6wJpBBFj6a1pGu2OR1vBPHiA4LvdQpVxr6f1lZa2+nAhuXVtEhrdnvBVzK2hJ52dOTTTGaw2A5tTkbf7861CA== dependencies: - "@budibase/types" "1.1.33-alpha.1" + "@budibase/types" "1.2.41-alpha.0" "@techpass/passport-openidconnect" "0.3.2" aws-sdk "2.1030.0" bcrypt "5.0.1" dotenv "16.0.1" emitter-listener "1.1.2" ioredis "4.28.0" + joi "17.6.0" jsonwebtoken "8.5.1" koa-passport "4.1.4" lodash "4.17.21" @@ -324,21 +325,21 
@@ uuid "8.3.2" zlib "1.0.5" -"@budibase/pro@1.1.33-alpha.1": - version "1.1.33-alpha.1" - resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.1.33-alpha.1.tgz#2778f4895b5dec982ef8245d0c36247c6242cd73" - integrity sha512-M0gust58aTyPtpp2CKhFqsmzVYu1ZttW1l6p9tePClhfp4FvkYTy/UsHLV7Pg3NbCLDo3qpCzTpPTYnePtf/vg== +"@budibase/pro@1.2.41-alpha.0": + version "1.2.41-alpha.0" + resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.2.41-alpha.0.tgz#4f27dbeee24ddc58e2a4e75cd90b53543f742722" + integrity sha512-LiKuO7/9GGzf3xfMmNTZQl2r4jmGgqnUf7fA5ub/MRyWYM7ZjPWEKW1V9OAk3vXwv9a+4V6FUxJQ1sDUZ3DT/Q== dependencies: - "@budibase/backend-core" "1.1.33-alpha.1" - "@budibase/types" "1.1.33-alpha.1" + "@budibase/backend-core" "1.2.41-alpha.0" + "@budibase/types" "1.2.41-alpha.0" "@koa/router" "8.0.8" joi "17.6.0" node-fetch "^2.6.1" -"@budibase/types@1.1.33-alpha.1": - version "1.1.33-alpha.1" - resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.1.33-alpha.1.tgz#8a1253208a618b439d85977c3879b0184b6487e8" - integrity sha512-BEIJqb9WjAck16rkgtGHlnxANhkK8OXbJLBxu+eB4x7F6tSmMI+T6zsBkGE0ehHmqa6YNzY7OldKLc967mAaLA== +"@budibase/types@1.2.41-alpha.0": + version "1.2.41-alpha.0" + resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.2.41-alpha.0.tgz#01071ce6ecef6799b04b1defce2b38e81b615f9f" + integrity sha512-2jkeToXsujYUXb6P2DFTFUOFv56GgImJ4webP8xr5cPGL0/xmhWGMd/lLxLt96RtMQp43UmVWZm1BA7TZw4fJg== "@cspotcode/source-map-consumer@0.8.0": version "0.8.0"