Merge branch 'master' of github.com:Budibase/budibase into labday/sqs

mike12345567 2023-10-17 14:53:09 +01:00
commit b267e4ca28
168 changed files with 4443 additions and 4096 deletions

View file

@ -1,9 +1,14 @@
packages/server/node_modules
packages/builder
packages/frontend-core
packages/backend-core
packages/worker/node_modules
packages/cli
packages/client
packages/bbui
packages/string-templates
*
!/packages/
!/scripts/
/packages/*/node_modules
packages/server/scripts/
!packages/server/scripts/integrations/oracle
!nx.json
!/hosting/single/
!/hosting/letsencrypt/
!package.json
!yarn.lock
!lerna.json
!.yarnrc

View file

@ -10,7 +10,6 @@ on:
push:
branches:
- master
- develop
pull_request:
workflow_dispatch:
@ -20,18 +19,12 @@ env:
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
root-reserve-mb: 35000
swap-size-mb: 1024
remove-android: "true"
remove-dotnet: "true"
- name: Checkout repo and submodules
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
@ -268,20 +261,21 @@ jobs:
branch="${{ github.base_ref || github.ref_name }}"
echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
if [[ $branch == "master" ]]; then
base_commit=$(git rev-parse origin/master)
base_commit=$(git rev-parse origin/master)
if [[ ! -z $base_commit ]]; then
echo "target_branch=$branch"
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
else
base_commit=$(git rev-parse origin/develop)
echo "Nothing to do - branch to branch merge."
fi
echo "target_branch=$branch"
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
- name: Check submodule merged to develop
- name: Check submodule merged to base branch
if: ${{ steps.get_pro_commits.outputs.base_commit != '' }}
uses: actions/github-script@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
@ -290,7 +284,7 @@ jobs:
const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}"" branch.');
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
process.exit(1);
} else {

View file

@ -1,29 +0,0 @@
name: check_unreleased_changes
on:
pull_request:
branches:
- master
jobs:
check_unreleased:
runs-on: ubuntu-latest
steps:
- name: Check for unreleased changes
env:
REPO: "Budibase/budibase"
TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/releases/latest" | \
jq -r .published_at)
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/commits/master" | \
jq -r .commit.committer.date)
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
echo "There are unreleased changes. Please release these changes before merging."
exit 1
fi
echo "No unreleased changes detected."

View file

@ -4,7 +4,13 @@ on:
pull_request:
types: [closed]
branches:
- develop
- master
workflow_dispatch:
inputs:
BRANCH:
type: string
description: Which featurebranch branch to destroy?
required: true
jobs:
release:
@ -13,7 +19,7 @@ jobs:
- uses: actions/checkout@v3
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_BRANCH: ${{ github.head_ref }}
PAYLOAD_BRANCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.BRANCH || github.head_ref }}
PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
with:
repository: budibase/budibase-deploys

View file

@ -3,7 +3,7 @@ name: deploy-featurebranch
on:
pull_request:
branches:
- develop
- master
jobs:
release:

View file

@ -1,41 +0,0 @@
name: "deploy-preprod"
on:
workflow_dispatch:
workflow_call:
jobs:
deploy-to-legacy-preprod-env:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-preprod-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View file

@ -1,124 +0,0 @@
name: Budibase Prerelease
concurrency:
group: release-prerelease
cancel-in-progress: false
on:
push:
tags:
- "*-alpha.*"
workflow_dispatch:
env:
# Posthog token used by ui at build time
# disable unless needed for testing
# POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
FEATURE_PREVIEW_URL: https://budirelease.live
jobs:
release-images:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not develop
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/develop; then
echo "Tag is not in develop"
exit 1
fi
- uses: actions/setup-node@v1
with:
node-version: 18.x
- run: yarn install --frozen-lockfile
- name: Update versions
run: ./scripts/updateVersions.sh
- run: yarn build
- run: yarn build:sdk
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
# setup the username and email.
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
git submodule foreach git commit -a -m 'Release process'
git commit -a -m 'Release process'
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
yarn release:develop
- name: Build/release Docker images
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:develop
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
release-helm-chart:
needs: [release-images]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git fetch
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version 0.0.0-develop --app-version develop --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: develop"
git push
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View file

@ -110,19 +110,13 @@ jobs:
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push
deploy-to-legacy-preprod-env:
needs: [release-images]
uses: ./.github/workflows/deploy-preprod.yml
secrets: inherit
# Trigger deploy to new EKS preprod environment
trigger-deploy-to-preprod-env:
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the latest budibase release version
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
@ -133,5 +127,5 @@ jobs:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-preprod-deploy
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View file

@ -0,0 +1,69 @@
name: Test
on:
workflow_dispatch:
env:
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [18.x]
steps:
- name: "Checkout"
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: "yarn"
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Run Yarn
run: yarn
- name: Run Yarn Build
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase-test:test
file: ./hosting/single/Dockerfile.v2
cache-from: type=registry,ref=budibase/budibase-test:test
cache-to: type=inline
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-test:aas
file: ./hosting/single/Dockerfile.v2

View file

@ -18,7 +18,7 @@ jobs:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
root-reserve-mb: 35000
root-reserve-mb: 30000
swap-size-mb: 1024
remove-android: 'true'
remove-dotnet: 'true'
@ -33,14 +33,6 @@ jobs:
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
@ -55,10 +47,6 @@ jobs:
run: yarn
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Runt Yarn Lint
run: yarn lint
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Run Yarn Build
run: yarn build:docker:pre
- name: Login to Docker Hub

View file

@ -2,7 +2,7 @@ name: Close stale issues and PRs # https://github.com/actions/stale
on:
workflow_dispatch:
schedule:
- cron: '*/30 * * * *' # Every 30 mins
- cron: "*/30 * * * *" # Every 30 mins
jobs:
stale:
@ -10,20 +10,37 @@ jobs:
steps:
- uses: actions/stale@v8
with:
# stale rules
days-before-stale: 60
operations-per-run: 1
# stale rules for PRs
days-before-pr-stale: 7
stale-issue-label: stale
stale-issue-message: "This issue has been automatically marked as stale because it has not had any activity for 60 days."
# close rules
# days after being marked as stale to close
days-before-close: 30
close-issue-label: closed-stale
close-issue-message: This issue has been automatically closed it has not had any activity in 90 days."
days-before-pr-close: 7
# exemptions
exempt-pr-labels: pinned,security,roadmap
days-before-pr-close: 7
- uses: actions/stale@v8
with:
operations-per-run: 3
# stale rules for high priority bugs
days-before-stale: 30
only-issue-labels: bug,High priority
stale-issue-label: warn
- uses: actions/stale@v8
with:
operations-per-run: 3
# stale rules for medium priority bugs
days-before-stale: 90
only-issue-labels: bug,Medium priority
stale-issue-label: warn
- uses: actions/stale@v8
with:
operations-per-run: 3
# stale rules for all bugs
days-before-stale: 180
stale-issue-label: stale
only-issue-labels: bug
stale-issue-message: "This issue has been automatically marked as stale because it has not had any activity for six months."
days-before-close: 30

View file

@ -1,42 +0,0 @@
name: Tag prerelease
concurrency:
group: tag-prerelease
cancel-in-progress: false
on:
push:
branches:
- develop
paths:
- ".aws/**"
- ".github/**"
- "charts/**"
- "packages/**"
- "scripts/**"
- "package.json"
- "yarn.lock"
workflow_dispatch:
jobs:
tag-prerelease:
runs-on: ubuntu-latest
steps:
- name: Fail if branch is not develop
if: github.ref != 'refs/heads/develop'
run: |
echo "Ref is not develop, you must run this job from develop."
exit 1
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- run: cd scripts && yarn
- name: Tag prerelease
run: |
cd scripts
# setup the username and email.
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
./versionCommit.sh prerelease

View file

@ -4,17 +4,6 @@ concurrency:
cancel-in-progress: false
on:
push:
branches:
- master
paths:
- ".aws/**"
- ".github/**"
- "charts/**"
- "packages/**"
- "scripts/**"
- "package.json"
- "yarn.lock"
workflow_dispatch:
inputs:
versioning:

View file

@ -1 +1 @@
network-timeout 100000
network-timeout 1000000

View file

@ -138,6 +138,8 @@ To develop the Budibase platform you'll need [Docker](https://www.docker.com/) a
`yarn setup` will check that all necessary components are installed and setup the repo for usage.
If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above command.
##### Manual method
The following commands can be executed to manually get Budibase up and running (assuming Docker/Docker Compose has been installed).
@ -146,6 +148,8 @@ The following commands can be executed to manually get Budibase up and running (
`yarn build` will build all budibase packages.
If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above commands.
#### 4. Running
To run the budibase server and builder in dev mode (i.e. with live reloading):

View file

@ -12,14 +12,14 @@ RUN chmod +x /cleanup.sh
WORKDIR /app
ADD packages/server .
COPY yarn.lock .
RUN yarn install --production=true
RUN yarn install --production=true --network-timeout 100000
RUN /cleanup.sh
# build worker
WORKDIR /worker
ADD packages/worker .
COPY yarn.lock .
RUN yarn install --production=true
RUN yarn install --production=true --network-timeout 100000
RUN /cleanup.sh
FROM budibase/couchdb

View file

@ -0,0 +1,126 @@
FROM node:18-slim as build
# install node-gyp dependencies
RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
# copy and install dependencies
WORKDIR /app
COPY package.json .
COPY yarn.lock .
COPY lerna.json .
COPY .yarnrc .
COPY packages/server/package.json packages/server/package.json
COPY packages/worker/package.json packages/worker/package.json
# string-templates does not get bundled during the esbuild process, so we want to use the local version
COPY packages/string-templates/package.json packages/string-templates/package.json
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh
# We will never want to sync pro, but the script is still required
RUN echo '' > scripts/syncProPackage.js
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
# copy the actual code
COPY packages/server/dist packages/server/dist
COPY packages/server/pm2.config.js packages/server/pm2.config.js
COPY packages/server/client packages/server/client
COPY packages/server/builder packages/server/builder
COPY packages/worker/dist packages/worker/dist
COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
# install base dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https gpg -y
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx
COPY hosting/single/nginx/nginx-default-site.conf /etc/nginx/sites-enabled/default
RUN mkdir -p /var/log/nginx && \
touch /var/log/nginx/error.log && \
touch /var/run/nginx.pid && \
usermod -a -G tty www-data
WORKDIR /
RUN mkdir -p scripts/integrations/oracle
COPY packages/server/scripts/integrations/oracle scripts/integrations/oracle
RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
# setup minio
WORKDIR /minio
COPY scripts/install-minio.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# setup runner file
WORKDIR /
COPY hosting/single/runner.sh .
RUN chmod +x ./runner.sh
COPY hosting/single/healthcheck.sh .
RUN chmod +x ./healthcheck.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
COPY hosting/single/ssh/sshd_config /etc/
COPY hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# setup letsencrypt certificate
RUN apt-get install -y certbot python3-certbot-nginx
COPY hosting/letsencrypt /app/letsencrypt
RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh
COPY --from=build /app/node_modules /node_modules
COPY --from=build /app/package.json /package.json
COPY --from=build /app/packages/server /app
COPY --from=build /app/packages/worker /worker
COPY --from=build /app/packages/string-templates /string-templates
RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"
# must set this just before running
ENV NODE_ENV=production
WORKDIR /
CMD ["./runner.sh"]

View file

@ -7,16 +7,16 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://127.0.0.1:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001
[[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
# export CUSTOM_DOMAIN=budi001.custom.com
@ -51,7 +51,7 @@ do
fi
done
if [[ -z "${COUCH_DB_URL}" ]]; then
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984
fi
if [ ! -f "${DATA_DIR}/.env" ]; then
touch ${DATA_DIR}/.env

View file

@ -1,5 +1,5 @@
{
"version": "2.11.5-alpha.3",
"version": "2.11.35",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -8,5 +8,9 @@
}
}
},
"targetDefaults": {}
"targetDefaults": {
"build": {
"inputs": ["{workspaceRoot}/scripts/build.js"]
}
}
}

View file

@ -3,14 +3,11 @@
"private": true,
"devDependencies": {
"@esbuild-plugins/tsconfig-paths": "^0.1.2",
"@nx/js": "16.4.3",
"@rollup/plugin-json": "^4.0.2",
"@typescript-eslint/parser": "6.7.2",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.8.0",
"eslint": "^8.44.0",
"husky": "^8.0.3",
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "7.1.1",
"madge": "^6.0.0",
@ -19,8 +16,6 @@
"nx-cloud": "16.0.5",
"prettier": "2.8.8",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
"rollup-plugin-replace": "^2.2.0",
"svelte": "3.49.0",
"typescript": "5.2.2",
"@babel/core": "^7.22.5",
@ -51,7 +46,7 @@
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"dev:docker": "yarn build && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"test": "lerna run --stream test --stream",
"lint:eslint": "eslint packages qa-core --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
@ -61,7 +56,6 @@
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"build:specs": "lerna run --stream specs",
"build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:pre": "yarn build && lerna run --stream predocker",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
@ -69,12 +63,10 @@
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single": "yarn build && lerna run --concurrency 1 predocker && yarn build:docker:single:image",
"build:docker:single": "./scripts/build-single-image.sh",
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"build:docs": "lerna run --stream build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",
"env:multi:disable": "lerna run --stream env:multi:disable",

View file

@ -26,7 +26,7 @@
"@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0",
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
"aws-cloudfront-sign": "3.0.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
@ -62,7 +62,7 @@
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/chance": "1.1.3",
"@types/cookies": "0.7.8",
"@types/jest": "29.5.3",
"@types/jest": "29.5.5",
"@types/lodash": "4.14.180",
"@types/node": "18.17.0",
"@types/node-fetch": "2.6.4",

View file

@ -1,5 +1,10 @@
import { prefixed, DocumentType } from "@budibase/types"
export { SEPARATOR, UNICODE_MAX, DocumentType } from "@budibase/types"
export {
SEPARATOR,
UNICODE_MAX,
DocumentType,
InternalTable,
} from "@budibase/types"
/**
* Can be used to create a few different forms of querying a view.
@ -30,10 +35,6 @@ export const DeprecatedViews = {
],
}
export enum InternalTable {
USER_METADATA = "ta_users",
}
export const StaticDatabases = {
GLOBAL: {
name: "global-db",

View file

@ -45,6 +45,11 @@ export function generateGlobalUserID(id?: any) {
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
}
const isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)
export function isGlobalUserID(id: string) {
return isGlobalUserIDRegex.test(id)
}
/**
* Generates a new user ID based on the passed in global ID.
* @param {string} globalId The ID of the global user.
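A minimal usage sketch for the new isGlobalUserID helper, assuming the usual Budibase ID conventions (DocumentType.USER serialising to "us" and SEPARATOR to "_"; both example IDs are hypothetical):

isGlobalUserID("us_f8a91c2d") // true - has the global user prefix
isGlobalUserID("ro_ta_users_us_f8a91c2d") // false - an app-level user metadata ID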

View file

@ -1,5 +1,5 @@
import env from "../environment"
const cfsign = require("aws-cloudfront-sign")
import * as cfsign from "aws-cloudfront-sign"
let PRIVATE_KEY: string | undefined
@ -21,7 +21,7 @@ function getPrivateKey() {
const getCloudfrontSignParams = () => {
return {
keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID,
keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID!,
privateKeyString: getPrivateKey(),
expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour
}

View file

@ -14,13 +14,14 @@ import {
} from "../db"
import {
BulkDocsResponse,
ContextUser,
SearchQuery,
SearchQueryOperators,
SearchUsersRequest,
User,
ContextUser,
} from "@budibase/types"
import { getGlobalDB } from "../context"
import * as context from "../context"
import { user as userCache } from "../cache"
import { getGlobalDB } from "../context"
type GetOpts = { cleanup?: boolean }
@ -39,6 +40,31 @@ function removeUserPassword(users: User | User[]) {
return users
}
export const isSupportedUserSearch = (query: SearchQuery) => {
const allowed = [
{ op: SearchQueryOperators.STRING, key: "email" },
{ op: SearchQueryOperators.EQUAL, key: "_id" },
]
for (let [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") {
return false
}
const fields = Object.keys(operation || {})
// this filter doesn't contain options - ignore
if (fields.length === 0) {
continue
}
const allowedOperation = allowed.find(
allow =>
allow.op === key && fields.length === 1 && fields[0] === allow.key
)
if (!allowedOperation) {
return false
}
}
return true
}
export const bulkGetGlobalUsersById = async (
userIds: string[],
opts?: GetOpts
@ -211,8 +237,8 @@ export const searchGlobalUsersByEmail = async (
const PAGE_LIMIT = 8
export const paginatedUsers = async ({
page,
email,
bookmark,
query,
appId,
}: SearchUsersRequest = {}) => {
const db = getGlobalDB()
@ -222,18 +248,20 @@ export const paginatedUsers = async ({
limit: PAGE_LIMIT + 1,
}
// add a startkey if the page was specified (anchor)
if (page) {
opts.startkey = page
if (bookmark) {
opts.startkey = bookmark
}
// property specifies what to use for the page/anchor
let userList: User[],
property = "_id",
getKey
if (appId) {
if (query?.equal?._id) {
userList = [await getById(query.equal._id)]
} else if (appId) {
userList = await searchGlobalUsersByApp(appId, opts)
getKey = (doc: any) => getGlobalUserByAppPage(appId, doc)
} else if (email) {
userList = await searchGlobalUsersByEmail(email, opts)
} else if (query?.string?.email) {
userList = await searchGlobalUsersByEmail(query?.string?.email, opts)
property = "email"
} else {
// no search, query allDocs

View file

@ -82,9 +82,9 @@
"@spectrum-css/vars": "3.0.1",
"dayjs": "^1.10.8",
"easymde": "^2.16.1",
"svelte-dnd-action": "^0.9.8",
"svelte-flatpickr": "3.2.3",
"svelte-portal": "^1.0.0",
"svelte-dnd-action": "^0.9.8"
"svelte-portal": "^1.0.0"
},
"resolutions": {
"loader-utils": "1.4.1"

View file

@ -21,14 +21,6 @@
"hsla(240, 90%, 75%, 0.3)",
"hsla(320, 90%, 75%, 0.3)",
]
$: {
if (constraints.inclusion.length) {
options = constraints.inclusion.map(value => ({
name: value,
id: Math.random(),
}))
}
}
const removeInput = idx => {
delete optionColors[options[idx].name]
constraints.inclusion = constraints.inclusion.filter((e, i) => i !== idx)
@ -80,6 +72,11 @@
// Initialize anchor arrays on mount, assuming 'options' is already populated
colorPopovers = constraints.inclusion.map(() => undefined)
anchors = constraints.inclusion.map(() => undefined)
options = constraints.inclusion.map(value => ({
name: value,
id: Math.random(),
}))
})
</script>

View file

@ -1,8 +0,0 @@
const ncp = require("ncp").ncp
ncp("./dist", "../server/builder", function (err) {
if (err) {
return console.error(err)
}
console.log("Copied dist folder to ../server/builder")
})

View file

@ -85,8 +85,8 @@
"@babel/core": "^7.12.14",
"@babel/plugin-transform-runtime": "^7.13.10",
"@babel/preset-env": "^7.13.12",
"@rollup/plugin-replace": "^2.4.2",
"@roxi/routify": "2.18.5",
"@rollup/plugin-replace": "^5.0.3",
"@roxi/routify": "2.18.12",
"@sveltejs/vite-plugin-svelte": "1.0.1",
"@testing-library/jest-dom": "5.17.0",
"@testing-library/svelte": "^3.2.2",
@ -95,16 +95,18 @@
"jest": "29.6.2",
"jsdom": "^21.1.1",
"ncp": "^2.0.0",
"rollup": "^2.44.0",
"svelte": "^3.48.0",
"svelte-jester": "^1.3.2",
"vite": "^3.0.8",
"vite-plugin-static-copy": "^0.16.0",
"vite": "^4.4.11",
"vite-plugin-static-copy": "^0.17.0",
"vitest": "^0.29.2"
},
"nx": {
"targets": {
"build": {
"outputs": [
"{workspaceRoot}/packages/server/builder"
],
"dependsOn": [
{
"projects": [

View file

@ -948,12 +948,15 @@ export const buildFormSchema = (component, asset) => {
if (component._component.endsWith("formblock")) {
let schema = {}
const datasource = getDatasourceForProvider(asset, component)
const info = getSchemaForDatasource(component, datasource)
if (!info?.schema) {
return schema
}
if (!component.fields) {
Object.values(info?.schema)
Object.values(info.schema)
.filter(
({ autocolumn, name }) =>
!autocolumn && !["_rev", "_id"].includes(name)

View file

@ -64,6 +64,7 @@ const INITIAL_FRONTEND_STATE = {
},
features: {
componentValidation: false,
disableUserMetadata: false,
},
errors: [],
hasAppPackage: false,

View file

@ -110,20 +110,7 @@
<div class="schema-fields">
{#each schemaFields as [field, schema]}
{#if !schema.autocolumn && schema.type !== "attachment"}
<DrawerBindableSlot
fillWidth
title={value.title}
label={field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={value[field]}
on:change={e => onChange(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
{#if isTestModal}
<RowSelectorTypes
{isTestModal}
{field}
@ -132,7 +119,31 @@
{value}
{onChange}
/>
</DrawerBindableSlot>
{:else}
<DrawerBindableSlot
fillWidth
title={value.title}
label={field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={value[field]}
on:change={e => onChange(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
{value}
{onChange}
/>
</DrawerBindableSlot>
{/if}
{/if}
{#if isUpdateRow && schema.type === "link"}
<div class="checkbox-field">

View file

@ -4,6 +4,7 @@
import { TableNames } from "constants"
import { Grid } from "@budibase/frontend-core"
import { API } from "api"
import { store } from "builderStore"
import GridAddColumnModal from "components/backend/DataTable/modals/grid/GridCreateColumnModal.svelte"
import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
import GridEditUserModal from "components/backend/DataTable/modals/grid/GridEditUserModal.svelte"
@ -17,11 +18,11 @@
import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte"
const userSchemaOverrides = {
firstName: { displayName: "First name" },
lastName: { displayName: "Last name" },
email: { displayName: "Email" },
roleId: { displayName: "Role" },
status: { displayName: "Status" },
firstName: { displayName: "First name", disabled: true },
lastName: { displayName: "Last name", disabled: true },
email: { displayName: "Email", disabled: true },
roleId: { displayName: "Role", disabled: true },
status: { displayName: "Status", disabled: true },
}
$: id = $tables.selected?._id
@ -60,14 +61,14 @@
datasource={gridDatasource}
canAddRows={!isUsersTable}
canDeleteRows={!isUsersTable}
canEditRows={!isUsersTable}
canEditColumns={!isUsersTable}
canEditRows={!isUsersTable || !$store.features.disableUserMetadata}
canEditColumns={!isUsersTable || !$store.features.disableUserMetadata}
schemaOverrides={isUsersTable ? userSchemaOverrides : null}
showAvatars={false}
on:updatedatasource={handleGridTableUpdate}
>
<svelte:fragment slot="filter">
{#if isUsersTable}
{#if isUsersTable && $store.features.disableUserMetadata}
<GridUsersTableButton />
{/if}
<GridFilterButton />

View file

@ -13,7 +13,13 @@
let modal
$: tempValue = filters || []
$: schemaFields = Object.values(schema || {})
$: schemaFields = Object.entries(schema || {}).map(
([fieldName, fieldSchema]) => ({
name: fieldName, // Using the key as name if not defined in the schema, for example in some autogenerated columns
...fieldSchema,
})
)
$: text = getText(filters)
$: selected = tempValue.filter(x => !x.onEmptyFilter)?.length > 0
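Concretely, the remapped schemaFields now carry their column key as a name; a small sketch of the transform:

Object.entries({ firstName: { type: "string" } }).map(([name, s]) => ({ name, ...s }))
// → [{ name: "firstName", type: "string" }]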

View file

@ -33,17 +33,16 @@
import { getBindings } from "components/backend/DataTable/formula"
import JSONSchemaModal from "./JSONSchemaModal.svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldType } from "@budibase/types"
import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
const AUTO_TYPE = "auto"
const AUTO_TYPE = FIELDS.AUTO.type
const FORMULA_TYPE = FIELDS.FORMULA.type
const LINK_TYPE = FIELDS.LINK.type
const STRING_TYPE = FIELDS.STRING.type
const NUMBER_TYPE = FIELDS.NUMBER.type
const JSON_TYPE = FIELDS.JSON.type
const DATE_TYPE = FIELDS.DATETIME.type
const USER_REFRENCE_TYPE = FIELDS.BB_REFERENCE_USER.compositeType
const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
@ -52,7 +51,24 @@
export let field
let mounted = false
let fieldDefinitions = cloneDeep(FIELDS)
const fieldDefinitions = Object.values(FIELDS).reduce(
// Storing the fields by complex field id
(acc, field) => ({
...acc,
[makeFieldId(field.type, field.subtype)]: field,
}),
{}
)
function makeFieldId(type, subtype, autocolumn) {
// don't make field IDs for auto types
if (type === AUTO_TYPE || autocolumn) {
return type.toUpperCase()
} else {
return `${type}${subtype || ""}`.toUpperCase()
}
}
let originalName
let linkEditDisabled
let primaryDisplay
@ -72,8 +88,8 @@
let jsonSchemaModal
let allowedTypes = []
let editableColumn = {
type: fieldDefinitions.STRING.type,
constraints: fieldDefinitions.STRING.constraints,
type: FIELDS.STRING.type,
constraints: FIELDS.STRING.constraints,
// Initial value for column name in other table for linked records
fieldName: $tables.selected.name,
}
@ -139,9 +155,6 @@
$tables.selected.primaryDisplay == null ||
$tables.selected.primaryDisplay === editableColumn.name
if (editableColumn.type === FieldType.BB_REFERENCE) {
editableColumn.type = `${editableColumn.type}_${editableColumn.subtype}`
}
// Here we are setting the relationship values based on the editableColumn
// This part of the code is used when viewing an existing field hence the check
// for the tableId
@ -172,7 +185,18 @@
}
}
allowedTypes = getAllowedTypes()
if (!savingColumn) {
editableColumn.fieldId = makeFieldId(
editableColumn.type,
editableColumn.subtype,
editableColumn.autocolumn
)
allowedTypes = getAllowedTypes().map(t => ({
fieldId: makeFieldId(t.type, t.subtype),
...t,
}))
}
}
$: initialiseField(field, savingColumn)
@ -249,13 +273,7 @@
let saveColumn = cloneDeep(editableColumn)
// Handle types on composite types
const definition = fieldDefinitions[saveColumn.type.toUpperCase()]
if (definition && saveColumn.type === definition.compositeType) {
saveColumn.type = definition.type
saveColumn.subtype = definition.subtype
delete saveColumn.compositeType
}
delete saveColumn.fieldId
if (saveColumn.type === AUTO_TYPE) {
saveColumn = buildAutoColumn(
@ -320,27 +338,33 @@
}
}
function handleTypeChange(event) {
function onHandleTypeChange(event) {
handleTypeChange(event.detail)
}
function handleTypeChange(type) {
// remove any extra fields that may not be related to this type
delete editableColumn.autocolumn
delete editableColumn.subtype
delete editableColumn.tableId
delete editableColumn.relationshipType
delete editableColumn.formulaType
delete editableColumn.constraints
// Add in defaults and initial definition
const definition = fieldDefinitions[event.detail?.toUpperCase()]
const definition = fieldDefinitions[type?.toUpperCase()]
if (definition?.constraints) {
editableColumn.constraints = definition.constraints
}
editableColumn.type = definition.type
editableColumn.subtype = definition.subtype
// Default relationships many to many
if (editableColumn.type === LINK_TYPE) {
editableColumn.relationshipType = RelationshipType.MANY_TO_MANY
} else if (editableColumn.type === FORMULA_TYPE) {
editableColumn.formulaType = "dynamic"
} else if (editableColumn.type === USER_REFRENCE_TYPE) {
editableColumn.relationshipType = RelationshipType.ONE_TO_MANY
}
}
@ -381,10 +405,27 @@
return ALLOWABLE_NUMBER_OPTIONS
}
const isUsers =
editableColumn.type === FieldType.BB_REFERENCE &&
editableColumn.subtype === FieldSubtype.USERS
if (!external) {
return [
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
FIELDS.STRING,
FIELDS.BARCODEQR,
FIELDS.LONGFORM,
FIELDS.OPTIONS,
FIELDS.ARRAY,
FIELDS.NUMBER,
FIELDS.BIGINT,
FIELDS.BOOLEAN,
FIELDS.DATETIME,
FIELDS.ATTACHMENT,
FIELDS.LINK,
FIELDS.FORMULA,
FIELDS.JSON,
isUsers ? FIELDS.USERS : FIELDS.USER,
FIELDS.AUTO,
]
} else {
let fields = [
@ -397,7 +438,7 @@
FIELDS.BOOLEAN,
FIELDS.FORMULA,
FIELDS.BIGINT,
FIELDS.BB_REFERENCE_USER,
isUsers ? FIELDS.USERS : FIELDS.USER,
]
// no-sql or a spreadsheet
if (!external || table.sql) {
@ -472,6 +513,13 @@
return newError
}
function isUsersColumn(column) {
return (
column.type === FieldType.BB_REFERENCE &&
[FieldSubtype.USER, FieldSubtype.USERS].includes(column.subtype)
)
}
onMount(() => {
mounted = true
})
@ -489,14 +537,14 @@
{/if}
<Select
disabled={!typeEnabled}
bind:value={editableColumn.type}
on:change={handleTypeChange}
bind:value={editableColumn.fieldId}
on:change={onHandleTypeChange}
options={allowedTypes}
getOptionLabel={field => field.name}
getOptionValue={field => field.compositeType || field.type}
getOptionValue={field => field.fieldId}
getOptionIcon={field => field.icon}
isOptionEnabled={option => {
if (option.type == AUTO_TYPE) {
if (option.type === AUTO_TYPE) {
return availableAutoColumnKeys?.length > 0
}
return true
@ -555,7 +603,7 @@
<DatePicker bind:value={editableColumn.constraints.datetime.latest} />
</div>
</div>
{#if datasource?.source !== "ORACLE" && datasource?.source !== "SQL_SERVER" && !editableColumn.dateOnly}
{#if datasource?.source !== SourceName.ORACLE && datasource?.source !== SourceName.SQL_SERVER && !editableColumn.dateOnly}
<div>
<div class="row">
<Label>Time zones</Label>
@ -659,13 +707,16 @@
<Button primary text on:click={openJsonSchemaEditor}
>Open schema editor</Button
>
{:else if editableColumn.type === USER_REFRENCE_TYPE}
{:else if isUsersColumn(editableColumn) && datasource?.source !== SourceName.GOOGLE_SHEETS}
<Toggle
value={editableColumn.relationshipType === RelationshipType.MANY_TO_MANY}
value={editableColumn.subtype === FieldSubtype.USERS}
on:change={e =>
(editableColumn.relationshipType = e.detail
? RelationshipType.MANY_TO_MANY
: RelationshipType.ONE_TO_MANY)}
handleTypeChange(
makeFieldId(
FieldType.BB_REFERENCE,
e.detail ? FieldSubtype.USERS : FieldSubtype.USER
)
)}
disabled={!isCreating}
thin
text="Allow multiple users"
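For reference, a sketch of the composite IDs the new makeFieldId helper produces (assuming FieldType.BB_REFERENCE serialises to "bb_reference"):

makeFieldId("string") // "STRING"
makeFieldId("bb_reference", "user") // "BB_REFERENCEUSER"
makeFieldId("formula", undefined, true) // "FORMULA" - autocolumn IDs drop the subtype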

View file

@ -13,6 +13,8 @@
import { Helpers } from "@budibase/bbui"
import { RelationshipErrorChecker } from "./relationshipErrors"
import { onMount } from "svelte"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
import { PrettyRelationshipDefinitions } from "constants/backend"
export let save
export let datasource
@ -22,16 +24,21 @@
export let selectedFromTable
export let close
const relationshipTypes = [
{
label: "One to Many",
value: RelationshipType.MANY_TO_ONE,
let relationshipMap = {
[RelationshipType.MANY_TO_MANY]: {
part1: PrettyRelationshipDefinitions.MANY,
part2: PrettyRelationshipDefinitions.MANY,
},
{
label: "Many to Many",
value: RelationshipType.MANY_TO_MANY,
[RelationshipType.MANY_TO_ONE]: {
part1: PrettyRelationshipDefinitions.ONE,
part2: PrettyRelationshipDefinitions.MANY,
},
]
}
let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)
let relationshipPart1 = PrettyRelationshipDefinitions.MANY
let relationshipPart2 = PrettyRelationshipDefinitions.ONE
let originalFromColumnName = toRelationship.name,
originalToColumnName = fromRelationship.name
@ -49,14 +56,32 @@
)
let errors = {}
let fromPrimary, fromForeign, fromColumn, toColumn
let fromId, toId, throughId, throughToKey, throughFromKey
let throughId, throughToKey, throughFromKey
let isManyToMany, isManyToOne, relationshipType
let hasValidated = false
$: fromId = null
$: toId = null
$: tableOptions = plusTables.map(table => ({
label: table.name,
value: table._id,
name: table.name,
_id: table._id,
}))
$: {
// Determine the relationship type based on the selected values of both parts
relationshipType = Object.entries(relationshipMap).find(
([_, parts]) =>
parts.part1 === relationshipPart1 && parts.part2 === relationshipPart2
)?.[0]
changed(() => {
hasValidated = false
})
}
$: valid =
getErrorCount(errors) === 0 && allRequiredAttributesSet(relationshipType)
$: isManyToMany = relationshipType === RelationshipType.MANY_TO_MANY
@ -338,33 +363,34 @@
onConfirm={saveRelationship}
disabled={!valid}
>
<Select
label="Relationship type"
options={relationshipTypes}
bind:value={relationshipType}
bind:error={errors.relationshipType}
on:change={() =>
changed(() => {
hasValidated = false
})}
/>
<div class="headings">
<Detail>Tables</Detail>
</div>
{#if !selectedFromTable}
<Select
label="Select from table"
options={tableOptions}
bind:value={fromId}
bind:error={errors.fromTable}
on:change={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
fromColumn = table?.name || ""
fromPrimary = table?.primary?.[0]
})}
/>
{/if}
<RelationshipSelector
bind:relationshipPart1
bind:relationshipPart2
bind:relationshipTableIdPrimary={fromId}
bind:relationshipTableIdSecondary={toId}
{relationshipOpts1}
{relationshipOpts2}
{tableOptions}
{errors}
primaryDisabled={selectedFromTable}
primaryTableChanged={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
fromColumn = table?.name || ""
fromPrimary = table?.primary?.[0]
})}
secondaryTableChanged={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
toColumn = table.name || ""
fromForeign = null
})}
/>
{#if isManyToOne && fromId}
<Select
label={`Primary Key (${getTable(fromId).name})`}
@ -374,18 +400,6 @@
on:change={changed}
/>
{/if}
<Select
label={"Select to table"}
options={tableOptions}
bind:value={toId}
bind:error={errors.toTable}
on:change={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
toColumn = table.name || ""
fromForeign = null
})}
/>
{#if isManyToMany}
<Select
label={"Through"}

View file

@ -57,7 +57,7 @@
{#if $store.error}
<InlineAlert
type="error"
header={$store.error.title}
header="Error fetching {tableType}"
message={$store.error.description}
/>
{/if}

View file

@ -1,6 +1,6 @@
import { derived, writable, get } from "svelte/store"
import { keepOpen, notifications } from "@budibase/bbui"
import { datasources, ImportTableError, tables } from "stores/backend"
import { datasources, tables } from "stores/backend"
export const createTableSelectionStore = (integration, datasource) => {
const tableNamesStore = writable([])
@ -30,12 +30,7 @@ export const createTableSelectionStore = (integration, datasource) => {
notifications.success(`Tables fetched successfully.`)
await onComplete()
} catch (err) {
if (err instanceof ImportTableError) {
errorStore.set(err)
} else {
notifications.error("Error fetching tables.")
}
errorStore.set(err)
return keepOpen
}
}

View file

@ -49,6 +49,15 @@
label: "Long Form Text",
value: FIELDS.LONGFORM.type,
},
{
label: "User",
value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
},
{
label: "Users",
value: `${FIELDS.USERS.type}${FIELDS.USERS.subtype}`,
},
]
$: {
@ -143,7 +152,7 @@
<div class="field">
<span>{name}</span>
<Select
value={schema[name]?.type}
value={`${schema[name]?.type}${schema[name]?.subtype || ""}`}
options={typeOptions}
placeholder={null}
getOptionLabel={option => option.label}

View file

@ -3,6 +3,7 @@
import { FIELDS } from "constants/backend"
import { API } from "api"
import { parseFile } from "./utils"
import { canBeDisplayColumn } from "@budibase/shared-core"
export let rows = []
export let schema = {}
@ -10,36 +11,82 @@
export let displayColumn = null
export let promptUpload = false
const typeOptions = [
{
const typeOptions = {
[FIELDS.STRING.type]: {
label: "Text",
value: FIELDS.STRING.type,
config: {
type: FIELDS.STRING.type,
constraints: FIELDS.STRING.constraints,
},
},
{
[FIELDS.NUMBER.type]: {
label: "Number",
value: FIELDS.NUMBER.type,
config: {
type: FIELDS.NUMBER.type,
constraints: FIELDS.NUMBER.constraints,
},
},
{
[FIELDS.DATETIME.type]: {
label: "Date",
value: FIELDS.DATETIME.type,
config: {
type: FIELDS.DATETIME.type,
constraints: FIELDS.DATETIME.constraints,
},
},
{
[FIELDS.OPTIONS.type]: {
label: "Options",
value: FIELDS.OPTIONS.type,
config: {
type: FIELDS.OPTIONS.type,
constraints: FIELDS.OPTIONS.constraints,
},
},
{
[FIELDS.ARRAY.type]: {
label: "Multi-select",
value: FIELDS.ARRAY.type,
config: {
type: FIELDS.ARRAY.type,
constraints: FIELDS.ARRAY.constraints,
},
},
{
[FIELDS.BARCODEQR.type]: {
label: "Barcode/QR",
value: FIELDS.BARCODEQR.type,
config: {
type: FIELDS.BARCODEQR.type,
constraints: FIELDS.BARCODEQR.constraints,
},
},
{
[FIELDS.LONGFORM.type]: {
label: "Long Form Text",
value: FIELDS.LONGFORM.type,
config: {
type: FIELDS.LONGFORM.type,
constraints: FIELDS.LONGFORM.constraints,
},
},
]
user: {
label: "User",
value: "user",
config: {
type: FIELDS.USER.type,
subtype: FIELDS.USER.subtype,
constraints: FIELDS.USER.constraints,
},
},
users: {
label: "Users",
value: "users",
config: {
type: FIELDS.USERS.type,
subtype: FIELDS.USERS.subtype,
constraints: FIELDS.USERS.constraints,
},
},
}
let fileInput
let error = null
@ -48,10 +95,16 @@
let validation = {}
let validateHash = ""
let errors = {}
let selectedColumnTypes = {}
$: displayColumnOptions = Object.keys(schema || {}).filter(column => {
return validation[column]
return validation[column] && canBeDisplayColumn(schema[column].type)
})
$: if (displayColumn && !canBeDisplayColumn(schema[displayColumn].type)) {
displayColumn = null
}
$: {
// binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
@ -72,6 +125,13 @@
rows = response.rows
schema = response.schema
fileName = response.fileName
selectedColumnTypes = Object.entries(response.schema).reduce(
(acc, [colName, fieldConfig]) => ({
...acc,
[colName]: fieldConfig.type,
}),
{}
)
} catch (e) {
loading = false
error = e
@ -98,8 +158,10 @@
}
const handleChange = (name, e) => {
schema[name].type = e.detail
schema[name].constraints = FIELDS[e.detail.toUpperCase()].constraints
const { config } = typeOptions[e.detail]
schema[name].type = config.type
schema[name].subtype = config.subtype
schema[name].constraints = config.constraints
}
const openFileUpload = (promptUpload, fileInput) => {
@ -142,9 +204,9 @@
<div class="field">
<span>{column.name}</span>
<Select
bind:value={column.type}
bind:value={selectedColumnTypes[column.name]}
on:change={e => handleChange(name, e)}
options={typeOptions}
options={Object.values(typeOptions)}
placeholder={null}
getOptionLabel={option => option.label}
getOptionValue={option => option.value}
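Because the options are now keyed, the change event's detail is the key itself and handleChange copies the whole config across; a sketch (the column name is hypothetical, and FIELDS.USERS is assumed to carry the bb_reference type with the "users" subtype):

handleChange("assignees", { detail: "users" })
// schema.assignees.type → FIELDS.USERS.type
// schema.assignees.subtype → FIELDS.USERS.subtype
// schema.assignees.constraints → FIELDS.USERS.constraints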

View file

@ -102,7 +102,7 @@
</div>
{/if}
<div class="text" title={showTooltip ? text : null}>
{text}
<span title={text}>{text}</span>
{#if selectedBy}
<UserAvatars size="XS" users={selectedBy} />
{/if}
@ -227,9 +227,6 @@
.text {
font-weight: 600;
font-size: 12px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
flex: 1 1 auto;
color: var(--spectrum-global-color-gray-900);
order: 2;
@ -238,6 +235,11 @@
align-items: center;
gap: 8px;
}
.text span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.scrollable .text {
flex: 0 0 auto;
max-width: 160px;

View file

@ -6,11 +6,14 @@
export let relationshipTableIdPrimary
export let relationshipTableIdSecondary
export let editableColumn
export let linkEditDisabled
export let linkEditDisabled = false
export let tableOptions
export let errors
export let relationshipOpts1
export let relationshipOpts2
export let primaryTableChanged
export let secondaryTableChanged
export let primaryDisabled = true
</script>
<div class="relationship-container">
@ -19,16 +22,19 @@
disabled={linkEditDisabled}
bind:value={relationshipPart1}
options={relationshipOpts1}
bind:error={errors.relationshipType}
/>
</div>
<div class="relationship-label">in</div>
<div class="relationship-part">
<Select
disabled
disabled={primaryDisabled}
options={tableOptions}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
bind:value={relationshipTableIdPrimary}
on:change={primaryTableChanged}
bind:error={errors.fromTable}
/>
</div>
</div>
@ -46,20 +52,24 @@
<Select
disabled={linkEditDisabled}
bind:value={relationshipTableIdSecondary}
bind:error={errors.toTable}
options={tableOptions.filter(
table => table._id !== relationshipTableIdPrimary
)}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
on:change={secondaryTableChanged}
/>
</div>
</div>
<Input
disabled={linkEditDisabled}
label={`Column name in other table`}
bind:value={editableColumn.fieldName}
error={errors.relatedName}
/>
{#if editableColumn}
<Input
disabled={linkEditDisabled}
label={`Column name in other table`}
bind:value={editableColumn.fieldName}
error={errors.relatedName}
/>
{/if}
<style>
.relationship-container {

View file

@ -1,91 +0,0 @@
<script>
import { Button, ActionButton, Drawer } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import ColumnDrawer from "./ColumnEditor/ColumnDrawer.svelte"
import { cloneDeep } from "lodash/fp"
import {
getDatasourceForProvider,
getSchemaForDatasource,
} from "builderStore/dataBinding"
import { currentAsset } from "builderStore"
import { getFields } from "helpers/searchFields"
export let componentInstance
export let value = []
export let allowCellEditing = true
export let subject = "Table"
const dispatch = createEventDispatcher()
let drawer
let boundValue
$: datasource = getDatasourceForProvider($currentAsset, componentInstance)
$: schema = getSchema($currentAsset, datasource)
$: options = allowCellEditing
? Object.keys(schema || {})
: enrichedSchemaFields?.map(field => field.name)
$: sanitisedValue = getValidColumns(value, options)
$: updateBoundValue(sanitisedValue)
$: enrichedSchemaFields = getFields(Object.values(schema || {}), {
allowLinks: true,
})
const getSchema = (asset, datasource) => {
const schema = getSchemaForDatasource(asset, datasource).schema
// Don't show ID and rev in tables
if (schema) {
delete schema._id
delete schema._rev
}
return schema
}
const updateBoundValue = value => {
boundValue = cloneDeep(value)
}
const getValidColumns = (columns, options) => {
if (!Array.isArray(columns) || !columns.length) {
return []
}
// We need to account for legacy configs which would just be an array
// of strings
if (typeof columns[0] === "string") {
columns = columns.map(col => ({
name: col,
displayName: col,
}))
}
return columns.filter(column => {
return options.includes(column.name)
})
}
const open = () => {
updateBoundValue(sanitisedValue)
drawer.show()
}
const save = () => {
dispatch("change", getValidColumns(boundValue, options))
drawer.hide()
}
</script>
<ActionButton on:click={open}>Configure columns</ActionButton>
<Drawer bind:this={drawer} title="{subject} Columns">
<svelte:fragment slot="description">
Configure the columns in your {subject.toLowerCase()}.
</svelte:fragment>
<Button cta slot="buttons" on:click={save}>Save</Button>
<ColumnDrawer
slot="body"
bind:columns={boundValue}
{options}
{schema}
{allowCellEditing}
/>
</Drawer>

View file

@ -37,7 +37,7 @@
}
$: datasource = getDatasourceForProvider($currentAsset, componentInstance)
$: resourceId = datasource.resourceId || datasource.tableId
$: resourceId = datasource?.resourceId || datasource?.tableId
$: if (!isEqual(value, cachedValue)) {
cachedValue = cloneDeep(value)

View file

@ -3,21 +3,23 @@
Body,
Button,
Combobox,
Multiselect,
DatePicker,
DrawerContent,
Icon,
Input,
Layout,
Select,
Label,
Layout,
Multiselect,
Select,
} from "@budibase/bbui"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { Constants, LuceneUtils } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields"
import { FieldType } from "@budibase/types"
import { createEventDispatcher, onMount } from "svelte"
import FilterUsers from "./FilterUsers.svelte"
export let schemaFields
export let filters = []
@ -29,7 +31,6 @@
const dispatch = createEventDispatcher()
const { OperatorOptions } = Constants
const { getValidOperatorsForType } = LuceneUtils
const KeyedFieldRegex = /\d[0-9]*:/g
const behaviourOptions = [
{ value: "and", label: "Match all filters" },
@@ -120,22 +121,19 @@
return enrichedSchemaFields.find(field => field.name === filter.field)
}
const santizeTypes = filter => {
const sanitizeTypes = filter => {
// Update type based on field
const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field)
filter.type = fieldSchema?.type
filter.subtype = fieldSchema?.subtype
// Update external type based on field
filter.externalType = getSchema(filter)?.externalType
}
const santizeOperator = filter => {
const sanitizeOperator = filter => {
// Ensure a valid operator is selected
const operators = getValidOperatorsForType(
filter.type,
filter.field,
datasource
).map(x => x.value)
const operators = getValidOperatorsForType(filter).map(x => x.value)
if (!operators.includes(filter.operator)) {
filter.operator = operators[0] ?? OperatorOptions.Equals.value
}
@@ -148,7 +146,7 @@
filter.noValue = noValueOptions.includes(filter.operator)
}
const santizeValue = filter => {
const sanitizeValue = (filter, previousType) => {
// Check if the operator allows a value at all
if (filter.noValue) {
filter.value = null
@@ -162,28 +160,47 @@
}
} else if (filter.type === "array" && filter.valueType === "Value") {
filter.value = []
} else if (
previousType !== filter.type &&
(previousType === FieldType.BB_REFERENCE ||
filter.type === FieldType.BB_REFERENCE)
) {
filter.value = filter.type === "array" ? [] : null
}
}
const onFieldChange = filter => {
santizeTypes(filter)
santizeOperator(filter)
santizeValue(filter)
const previousType = filter.type
sanitizeTypes(filter)
sanitizeOperator(filter)
sanitizeValue(filter, previousType)
}
const onOperatorChange = filter => {
santizeOperator(filter)
santizeValue(filter)
sanitizeOperator(filter)
sanitizeValue(filter, filter.type)
}
const onValueTypeChange = filter => {
santizeValue(filter)
sanitizeValue(filter)
}
const getFieldOptions = field => {
const schema = enrichedSchemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
}
const getValidOperatorsForType = filter => {
if (!filter?.field) {
return []
}
return LuceneUtils.getValidOperatorsForType(
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)
}
</script>
<DrawerContent>
@@ -228,11 +245,7 @@
/>
<Select
disabled={!filter.field}
options={getValidOperatorsForType(
filter.type,
filter.field,
datasource
)}
options={getValidOperatorsForType(filter)}
bind:value={filter.operator}
on:change={() => onOperatorChange(filter)}
placeholder={null}
@@ -285,6 +298,15 @@
timeOnly={getSchema(filter)?.timeOnly}
bind:value={filter.value}
/>
{:else if filter.type === FieldType.BB_REFERENCE}
<FilterUsers
bind:value={filter.value}
multiselect={[
OperatorOptions.In.value,
OperatorOptions.ContainsAny.value,
].includes(filter.operator)}
disabled={filter.noValue}
/>
{:else}
<DrawerBindableInput disabled />
{/if}
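To make the renamed sanitize chain above concrete, here is a sketch of what `onFieldChange` does when a filter moves from a plain text column to a user (`BB_REFERENCE`) column; the filter object is hypothetical:

```js
// Hypothetical filter switching from a text column to a user column
const filter = { field: "createdBy", type: "string", operator: "equal", value: "foo" }
const previousType = filter.type    // "string"
sanitizeTypes(filter)               // type/subtype now come from the new field's schema
sanitizeOperator(filter)            // operator reset if invalid for the new type
sanitizeValue(filter, previousType)
// value cleared (null or []) because the type crossed the BB_REFERENCE boundary
```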

View file

@@ -0,0 +1,34 @@
<script>
import { Select, Multiselect } from "@budibase/bbui"
import { fetchData } from "@budibase/frontend-core"
import { API } from "api"
export let value = null
export let disabled
export let multiselect = false
$: fetch = fetchData({
API,
datasource: {
type: "user",
},
options: {
limit: 100,
},
})
$: options = $fetch.rows
$: component = multiselect ? Multiselect : Select
</script>
<svelte:component
this={component}
bind:value
autocomplete
{options}
getOptionLabel={option => option.email}
getOptionValue={option => option._id}
{disabled}
/>

View file

@@ -20,9 +20,7 @@
const getSortableFields = schema => {
return Object.entries(schema || {})
.filter(
entry => !UNSORTABLE_TYPES.includes(entry[1].type) && entry[1].sortable
)
.filter(entry => !UNSORTABLE_TYPES.includes(entry[1].type))
.map(entry => entry[0])
}

View file

@@ -54,6 +54,7 @@
label="App export"
on:change={e => {
file = e.detail?.[0]
encrypted = file?.name?.endsWith(".enc.tar.gz")
}}
/>
<Toggle text="Encrypted" bind:value={encrypted} />

View file

@@ -1,133 +1,4 @@
export const FIELDS = {
STRING: {
name: "Text",
type: "string",
icon: "Text",
constraints: {
type: "string",
length: {},
presence: false,
},
},
BARCODEQR: {
name: "Barcode/QR",
type: "barcodeqr",
icon: "Camera",
constraints: {
type: "string",
length: {},
presence: false,
},
},
LONGFORM: {
name: "Long Form Text",
type: "longform",
icon: "TextAlignLeft",
constraints: {
type: "string",
length: {},
presence: false,
},
},
OPTIONS: {
name: "Options",
type: "options",
icon: "Dropdown",
constraints: {
type: "string",
presence: false,
inclusion: [],
},
},
ARRAY: {
name: "Multi-select",
type: "array",
icon: "Duplicate",
constraints: {
type: "array",
presence: false,
inclusion: [],
},
},
NUMBER: {
name: "Number",
type: "number",
icon: "123",
constraints: {
type: "number",
presence: false,
numericality: { greaterThanOrEqualTo: "", lessThanOrEqualTo: "" },
},
},
BIGINT: {
name: "BigInt",
type: "bigint",
icon: "TagBold",
},
BOOLEAN: {
name: "Boolean",
type: "boolean",
icon: "Boolean",
constraints: {
type: "boolean",
presence: false,
},
},
DATETIME: {
name: "Date/Time",
type: "datetime",
icon: "Calendar",
constraints: {
type: "string",
length: {},
presence: false,
datetime: {
latest: "",
earliest: "",
},
},
},
ATTACHMENT: {
name: "Attachment",
type: "attachment",
icon: "Folder",
constraints: {
type: "array",
presence: false,
},
},
LINK: {
name: "Relationship",
type: "link",
icon: "Link",
constraints: {
type: "array",
presence: false,
},
},
FORMULA: {
name: "Formula",
type: "formula",
icon: "Calculator",
constraints: {},
},
JSON: {
name: "JSON",
type: "json",
icon: "Brackets",
constraints: {
type: "object",
presence: false,
},
},
BB_REFERENCE_USER: {
name: "User",
type: "bb_reference",
subtype: "user",
compositeType: "bb_reference_user", // Used for working with the subtype on CreateEditColumn as is it was a primary type
icon: "User",
},
}
import { FieldType, FieldSubtype } from "@budibase/types"
export const AUTO_COLUMN_SUB_TYPES = {
AUTO_ID: "autoID",
@@ -145,6 +16,151 @@ export const AUTO_COLUMN_DISPLAY_NAMES = {
UPDATED_AT: "Updated At",
}
export const FIELDS = {
STRING: {
name: "Text",
type: FieldType.STRING,
icon: "Text",
constraints: {
type: "string",
length: {},
presence: false,
},
},
BARCODEQR: {
name: "Barcode/QR",
type: FieldType.BARCODEQR,
icon: "Camera",
constraints: {
type: "string",
length: {},
presence: false,
},
},
LONGFORM: {
name: "Long Form Text",
type: FieldType.LONGFORM,
icon: "TextAlignLeft",
constraints: {
type: "string",
length: {},
presence: false,
},
},
OPTIONS: {
name: "Options",
type: FieldType.OPTIONS,
icon: "Dropdown",
constraints: {
type: "string",
presence: false,
inclusion: [],
},
},
ARRAY: {
name: "Multi-select",
type: FieldType.ARRAY,
icon: "Duplicate",
constraints: {
type: "array",
presence: false,
inclusion: [],
},
},
NUMBER: {
name: "Number",
type: FieldType.NUMBER,
icon: "123",
constraints: {
type: "number",
presence: false,
numericality: { greaterThanOrEqualTo: "", lessThanOrEqualTo: "" },
},
},
BIGINT: {
name: "BigInt",
type: FieldType.BIGINT,
icon: "TagBold",
},
BOOLEAN: {
name: "Boolean",
type: FieldType.BOOLEAN,
icon: "Boolean",
constraints: {
type: "boolean",
presence: false,
},
},
DATETIME: {
name: "Date/Time",
type: FieldType.DATETIME,
icon: "Calendar",
constraints: {
type: "string",
length: {},
presence: false,
datetime: {
latest: "",
earliest: "",
},
},
},
ATTACHMENT: {
name: "Attachment",
type: FieldType.ATTACHMENT,
icon: "Folder",
constraints: {
type: "array",
presence: false,
},
},
LINK: {
name: "Relationship",
type: FieldType.LINK,
icon: "Link",
constraints: {
type: "array",
presence: false,
},
},
AUTO: {
name: "Auto Column",
type: FieldType.AUTO,
icon: "MagicWand",
constraints: {},
},
FORMULA: {
name: "Formula",
type: FieldType.FORMULA,
icon: "Calculator",
constraints: {},
},
JSON: {
name: "JSON",
type: FieldType.JSON,
icon: "Brackets",
constraints: {
type: "object",
presence: false,
},
},
USER: {
name: "User",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USER,
icon: "User",
},
USERS: {
name: "Users",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
icon: "User",
constraints: {
type: "array",
},
},
}
export const FILE_TYPES = {
IMAGE: ["png", "tiff", "gif", "raw", "jpg", "jpeg"],
CODE: ["js", "rs", "py", "java", "rb", "hs", "yml"],

View file

@@ -3,16 +3,17 @@
* e.g.
* name all names result
* ------ ----------- --------
* ("foo") ["foo"] "foo (1)"
* ("foo") ["foo", "foo (1)"] "foo (2)"
* ("foo (1)") ["foo", "foo (1)"] "foo (2)"
* ("foo") ["foo", "foo (2)"] "foo (1)"
* ("foo") ["foo"] "foo 1"
* ("foo") ["foo", "foo 1"] "foo 2"
* ("foo 1") ["foo", "foo 1"] "foo 2"
* ("foo") ["foo", "foo 2"] "foo 1"
*
* Repl
*/
export const duplicateName = (name, allNames) => {
const baseName = name.split(" (")[0]
const isDuplicate = new RegExp(`${baseName}\\s\\((\\d+)\\)$`)
const duplicatePattern = new RegExp(`\\s(\\d+)$`)
const baseName = name.split(duplicatePattern)[0]
const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)
// get the sequence from matched names
const sequence = []
@@ -28,7 +29,6 @@ export const duplicateName = (name, allNames) => {
return false
})
sequence.sort((a, b) => a - b)
// get the next number in the sequence
let number
if (sequence.length === 0) {
@@ -46,5 +46,5 @@ export const duplicateName = (name, allNames) => {
}
}
return `${baseName} (${number})`
return `${baseName} ${number}`
}
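The base-name split above works because `String.prototype.split` keeps capture groups in its output; a short illustration with sample names:

```js
"foo 2".split(/\s(\d+)$/)  // -> ["foo", "2", ""] — index 0 is the base name
"foo".split(/\s(\d+)$/)    // -> ["foo"] — no numeric suffix, name unchanged
duplicateName("foo", ["foo", "foo 1"])  // -> "foo 2"
```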

View file

@@ -9,34 +9,34 @@ describe("duplicate", () => {
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (1)")
expect(duplicate).toBe("foo 1")
})
it("with multiple existing", async () => {
const names = ["foo", "foo (1)", "foo (2)"]
const names = ["foo", "foo 1", "foo 2"]
const name = "foo"
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (3)")
expect(duplicate).toBe("foo 3")
})
it("with mixed multiple existing", async () => {
const names = ["foo", "foo (1)", "foo (2)", "bar", "bar (1)", "bar (2)"]
const names = ["foo", "foo 1", "foo 2", "bar", "bar 1", "bar 2"]
const name = "foo"
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (3)")
expect(duplicate).toBe("foo 3")
})
it("with incomplete sequence", async () => {
const names = ["foo", "foo (2)", "foo (3)"]
const names = ["foo", "foo 2", "foo 3"]
const name = "foo"
const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (1)")
expect(duplicate).toBe("foo 1")
})
})
})

View file

@@ -118,7 +118,7 @@
}
const getOperatorOptions = condition => {
return LuceneUtils.getValidOperatorsForType(condition.valueType)
return LuceneUtils.getValidOperatorsForType({ type: condition.valueType })
}
const onOperatorChange = (condition, newOperator) => {
@@ -137,9 +137,9 @@
condition.referenceValue = null
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(newType).map(
x => x.value
)
const validOperators = LuceneUtils.getValidOperatorsForType({
type: newType,
}).map(x => x.value)
if (!validOperators.includes(condition.operator)) {
condition.operator =
validOperators[0] ?? Constants.OperatorOptions.Equals.value

View file

@@ -13,7 +13,7 @@
import ExportAppModal from "components/start/ExportAppModal.svelte"
import ImportAppModal from "components/start/ImportAppModal.svelte"
$: filteredApps = $apps.filter(app => app.devId == $store.appId)
$: filteredApps = $apps.filter(app => app.devId === $store.appId)
$: app = filteredApps.length ? filteredApps[0] : {}
$: appDeployed = app?.status === AppStatus.DEPLOYED

View file

@@ -123,7 +123,10 @@
prevUserSearch = search
try {
userPageInfo.loading()
await users.search({ userPage, email: search })
await users.search({
bookmark: userPage,
query: { string: { email: search } },
})
userPageInfo.fetched($users.hasNextPage, $users.nextPage)
} catch (error) {
notifications.error("Error getting user list")

View file

@@ -31,7 +31,10 @@
prevSearch = search
try {
pageInfo.loading()
await users.search({ page, email: search })
await users.search({
bookmark: page,
query: { string: { email: search } },
})
pageInfo.fetched($users.hasNextPage, $users.nextPage)
} catch (error) {
notifications.error("Error getting user list")

View file

@@ -9,15 +9,19 @@ import { API } from "api"
import { DatasourceFeature } from "@budibase/types"
import { TableNames } from "constants"
export class ImportTableError extends Error {
constructor(message) {
super(message)
const [title, description] = message.split(" - ")
class TableImportError extends Error {
constructor(errors) {
super()
this.name = "TableImportError"
this.errors = errors
}
this.name = "TableSelectionError"
// Capitalize the first character of both the title and description
this.title = title[0].toUpperCase() + title.substr(1)
this.description = description[0].toUpperCase() + description.substr(1)
get description() {
let message = ""
for (const key in this.errors) {
message += `${key}: ${this.errors[key]}\n`
}
return message
}
}
@@ -25,7 +29,6 @@ export function createDatasourcesStore() {
const store = writable({
list: [],
selectedDatasourceId: null,
schemaError: null,
})
const derivedStore = derived([store, tables], ([$store, $tables]) => {
@@ -75,18 +78,13 @@
store.update(state => ({
...state,
selectedDatasourceId: id,
// Remove any possible schema error
schemaError: null,
}))
}
const updateDatasource = response => {
const { datasource, error } = response
if (error) {
store.update(state => ({
...state,
schemaError: error,
}))
const { datasource, errors } = response
if (errors && Object.keys(errors).length > 0) {
throw new TableImportError(errors)
}
replaceDatasource(datasource._id, datasource)
select(datasource._id)
@@ -94,20 +92,11 @@
}
const updateSchema = async (datasource, tablesFilter) => {
try {
const response = await API.buildDatasourceSchema({
datasourceId: datasource?._id,
tablesFilter,
})
updateDatasource(response)
} catch (e) {
// buildDatasourceSchema call returns user presentable errors with two parts divided with a " - ".
if (e.message.split(" - ").length === 2) {
throw new ImportTableError(e.message)
} else {
throw e
}
}
const response = await API.buildDatasourceSchema({
datasourceId: datasource?._id,
tablesFilter,
})
updateDatasource(response)
}
const sourceCount = source => {
@@ -172,12 +161,6 @@
replaceDatasource(datasource._id, null)
}
const removeSchemaError = () => {
store.update(state => {
return { ...state, schemaError: null }
})
}
const replaceDatasource = (datasourceId, datasource) => {
if (!datasourceId) {
return
@@ -230,7 +213,6 @@
create,
update,
delete: deleteDatasource,
removeSchemaError,
replaceDatasource,
getTableNames,
}
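For clarity, a sketch of how the new `TableImportError` renders the per-table error map; the table names and messages below are hypothetical:

```js
const err = new TableImportError({
  orders: "No primary key constraint found",
  audit_log: "Contains invalid column names",
})
console.log(err.description)
// orders: No primary key constraint found
// audit_log: Contains invalid column names
```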

View file

@@ -4,7 +4,7 @@ export { views } from "./views"
export { viewsV2 } from "./viewsV2"
export { permissions } from "./permissions"
export { roles } from "./roles"
export { datasources, ImportTableError } from "./datasources"
export { datasources } from "./datasources"
export { integrations } from "./integrations"
export { sortedIntegrations } from "./sortedIntegrations"
export { queries } from "./queries"

View file

@@ -57,7 +57,8 @@ export async function checkDockerConfigured() {
"docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
const docker = await lookpath("docker")
const compose = await lookpath("docker-compose")
if (!docker || !compose) {
const composeV2 = await lookpath("docker compose")
if (!docker || (!compose && !composeV2)) {
throw error
}
}

View file

@@ -12,6 +12,10 @@ if (!process.argv[0].includes("node")) {
checkForBinaries()
}
function localPrebuildPath() {
return join(process.execPath, "..", PREBUILDS)
}
function checkForBinaries() {
const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH)
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
@@ -19,17 +23,21 @@ function checkForBinaries() {
}
const natives = fs.readdirSync(readDir)
if (fs.existsSync(readDir)) {
const writePath = join(process.execPath, PREBUILDS, ARCH)
const writePath = join(localPrebuildPath(), ARCH)
fs.mkdirSync(writePath, { recursive: true })
for (let native of natives) {
const filename = `${native.split(".fake")[0]}.node`
fs.cpSync(join(readDir, native), join(writePath, filename))
}
console.log("copied something")
}
}
function cleanup(evt?: number) {
// cleanup prebuilds first
const path = localPrebuildPath()
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true })
}
if (evt && !isNaN(evt)) {
return
}
@@ -41,10 +49,6 @@ function cleanup(evt?: number) {
)
console.error(error(evt))
}
const path = join(process.execPath, PREBUILDS)
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true })
}
}
const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"]
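A sketch of where `localPrebuildPath` now stages the native modules, assuming `PREBUILDS === "prebuilds"` and a hypothetical install location:

```js
const { join } = require("path")
// If the packaged CLI lives at /usr/local/bin/budi, prebuilds are staged
// next to the executable and removed again in cleanup():
join("/usr/local/bin/budi", "..", "prebuilds") // -> "/usr/local/bin/prebuilds"
```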

View file

@@ -3419,6 +3419,17 @@
"value": "custom"
}
},
{
"type": "event",
"label": "On change",
"key": "onChange",
"context": [
{
"label": "Field Value",
"key": "value"
}
]
},
{
"type": "validation/string",
"label": "Validation",
@@ -5598,6 +5609,21 @@
}
]
},
{
"type": "event",
"label": "On row click",
"key": "onRowClick",
"context": [
{
"label": "Clicked row",
"key": "row"
}
],
"dependsOn": {
"setting": "allowEditRows",
"value": false
}
},
{
"type": "boolean",
"label": "Add rows",
@@ -5673,11 +5699,6 @@
"label": "Validation",
"key": "validation"
},
{
"type": "filter/relationship",
"label": "Filtering",
"key": "filter"
},
{
"type": "boolean",
"label": "Search",

View file

@@ -14,12 +14,14 @@
export let initialSortOrder = null
export let fixedRowHeight = null
export let columns = null
export let onRowClick = null
const component = getContext("component")
const { styleable, API, builderStore, notificationStore } = getContext("sdk")
$: columnWhitelist = columns?.map(col => col.name)
$: schemaOverrides = getSchemaOverrides(columns)
$: handleRowClick = allowEditRows ? undefined : onRowClick
const getSchemaOverrides = columns => {
let overrides = {}
@@ -56,6 +58,7 @@
showControls={false}
notifySuccess={notificationStore.actions.success}
notifyError={notificationStore.actions.error}
on:rowclick={e => handleRowClick?.({ row: e.detail })}
/>
</div>

View file

@@ -63,7 +63,7 @@
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(
expression.type,
{ type: expression.type },
expression.field,
datasource
).map(x => x.value)
@@ -125,7 +125,7 @@
<Select
disabled={!filter.field}
options={LuceneUtils.getValidOperatorsForType(
filter.type,
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)}

View file

@@ -1,9 +1,28 @@
<script>
import RelationshipField from "./RelationshipField.svelte"
import { sdk } from "@budibase/shared-core"
export let defaultValue
function updateUserIDs(value) {
if (Array.isArray(value)) {
return value.map(val => sdk.users.getGlobalUserID(val))
} else {
return sdk.users.getGlobalUserID(value)
}
}
function updateReferences(value) {
if (sdk.users.containsUserID(value)) {
return updateUserIDs(value)
}
return value
}
</script>
<RelationshipField
{...$$props}
datasourceType={"user"}
primaryDisplay={"email"}
defaultValue={updateReferences(defaultValue)}
/>
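A hedged sketch of the normalisation above: `updateReferences` rewrites user metadata row IDs to global user IDs before they reach the relationship field. The ID formats below are illustrative only, not guaranteed:

```js
// Illustrative IDs only — the real formats come from @budibase/shared-core
updateReferences("ro_ta_users_us_abc123")    // -> "us_abc123" via getGlobalUserID
updateReferences(["us_abc123", "us_def456"]) // arrays are mapped element-wise
updateReferences("value_without_user_id")    // -> returned unchanged
```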

View file

@@ -128,6 +128,7 @@
<div class="manual-input">
<Input
bind:value
updateOnChange={false}
on:change={() => {
dispatch("change", value)
}}

View file

@@ -105,19 +105,25 @@
}
}
$: fetchRows(searchTerm, primaryDisplay)
$: fetchRows(searchTerm, primaryDisplay, defaultValue)
const fetchRows = (searchTerm, primaryDisplay) => {
const fetchRows = async (searchTerm, primaryDisplay, defaultVal) => {
const allRowsFetched =
$fetch.loaded &&
!Object.keys($fetch.query?.string || {}).length &&
!$fetch.hasNextPage
// Don't request until we have the primary display
if (!allRowsFetched && primaryDisplay) {
fetch.update({
query: { string: { [primaryDisplay]: searchTerm } },
// Don't request until we have the primary display, or until the default value has been fetched
if (allRowsFetched || !primaryDisplay) {
return
}
if (defaultVal && !optionsObj[defaultVal]) {
await fetch.update({
query: { equal: { _id: defaultVal } },
})
}
await fetch.update({
query: { string: { [primaryDisplay]: searchTerm } },
})
}
const flatten = values => {
@@ -160,7 +166,9 @@
const handleChange = value => {
const changed = fieldApi.setValue(value)
if (onChange && changed) {
onChange({ value })
onChange({
value,
})
}
}

View file

@@ -10,24 +10,28 @@ export const buildUserEndpoints = API => ({
/**
* Gets a list of users in the current tenant.
* @param {string} page The page to retrieve
* @param {string} search The starts with string to search username/email by.
* @param {string} bookmark The page to retrieve
* @param {object} query Search filters for user lookup (not all operators are supported).
* @param {string} appId Facilitate app/role based user searching
* @param {boolean} paginated Allow the disabling of pagination
* @param {boolean} paginate Allow the disabling of pagination
* @param {number} limit How many users to retrieve in a single search
*/
searchUsers: async ({ paginated, page, email, appId } = {}) => {
searchUsers: async ({ paginate, bookmark, query, appId, limit } = {}) => {
const opts = {}
if (page) {
opts.page = page
if (bookmark) {
opts.bookmark = bookmark
}
if (email) {
opts.email = email
if (query) {
opts.query = query
}
if (appId) {
opts.appId = appId
}
if (typeof paginated === "boolean") {
opts.paginated = paginated
if (typeof paginate === "boolean") {
opts.paginate = paginate
}
if (limit) {
opts.limit = limit
}
return await API.post({
url: `/api/global/users/search`,
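An example call under the new contract (values hypothetical): a lucene-style `query` object plus bookmark-based pagination replaces the old `{ page, email }` arguments.

```js
const res = await API.searchUsers({
  query: { string: { email: "jo" } },   // starts-with match on email
  bookmark: previousResponse?.nextPage, // hypothetical bookmark from an earlier page
  paginate: true,
  limit: 50,
})
```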

View file

@@ -1,7 +1,7 @@
<script>
import { getContext } from "svelte"
import RelationshipCell from "./RelationshipCell.svelte"
import { FieldSubtype } from "@budibase/types"
import { FieldSubtype, RelationshipType } from "@budibase/types"
export let api
@@ -12,10 +12,14 @@
...$$props.schema,
// This is not really used; it just adds enough content to render the relationship cell
tableId: "external",
relationshipType:
subtype === FieldSubtype.USER
? RelationshipType.ONE_TO_MANY
: RelationshipType.MANY_TO_MANY,
}
async function searchFunction(searchParams) {
if (subtype !== FieldSubtype.USER) {
if (subtype !== FieldSubtype.USER && subtype !== FieldSubtype.USERS) {
throw `Search for '${subtype}' not implemented`
}
@@ -23,7 +27,7 @@
const email = Object.values(searchParams.query.string)[0]
const results = await API.searchUsers({
email,
query: { string: { email } },
})
// Mapping to the expected data within RelationshipCell

View file

@@ -17,13 +17,24 @@
const { config, dispatch, selectedRows } = getContext("grid")
const svelteDispatch = createEventDispatcher()
const select = () => {
const select = e => {
e.stopPropagation()
svelteDispatch("select")
const id = row?._id
if (id) {
selectedRows.actions.toggleRow(id)
}
}
const bulkDelete = e => {
e.stopPropagation()
dispatch("request-bulk-delete")
}
const expand = e => {
e.stopPropagation()
svelteDispatch("expand")
}
</script>
<GridCell
@@ -56,7 +67,7 @@
{/if}
{/if}
{#if rowSelected && $config.canDeleteRows}
<div class="delete" on:click={() => dispatch("request-bulk-delete")}>
<div class="delete" on:click={bulkDelete}>
<Icon
name="Delete"
size="S"
@@ -65,12 +76,7 @@
</div>
{:else}
<div class="expand" class:visible={$config.canExpandRows && expandable}>
<Icon
size="S"
name="Maximize"
hoverable
on:click={() => svelteDispatch("expand")}
/>
<Icon size="S" name="Maximize" hoverable on:click={expand} />
</div>
{/if}
</div>

View file

@@ -1,7 +1,8 @@
<script>
import { getContext, onMount, tick } from "svelte"
import GridCell from "./GridCell.svelte"
import { canBeDisplayColumn } from "@budibase/shared-core"
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils"
export let column
@@ -24,14 +25,6 @@
datasource,
} = getContext("grid")
const bannedDisplayColumnTypes = [
"link",
"array",
"attachment",
"boolean",
"json",
]
let anchor
let open = false
let editIsOpen = false
@@ -231,8 +224,7 @@
<MenuItem
icon="Label"
on:click={makeDisplayColumn}
disabled={idx === "sticky" ||
bannedDisplayColumnTypes.includes(column.schema.type)}
disabled={idx === "sticky" || !canBeDisplayColumn(column.schema.type)}
>
Use as display column
</MenuItem>

View file

@@ -35,7 +35,7 @@
</script>
<div bind:this={body} class="grid-body">
<GridScrollWrapper scrollHorizontally scrollVertically wheelInteractive>
<GridScrollWrapper scrollHorizontally scrollVertically attachHandlers>
{#each $renderedRows as row, idx}
<GridRow
{row}

View file

@@ -17,6 +17,7 @@
columnHorizontalInversionIndex,
contentLines,
isDragging,
dispatch,
} = getContext("grid")
$: rowSelected = !!$selectedRows[row._id]
@@ -30,6 +31,7 @@
on:focus
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
>
{#each $renderedColumns as column, columnIdx (column.name)}
{@const cellId = `${row._id}-${column.name}`}

View file

@@ -17,7 +17,11 @@
export let scrollVertically = false
export let scrollHorizontally = false
export let wheelInteractive = false
export let attachHandlers = false
// Used for tracking touch events
let initialTouchX
let initialTouchY
$: style = generateStyle($scroll, $rowHeight, $hiddenColumnsWidth)
@@ -27,17 +31,47 @@
return `transform: translate3d(${offsetX}px, ${offsetY}px, 0);`
}
// Handles a wheel even and updates the scroll offsets
// Handles a mouse wheel event and updates scroll state
const handleWheel = e => {
e.preventDefault()
debouncedHandleWheel(e.deltaX, e.deltaY, e.clientY)
updateScroll(e.deltaX, e.deltaY, e.clientY)
// If a context menu was visible, hide it
if ($menu.visible) {
menu.actions.close()
}
}
const debouncedHandleWheel = domDebounce((deltaX, deltaY, clientY) => {
// Handles touch start events
const handleTouchStart = e => {
if (!e.touches?.[0]) return
initialTouchX = e.touches[0].clientX
initialTouchY = e.touches[0].clientY
}
// Handles touch move events and updates scroll state
const handleTouchMove = e => {
if (!e.touches?.[0]) return
e.preventDefault()
// Compute delta from previous event, and update scroll
const deltaX = initialTouchX - e.touches[0].clientX
const deltaY = initialTouchY - e.touches[0].clientY
updateScroll(deltaX, deltaY)
// Store position to reference in next event
initialTouchX = e.touches[0].clientX
initialTouchY = e.touches[0].clientY
// If a context menu was visible, hide it
if ($menu.visible) {
menu.actions.close()
}
}
// Updates the scroll offset by a certain delta, and ensures scrolling
// stays within sensible bounds. Debounced for performance.
const updateScroll = domDebounce((deltaX, deltaY, clientY) => {
const { top, left } = $scroll
// Calculate new scroll top
@@ -55,15 +89,19 @@
})
// Hover row under cursor
const y = clientY - $bounds.top + (newScrollTop % $rowHeight)
const hoveredRow = $renderedRows[Math.floor(y / $rowHeight)]
hoveredRowId.set(hoveredRow?._id)
if (clientY != null) {
const y = clientY - $bounds.top + (newScrollTop % $rowHeight)
const hoveredRow = $renderedRows[Math.floor(y / $rowHeight)]
hoveredRowId.set(hoveredRow?._id)
}
})
</script>
<div
class="outer"
on:wheel={wheelInteractive ? handleWheel : null}
on:wheel={attachHandlers ? handleWheel : null}
on:touchstart={attachHandlers ? handleTouchStart : null}
on:touchmove={attachHandlers ? handleTouchMove : null}
on:click|self={() => ($focusedCellId = null)}
>
<div {style} class="inner">

View file

@@ -205,7 +205,7 @@
{/if}
</div>
<div class="normal-columns" transition:fade|local={{ duration: 130 }}>
<GridScrollWrapper scrollHorizontally wheelInteractive>
<GridScrollWrapper scrollHorizontally attachHandlers>
<div class="row">
{#each $renderedColumns as column, columnIdx}
{@const cellId = `new-${column.name}`}

View file

@@ -64,7 +64,7 @@
</div>
<div class="content" on:mouseleave={() => ($hoveredRowId = null)}>
<GridScrollWrapper scrollVertically wheelInteractive>
<GridScrollWrapper scrollVertically attachHandlers>
{#each $renderedRows as row, idx}
{@const rowSelected = !!$selectedRows[row._id]}
{@const rowHovered = $hoveredRowId === row._id}
@@ -74,6 +74,7 @@
class="row"
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
>
<GutterCell {row} {rowFocused} {rowHovered} {rowSelected} />
{#if $stickyColumn}

View file

@@ -21,6 +21,7 @@ const TypeIconMap = {
bigint: "TagBold",
bb_reference: {
user: "User",
users: "UserGroup",
},
}

View file

@@ -53,18 +53,27 @@
}
}
const getLocation = e => {
return {
y: e.touches?.[0]?.clientY ?? e.clientY,
x: e.touches?.[0]?.clientX ?? e.clientX,
}
}
// V scrollbar drag handlers
const startVDragging = e => {
e.preventDefault()
initialMouse = e.clientY
initialMouse = getLocation(e).y
initialScroll = $scrollTop
document.addEventListener("mousemove", moveVDragging)
document.addEventListener("touchmove", moveVDragging)
document.addEventListener("mouseup", stopVDragging)
document.addEventListener("touchend", stopVDragging)
isDraggingV = true
closeMenu()
}
const moveVDragging = domDebounce(e => {
const delta = e.clientY - initialMouse
const delta = getLocation(e).y - initialMouse
const weight = delta / availHeight
const newScrollTop = initialScroll + weight * $maxScrollTop
scroll.update(state => ({
@@ -74,22 +83,26 @@
})
const stopVDragging = () => {
document.removeEventListener("mousemove", moveVDragging)
document.removeEventListener("touchmove", moveVDragging)
document.removeEventListener("mouseup", stopVDragging)
document.removeEventListener("touchend", stopVDragging)
isDraggingV = false
}
// H scrollbar drag handlers
const startHDragging = e => {
e.preventDefault()
initialMouse = e.clientX
initialMouse = getLocation(e).x
initialScroll = $scrollLeft
document.addEventListener("mousemove", moveHDragging)
document.addEventListener("touchmove", moveHDragging)
document.addEventListener("mouseup", stopHDragging)
document.addEventListener("touchend", stopHDragging)
isDraggingH = true
closeMenu()
}
const moveHDragging = domDebounce(e => {
const delta = e.clientX - initialMouse
const delta = getLocation(e).x - initialMouse
const weight = delta / availWidth
const newScrollLeft = initialScroll + weight * $maxScrollLeft
scroll.update(state => ({
@@ -99,7 +112,9 @@
})
const stopHDragging = () => {
document.removeEventListener("mousemove", moveHDragging)
document.removeEventListener("touchmove", moveHDragging)
document.removeEventListener("mouseup", stopHDragging)
document.removeEventListener("touchend", stopHDragging)
isDraggingH = false
}
</script>
@@ -109,6 +124,7 @@
class="v-scrollbar"
style="--size:{ScrollBarSize}px; top:{barTop}px; height:{barHeight}px;"
on:mousedown={startVDragging}
on:touchstart={startVDragging}
class:dragging={isDraggingV}
/>
{/if}
@@ -117,6 +133,7 @@
class="h-scrollbar"
style="--size:{ScrollBarSize}px; left:{barLeft}px; width:{barWidth}px;"
on:mousedown={startHDragging}
on:touchstart={startHDragging}
class:dragging={isDraggingH}
/>
{/if}

View file

@@ -1,4 +1,5 @@
import { writable, get } from "svelte/store"
import { Helpers } from "@budibase/bbui"
export const createStores = () => {
const copiedCell = writable(null)
@@ -12,7 +13,16 @@ export const createActions = context => {
const { copiedCell, focusedCellAPI } = context
const copy = () => {
copiedCell.set(get(focusedCellAPI)?.getValue())
const value = get(focusedCellAPI)?.getValue()
copiedCell.set(value)
// Also copy a stringified version to the clipboard
let stringified = ""
if (value != null && value !== "") {
// Only conditionally stringify to avoid redundant quotes around text
stringified = typeof value === "object" ? JSON.stringify(value) : value
}
Helpers.copyToClipboard(stringified)
}
const paste = () => {
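The conditional stringify above, condensed into a standalone sketch:

```js
const toClipboardString = value =>
  value == null || value === ""
    ? ""
    : typeof value === "object"
    ? JSON.stringify(value) // objects/arrays are serialised
    : value                 // plain text copied without redundant quotes
toClipboardString("hello")  // -> "hello"
toClipboardString({ a: 1 }) // -> '{"a":1}'
```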

View file

@@ -1,6 +1,7 @@
import { get } from "svelte/store"
import DataFetch from "./DataFetch.js"
import { TableNames } from "../constants"
import { LuceneUtils } from "../utils"
export default class UserFetch extends DataFetch {
constructor(opts) {
@@ -27,16 +28,25 @@ export default class UserFetch extends DataFetch {
}
async getData() {
const { limit, paginate } = this.options
const { cursor, query } = get(this.store)
let finalQuery
// convert old format to new one - we now allow use of the lucene format
const { appId, paginated, ...rest } = query
if (!LuceneUtils.hasFilters(query) && rest.email) {
finalQuery = { string: { email: rest.email } }
} else {
finalQuery = rest
}
try {
// "query" normally contains a lucene query, but users uses a non-standard
// search endpoint so we use query uniquely here
const res = await this.API.searchUsers({
page: cursor,
email: query.email,
appId: query.appId,
paginated: query.paginated,
})
const opts = {
bookmark: cursor,
query: finalQuery,
appId: appId,
paginate: paginated || paginate,
limit,
}
const res = await this.API.searchUsers(opts)
return {
rows: res?.data || [],
hasNextPage: res?.hasNextPage || false,
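A sketch of the legacy-to-lucene conversion performed in `getData` above, with hypothetical inputs:

```js
let query = { email: "jo", appId: "app_123", paginated: true }
const { appId, paginated, ...rest } = query
const finalQuery =
  !LuceneUtils.hasFilters(query) && rest.email
    ? { string: { email: rest.email } } // old shape upgraded to lucene format
    : rest                              // already lucene-style, passed through
// appId and paginated are forwarded as top-level search options instead
```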

@@ -1 +1 @@
Subproject commit 30385682141e5ba9d98de7d71d5be1672109cd15
Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e

View file

@@ -11,16 +11,14 @@
"scripts": {
"prebuild": "rimraf dist/",
"build": "node ./scripts/build.js",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
"test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch",
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
"build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docs": "node ./scripts/docs/generate.js open",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",
@@ -55,7 +53,7 @@
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "5.0.2",
"@google-cloud/firestore": "6.8.0",
"@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@socket.io/redis-adapter": "^8.2.1",
@@ -71,7 +69,6 @@
"curlconverter": "3.21.0",
"dd-trace": "3.13.2",
"dotenv": "8.2.0",
"fix-path": "3.0.0",
"form-data": "4.0.0",
"global-agent": "3.0.0",
"google-auth-library": "7.12.0",
@@ -97,12 +94,11 @@
"object-sizeof": "2.6.1",
"open": "8.4.0",
"openai": "^3.2.1",
"openapi-types": "9.3.1",
"pg": "8.10.0",
"posthog-node": "1.3.0",
"pouchdb": "7.3.0",
"pouchdb-all-dbs": "1.0.2",
"pouchdb-all-dbs": "1.1.1",
"pouchdb-find": "7.2.2",
"pouchdb-replication-stream": "1.2.9",
"redis": "4",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
@@ -114,10 +110,9 @@
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"validate.js": "0.13.1",
"vm2": "3.9.17",
"vm2": "^3.9.19",
"worker-farm": "1.7.0",
"xml2js": "0.5.0",
"yargs": "13.2.4"
"xml2js": "0.5.0"
},
"devDependencies": {
"@babel/core": "7.17.4",
@@ -127,7 +122,7 @@
"@trendyol/jest-testcontainers": "2.1.1",
"@types/global-agent": "2.1.1",
"@types/google-spreadsheet": "3.1.5",
"@types/jest": "29.5.3",
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.8",
"@types/lodash": "4.14.180",
@@ -147,7 +142,6 @@
"jest-runner": "29.6.2",
"jest-serial-runner": "1.2.1",
"nodemon": "2.0.15",
"openapi-types": "9.3.1",
"openapi-typescript": "5.2.0",
"path-to-regexp": "6.2.0",
"rimraf": "3.0.2",
@@ -157,7 +151,8 @@
"ts-node": "10.8.1",
"tsconfig-paths": "4.0.0",
"typescript": "5.2.2",
"update-dotenv": "1.1.1"
"update-dotenv": "1.1.1",
"yargs": "13.2.4"
},
"optionalDependencies": {
"oracledb": "5.3.0"
@@ -174,6 +169,22 @@
"target": "build"
}
]
},
"build": {
"outputs": [
"{projectRoot}/builder",
"{projectRoot}/client",
"{projectRoot}/dist"
],
"dependsOn": [
{
"projects": [
"@budibase/client",
"@budibase/builder"
],
"target": "build"
}
]
}
}
}

View file

@@ -1,31 +0,0 @@
### Documentation
This directory contains the scripts required to generate the APIDoc based documentation.
You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).
In general most API endpoints will look like:
```js
/**
* @api {post} /api/:param/url Give it a name
* @apiName Give it a name
* @apiGroup group
* @apiPermission permission
* @apiDescription Describe what the endpoint does, any special cases the user
* should be aware of.
*
* @apiParam {string} param describe a URL parameter.
*
* @apiParam (Body) input describe a field on the body.
*
* @apiSuccess {object} output describe the output.
*/
```
There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Make sure to always have an `@api` definition at the start, which must always have the
HTTP verb, the endpoint URL and the name.
3. There are three ways you can specify parameters used as inputs for your endpoint,
`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
for query string parameters.
4. The `@apiGroup` should be the same for all API Doc comments in a route file.

View file

@@ -1,74 +0,0 @@
const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")
const config = {
name: "Budibase API",
version: packageJson.version,
description: "Documenting the Budibase backend API",
title: "Budibase app service API",
}
const shouldOpen = process.argv[2]
const disallowed = []
function filter(parsedRouteFiles) {
const tagToSearch = "url"
for (let routeFile of parsedRouteFiles) {
for (let route of routeFile) {
let routeInfo = route["local"]
if (disallowed.includes(routeInfo[tagToSearch])) {
const idx = routeFile.indexOf(route)
routeFile.splice(idx, 1)
}
}
}
}
async function generate() {
// start by writing a config file
const configPath = join(__dirname, "config.json")
fs.writeFileSync(configPath, JSON.stringify(config))
const mainPath = join(__dirname, "..", "..")
const srcPath = join(mainPath, "src", "api", "routes")
const assetsPath = join(mainPath, "builder", "assets", "docs")
if (!fs.existsSync(assetsPath)) {
fs.mkdirSync(assetsPath, { recursive: true })
}
const options = {
src: [srcPath],
dest: assetsPath,
filters: {
main: {
postFilter: filter,
},
},
config: configPath,
}
const doc = createDoc(options)
if (typeof doc !== "boolean") {
const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
console.log(
`Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
)
} else {
throw "Unable to generate docs."
}
// delete the temporary config file
fs.unlinkSync(configPath)
setTimeout(async () => {
if (shouldOpen === "open") {
await open(join(assetsPath, "index.html"), { wait: false })
}
}, 2000)
}
generate().catch(err => {
console.error(err)
})

View file

@@ -1,320 +0,0 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")
/********************************************************
* Based on: https://github.com/fsbahman/apidoc-swagger *
********************************************************/
let swagger = {
swagger: "2.0",
info: {},
paths: {},
definitions: {},
}
function toSwagger(apidocJson, projectJson) {
swagger.info = addInfo(projectJson)
swagger.paths = extractPaths(apidocJson)
return swagger
}
let tagsRegex = /(<([^>]+)>)/gi
// Removes <p> </p> tags from text
function removeTags(text) {
return text ? text.replace(tagsRegex, "") : text
}
function addInfo(projectJson) {
let info = {}
info["title"] = projectJson.title || projectJson.name
info["version"] = projectJson.version
info["description"] = projectJson.description
return info
}
/**
* Extracts paths provided in json format
* post, patch, put request parameters are extracted in body
* get and delete are extracted to path parameters
* @param apidocJson
* @returns {{}}
*/
function extractPaths(apidocJson) {
let apiPaths = groupByUrl(apidocJson)
let paths = {}
for (let i = 0; i < apiPaths.length; i++) {
let verbs = apiPaths[i].verbs
let url = verbs[0].url
let pattern = pathToRegexp(url, null)
let matches = pattern.exec(url)
// Surrounds URL parameters with curly brackets -> :email with {email}
let pathKeys = []
for (let j = 1; j < matches.length; j++) {
let key = matches[j].slice(1)
url = url.replace(matches[j], "{" + key + "}")
pathKeys.push(key)
}
for (let j = 0; j < verbs.length; j++) {
let verb = verbs[j]
let type = verb.type
let obj = (paths[url] = paths[url] || {})
if (type === "post" || type === "patch" || type === "put") {
_.extend(
obj,
createPostPushPutOutput(verb, swagger.definitions, pathKeys)
)
} else {
_.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
}
}
}
return paths
}
function createPostPushPutOutput(verbs, definitions, pathKeys) {
let pathItemObject = {}
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
let params = []
let pathParams = createPathParameters(verbs, pathKeys)
pathParams = _.filter(pathParams, function (param) {
let hasKey = pathKeys.indexOf(param.name) !== -1
return !(param.in === "path" && !hasKey)
})
params = params.concat(pathParams)
let required =
verbs.parameter &&
verbs.parameter.fields &&
verbs.parameter.fields.Parameter &&
verbs.parameter.fields.Parameter.length > 0
params.push({
in: "body",
name: "body",
description: removeTags(verbs.description),
required: required,
schema: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
},
})
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: params,
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
function createVerbDefinitions(verbs, definitions) {
let result = {
topLevelParametersRef: null,
topLevelSuccessRef: null,
topLevelSuccessRefType: null,
}
let defaultObjectName = verbs.name
let fieldArrayResult = {}
if (verbs && verbs.parameter && verbs.parameter.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.parameter.fields.Parameter,
definitions,
verbs.name,
defaultObjectName
)
result.topLevelParametersRef = fieldArrayResult.topLevelRef
}
if (verbs && verbs.success && verbs.success.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.success.fields["Success 200"],
definitions,
verbs.name,
defaultObjectName
)
result.topLevelSuccessRef = fieldArrayResult.topLevelRef
result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
}
return result
}
function createFieldArrayDefinitions(
fieldArray,
definitions,
topLevelRef,
defaultObjectName
) {
let result = {
topLevelRef: topLevelRef,
topLevelRefType: null,
}
if (!fieldArray) {
return result
}
for (let i = 0; i < fieldArray.length; i++) {
let parameter = fieldArray[i]
let nestedName = createNestedName(parameter.field)
let objectName = nestedName.objectName
if (!objectName) {
objectName = defaultObjectName
}
let type = parameter.type
if (i === 0) {
result.topLevelRefType = type
if (parameter.type === "Object") {
objectName = nestedName.propertyName
nestedName.propertyName = null
} else if (parameter.type === "Array") {
objectName = nestedName.propertyName
nestedName.propertyName = null
result.topLevelRefType = "array"
}
result.topLevelRef = objectName
}
definitions[objectName] = definitions[objectName] || {
properties: {},
required: [],
}
if (nestedName.propertyName) {
let prop = {
type: (parameter.type || "").toLowerCase(),
description: removeTags(parameter.description),
}
if (parameter.type === "Object") {
prop.$ref = "#/definitions/" + parameter.field
}
let typeIndex = type.indexOf("[]")
if (typeIndex !== -1 && typeIndex === type.length - 2) {
prop.type = "array"
prop.items = {
type: type.slice(0, type.length - 2),
}
}
definitions[objectName]["properties"][nestedName.propertyName] = prop
if (!parameter.optional) {
let arr = definitions[objectName]["required"]
if (arr.indexOf(nestedName.propertyName) === -1) {
arr.push(nestedName.propertyName)
}
}
}
}
return result
}
function createNestedName(field) {
let propertyName = field
let objectName
let propertyNames = field.split(".")
if (propertyNames && propertyNames.length > 1) {
propertyName = propertyNames[propertyNames.length - 1]
propertyNames.pop()
objectName = propertyNames.join(".")
}
return {
propertyName: propertyName,
objectName: objectName,
}
}
/**
* Generate get, delete method output
* @param verbs
* @param definitions
* @returns {{}}
*/
function createGetDeleteOutput(verbs, definitions) {
let pathItemObject = {}
verbs.type = verbs.type === "del" ? "delete" : verbs.type
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: createPathParameters(verbs),
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
/**
* Iterate through all method parameters and create array of parameter objects which are stored as path parameters
* @param verbs
* @returns {Array}
*/
function createPathParameters(verbs) {
let pathItemObject = []
if (verbs.parameter && verbs.parameter.fields.Parameter) {
for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
let param = verbs.parameter.fields.Parameter[i]
let field = param.field
let type = param.type
pathItemObject.push({
name: field,
in: type === "file" ? "formData" : "path",
required: !param.optional,
type: param.type.toLowerCase(),
description: removeTags(param.description),
})
}
}
return pathItemObject
}
function groupByUrl(apidocJson) {
return _.chain(apidocJson)
.groupBy("url")
.toPairs()
.map(function (element) {
return _.zipObject(["url", "verbs"], element)
})
.value()
}
module.exports = toSwagger

View file

@@ -2,7 +2,7 @@ version: "3.8"
services:
db:
container_name: postgres
image: postgres
image: postgres:15
restart: unless-stopped
environment:
POSTGRES_USER: root
@@ -25,4 +25,4 @@ services:
- "5050:80"
volumes:
pg_data:
pg_data:

View file

@@ -859,7 +859,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1064,7 +1065,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1280,7 +1282,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},

View file

@@ -782,6 +782,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -946,6 +947,7 @@
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -1117,6 +1119,7 @@
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:

View file

@@ -289,6 +289,7 @@ async function performAppCreate(ctx: UserCtx) {
},
features: {
componentValidation: true,
disableUserMetadata: true,
},
}
@@ -310,10 +311,13 @@
}
})
// Keep existing validation setting
// Keep existing feature flags
if (!existing.features?.componentValidation) {
newApplication.features!.componentValidation = false
}
if (!existing.features?.disableUserMetadata) {
newApplication.features!.disableUserMetadata = false
}
// Migrate navigation settings and screens if required
if (existing) {

View file

@@ -5,7 +5,6 @@ import {
getTableParams,
} from "../../db/utils"
import { destroy as tableDestroy } from "./table/internal"
import { BuildSchemaErrors, InvalidColumns } from "../../constants"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
@@ -14,10 +13,13 @@
CreateDatasourceResponse,
Datasource,
DatasourcePlus,
ExternalTable,
FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse,
IntegrationBase,
Schema,
SourceName,
Table,
UpdateDatasourceResponse,
UserCtx,
VerifyDatasourceRequest,
@@ -27,23 +29,6 @@ import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
function getErrorTables(errors: any, errorType: string) {
return Object.entries(errors)
.filter(entry => entry[1] === errorType)
.map(([name]) => name)
}
function updateError(error: any, newError: any, tables: string[]) {
if (!error) {
error = ""
}
if (error.length > 0) {
error += "\n"
}
error += `${newError} ${tables.join(", ")}`
return error
}
async function getConnector(
datasource: Datasource
): Promise<IntegrationBase | DatasourcePlus> {
@@ -71,48 +56,36 @@ async function getAndMergeDatasource(datasource: Datasource) {
return await sdk.datasources.enrich(enrichedDatasource)
}
async function buildSchemaHelper(datasource: Datasource) {
async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus
await connector.buildSchema(datasource._id!, datasource.entities!)
const errors = connector.schemaErrors
let error = null
if (errors && Object.keys(errors).length > 0) {
const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY)
const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN)
if (noKey.length) {
error = updateError(
error,
"No primary key constraint found for the following:",
noKey
)
}
if (invalidCol.length) {
const invalidCols = Object.values(InvalidColumns).join(", ")
error = updateError(
error,
`Cannot use columns ${invalidCols} found in following:`,
invalidCol
)
}
}
return { tables: connector.tables, error }
return await connector.buildSchema(
datasource._id!,
datasource.entities! as Record<string, ExternalTable>
)
}
async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
let { tables, error } = await buildSchemaHelper(datasource)
let finalTables = tables
if (filter) {
finalTables = {}
for (let key in tables) {
if (
filter.some((filter: any) => filter.toLowerCase() === key.toLowerCase())
) {
finalTables[key] = tables[key]
}
async function buildFilteredSchema(
datasource: Datasource,
filter?: string[]
): Promise<Schema> {
let schema = await buildSchemaHelper(datasource)
if (!filter) {
return schema
}
let filteredSchema: Schema = { tables: {}, errors: {} }
for (let key in schema.tables) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.tables[key] = schema.tables[key]
}
}
return { tables: finalTables, error }
for (let key in schema.errors) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.errors[key] = schema.errors[key]
}
}
return filteredSchema
}
export async function fetch(ctx: UserCtx) {
@@ -156,7 +129,7 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables
setDefaultDisplayColumns(datasource)
@@ -164,13 +137,11 @@
sdk.tables.populateExternalTableSchemas(datasource)
)
datasource._rev = dbResp.rev
const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)
const res: any = { datasource: cleanedDatasource }
if (error) {
res.error = error
ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
errors,
}
ctx.body = res
}
/**
@@ -298,15 +269,12 @@ export async function save(
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
}
let schemaError = null
let errors: Record<string, string> = {}
if (fetchSchema) {
const { tables, error } = await buildFilteredSchema(
datasource,
tablesFilter
)
schemaError = error
datasource.entities = tables
const schema = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
}
if (preSaveAction[datasource.source]) {
@@ -327,13 +295,10 @@
}
}
const response: CreateDatasourceResponse = {
ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
errors,
}
if (schemaError) {
response.error = schemaError
}
ctx.body = response
builderSocket?.emitDatasourceUpdate(ctx, datasource)
}
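For reference, a hypothetical `Schema` value as now returned by `buildFilteredSchema`: tables keyed by name plus a per-table `errors` map, replacing the single concatenated error string.

```js
const schema = {
  tables: {
    orders: { /* table definition */ },
  },
  errors: {
    audit_log: "No primary key constraint found", // hypothetical message
  },
}
```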

View file

@@ -5,8 +5,11 @@ import {
FieldType,
FilterType,
IncludeRelationship,
OneToManyRelationshipFieldMetadata,
Operation,
PaginationJson,
RelationshipFieldMetadata,
RelationshipsJson,
RelationshipType,
Row,
SearchFilters,
@@ -22,16 +25,21 @@ import {
isSQL,
} from "../../../integrations/utils"
import {
generateIdForRow,
buildExternalRelationships,
buildSqlFieldList,
generateIdForRow,
sqlOutputProcessing,
squashRelationshipColumns,
updateRelationshipColumns,
fixArrayTypes,
isManyToMany,
} from "./utils"
import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
import { db as dbCore } from "@budibase/backend-core"
import { processDates, processFormulas } from "../../../utilities/rowProcessor"
import sdk from "../../../sdk"
export interface ManyRelationship {
@@ -161,7 +169,60 @@ function getEndpoint(tableId: string | undefined, operation: string) {
}
}
function isOneSide(field: FieldSchema) {
// need to handle table name + field or just field, depending on whether relationships are used
function extractFieldValue({
row,
tableName,
fieldName,
isLinked,
}: {
row: Row
tableName: string
fieldName: string
isLinked: boolean
}) {
let value = row[`${tableName}.${fieldName}`]
if (value == null && !isLinked) {
value = row[fieldName]
}
return value
}
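To illustrate `extractFieldValue` above: SQL responses alias every column as `table.col`, with the bare field name only used as a fallback for non-linked rows. The row below is hypothetical.

```js
// Hypothetical joined SQL output row
const row = { "orders.id": 1, name: "fallback when not aliased" }
extractFieldValue({ row, tableName: "orders", fieldName: "id", isLinked: false })
// -> 1 (aliased form wins)
extractFieldValue({ row, tableName: "orders", fieldName: "name", isLinked: false })
// -> "fallback when not aliased" (bare name used because isLinked is false)
```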
function basicProcessing({
row,
table,
isLinked,
}: {
row: Row
table: Table
isLinked: boolean
}): Row {
const thisRow: Row = {}
// filter the row down to what is actually the row (not joined)
for (let field of Object.values(table.schema)) {
const fieldName = field.name
const value = extractFieldValue({
row,
tableName: table.name,
fieldName,
isLinked,
})
// all responses include "select col as table.col" so that overlaps are handled
if (value != null) {
thisRow[fieldName] = value
}
}
thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id
thisRow._rev = "rev"
return processFormulas(table, thisRow)
}
function isOneSide(
field: RelationshipFieldMetadata
): field is OneToManyRelationshipFieldMetadata {
return (
field.relationshipType && field.relationshipType.split("-")[0] === "one"
)
@@ -259,11 +320,11 @@ export class ExternalRequest<T extends Operation> {
}
}
// many to many
else if (field.through) {
else if (isManyToMany(field)) {
// we're not inserting a doc, will be a bunch of update calls
const otherKey: string = field.throughFrom || linkTablePrimary
const thisKey: string = field.throughTo || tablePrimary
row[key].forEach((relationship: any) => {
for (const relationship of row[key]) {
manyRelationships.push({
tableId: field.through || field.tableId,
isUpdate: false,
@@ -272,14 +333,14 @@
// leave the ID for enrichment later
[thisKey]: `{{ literal ${tablePrimary} }}`,
})
})
}
}
// many to one
else {
const thisKey: string = "id"
// @ts-ignore
const otherKey: string = field.fieldName
row[key].forEach((relationship: any) => {
for (const relationship of row[key]) {
manyRelationships.push({
tableId: field.tableId,
isUpdate: true,
@@ -288,7 +349,7 @@ export class ExternalRequest<T extends Operation> {
// leave the ID for enrichment later
[otherKey]: `{{ literal ${tablePrimary} }}`,
})
})
}
}
}
// we return the relationships that may need to be created in the through table
@@ -297,6 +358,57 @@ export class ExternalRequest<T extends Operation> {
return { row: newRow, manyRelationships }
}
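
The `{{ literal ... }}` values above defer the new row's primary key until after the insert; a rough sketch of that idea, using a hand-rolled placeholder filler rather than the real @budibase/string-templates helper:

type JunctionRow = Record<string, string | number>

function fillPlaceholders(row: JunctionRow, context: Record<string, any>): JunctionRow {
  const out: JunctionRow = {}
  for (const [key, value] of Object.entries(row)) {
    const match =
      typeof value === "string" && value.match(/\{\{ literal (\w+) \}\}/)
    // swap the placeholder for the generated primary key once it is known
    out[key] = match ? context[match[1]] : value
  }
  return out
}

// the main insert returns the new primary key, then junction rows are enriched
const pending: JunctionRow = { productid: 7, taskid: "{{ literal taskid }}" }
console.log(fillPlaceholders(pending, { taskid: 42 })) // { productid: 7, taskid: 42 }
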
outputProcessing(
rows: Row[] = [],
table: Table,
relationships: RelationshipsJson[]
) {
if (!rows || rows.length === 0 || rows[0].read === true) {
return []
}
const tableMap = this.tables
let finalRows: { [key: string]: Row } = {}
for (let row of rows) {
const rowId = generateIdForRow(row, table)
row._id = rowId
// this is a relationship of some sort
if (finalRows[rowId]) {
finalRows = updateRelationshipColumns(
table,
tableMap,
row,
finalRows,
relationships
)
continue
}
const thisRow = fixArrayTypes(
basicProcessing({ row, table, isLinked: false }),
table
)
if (thisRow._id == null) {
throw "Unable to generate row ID for SQL rows"
}
finalRows[thisRow._id] = thisRow
// do this at the end once it's been added to the final rows
finalRows = updateRelationshipColumns(
table,
tableMap,
row,
finalRows,
relationships
)
}
// Process some additional data types
let finalRowArray = Object.values(finalRows)
finalRowArray = processDates(table, finalRowArray)
finalRowArray = processFormulas(table, finalRowArray) as Row[]
return finalRowArray.map((row: Row) =>
squashRelationshipColumns(table, tableMap, row, relationships)
)
}
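
The loop above folds one SQL result row per joined relationship back into a single logical row; stripped down to its essentials, with invented row shapes, the pattern is:

type JoinedRow = { _id: string; [key: string]: any }

function mergeJoinedRows(rows: JoinedRow[], relColumn: string): JoinedRow[] {
  const byId: Record<string, JoinedRow> = {}
  for (const row of rows) {
    const existing = byId[row._id]
    if (!existing) {
      // first occurrence of this ID: keep the row, start the relationship list
      byId[row._id] = {
        ...row,
        [relColumn]: row[relColumn] != null ? [row[relColumn]] : [],
      }
    } else if (row[relColumn] != null) {
      // a duplicate ID is another joined relationship row: append its value
      existing[relColumn].push(row[relColumn])
    }
  }
  return Object.values(byId)
}

const joined: JoinedRow[] = [
  { _id: "1", name: "task A", product: "p1" },
  { _id: "1", name: "task A", product: "p2" },
]
console.log(mergeJoinedRows(joined, "product"))
// [ { _id: "1", name: "task A", product: ["p1", "p2"] } ]
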
/**
* This is a cached lookup, of relationship records, this is mainly for creating/deleting junction
* information.
@@ -312,7 +424,7 @@ export class ExternalRequest<T extends Operation> {
const primaryKey = table.primary[0]
// make a new request to get the row with all its relationships
// we need this to work out if any relationships need to be removed
for (let field of Object.values(table.schema) as FieldSchema[]) {
for (const field of Object.values(table.schema)) {
if (
field.type !== FieldTypes.LINK ||
!field.fieldName ||
@@ -325,9 +437,9 @@ export class ExternalRequest<T extends Operation> {
const { tableName: relatedTableName } = breakExternalTableId(tableId)
// @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0]
const manyKey = field.throughTo || primaryKey
const lookupField = isMany ? primaryKey : field.foreignKey
const fieldName = isMany ? manyKey : field.fieldName
const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName
if (!lookupField || !row[lookupField]) {
continue
}
@@ -384,7 +496,7 @@ export class ExternalRequest<T extends Operation> {
linkPrimary,
linkSecondary,
}: {
row: { [key: string]: any }
row: Record<string, any>
linkPrimary: string
linkSecondary?: string
}) {
@@ -446,41 +558,6 @@ export class ExternalRequest<T extends Operation> {
await Promise.all(promises)
}
/**
* This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
* you have column overlap in relationships, e.g. we join a few different tables and they all have the
* concept of an ID, but for some of them it will be null (if they, say, don't have a relationship).
* Creating the specific list of fields that we desire, and excluding the ones that are of no use to us
* is more performant and has the added benefit of protecting against this scenario.
*/
buildFields(table: Table, includeRelations: boolean) {
function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema)
.filter(
column =>
column[1].type !== FieldTypes.LINK &&
column[1].type !== FieldTypes.FORMULA &&
!existing.find((field: string) => field === column[0])
)
.map(column => `${table.name}.${column[0]}`)
}
let fields = extractRealFields(table)
for (let field of Object.values(table.schema)) {
if (field.type !== FieldTypes.LINK || !includeRelations) {
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
if (linkTableName) {
const linkTable = this.tables[linkTableName]
if (linkTable) {
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
}
}
}
return fields
}
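
The column-overlap problem the docstring describes is easiest to see with a concrete query; the tables and SQL below are a made-up example of the per-table aliasing fix:

// both tables have an "id" column; SELECT * over the join would collapse them,
// so every column is selected with an explicit per-table alias instead
const schemas: Record<string, string[]> = {
  tasks: ["id", "name"],
  products: ["id", "title"],
}

function buildSelectList(tableNames: string[]): string[] {
  return tableNames.flatMap(table =>
    schemas[table].map(col => `${table}.${col} AS "${table}.${col}"`)
  )
}

const sql =
  `SELECT ${buildSelectList(["tasks", "products"]).join(", ")} ` +
  `FROM tasks LEFT JOIN products ON products.taskid = tasks.id`
console.log(sql)
// "tasks.id" and "products.id" now survive as separate keys in each result row
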
async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
const { operation, tableId } = this
let { datasourceId, tableName } = breakExternalTableId(tableId)
@@ -580,11 +657,10 @@ export class ExternalRequest<T extends Operation> {
relationships
)
// if reading it'll just be an array of rows, return whole thing
const result = (
return (
operation === Operation.READ && Array.isArray(response)
? output
: { row: output[0], table }
) as ExternalRequestReturnType<T>
return result
}
}

View file

@@ -167,7 +167,10 @@ export async function destroy(ctx: UserCtx) {
}
const table = await sdk.tables.getTable(row.tableId)
// update the row to include full relationships before deleting them
row = await outputProcessing(table, row, { squash: false })
row = await outputProcessing(table, row, {
squash: false,
skipBBReferences: true,
})
// now remove the relationships
await linkRows.updateLinks({
eventType: linkRows.EventType.ROW_DELETE,
@@ -201,6 +204,7 @@ export async function bulkDestroy(ctx: UserCtx) {
// they need to be the full rows (including previous relationships) for automations
const processedRows = (await outputProcessing(table, rows, {
squash: false,
skipBBReferences: true,
})) as Row[]
// remove the relationships first

View file

@@ -1,4 +1,12 @@
import { FieldType, RelationshipsJson, Row, Table } from "@budibase/types"
import {
FieldType,
ManyToManyRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
RelationshipFieldMetadata,
RelationshipsJson,
Row,
Table,
} from "@budibase/types"
import { processFormulas } from "../../../../utilities/rowProcessor"
import {
breakExternalTableId,
@@ -9,6 +17,12 @@ import { generateJunctionTableID } from "../../../../db/utils"
type TableMap = Record<string, Table>
export function isManyToMany(
field: RelationshipFieldMetadata
): field is ManyToManyRelationshipFieldMetadata {
return !!(field as ManyToManyRelationshipFieldMetadata).through
}
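
The `field is ManyToManyRelationshipFieldMetadata` return type above is a standard TypeScript user-defined type guard; with simplified stand-in types, the pattern in isolation looks like this:

interface OneToMany { relationshipType: "one-to-many"; foreignKey: string }
interface ManyToMany { relationshipType: "many-to-many"; through: string }
type Relationship = OneToMany | ManyToMany

// the "field is ManyToMany" annotation lets the compiler narrow the union
function isManyToManyExample(field: Relationship): field is ManyToMany {
  return (field as ManyToMany).through !== undefined
}

const rel: Relationship = { relationshipType: "many-to-many", through: "jt_table" }
if (isManyToManyExample(rel)) {
  // inside this branch rel.through is typed as string, no cast required
  console.log(rel.through)
}
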
export function squashRelationshipColumns(
table: Table,
tables: TableMap,
@@ -88,7 +102,7 @@ export function updateRelationshipColumns(
columns[relationship.column] = linked
}
for (let [column, related] of Object.entries(columns)) {
let rowId: string = row._id!
let rowId = row._id
if (opts?.internal) {
const { _id } = basicProcessing({
row,
@@ -137,15 +151,16 @@ export function buildExternalRelationships(
if (!table.primary || !linkTable.primary) {
continue
}
const definition: any = {
const foreignKey = (field as OneToManyRelationshipFieldMetadata).foreignKey
const definition: RelationshipsJson = {
// if no foreign key specified then use the name of the field in other table
from: field.foreignKey || table.primary[0],
from: foreignKey || table.primary[0],
to: field.fieldName,
tableName: linkTableName,
// need to specify where to put this back into
column: fieldName,
}
if (field.through) {
if (isManyToMany(field) && field.through) {
const { tableName: throughTableName } = breakExternalTableId(
field.through
)
@@ -168,6 +183,9 @@ export function buildInternalRelationships(table: Table): RelationshipsJson[] {
)
const tableId = table._id!
for (let link of links) {
if (link.type !== FieldType.LINK) {
continue
}
const linkTableId = link.tableId!
const junctionTableId = generateJunctionTableID(tableId, linkTableId)
const isFirstTable = tableId > linkTableId
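
The `tableId > linkTableId` comparison hints at how both sides of a link agree on a single junction document; a hypothetical order-independent ID helper (not the real generateJunctionTableID) makes the idea concrete:

function junctionId(tableA: string, tableB: string): string {
  // sort the pair so (A, B) and (B, A) always yield the same identifier
  const [first, second] = [tableA, tableB].sort()
  return `jt_${first}_${second}`
}

console.log(
  junctionId("ta_tasks", "ta_products") === junctionId("ta_products", "ta_tasks")
) // true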

View file

@@ -1,4 +1,4 @@
import { FieldTypes, FormulaTypes } from "../../../constants"
import { FormulaTypes } from "../../../constants"
import { clearColumns } from "./utils"
import { doesContainStrings } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
@@ -6,12 +6,20 @@ import isEqual from "lodash/isEqual"
import uniq from "lodash/uniq"
import { updateAllFormulasInTable } from "../row/staticFormula"
import { context } from "@budibase/backend-core"
import { FieldSchema, Table } from "@budibase/types"
import {
FieldSchema,
FieldType,
FormulaFieldMetadata,
Table,
} from "@budibase/types"
import sdk from "../../../sdk"
import { isRelationshipColumn } from "../../../db/utils"
function isStaticFormula(column: FieldSchema) {
function isStaticFormula(
column: FieldSchema
): column is FormulaFieldMetadata & { formulaType: FormulaTypes.STATIC } {
return (
column.type === FieldTypes.FORMULA &&
column.type === FieldType.FORMULA &&
column.formulaType === FormulaTypes.STATIC
)
}
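
isStaticFormula above narrows to an intersection type rather than a separate interface; a self-contained sketch of that trick, with simplified stand-in shapes:

type Field = { type: string; formulaType?: "static" | "dynamic" }

function isStatic(f: Field): f is Field & { formulaType: "static" } {
  return f.type === "formula" && f.formulaType === "static"
}

const col: Field = { type: "formula", formulaType: "static" }
if (isStatic(col)) {
  // formulaType is now the literal "static", not an optional union
  const t: "static" = col.formulaType
  console.log(t)
}
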
@@ -56,8 +64,9 @@ async function checkIfFormulaNeedsCleared(
for (let removed of removedColumns) {
let tableToUse: Table | undefined = table
// if relationship, get the related table
if (removed.type === FieldTypes.LINK) {
tableToUse = tables.find(table => table._id === removed.tableId)
if (removed.type === FieldType.LINK) {
const removedTableId = removed.tableId
tableToUse = tables.find(table => table._id === removedTableId)
}
if (!tableToUse) {
continue
@@ -73,17 +82,18 @@
}
for (let relatedTableId of table.relatedFormula) {
const relatedColumns = Object.values(table.schema).filter(
column => column.tableId === relatedTableId
column =>
column.type === FieldType.LINK && column.tableId === relatedTableId
)
const relatedTable = tables.find(table => table._id === relatedTableId)
// look to see if the column was used in a relationship formula,
// relationships won't be used for this
if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) {
if (relatedTable && relatedColumns && removed.type !== FieldType.LINK) {
let relatedFormulaToRemove: string[] = []
for (let column of relatedColumns) {
relatedFormulaToRemove = relatedFormulaToRemove.concat(
getFormulaThatUseColumn(relatedTable, [
column.fieldName!,
(column as any).fieldName!,
removed.name,
])
)
@@ -116,7 +126,7 @@ async function updateRelatedFormulaLinksOnTables(
const initialTables = cloneDeep(tables)
// first find the related column names
const relatedColumns = Object.values(table.schema).filter(
col => col.type === FieldTypes.LINK
isRelationshipColumn
)
// we start by removing the formula field from all tables
for (let otherTable of tables) {
@@ -135,6 +145,7 @@
if (!columns || columns.length === 0) {
continue
}
const relatedTable = tables.find(
related => related._id === relatedCol.tableId
)

View file

@@ -15,11 +16,16 @@ import { handleRequest } from "../row/external"
import { context, events } from "@budibase/backend-core"
import { isRows, isSchema, parse } from "../../../utilities/schema"
import {
AutoReason,
BulkImportRequest,
BulkImportResponse,
Datasource,
FieldSchema,
ManyToManyRelationshipFieldMetadata,
ManyToOneRelationshipFieldMetadata,
OneToManyRelationshipFieldMetadata,
Operation,
QueryJson,
RelationshipFieldMetadata,
RelationshipType,
RenameColumn,
SaveTableRequest,
@@ -74,10 +79,13 @@ function cleanupRelationships(
schema.type === FieldTypes.LINK &&
(!oldTable || table.schema[key] == null)
) {
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
table => table._id === schemaTableId
)
const foreignKey = schema.foreignKey
const foreignKey =
schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
schema.foreignKey
if (!relatedTable || !foreignKey) {
continue
}
@@ -116,7 +124,7 @@ function otherRelationshipType(type?: string) {
function generateManyLinkSchema(
datasource: Datasource,
column: FieldSchema,
column: ManyToManyRelationshipFieldMetadata,
table: Table,
relatedTable: Table
): Table {
@@ -151,10 +159,12 @@
}
function generateLinkSchema(
column: FieldSchema,
column:
| OneToManyRelationshipFieldMetadata
| ManyToOneRelationshipFieldMetadata,
table: Table,
relatedTable: Table,
type: RelationshipType
type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
) {
if (!table.primary || !relatedTable.primary) {
throw new Error("Unable to generate link schema, no primary keys")
@@ -170,20 +180,22 @@
}
function generateRelatedSchema(
linkColumn: FieldSchema,
linkColumn: RelationshipFieldMetadata,
table: Table,
relatedTable: Table,
columnName: string
) {
// generate column for other table
const relatedSchema = cloneDeep(linkColumn)
const isMany2Many =
linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
// swap them from the main link
if (linkColumn.foreignKey) {
if (!isMany2Many && linkColumn.foreignKey) {
relatedSchema.fieldName = linkColumn.foreignKey
relatedSchema.foreignKey = linkColumn.fieldName
}
// is many to many
else {
else if (isMany2Many) {
// don't need to copy through, already got it
relatedSchema.fieldName = linkColumn.throughTo
relatedSchema.throughTo = linkColumn.throughFrom
@@ -197,8 +209,8 @@
table.schema[columnName] = relatedSchema
}
function isRelationshipSetup(column: FieldSchema) {
return column.foreignKey || column.through
function isRelationshipSetup(column: RelationshipFieldMetadata) {
return (column as any).foreignKey || (column as any).through
}
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
@@ -257,14 +269,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
continue
}
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
table => table._id === schemaTableId
)
if (!relatedTable) {
continue
}
const relatedColumnName = schema.fieldName!
const relationType = schema.relationshipType!
const relationType = schema.relationshipType
if (relationType === RelationshipType.MANY_TO_MANY) {
const junctionTable = generateManyLinkSchema(
datasource,
@@ -374,10 +387,12 @@ export async function destroy(ctx: UserCtx) {
return tableToDelete
}
export async function bulkImport(ctx: UserCtx) {
export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows }: { rows: unknown } = ctx.request.body
const schema: unknown = table.schema
const { rows } = ctx.request.body
const schema = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) {
ctx.throw(400, "Provided data import information is invalid.")

View file

@@ -8,6 +8,8 @@ import {
import { isExternalTable, isSQL } from "../../../integrations/utils"
import { events } from "@budibase/backend-core"
import {
BulkImportRequest,
BulkImportResponse,
FetchTablesResponse,
SaveTableRequest,
SaveTableResponse,
@ -18,7 +20,7 @@ import {
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep } from "lodash"
import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) {
@@ -97,9 +99,17 @@ export async function destroy(ctx: UserCtx) {
builderSocket?.emitTableDeletion(ctx, deletedTable)
}
export async function bulkImport(ctx: UserCtx) {
export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const tableId = ctx.params.tableId
await pickApi({ tableId }).bulkImport(ctx)
let tableBefore = await sdk.tables.getTable(tableId)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to
// think about events for bulk items
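
The before/after comparison added to bulkImport avoids a redundant table write; a minimal sketch of that guard, with stand-in types and a stand-in save function rather than the real SDK calls:

import { isEqual } from "lodash"

interface TableDoc { _id: string; schema: Record<string, unknown> }

async function saveIfChanged(
  before: TableDoc,
  after: TableDoc,
  save: (t: TableDoc) => Promise<void>
) {
  // deep-compare the documents; an import may have widened the schema
  if (!isEqual(before, after)) {
    await save(after)
  }
}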

View file

@@ -10,6 +10,8 @@
} from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula"
import {
BulkImportRequest,
BulkImportResponse,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
@@ -78,10 +80,10 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
// make sure that a column's type doesn't change; the column has to be
// removed if you want to change the type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
for (const propKey of Object.keys(tableToSave.schema)) {
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) {
oldColumn.type = FieldTypes.AUTO
oldTable.schema[propKey].type = FieldTypes.AUTO
}
}
}
@@ -206,7 +208,9 @@ export async function destroy(ctx: any) {
return tableToDelete
}
export async function bulkImport(ctx: any) {
export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body
await handleDataImport(ctx.user, table, rows, identifierFields)

View file

@@ -20,8 +20,14 @@ import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { events, context } from "@budibase/backend-core"
import { ContextUser, Datasource, SourceName, Table } from "@budibase/types"
import { addTableToSqlite } from "./sqlite"
import {
ContextUser,
Datasource,
Row,
SourceName,
Table,
} from "@budibase/types"
export async function clearColumns(table: any, columnNames: any) {
const db = context.getAppDB()
@@ -145,12 +151,12 @@
}
export async function handleDataImport(
user: any,
table: any,
rows: any,
user: ContextUser,
table: Table,
rows: Row[],
identifierFields: Array<string> = []
) {
const schema: unknown = table.schema
const schema = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) {
return table

View file

@@ -43,3 +43,7 @@ export enum Format {
export function isFormat(format: any): format is Format {
return Object.values(Format).includes(format as Format)
}
export function parseCsvExport<T>(value: string) {
return JSON.parse(value?.replace(/'/g, '"')) as T
}
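
A quick usage sketch for the helper above, with a made-up export value; note the naive quote swap assumes the serialised data contains no apostrophes of its own:

// arrays come out of the CSV export serialised with single quotes,
// so the helper swaps them for double quotes before JSON.parse
const exported = "['tag-one','tag-two']"
const tags = parseCsvExport<string[]>(exported)
console.log(tags) // ["tag-one", "tag-two"]
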

View file

@@ -23,7 +23,10 @@ describe("/applications/:appId/import", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.message).toBe("app updated")
const appPackage = await config.api.application.get(appId!)
expect(appPackage.navigation?.links?.length).toBe(2)
expect(appPackage.navigation?.links?.[0].url).toBe("/blank")
expect(appPackage.navigation?.links?.[1].url).toBe("/derp")
const screens = await config.api.screen.list()
expect(screens.length).toBe(2)
expect(screens[0].routing.route).toBe("/derp")

View file

@@ -37,7 +37,7 @@ describe("/datasources", () => {
.expect(200)
expect(res.body.datasource.name).toEqual("Test")
expect(res.body.errors).toBeUndefined()
expect(res.body.errors).toEqual({})
expect(events.datasource.created).toBeCalledTimes(1)
})
})

Some files were not shown because too many files have changed in this diff.