
Merge branch 'master' of github.com:Budibase/budibase into cheeks-fixes

Commit e1bc9d54f1 by Andrew Kingston, 2024-06-10 07:52:09 +01:00
88 changed files with 2160 additions and 1051 deletions


@@ -9,7 +9,7 @@ on:
 jobs:
   ensure-is-master-tag:
     name: Ensure is a master tag
-    runs-on: qa-arc-runner-set
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout monorepo
         uses: actions/checkout@v4

.github/workflows/pr-labeler.yml (new file, 38 lines added)

@@ -0,0 +1,38 @@
+name: PR labeler
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
+  cancel-in-progress: true
+on:
+  pull_request:
+    types: [opened, synchronize]
+jobs:
+  size-labeler:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: codelytv/pr-size-labeler@v1
+        with:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          xs_max_size: "10"
+          s_max_size: "100"
+          m_max_size: "500"
+          l_max_size: "1000"
+          fail_if_xl: "false"
+          files_to_ignore: "yarn.lock"
+  team-labeler:
+    runs-on: ubuntu-latest
+    if: ${{ github.event.action == 'opened' }}
+    steps:
+      - uses: rodrigoarias/auto-label-per-user@v1.0.0
+        with:
+          git-token: ${{ secrets.GITHUB_TOKEN }}
+          user-team-map: |
+            {
+              "adrinr": "firestorm",
+              "samwho": "firestorm",
+              "PClmnt": "firestorm",
+              "mike12345567": "firestorm"
+            }


@@ -1,6 +1,6 @@
 dependencies:
   - name: couchdb
     repository: https://apache.github.io/couchdb-helm
-    version: 4.3.0
-digest: sha256:94449a7f195b186f5af33ec5aa66d58b36bede240fae710f021ca87837b30606
-generated: "2023-11-20T17:43:02.777596Z"
+    version: 4.5.6
+digest: sha256:405f098633e632d6f4e140175f156ed4f02918b0d89193f1b66c9cbea211d6c9
+generated: "2024-06-05T14:41:05.979052+01:00"


@@ -17,6 +17,6 @@ version: 0.0.0
 appVersion: 0.0.0
 dependencies:
   - name: couchdb
-    version: 4.3.0
+    version: 4.5.6
     repository: https://apache.github.io/couchdb-helm
     condition: services.couchdb.enabled


@@ -112,7 +112,9 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | awsAlbIngress.enabled | bool | `false` | Whether to create an ALB Ingress resource pointing to the Budibase proxy. Requires the AWS ALB Ingress Controller. |
 | couchdb.clusterSize | int | `1` | The number of replicas to run in the CouchDB cluster. We set this to 1 by default to make things simpler, but you can set it to 3 if you need a high-availability CouchDB cluster. |
 | couchdb.couchdbConfig.couchdb.uuid | string | `"budibase-couchdb"` | Unique identifier for this CouchDB server instance. You shouldn't need to change this. |
+| couchdb.extraPorts[0] | object | `{"containerPort":4984,"name":"sqs"}` | Extra ports to expose on the CouchDB service. We expose the SQS port by default, but you can add more ports here if you need to. |
 | couchdb.image | object | `{}` | We use a custom CouchDB image for running Budibase and we don't support using any other CouchDB image. You shouldn't change this, and if you do we can't guarantee that Budibase will work. |
+| couchdb.service.extraPorts[0] | object | `{"name":"sqs","port":4984,"protocol":"TCP","targetPort":4984}` | Extra ports to expose on the CouchDB service. We expose the SQS port by default, but you can add more ports here if you need to. |
 | globals.apiEncryptionKey | string | `""` | Used for encrypting API keys and environment variables when stored in the database. You don't need to set this if `createSecrets` is true. |
 | globals.appVersion | string | `""` | The version of Budibase to deploy. Defaults to what's specified by {{ .Chart.AppVersion }}. Ends up being used as the image version tag for the apps, proxy, and worker images. |
 | globals.automationMaxIterations | string | `"200"` | The maximum number of iterations allowed for an automation loop step. You can read more about looping here: <https://docs.budibase.com/docs/looping>. |
@@ -135,6 +137,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | globals.smtp.password | string | `""` | The password to use when authenticating with your SMTP server. |
 | globals.smtp.port | string | `"587"` | The port of your SMTP server. |
 | globals.smtp.user | string | `""` | The username to use when authenticating with your SMTP server. |
+| globals.sqs.enabled | bool | `false` | Whether to use the CouchDB "structured query service" or not. This is disabled by default for now, but will become the default in a future release. |
+| globals.tempBucketName | string | `""` | |
 | globals.tenantFeatureFlags | string | `"*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"` | Sets what feature flags are enabled and for which tenants. Should not ordinarily need to be changed. |
 | imagePullSecrets | list | `[]` | Passed to all pods created by this chart. Should not ordinarily need to be changed. |
 | ingress.className | string | `""` | What ingress class to use. |
@@ -152,6 +156,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.apps.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the apps service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the apps pods. |
 | services.apps.extraContainers | list | `[]` | Additional containers to be added to the apps pod. |
 | services.apps.extraEnv | list | `[]` | Extra environment variables to set for apps pods. Takes a list of name=value pairs. |
+| services.apps.extraEnvFromSecret | list | `[]` | Name of the K8s Secret in the same namespace which contains the extra environment variables. This can be used to avoid storing sensitive information in the values.yaml file. |
 | services.apps.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main apps container. |
 | services.apps.extraVolumes | list | `[]` | Additional volumes to the apps pod. |
 | services.apps.httpLogging | int | `1` | Whether or not to log HTTP requests to the apps service. |
@@ -168,6 +173,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. |
 | services.automationWorkers.extraContainers | list | `[]` | Additional containers to be added to the automationWorkers pod. |
 | services.automationWorkers.extraEnv | list | `[]` | Extra environment variables to set for automation worker pods. Takes a list of name=value pairs. |
+| services.automationWorkers.extraEnvFromSecret | list | `[]` | Name of the K8s Secret in the same namespace which contains the extra environment variables. This can be used to avoid storing sensitive information in the values.yaml file. |
 | services.automationWorkers.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main automationWorkers container. |
 | services.automationWorkers.extraVolumes | list | `[]` | Additional volumes to the automationWorkers pod. |
 | services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
@@ -195,7 +201,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.objectStore.region | string | `""` | AWS_REGION if using S3 |
 | services.objectStore.resources | object | `{}` | The resources to use for Minio pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
 | services.objectStore.secretKey | string | `""` | AWS_SECRET_ACCESS_KEY if using S3 |
-| services.objectStore.storage | string | `"100Mi"` | How much storage to give Minio in its PersistentVolumeClaim. |
+| services.objectStore.storage | string | `"2Gi"` | How much storage to give Minio in its PersistentVolumeClaim. |
 | services.objectStore.storageClass | string | `""` | If defined, storageClassName: <storageClass> If set to "-", storageClassName: "", which disables dynamic provisioning If undefined (the default) or set to null, no storageClassName spec is set, choosing the default provisioner. |
 | services.objectStore.url | string | `"http://minio-service:9000"` | URL to use for object storage. Only change this if you're using an external object store, such as S3. Remember to set `minio: false` if you do this. |
 | services.proxy.autoscaling.enabled | bool | `false` | Whether to enable horizontal pod autoscaling for the proxy service. |
@@ -227,6 +233,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.worker.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the worker pods. |
 | services.worker.extraContainers | list | `[]` | Additional containers to be added to the worker pod. |
 | services.worker.extraEnv | list | `[]` | Extra environment variables to set for worker pods. Takes a list of name=value pairs. |
+| services.worker.extraEnvFromSecret | list | `[]` | Name of the K8s Secret in the same namespace which contains the extra environment variables. This can be used to avoid storing sensitive information in the values.yaml file. |
 | services.worker.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main worker container. |
 | services.worker.extraVolumes | list | `[]` | Additional volumes to the worker pod. |
 | services.worker.httpLogging | int | `1` | Whether or not to log HTTP requests to the worker service. |
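For orientation, a minimal values.yaml sketch exercising the new options documented above; the secret name, key, and env var name are illustrative placeholders mirroring the commented examples later in this diff:

globals:
  sqs:
    enabled: true
services:
  apps:
    extraEnvFromSecret:
      - name: MY_SECRET_KEY
        secretName: my-secret
        secretKey: my-secret-key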

Binary file not shown.


@@ -42,6 +42,14 @@ spec:
           {{ else }}
           value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}
           {{ end }}
+        {{ if .Values.globals.sqs.enabled }}
+        - name: COUCH_DB_SQL_URL
+          {{ if .Values.globals.sqs.url }}
+          value: {{ .Values.globals.sqs.url }}
+          {{ else }}
+          value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.globals.sqs.port }}
+          {{ end }}
+        {{ end }}
         {{ if .Values.services.couchdb.enabled }}
         - name: COUCH_DB_USER
           valueFrom:
@@ -198,10 +206,21 @@ spec:
         - name: APP_FEATURES
           value: "api"
         {{- end }}
+        {{- if .Values.globals.sqs.enabled }}
+        - name: SQS_SEARCH_ENABLE
+          value: "true"
+        {{- end }}
         {{- range .Values.services.apps.extraEnv }}
        - name: {{ .name }}
           value: {{ .value | quote }}
         {{- end }}
+        {{- range .Values.services.apps.extraEnvFromSecret}}
+        - name: {{ .name }}
+          valueFrom:
+            secretKeyRef:
+              name: {{ .secretName }}
+              key: {{ .secretKey | quote }}
+        {{- end}}
         image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
         imagePullPolicy: Always
         {{- if .Values.services.apps.startupProbe }}
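Assuming globals.sqs.enabled is true and no explicit globals.sqs.url is set, the template above should render env entries roughly like this (release name "budibase" is illustrative):

- name: COUCH_DB_SQL_URL
  value: http://budibase-svc-couchdb:4984
- name: SQS_SEARCH_ENABLE
  value: "true"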


@@ -201,6 +201,13 @@ spec:
         - name: {{ .name }}
           value: {{ .value | quote }}
         {{- end }}
+        {{- range .Values.services.automationWorkers.extraEnvFromSecret}}
+        - name: {{ .name }}
+          valueFrom:
+            secretKeyRef:
+              name: {{ .secretName }}
+              key: {{ .secretKey | quote }}
+        {{- end}}
         image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
         imagePullPolicy: Always
@@ -272,4 +279,4 @@ spec:
           {{- toYaml .Values.services.automationWorkers.extraVolumes | nindent 8 }}
         {{ end }}
 status: {}
 {{- end }}


@@ -56,6 +56,14 @@ spec:
           {{ else }}
           value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}
           {{ end }}
+        {{ if .Values.globals.sqs.enabled }}
+        - name: COUCH_DB_SQL_URL
+          {{ if .Values.globals.sqs.url }}
+          value: {{ .Values.globals.sqs.url }}
+          {{ else }}
+          value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.globals.sqs.port }}
+          {{ end }}
+        {{ end }}
         - name: API_ENCRYPTION_KEY
           value: {{ .Values.globals.apiEncryptionKey | quote }}
         - name: HTTP_LOGGING
@@ -184,10 +192,21 @@ spec:
         - name: NODE_TLS_REJECT_UNAUTHORIZED
           value: {{ .Values.services.tlsRejectUnauthorized }}
         {{ end }}
+        {{- if .Values.globals.sqs.enabled }}
+        - name: SQS_SEARCH_ENABLE
+          value: "true"
+        {{- end }}
         {{- range .Values.services.worker.extraEnv }}
         - name: {{ .name }}
           value: {{ .value | quote }}
         {{- end }}
+        {{- range .Values.services.worker.extraEnvFromSecret}}
+        - name: {{ .name }}
+          valueFrom:
+            secretKeyRef:
+              name: {{ .secretName }}
+              key: {{ .secretKey | quote }}
+        {{- end}}
         image: budibase/worker:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
         imagePullPolicy: Always
         {{- if .Values.services.worker.startupProbe }}


@@ -138,6 +138,15 @@ globals:
     # -- The password to use when authenticating with your SMTP server.
     password: ""
+  sqs:
+    # -- Whether to use the CouchDB "structured query service" or not. This is disabled by
+    #    default for now, but will become the default in a future release.
+    enabled: false
+    # @ignore
+    url: ""
+    # @ignore
+    port: "4984"

 services:
   # -- The DNS suffix to use for service discovery. You only need to change this
   #    if you've configured your cluster to use a different DNS suffix.
@@ -240,6 +249,13 @@ services:
     # -- Extra environment variables to set for apps pods. Takes a list of
     #    name=value pairs.
     extraEnv: []
+    # -- Name of the K8s Secret in the same namespace which contains the extra
+    #    environment variables. This can be used to avoid storing sensitive
+    #    information in the values.yaml file.
+    extraEnvFromSecret: []
+    # - name: MY_SECRET_KEY
+    #   secretName: my-secret
+    #   secretKey: my-secret-key
     # -- Startup probe configuration for apps pods. You shouldn't need to
     #    change this, but if you want to you can find more information here:
     #    <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>
@@ -323,6 +339,13 @@ services:
     # -- Extra environment variables to set for automation worker pods. Takes a list of
     #    name=value pairs.
     extraEnv: []
+    # -- Name of the K8s Secret in the same namespace which contains the extra
+    #    environment variables. This can be used to avoid storing sensitive
+    #    information in the values.yaml file.
+    extraEnvFromSecret: []
+    # - name: MY_SECRET_KEY
+    #   secretName: my-secret
+    #   secretKey: my-secret-key
     # -- Startup probe configuration for automation worker pods. You shouldn't
     #    need to change this, but if you want to you can find more information
     #    here:
@@ -408,6 +431,13 @@ services:
     # -- Extra environment variables to set for worker pods. Takes a list of
     #    name=value pairs.
     extraEnv: []
+    # -- Name of the K8s Secret in the same namespace which contains the extra
+    #    environment variables. This can be used to avoid storing sensitive
+    #    information in the values.yaml file.
+    extraEnvFromSecret: []
+    # - name: MY_SECRET_KEY
+    #   secretName: my-secret
+    #   secretKey: my-secret-key
     # -- Startup probe configuration for worker pods. You shouldn't need to
     #    change this, but if you want to you can find more information here:
     #    <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>
@@ -611,10 +641,25 @@ couchdb:
     # @ignore
     repository: budibase/couchdb
     # @ignore
-    tag: v3.2.1
+    tag: v3.3.3
     # @ignore
     pullPolicy: Always
+
+  extraPorts:
+    # -- Extra ports to expose on the CouchDB service. We expose the SQS port
+    #    by default, but you can add more ports here if you need to.
+    - name: sqs
+      containerPort: 4984
+
+  service:
+    extraPorts:
+      # -- Extra ports to expose on the CouchDB service. We expose the SQS port
+      #    by default, but you can add more ports here if you need to.
+      - name: sqs
+        port: 4984
+        targetPort: 4984
+        protocol: TCP

   # @ignore
   # This should remain false. We ship Clouseau ourselves as part of the
   # budibase/couchdb image, and it's not possible to disable it because it's a
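The commented extraEnvFromSecret examples above reference a Secret by name and key; a matching Secret could be created from a manifest like this sketch (name, key, and value are placeholders):

apiVersion: v1
kind: Secret
metadata:
  name: my-secret
type: Opaque
stringData:
  my-secret-key: replace-with-real-value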


@@ -74,6 +74,7 @@ http {
     add_header X-Content-Type-Options nosniff always;
     add_header X-XSS-Protection "1; mode=block" always;
     add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
+    add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;

     # upstreams
     set $apps ${APPS_UPSTREAM_URL};


@@ -1,5 +1,5 @@
 {
-  "version": "2.27.5",
+  "version": "2.28.4",
   "npmClient": "yarn",
   "packages": [
     "packages/*",


@@ -37,8 +37,8 @@
     "build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
     "build:apps": "yarn build --scope @budibase/server --scope @budibase/worker",
     "build:cli": "yarn build --scope @budibase/cli",
-    "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
-    "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
+    "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
+    "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
     "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
     "check:types": "lerna run --concurrency 2 check:types",
     "build:sdk": "lerna run --stream build:sdk",


@@ -8,6 +8,7 @@ import {
   DatabaseOpts,
   DatabasePutOpts,
   DatabaseQueryOpts,
+  DBError,
   Document,
   isDocument,
   RowResponse,
@@ -41,7 +42,7 @@ function buildNano(couchInfo: { url: string; cookie: string }) {

 type DBCall<T> = () => Promise<T>

-class CouchDBError extends Error {
+class CouchDBError extends Error implements DBError {
   status: number
   statusCode: number
   reason: string
@@ -328,7 +329,14 @@ export class DatabaseImpl implements Database {
   async sqlDiskCleanup(): Promise<void> {
     const dbName = this.name
     const url = `/${dbName}/_cleanup`
-    return await this._sqlQuery<void>(url, "POST")
+    try {
+      await this._sqlQuery<void>(url, "POST")
+    } catch (err: any) {
+      // hack for now - SQS throws a 500 when there is nothing to clean-up
+      if (err.status !== 500) {
+        throw err
+      }
+    }
   }

   // removes a document from sqlite
@@ -352,18 +360,15 @@ export class DatabaseImpl implements Database {
   }

   async destroy() {
+    if (env.SQS_SEARCH_ENABLE && (await this.exists(SQLITE_DESIGN_DOC_ID))) {
+      // delete the design document, then run the cleanup operation
+      const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+      // remove all tables - save the definition then trigger a cleanup
+      definition.sql.tables = {}
+      await this.put(definition)
+      await this.sqlDiskCleanup()
+    }
     try {
-      if (env.SQS_SEARCH_ENABLE) {
-        // delete the design document, then run the cleanup operation
-        try {
-          const definition = await this.get<SQLiteDefinition>(
-            SQLITE_DESIGN_DOC_ID
-          )
-          await this.remove(SQLITE_DESIGN_DOC_ID, definition._rev)
-        } finally {
-          await this.sqlDiskCleanup()
-        }
-      }
       return await this.nano().db.destroy(this.name)
     } catch (err: any) {
       // didn't exist, don't worry


@@ -3,7 +3,8 @@ import { Ctx } from "@budibase/types"

 function validate(
   schema: Joi.ObjectSchema | Joi.ArraySchema,
-  property: string
+  property: string,
+  opts: { errorPrefix: string } = { errorPrefix: `Invalid ${property}` }
 ) {
   // Return a Koa middleware function
   return (ctx: Ctx, next: any) => {
@@ -29,16 +30,26 @@ function validate(

     const { error } = schema.validate(params)
     if (error) {
-      ctx.throw(400, `Invalid ${property} - ${error.message}`)
+      let message = error.message
+      if (opts.errorPrefix) {
+        message = `Invalid ${property} - ${message}`
+      }
+      ctx.throw(400, message)
     }
     return next()
   }
 }

-export function body(schema: Joi.ObjectSchema | Joi.ArraySchema) {
-  return validate(schema, "body")
+export function body(
+  schema: Joi.ObjectSchema | Joi.ArraySchema,
+  opts?: { errorPrefix: string }
+) {
+  return validate(schema, "body", opts)
 }

-export function params(schema: Joi.ObjectSchema | Joi.ArraySchema) {
-  return validate(schema, "params")
+export function params(
+  schema: Joi.ObjectSchema | Joi.ArraySchema,
+  opts?: { errorPrefix: string }
+) {
+  return validate(schema, "params", opts)
 }
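A usage sketch for the new opts parameter; the route, schema, and import path are illustrative, not part of this commit. Since the prefix is only applied when opts.errorPrefix is truthy, passing an empty string surfaces the raw Joi message:

import Joi from "joi"
import { body } from "./joi-validator" // hypothetical import path

const thingSchema = Joi.object({ name: Joi.string().required() })

// The 400 response is now the bare Joi message, e.g. `"name" is required`
router.post("/api/things", body(thingSchema, { errorPrefix: "" }), saveThing)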


@@ -14,6 +14,7 @@ import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
 import fsp from "fs/promises"
 import { HeadObjectOutput } from "aws-sdk/clients/s3"
+import { ReadableStream } from "stream/web"

 const streamPipeline = promisify(stream.pipeline)

 // use this as a temporary store of buckets that are being created
@@ -41,10 +42,7 @@ type UploadParams = BaseUploadParams & {
   path?: string | PathLike
 }

-export type StreamTypes =
-  | ReadStream
-  | NodeJS.ReadableStream
-  | ReadableStream<Uint8Array>
+export type StreamTypes = ReadStream | NodeJS.ReadableStream

 export type StreamUploadParams = BaseUploadParams & {
   stream?: StreamTypes
@@ -222,6 +220,9 @@ export async function streamUpload({
   extra,
   ttl,
 }: StreamUploadParams) {
+  if (!stream) {
+    throw new Error("Stream to upload is invalid/undefined")
+  }
   const extension = filename.split(".").pop()
   const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
@@ -251,14 +252,27 @@ export async function streamUpload({
       : CONTENT_TYPE_MAP.txt
   }

+  const bucket = sanitizeBucket(bucketName),
+    objKey = sanitizeKey(filename)
   const params = {
-    Bucket: sanitizeBucket(bucketName),
-    Key: sanitizeKey(filename),
+    Bucket: bucket,
+    Key: objKey,
     Body: stream,
     ContentType: contentType,
     ...extra,
   }
-  return objectStore.upload(params).promise()
+
+  const details = await objectStore.upload(params).promise()
+  const headDetails = await objectStore
+    .headObject({
+      Bucket: bucket,
+      Key: objKey,
+    })
+    .promise()
+
+  return {
+    ...details,
+    ContentLength: headDetails.ContentLength,
+  }
 }

 /**
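A usage sketch for the updated streamUpload, based only on the parameters and return fields visible in this diff (bucket and file names are placeholders):

import { createReadStream } from "fs"

const details = await streamUpload({
  bucketName: "app-exports",
  filename: "export.tar.gz",
  stream: createReadStream("/tmp/export.tar.gz"),
})
// ContentLength is now populated from the follow-up headObject call
console.log(details.ContentLength)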


@@ -21,6 +21,7 @@ let cleanupInterval: NodeJS.Timeout

 async function cleanup() {
   for (let queue of QUEUES) {
     await queue.clean(CLEANUP_PERIOD_MS, "completed")
+    await queue.clean(CLEANUP_PERIOD_MS, "failed")
   }
 }


@@ -15,6 +15,7 @@
     Checkbox,
     DatePicker,
     DrawerContent,
+    Toggle,
   } from "@budibase/bbui"
   import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
   import { automationStore, selectedAutomation, tables } from "stores/builder"
@@ -118,7 +119,6 @@
       searchableSchema: true,
     }).schema
   }
-
   try {
     if (isTestModal) {
       let newTestData = { schema }
@@ -385,6 +385,16 @@
     return params
   }

+  function toggleAttachmentBinding(e, key) {
+    onChange(
+      {
+        detail: "",
+      },
+      key
+    )
+    onChange({ detail: { useAttachmentBinding: e.detail } }, "meta")
+  }
+
   onMount(async () => {
     try {
       await environment.loadVariables()
@@ -462,27 +472,64 @@
             <div class="label-wrapper">
               <Label>{label}</Label>
             </div>
-            <div class="attachment-field-width">
-              <KeyValueBuilder
-                on:change={e =>
-                  onChange(
-                    {
-                      detail: e.detail.map(({ name, value }) => ({
-                        url: name,
-                        filename: value,
-                      })),
-                    },
-                    key
-                  )}
-                object={handleAttachmentParams(inputData[key])}
-                allowJS
-                {bindings}
-                keyBindings
-                customButtonText={"Add attachment"}
-                keyPlaceholder={"URL"}
-                valuePlaceholder={"Filename"}
-              />
-            </div>
+            <div class="toggle-container">
+              <Toggle
+                value={inputData?.meta?.useAttachmentBinding}
+                text={"Use bindings"}
+                size={"XS"}
+                on:change={e => toggleAttachmentBinding(e, key)}
+              />
+            </div>
+            <div class="attachment-field-width">
+              {#if !inputData?.meta?.useAttachmentBinding}
+                <KeyValueBuilder
+                  on:change={e =>
+                    onChange(
+                      {
+                        detail: e.detail.map(({ name, value }) => ({
+                          url: name,
+                          filename: value,
+                        })),
+                      },
+                      key
+                    )}
+                  object={handleAttachmentParams(inputData[key])}
+                  allowJS
+                  {bindings}
+                  keyBindings
+                  customButtonText={"Add attachment"}
+                  keyPlaceholder={"URL"}
+                  valuePlaceholder={"Filename"}
+                />
+              {:else if isTestModal}
+                <ModalBindableInput
+                  title={value.title || label}
+                  value={inputData[key]}
+                  panel={AutomationBindingPanel}
+                  type={value.customType}
+                  on:change={e => onChange(e, key)}
+                  {bindings}
+                  updateOnChange={false}
+                />
+              {:else}
+                <div class="test">
+                  <DrawerBindableInput
+                    title={value.title ?? label}
+                    panel={AutomationBindingPanel}
+                    type={value.customType}
+                    value={inputData[key]}
+                    on:change={e => onChange(e, key)}
+                    {bindings}
+                    updateOnChange={false}
+                    placeholder={value.customType === "queryLimit"
+                      ? queryLimit
+                      : ""}
+                    drawerLeft="260px"
+                  />
+                </div>
+              {/if}
+            </div>
           </div>
         {:else if value.customType === "filters"}
           <ActionButton on:click={drawer.show}>Define filters</ActionButton>


@@ -10,12 +10,12 @@
   import { TableNames } from "constants"

   const dispatch = createEventDispatcher()

   export let value
   export let meta
   export let bindings
   export let isTestModal
   export let isUpdateRow

   $: parsedBindings = bindings.map(binding => {
     let clone = Object.assign({}, binding)
     clone.icon = "ShareAndroid"
@@ -94,17 +94,22 @@
     dispatch("change", newValue)
   }

-  const onChangeSetting = (e, field) => {
-    let fields = {}
-    fields[field] = {
-      clearRelationships: e.detail,
-    }
+  const onChangeSetting = (field, key, value) => {
+    let newField = {}
+    newField[field] = {
+      [key]: value,
+    }
+    let updatedFields = {
+      ...meta?.fields,
+      ...newField,
+    }
     dispatch("change", {
       key: "meta",
-      fields,
+      fields: updatedFields,
     })
   }

   // Ensure any nullish tableId values get set to empty string so
   // that the select works
   $: if (value?.tableId == null) value = { tableId: "" }
@@ -157,6 +162,9 @@
             bindings={parsedBindings}
             {value}
             {onChange}
+            useAttachmentBinding={meta?.fields?.[field]
+              ?.useAttachmentBinding}
+            {onChangeSetting}
           />
         </DrawerBindableSlot>
       {/if}
@@ -167,7 +175,8 @@
           value={meta.fields?.[field]?.clearRelationships}
           text={"Clear relationships if empty?"}
           size={"S"}
-          on:change={e => onChangeSetting(e, field)}
+          on:change={e =>
+            onChangeSetting(field, "clearRelationships", e.detail)}
         />
       </div>
     {/if}


@@ -1,5 +1,11 @@
 <script>
-  import { Select, DatePicker, Multiselect, TextArea } from "@budibase/bbui"
+  import {
+    Select,
+    DatePicker,
+    Multiselect,
+    TextArea,
+    Toggle,
+  } from "@budibase/bbui"
   import { FieldType } from "@budibase/types"
   import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
   import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
@@ -14,6 +20,8 @@
   export let value
   export let bindings
   export let isTestModal
+  export let useAttachmentBinding
+  export let onChangeSetting

   $: parsedBindings = bindings.map(binding => {
     let clone = Object.assign({}, binding)
@@ -27,6 +35,8 @@
     FieldType.SIGNATURE_SINGLE,
   ]

+  let previousBindingState = useAttachmentBinding
+
   function schemaHasOptions(schema) {
     return !!schema.constraints?.inclusion?.length
   }
@@ -34,13 +44,6 @@
   function handleAttachmentParams(keyValueObj) {
     let params = {}

-    if (
-      (schema.type === FieldType.ATTACHMENT_SINGLE ||
-        schema.type === FieldType.SIGNATURE_SINGLE) &&
-      Object.keys(keyValueObj).length === 0
-    ) {
-      return []
-    }
     if (!Array.isArray(keyValueObj) && keyValueObj) {
       keyValueObj = [keyValueObj]
     }
@@ -52,6 +55,26 @@
     }
     return params
   }
+
+  async function handleToggleChange(toggleField, event) {
+    if (event.detail === true) {
+      value[toggleField] = []
+    } else {
+      value[toggleField] = ""
+    }
+    previousBindingState = event.detail
+    onChangeSetting(toggleField, "useAttachmentBinding", event.detail)
+    onChange({ detail: value[toggleField] }, toggleField)
+  }
+
+  $: if (useAttachmentBinding !== previousBindingState) {
+    if (useAttachmentBinding) {
+      value[field] = []
+    } else {
+      value[field] = ""
+    }
+    previousBindingState = useAttachmentBinding
+  }
 </script>

 {#if schemaHasOptions(schema) && schema.type !== "array"}
@@ -108,38 +131,65 @@
     useLabel={false}
   />
 {:else if attachmentTypes.includes(schema.type)}
-  <div class="attachment-field-spacinng">
-    <KeyValueBuilder
-      on:change={e =>
-        onChange(
-          {
-            detail:
-              schema.type === FieldType.ATTACHMENT_SINGLE ||
-              schema.type === FieldType.SIGNATURE_SINGLE
-                ? e.detail.length > 0
-                  ? {
-                      url: e.detail[0].name,
-                      filename: e.detail[0].value,
-                    }
-                  : {}
-                : e.detail.map(({ name, value }) => ({
-                    url: name,
-                    filename: value,
-                  })),
-          },
-          field
-        )}
-      object={handleAttachmentParams(value[field])}
-      allowJS
-      {bindings}
-      keyBindings
-      customButtonText={"Add attachment"}
-      keyPlaceholder={"URL"}
-      valuePlaceholder={"Filename"}
-      actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
-        schema.type === FieldType.SIGNATURE) &&
-        Object.keys(value[field]).length >= 1}
-    />
+  <div class="attachment-field-container">
+    <div class="toggle-container">
+      <Toggle
+        value={useAttachmentBinding}
+        text={"Use bindings"}
+        size={"XS"}
+        on:change={e => handleToggleChange(field, e)}
+      />
+    </div>
+    {#if !useAttachmentBinding}
+      <div class="attachment-field-spacing">
+        <KeyValueBuilder
+          on:change={async e => {
+            onChange(
+              {
+                detail:
+                  schema.type === FieldType.ATTACHMENT_SINGLE ||
+                  schema.type === FieldType.SIGNATURE_SINGLE
+                    ? e.detail.length > 0
+                      ? {
+                          url: e.detail[0].name,
+                          filename: e.detail[0].value,
+                        }
+                      : {}
+                    : e.detail.map(({ name, value }) => ({
+                        url: name,
+                        filename: value,
+                      })),
+              },
+              field
+            )
+          }}
+          object={handleAttachmentParams(value[field])}
+          allowJS
+          {bindings}
+          keyBindings
+          customButtonText={"Add attachment"}
+          keyPlaceholder={"URL"}
+          valuePlaceholder={"Filename"}
+          actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
+            schema.type === FieldType.SIGNATURE) &&
+            Object.keys(value[field]).length >= 1}
+        />
+      </div>
+    {:else}
+      <div class="json-input-spacing">
+        <svelte:component
+          this={isTestModal ? ModalBindableInput : DrawerBindableInput}
+          panel={AutomationBindingPanel}
+          value={value[field]}
+          on:change={e => onChange(e, field)}
+          type="string"
+          bindings={parsedBindings}
+          allowJS={true}
+          updateOnChange={false}
+          title={schema.name}
+        />
+      </div>
+    {/if}
   </div>
 {:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
   <svelte:component
@@ -156,7 +206,8 @@
 {/if}

 <style>
-  .attachment-field-spacinng {
+  .attachment-field-spacing,
+  .json-input-spacing {
     margin-top: var(--spacing-s);
     margin-bottom: var(--spacing-l);
   }


@@ -1,6 +1,6 @@
 <script>
   import { viewsV2 } from "stores/builder"
-  import { admin } from "stores/portal"
+  import { admin, licensing } from "stores/portal"
   import { Grid } from "@budibase/frontend-core"
   import { API } from "api"
   import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
@@ -28,6 +28,7 @@
   showAvatars={false}
   on:updatedatasource={handleGridViewUpdate}
   isCloud={$admin.cloud}
+  allowViewReadonlyColumns={$licensing.isViewReadonlyColumnsEnabled}
 >
   <svelte:fragment slot="filter">
     <GridFilterButton />


@@ -25,6 +25,8 @@
     },
   ]

+  const MAX_DURATION = 120000 // Maximum duration in milliseconds (2 minutes)
+
   onMount(() => {
     if (!parameters.type) {
       parameters.type = "success"
@@ -33,6 +35,14 @@
       parameters.autoDismiss = true
     }
   })
+
+  function handleDurationChange(event) {
+    let newDuration = event.detail
+    if (newDuration > MAX_DURATION) {
+      newDuration = MAX_DURATION
+    }
+    parameters.duration = newDuration
+  }
 </script>

 <div class="root">
@@ -47,6 +57,16 @@
   />
   <Label />
   <Checkbox text="Auto dismiss" bind:value={parameters.autoDismiss} />
+  {#if parameters.autoDismiss}
+    <Label>Duration (ms)</Label>
+    <DrawerBindableInput
+      title="Duration"
+      {bindings}
+      value={parameters.duration}
+      placeholder="3000"
+      on:change={handleDurationChange}
+    />
+  {/if}
 </div>

 <style>


@@ -30,6 +30,7 @@ import ActionDefinitions from "components/design/settings/controls/ButtonActionE
 import { environment, licensing } from "stores/portal"
 import { convertOldFieldFormat } from "components/design/settings/controls/FieldConfiguration/utils"
 import { FIELDS } from "constants/backend"
+import { FieldType } from "@budibase/types"

 const { ContextScopes } = Constants

@@ -555,6 +556,9 @@ const getComponentBindingCategory = (component, context, def) => {
 export const getUserBindings = () => {
   let bindings = []
   const { schema } = getSchemaForDatasourcePlus(TableNames.USERS)
+  // add props that are not in the user metadata table schema
+  // but will be there for logged-in user
+  schema["globalId"] = { type: FieldType.STRING }
   const keys = Object.keys(schema).sort()

   const safeUser = makePropSafe("user")
@@ -728,7 +732,7 @@ const getRoleBindings = () => {
   return (get(rolesStore) || []).map(role => {
     return {
       type: "context",
-      runtimeBinding: `trim "${role._id}"`,
+      runtimeBinding: `'${role._id}'`,
       readableBinding: `Role.${role.name}`,
       category: "Role",
       icon: "UserGroup",


@@ -138,6 +138,11 @@ export const createLicensingStore = () => {
       const isViewPermissionsEnabled = license.features.includes(
         Constants.Features.VIEW_PERMISSIONS
       )
+      const isViewReadonlyColumnsEnabled = license.features.includes(
+        Constants.Features.VIEW_READONLY_COLUMNS
+      )
+
       store.update(state => {
         return {
           ...state,
@@ -157,6 +162,7 @@ export const createLicensingStore = () => {
           triggerAutomationRunEnabled,
           isViewPermissionsEnabled,
           perAppBuildersEnabled,
+          isViewReadonlyColumnsEnabled,
         }
       })
     },


@@ -68,6 +68,15 @@
         maximum: schema?.constraints?.length?.maximum,
       }
     },
+    [FieldType.DATETIME]: (_field, schema) => {
+      const props = {
+        valueAsTimestamp: !schema?.timeOnly,
+      }
+      if (schema?.dateOnly) {
+        props.enableTime = false
+      }
+      return props
+    },
   }

   const fieldSchema = getFieldSchema(field)


@@ -16,15 +16,37 @@
   export let onChange
   export let span
   export let helpText = null
+  export let valueAsTimestamp = false

   let fieldState
   let fieldApi

   const handleChange = e => {
-    const changed = fieldApi.setValue(e.detail)
-    if (onChange && changed) {
-      onChange({ value: e.detail })
+    let value = e.detail
+    if (timeOnly && valueAsTimestamp) {
+      if (!isValidDate(value)) {
+        // Handle time only fields that are timestamps under the hood
+        value = timeToDateISOString(value)
+      }
     }
+
+    const changed = fieldApi.setValue(value)
+    if (onChange && changed) {
+      onChange({ value })
+    }
+  }
+
+  const isValidDate = value => !isNaN(new Date(value))
+
+  const timeToDateISOString = value => {
+    let [hours, minutes] = value.split(":").map(Number)
+
+    const date = new Date()
+    date.setHours(hours)
+    date.setMinutes(minutes)
+    date.setSeconds(0)
+    date.setMilliseconds(0)
+    return date.toISOString()
   }
 </script>


@@ -206,7 +206,7 @@
         error: initialError,
         disabled:
           disabled || fieldDisabled || (isAutoColumn && !editAutoColumns),
-        readonly: readonly || fieldReadOnly,
+        readonly: readonly || fieldReadOnly || schema?.[field]?.readonly,
         defaultValue,
         validator,
         lastUpdate: Date.now(),


@@ -1,7 +1,7 @@
 import { writable, get } from "svelte/store"
 import { routeStore } from "./routes"

-const NOTIFICATION_TIMEOUT = 3000
+const DEFAULT_NOTIFICATION_TIMEOUT = 3000

 const createNotificationStore = () => {
   let block = false
@@ -18,13 +18,13 @@ const createNotificationStore = () => {
     type = "info",
     icon,
     autoDismiss = true,
+    duration,
     count = 1
   ) => {
     if (block) {
       return
     }
-
+    // If peeking, pass notifications back to parent window
     if (get(routeStore).queryParams?.peek) {
       window.parent.postMessage({
@@ -32,11 +32,13 @@ const createNotificationStore = () => {
         message,
         type,
         icon,
+        duration,
         autoDismiss,
       },
     })
     return
   }
+
   const _id = id()
   store.update(state => {
     const duplicateError = state.find(err => err.message === message)
@@ -60,7 +62,7 @@ const createNotificationStore = () => {
     if (autoDismiss) {
       setTimeout(() => {
         dismiss(_id)
-      }, NOTIFICATION_TIMEOUT)
+      }, duration || DEFAULT_NOTIFICATION_TIMEOUT)
     }
   }
@@ -74,14 +76,14 @@ const createNotificationStore = () => {
     subscribe: store.subscribe,
     actions: {
       send,
-      info: (msg, autoDismiss) =>
-        send(msg, "info", "Info", autoDismiss ?? true),
-      success: (msg, autoDismiss) =>
-        send(msg, "success", "CheckmarkCircle", autoDismiss ?? true),
-      warning: (msg, autoDismiss) =>
-        send(msg, "warning", "Alert", autoDismiss ?? true),
-      error: (msg, autoDismiss) =>
-        send(msg, "error", "Alert", autoDismiss ?? false),
+      info: (msg, autoDismiss, duration) =>
+        send(msg, "info", "Info", autoDismiss ?? true, duration),
+      success: (msg, autoDismiss, duration) =>
+        send(msg, "success", "CheckmarkCircle", autoDismiss ?? true, duration),
+      warning: (msg, autoDismiss, duration) =>
+        send(msg, "warning", "Alert", autoDismiss ?? true, duration),
+      error: (msg, autoDismiss, duration) =>
+        send(msg, "error", "Alert", autoDismiss ?? false, duration),
       blockNotifications,
       dismiss,
     },


@@ -416,11 +416,11 @@ const continueIfHandler = action => {
 }

 const showNotificationHandler = action => {
-  const { message, type, autoDismiss } = action.parameters
+  const { message, type, autoDismiss, duration } = action.parameters
   if (!message || !type) {
     return
   }
-  notificationStore.actions[type]?.(message, autoDismiss)
+  notificationStore.actions[type]?.(message, autoDismiss, duration)
 }

 const promptUserHandler = () => {}


@@ -33,7 +33,8 @@
     column.schema.autocolumn ||
     column.schema.disabled ||
     column.schema.type === "formula" ||
-    (!$config.canEditRows && !row._isNewRow)
+    (!$config.canEditRows && !row._isNewRow) ||
+    column.schema.readonly

   // Register this cell API if the row is focused
   $: {


@ -1,49 +1,98 @@
<script> <script>
import { getContext } from "svelte" import { getContext } from "svelte"
import { ActionButton, Popover, Icon } from "@budibase/bbui" import { ActionButton, Popover, Icon, notifications } from "@budibase/bbui"
import { getColumnIcon } from "../lib/utils" import { getColumnIcon } from "../lib/utils"
import ToggleActionButtonGroup from "./ToggleActionButtonGroup.svelte" import ToggleActionButtonGroup from "./ToggleActionButtonGroup.svelte"
import { helpers } from "@budibase/shared-core"
export let allowViewReadonlyColumns = false
const { columns, datasource, stickyColumn, dispatch } = getContext("grid") const { columns, datasource, stickyColumn, dispatch } = getContext("grid")
let open = false let open = false
let anchor let anchor
$: anyHidden = $columns.some(col => !col.visible) $: allColumns = $stickyColumn ? [$stickyColumn, ...$columns] : $columns
$: text = getText($columns)
$: restrictedColumns = allColumns.filter(col => !col.visible || col.readonly)
$: anyRestricted = restrictedColumns.length
$: text = anyRestricted ? `Columns (${anyRestricted} restricted)` : "Columns"
const toggleColumn = async (column, permission) => { const toggleColumn = async (column, permission) => {
const visible = permission !== PERMISSION_OPTIONS.HIDDEN const visible = permission !== PERMISSION_OPTIONS.HIDDEN
const readonly = permission === PERMISSION_OPTIONS.READONLY
datasource.actions.addSchemaMutation(column.name, { visible }) await datasource.actions.addSchemaMutation(column.name, {
await datasource.actions.saveSchemaMutations() visible,
readonly,
})
try {
await datasource.actions.saveSchemaMutations()
} catch (e) {
notifications.error(e.message)
} finally {
await datasource.actions.resetSchemaMutations()
await datasource.actions.refreshDefinition()
}
dispatch(visible ? "show-column" : "hide-column") dispatch(visible ? "show-column" : "hide-column")
} }
const getText = columns => {
const hidden = columns.filter(col => !col.visible).length
return hidden ? `Columns (${hidden} restricted)` : "Columns"
}
   const PERMISSION_OPTIONS = {
     WRITABLE: "writable",
+    READONLY: "readonly",
     HIDDEN: "hidden",
   }
-  const options = [
-    { icon: "Edit", value: PERMISSION_OPTIONS.WRITABLE, tooltip: "Writable" },
-    {
+  $: displayColumns = allColumns.map(c => {
+    const isRequired = helpers.schema.isRequired(c.schema.constraints)
+    const isDisplayColumn = $stickyColumn === c
+    const requiredTooltip = isRequired && "Required columns must be writable"
+    const editEnabled =
+      !isRequired ||
+      columnToPermissionOptions(c) !== PERMISSION_OPTIONS.WRITABLE
+    const options = [
+      {
+        icon: "Edit",
+        value: PERMISSION_OPTIONS.WRITABLE,
+        tooltip: (!editEnabled && requiredTooltip) || "Writable",
+        disabled: !editEnabled,
+      },
+    ]
+    if ($datasource.type === "viewV2") {
+      options.push({
+        icon: "Visibility",
+        value: PERMISSION_OPTIONS.READONLY,
+        tooltip: allowViewReadonlyColumns
+          ? requiredTooltip || "Read only"
+          : "Read only (premium feature)",
+        disabled: !allowViewReadonlyColumns || isRequired,
+      })
+    }
+    options.push({
       icon: "VisibilityOff",
       value: PERMISSION_OPTIONS.HIDDEN,
-      tooltip: "Hidden",
-    },
-  ]
+      disabled: isDisplayColumn || isRequired,
+      tooltip:
+        (isDisplayColumn && "Display column cannot be hidden") ||
+        requiredTooltip ||
+        "Hidden",
+    })
+    return { ...c, options }
+  })
   function columnToPermissionOptions(column) {
-    if (!column.visible) {
+    if (!column.schema.visible) {
       return PERMISSION_OPTIONS.HIDDEN
     }
+    if (column.schema.readonly) {
+      return PERMISSION_OPTIONS.READONLY
+    }
     return PERMISSION_OPTIONS.WRITABLE
   }
 </script>
@ -54,7 +103,7 @@
   quiet
   size="M"
   on:click={() => (open = !open)}
-  selected={open || anyHidden}
+  selected={open || anyRestricted}
   disabled={!$columns.length}
 >
   {text}
@ -64,19 +113,7 @@
 <Popover bind:open {anchor} align="left">
   <div class="content">
     <div class="columns">
-      {#if $stickyColumn}
-        <div class="column">
-          <Icon size="S" name={getColumnIcon($stickyColumn)} />
-          {$stickyColumn.label}
-        </div>
-        <ToggleActionButtonGroup
-          disabled
-          value={PERMISSION_OPTIONS.WRITABLE}
-          {options}
-        />
-      {/if}
-      {#each $columns as column}
+      {#each displayColumns as column}
         <div class="column">
           <Icon size="S" name={getColumnIcon(column)} />
           {column.label}
@ -84,7 +121,7 @@
         <ToggleActionButtonGroup
           on:click={e => toggleColumn(column, e.detail)}
           value={columnToPermissionOptions(column)}
-          {options}
+          options={column.options}
         />
       {/each}
     </div>
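
For readers skimming the diff: the core of this file's change is a three-state column permission model. The sketch below restates that mapping outside Svelte as plain TypeScript; `ColumnLike` and `columnToPermission` are illustrative names, not identifiers from the codebase.

```ts
// Minimal sketch of the three-state permission model above, assuming a
// simplified column shape. Hidden wins over readonly, and a visible,
// non-readonly column is writable.
type ColumnLike = { schema: { visible?: boolean; readonly?: boolean } }

const PERMISSION_OPTIONS = {
  WRITABLE: "writable",
  READONLY: "readonly",
  HIDDEN: "hidden",
} as const

function columnToPermission(column: ColumnLike): string {
  if (!column.schema.visible) {
    return PERMISSION_OPTIONS.HIDDEN
  }
  if (column.schema.readonly) {
    return PERMISSION_OPTIONS.READONLY
  }
  return PERMISSION_OPTIONS.WRITABLE
}

console.log(columnToPermission({ schema: { visible: false } })) // "hidden"
console.log(columnToPermission({ schema: { visible: true, readonly: true } })) // "readonly"
console.log(columnToPermission({ schema: { visible: true } })) // "writable"
```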

View file

@ -7,7 +7,6 @@
   export let value
   export let options
-  export let disabled
 </script>
 <div class="permissionPicker">
@ -15,7 +14,7 @@
     <AbsTooltip text={option.tooltip} type={TooltipType.Info}>
       <ActionButton
         on:click={() => dispatch("click", option.value)}
-        {disabled}
+        disabled={option.disabled}
         size="S"
         icon={option.icon}
         quiet

View file

@ -57,6 +57,7 @@
   export let buttons = null
   export let darkMode
   export let isCloud = null
+  export let allowViewReadonlyColumns = false
   // Unique identifier for DOM nodes inside this instance
   const gridID = `grid-${Math.random().toString().slice(2)}`
@ -153,7 +154,7 @@
   <div class="controls-left">
     <slot name="filter" />
     <SortButton />
-    <ColumnsSettingButton />
+    <ColumnsSettingButton {allowViewReadonlyColumns} />
     <SizeButton />
     <slot name="controls" />
   </div>

View file

@ -146,6 +146,7 @@ export const initialise = context => {
       schema: fieldSchema,
       width: fieldSchema.width || oldColumn?.width || DefaultColumnWidth,
       visible: fieldSchema.visible ?? true,
+      readonly: fieldSchema.readonly,
       order: fieldSchema.order ?? oldColumn?.order,
       primaryDisplay: field === primaryDisplay,
     }

View file

@ -204,6 +204,10 @@ export const createActions = context => {
       ...$definition,
       schema: newSchema,
     })
+    resetSchemaMutations()
+  }
+
+  const resetSchemaMutations = () => {
     schemaMutations.set({})
   }
@ -253,6 +257,7 @@
         addSchemaMutation,
         addSchemaMutations,
         saveSchemaMutations,
+        resetSchemaMutations,
       },
     },
   }
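
The new `resetSchemaMutations` action rounds out a small buffer-and-flush lifecycle: schema edits accumulate in a local map, `saveSchemaMutations` persists them, and the buffer is cleared afterwards (the Svelte component earlier in this commit also resets in a `finally`, so a failed save does not leave stale edits behind). A rough standalone sketch of that pattern, with invented names:

```ts
// Rough sketch of the buffer-and-flush lifecycle; SchemaMutationStore is an
// invented stand-in for the Svelte stores used in the real code.
type Mutation = { visible?: boolean; readonly?: boolean }

class SchemaMutationStore {
  private buffer: Record<string, Mutation> = {}

  addSchemaMutation(field: string, mutation: Mutation) {
    this.buffer[field] = { ...this.buffer[field], ...mutation }
  }

  resetSchemaMutations() {
    this.buffer = {}
  }

  async saveSchemaMutations(
    persist: (b: Record<string, Mutation>) => Promise<void>
  ) {
    try {
      await persist(this.buffer)
    } finally {
      // mirror the component code: always clear, even when persist throws
      this.resetSchemaMutations()
    }
  }
}
```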

@ -1 +1 @@
-Subproject commit 5189b83bea1868574ff7f4c51fe5db38a11badb8
+Subproject commit 85b4fc9ea01472bf69840d046733ad596ef893e2

View file

@ -68,7 +68,6 @@
     "aws-sdk": "2.1030.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
-    "bl": "^6.0.12",
     "bull": "4.10.1",
     "chokidar": "3.5.3",
     "content-disposition": "^0.5.4",
@ -116,7 +115,8 @@
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
     "worker-farm": "1.7.0",
-    "xml2js": "0.5.0"
+    "xml2js": "0.5.0",
+    "tmp": "0.2.3"
   },
   "devDependencies": {
     "@babel/preset-env": "7.16.11",
@ -137,6 +137,7 @@
     "@types/supertest": "2.0.14",
     "@types/tar": "6.1.5",
     "@types/uuid": "8.3.4",
+    "@types/tmp": "0.2.6",
     "copyfiles": "2.4.1",
     "docker-compose": "0.23.17",
     "jest": "29.7.0",

View file

@ -48,6 +48,7 @@ async function init() {
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
     PASSWORD_MIN_LENGTH: "1",
+    SQS_SEARCH_ENABLE: "1",
   }
   config = { ...config, ...existingConfig }

View file

@ -358,11 +358,14 @@ async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
     await createApp(appId)
   }
-  // Initialise the app migration version as the latest one
-  await appMigrations.updateAppMigrationMetadata({
-    appId,
-    version: appMigrations.getLatestMigrationId(),
-  })
+  const latestMigrationId = appMigrations.getLatestEnabledMigrationId()
+  if (latestMigrationId) {
+    // Initialise the app migration version as the latest one
+    await appMigrations.updateAppMigrationMetadata({
+      appId,
+      version: latestMigrationId,
+    })
+  }
   await cache.app.invalidateAppMetadata(appId, newApplication)
   return newApplication

View file

@ -1,7 +1,7 @@
 import { outputProcessing } from "../../utilities/rowProcessor"
 import { InternalTables } from "../../db/utils"
 import { getFullUser } from "../../utilities/users"
-import { roles, context } from "@budibase/backend-core"
+import { roles, context, db as dbCore } from "@budibase/backend-core"
 import { ContextUser, Row, UserCtx } from "@budibase/types"
 import sdk from "../../sdk"
 import { processUser } from "../../utilities/global"
@ -27,6 +27,8 @@ export async function fetchSelf(ctx: UserCtx) {
   const appId = context.getAppId()
   let user: ContextUser = await getFullUser(userId)
+  // add globalId of user
+  user.globalId = dbCore.getGlobalIDFromUserMetadataID(userId)
   // this shouldn't be returned by the app self
   delete user.roles
   // forward the csrf token from the session
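
Context for the `globalId` addition: inside an app, users are stored as metadata rows whose `_id` is derived from the tenant-wide user ID, and the helper reverses that derivation. A toy round trip under an assumed prefix layout (the real format lives in the server's `db/utils` and backend-core, so treat the prefix below as illustrative only):

```ts
// Toy model of the user-ID round trip; the exact prefix is an assumption
// made for illustration, not the library's guaranteed format.
const METADATA_PREFIX = "ro_ta_users_" // assumed app-side row prefix

const generateUserMetadataID = (globalId: string) =>
  `${METADATA_PREFIX}${globalId}`

const getGlobalIDFromUserMetadataID = (metadataId: string) =>
  metadataId.startsWith(METADATA_PREFIX)
    ? metadataId.slice(METADATA_PREFIX.length)
    : metadataId

const globalId = "us_123e4567"
const metadataId = generateUserMetadataID(globalId) // "ro_ta_users_us_123e4567"
console.log(getGlobalIDFromUserMetadataID(metadataId) === globalId) // true
```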

View file

@ -3,7 +3,7 @@ import { migrate as migrationImpl, MIGRATIONS } from "../../migrations"
import { Ctx } from "@budibase/types" import { Ctx } from "@budibase/types"
import { import {
getAppMigrationVersion, getAppMigrationVersion,
getLatestMigrationId, getLatestEnabledMigrationId,
} from "../../appMigrations" } from "../../appMigrations"
export async function migrate(ctx: Ctx) { export async function migrate(ctx: Ctx) {
@ -27,7 +27,9 @@ export async function getMigrationStatus(ctx: Ctx) {
const latestAppliedMigration = await getAppMigrationVersion(appId) const latestAppliedMigration = await getAppMigrationVersion(appId)
const migrated = latestAppliedMigration === getLatestMigrationId() const latestMigrationId = getLatestEnabledMigrationId()
const migrated =
!latestMigrationId || latestAppliedMigration >= latestMigrationId
ctx.body = { migrated } ctx.body = { migrated }
ctx.status = 200 ctx.status = 200
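
The switch from strict equality to `>=` matters: migration IDs begin with a sortable timestamp (see `getTimestamp` later in this commit), so plain string comparison orders them, and an app that has applied a migration newer than the latest enabled one still counts as migrated. A small sketch; the example IDs are invented, in the timestamp-prefixed style the code implies:

```ts
// Sketch of the migrated check above, using made-up example IDs.
function isMigrated(latestApplied: string, latestEnabled?: string): boolean {
  // no enabled migrations at all -> nothing can be outstanding
  if (!latestEnabled) {
    return true
  }
  return latestApplied >= latestEnabled
}

console.log(isMigrated("", "20240601120000_example")) // false: never migrated
console.log(isMigrated("20240601120000_example", "20240601120000_example")) // true
console.log(isMigrated("20990101000000_later", "20240601120000_example")) // true
```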

View file

@ -25,6 +25,7 @@ import {
   outputProcessing,
 } from "../../../utilities/rowProcessor"
 import { cloneDeep } from "lodash"
+import { generateIdForRow } from "./utils"
 export async function handleRequest<T extends Operation>(
   operation: T,
@ -55,11 +56,19 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     throw { validation: validateResult.errors }
   }
+  const beforeRow = await sdk.rows.external.getRow(tableId, _id, {
+    relationships: true,
+  })
+
   const response = await handleRequest(Operation.UPDATE, tableId, {
     id: breakRowIdField(_id),
     row: dataToUpdate,
   })
-  const row = await sdk.rows.external.getRow(tableId, _id, {
+
+  // The id might have been changed, so the refetching would fail. Recalculating the id just in case
+  const updatedId =
+    generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
+  const row = await sdk.rows.external.getRow(tableId, updatedId, {
     relationships: true,
   })
   const enrichedRow = await outputProcessing(table, row, {
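
Why the refetch needs a recalculated ID: for external tables the row `_id` is derived from the primary-key values, so a PATCH that touches a key column invalidates the old `_id`. A simplified illustration of the `{ ...beforeRow, ...dataToUpdate }` merge; `deriveId` only approximates what `generateIdForRow` does (the real function quotes string values slightly differently, as the composite-key test later in this commit shows):

```ts
// Simplified stand-in for generateIdForRow: derive the row ID from the
// primary-key values, URI-encoded for use in URLs.
type Row = Record<string, string | number>

const deriveId = (row: Row, primary: string[]) =>
  encodeURIComponent(JSON.stringify(primary.map(col => row[col])))

const primary = ["number", "string"]
const beforeRow: Row = { number: 7, string: "abc", other: "x" }
const dataToUpdate: Row = { number: 1500 }

const oldId = deriveId(beforeRow, primary) // "%5B7%2C%22abc%22%5D"
// Merge old values with the patch, exactly as the handler does, then
// re-derive the ID before refetching.
const updatedId = deriveId({ ...beforeRow, ...dataToUpdate }, primary)
console.log(updatedId) // "%5B1500%2C%22abc%22%5D"; the old ID would no longer resolve
```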

View file

@ -31,7 +31,7 @@ import {
 } from "@budibase/types"
 import {
   getAppMigrationVersion,
-  getLatestMigrationId,
+  getLatestEnabledMigrationId,
 } from "../../../appMigrations"
 import send from "koa-send"
@ -133,7 +133,7 @@ const requiresMigration = async (ctx: Ctx) => {
     ctx.throw("AppId could not be found")
   }
-  const latestMigration = getLatestMigrationId()
+  const latestMigration = getLatestEnabledMigrationId()
   if (!latestMigration) {
     return false
   }

View file

@ -1,91 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`/datasources fetch returns all the datasources from the server 1`] = `
[
{
"config": {},
"entities": [
{
"_id": "ta_users",
"_rev": "1-73b7912e6cbdd3d696febc60f3715844",
"createdAt": "2020-01-01T00:00:00.000Z",
"name": "Users",
"primaryDisplay": "email",
"schema": {
"email": {
"constraints": {
"email": true,
"length": {
"maximum": "",
},
"presence": true,
"type": "string",
},
"name": "email",
"type": "string",
},
"firstName": {
"constraints": {
"presence": false,
"type": "string",
},
"name": "firstName",
"type": "string",
},
"lastName": {
"constraints": {
"presence": false,
"type": "string",
},
"name": "lastName",
"type": "string",
},
"roleId": {
"constraints": {
"inclusion": [
"ADMIN",
"POWER",
"BASIC",
"PUBLIC",
],
"presence": false,
"type": "string",
},
"name": "roleId",
"type": "options",
},
"status": {
"constraints": {
"inclusion": [
"active",
"inactive",
],
"presence": false,
"type": "string",
},
"name": "status",
"type": "options",
},
},
"sourceId": "bb_internal",
"sourceType": "internal",
"type": "table",
"updatedAt": "2020-01-01T00:00:00.000Z",
"views": {},
},
],
"name": "Budibase DB",
"source": "BUDIBASE",
"type": "budibase",
},
{
"config": {},
"createdAt": "2020-01-01T00:00:00.000Z",
"isSQL": true,
"name": "Test",
"source": "POSTGRES",
"type": "datasource",
"updatedAt": "2020-01-01T00:00:00.000Z",
},
]
`;

View file

@ -334,6 +334,12 @@ describe("/applications", () => {
expect(events.app.deleted).toHaveBeenCalledTimes(1) expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1) expect(events.app.unpublished).toHaveBeenCalledTimes(1)
}) })
it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => {
await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
await config.api.application.delete(app.appId)
})
})
}) })
describe("POST /api/applications/:appId/duplicate", () => { describe("POST /api/applications/:appId/duplicate", () => {

View file

@ -1,5 +1,8 @@
 const setup = require("./utilities")
-const { generateUserMetadataID } = require("../../../db/utils")
+const {
+  generateUserMetadataID,
+  getGlobalIDFromUserMetadataID,
+} = require("../../../db/utils")
 describe("/authenticate", () => {
   let request = setup.getRequest()
@ -20,5 +23,16 @@
       .expect(200)
     expect(res.body._id).toEqual(generateUserMetadataID(config.user._id))
   })
+
+  it("should contain the global user ID", async () => {
+    const res = await request
+      .get(`/api/self`)
+      .set(config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+    expect(res.body.globalId).toEqual(
+      getGlobalIDFromUserMetadataID(config.user._id)
+    )
+  })
   })
 })

View file

@ -4,14 +4,12 @@ import { getCachedVariable } from "../../../threads/utils"
import { context, events } from "@budibase/backend-core" import { context, events } from "@budibase/backend-core"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import tk from "timekeeper" import { generator } from "@budibase/backend-core/tests"
import { mocks } from "@budibase/backend-core/tests"
import { import {
Datasource, Datasource,
FieldSchema, FieldSchema,
BBReferenceFieldSubType, BBReferenceFieldSubType,
FieldType, FieldType,
QueryPreview,
RelationshipType, RelationshipType,
SourceName, SourceName,
Table, Table,
@ -21,36 +19,34 @@ import {
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../tests/utilities/structures"
-tk.freeze(mocks.date.MOCK_DATE)
-
-let { basicDatasource } = setup.structures
-
 describe("/datasources", () => {
-  let request = setup.getRequest()
-  let config = setup.getConfig()
-  let datasource: any
+  const config = setup.getConfig()
+  let datasource: Datasource
+
+  beforeAll(async () => {
+    await config.init()
+  })
   afterAll(setup.afterAll)
-  async function setupTest() {
-    await config.init()
-    datasource = await config.createDatasource()
+  beforeEach(async () => {
+    datasource = await config.api.datasource.create({
+      type: "datasource",
+      name: "Test",
+      source: SourceName.POSTGRES,
+      config: {},
+    })
     jest.clearAllMocks()
-  }
-
-  beforeAll(setupTest)
+  })
   describe("create", () => {
     it("should create a new datasource", async () => {
-      const res = await request
-        .post(`/api/datasources`)
-        .send(basicDatasource())
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-
-      expect(res.body.datasource.name).toEqual("Test")
-      expect(res.body.errors).toEqual({})
+      const ds = await config.api.datasource.create({
+        type: "datasource",
+        name: "Test",
+        source: SourceName.POSTGRES,
+        config: {},
+      })
+
+      expect(ds.name).toEqual("Test")
       expect(events.datasource.created).toHaveBeenCalledTimes(1)
     })
@ -72,88 +68,71 @@
     })
   })
-  describe("update", () => {
-    it("should update an existing datasource", async () => {
-      datasource.name = "Updated Test"
-      const res = await request
-        .put(`/api/datasources/${datasource._id}`)
-        .send(datasource)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-
-      expect(res.body.datasource.name).toEqual("Updated Test")
-      expect(res.body.errors).toBeUndefined()
-      expect(events.datasource.updated).toHaveBeenCalledTimes(1)
-    })
-
-    describe("dynamic variables", () => {
-      async function preview(
-        datasource: any,
-        fields: { path: string; queryString: string }
-      ) {
-        const queryPreview: QueryPreview = {
-          fields,
-          datasourceId: datasource._id,
-          parameters: [],
-          transformer: null,
-          queryVerb: "read",
-          name: datasource.name,
-          schema: {},
-          readable: true,
-        }
-        return config.api.query.preview(queryPreview)
-      }
-
-      it("should invalidate changed or removed variables", async () => {
-        const { datasource, query } = await config.dynamicVariableDatasource()
-        // preview once to cache variables
-        await preview(datasource, {
-          path: "www.example.com",
-          queryString: "test={{ variable3 }}",
-        })
-        // check variables in cache
-        let contents = await getCachedVariable(query._id!, "variable3")
-        expect(contents.rows.length).toEqual(1)
-
-        // update the datasource to remove the variables
-        datasource.config!.dynamicVariables = []
-        const res = await request
-          .put(`/api/datasources/${datasource._id}`)
-          .send(datasource)
-          .set(config.defaultHeaders())
-          .expect("Content-Type", /json/)
-          .expect(200)
-        expect(res.body.errors).toBeUndefined()
-
-        // check variables no longer in cache
-        contents = await getCachedVariable(query._id!, "variable3")
-        expect(contents).toBe(null)
-      })
-    })
-  })
+  describe("dynamic variables", () => {
+    it("should invalidate changed or removed variables", async () => {
+      let datasource = await config.api.datasource.create({
+        type: "datasource",
+        name: "Rest",
+        source: SourceName.REST,
+        config: {},
+      })
+
+      const query = await config.api.query.save({
+        datasourceId: datasource._id!,
+        fields: {
+          path: "www.google.com",
+        },
+        parameters: [],
+        transformer: null,
+        queryVerb: "read",
+        name: datasource.name!,
+        schema: {},
+        readable: true,
+      })
+
+      datasource = await config.api.datasource.update({
+        ...datasource,
+        config: {
+          dynamicVariables: [
+            {
+              queryId: query._id,
+              name: "variable3",
+              value: "{{ data.0.[value] }}",
+            },
+          ],
+        },
+      })
+
+      // preview once to cache variables
+      await config.api.query.preview({
+        fields: {
+          path: "www.example.com",
+          queryString: "test={{ variable3 }}",
+        },
+        datasourceId: datasource._id!,
+        parameters: [],
+        transformer: null,
+        queryVerb: "read",
+        name: datasource.name!,
+        schema: {},
+        readable: true,
+      })
+
+      // check variables in cache
+      let contents = await getCachedVariable(query._id!, "variable3")
+      expect(contents.rows.length).toEqual(1)
+
+      // update the datasource to remove the variables
+      datasource.config!.dynamicVariables = []
+      await config.api.datasource.update(datasource)
+
+      // check variables no longer in cache
+      contents = await getCachedVariable(query._id!, "variable3")
+      expect(contents).toBe(null)
+    })
+  })
describe("fetch", () => { describe("permissions", () => {
beforeAll(setupTest)
it("returns all the datasources from the server", async () => {
const res = await request
.get(`/api/datasources`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const datasources = res.body
// remove non-deterministic fields
for (let source of datasources) {
delete source._id
delete source._rev
}
expect(datasources).toMatchSnapshot()
})
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
await checkBuilderEndpoint({ await checkBuilderEndpoint({
config, config,
@ -161,41 +140,8 @@ describe("/datasources", () => {
url: `/api/datasources`, url: `/api/datasources`,
}) })
}) })
})
describe("find", () => { it("should apply authorization to delete endpoint", async () => {
it("should be able to find a datasource", async () => {
const res = await request
.get(`/api/datasources/${datasource._id}`)
.set(config.defaultHeaders())
.expect(200)
expect(res.body._rev).toBeDefined()
expect(res.body._id).toEqual(datasource._id)
})
})
describe("destroy", () => {
beforeAll(setupTest)
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.createQuery()
await request
.delete(`/api/datasources/${datasource._id}/${datasource._rev}`)
.set(config.defaultHeaders())
.expect(200)
const res = await request
.get(`/api/datasources`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.length).toEqual(1)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
it("should apply authorization to endpoint", async () => {
await checkBuilderEndpoint({ await checkBuilderEndpoint({
config, config,
method: "DELETE", method: "DELETE",
@ -204,175 +150,287 @@
     })
   })
-  describe("check secret replacement", () => {
-    async function makeDatasource() {
-      datasource = basicDatasource()
-      datasource.datasource.config.password = "testing"
-      const res = await request
-        .post(`/api/datasources`)
-        .send(datasource)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      return res.body.datasource
-    }
-
-    it("should save a datasource with password", async () => {
-      const datasource = await makeDatasource()
-      expect(datasource.config.password).toBe("--secret-value--")
-    })
-
-    it("should not the password on update with the --secret-value--", async () => {
-      const datasource = await makeDatasource()
-      await request
-        .put(`/api/datasources/${datasource._id}`)
-        .send(datasource)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      await context.doInAppContext(config.getAppId(), async () => {
-        const dbDatasource: any = await sdk.datasources.get(datasource._id)
-        expect(dbDatasource.config.password).toBe("testing")
-      })
-    })
-  })
-
   describe.each([
     [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
     [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
     [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
     [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-  ])("fetch schema (%s)", (_, dsProvider) => {
-    beforeAll(async () => {
-      datasource = await config.api.datasource.create(await dsProvider)
-    })
-
-    it("fetching schema will not drop tables or columns", async () => {
-      const datasourceId = datasource!._id!
-
-      const simpleTable = await config.api.table.save(
-        tableForDatasource(datasource, {
-          name: "simple",
-          schema: {
-            name: {
-              name: "name",
-              type: FieldType.STRING,
-            },
-          },
-        })
-      )
-
-      const fullSchema: {
-        [type in SupportedSqlTypes]: FieldSchema & { type: type }
-      } = {
-        [FieldType.STRING]: {
-          name: "string",
-          type: FieldType.STRING,
-          constraints: {
-            presence: true,
-          },
-        },
-        [FieldType.LONGFORM]: {
-          name: "longform",
-          type: FieldType.LONGFORM,
-        },
-        [FieldType.OPTIONS]: {
-          name: "options",
-          type: FieldType.OPTIONS,
-          constraints: {
-            presence: { allowEmpty: false },
-          },
-        },
-        [FieldType.NUMBER]: {
-          name: "number",
-          type: FieldType.NUMBER,
-        },
-        [FieldType.BOOLEAN]: {
-          name: "boolean",
-          type: FieldType.BOOLEAN,
-        },
-        [FieldType.ARRAY]: {
-          name: "array",
-          type: FieldType.ARRAY,
-        },
-        [FieldType.DATETIME]: {
-          name: "datetime",
-          type: FieldType.DATETIME,
-          dateOnly: true,
-          timeOnly: false,
-        },
-        [FieldType.LINK]: {
-          name: "link",
-          type: FieldType.LINK,
-          tableId: simpleTable._id!,
-          relationshipType: RelationshipType.ONE_TO_MANY,
-          fieldName: "link",
-        },
-        [FieldType.FORMULA]: {
-          name: "formula",
-          type: FieldType.FORMULA,
-          formula: "any formula",
-        },
-        [FieldType.BARCODEQR]: {
-          name: "barcodeqr",
-          type: FieldType.BARCODEQR,
-        },
-        [FieldType.BIGINT]: {
-          name: "bigint",
-          type: FieldType.BIGINT,
-        },
-        [FieldType.BB_REFERENCE]: {
-          name: "bb_reference",
-          type: FieldType.BB_REFERENCE,
-          subtype: BBReferenceFieldSubType.USER,
-        },
-        [FieldType.BB_REFERENCE_SINGLE]: {
-          name: "bb_reference_single",
-          type: FieldType.BB_REFERENCE_SINGLE,
-          subtype: BBReferenceFieldSubType.USER,
-        },
-      }
-
-      await config.api.table.save(
-        tableForDatasource(datasource, {
-          name: "full",
-          schema: fullSchema,
-        })
-      )
-
-      const persisted = await config.api.datasource.get(datasourceId)
-      await config.api.datasource.fetchSchema(datasourceId)
-
-      const updated = await config.api.datasource.get(datasourceId)
-      const expected: Datasource = {
-        ...persisted,
-        entities:
-          persisted?.entities &&
-          Object.entries(persisted.entities).reduce<Record<string, Table>>(
-            (acc, [tableName, table]) => {
-              acc[tableName] = {
-                ...table,
-                primaryDisplay: expect.not.stringMatching(
-                  new RegExp(`^${table.primaryDisplay || ""}$`)
-                ),
-                schema: Object.entries(table.schema).reduce<TableSchema>(
-                  (acc, [fieldName, field]) => {
-                    acc[fieldName] = expect.objectContaining({
-                      ...field,
-                    })
-                    return acc
-                  },
-                  {}
-                ),
-              }
-              return acc
-            },
-            {}
-          ),
-
-        _rev: expect.any(String),
-      }
-      expect(updated).toEqual(expected)
-    })
+  ])("%s", (_, dsProvider) => {
+    let rawDatasource: Datasource
+    beforeEach(async () => {
+      rawDatasource = await dsProvider
+      datasource = await config.api.datasource.create(rawDatasource)
+    })
+
+    describe("get", () => {
+      it("should be able to get a datasource", async () => {
+        const ds = await config.api.datasource.get(datasource._id!)
+        expect(ds._id).toEqual(datasource._id)
+        expect(ds._rev).toBeDefined()
+      })
+
+      it("should not return database password", async () => {
+        const ds = await config.api.datasource.get(datasource._id!)
+        expect(ds.config!.password).toBe("--secret-value--")
+      })
+    })
+
+    describe("list", () => {
+      it("returns all the datasources", async () => {
+        const datasources = await config.api.datasource.fetch()
+        expect(datasources).toContainEqual(expect.objectContaining(datasource))
+      })
+    })
+
+    describe("put", () => {
+      it("should update an existing datasource", async () => {
+        const newName = generator.guid()
+        datasource.name = newName
+        const updatedDs = await config.api.datasource.update(datasource)
+        expect(updatedDs.name).toEqual(newName)
+        expect(events.datasource.updated).toHaveBeenCalledTimes(1)
+      })
+
+      it("should not overwrite database password with --secret-value--", async () => {
+        const password = await context.doInAppContext(
+          config.getAppId(),
+          async () => {
+            const ds = await sdk.datasources.get(datasource._id!)
+            return ds.config!.password
+          }
+        )
+
+        expect(password).not.toBe("--secret-value--")
+
+        const ds = await config.api.datasource.get(datasource._id!)
+        expect(ds.config!.password).toBe("--secret-value--")
+
+        await config.api.datasource.update(
+          await config.api.datasource.get(datasource._id!)
+        )
+
+        const newPassword = await context.doInAppContext(
+          config.getAppId(),
+          async () => {
+            const ds = await sdk.datasources.get(datasource._id!)
+            return ds.config!.password
+          }
+        )
+
+        expect(newPassword).not.toBe("--secret-value--")
+        expect(newPassword).toBe(password)
+      })
+    })
+
+    describe("destroy", () => {
+      it("deletes queries for the datasource after deletion and returns a success message", async () => {
+        await config.api.query.save({
+          datasourceId: datasource._id!,
+          name: "Test Query",
+          parameters: [],
+          fields: {},
+          schema: {},
+          queryVerb: "read",
+          transformer: null,
+          readable: true,
+        })
+
+        await config.api.datasource.delete(datasource)
+        const datasources = await config.api.datasource.fetch()
+        expect(datasources).not.toContainEqual(
+          expect.objectContaining(datasource)
+        )
+        expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
+      })
+    })
+
+    describe("schema", () => {
+      it("fetching schema will not drop tables or columns", async () => {
+        const datasourceId = datasource!._id!
+
+        const simpleTable = await config.api.table.save(
+          tableForDatasource(datasource, {
+            name: "simple",
+            schema: {
+              name: {
+                name: "name",
+                type: FieldType.STRING,
+              },
+            },
+          })
+        )
+
+        const stringName = "string"
+        const fullSchema: {
+          [type in SupportedSqlTypes]: FieldSchema & { type: type }
+        } = {
+          [FieldType.STRING]: {
+            name: stringName,
+            type: FieldType.STRING,
+            constraints: {
+              presence: true,
+            },
+          },
+          [FieldType.LONGFORM]: {
+            name: "longform",
+            type: FieldType.LONGFORM,
+          },
+          [FieldType.OPTIONS]: {
+            name: "options",
+            type: FieldType.OPTIONS,
+            constraints: {
+              presence: { allowEmpty: false },
+            },
+          },
+          [FieldType.NUMBER]: {
+            name: "number",
+            type: FieldType.NUMBER,
+          },
+          [FieldType.BOOLEAN]: {
+            name: "boolean",
+            type: FieldType.BOOLEAN,
+          },
+          [FieldType.ARRAY]: {
+            name: "array",
+            type: FieldType.ARRAY,
+          },
+          [FieldType.DATETIME]: {
+            name: "datetime",
+            type: FieldType.DATETIME,
+            dateOnly: true,
+            timeOnly: false,
+          },
+          [FieldType.LINK]: {
+            name: "link",
+            type: FieldType.LINK,
+            tableId: simpleTable._id!,
+            relationshipType: RelationshipType.ONE_TO_MANY,
+            fieldName: "link",
+          },
+          [FieldType.FORMULA]: {
+            name: "formula",
+            type: FieldType.FORMULA,
+            formula: "any formula",
+          },
+          [FieldType.BARCODEQR]: {
+            name: "barcodeqr",
+            type: FieldType.BARCODEQR,
+          },
+          [FieldType.BIGINT]: {
+            name: "bigint",
+            type: FieldType.BIGINT,
+          },
+          [FieldType.BB_REFERENCE]: {
+            name: "bb_reference",
+            type: FieldType.BB_REFERENCE,
+            subtype: BBReferenceFieldSubType.USER,
+          },
+          [FieldType.BB_REFERENCE_SINGLE]: {
+            name: "bb_reference_single",
+            type: FieldType.BB_REFERENCE_SINGLE,
+            subtype: BBReferenceFieldSubType.USER,
+          },
+        }
+
+        await config.api.table.save(
+          tableForDatasource(datasource, {
+            name: "full",
+            schema: fullSchema,
+          })
+        )
+
+        const persisted = await config.api.datasource.get(datasourceId)
+        await config.api.datasource.fetchSchema({ datasourceId })
+
+        const updated = await config.api.datasource.get(datasourceId)
+        const expected: Datasource = {
+          ...persisted,
+          entities:
+            persisted?.entities &&
+            Object.entries(persisted.entities).reduce<Record<string, Table>>(
+              (acc, [tableName, table]) => {
+                acc[tableName] = {
+                  ...table,
+                  primaryDisplay: expect.not.stringMatching(
+                    new RegExp(`^${table.primaryDisplay || ""}$`)
+                  ),
+                  schema: Object.entries(table.schema).reduce<TableSchema>(
+                    (acc, [fieldName, field]) => {
+                      // the constraint will be unset - as the DB doesn't recognise it as not null
+                      if (fieldName === stringName) {
+                        field.constraints = {}
+                      }
+                      acc[fieldName] = expect.objectContaining({
+                        ...field,
+                      })
+                      return acc
+                    },
+                    {}
+                  ),
+                }
+                return acc
+              },
+              {}
+            ),
+
+          _rev: expect.any(String),
+          updatedAt: expect.any(String),
+        }
+        expect(updated).toEqual(expected)
+      })
+    })
+
+    describe("verify", () => {
+      it("should be able to verify the connection", async () => {
+        await config.api.datasource.verify(
+          {
+            datasource: rawDatasource,
+          },
+          {
+            body: {
+              connected: true,
+            },
+          }
+        )
+      })
+
+      it("should state an invalid datasource cannot connect", async () => {
+        await config.api.datasource.verify(
+          {
+            datasource: {
+              ...rawDatasource,
+              config: {
+                ...rawDatasource.config,
+                password: "wrongpassword",
+              },
+            },
+          },
+          {
+            body: {
+              connected: false,
+              error: /.*/, // error message differs between databases
+            },
+          }
+        )
+      })
+    })
+
+    describe("info", () => {
+      it("should fetch information about postgres datasource", async () => {
+        const table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            schema: {
+              name: {
+                name: "name",
+                type: FieldType.STRING,
+              },
+            },
+          })
+        )
+
+        const info = await config.api.datasource.info(datasource)
+        expect(info.tableNames).toContain(table.name)
+      })
+    })
   })
 })

View file

@ -38,7 +38,7 @@
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("/rows (%s)", (__, dsProvider) => {
+])("/rows (%s)", (providerType, dsProvider) => {
   const isInternal = dsProvider === undefined
   const config = setup.getConfig()
@ -693,6 +693,49 @@
     })
     expect(resp.relationship.length).toBe(1)
   })
!isInternal &&
// TODO: SQL is having issues creating composite keys
providerType !== DatabaseName.SQL_SERVER &&
it("should support updating fields that are part of a composite key", async () => {
const tableRequest = saveTableRequest({
primary: ["number", "string"],
schema: {
string: {
type: FieldType.STRING,
name: "string",
},
number: {
type: FieldType.NUMBER,
name: "number",
},
},
})
delete tableRequest.schema.id
const table = await config.api.table.save(tableRequest)
const stringValue = generator.word()
const naturalValue = generator.integer({ min: 0, max: 1000 })
const existing = await config.api.row.save(table._id!, {
string: stringValue,
number: naturalValue,
})
expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`)
const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
string: stringValue,
number: 1500,
})
expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`)
})
   })
   describe("destroy", () => {

View file

@ -22,10 +22,7 @@ import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import merge from "lodash/merge" import merge from "lodash/merge"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { roles } from "@budibase/backend-core" import { db, roles } from "@budibase/backend-core"
import * as schemaUtils from "../../../utilities/schema"
jest.mock("../../../utilities/schema")
describe.each([ describe.each([
["internal", undefined], ["internal", undefined],
@ -120,6 +117,9 @@
       const newView: CreateViewRequest = {
         name: generator.name(),
         tableId: table._id!,
+        schema: {
+          id: { visible: true },
+        },
       }
       const res = await config.api.viewV2.create(newView)
@ -134,7 +134,7 @@
       const newView: Required<CreateViewRequest> = {
         name: generator.name(),
         tableId: table._id!,
-        primaryDisplay: generator.word(),
+        primaryDisplay: "id",
         query: [
           {
             operator: SearchFilterOperator.EQUAL,
@ -148,6 +148,7 @@
           type: SortType.STRING,
         },
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
           },
@ -158,6 +159,7 @@
       expect(res).toEqual({
         ...newView,
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
           },
@ -172,6 +174,11 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: {
+            name: "id",
+            type: FieldType.NUMBER,
+            visible: true,
+          },
           Price: {
             name: "Price",
             type: FieldType.NUMBER,
@ -193,6 +200,7 @@
       expect(createdView).toEqual({
         ...newView,
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
             order: 1,
@ -209,6 +217,12 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: {
+            name: "id",
+            type: FieldType.AUTO,
+            autocolumn: true,
+            visible: true,
+          },
           Price: {
             name: "Price",
             type: FieldType.NUMBER,
@ -230,8 +244,9 @@
       const newView: CreateViewRequest = {
         name: generator.name(),
         tableId: table._id!,
-        primaryDisplay: generator.word(),
+        primaryDisplay: "id",
         schema: {
+          id: { visible: true },
           Price: { visible: true },
           Category: { visible: false },
         },
@ -241,6 +256,7 @@
       expect(res).toEqual({
         ...newView,
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
           },
@ -255,6 +271,7 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: { visible: true },
           nonExisting: {
             visible: true,
           },
@ -293,6 +310,7 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: { visible: true },
           name: {
             visible: true,
             readonly: true,
@ -306,6 +324,7 @@
       const res = await config.api.viewV2.create(newView)
       expect(res.schema).toEqual({
+        id: { visible: true },
         name: {
           visible: true,
           readonly: true,
@ -318,15 +337,13 @@
     })
     it("required fields cannot be marked as readonly", async () => {
-      const isRequiredSpy = jest.spyOn(schemaUtils, "isRequired")
-      isRequiredSpy.mockReturnValueOnce(true)
-
       const table = await config.api.table.save(
         saveTableRequest({
           schema: {
             name: {
               name: "name",
               type: FieldType.STRING,
+              constraints: { presence: true },
             },
             description: {
               name: "description",
@ -340,7 +357,9 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: { visible: true },
           name: {
+            visible: true,
             readonly: true,
           },
         },
@ -350,7 +369,7 @@
         status: 400,
         body: {
           message:
-            'Field "name" cannot be readonly as it is a required field',
+            'You can\'t make "name" readonly because it is a required field.',
           status: 400,
         },
       })
@ -376,6 +395,7 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: { visible: true },
           name: {
             visible: false,
             readonly: true,
@ -414,6 +434,7 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: { visible: true },
           name: {
             visible: true,
             readonly: true,
@ -424,12 +445,84 @@
         await config.api.viewV2.create(newView, {
           status: 400,
           body: {
-            message: "Readonly fields are not enabled for your tenant",
+            message: "Readonly fields are not enabled",
             status: 400,
           },
         })
       })
     })
it("display fields must be visible", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
description: {
name: "description",
type: FieldType.STRING,
},
},
})
)
const newView: CreateViewRequest = {
name: generator.name(),
tableId: table._id!,
primaryDisplay: "name",
schema: {
id: { visible: true },
name: {
visible: false,
},
},
}
await config.api.viewV2.create(newView, {
status: 400,
body: {
message: 'You can\'t hide "name" because it is the display column.',
status: 400,
},
})
})
it("display fields can be readonly", async () => {
mocks.licenses.useViewReadonlyColumns()
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
description: {
name: "description",
type: FieldType.STRING,
},
},
})
)
const newView: CreateViewRequest = {
name: generator.name(),
tableId: table._id!,
primaryDisplay: "name",
schema: {
id: { visible: true },
name: {
visible: true,
readonly: true,
},
},
}
await config.api.viewV2.create(newView, {
status: 201,
})
})
   })
   describe("update", () => {
@ -441,6 +534,9 @@
       view = await config.api.viewV2.create({
         tableId: table._id!,
         name: generator.guid(),
+        schema: {
+          id: { visible: true },
+        },
       })
     })
@ -475,7 +571,7 @@
         id: view.id,
         tableId,
         name: view.name,
-        primaryDisplay: generator.word(),
+        primaryDisplay: "Price",
         query: [
           {
             operator: SearchFilterOperator.EQUAL,
@ -489,6 +585,7 @@
           type: SortType.STRING,
         },
         schema: {
+          id: { visible: true },
           Category: {
             visible: false,
           },
@ -506,7 +603,7 @@
         schema: {
           ...table.schema,
           id: expect.objectContaining({
-            visible: false,
+            visible: true,
           }),
           Category: expect.objectContaining({
             visible: false,
@ -603,6 +700,9 @@
       const anotherView = await config.api.viewV2.create({
         tableId: table._id!,
         name: generator.guid(),
+        schema: {
+          id: { visible: true },
+        },
       })
       const result = await config
         .request!.put(`/api/v2/views/${anotherView.id}`)
@ -621,6 +721,7 @@
       const updatedView = await config.api.viewV2.update({
         ...view,
         schema: {
+          ...view.schema,
           Price: {
             name: "Price",
             type: FieldType.NUMBER,
@ -640,6 +741,7 @@
       expect(updatedView).toEqual({
         ...view,
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
             order: 1,
@ -656,6 +758,7 @@
         {
           ...view,
           schema: {
+            ...view.schema,
             Price: {
               name: "Price",
               type: FieldType.NUMBER,
@ -679,6 +782,7 @@
       view = await config.api.viewV2.update({
         ...view,
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
             readonly: true,
@ -690,7 +794,7 @@
       await config.api.viewV2.update(view, {
         status: 400,
         body: {
-          message: "Readonly fields are not enabled for your tenant",
+          message: "Readonly fields are not enabled",
         },
       })
     })
@ -701,6 +805,7 @@
       view = await config.api.viewV2.update({
         ...view,
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
             readonly: true,
@ -715,6 +820,7 @@
       const res = await config.api.viewV2.update({
         ...view,
         schema: {
+          id: { visible: true },
           Price: {
             visible: true,
             readonly: false,
@ -725,6 +831,7 @@
         expect.objectContaining({
           ...view,
           schema: {
+            id: { visible: true },
             Price: {
               visible: true,
               readonly: false,
@ -733,6 +840,53 @@
           })
         )
       })
isInternal &&
it("updating schema will only validate modified field", async () => {
let view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
id: { visible: true },
Price: {
visible: true,
},
Category: { visible: true },
},
})
// Update the view to an invalid state
const tableToUpdate = await config.api.table.get(table._id!)
;(tableToUpdate.views![view.name] as ViewV2).schema!.id.visible = false
await db.getDB(config.appId!).put(tableToUpdate)
view = await config.api.viewV2.get(view.id)
await config.api.viewV2.update({
...view,
schema: {
...view.schema,
Price: {
visible: false,
},
},
})
expect(await config.api.viewV2.get(view.id)).toEqual(
expect.objectContaining({
schema: {
id: expect.objectContaining({
visible: false,
}),
Price: expect.objectContaining({
visible: false,
}),
Category: expect.objectContaining({
visible: true,
}),
},
})
)
})
   })
   describe("delete", () => {
@ -742,6 +896,9 @@
       view = await config.api.viewV2.create({
         tableId: table._id!,
         name: generator.guid(),
+        schema: {
+          id: { visible: true },
+        },
       })
     })
@ -764,6 +921,7 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: { visible: true },
           Price: { visible: false },
           Category: { visible: true },
         },
@ -786,6 +944,7 @@
         name: generator.name(),
         tableId: table._id!,
         schema: {
+          id: { visible: true },
           Price: { visible: true, readonly: true },
         },
       })
@ -821,6 +980,7 @@
         tableId: table._id!,
         name: generator.guid(),
         schema: {
+          id: { visible: true },
           Country: {
             visible: true,
           },
@ -855,6 +1015,7 @@
         tableId: table._id!,
         name: generator.guid(),
         schema: {
+          id: { visible: true },
           two: { visible: true },
         },
       })
@ -880,6 +1041,7 @@
         tableId: table._id!,
         name: generator.guid(),
         schema: {
+          id: { visible: true },
           one: { visible: true, readonly: true },
           two: { visible: true },
         },
@ -921,6 +1083,7 @@
         tableId: table._id!,
         name: generator.guid(),
         schema: {
+          id: { visible: true },
           one: { visible: true, readonly: true },
           two: { visible: true },
         },
@ -988,6 +1151,7 @@
         rows.map(r => ({
           _viewId: view.id,
           tableId: table._id,
+          id: r.id,
           _id: r._id,
           _rev: r._rev,
           ...(isInternal
@ -1028,6 +1192,7 @@
           },
         ],
         schema: {
+          id: { visible: true },
           two: { visible: true },
         },
       })
@ -1039,6 +1204,7 @@
         {
           _viewId: view.id,
           tableId: table._id,
+          id: two.id,
           two: two.two,
           _id: two._id,
           _rev: two._rev,
@ -1192,7 +1358,11 @@
     describe("sorting", () => {
       let table: Table
-      const viewSchema = { age: { visible: true }, name: { visible: true } }
+      const viewSchema = {
+        id: { visible: true },
+        age: { visible: true },
+        name: { visible: true },
+      }
       beforeAll(async () => {
         table = await config.api.table.save(
@ -1348,4 +1518,123 @@
       })
     })
   })
describe("updating table schema", () => {
describe("existing columns changed to required", () => {
beforeEach(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
})
it("allows updating when no views constrains the field", async () => {
await config.api.viewV2.create({
name: "view a",
tableId: table._id!,
schema: {
id: { visible: true },
name: { visible: true },
},
})
table = await config.api.table.get(table._id!)
await config.api.table.save(
{
...table,
schema: {
...table.schema,
name: {
name: "name",
type: FieldType.STRING,
constraints: { presence: { allowEmpty: false } },
},
},
},
{ status: 200 }
)
})
it("rejects if field is readonly in any view", async () => {
mocks.licenses.useViewReadonlyColumns()
await config.api.viewV2.create({
name: "view a",
tableId: table._id!,
schema: {
id: { visible: true },
name: {
visible: true,
readonly: true,
},
},
})
table = await config.api.table.get(table._id!)
await config.api.table.save(
{
...table,
schema: {
...table.schema,
name: {
name: "name",
type: FieldType.STRING,
constraints: { presence: true },
},
},
},
{
status: 400,
body: {
status: 400,
message:
'To make field "name" required, this field must be present and writable in views: view a.',
},
}
)
})
it("rejects if field is hidden in any view", async () => {
await config.api.viewV2.create({
name: "view a",
tableId: table._id!,
schema: { id: { visible: true } },
})
table = await config.api.table.get(table._id!)
await config.api.table.save(
{
...table,
schema: {
...table.schema,
name: {
name: "name",
type: FieldType.STRING,
constraints: { presence: true },
},
},
},
{
status: 400,
body: {
status: 400,
message:
'To make field "name" required, this field must be present and writable in views: view a.',
},
}
)
})
})
})
 })

View file

@ -1,51 +1,89 @@
 import { auth, permissions } from "@budibase/backend-core"
 import { DataSourceOperation } from "../../../constants"
-import { WebhookActionType } from "@budibase/types"
-import Joi from "joi"
-import { ValidSnippetNameRegex } from "@budibase/shared-core"
+import { Table, WebhookActionType } from "@budibase/types"
+import Joi, { CustomValidator } from "joi"
+import { ValidSnippetNameRegex, helpers } from "@budibase/shared-core"
+import sdk from "../../../sdk"
+
+const { isRequired } = helpers.schema
 const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
 const OPTIONAL_NUMBER = Joi.number().optional().allow(null)
 const OPTIONAL_BOOLEAN = Joi.boolean().optional().allow(null)
 const APP_NAME_REGEX = /^[\w\s]+$/
const validateViewSchemas: CustomValidator<Table> = (table, helpers) => {
if (table.views && Object.entries(table.views).length) {
const requiredFields = Object.entries(table.schema)
.filter(([_, v]) => isRequired(v.constraints))
.map(([key]) => key)
if (requiredFields.length) {
for (const view of Object.values(table.views)) {
if (!sdk.views.isV2(view)) {
continue
}
const editableViewFields = Object.entries(view.schema || {})
.filter(([_, f]) => f.visible && !f.readonly)
.map(([key]) => key)
const missingField = requiredFields.find(
f => !editableViewFields.includes(f)
)
if (missingField) {
return helpers.message({
custom: `To make field "${missingField}" required, this field must be present and writable in views: ${view.name}.`,
})
}
}
}
}
return table
}
 export function tableValidator() {
-  // prettier-ignore
-  return auth.joiValidator.body(Joi.object({
-    _id: OPTIONAL_STRING,
-    _rev: OPTIONAL_STRING,
-    type: OPTIONAL_STRING.valid("table", "internal", "external"),
-    primaryDisplay: OPTIONAL_STRING,
-    schema: Joi.object().required(),
-    name: Joi.string().required(),
-    views: Joi.object(),
-    rows: Joi.array(),
-  }).unknown(true))
+  return auth.joiValidator.body(
+    Joi.object({
+      _id: OPTIONAL_STRING,
+      _rev: OPTIONAL_STRING,
+      type: OPTIONAL_STRING.valid("table", "internal", "external"),
+      primaryDisplay: OPTIONAL_STRING,
+      schema: Joi.object().required(),
+      name: Joi.string().required(),
+      views: Joi.object(),
+      rows: Joi.array(),
+    })
+      .custom(validateViewSchemas)
+      .unknown(true),
+    { errorPrefix: "" }
+  )
 }
 export function nameValidator() {
-  // prettier-ignore
-  return auth.joiValidator.body(Joi.object({
-    name: OPTIONAL_STRING,
-  }))
+  return auth.joiValidator.body(
+    Joi.object({
+      name: OPTIONAL_STRING,
+    })
+  )
 }
 export function datasourceValidator() {
-  // prettier-ignore
-  return auth.joiValidator.body(Joi.object({
-    _id: Joi.string(),
-    _rev: Joi.string(),
-    type: OPTIONAL_STRING.allow("datasource_plus"),
-    relationships: Joi.array().items(Joi.object({
-      from: Joi.string().required(),
-      to: Joi.string().required(),
-      cardinality: Joi.valid("1:N", "1:1", "N:N").required()
-    })),
-  }).unknown(true))
+  return auth.joiValidator.body(
+    Joi.object({
+      _id: Joi.string(),
+      _rev: Joi.string(),
+      type: OPTIONAL_STRING.allow("datasource_plus"),
+      relationships: Joi.array().items(
+        Joi.object({
+          from: Joi.string().required(),
+          to: Joi.string().required(),
+          cardinality: Joi.valid("1:N", "1:1", "N:N").required(),
+        })
+      ),
+    }).unknown(true)
+  )
 }
 function filterObject() {
-  // prettier-ignore
   return Joi.object({
     string: Joi.object().optional(),
     fuzzy: Joi.object().optional(),
@ -62,17 +100,20 @@ function filterObject() {
 }
 export function internalSearchValidator() {
-  // prettier-ignore
-  return auth.joiValidator.body(Joi.object({
-    tableId: OPTIONAL_STRING,
-    query: filterObject(),
-    limit: OPTIONAL_NUMBER,
-    sort: OPTIONAL_STRING,
-    sortOrder: OPTIONAL_STRING,
-    sortType: OPTIONAL_STRING,
-    paginate: Joi.boolean(),
-    bookmark: Joi.alternatives().try(OPTIONAL_STRING, OPTIONAL_NUMBER).optional(),
-  }))
+  return auth.joiValidator.body(
+    Joi.object({
+      tableId: OPTIONAL_STRING,
+      query: filterObject(),
+      limit: OPTIONAL_NUMBER,
+      sort: OPTIONAL_STRING,
+      sortOrder: OPTIONAL_STRING,
+      sortType: OPTIONAL_STRING,
+      paginate: Joi.boolean(),
+      bookmark: Joi.alternatives()
+        .try(OPTIONAL_STRING, OPTIONAL_NUMBER)
+        .optional(),
+    })
+  )
 }
 export function externalSearchValidator() {
@ -94,92 +135,110 @@ export function externalSearchValidator() {
} }
export function datasourceQueryValidator() { export function datasourceQueryValidator() {
// prettier-ignore return auth.joiValidator.body(
return auth.joiValidator.body(Joi.object({ Joi.object({
endpoint: Joi.object({ endpoint: Joi.object({
datasourceId: Joi.string().required(), datasourceId: Joi.string().required(),
operation: Joi.string().required().valid(...Object.values(DataSourceOperation)), operation: Joi.string()
entityId: Joi.string().required(), .required()
}).required(), .valid(...Object.values(DataSourceOperation)),
resource: Joi.object({ entityId: Joi.string().required(),
fields: Joi.array().items(Joi.string()).optional(), }).required(),
}).optional(), resource: Joi.object({
body: Joi.object().optional(), fields: Joi.array().items(Joi.string()).optional(),
sort: Joi.object().optional(), }).optional(),
filters: filterObject().optional(), body: Joi.object().optional(),
paginate: Joi.object({ sort: Joi.object().optional(),
page: Joi.string().alphanum().optional(), filters: filterObject().optional(),
limit: Joi.number().optional(), paginate: Joi.object({
}).optional(), page: Joi.string().alphanum().optional(),
})) limit: Joi.number().optional(),
}).optional(),
})
)
} }
export function webhookValidator() { export function webhookValidator() {
// prettier-ignore return auth.joiValidator.body(
return auth.joiValidator.body(Joi.object({ Joi.object({
live: Joi.bool(), live: Joi.bool(),
      _id: OPTIONAL_STRING,
      _rev: OPTIONAL_STRING,
      name: Joi.string().required(),
      bodySchema: Joi.object().optional(),
      action: Joi.object({
        type: Joi.string().required().valid(WebhookActionType.AUTOMATION),
        target: Joi.string().required(),
      }).required(),
    }).unknown(true)
  )
}

export function roleValidator() {
  const permLevelArray = Object.values(permissions.PermissionLevel)
-  // prettier-ignore
  return auth.joiValidator.body(
    Joi.object({
      _id: OPTIONAL_STRING,
      _rev: OPTIONAL_STRING,
      name: Joi.string()
        .regex(/^[a-zA-Z0-9_]*$/)
        .required(),
      // this is the base permission ID (for now a built in)
      permissionId: Joi.string()
        .valid(...Object.values(permissions.BuiltinPermissionID))
        .required(),
      permissions: Joi.object()
        .pattern(/.*/, [Joi.string().valid(...permLevelArray)])
        .optional(),
      inherits: OPTIONAL_STRING,
    }).unknown(true)
  )
}

export function permissionValidator() {
  const permLevelArray = Object.values(permissions.PermissionLevel)
-  // prettier-ignore
  return auth.joiValidator.params(
    Joi.object({
      level: Joi.string()
        .valid(...permLevelArray)
        .required(),
      resourceId: Joi.string(),
      roleId: Joi.string(),
    }).unknown(true)
  )
}

export function screenValidator() {
-  // prettier-ignore
  return auth.joiValidator.body(
    Joi.object({
      name: Joi.string().required(),
      showNavigation: OPTIONAL_BOOLEAN,
      width: OPTIONAL_STRING,
      routing: Joi.object({
        route: Joi.string().required(),
        roleId: Joi.string().required().allow(""),
        homeScreen: OPTIONAL_BOOLEAN,
      })
        .required()
        .unknown(true),
      props: Joi.object({
        _id: Joi.string().required(),
        _component: Joi.string().required(),
        _children: Joi.array().required(),
        _styles: Joi.object().required(),
        type: OPTIONAL_STRING,
        table: OPTIONAL_STRING,
        layoutId: OPTIONAL_STRING,
      })
        .required()
        .unknown(true),
    }).unknown(true)
  )
}

function generateStepSchema(allowStepTypes: string[]) {
-  // prettier-ignore
  return Joi.object({
    stepId: Joi.string().required(),
    id: Joi.string().required(),

@@ -189,33 +248,39 @@ function generateStepSchema(allowStepTypes: string[]) {
    icon: Joi.string().required(),
    params: Joi.object(),
    args: Joi.object(),
    type: Joi.string()
      .required()
      .valid(...allowStepTypes),
  }).unknown(true)
}

export function automationValidator(existing = false) {
-  // prettier-ignore
  return auth.joiValidator.body(
    Joi.object({
      _id: existing ? Joi.string().required() : OPTIONAL_STRING,
      _rev: existing ? Joi.string().required() : OPTIONAL_STRING,
      name: Joi.string().required(),
      type: Joi.string().valid("automation").required(),
      definition: Joi.object({
        steps: Joi.array()
          .required()
          .items(generateStepSchema(["ACTION", "LOGIC"])),
        trigger: generateStepSchema(["TRIGGER"]).allow(null),
      })
        .required()
        .unknown(true),
    }).unknown(true)
  )
}

export function applicationValidator(opts = { isCreate: true }) {
-  // prettier-ignore
  const base: any = {
    _id: OPTIONAL_STRING,
    _rev: OPTIONAL_STRING,
    url: OPTIONAL_STRING,
    template: Joi.object({
      templateString: OPTIONAL_STRING,
    }),
  }

  const appNameValidator = Joi.string()
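These validators all produce middleware in the same way, so one wiring example covers them. A minimal sketch, assuming auth.joiValidator.body() returns Koa middleware (as the calls above imply); the router and route path are illustrative and not part of this commit:

import Router from "@koa/router"
import { roleValidator } from "./utils/validators"

const router = new Router()
// a request body that fails the Joi schema is rejected with a 400
// before the handler runs
router.post("/api/roles", roleValidator(), async ctx => {
  ctx.body = ctx.request.body
})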
View file

@@ -33,7 +33,7 @@ export async function getAppMigrationVersion(appId: string): Promise<string> {
  let version
  try {
    metadata = await getFromDB(appId)
-    version = metadata.version
+    version = metadata.version || ""
  } catch (err: any) {
    if (err.status !== 404) {
      throw err
View file

@@ -10,14 +10,25 @@ export * from "./appMigrationMetadata"
export type AppMigration = {
  id: string
  func: () => Promise<void>
+  // disabled so that by default all migrations listed are enabled
+  disabled?: boolean
}

-export const getLatestMigrationId = () =>
-  MIGRATIONS.map(m => m.id)
-    .sort()
-    .reverse()[0]
+export function getLatestEnabledMigrationId(migrations?: AppMigration[]) {
+  let latestMigrationId: string | undefined
+  for (let migration of migrations || MIGRATIONS) {
+    // if a migration is disabled, all migrations after it are disabled
+    if (migration.disabled) {
+      break
+    }
+    latestMigrationId = migration.id
+  }
+  return latestMigrationId
+}

-const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""
+function getTimestamp(versionId: string) {
+  return versionId?.split("_")[0] || ""
+}

export async function checkMissingMigrations(
  ctx: UserCtx,

@@ -25,17 +36,18 @@ export async function checkMissingMigrations(
  appId: string
) {
  const currentVersion = await getAppMigrationVersion(appId)
-  const latestMigration = getLatestMigrationId()
+  const latestMigration = getLatestEnabledMigrationId()

-  if (getTimestamp(currentVersion) < getTimestamp(latestMigration)) {
+  if (
+    latestMigration &&
+    getTimestamp(currentVersion) < getTimestamp(latestMigration)
+  ) {
    await queue.add(
      {
        appId,
      },
      {
        jobId: `${appId}_${latestMigration}`,
-        removeOnComplete: true,
-        removeOnFail: true,
      }
    )
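The string comparison in checkMissingMigrations is safe because migration IDs are prefixed with a 14-digit timestamp, so lexicographic order matches chronological order. For example:

const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""

// "20240101000000" < "20240604153647" both lexicographically and
// chronologically, so an app on the older version gets a job queued
getTimestamp("20240101000000_old-version") <
  getTimestamp("20240604153647_initial_sqs") // true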
View file

@@ -1,7 +1,15 @@
// This file should never be manually modified, use `yarn add-app-migration` in order to add a new one
+import env from "../environment"
import { AppMigration } from "."

+import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"
+
+// Migrations will be executed sorted by ID
export const MIGRATIONS: AppMigration[] = [
-  // Migrations will be executed sorted by id
+  {
+    id: "20240604153647_initial_sqs",
+    func: m20240604153647_initial_sqs,
+    disabled: !env.SQS_SEARCH_ENABLE,
+  },
]
View file

@@ -0,0 +1,52 @@
import { context } from "@budibase/backend-core"
import { allLinkDocs } from "../../db/utils"
import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
import sdk from "../../sdk"
import env from "../../environment"

const migration = async () => {
  const linkDocs = await allLinkDocs()

  const docsToUpdate = []
  for (const linkDoc of linkDocs) {
    if (linkDoc.tableId) {
      // it already has the junction table ID - no need to migrate
      continue
    }
    // rebuild the link doc so that it picks up the junction table ID
    const newLink = new LinkDocumentImpl(
      linkDoc.doc1.tableId,
      linkDoc.doc1.fieldName,
      linkDoc.doc1.rowId,
      linkDoc.doc2.tableId,
      linkDoc.doc2.fieldName,
      linkDoc.doc2.rowId
    )
    newLink._id = linkDoc._id!
    newLink._rev = linkDoc._rev
    docsToUpdate.push(newLink)
  }

  const db = context.getAppDB()
  if (docsToUpdate.length) {
    await db.bulkDocs(docsToUpdate)
  }

  // at the end make sure design doc is ready
  await sdk.tables.sqs.syncDefinition()

  // only do initial search if environment is using SQS already
  // initial search makes sure that all the indexes have been created
  // and are ready to use, avoiding any initial waits for large tables
  if (env.SQS_SEARCH_ENABLE) {
    const tables = await sdk.tables.getAllInternalTables()
    // do these one by one - running in parallel could cause problems
    for (let table of tables) {
      await db.sql(`select * from ${table._id} limit 1`)
    }
  }
}

export default migration
View file

@@ -0,0 +1,116 @@
import * as setup from "../../../api/routes/tests/utilities"
import { basicTable } from "../../../tests/utilities/structures"
import {
  db as dbCore,
  SQLITE_DESIGN_DOC_ID,
  context,
} from "@budibase/backend-core"
import {
  LinkDocument,
  DocumentType,
  SQLiteDefinition,
  SQLiteType,
} from "@budibase/types"
import {
  generateJunctionTableID,
  generateLinkID,
  generateRowID,
} from "../../../db/utils"
import migration from "../20240604153647_initial_sqs"

const config = setup.getConfig()
let tableId: string

function oldLinkDocInfo() {
  const tableId1 = `${DocumentType.TABLE}_a`,
    tableId2 = `${DocumentType.TABLE}_b`
  return {
    tableId1,
    tableId2,
    rowId1: generateRowID(tableId1, "b"),
    rowId2: generateRowID(tableId2, "a"),
    col1: "columnB",
    col2: "columnA",
  }
}

function oldLinkDocID() {
  const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
  return generateLinkID(tableId1, tableId2, rowId1, rowId2, col1, col2)
}

function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
  const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
  return {
    type: "link",
    _id: oldLinkDocID(),
    doc1: {
      tableId: tableId1,
      fieldName: col1,
      rowId: rowId1,
    },
    doc2: {
      tableId: tableId2,
      fieldName: col2,
      rowId: rowId2,
    },
  }
}

async function sqsDisabled(cb: () => Promise<void>) {
  await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
}

async function sqsEnabled(cb: () => Promise<void>) {
  await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
}

beforeAll(async () => {
  await sqsDisabled(async () => {
    await config.init()
    const table = await config.api.table.save(basicTable())
    tableId = table._id!
    const db = dbCore.getDB(config.appId!)
    // old link document
    await db.put(oldLinkDocument())
  })
})

describe("SQS migration", () => {
  it("test migration runs as expected against an older DB", async () => {
    const db = dbCore.getDB(config.appId!)
    // confirm nothing exists initially
    await sqsDisabled(async () => {
      let error: any | undefined
      try {
        await db.get(SQLITE_DESIGN_DOC_ID)
      } catch (err: any) {
        error = err
      }
      expect(error).toBeDefined()
      expect(error.status).toBe(404)
    })
    await sqsEnabled(async () => {
      await context.doInAppContext(config.appId!, async () => {
        await migration()
      })
      const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
      expect(designDoc.sql.tables).toBeDefined()
      const mainTableDef = designDoc.sql.tables[tableId]
      expect(mainTableDef).toBeDefined()
      expect(mainTableDef.fields.name).toEqual(SQLiteType.TEXT)
      expect(mainTableDef.fields.description).toEqual(SQLiteType.TEXT)

      const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
      const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
      expect(linkDoc.tableId).toEqual(
        generateJunctionTableID(tableId1, tableId2)
      )
      // should have swapped the documents
      expect(linkDoc.doc1.tableId).toEqual(tableId2)
      expect(linkDoc.doc1.rowId).toEqual(rowId2)
      expect(linkDoc.doc2.tableId).toEqual(tableId1)
      expect(linkDoc.doc2.rowId).toEqual(rowId1)
    })
  })
})
View file

@@ -1,4 +1,4 @@
-import { context, locks } from "@budibase/backend-core"
+import { context, locks, logging } from "@budibase/backend-core"
import { LockName, LockType } from "@budibase/types"
import {

@@ -12,47 +12,56 @@ export async function processMigrations(
  migrations: AppMigration[]
) {
  console.log(`Processing app migration for "${appId}"`)
+  // have to wrap in context, this gets the tenant from the app ID
+  await context.doInAppContext(appId, async () => {
    await locks.doWithLock(
      {
        name: LockName.APP_MIGRATION,
        type: LockType.AUTO_EXTEND,
        resource: appId,
      },
      async () => {
+        try {
          await context.doInAppMigrationContext(appId, async () => {
            let currentVersion = await getAppMigrationVersion(appId)

            const pendingMigrations = migrations
              .filter(m => m.id > currentVersion)
              .sort((a, b) => a.id.localeCompare(b.id))

            const migrationIds = migrations.map(m => m.id).sort()

            let index = 0
            for (const { id, func } of pendingMigrations) {
              const expectedMigration =
                migrationIds[migrationIds.indexOf(currentVersion) + 1]

              if (expectedMigration !== id) {
-                throw `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
+                throw new Error(
+                  `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
+                )
              }

              const counter = `(${++index}/${pendingMigrations.length})`
              console.info(`Running migration ${id}... ${counter}`, {
                migrationId: id,
                appId,
              })
              await func()
              await updateAppMigrationMetadata({
                appId,
                version: id,
              })
              currentVersion = id
            }
          })
+        } catch (err) {
+          logging.logAlert("Failed to run app migration", err)
+          throw err
+        }
      }
    )

    console.log(`App migration for "${appId}" processed`)
+  })
}
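Stripped of the Budibase specifics, the concurrency pattern above is: take a per-app distributed lock, run the critical section, always release. A sketch under that assumption; acquireLock is a hypothetical stand-in for the backend-core lock client:

declare function acquireLock(
  key: string
): Promise<{ release(): Promise<void> }> // hypothetical helper

async function withAppMigrationLock<T>(
  appId: string,
  fn: () => Promise<T>
): Promise<T> {
  const lock = await acquireLock(`app_migration:${appId}`)
  try {
    // only one worker can be in here per app, so migrations never interleave
    return await fn()
  } finally {
    await lock.release()
  }
}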
View file

@@ -1,9 +1,23 @@
-import { queue } from "@budibase/backend-core"
+import { queue, logging } from "@budibase/backend-core"
import { Job } from "bull"
import { MIGRATIONS } from "./migrations"
import { processMigrations } from "./migrationsProcessor"

-const appMigrationQueue = queue.createQueue(queue.JobQueue.APP_MIGRATION)
+const MAX_ATTEMPTS = 3
+
+const appMigrationQueue = queue.createQueue(queue.JobQueue.APP_MIGRATION, {
+  jobOptions: {
+    attempts: MAX_ATTEMPTS,
+    removeOnComplete: true,
+    removeOnFail: true,
+  },
+  maxStalledCount: MAX_ATTEMPTS,
+  removeStalledCb: async (job: Job) => {
+    logging.logAlert(
+      `App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
+    )
+  },
+})
appMigrationQueue.process(processMessage)

async function processMessage(job: Job) {
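For reference, these options map onto plain bull semantics: attempts controls automatic retries, and removeOnComplete/removeOnFail keep finished jobs out of Redis. A standalone sketch (queue name and payload are made up):

import Queue from "bull"

const exampleQueue = new Queue("app-migration-example")
exampleQueue.process(async job => {
  // run the migration for job.data.appId here
})
// retried up to 3 times; the job is removed from Redis either way
exampleQueue.add(
  { appId: "app_123" },
  { attempts: 3, removeOnComplete: true, removeOnFail: true }
)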
View file

@@ -1,6 +1,7 @@
import { Header } from "@budibase/backend-core"
import * as setup from "../../api/routes/tests/utilities"
import * as migrations from "../migrations"
+import { AppMigration, getLatestEnabledMigrationId } from "../index"
import { getAppMigrationVersion } from "../appMigrationMetadata"

jest.mock<typeof migrations>("../migrations", () => ({

@@ -52,4 +53,29 @@ describe("migrations", () => {
      },
    })
  })

+  it("should disable all migrations after one that is disabled", () => {
+    const MIGRATION_ID1 = "20231211105810_new-test",
+      MIGRATION_ID2 = "20231211105812_new-test",
+      MIGRATION_ID3 = "20231211105814_new-test"
+
+    // create some migrations to test with
+    const migrations: AppMigration[] = [
+      {
+        id: MIGRATION_ID1,
+        func: async () => {},
+      },
+      {
+        id: MIGRATION_ID2,
+        func: async () => {},
+      },
+      {
+        id: MIGRATION_ID3,
+        func: async () => {},
+      },
+    ]
+
+    expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID3)
+
+    migrations[1].disabled = true
+    expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID1)
+  })
})
View file

@@ -99,6 +99,15 @@ export function getError(err: any) {
  return typeof err !== "string" ? err.toString() : err
}

+export function guardAttachment(attachmentObject: any) {
+  if (!("url" in attachmentObject) || !("filename" in attachmentObject)) {
+    const providedKeys = Object.keys(attachmentObject).join(", ")
+    throw new Error(
+      `Attachments must have both "url" and "filename" keys. You have provided: ${providedKeys}`
+    )
+  }
+}
+
export async function sendAutomationAttachmentsToStorage(
  tableId: string,
  row: Row

@@ -116,9 +125,15 @@ export async function sendAutomationAttachmentsToStorage(
      schema?.type === FieldType.ATTACHMENT_SINGLE ||
      schema?.type === FieldType.SIGNATURE_SINGLE
    ) {
+      if (Array.isArray(value)) {
+        value.forEach(item => guardAttachment(item))
+      } else {
+        guardAttachment(value)
+      }
      attachmentRows[prop] = value
    }
  }

  for (const [prop, attachments] of Object.entries(attachmentRows)) {
    if (Array.isArray(attachments)) {
      if (attachments.length) {

@@ -133,7 +148,6 @@ export async function sendAutomationAttachmentsToStorage(
  return row
}
-
async function generateAttachmentRow(attachment: AutomationAttachment) {
  const prodAppId = context.getProdAppId()
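To make the guard's contract concrete, here is what it accepts and rejects (values are illustrative only):

// passes - both required keys are present
guardAttachment({ url: "https://example.com/file.txt", filename: "file.txt" })

// throws - Error: Attachments must have both "url" and "filename" keys.
// You have provided: wrongKey
guardAttachment({ wrongKey: "https://example.com/file.txt" })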
View file

@@ -90,7 +90,6 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
      tableId: inputs.row.tableId,
    },
  })
-
  try {
    inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
    inputs.row = await sendAutomationAttachmentsToStorage(
View file

@@ -118,6 +118,14 @@ export async function run({ inputs }: AutomationStepInput) {
  }
  to = to || undefined

+  if (attachments) {
+    if (Array.isArray(attachments)) {
+      attachments.forEach(item => automationUtils.guardAttachment(item))
+    } else {
+      automationUtils.guardAttachment(attachments)
+    }
+  }
+
  try {
    let response = await sendSmtpEmail({
      to,
View file

@@ -128,4 +128,31 @@ describe("test the create row action", () => {
    expect(objectData).toBeDefined()
    expect(objectData.ContentLength).toBeGreaterThan(0)
  })

+  it("should check that attachment without the correct keys throws an error", async () => {
+    let attachmentTable = await config.createTable(
+      basicTableWithAttachmentField()
+    )
+
+    let attachmentRow: any = {
+      tableId: attachmentTable._id,
+    }
+
+    let filename = "test2.txt"
+    let presignedUrl = await uploadTestFile(filename)
+
+    let attachmentObject = {
+      wrongKey: presignedUrl,
+      anotherWrongKey: filename,
+    }
+
+    attachmentRow.single_file_attachment = attachmentObject
+    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+      row: attachmentRow,
+    })
+
+    expect(res.success).toEqual(false)
+    expect(res.response).toEqual(
+      'Error: Attachments must have both "url" and "filename" keys. You have provided: wrongKey, anotherWrongKey'
+    )
+  })
})
View file

@@ -59,6 +59,9 @@ class LinkDocumentImpl implements LinkDocument {
    this.doc1 = docA.tableId > docB.tableId ? docA : docB
    this.doc2 = docA.tableId > docB.tableId ? docB : docA
  }
+  _rev?: string | undefined
+  createdAt?: string | number | undefined
+  updatedAt?: string | undefined
}

export default LinkDocumentImpl
View file

@@ -1,5 +1,5 @@
import newid from "./newid"
-import { db as dbCore } from "@budibase/backend-core"
+import { context, db as dbCore } from "@budibase/backend-core"
import {
  DatabaseQueryOpts,
  Datasource,

@@ -10,6 +10,7 @@ import {
  RelationshipFieldMetadata,
  SourceName,
  VirtualDocumentType,
+  LinkDocument,
} from "@budibase/types"

export { DocumentType, VirtualDocumentType } from "@budibase/types"

@@ -137,10 +138,24 @@ export function generateLinkID(
/**
 * Gets parameters for retrieving link docs, this is a utility function for the getDocParams function.
 */
-export function getLinkParams(otherProps: any = {}) {
+function getLinkParams(otherProps: Partial<DatabaseQueryOpts> = {}) {
  return getDocParams(DocumentType.LINK, null, otherProps)
}

+/**
+ * Gets all the link documents from the current app DB.
+ */
+export async function allLinkDocs() {
+  const db = context.getAppDB()
+  const response = await db.allDocs<LinkDocument>(
+    getLinkParams({
+      include_docs: true,
+    })
+  )
+  return response.rows.map(row => row.doc!)
+}
+
/**
 * Generates a new layout ID.
 * @returns The new layout ID which the layout doc can be stored under.
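allLinkDocs() reads from whichever app DB is bound to the current context, which is why callers such as the SQS migration above run it inside an app context. A sketch of a call site; the app ID is made up:

import { context } from "@budibase/backend-core"
import { allLinkDocs } from "../db/utils"

async function countLinks() {
  await context.doInAppContext("app_dev_example", async () => {
    const links = await allLinkDocs()
    console.log(`found ${links.length} link documents`)
  })
}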
View file

@@ -96,6 +96,7 @@ const environment = {
  DISABLE_THREADING: process.env.DISABLE_THREADING,
  DISABLE_AUTOMATION_LOGS: process.env.DISABLE_AUTOMATION_LOGS,
  DISABLE_RATE_LIMITING: process.env.DISABLE_RATE_LIMITING,
+  DISABLE_APP_MIGRATIONS: process.env.SKIP_APP_MIGRATIONS || false,
  MULTI_TENANCY: process.env.MULTI_TENANCY,
  ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
  SELF_HOSTED: process.env.SELF_HOSTED,
View file

@@ -4,19 +4,14 @@ import {
  MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import * as setup from "../api/routes/tests/utilities"
-import {
-  Datasource,
-  FieldType,
-  Table,
-  TableRequest,
-  TableSourceType,
-} from "@budibase/types"
+import { Datasource, FieldType } from "@budibase/types"
import {
  DatabaseName,
  getDatasource,
  rawQuery,
} from "../integrations/tests/utils"
import { generator } from "@budibase/backend-core/tests"
+import { tableForDatasource } from "../../src/tests/utilities/structures"

// @ts-ignore
fetch.mockSearch()

@@ -47,8 +42,7 @@ jest.mock("../websockets", () => ({
describe("mysql integrations", () => {
  let makeRequest: MakeRequestResponse,
    rawDatasource: Datasource,
-    datasource: Datasource,
-    primaryMySqlTable: Table
+    datasource: Datasource

  beforeAll(async () => {
    await config.init()

@@ -60,38 +54,12 @@ describe("mysql integrations", () => {
    datasource = await config.api.datasource.create(rawDatasource)
  })

-  beforeEach(async () => {
-    primaryMySqlTable = await config.createTable({
-      name: uniqueTableName(),
-      type: "table",
-      primary: ["id"],
-      schema: {
-        id: {
-          name: "id",
-          type: FieldType.AUTO,
-          autocolumn: true,
-        },
-        name: {
-          name: "name",
-          type: FieldType.STRING,
-        },
-        description: {
-          name: "description",
-          type: FieldType.STRING,
-        },
-        value: {
-          name: "value",
-          type: FieldType.NUMBER,
-        },
-      },
-      sourceId: datasource._id,
-      sourceType: TableSourceType.EXTERNAL,
-    })
-  })
-
  afterAll(config.end)

  it("validate table schema", async () => {
+    // Creating a table so that `entities` is populated.
+    await config.api.table.save(tableForDatasource(datasource))
    const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
    expect(res.status).toBe(200)

@@ -115,54 +83,6 @@ describe("mysql integrations", () => {
    })
  })

-  describe("POST /api/datasources/verify", () => {
-    it("should be able to verify the connection", async () => {
-      await config.api.datasource.verify(
-        {
-          datasource: rawDatasource,
-        },
-        {
-          body: {
-            connected: true,
-          },
-        }
-      )
-    })
-
-    it("should state an invalid datasource cannot connect", async () => {
-      await config.api.datasource.verify(
-        {
-          datasource: {
-            ...rawDatasource,
-            config: {
-              ...rawDatasource.config,
-              password: "wrongpassword",
-            },
-          },
-        },
-        {
-          body: {
-            connected: false,
-            error:
-              "Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
-          },
-        }
-      )
-    })
-  })
-
-  describe("POST /api/datasources/info", () => {
-    it("should fetch information about mysql datasource", async () => {
-      const primaryName = primaryMySqlTable.name
-      const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: datasource,
-      })
-      expect(response.status).toBe(200)
-      expect(response.body.tableNames).toBeDefined()
-      expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
-    })
-  })
-
  describe("Integration compatibility with mysql search_path", () => {
    let datasource: Datasource, rawDatasource: Datasource
    const database = generator.guid()

@@ -231,57 +151,6 @@ describe("mysql integrations", () => {
    })
  })

-  describe("POST /api/tables/", () => {
-    it("will rename a column", async () => {
-      await makeRequest("post", "/api/tables/", primaryMySqlTable)
-      let renameColumnOnTable: TableRequest = {
-        ...primaryMySqlTable,
-        schema: {
-          id: {
-            name: "id",
-            type: FieldType.AUTO,
-            autocolumn: true,
-            externalType: "unsigned integer",
-          },
-          name: {
-            name: "name",
-            type: FieldType.STRING,
-            externalType: "text",
-          },
-          description: {
-            name: "description",
-            type: FieldType.STRING,
-            externalType: "text",
-          },
-          age: {
-            name: "age",
-            type: FieldType.NUMBER,
-            externalType: "float(8,2)",
-          },
-        },
-      }
-      const response = await makeRequest(
-        "post",
-        "/api/tables/",
-        renameColumnOnTable
-      )
-      const ds = (
-        await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
-      ).body.datasource
-      expect(response.status).toEqual(200)
-      expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
-        "id",
-        "name",
-        "description",
-        "age",
-      ])
-    })
-  })
-
  describe("POST /api/datasources/:datasourceId/schema", () => {
    let tableName: string
View file

@@ -1035,54 +1035,6 @@ describe("postgres integrations", () => {
    })
  })

-  describe("POST /api/datasources/verify", () => {
-    it("should be able to verify the connection", async () => {
-      await config.api.datasource.verify(
-        {
-          datasource: await getDatasource(DatabaseName.POSTGRES),
-        },
-        {
-          body: {
-            connected: true,
-          },
-        }
-      )
-    })
-
-    it("should state an invalid datasource cannot connect", async () => {
-      const dbConfig = await getDatasource(DatabaseName.POSTGRES)
-      await config.api.datasource.verify(
-        {
-          datasource: {
-            ...dbConfig,
-            config: {
-              ...dbConfig.config,
-              password: "wrongpassword",
-            },
-          },
-        },
-        {
-          body: {
-            connected: false,
-            error: 'password authentication failed for user "postgres"',
-          },
-        }
-      )
-    })
-  })
-
-  describe("POST /api/datasources/info", () => {
-    it("should fetch information about postgres datasource", async () => {
-      const primaryName = primaryPostgresTable.name
-      const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: datasource,
-      })
-      expect(response.status).toBe(200)
-      expect(response.body.tableNames).toBeDefined()
-      expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
-    })
-  })
-
  describe("POST /api/datasources/:datasourceId/schema", () => {
    let tableName: string

@@ -1097,12 +1049,11 @@
    it("recognises when a table has no primary key", async () => {
      await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)

-      const response = await makeRequest(
-        "post",
-        `/api/datasources/${datasource._id}/schema`
-      )
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })

-      expect(response.body.errors).toEqual({
+      expect(response.errors).toEqual({
        [tableName]: "Table must have a primary key.",
      })
    })

@@ -1113,12 +1064,11 @@
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
      )

-      const response = await makeRequest(
-        "post",
-        `/api/datasources/${datasource._id}/schema`
-      )
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })

-      expect(response.body.errors).toEqual({
+      expect(response.errors).toEqual({
        [tableName]: "Table contains invalid columns.",
      })
    })

@@ -1143,15 +1093,14 @@
        `
      )

-      const response = await makeRequest(
-        "post",
-        `/api/datasources/${datasource._id}/schema`
-      )
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })

-      const table = response.body.datasource.entities[tableName]
+      const table = response.datasource.entities?.[tableName]

      expect(table).toBeDefined()
-      expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
+      expect(table?.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
    })
  })

@@ -1215,20 +1164,16 @@
        rawDatasource,
        `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
      )

-      const response = await makeRequest(
-        "post",
-        `/api/datasources/${datasource._id}/schema`,
-        {
-          tablesFilter: [repeated_table_name],
-        }
-      )
-      expect(response.status).toBe(200)
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+        tablesFilter: [repeated_table_name],
+      })
      expect(
-        response.body.datasource.entities[repeated_table_name].schema
+        response.datasource.entities?.[repeated_table_name].schema
      ).toBeDefined()
-      const schema =
-        response.body.datasource.entities[repeated_table_name].schema
-      expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
+      const schema = response.datasource.entities?.[repeated_table_name].schema
+      expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
    })
  })

@@ -1246,16 +1191,14 @@
  })

  it("should handle binary columns", async () => {
-    const response = await makeRequest(
-      "post",
-      `/api/datasources/${datasource._id}/schema`
-    )
-    expect(response.body).toBeDefined()
-    expect(response.body.datasource.entities).toBeDefined()
-    const table = response.body.datasource.entities["binarytable"]
+    const response = await config.api.datasource.fetchSchema({
+      datasourceId: datasource._id!,
+    })
+    expect(response.datasource.entities).toBeDefined()
+    const table = response.datasource.entities?.["binarytable"]
    expect(table).toBeDefined()
-    expect(table.schema.id.externalType).toBe("bytea")
-    const row = await config.api.row.save(table._id, {
+    expect(table?.schema.id.externalType).toBe("bytea")
+    const row = await config.api.row.save(table?._id!, {
      id: "1111",
      column1: "hello",
      column2: 222,

@@ -1265,4 +1208,48 @@
    expect(JSON.parse(decoded)[0]).toBe("1111")
    })
  })

+  describe("check fetching null/not null table", () => {
+    beforeAll(async () => {
+      await rawQuery(
+        rawDatasource,
+        `CREATE TABLE nullableTable (
+          order_id SERIAL PRIMARY KEY,
+          order_number INT NOT NULL
+        );
+        `
+      )
+    })
+
+    it("should be able to change the table to allow nullable and refetch this", async () => {
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })
+      const entities = response.datasource.entities
+      expect(entities).toBeDefined()
+      const nullableTable = entities?.["nullabletable"]
+      expect(nullableTable).toBeDefined()
+      expect(
+        nullableTable?.schema["order_number"].constraints?.presence
+      ).toEqual(true)
+      // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
+      // is aware of - therefore we can try to fetch and make sure BB updates correctly
+      await rawQuery(
+        rawDatasource,
+        `ALTER TABLE nullableTable
+          ALTER COLUMN order_number DROP NOT NULL;
+        `
+      )
+      const responseAfter = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+      })
+      const entitiesAfter = responseAfter.datasource.entities
+      expect(entitiesAfter).toBeDefined()
+      const nullableTableAfter = entitiesAfter?.["nullabletable"]
+      expect(nullableTableAfter).toBeDefined()
+      expect(
+        nullableTableAfter?.schema["order_number"].constraints?.presence
+      ).toBeUndefined()
+    })
+  })
})
View file

@@ -149,13 +149,12 @@ class RestIntegration implements IntegrationBase {
      { downloadImages: this.config.downloadImages }
    )
    let contentLength = response.headers.get("content-length")
-    if (!contentLength && raw) {
-      contentLength = Buffer.byteLength(raw, "utf8").toString()
-    }
+    let isSuccess = response.status >= 200 && response.status < 300
    if (
-      contentDisposition.includes("filename") ||
-      contentDisposition.includes("attachment") ||
-      contentDisposition.includes("form-data")
+      (contentDisposition.includes("filename") ||
+        contentDisposition.includes("attachment") ||
+        contentDisposition.includes("form-data")) &&
+      isSuccess
    ) {
      filename =
        path.basename(parse(contentDisposition).parameters?.filename) || ""

@@ -168,6 +167,9 @@ class RestIntegration implements IntegrationBase {
      return handleFileResponse(response, filename, this.startTimeMs)
    } else {
      responseTxt = response.text ? await response.text() : ""
+      if (!contentLength && responseTxt) {
+        contentLength = Buffer.byteLength(responseTxt, "utf8").toString()
+      }
      const hasContent =
        (contentLength && parseInt(contentLength) > 0) ||
        responseTxt.length > 0
View file

@@ -657,6 +657,7 @@ describe("REST Integration", () => {
    mockReadable.push(null)
    ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
      Promise.resolve({
+        status: 200,
        headers: {
          raw: () => ({
            "content-type": [contentType],

@@ -700,6 +701,7 @@ describe("REST Integration", () => {
    mockReadable.push(null)
    ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
      Promise.resolve({
+        status: 200,
        headers: {
          raw: () => ({
            "content-type": [contentType],
View file

@@ -18,7 +18,7 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
    await logs.waitUntilReady(container, boundPorts, startTime)

    const command = Wait.forSuccessfulCommand(
-      `mysqladmin ping -h localhost -P 3306 -u root -ppassword`
+      `/usr/local/bin/healthcheck.sh --innodb_initialized`
    )
    await command.waitUntilReady(container)
  }
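The new command relies on the healthcheck.sh script shipped in the official MariaDB image, which exits 0 once InnoDB is initialised, rather than pinging mysqladmin. For context, attaching such a strategy with testcontainers looks roughly like this (image tag and env values are illustrative):

import { GenericContainer, Wait } from "testcontainers"

async function startMariaDB() {
  return await new GenericContainer("mariadb:lts")
    .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "/usr/local/bin/healthcheck.sh --innodb_initialized"
      )
    )
    .start()
}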
View file

@@ -9,10 +9,12 @@ import { context, objectStore, sql } from "@budibase/backend-core"
import { v4 } from "uuid"
import { parseStringPromise as xmlParser } from "xml2js"
import { formatBytes } from "../../utilities"
-import bl from "bl"
import env from "../../environment"
import { InvalidColumns } from "../../constants"
import { helpers, utils } from "@budibase/shared-core"
+import { pipeline } from "stream/promises"
+import tmp from "tmp"
+import fs from "fs"

type PrimitiveTypes =
  | FieldType.STRING

@@ -278,12 +280,35 @@ function copyExistingPropsOver(
      utils.unreachable(existingColumnType)
  }

  // copy the BB schema in case of special props
  if (shouldKeepSchema) {
+    const fetchedColumnDefinition: FieldSchema | undefined = table.schema[key]
    table.schema[key] = {
      ...existingTableSchema[key],
      externalType:
        existingTableSchema[key].externalType ||
        table.schema[key]?.externalType,
-    }
+      autocolumn: fetchedColumnDefinition?.autocolumn,
+    } as FieldSchema

+    // check constraints which can be fetched from the DB (they could be updated)
+    if (fetchedColumnDefinition?.constraints) {
+      // inclusions are the enum values (select/options)
+      const fetchedConstraints = fetchedColumnDefinition.constraints
+      const oldConstraints = table.schema[key].constraints
+      table.schema[key].constraints = {
+        ...table.schema[key].constraints,
+        inclusion: fetchedConstraints.inclusion?.length
+          ? fetchedConstraints.inclusion
+          : oldConstraints?.inclusion,
+      }
+      // true or undefined - consistent with old API
+      if (fetchedConstraints.presence) {
+        table.schema[key].constraints!.presence = fetchedConstraints.presence
+      } else if (oldConstraints?.presence === true) {
+        delete table.schema[key].constraints?.presence
+      }
+    }
  }
}

@@ -360,35 +385,44 @@ export async function handleFileResponse(
  const key = `${context.getProdAppId()}/${processedFileName}`
  const bucket = objectStore.ObjectStoreBuckets.TEMP

-  const stream = response.body.pipe(bl((error, data) => data))
+  // put the response stream to disk temporarily as a buffer
+  const tmpObj = tmp.fileSync()
+  try {
+    await pipeline(response.body, fs.createWriteStream(tmpObj.name))
    if (response.body) {
      const contentLength = response.headers.get("content-length")
      if (contentLength) {
        size = parseInt(contentLength, 10)
      }

-      await objectStore.streamUpload({
+      const details = await objectStore.streamUpload({
        bucket,
        filename: key,
-        stream,
+        stream: fs.createReadStream(tmpObj.name),
        ttl: 1,
        type: response.headers["content-type"],
      })
+      if (!size && details.ContentLength) {
+        size = details.ContentLength
+      }
    }

    presignedUrl = objectStore.getPresignedUrl(bucket, key)
    return {
      data: {
        size,
        name: processedFileName,
        url: presignedUrl,
        extension: fileExtension,
        key: key,
      },
      info: {
        code: response.status,
        size: formatBytes(size.toString()),
        time: `${Math.round(performance.now() - startTime)}ms`,
      },
    }
+  } finally {
+    // cleanup tmp
+    tmpObj.removeCallback()
+  }
}
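The tmp-file indirection above replaces buffering the whole body in memory (the old bl approach): spool the HTTP body to disk, re-stream it to object storage, and always delete the file. In isolation the pattern looks like this; uploadStream is a hypothetical stand-in for the storage call:

import { pipeline } from "stream/promises"
import fs from "fs"
import tmp from "tmp"

declare function uploadStream(
  stream: NodeJS.ReadableStream
): Promise<void> // hypothetical stand-in

async function spoolAndUpload(body: NodeJS.ReadableStream) {
  const tmpObj = tmp.fileSync()
  try {
    // write to disk instead of holding the full payload in RAM
    await pipeline(body, fs.createWriteStream(tmpObj.name))
    await uploadStream(fs.createReadStream(tmpObj.name))
  } finally {
    tmpObj.removeCallback() // clean up the temp file even on failure
  }
}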
View file

@@ -1,9 +1,16 @@
import { UserCtx } from "@budibase/types"
import { checkMissingMigrations } from "../appMigrations"
+import env from "../environment"

export default async (ctx: UserCtx, next: any) => {
  const { appId } = ctx

+  // migrations can be disabled via environment variable if you
+  // need to completely disable migrations, e.g. for testing
+  if (env.DISABLE_APP_MIGRATIONS) {
+    return next()
+  }
+
  if (!appId) {
    return next()
  }
View file

@@ -14,6 +14,7 @@ import {
  CONSTANT_INTERNAL_ROW_COLS,
  generateJunctionTableID,
} from "../../../../db/utils"
+import { isEqual } from "lodash"

const FieldTypeMap: Record<FieldType, SQLiteType> = {
  [FieldType.BOOLEAN]: SQLiteType.NUMERIC,

@@ -107,8 +108,22 @@ async function buildBaseDefinition(): Promise<PreSaveSQLiteDefinition> {
export async function syncDefinition(): Promise<void> {
  const db = context.getAppDB()
+  let existing: SQLiteDefinition | undefined
+  try {
+    existing = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+  } catch (err: any) {
+    if (err.status !== 404) {
+      throw err
+    }
+  }
  const definition = await buildBaseDefinition()
-  await db.put(definition)
+  if (existing) {
+    definition._rev = existing._rev
+  }
+  // only write if something has changed
+  if (!existing || !isEqual(existing.sql, definition.sql)) {
+    await db.put(definition)
+  }
}

export async function addTable(table: Table) {
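The shape of syncDefinition is a classic CouchDB idempotent write: tolerate a 404 on read, carry the _rev forward, and skip the put when nothing changed, which avoids needless _rev churn. Reduced to a sketch with a hypothetical db handle:

import { isEqual } from "lodash"

async function putIfChanged(db: any, docId: string, next: any) {
  let existing: any
  try {
    existing = await db.get(docId)
  } catch (err: any) {
    if (err.status !== 404) throw err // only "missing" is acceptable
  }
  if (existing) {
    next._rev = existing._rev // required for a CouchDB update
    if (isEqual(existing.sql, next.sql)) return // identical - skip the write
  }
  await db.put(next)
}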
View file

@@ -8,7 +8,8 @@ import {
} from "@budibase/types"
import { HTTPError, db as dbCore } from "@budibase/backend-core"
import { features } from "@budibase/pro"
-import { cloneDeep } from "lodash"
+import { helpers } from "@budibase/shared-core"
+import { cloneDeep } from "lodash/fp"
import * as utils from "../../../db/utils"
import { isExternalTableID } from "../../../integrations/utils"

@@ -16,7 +17,6 @@ import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal"
import * as external from "./external"
import sdk from "../../../sdk"
-import { isRequired } from "../../../utilities/schema"

function pickApi(tableId: any) {
  if (isExternalTableID(tableId)) {

@@ -37,11 +37,9 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
async function guardViewSchema(
  tableId: string,
-  viewSchema?: Record<string, ViewUIFieldMetadata>
+  view: Omit<ViewV2, "id" | "version">
) {
-  if (!viewSchema || !Object.keys(viewSchema).length) {
-    return
-  }
+  const viewSchema = view.schema || {}
  const table = await sdk.tables.getTable(tableId)

  for (const field of Object.keys(viewSchema)) {

@@ -54,18 +52,11 @@ async function guardViewSchema(
    }

    if (viewSchema[field].readonly) {
-      if (!(await features.isViewReadonlyColumnsEnabled())) {
-        throw new HTTPError(
-          `Readonly fields are not enabled for your tenant`,
-          400
-        )
-      }
-
-      if (isRequired(tableSchemaField.constraints)) {
-        throw new HTTPError(
-          `Field "${field}" cannot be readonly as it is a required field`,
-          400
-        )
+      if (
+        !(await features.isViewReadonlyColumnsEnabled()) &&
+        !(tableSchemaField as ViewUIFieldMetadata).readonly
+      ) {
+        throw new HTTPError(`Readonly fields are not enabled`, 400)
      }

      if (!viewSchema[field].visible) {

@@ -76,19 +67,61 @@ async function guardViewSchema(
      }
    }
  }

+  const existingView =
+    table?.views && (table.views[view.name] as ViewV2 | undefined)
+  for (const field of Object.values(table.schema)) {
+    if (!helpers.schema.isRequired(field.constraints)) {
+      continue
+    }
+    const viewSchemaField = viewSchema[field.name]
+    const existingViewSchema =
+      existingView?.schema && existingView.schema[field.name]
+    if (!viewSchemaField && !existingViewSchema?.visible) {
+      // Supporting existing configs with required columns but hidden in views
+      continue
+    }
+
+    if (!viewSchemaField?.visible) {
+      throw new HTTPError(
+        `You can't hide "${field.name}" because it is a required field.`,
+        400
+      )
+    }
+
+    if (viewSchemaField.readonly) {
+      throw new HTTPError(
+        `You can't make "${field.name}" readonly because it is a required field.`,
+        400
+      )
+    }
+  }
+
+  if (view.primaryDisplay) {
+    const viewSchemaField = viewSchema[view.primaryDisplay]
+
+    if (!viewSchemaField?.visible) {
+      throw new HTTPError(
+        `You can't hide "${view.primaryDisplay}" because it is the display column.`,
+        400
+      )
+    }
+  }
}

export async function create(
  tableId: string,
  viewRequest: Omit<ViewV2, "id" | "version">
): Promise<ViewV2> {
-  await guardViewSchema(tableId, viewRequest.schema)
+  await guardViewSchema(tableId, viewRequest)
  return pickApi(tableId).create(tableId, viewRequest)
}

export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
-  await guardViewSchema(tableId, view.schema)
+  await guardViewSchema(tableId, view)
  return pickApi(tableId).update(tableId, view)
}
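Concretely, the new checks mean a payload like the following is now rejected. An illustrative call, assuming "name" is a required column on the underlying table:

await sdk.views.create(tableId, {
  name: "my view",
  tableId,
  schema: {
    name: { visible: false }, // hiding a required field
  },
})
// -> HTTPError (400): You can't hide "name" because it is a required field.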
View file

@@ -6,6 +6,7 @@ import {
  UpdateDatasourceRequest,
  QueryJson,
  BuildSchemaFromSourceResponse,
+  FetchDatasourceInfoResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -61,6 +62,10 @@ export class DatasourceAPI extends TestAPI {
    })
  }

+  fetch = async (expectations?: Expectations) => {
+    return await this._get<Datasource[]>(`/api/datasources`, { expectations })
+  }
+
  query = async (
    query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
    expectations?: Expectations

@@ -71,10 +76,29 @@ export class DatasourceAPI extends TestAPI {
    })
  }

-  fetchSchema = async (id: string, expectations?: Expectations) => {
+  fetchSchema = async (
+    {
+      datasourceId,
+      tablesFilter,
+    }: { datasourceId: string; tablesFilter?: string[] },
+    expectations?: Expectations
+  ) => {
    return await this._post<BuildSchemaFromSourceResponse>(
-      `/api/datasources/${id}/schema`,
+      `/api/datasources/${datasourceId}/schema`,
      {
+        expectations: expectations,
+        body: {
+          tablesFilter: tablesFilter,
+        },
+      }
+    )
+  }
+
+  info = async (datasource: Datasource, expectations?: Expectations) => {
+    return await this._post<FetchDatasourceInfoResponse>(
+      `/api/datasources/info`,
+      {
+        body: { datasource },
        expectations,
      }
    )
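Typical call sites for the reworked helpers in a test then look like this (table name and assertions are illustrative):

const { datasource: updated, errors } =
  await config.api.datasource.fetchSchema({
    datasourceId: datasource._id!,
    tablesFilter: ["orders"],
  })
expect(errors).toEqual({})

const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toContain("orders")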
View file

@@ -196,12 +196,22 @@ class QueryRunner {
    return { rows, keys, info, extra, pagination }
  }

-  async runAnotherQuery(queryId: string, parameters: any) {
+  async runAnotherQuery(
+    queryId: string,
+    currentParameters: Record<string, any>
+  ) {
    const db = context.getAppDB()
    const query = await db.get<Query>(queryId)
    const datasource = await sdk.datasources.get(query.datasourceId, {
      enriched: true,
    })
+    // enrich parameters with dynamic queries defaults
+    const defaultParams = query.parameters || []
+    for (let param of defaultParams) {
+      if (!currentParameters[param.name]) {
+        currentParameters[param.name] = param.default
+      }
+    }
    return new QueryRunner(
      {
        schema: query.schema,

@@ -210,7 +220,7 @@ class QueryRunner {
        transformer: query.transformer,
        nullDefaultSupport: query.nullDefaultSupport,
        ctx: this.ctx,
-        parameters,
+        parameters: currentParameters,
        datasource,
        queryId,
      },
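The merge rule is simply "caller wins, declared defaults fill the gaps". In miniature (names and values made up):

const defaults = [
  { name: "limit", default: "10" },
  { name: "page", default: "1" },
]
const current: Record<string, any> = { limit: "50" }
for (const param of defaults) {
  if (!current[param.name]) {
    current[param.name] = param.default
  }
}
// current -> { limit: "50", page: "1" }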
View file

@@ -4,9 +4,8 @@ import {
  TableSchema,
  FieldSchema,
  Row,
-  FieldConstraints,
} from "@budibase/types"
-import { ValidColumnNameRegex, utils } from "@budibase/shared-core"
+import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
import { db } from "@budibase/backend-core"
import { parseCsvExport } from "../api/controllers/view/exporters"

@@ -41,15 +40,6 @@ export function isRows(rows: any): rows is Rows {
  return Array.isArray(rows) && rows.every(row => typeof row === "object")
}

-export function isRequired(constraints: FieldConstraints | undefined) {
-  const isRequired =
-    !!constraints &&
-    ((typeof constraints.presence !== "boolean" &&
-      constraints.presence?.allowEmpty === false) ||
-      constraints.presence === true)
-  return isRequired
-}
-
export function validate(rows: Rows, schema: TableSchema): ValidationResults {
  const results: ValidationResults = {
    schemaValidation: {},

@@ -109,7 +99,7 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
        columnData,
        columnType,
        columnSubtype,
-        isRequired(constraints)
+        helpers.schema.isRequired(constraints)
      )
    ) {
      results.schemaValidation[columnName] = false
View file

@@ -1,5 +1,6 @@
import {
  BBReferenceFieldSubType,
+  FieldConstraints,
  FieldSchema,
  FieldType,
} from "@budibase/types"

@@ -16,3 +17,12 @@ export function isDeprecatedSingleUserColumn(
    schema.constraints?.type !== "array"
  return result
}

+export function isRequired(constraints: FieldConstraints | undefined) {
+  const isRequired =
+    !!constraints &&
+    ((typeof constraints.presence !== "boolean" &&
+      constraints.presence?.allowEmpty === false) ||
+      constraints.presence === true)
+  return isRequired
+}
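The helper covers both presence styles that appear in Budibase schemas. A few example evaluations of the logic above:

isRequired(undefined) // false - no constraints at all
isRequired({ presence: true }) // true
isRequired({ presence: { allowEmpty: false } }) // true
isRequired({ presence: { allowEmpty: true } }) // false
isRequired({ presence: false }) // false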
View file

@@ -33,7 +33,12 @@ const removeSquareBrackets = (value: string) => {

// Our context getter function provided to JS code as $.
// Extracts a value from context.
const getContextValue = (path: string, context: any) => {
+  const literalStringRegex = /^(["'`]).*\1$/
  let data = context
+  // check if it's a literal string - just return the path if it's quoted
+  if (literalStringRegex.test(path)) {
+    return path.substring(1, path.length - 1)
+  }
  path.split(".").forEach(key => {
    if (data == null || typeof data !== "object") {
      return null
View file

@@ -149,4 +149,11 @@ describe("Javascript", () => {
      expect(output).toMatch(UUID_REGEX)
    })
  })

+  describe("JS literal strings", () => {
+    it("should be able to handle a literal string that is quoted (like role IDs)", () => {
+      const output = processJS(`return $("'Custom'")`)
+      expect(output).toBe("Custom")
+    })
+  })
})
View file

@@ -245,7 +245,7 @@ export type AutomationAttachment = {
export type AutomationAttachmentContent = {
  filename: string
-  content: ReadStream | NodeJS.ReadableStream | ReadableStream<Uint8Array>
+  content: ReadStream | NodeJS.ReadableStream
}

export type BucketedContent = AutomationAttachmentContent & {

@ -30,4 +30,7 @@ export interface SQLiteDefinition {
} }
} }
export type PreSaveSQLiteDefinition = Omit<SQLiteDefinition, "_rev"> export interface PreSaveSQLiteDefinition
extends Omit<SQLiteDefinition, "_rev"> {
_rev?: string
}

View file

@@ -165,3 +165,13 @@ export interface Database {
  deleteIndex(...args: any[]): Promise<any>
  getIndexes(...args: any[]): Promise<any>
}

+export interface DBError extends Error {
+  status: number
+  statusCode: number
+  reason: string
+  name: string
+  errid: string
+  error: string
+  description: string
+}
View file

@@ -31,6 +31,7 @@ async function init() {
    HTTP_LOGGING: "0",
    VERSION: "0.0.0+local",
    PASSWORD_MIN_LENGTH: "1",
+    SQS_SEARCH_ENABLE: "1",
  }

  config = { ...config, ...existingConfig }
View file

@@ -21,7 +21,9 @@ const generateTimestamp = () => {
}

const createMigrationFile = () => {
-  const migrationFilename = `${generateTimestamp()}_${title}`
+  const migrationFilename = `${generateTimestamp()}_${title
+    .replace(/-/g, "_")
+    .replace(/ /g, "_")}`
  const migrationsDir = "../packages/server/src/appMigrations"

  const template = `const migration = async () => {
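So a title containing hyphens or spaces now produces a clean, underscore-only filename. An illustrative input/output (the timestamp is made up):

const title = "new sqs-search fix"
const filename = `${"20240604153647"}_${title
  .replace(/-/g, "_")
  .replace(/ /g, "_")}`
// -> "20240604153647_new_sqs_search_fix"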
View file

@@ -6348,6 +6348,11 @@
  dependencies:
    "@types/estree" "*"

+"@types/tmp@0.2.6":
+  version "0.2.6"
+  resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.2.6.tgz#d785ee90c52d7cc020e249c948c36f7b32d1e217"
+  integrity sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==
+
"@types/tough-cookie@*", "@types/tough-cookie@^4.0.2":
  version "4.0.2"
  resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397"

@@ -7700,7 +7705,7 @@ bl@^4.0.3, bl@^4.1.0:
    inherits "^2.0.4"
    readable-stream "^3.4.0"

-bl@^6.0.12, bl@^6.0.3:
+bl@^6.0.3:
  version "6.0.12"
  resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.12.tgz#77c35b96e13aeff028496c798b75389ddee9c7f8"
  integrity sha512-EnEYHilP93oaOa2MnmNEjAcovPS3JlQZOyzGXi3EyEpPhm9qWvdDp7BmAVEVusGzp8LlwQK56Av+OkDoRjzE0w==

@@ -16065,10 +16070,10 @@ mute-stream@~1.0.0:
  resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e"
  integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==

-mysql2@3.9.7:
-  version "3.9.7"
-  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.7.tgz#843755daf65b5ef08afe545fe14b8fb62824741a"
-  integrity sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==
+mysql2@3.9.8:
+  version "3.9.8"
+  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.8.tgz#fe8a0f975f2c495ed76ca988ddc5505801dc49ce"
+  integrity sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==
  dependencies:
    denque "^2.1.0"
    generate-function "^2.3.1"

@@ -21283,6 +21288,11 @@ tlhunter-sorted-set@^0.1.0:
  resolved "https://registry.yarnpkg.com/tlhunter-sorted-set/-/tlhunter-sorted-set-0.1.0.tgz#1c3eae28c0fa4dff97e9501d2e3c204b86406f4b"
  integrity sha512-eGYW4bjf1DtrHzUYxYfAcSytpOkA44zsr7G2n3PV7yOUR23vmkGe3LL4R+1jL9OsXtbsFOwe8XtbCrabeaEFnw==

+tmp@0.2.3:
+  version "0.2.3"
+  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.3.tgz#eb783cc22bc1e8bebd0671476d46ea4eb32a79ae"
+  integrity sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==
+
tmp@^0.0.33:
  version "0.0.33"
  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"