
Merge branch 'develop' of github.com:Budibase/budibase into fix/cli-memory-issues

mike12345567 2023-02-07 09:56:49 +00:00
commit 464ab7a1bb
135 changed files with 2412 additions and 959 deletions

View file

@ -2,7 +2,7 @@
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
labels: ["bug", "linear"]
assignees: ''
---

.github/ISSUE_TEMPLATE/config.yml (new file, +1 line)
View file

@ -0,0 +1 @@
blank_issues_enabled: false

View file

@ -2,7 +2,7 @@
name: Feature Request
about: Request a new budibase feature or enhancement
title: ''
labels: enhancement
labels: ["enhancement", "linear"]
assignees: ''
---

View file

@ -19,10 +19,11 @@ COUCH_DB_PORT=4005
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION
SQL_MAX_ROWS=
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=
PLUGINS_DIR=

View file

@ -0,0 +1,32 @@
FROM couchdb:3.2.1
ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
apt-get update && apt-get install -y --no-install-recommends openjdk-8-jre && \
rm -rf /var/lib/apt/lists/
# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
unzip clouseau-2.21.0-dist.zip && \
mv clouseau-2.21.0 /opt/clouseau && \
rm clouseau-2.21.0-dist.zip
WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD clouseau/clouseau ./bin/
ADD clouseau/log4j.properties clouseau/clouseau.ini ./
# setup CouchDB
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/
WORKDIR /
ADD build-target-paths.sh .
ADD runner.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau ./build-target-paths.sh
CMD ["./bbcouch-runner.sh"]

View file

@ -0,0 +1,24 @@
#!/bin/bash
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure App Service uses /home for persistent data & SSH on port 2222
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi
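For context, `${TARGETBUILD}` is supplied when the image is built: the single-image Dockerfile later in this diff appears to declare it as a build argument and exports it as an environment variable (`ENV TARGETBUILD $TARGETBUILD`), which is how the Azure App Service branch above gets selected at container start. A minimal sketch of building the App Service variant, assuming the `hosting/single/Dockerfile` path used by the repo's build scripts and an illustrative image tag:

```
docker build --build-arg TARGETBUILD=aas -f hosting/single/Dockerfile -t budibase:single-aas .
```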

hosting/couchdb/runner.sh (new file, +14 lines)
View file

@ -0,0 +1,14 @@
#!/bin/bash
DATA_DIR=${DATA_DIR:-/data}
mkdir -p ${DATA_DIR}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
/build-target-paths.sh
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
sleep 10
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_users
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_replicator
sleep infinity

View file

@ -0,0 +1,23 @@
FROM budibase/couchdb
ENV DATA_DIR /data
RUN mkdir /data
RUN apt-get update && \
apt-get install -y --no-install-recommends redis-server
WORKDIR /minio
ADD scripts/install-minio.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
WORKDIR /
ADD dependencies/runner.sh .
RUN chmod +x ./runner.sh
EXPOSE 5984
EXPOSE 9000
EXPOSE 9001
EXPOSE 6379
CMD ["./runner.sh"]

View file

@ -0,0 +1,57 @@
# Docker Image for Running Budibase Tests
## Overview
This image contains the basic setup for running Budibase's backend dependencies (CouchDB, Redis and MinIO) in a single container for test runs.
## Usage
- Build the Image
- Run the Container
### Build the Image
The guidance below is based on building the Budibase single image on Debian 11 and AlmaLinux 8. If you use another distro or OS you will need to amend the commands to suit.
#### Install Node
Budibase requires a more recent version of Node (14+) than is available in the base Debian repos, so install it from NodeSource:
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
apt install -y nodejs
node -v
```
Install yarn, jest and lerna:
```
npm install -g yarn jest lerna
```
#### Install Docker
```
apt install -y docker.io
```
Check the version of each installed tool (for example with the commands shown after this list). This process was tested with the version numbers below, so YMMV using anything else:
- Docker: 20.10.5
- node: 16.15.1
- yarn: 1.22.19
- lerna: 5.1.4
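For reference, a quick way to confirm what is actually installed (standard version flags for each tool):
```
docker --version
node -v
yarn -v
lerna -v
```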
#### Get the Code
Clone the Budibase repo
```
git clone https://github.com/Budibase/budibase.git
cd budibase
```
#### Setup Node
Run the repo setup script, then install dependencies and build the packages:
```
node ./hosting/scripts/setup.js
yarn
yarn bootstrap
yarn build
```
#### Build Image
The following yarn command does some prep and then runs the docker build command:
```
yarn build:docker:dependencies
```
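### Run the Container
The Usage list above also mentions running the container. A minimal sketch of running the freshly built image, assuming the `budibase/dependencies:latest` tag produced by `yarn build:docker:dependencies`; the credentials shown are illustrative values based on the runner script and compose files, not documented defaults:
```
docker run -d --name budibase-deps \
  -e COUCHDB_USER=admin -e COUCHDB_PASSWORD=admin \
  -e REDIS_PASSWORD=budibase \
  -e MINIO_ACCESS_KEY=budibase -e MINIO_SECRET_KEY=budibase \
  -p 5984:5984 -p 6379:6379 -p 9000:9000 -p 9001:9001 \
  budibase/dependencies:latest
```
CouchDB is then reachable on port 5984, Redis on 6379, and the MinIO API/console on 9000/9001.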

View file

@ -0,0 +1,8 @@
#!/bin/bash
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server ${DATA_DIR}/minio --console-address ":9001" > /dev/stdout 2>&1 &
echo "Budibase dependencies started..."
sleep infinity

View file

@ -42,25 +42,16 @@ services:
couchdb-service:
# platform: linux/amd64
container_name: budi-couchdb-dev
container_name: budi-couchdb3-dev
restart: on-failure
image: ibmcom/couchdb3
image: budibase/couchdb
environment:
- COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
- COUCHDB_USER=${COUCH_DB_USER}
ports:
- "${COUCH_DB_PORT}:5984"
volumes:
- couchdb3_data:/opt/couchdb/data
couch-init:
container_name: budi-couchdb-init-dev
image: curlimages/curl
environment:
PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
depends_on:
- couchdb-service
command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
- couchdb_data:/data
redis-service:
container_name: budi-redis-dev
@ -73,7 +64,7 @@ services:
- redis_data:/data
volumes:
couchdb3_data:
couchdb_data:
driver: local
minio_data:
driver: local

View file

@ -0,0 +1,47 @@
version: "3"
# optional ports are specified throughout for more advanced use cases.
services:
minio-service:
restart: on-failure
# Last version that supports the "fs" backend
image: minio/minio:RELEASE.2022-10-24T18-35-07Z
ports:
- 9000
- 9001
environment:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
couchdb-service:
# platform: linux/amd64
restart: on-failure
image: budibase/couchdb
environment:
- COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
- COUCHDB_USER=${COUCH_DB_USER}
ports:
- 5984
- 4369
- 9100
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5984/_up"]
interval: 30s
timeout: 20s
retries: 3
redis-service:
restart: on-failure
image: redis
command: redis-server --requirepass ${REDIS_PASSWORD}
ports:
- 6379
healthcheck:
test: ["CMD", "redis-cli", "ping"]

View file

@ -0,0 +1,10 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio

View file

@ -0,0 +1,15 @@
#!/bin/bash
tag=$1
if [[ ! "$tag" ]]; then
echo "No tag present. You must pass a tag to this script"
exit 1
fi
echo "Tagging images with tag: $tag"
docker tag budibase-couchdb budibase/couchdb:$tag
docker push --all-tags budibase/couchdb

View file

@ -18,7 +18,7 @@ WORKDIR /worker
ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
FROM budibase/couchdb
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
@ -29,23 +29,9 @@ ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
# ENV CUSTOM_DOMAIN=budi001.custom.com \
# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overridden by the user; they will be written
# to the .env file in the /data directory for use later on
# REDIS_PASSWORD=budibase \
# COUCHDB_PASSWORD=budibase \
# COUCHDB_USER=budibase \
# COUCH_DB_URL=http://budibase:budibase@localhost:5984 \
# INTERNAL_API_KEY=budibase \
# JWT_SECRET=testsecret \
# MINIO_ACCESS_KEY=budibase \
# MINIO_SECRET_KEY=budibase \
# install base dependencies
RUN apt-get update && \
apt-get install -y software-properties-common wget nginx uuid-runtime && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server && \
apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
apt-get update
@ -53,7 +39,7 @@ RUN apt-get update && \
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y libaio1 nodejs nginx openjdk-8-jdk redis-server unzip && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
# setup nginx
@ -69,23 +55,6 @@ RUN mkdir -p scripts/integrations/oracle
ADD packages/server/scripts/integrations/oracle scripts/integrations/oracle
RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
unzip clouseau-2.21.0-dist.zip && \
mv clouseau-2.21.0 /opt/clouseau && \
rm clouseau-2.21.0-dist.zip
WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD hosting/single/clouseau/clouseau ./bin/
ADD hosting/single/clouseau/log4j.properties hosting/single/clouseau/clouseau.ini ./
RUN chmod +x ./bin/clouseau
# setup CouchDB
WORKDIR /opt/couchdb
ADD hosting/single/couch/vm.args hosting/single/couch/local.ini ./etc/
# setup minio
WORKDIR /minio
ADD scripts/install-minio.sh ./install.sh
@ -98,9 +67,6 @@ RUN chmod +x ./runner.sh
ADD hosting/single/healthcheck.sh .
RUN chmod +x ./healthcheck.sh
ADD hosting/scripts/build-target-paths.sh .
RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
ADD hosting/single/ssh/sshd_config /etc/

View file

@ -72,14 +72,11 @@ for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
ln -s ${DATA_DIR}/.env /app/.env
ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, incase of mount
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
# Add monthly cron job to renew certbot certificate
@ -90,15 +87,14 @@ if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
/etc/init.d/nginx restart
fi
# wait for backend services to start
sleep 10
pushd app
pm2 start -l /dev/stdout --name app "yarn run:docker"
popd
pushd worker
pm2 start -l /dev/stdout --name worker "yarn run:docker"
popd
sleep 10
echo "curl to couchdb endpoints"
curl -X PUT ${COUCH_DB_URL}/_users
curl -X PUT ${COUCH_DB_URL}/_replicator
echo "end of runner.sh, sleeping ..."
sleep infinity

View file

@ -0,0 +1,9 @@
module.exports = () => {
return {
dockerCompose: {
composeFilePath: "../../hosting",
composeFile: "docker-compose.test.yaml",
startupTimeout: 10000,
},
}
}

View file

@ -1,5 +1,5 @@
{
"version": "2.3.0",
"version": "2.3.1",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -3,6 +3,7 @@
"private": true,
"devDependencies": {
"@rollup/plugin-json": "^4.0.2",
"@types/supertest": "^2.0.12",
"@typescript-eslint/parser": "5.45.0",
"babel-eslint": "^10.0.3",
"eslint": "^7.28.0",
@ -62,6 +63,9 @@
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single": "npm run build:docker:pre && npm run build:docker:single:image",
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"build:docs": "lerna run build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run env:multi:enable",

View file

@ -0,0 +1,8 @@
const { join } = require("path")
require("dotenv").config({
path: join(__dirname, "..", "..", "hosting", ".env"),
})
const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")
module.exports = jestTestcontainersConfigGenerator()

View file

@ -1,11 +1,11 @@
import { Config } from "@jest/types"
const preset = require("ts-jest/jest-preset")
const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
setupFiles: ["./tests/jestSetup.ts"],
collectCoverageFrom: ["src/**/*.{js,ts}"],
coverageReporters: ["lcov", "json", "clover"],
const baseConfig: Config.InitialProjectOptions = {
...preset,
preset: "@trendyol/jest-testcontainers",
setupFiles: ["./tests/jestEnv.ts"],
setupFilesAfterEnv: ["./tests/jestSetup.ts"],
transform: {
"^.+\\.ts?$": "@swc/jest",
},
@ -13,12 +13,28 @@ const config: Config.InitialOptions = {
if (!process.env.CI) {
// use sources when not in CI
config.moduleNameMapper = {
baseConfig.moduleNameMapper = {
"@budibase/types": "<rootDir>/../types/src",
"^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
}
} else {
console.log("Running tests with compiled dependency sources")
}
const config: Config.InitialOptions = {
projects: [
{
...baseConfig,
displayName: "sequential test",
testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
runner: "jest-serial-runner",
},
{
...baseConfig,
testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
},
],
collectCoverageFrom: ["src/**/*.{js,ts}"],
coverageReporters: ["lcov", "json", "clover"],
}
export default config

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "2.3.0",
"version": "2.3.1",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -23,7 +23,7 @@
},
"dependencies": {
"@budibase/nano": "10.1.1",
"@budibase/types": "^2.3.0",
"@budibase/types": "^2.3.1",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
@ -59,6 +59,7 @@
"devDependencies": {
"@swc/core": "^1.3.25",
"@swc/jest": "^0.2.24",
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/chance": "1.1.3",
"@types/ioredis": "4.28.0",
"@types/jest": "27.5.1",
@ -76,6 +77,7 @@
"chance": "1.1.8",
"ioredis-mock": "5.8.0",
"jest": "28.1.1",
"jest-serial-runner": "^1.2.1",
"koa": "2.13.4",
"nodemon": "2.0.16",
"pouchdb-adapter-memory": "7.2.2",

View file

@ -9,16 +9,8 @@ import {
jwt as jwtPassport,
local,
authenticated,
auditLog,
tenancy,
authError,
ssoCallbackUrl,
csrf,
internalApi,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
oidc,
google,
} from "../middleware"

View file

@ -2,14 +2,16 @@ require("../../../tests")
const { Writethrough } = require("../writethrough")
const { getDB } = require("../../db")
const tk = require("timekeeper")
const { structures } = require("../../../tests")
const START_DATE = Date.now()
tk.freeze(START_DATE)
const DELAY = 5000
const db = getDB("test")
const db2 = getDB("test2")
const db = getDB(structures.db.id())
const db2 = getDB(structures.db.id())
const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
describe("writethrough", () => {

View file

@ -83,7 +83,14 @@ export class DatabaseImpl implements Database {
throw new Error("DB does not exist")
}
if (!exists) {
await this.nano().db.create(this.name)
try {
await this.nano().db.create(this.name)
} catch (err: any) {
// Handling race conditions
if (err.statusCode !== 412) {
throw err
}
}
}
return this.nano().db.use(this.name)
}
@ -178,7 +185,7 @@ export class DatabaseImpl implements Database {
async destroy() {
try {
await this.nano().db.destroy(this.name)
return await this.nano().db.destroy(this.name)
} catch (err: any) {
// didn't exist, don't worry
if (err.statusCode === 404) {

View file

@ -6,12 +6,6 @@ import { DatabaseImpl } from "../db"
const dbList = new Set()
export function getDB(dbName?: string, opts?: any): Database {
// TODO: once using the test image, need to remove this
if (env.isTest()) {
dbList.add(dbName)
// @ts-ignore
return getPouchDB(dbName, opts)
}
return new DatabaseImpl(dbName, opts)
}

View file

@ -1,19 +1,19 @@
require("../../../tests")
const { getDB } = require("../")
const { structures } = require("../../../tests")
const { getDB } = require("../db")
describe("db", () => {
describe("db", () => {
describe("getDB", () => {
it("returns a db", async () => {
const db = getDB("test")
const dbName = structures.db.id()
const db = getDB(dbName)
expect(db).toBeDefined()
expect(db._adapter).toBe("memory")
expect(db.prefix).toBe("_pouch_")
expect(db.name).toBe("test")
expect(db.name).toBe(dbName)
})
it("uses the custom put function", async () => {
const db = getDB("test")
const db = getDB(structures.db.id())
let doc = { _id: "test" }
await db.put(doc)
doc = await db.get(doc._id)
@ -23,4 +23,3 @@ describe("db", () => {
})
})
})

View file

@ -8,6 +8,7 @@ const {
const { generateAppID, getPlatformUrl, getScopedConfig } = require("../utils")
const tenancy = require("../../tenancy")
const { Config, DEFAULT_TENANT_ID } = require("../../constants")
import { generator } from "../../../tests"
import env from "../../environment"
describe("utils", () => {
@ -66,17 +67,16 @@ describe("utils", () => {
})
})
const DB_URL = "http://dburl.com"
const DEFAULT_URL = "http://localhost:10000"
const ENV_URL = "http://env.com"
const setDbPlatformUrl = async () => {
const setDbPlatformUrl = async (dbUrl: string) => {
const db = tenancy.getGlobalDB()
db.put({
await db.put({
_id: "config_settings",
type: Config.SETTINGS,
config: {
platformUrl: DB_URL,
platformUrl: dbUrl,
},
})
}
@ -119,9 +119,10 @@ describe("getPlatformUrl", () => {
it("gets the platform url from the database", async () => {
await tenancy.doInTenant(null, async () => {
await setDbPlatformUrl()
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const url = await getPlatformUrl()
expect(url).toBe(DB_URL)
expect(url).toBe(dbUrl)
})
})
})
@ -152,7 +153,7 @@ describe("getPlatformUrl", () => {
it("never gets the platform url from the database", async () => {
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
await setDbPlatformUrl()
await setDbPlatformUrl(generator.url())
const url = await getPlatformUrl()
expect(url).toBe(TENANT_AWARE_URL)
})
@ -170,10 +171,11 @@ describe("getScopedConfig", () => {
it("returns the platform url with an existing config", async () => {
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
await setDbPlatformUrl()
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const db = tenancy.getGlobalDB()
const config = await getScopedConfig(db, { type: Config.SETTINGS })
expect(config.platformUrl).toBe(DB_URL)
expect(config.platformUrl).toBe(dbUrl)
})
})

View file

@ -10,7 +10,7 @@ import {
APP_PREFIX,
} from "../constants"
import { getTenantId, getGlobalDB, getGlobalDBName } from "../context"
import { doWithDB, allDbs, directCouchAllDbs } from "./db"
import { doWithDB, directCouchAllDbs } from "./db"
import { getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import * as events from "../events"
@ -262,10 +262,7 @@ export function getStartEndKeyURL(baseKey: any, tenantId?: string) {
*/
export async function getAllDbs(opts = { efficient: false }) {
const efficient = opts && opts.efficient
// specifically for testing we use the pouch package for this
if (env.isTest()) {
return allDbs()
}
let dbs: any[] = []
async function addDbs(queryString?: string) {
const json = await directCouchAllDbs(queryString)

View file

@ -2,7 +2,7 @@ import { newid } from "./utils"
import * as events from "./events"
import { StaticDatabases } from "./db"
import { doWithDB } from "./db"
import { Installation, IdentityType } from "@budibase/types"
import { Installation, IdentityType, Database } from "@budibase/types"
import * as context from "./context"
import semver from "semver"
import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
@ -14,6 +14,24 @@ export const getInstall = async (): Promise<Installation> => {
useTenancy: false,
})
}
async function createInstallDoc(platformDb: Database) {
const install: Installation = {
_id: StaticDatabases.PLATFORM_INFO.docs.install,
installId: newid(),
version: pkg.version,
}
try {
const resp = await platformDb.put(install)
install._rev = resp.rev
return install
} catch (err: any) {
if (err.status === 409) {
return getInstallFromDB()
} else {
throw err
}
}
}
const getInstallFromDB = async (): Promise<Installation> => {
return doWithDB(
@ -26,13 +44,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
)
} catch (e: any) {
if (e.status === 404) {
install = {
_id: StaticDatabases.PLATFORM_INFO.docs.install,
installId: newid(),
version: pkg.version,
}
const resp = await platformDb.put(install)
install._rev = resp.rev
install = await createInstallDoc(platformDb)
} else {
throw e
}

View file

@ -64,7 +64,9 @@ const print = (fn: any, data: any[]) => {
message = message + ` [identityId=${identityId}]`
}
fn(message, data)
if (!process.env.CI) {
fn(message, data)
}
}
const logging = (ctx: any, next: any) => {

View file

@ -3,7 +3,7 @@
exports[`migrations should match snapshot 1`] = `
Object {
"_id": "migrations",
"_rev": "1-a32b0b708e59eeb006ed5e063cfeb36a",
"_rev": "1-2f64479842a0513aa8b97f356b0b9127",
"createdAt": "2020-01-01T00:00:00.000Z",
"test": 1577836800000,
"updatedAt": "2020-01-01T00:00:00.000Z",

View file

@ -1,9 +1,9 @@
require("../../../tests")
const { runMigrations, getMigrationsDoc } = require("../index")
const { getDB } = require("../../db")
const {
StaticDatabases,
} = require("../../constants")
const { getGlobalDBName, getDB } = require("../../db")
const { structures, testEnv } = require("../../../tests")
testEnv.multiTenant()
let db
@ -17,8 +17,11 @@ describe("migrations", () => {
fn: migrationFunction
}]
let tenantId
beforeEach(() => {
db = getDB(StaticDatabases.GLOBAL.name)
tenantId = structures.tenant.id()
db = getDB(getGlobalDBName(tenantId))
})
afterEach(async () => {
@ -27,7 +30,7 @@ describe("migrations", () => {
})
const migrate = () => {
return runMigrations(MIGRATIONS)
return runMigrations(MIGRATIONS, { tenantIds: [tenantId]})
}
it("should run a new migration", async () => {

View file

@ -361,8 +361,8 @@ export const deleteFolder = async (
Prefix: folder,
}
let response: any = await client.listObjects(listParams).promise()
if (response.Contents.length === 0) {
const existingObjectsResponse = await client.listObjects(listParams).promise()
if (existingObjectsResponse.Contents?.length === 0) {
return
}
const deleteParams: any = {
@ -372,13 +372,13 @@ export const deleteFolder = async (
},
}
response.Contents.forEach((content: any) => {
existingObjectsResponse.Contents?.forEach((content: any) => {
deleteParams.Delete.Objects.push({ Key: content.Key })
})
response = await client.deleteObjects(deleteParams).promise()
const deleteResponse = await client.deleteObjects(deleteParams).promise()
// can only empty 1000 items at once
if (response.Deleted.length === 1000) {
if (deleteResponse.Deleted?.length === 1000) {
return deleteFolder(bucketName, folder)
}
}

View file

@ -2,13 +2,14 @@ import { structures } from "../../../tests"
import * as utils from "../../utils"
import * as events from "../../events"
import * as db from "../../db"
import { DEFAULT_TENANT_ID, Header } from "../../constants"
import { Header } from "../../constants"
import { doInTenant } from "../../context"
import { newid } from "../../utils"
describe("utils", () => {
describe("platformLogout", () => {
it("should call platform logout", async () => {
await doInTenant(DEFAULT_TENANT_ID, async () => {
await doInTenant(structures.tenant.id(), async () => {
const ctx = structures.koa.newContext()
await utils.platformLogout({ ctx, userId: "test" })
expect(events.auth.logout).toBeCalledTimes(1)
@ -54,7 +55,7 @@ describe("utils", () => {
const app = structures.apps.app(expected)
// set custom url
const appUrl = "custom-url"
const appUrl = newid()
app.url = `/${appUrl}`
ctx.path = `/app/${appUrl}`

View file

@ -0,0 +1,23 @@
import env from "../src/environment"
import { mocks } from "./utilities"
// must explicitly enable fetch mock
mocks.fetch.enable()
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log are ignored in tests
}
if (!process.env.CI) {
// set a longer timeout in dev for debugging
// 100 seconds
jest.setTimeout(100000)
}

View file

@ -1,28 +1,4 @@
import env from "../src/environment"
import { mocks } from "./utilities"
import { testContainerUtils } from "./utilities"
// must explicitly enable fetch mock
mocks.fetch.enable()
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("LOG_LEVEL", "silent")
env._set("MINIO_URL", "http://localhost")
env._set("MINIO_ACCESS_KEY", "test")
env._set("MINIO_SECRET_KEY", "test")
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log are ignored in tests
}
if (!process.env.CI) {
// set a longer timeout in dev for debugging
// 100 seconds
jest.setTimeout(100000)
}
testContainerUtils.setupEnv(env)

View file

@ -2,6 +2,7 @@ export * as mocks from "./mocks"
export * as structures from "./structures"
export { generator } from "./structures"
export * as testEnv from "./testEnv"
export * as testContainerUtils from "./testContainerUtils"
import * as dbConfig from "./db"
dbConfig.init()

View file

@ -0,0 +1,5 @@
import { newid } from "../../../src/newid"
export function id() {
return `db_${newid()}`
}

View file

@ -8,3 +8,5 @@ export * as apps from "./apps"
export * as koa from "./koa"
export * as licenses from "./licenses"
export * as plugins from "./plugins"
export * as tenant from "./tenants"
export * as db from "./db"

View file

@ -0,0 +1,5 @@
import { newid } from "../../../src/newid"
export function id() {
return `tenant-${newid()}`
}

View file

@ -0,0 +1,42 @@
function getTestContainerSettings(serverName: string, key: string) {
const entry = Object.entries(global).find(
([k]) =>
k.includes(`_${serverName.toUpperCase()}`) &&
k.includes(`_${key.toUpperCase()}__`)
)
if (!entry) {
return null
}
return entry[1]
}
function getCouchConfig() {
const port = getTestContainerSettings("COUCHDB-SERVICE", "PORT_5984")
return {
port,
url: `http://${getTestContainerSettings("COUCHDB-SERVICE", "IP")}:${port}`,
}
}
function getMinioConfig() {
const port = getTestContainerSettings("MINIO-SERVICE", "PORT_9000")
return {
port,
url: `http://${getTestContainerSettings("MINIO-SERVICE", "IP")}:${port}`,
}
}
export function setupEnv(...envs: any[]) {
const configs = [
{ key: "COUCH_DB_PORT", value: getCouchConfig().port },
{ key: "COUCH_DB_URL", value: getCouchConfig().url },
{ key: "MINIO_PORT", value: getMinioConfig().port },
{ key: "MINIO_URL", value: getMinioConfig().url },
]
for (const config of configs.filter(x => x.value !== null)) {
for (const env of envs) {
env._set(config.key, config.value)
}
}
}

File diff suppressed because it is too large.

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "2.3.0",
"version": "2.3.1",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "1.2.1",
"@budibase/string-templates": "^2.3.0",
"@budibase/string-templates": "^2.3.1",
"@spectrum-css/accordion": "3.0.24",
"@spectrum-css/actionbutton": "1.0.1",
"@spectrum-css/actiongroup": "1.0.1",

View file

@ -25,7 +25,7 @@
let open = false
//eslint-disable-next-line
const STRIP_NAME_REGEX = /(?<=\.)(.*?)(?=\ })/g
const STRIP_NAME_REGEX = /(\w+?)(?=\ })/g
// Strips the name out of the value which is {{ env.Variable }} resulting in an array like ["Variable"]
$: hbsValue = String(value)?.match(STRIP_NAME_REGEX) || []

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "2.3.0",
"version": "2.3.1",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -58,10 +58,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^2.3.0",
"@budibase/client": "^2.3.0",
"@budibase/frontend-core": "^2.3.0",
"@budibase/string-templates": "^2.3.0",
"@budibase/bbui": "^2.3.1",
"@budibase/client": "^2.3.1",
"@budibase/frontend-core": "^2.3.1",
"@budibase/string-templates": "^2.3.1",
"@fortawesome/fontawesome-svg-core": "^6.2.1",
"@fortawesome/free-brands-svg-icons": "^6.2.1",
"@fortawesome/free-solid-svg-icons": "^6.2.1",

View file

@ -70,7 +70,7 @@
return Number(value)
}
if (type === "options") {
return [value]
return value
}
if (type === "array") {
if (Array.isArray(value)) {

View file

@ -1,6 +1,7 @@
import { writable } from "svelte/store"
import { writable, get } from "svelte/store"
import { API } from "api"
import { Constants } from "@budibase/frontend-core"
import { licensing } from "stores/portal"
export function createEnvironmentStore() {
const { subscribe, update } = writable({
@ -17,12 +18,14 @@ export function createEnvironmentStore() {
}
async function loadVariables() {
const envVars = await API.fetchEnvironmentVariables()
const mappedVars = envVars.variables.map(name => ({ name }))
update(store => {
store.variables = mappedVars
return store
})
if (get(licensing).environmentVariablesEnabled) {
const envVars = await API.fetchEnvironmentVariables()
const mappedVars = envVars.variables.map(name => ({ name }))
update(store => {
store.variables = mappedVars
return store
})
}
}
async function createVariable(data) {

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "2.3.0",
"version": "2.3.1",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@ -26,9 +26,9 @@
"outputPath": "build"
},
"dependencies": {
"@budibase/backend-core": "^2.3.0",
"@budibase/string-templates": "^2.3.0",
"@budibase/types": "^2.3.0",
"@budibase/backend-core": "^2.3.1",
"@budibase/string-templates": "^2.3.1",
"@budibase/types": "^2.3.1",
"axios": "0.21.2",
"chalk": "4.1.0",
"cli-progress": "3.11.2",

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "2.3.0",
"version": "2.3.1",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^2.3.0",
"@budibase/frontend-core": "^2.3.0",
"@budibase/string-templates": "^2.3.0",
"@budibase/bbui": "^2.3.1",
"@budibase/frontend-core": "^2.3.1",
"@budibase/string-templates": "^2.3.1",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View file

@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "2.3.0",
"version": "2.3.1",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "^2.3.0",
"@budibase/bbui": "^2.3.1",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View file

@ -1,6 +1,6 @@
{
"name": "@budibase/sdk",
"version": "2.3.0",
"version": "2.3.1",
"description": "Budibase Public API SDK",
"author": "Budibase",
"license": "MPL-2.0",

View file

@ -34,7 +34,7 @@ module AwsMock {
// @ts-ignore
this.listObjects = jest.fn(
response({
Contents: {},
Contents: [],
})
)

View file

@ -0,0 +1,8 @@
const { join } = require("path")
require("dotenv").config({
path: join(__dirname, "..", "..", "hosting", ".env"),
})
const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")
module.exports = jestTestcontainersConfigGenerator()

View file

@ -1,16 +1,13 @@
import { Config } from "@jest/types"
import * as fs from "fs"
const config: Config.InitialOptions = {
testEnvironment: "node",
import * as fs from "fs"
const preset = require("ts-jest/jest-preset")
const baseConfig: Config.InitialProjectOptions = {
...preset,
preset: "@trendyol/jest-testcontainers",
setupFiles: ["./src/tests/jestEnv.ts"],
setupFilesAfterEnv: ["./src/tests/jestSetup.ts"],
collectCoverageFrom: [
"src/**/*.{js,ts}",
// The use of coverage with couchdb view functions breaks tests
"!src/db/views/staticViews.*",
],
coverageReporters: ["lcov", "json", "clover"],
transform: {
"^.+\\.ts?$": "@swc/jest",
},
@ -18,19 +15,39 @@ const config: Config.InitialOptions = {
if (!process.env.CI) {
// use sources when not in CI
config.moduleNameMapper = {
baseConfig.moduleNameMapper = {
"@budibase/backend-core/(.*)": "<rootDir>/../backend-core/$1",
"@budibase/backend-core": "<rootDir>/../backend-core/src",
"@budibase/types": "<rootDir>/../types/src",
"^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
}
// add pro sources if they exist
if (fs.existsSync("../../../budibase-pro")) {
config.moduleNameMapper["@budibase/pro"] =
baseConfig.moduleNameMapper["@budibase/pro"] =
"<rootDir>/../../../budibase-pro/packages/pro/src"
}
} else {
console.log("Running tests with compiled dependency sources")
}
const config: Config.InitialOptions = {
projects: [
{
...baseConfig,
displayName: "sequential test",
testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
runner: "jest-serial-runner",
},
{
...baseConfig,
testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
},
],
collectCoverageFrom: [
"src/**/*.{js,ts}",
// The use of coverage with couchdb view functions breaks tests
"!src/db/views/staticViews.*",
],
coverageReporters: ["lcov", "json", "clover"],
}
export default config

View file

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "2.3.0",
"version": "2.3.1",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -43,11 +43,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "^2.3.0",
"@budibase/client": "^2.3.0",
"@budibase/pro": "2.3.0",
"@budibase/string-templates": "^2.3.0",
"@budibase/types": "^2.3.0",
"@budibase/backend-core": "^2.3.1",
"@budibase/client": "^2.3.1",
"@budibase/pro": "2.3.1",
"@budibase/string-templates": "^2.3.1",
"@budibase/types": "^2.3.1",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
@ -125,6 +125,7 @@
"@jest/test-sequencer": "24.9.0",
"@swc/core": "^1.3.25",
"@swc/jest": "^0.2.24",
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/apidoc": "0.50.0",
"@types/bson": "4.2.0",
"@types/global-agent": "2.1.1",
@ -151,6 +152,7 @@
"is-wsl": "2.2.0",
"jest": "28.1.1",
"jest-openapi": "0.14.2",
"jest-serial-runner": "^1.2.1",
"nodemon": "2.0.15",
"openapi-types": "9.3.1",
"openapi-typescript": "5.2.0",

View file

@ -41,7 +41,7 @@ const datasets = {
describe("Rest Importer", () => {
const config = new TestConfig(false)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

View file

@ -7,7 +7,7 @@ Array [
"entities": Array [
Object {
"_id": "ta_users",
"_rev": "1-6f4013e796887f1771bf7837598d87e7",
"_rev": "1-2375e1bc58aeec664dc1b1f04ad43e44",
"createdAt": "2020-01-01T00:00:00.000Z",
"name": "Users",
"primaryDisplay": "email",

View file

@ -10,8 +10,11 @@ describe("/static", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
app = await config.init()
})
beforeEach(()=>{
jest.clearAllMocks()
})

View file

@ -7,7 +7,7 @@ describe("/api/keys", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

View file

@ -14,18 +14,22 @@ jest.mock("../../../utilities/redis", () => ({
import { clearAllApps, checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
import { events } from "@budibase/backend-core"
import { events, utils } from "@budibase/backend-core"
import env from "../../../environment"
jest.setTimeout(15000)
describe("/applications", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
beforeEach(async () => {
await clearAllApps()
beforeAll(async () => {
await config.init()
})
beforeEach(async () => {
jest.clearAllMocks()
})
@ -33,7 +37,7 @@ describe("/applications", () => {
it("creates empty app", async () => {
const res = await request
.post("/api/applications")
.field("name", "My App")
.field("name", utils.newid())
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
@ -44,7 +48,7 @@ describe("/applications", () => {
it("creates app from template", async () => {
const res = await request
.post("/api/applications")
.field("name", "My App")
.field("name", utils.newid())
.field("useTemplate", "true")
.field("templateKey", "test")
.field("templateString", "{}") // override the file download
@ -59,7 +63,7 @@ describe("/applications", () => {
it("creates app from file", async () => {
const res = await request
.post("/api/applications")
.field("name", "My App")
.field("name", utils.newid())
.field("useTemplate", "true")
.set(config.defaultHeaders())
.attach("templateFile", "src/api/routes/tests/data/export.txt")
@ -106,6 +110,11 @@ describe("/applications", () => {
})
describe("fetch", () => {
beforeEach(async () => {
// Clean all apps but the one from config
await clearAllApps(config.getTenantId(), [config.getAppId()!])
})
it("lists all applications", async () => {
await config.createApp("app1")
await config.createApp("app2")
@ -266,6 +275,11 @@ describe("/applications", () => {
})
describe("unpublish", () => {
beforeEach(async () => {
// We want to republish as the unpublish will delete the prod app
await config.publish()
})
it("should unpublish app with dev app ID", async () => {
const appId = config.getAppId()
await request

View file

@ -7,7 +7,7 @@ describe("/authenticate", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
@ -18,7 +18,7 @@ describe("/authenticate", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body._id).toEqual(generateUserMetadataID("us_uuid1"))
expect(res.body._id).toEqual(generateUserMetadataID(config.user._id))
})
})
})

View file

@ -10,12 +10,16 @@ const MAX_RETRIES = 4
const { TRIGGER_DEFINITIONS, ACTION_DEFINITIONS } = require("../../../automations")
const { events } = require("@budibase/backend-core")
jest.setTimeout(30000)
describe("/automations", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
// For some reason this cannot be a beforeAll or the test "tests the automation successfully" fails
beforeEach(async () => {
await config.init()
})
@ -305,7 +309,7 @@ describe("/automations", () => {
.expect('Content-Type', /json/)
.expect(200)
expect(res.body[0]).toEqual(expect.objectContaining(autoConfig))
expect(res.body[0]).toEqual(expect.objectContaining(autoConfig))
})
it("should apply authorization to endpoint", async () => {

View file

@ -1,17 +1,8 @@
jest.mock("@budibase/backend-core", () => {
const core = jest.requireActual("@budibase/backend-core")
return {
...core,
objectStore: {
budibaseTempDir: core.objectStore.budibaseTempDir,
},
}
})
import * as setup from "./utilities"
import { events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
describe("/backups", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@ -44,9 +35,9 @@ describe("/backups", () => {
describe("calculateBackupStats", () => {
it("should be able to calculate the backup statistics", async () => {
config.createAutomation()
config.createScreen()
let res = await sdk.backups.calculateBackupStats(config.getAppId())
await config.createAutomation()
await config.createScreen()
let res = await sdk.backups.calculateBackupStats(config.getAppId()!)
expect(res.automations).toEqual(1)
expect(res.datasources).toEqual(1)
expect(res.screens).toEqual(1)

View file

@ -1,14 +1,20 @@
import { db as dbCore } from "@budibase/backend-core"
import { AppStatus } from "../../../db/utils"
import * as setup from "./utilities"
import { wipeDb } from "./utilities/TestFunctions"
describe("/cloud", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
beforeAll(() => {
// Importing is only allowed in self hosted environments
config.modeSelf()
})
beforeEach(async () => {
await config.init()
})
@ -22,19 +28,7 @@ describe("/cloud", () => {
it("should be able to import apps", async () => {
// first we need to delete any existing apps on the system so it looks clean otherwise the
// import will not run
await request
.post(
`/api/applications/${dbCore.getProdAppID(
config.getAppId()
)}/unpublish`
)
.set(config.defaultHeaders())
.expect(204)
await request
.delete(`/api/applications/${config.getAppId()}`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
await wipeDb()
// get a count of apps before the import
const preImportApps = await request

View file

@ -7,7 +7,7 @@ describe("/component", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

View file

@ -5,6 +5,10 @@ import { checkCacheForDynamicVariable } from "../../../threads/utils"
import { context, events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
tk.freeze(mocks.date.MOCK_DATE)
let { basicDatasource } = setup.structures
const pg = require("pg")
@ -15,11 +19,13 @@ describe("/datasources", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
async function setupTest() {
await config.init()
datasource = await config.createDatasource()
jest.clearAllMocks()
})
}
beforeAll(setupTest)
describe("create", () => {
it("should create a new datasource", async () => {
@ -56,7 +62,14 @@ describe("/datasources", () => {
datasource: any,
fields: { path: string; queryString: string }
) {
return config.previewQuery(request, config, datasource, fields)
return config.previewQuery(
request,
config,
datasource,
fields,
undefined,
""
)
}
it("should invalidate changed or removed variables", async () => {
@ -91,6 +104,8 @@ describe("/datasources", () => {
})
describe("fetch", () => {
beforeAll(setupTest)
it("returns all the datasources from the server", async () => {
const res = await request
.get(`/api/datasources`)
@ -159,6 +174,8 @@ describe("/datasources", () => {
})
describe("destroy", () => {
beforeAll(setupTest)
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.createQuery()

View file

@ -8,7 +8,7 @@ describe("/dev", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
jest.clearAllMocks()
})

View file

@ -7,7 +7,7 @@ describe("/integrations", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

View file

@ -10,7 +10,7 @@ describe("/layouts", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
layout = await config.createLayout()
jest.clearAllMocks()

View file

@ -9,7 +9,7 @@ describe("/metadata", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
automation = await config.createAutomation()
})

View file

@ -7,7 +7,7 @@ describe("run misc tests", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

View file

@ -15,8 +15,11 @@ describe("/permission", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
beforeEach(async () => {
table = await config.createTable()
row = await config.createRow()
perms = await config.addPermission(STD_ROLE_ID, table._id)

View file

@ -1,3 +1,6 @@
const tk = require( "timekeeper")
tk.freeze(Date.now())
// Mock out postgres for this
jest.mock("pg")
jest.mock("node-fetch")
@ -27,10 +30,15 @@ describe("/queries", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
const setupTest = async()=>{
await config.init()
datasource = await config.createDatasource()
query = await config.createQuery()
}
beforeAll(async () => {
await setupTest()
})
async function createInvalidIntegration() {
@ -101,6 +109,10 @@ describe("/queries", () => {
})
describe("fetch", () => {
beforeEach(async() => {
await setupTest()
})
it("returns all the queries from the server", async () => {
const res = await request
.get(`/api/queries`)
@ -178,6 +190,10 @@ describe("/queries", () => {
})
describe("destroy", () => {
beforeEach(async() => {
await setupTest()
})
it("deletes a query and returns a success message", async () => {
await request
.delete(`/api/queries/${query._id}/${query._rev}`)
@ -239,6 +255,10 @@ describe("/queries", () => {
})
describe("execute", () => {
beforeEach(async() => {
await setupTest()
})
it("should be able to execute the query", async () => {
const res = await request
.post(`/api/queries/${query._id}`)

View file

@ -10,7 +10,7 @@ describe("/roles", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
@ -60,6 +60,11 @@ describe("/roles", () => {
})
describe("fetch", () => {
beforeAll(async () => {
// Recreate the app
await config.init()
})
it("should list custom roles, plus 2 default roles", async () => {
const customRole = await config.createRole()

View file

@ -16,7 +16,7 @@ describe("/routing", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
screen = basicScreen()
screen.routing.route = route

View file

@ -1,3 +1,8 @@
const tk = require( "timekeeper")
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)
const { outputProcessing } = require("../../../utilities/rowProcessor")
const setup = require("./utilities")
const { basicRow } = setup.structures
@ -20,8 +25,11 @@ describe("/rows", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
beforeEach(async()=>{
table = await config.createTable()
row = basicRow(table._id)
})
@ -111,8 +119,8 @@ describe("/rows", () => {
_id: existing._id,
_rev: existing._rev,
type: "row",
createdAt: "2020-01-01T00:00:00.000Z",
updatedAt: "2020-01-01T00:00:00.000Z",
createdAt: timestamp,
updatedAt: timestamp,
})
await assertQueryUsage(queryUsage + 1)
})

View file

@ -10,7 +10,7 @@ describe("/screens", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
screen = await config.createScreen()
})

View file

@ -8,7 +8,8 @@ describe("/static", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
config.modeSelf()
app = await config.init()
})

View file

@ -10,7 +10,7 @@ describe("/tables", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
const app = await config.init()
appId = app.appId
})

View file

@ -6,7 +6,7 @@ describe("/templates", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

View file

@ -3,6 +3,8 @@ const { checkPermissionsEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
const { BUILTIN_ROLE_IDS } = roles
jest.setTimeout(30000)
jest.mock("../../../utilities/workerRequests", () => ({
getGlobalUsers: jest.fn(() => {
return {}
@ -19,6 +21,7 @@ describe("/users", () => {
afterAll(setup.afterAll)
// For some reason this cannot be a beforeAll or the test "should be able to update the user" fails
beforeEach(async () => {
await config.init()
})

View file

@ -4,6 +4,8 @@ import { AppStatus } from "../../../../db/utils"
import { roles, tenancy, context } from "@budibase/backend-core"
import { TENANT_ID } from "../../../../tests/utilities/structures"
import env from "../../../../environment"
import { db } from "@budibase/backend-core"
import Nano from "@budibase/nano"
class Request {
appId: any
@ -30,7 +32,10 @@ export const getAllTableRows = async (config: any) => {
return req.body
}
export const clearAllApps = async (tenantId = TENANT_ID) => {
export const clearAllApps = async (
tenantId = TENANT_ID,
exceptions: Array<string> = []
) => {
await tenancy.doInTenant(tenantId, async () => {
const req: any = { query: { status: AppStatus.DEV }, user: { tenantId } }
await appController.fetch(req)
@ -38,7 +43,7 @@ export const clearAllApps = async (tenantId = TENANT_ID) => {
if (!apps || apps.length <= 0) {
return
}
for (let app of apps) {
for (let app of apps.filter((x: any) => !exceptions.includes(x.appId))) {
const { appId } = app
const req = new Request(null, { appId })
await runRequest(appId, appController.destroy, req)
@ -55,6 +60,24 @@ export const clearAllAutomations = async (config: any) => {
}
}
export const wipeDb = async () => {
const couchInfo = db.getCouchInfo()
const nano = Nano({
url: couchInfo.url,
requestDefaults: {
headers: {
Authorization: couchInfo.cookie,
},
},
parseUrl: false,
})
let dbs
do {
dbs = await nano.db.list()
await Promise.all(dbs.map(x => nano.db.destroy(x)))
} while (dbs.length)
}
export const createRequest = (
request: any,
method: any,

View file

@ -1,5 +1,6 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration"
import env from "../../../../environment"
import supertest from "supertest"
export * as structures from "../../../../tests/utilities/structures"
function user() {
@ -44,7 +45,8 @@ export function delay(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms))
}
let request: any, config: any
let request: supertest.SuperTest<supertest.Test> | undefined | null,
config: TestConfig | null
export function beforeAll() {
config = new TestConfig()
@ -65,14 +67,14 @@ export function getRequest() {
if (!request) {
beforeAll()
}
return request
return request!
}
export function getConfig() {
if (!config) {
beforeAll()
}
return config
return config!
}
export async function switchToSelfHosted(func: any) {

View file

@ -28,8 +28,11 @@ describe("/views", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
beforeEach(async() => {
table = await config.createTable(priceTable())
})

View file

@ -9,7 +9,8 @@ describe("/webhooks", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
const setupTest = async () => {
config.modeSelf()
await config.init()
const autoConfig = basicAutomation()
autoConfig.definition.trigger = {
@ -18,7 +19,9 @@ describe("/webhooks", () => {
}
await config.createAutomation(autoConfig)
webhook = await config.createWebhook()
})
}
beforeAll(setupTest)
describe("create", () => {
it("should create a webhook successfully", async () => {
@ -44,6 +47,8 @@ describe("/webhooks", () => {
})
describe("fetch", () => {
beforeAll(setupTest)
it("returns the correct routing for basic user", async () => {
const res = await request
.get(`/api/webhooks`)
@ -64,6 +69,8 @@ describe("/webhooks", () => {
})
describe("delete", () => {
beforeAll(setupTest)
it("should successfully delete", async () => {
const res = await request
.delete(`/api/webhooks/${webhook._id}/${webhook._rev}`)
@ -84,6 +91,8 @@ describe("/webhooks", () => {
})
describe("build schema", () => {
beforeAll(setupTest)
it("should allow building a schema", async () => {
const res = await request
.post(`/api/webhooks/schema/${config.getAppId()}/${webhook._id}`)

View file

@ -62,6 +62,7 @@ initialiseWebsockets(server)
let shuttingDown = false,
errCode = 0
server.on("close", async () => {
// already in process
if (shuttingDown) {
@ -71,7 +72,7 @@ server.on("close", async () => {
console.log("Server Closed")
await automations.shutdown()
await redis.shutdown()
await events.shutdown()
events.shutdown()
await Thread.shutdown()
api.shutdown()
if (!env.isTest()) {

View file

@ -21,7 +21,7 @@ const setup = require("./utilities")
describe("Run through some parts of the automations system", () => {
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await automation.init()
await config.init()
})
@ -29,9 +29,9 @@ describe("Run through some parts of the automations system", () => {
afterAll(setup.afterAll)
it("should be able to init in builder", async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1, appId: "app_123" })
await wait(100)
expect(thread.execute).toHaveBeenCalled()
await triggers.externalTrigger(basicAutomation(), { a: 1, appId: config.appId })
await wait(100)
expect(thread.execute).toHaveBeenCalled()
})
it("should check coercion", async () => {

View file

@ -3,7 +3,7 @@ const setup = require("./utilities")
describe("test the bash action", () => {
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
afterAll(setup.afterAll)

View file

@ -7,7 +7,7 @@ describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
inputs = {
username: "joe_bloggs",

View file

@ -1,10 +1,9 @@
const setup = require("./utilities")
describe("test the execute query action", () => {
let datasource
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
await config.createDatasource()
@ -41,7 +40,7 @@ describe("test the execute query action", () => {
query: { queryId: "wrong_id" }
}
)
expect(res.response).toEqual('{"status":404,"name":"not_found","message":"missing","reason":"missing"}')
expect(res.response).toEqual('Error: missing')
expect(res.success).toEqual(false)
})

View file

@ -3,7 +3,7 @@ const setup = require("./utilities")
describe("test the execute script action", () => {
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
afterAll(setup.afterAll)

View file

@ -7,7 +7,7 @@ describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
inputs = {
requestMethod: "POST",

View file

@ -36,7 +36,7 @@ const setup = require("./utilities")
describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

Some files were not shown because too many files have changed in this diff.