
Merge branch 'master' into grid-inline-searching

Andrew Kingston, 2023-10-24 13:30:07 +01:00 (committed via GitHub)
commit 7e3d59a582
36 changed files with 157 additions and 183 deletions

@@ -18,8 +18,7 @@ env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }}
jobs:
lint:

.github/workflows/deploy-qa.yml (new file)

@@ -0,0 +1,20 @@
name: Deploy QA
on:
push:
branches:
- master
workflow_dispatch:
jobs:
trigger-deploy-to-qa-env:
runs-on: ubuntu-latest
steps:
- uses: peter-evans/repository-dispatch@v2
env:
PAYLOAD_VERSION: ${{ github.sha }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event-type: budicloud-qa-deploy
token: ${{ secrets.GH_ACCESS_TOKEN }}

@@ -123,6 +123,7 @@ jobs:
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy

@@ -54,6 +54,7 @@ jobs:
push: true
pull: true
platforms: linux/amd64,linux/arm64
build-args: BUDIBASE_VERSION=0.0.0+test
tags: budibase/budibase-test:test
file: ./hosting/single/Dockerfile.v2
cache-from: type=registry,ref=budibase/budibase-test:test
@@ -64,6 +65,8 @@ jobs:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
build-args: |
TARGETBUILD=aas
BUDIBASE_VERSION=0.0.0+test
tags: budibase/budibase-test:aas
file: ./hosting/single/Dockerfile.v2

@@ -7,6 +7,8 @@ services:
build:
context: ..
dockerfile: packages/server/Dockerfile.v2
args:
- BUDIBASE_VERSION=0.0.0+dev-docker
container_name: build-bbapps
environment:
SELF_HOSTED: 1
@@ -30,13 +32,13 @@ services:
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
build:
context: ..
dockerfile: packages/worker/Dockerfile.v2
args:
- BUDIBASE_VERSION=0.0.0+dev-docker
container_name: build-bbworker
environment:
SELF_HOSTED: 1

@@ -26,6 +26,7 @@ RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
# We will never want to sync pro, but the script is still required
RUN echo '' > scripts/syncProPackage.js
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
RUN ./scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
# copy the actual code
@@ -117,6 +118,10 @@ EXPOSE 443
EXPOSE 2222
VOLUME /data
ARG BUDIBASE_VERSION
# Ensuring the version argument is sent
RUN test -n "$BUDIBASE_VERSION"
ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"

@@ -1,5 +1,5 @@
{
"version": "2.11.39",
"version": "2.11.43",
"npmClient": "yarn",
"packages": [
"packages/*"

@@ -3,14 +3,16 @@
"default": {
"runner": "nx-cloud",
"options": {
"cacheableOperations": ["build", "test", "check:types"],
"accessToken": "MmM4OGYxNzItMDBlYy00ZmE3LTk4MTYtNmJhYWMyZjBjZTUyfHJlYWQ="
"cacheableOperations": ["build", "test", "check:types"]
}
}
},
"targetDefaults": {
"build": {
"inputs": ["{workspaceRoot}/scripts/build.js"]
"inputs": [
"{workspaceRoot}/scripts/build.js",
"{workspaceRoot}/lerna.json"
]
}
}
}

@@ -119,8 +119,8 @@ export class Writethrough {
this.writeRateMs = writeRateMs
}
async put(doc: any, writeRateMs: number = this.writeRateMs) {
return put(this.db, doc, writeRateMs)
async put(doc: any) {
return put(this.db, doc, this.writeRateMs)
}
async get(id: string) {

@@ -75,12 +75,12 @@ function getPackageJsonFields(): {
const content = readFileSync(packageJsonFile!, "utf-8")
const parsedContent = JSON.parse(content)
return {
VERSION: parsedContent.version,
VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,
SERVICE_NAME: parsedContent.name,
}
} catch {
// throwing an error here is confusing/causes backend-core to be hard to import
return { VERSION: "", SERVICE_NAME: "" }
return { VERSION: process.env.BUDIBASE_VERSION || "", SERVICE_NAME: "" }
}
}

@@ -25,17 +25,12 @@ import {
import {
getAccountHolderFromUserIds,
isAdmin,
isCreator,
validateUniqueUser,
} from "./utils"
import { searchExistingEmails } from "./lookup"
import { hash } from "../utils"
type QuotaUpdateFn = (
change: number,
creatorsChange: number,
cb?: () => Promise<any>
) => Promise<any>
type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
type FeatureFn = () => Promise<Boolean>
type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
@@ -164,14 +159,14 @@
}
}
static async getUsersByAppAccess(appId?: string) {
const opts: any = {
static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
const params: any = {
include_docs: true,
limit: 50,
limit: opts.limit || 50,
}
let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
appId,
opts
opts.appId,
params
)
return response
}
@@ -250,8 +245,7 @@
}
const change = dbUser ? 0 : 1 // no change if there is existing user
const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
return UserDB.quotas.addUsers(change, creatorsChange, async () => {
return UserDB.quotas.addUsers(change, async () => {
await validateUniqueUser(email, tenantId)
let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@@ -313,7 +307,6 @@
let usersToSave: any[] = []
let newUsers: any[] = []
let newCreators: any[] = []
const emails = newUsersRequested.map((user: User) => user.email)
const existingEmails = await searchExistingEmails(emails)
@@ -334,66 +327,59 @@
}
newUser.userGroups = groups
newUsers.push(newUser)
if (isCreator(newUser)) {
newCreators.push(newUser)
}
}
const account = await accountSdk.getAccountByTenantId(tenantId)
return UserDB.quotas.addUsers(
newUsers.length,
newCreators.length,
async () => {
// create the promises array that will be called by bulkDocs
newUsers.forEach((user: any) => {
usersToSave.push(
UserDB.buildUser(
user,
{
hashPassword: true,
requirePassword: user.requirePassword,
},
tenantId,
undefined, // no dbUser
account
)
return UserDB.quotas.addUsers(newUsers.length, async () => {
// create the promises array that will be called by bulkDocs
newUsers.forEach((user: any) => {
usersToSave.push(
UserDB.buildUser(
user,
{
hashPassword: true,
requirePassword: user.requirePassword,
},
tenantId,
undefined, // no dbUser
account
)
})
)
})
const usersToBulkSave = await Promise.all(usersToSave)
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
const usersToBulkSave = await Promise.all(usersToSave)
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
// Post-processing of bulk added users, e.g. events and cache operations
for (const user of usersToBulkSave) {
// TODO: Refactor to bulk insert users into the info db
// instead of relying on looping tenant creation
await platform.users.addUser(tenantId, user._id, user.email)
await eventHelpers.handleSaveEvents(user, undefined)
}
const saved = usersToBulkSave.map(user => {
return {
_id: user._id,
email: user.email,
}
})
// now update the groups
if (Array.isArray(saved) && groups) {
const groupPromises = []
const createdUserIds = saved.map(user => user._id)
for (let groupId of groups) {
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
}
await Promise.all(groupPromises)
}
return {
successful: saved,
unsuccessful,
}
// Post-processing of bulk added users, e.g. events and cache operations
for (const user of usersToBulkSave) {
// TODO: Refactor to bulk insert users into the info db
// instead of relying on looping tenant creation
await platform.users.addUser(tenantId, user._id, user.email)
await eventHelpers.handleSaveEvents(user, undefined)
}
)
const saved = usersToBulkSave.map(user => {
return {
_id: user._id,
email: user.email,
}
})
// now update the groups
if (Array.isArray(saved) && groups) {
const groupPromises = []
const createdUserIds = saved.map(user => user._id)
for (let groupId of groups) {
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
}
await Promise.all(groupPromises)
}
return {
successful: saved,
unsuccessful,
}
})
}
static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@@ -433,12 +419,11 @@
_deleted: true,
}))
const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
const creatorsToDelete = usersToDelete.filter(isCreator)
await UserDB.quotas.removeUsers(toDelete.length)
for (let user of usersToDelete) {
await bulkDeleteProcessing(user)
}
await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
// Build Response
// index users by id
@@ -487,8 +472,7 @@
await db.remove(userId, dbUser._rev)
const creatorsToDelete = isCreator(dbUser) ? 1 : 0
await UserDB.quotas.removeUsers(1, creatorsToDelete)
await UserDB.quotas.removeUsers(1)
await eventHelpers.handleDeleteEvents(dbUser)
await cache.user.invalidateUser(userId)
await sessions.invalidateSessions(userId, { reason: "deletion" })

@@ -14,11 +14,12 @@ import {
} from "../db"
import {
BulkDocsResponse,
ContextUser,
SearchQuery,
SearchQueryOperators,
SearchUsersRequest,
User,
ContextUser,
DatabaseQueryOpts,
} from "@budibase/types"
import { getGlobalDB } from "../context"
import * as context from "../context"
@@ -241,12 +242,14 @@ export const paginatedUsers = async ({
bookmark,
query,
appId,
limit,
}: SearchUsersRequest = {}) => {
const db = getGlobalDB()
const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
// get one extra document, to have the next page
const opts: any = {
const opts: DatabaseQueryOpts = {
include_docs: true,
limit: PAGE_LIMIT + 1,
limit: pageLimit,
}
// add a startkey if the page was specified (anchor)
if (bookmark) {
@@ -269,7 +272,7 @@
const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc)
}
return pagination(userList, PAGE_LIMIT, {
return pagination(userList, pageLimit, {
paginate: true,
property,
getKey,

@@ -1,54 +0,0 @@
const _ = require('lodash/fp')
const {structures} = require("../../../tests")
jest.mock("../../../src/context")
jest.mock("../../../src/db")
const context = require("../../../src/context")
const db = require("../../../src/db")
const {getCreatorCount} = require('../../../src/users/users')
describe("Users", () => {
let getGlobalDBMock
let getGlobalUserParamsMock
let paginationMock
beforeEach(() => {
jest.resetAllMocks()
getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
paginationMock = jest.spyOn(db, "pagination")
})
it("Retrieves the number of creators", async () => {
const getUsers = (offset, limit, creators = false) => {
const range = _.range(offset, limit)
const opts = creators ? {builder: {global: true}} : undefined
return range.map(() => structures.users.user(opts))
}
const page1Data = getUsers(0, 8)
const page2Data = getUsers(8, 12, true)
getGlobalDBMock.mockImplementation(() => ({
name : "fake-db",
allDocs: () => ({
rows: [...page1Data, ...page2Data]
})
}))
paginationMock.mockImplementationOnce(() => ({
data: page1Data,
hasNextPage: true,
nextPage: "1"
}))
paginationMock.mockImplementation(() => ({
data: page2Data,
hasNextPage: false,
nextPage: undefined
}))
const creatorsCount = await getCreatorCount()
expect(creatorsCount).toBe(4)
expect(paginationMock).toHaveBeenCalledTimes(2)
})
})

@@ -123,10 +123,6 @@ export function customer(): Customer {
export function subscription(): Subscription {
return {
amount: 10000,
amounts: {
user: 10000,
creator: 0,
},
cancelAt: undefined,
currency: "usd",
currentPeriodEnd: 0,
@@ -135,10 +131,6 @@ export function subscription(): Subscription {
duration: PriceDuration.MONTHLY,
pastDueAt: undefined,
quantity: 0,
quantities: {
user: 0,
creator: 0,
},
status: "active",
}
}

@@ -114,8 +114,9 @@
query: {
appId: query || !filterByAppAccess ? null : prodAppId,
email: query,
paginated: query || !filterByAppAccess ? null : false,
},
limit: 50,
paginate: query || !filterByAppAccess ? null : false,
})
await usersFetch.refresh()

@@ -81,9 +81,9 @@ export function createDatasourcesStore() {
}))
}
const updateDatasource = response => {
const updateDatasource = (response, { ignoreErrors } = {}) => {
const { datasource, errors } = response
if (errors && Object.keys(errors).length > 0) {
if (!ignoreErrors && errors && Object.keys(errors).length > 0) {
throw new TableImportError(errors)
}
replaceDatasource(datasource._id, datasource)
@@ -137,7 +137,7 @@ export function createDatasourcesStore() {
fetchSchema: integration.plus,
})
return updateDatasource(response)
return updateDatasource(response, { ignoreErrors: true })
}
const update = async ({ integration, datasource }) => {

@@ -1 +1 @@
Subproject commit 570d14aa44aa88f4d053856322210f0008ba5c76
Subproject commit d24c0dc3a30014cbe61860252aa48104cad36376

@@ -67,6 +67,11 @@ COPY packages/server/docker_run.sh .
COPY packages/server/builder/ builder/
COPY packages/server/client/ client/
ARG BUDIBASE_VERSION
# Ensuring the version argument is sent
RUN test -n "$BUDIBASE_VERSION"
ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
EXPOSE 4001
# have to add node environment production after install

@@ -18,7 +18,7 @@
"test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",

@@ -47,6 +47,7 @@ async function init() {
TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
HTTP_MIGRATIONS: "0",
HTTP_LOGGING: "0",
VERSION: "0.0.0+local",
}
let envFile = ""
Object.keys(envFileJson).forEach(key => {

@@ -41,7 +41,7 @@ describe("/component", () => {
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toEqual({
budibaseVersion: "0.0.0",
budibaseVersion: "0.0.0+jest",
cpuArch: "arm64",
cpuCores: 1,
cpuInfo: "test",

@@ -1,6 +1,6 @@
const setup = require("./utilities")
const { events } = require("@budibase/backend-core")
const version = require("../../../../package.json").version
describe("/dev", () => {
let request = setup.getRequest()
@ -32,9 +32,9 @@ describe("/dev", () => {
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.version).toBe(version)
expect(res.body.version).toBe('0.0.0+jest')
expect(events.installation.versionChecked).toBeCalledTimes(1)
expect(events.installation.versionChecked).toBeCalledWith(version)
expect(events.installation.versionChecked).toBeCalledWith('0.0.0+jest')
})
})
})

@@ -9,3 +9,4 @@ process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error"
process.env.MOCK_REDIS = "1"
process.env.PLATFORM_URL = "http://localhost:10000"
process.env.REDIS_PASSWORD = "budibase"
process.env.BUDIBASE_VERSION = "0.0.0+jest"

@@ -55,6 +55,7 @@ export interface SearchUsersRequest {
bookmark?: string
query?: SearchQuery
appId?: string
limit?: number
paginate?: boolean
}

@@ -1,8 +1,5 @@
export enum FeatureFlag {
LICENSING = "LICENSING",
// Feature IDs in Posthog
PER_CREATOR_PER_USER_PRICE = "18873",
PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
}
export interface TenantFeatureFlags {

@@ -5,17 +5,10 @@ export interface Customer {
currency: string | null | undefined
}
export interface SubscriptionItems {
user: number | undefined
creator: number | undefined
}
export interface Subscription {
amount: number
amounts: SubscriptionItems | undefined
currency: string
quantity: number
quantities: SubscriptionItems | undefined
duration: PriceDuration
cancelAt: number | null | undefined
currentPeriodStart: number

@@ -4,9 +4,7 @@ export enum PlanType {
PRO = "pro",
/** @deprecated */
TEAM = "team",
/** @deprecated */
PREMIUM = "premium",
PREMIUM_PLUS = "premium_plus",
BUSINESS = "business",
ENTERPRISE = "enterprise",
}
@@ -28,12 +26,10 @@ export interface AvailablePrice {
currency: string
duration: PriceDuration
priceId: string
type?: string
}
export enum PlanModel {
PER_USER = "perUser",
PER_CREATOR_PER_USER = "per_creator_per_user",
DAY_PASS = "dayPass",
}

@@ -50,4 +50,9 @@ ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
ENV TENANT_FEATURE_FLAGS=*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR
ENV ACCOUNT_PORTAL_URL=https://account.budibase.app
ARG BUDIBASE_VERSION
# Ensuring the version argument is sent
RUN test -n "$BUDIBASE_VERSION"
ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
CMD ["./docker_run.sh"]

@@ -20,7 +20,7 @@
"run:docker": "node dist/index.js",
"debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
"dev:stack:init": "node ./scripts/dev/manage.js init",
"dev:builder": "npm run dev:stack:init && nodemon",
"dev:built": "yarn run dev:stack:init && yarn run run:docker",

@@ -31,6 +31,7 @@ async function init() {
TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
ENABLE_EMAIL_TEST_MODE: 1,
HTTP_LOGGING: 0,
VERSION: "0.0.0+local",
}
let envFile = ""
Object.keys(envFileJson).forEach(key => {

@@ -189,7 +189,10 @@ export const destroy = async (ctx: any) => {
export const getAppUsers = async (ctx: Ctx<SearchUsersRequest>) => {
const body = ctx.request.body
const users = await userSdk.db.getUsersByAppAccess(body?.appId)
const users = await userSdk.db.getUsersByAppAccess({
appId: body.appId,
limit: body.limit,
})
ctx.body = { data: users }
}

@@ -569,9 +569,13 @@ describe("/api/global/users", () => {
{
query: { equal: { firstName: user.firstName } },
},
501
{ status: 501 }
)
})
it("should throw an error if public query performed", async () => {
await config.api.users.searchUsers({}, { status: 403, noHeaders: true })
})
})
describe("DELETE /api/global/users/:userId", () => {

@@ -72,7 +72,8 @@ router
)
.get("/api/global/users", auth.builderOrAdmin, controller.fetch)
.post("/api/global/users/search", auth.builderOrAdmin, controller.search)
// search can be used by any user now, to retrieve users for user column
.post("/api/global/users/search", controller.search)
.delete("/api/global/users/:id", auth.adminOnly, controller.destroy)
.get(
"/api/global/users/count/:appId",

@@ -134,13 +134,19 @@ export class UserAPI extends TestAPI {
.expect(status ? status : 200)
}
searchUsers = ({ query }: { query?: SearchQuery }, status = 200) => {
return this.request
searchUsers = (
{ query }: { query?: SearchQuery },
opts?: { status?: number; noHeaders?: boolean }
) => {
const req = this.request
.post("/api/global/users/search")
.set(this.config.defaultHeaders())
.send({ query })
.expect("Content-Type", /json/)
.expect(status ? status : 200)
.expect(opts?.status ? opts.status : 200)
if (!opts?.noHeaders) {
req.set(this.config.defaultHeaders())
}
return req
}
getUser = (userId: string, opts?: TestAPIOpts) => {

@@ -10,3 +10,4 @@ process.env.PLATFORM_URL = "http://localhost:10000"
process.env.INTERNAL_API_KEY = "tet"
process.env.DISABLE_ACCOUNT_PORTAL = "0"
process.env.MOCK_REDIS = "1"
process.env.BUDIBASE_VERSION = "0.0.0+jest"

@@ -1,3 +1,4 @@
#!/bin/bash
yarn build --scope @budibase/server --scope @budibase/worker
docker build -f hosting/single/Dockerfile.v2 -t budibase:latest .
version=$(./scripts/getCurrentVersion.sh)
docker build -f hosting/single/Dockerfile.v2 -t budibase:latest --build-arg BUDIBASE_VERSION=$version .