
Merge branch 'master' into grid-inline-searching

Andrew Kingston 2023-10-24 13:30:07 +01:00, committed by GitHub
commit 7e3d59a582
36 changed files with 157 additions and 183 deletions

@@ -18,8 +18,7 @@ env:
   BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
   PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
   NX_BASE_BRANCH: origin/${{ github.base_ref }}
-  USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
-  NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
+  USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }}

 jobs:
   lint:

.github/workflows/deploy-qa.yml (new file)

@@ -0,0 +1,20 @@
+name: Deploy QA
+
+on:
+  push:
+    branches:
+      - master
+  workflow_dispatch:
+
+jobs:
+  trigger-deploy-to-qa-env:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: peter-evans/repository-dispatch@v2
+        env:
+          PAYLOAD_VERSION: ${{ github.sha }}
+          REF_NAME: ${{ github.ref_name}}
+        with:
+          repository: budibase/budibase-deploys
+          event-type: budicloud-qa-deploy
+          token: ${{ secrets.GH_ACCESS_TOKEN }}
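Note: `repository-dispatch` only emits an event; nothing deploys unless the receiving repository has a workflow listening for the `budicloud-qa-deploy` event type. A minimal sketch of what such a consumer in budibase/budibase-deploys could look like (illustrative, not part of this commit; how the version and ref values are threaded through depends on that action's inputs):

```yaml
# Hypothetical listener workflow in budibase/budibase-deploys
name: Deploy to QA (consumer)
on:
  repository_dispatch:
    types: [budicloud-qa-deploy]
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      # client_payload carries whatever the dispatching workflow sent along
      - run: echo "Deploying ref ${{ github.event.client_payload.REF_NAME }}"
```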

@@ -123,6 +123,7 @@ jobs:
       - uses: passeidireto/trigger-external-workflow-action@main
         env:
           PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
+          REF_NAME: ${{ github.ref_name}}
         with:
           repository: budibase/budibase-deploys
           event: budicloud-qa-deploy

@@ -54,6 +54,7 @@ jobs:
           push: true
           pull: true
           platforms: linux/amd64,linux/arm64
+          build-args: BUDIBASE_VERSION=0.0.0+test
           tags: budibase/budibase-test:test
           file: ./hosting/single/Dockerfile.v2
           cache-from: type=registry,ref=budibase/budibase-test:test
@@ -64,6 +65,8 @@ jobs:
           context: .
           push: true
           platforms: linux/amd64
-          build-args: TARGETBUILD=aas
+          build-args: |
+            TARGETBUILD=aas
+            BUDIBASE_VERSION=0.0.0+test
           tags: budibase/budibase-test:aas
           file: ./hosting/single/Dockerfile.v2

@@ -7,6 +7,8 @@ services:
     build:
       context: ..
       dockerfile: packages/server/Dockerfile.v2
+      args:
+        - BUDIBASE_VERSION=0.0.0+dev-docker
     container_name: build-bbapps
     environment:
       SELF_HOSTED: 1
@@ -30,13 +32,13 @@ services:
     depends_on:
       - worker-service
       - redis-service
-    # volumes:
-    #   - /some/path/to/plugins:/plugins

   worker-service:
     build:
       context: ..
       dockerfile: packages/worker/Dockerfile.v2
+      args:
+        - BUDIBASE_VERSION=0.0.0+dev-docker
     container_name: build-bbworker
     environment:
       SELF_HOSTED: 1

@@ -26,6 +26,7 @@ RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
 # We will never want to sync pro, but the script is still required
 RUN echo '' > scripts/syncProPackage.js
 RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
+RUN ./scripts/removeWorkspaceDependencies.sh package.json
 RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production

 # copy the actual code
@@ -117,6 +118,10 @@ EXPOSE 443
 EXPOSE 2222
 VOLUME /data

+ARG BUDIBASE_VERSION
+# Ensuring the version argument is sent
+RUN test -n "$BUDIBASE_VERSION"
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
 HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"
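The `RUN test -n "$BUDIBASE_VERSION"` guard makes the image build fail fast if the build arg is omitted, rather than silently baking an empty version string. Roughly (commands illustrative, not from the commit):

```bash
# Fails at the guard step: no build arg, so $BUDIBASE_VERSION is empty
docker build -f hosting/single/Dockerfile.v2 -t budibase:latest .

# Succeeds: the arg is supplied and persisted into the image via ENV
docker build -f hosting/single/Dockerfile.v2 \
  --build-arg BUDIBASE_VERSION=2.11.43 -t budibase:latest .
```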

@@ -1,5 +1,5 @@
 {
-  "version": "2.11.39",
+  "version": "2.11.43",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

@@ -3,14 +3,16 @@
     "default": {
       "runner": "nx-cloud",
       "options": {
-        "cacheableOperations": ["build", "test", "check:types"],
-        "accessToken": "MmM4OGYxNzItMDBlYy00ZmE3LTk4MTYtNmJhYWMyZjBjZTUyfHJlYWQ="
+        "cacheableOperations": ["build", "test", "check:types"]
       }
     }
   },
   "targetDefaults": {
     "build": {
-      "inputs": ["{workspaceRoot}/scripts/build.js"]
+      "inputs": [
+        "{workspaceRoot}/scripts/build.js",
+        "{workspaceRoot}/lerna.json"
+      ]
     }
   }
 }

@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }

-  async put(doc: any, writeRateMs: number = this.writeRateMs) {
-    return put(this.db, doc, writeRateMs)
+  async put(doc: any) {
+    return put(this.db, doc, this.writeRateMs)
   }

   async get(id: string) {

@@ -75,12 +75,12 @@ function getPackageJsonFields(): {
     const content = readFileSync(packageJsonFile!, "utf-8")
     const parsedContent = JSON.parse(content)
     return {
-      VERSION: parsedContent.version,
+      VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,
       SERVICE_NAME: parsedContent.name,
     }
   } catch {
     // throwing an error here is confusing/causes backend-core to be hard to import
-    return { VERSION: "", SERVICE_NAME: "" }
+    return { VERSION: process.env.BUDIBASE_VERSION || "", SERVICE_NAME: "" }
   }
 }
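With this change an injected BUDIBASE_VERSION always wins over whatever package.json contains, in both the success path and the catch path. The resolution order, as a standalone sketch (not the actual module):

```typescript
// Sketch of the precedence introduced above:
// 1. process.env.BUDIBASE_VERSION (set via Docker ARG/ENV or jest setup)
// 2. the "version" field parsed from package.json
// 3. "" when package.json cannot be read at all
function resolveVersion(packageJsonVersion?: string): string {
  return process.env.BUDIBASE_VERSION || packageJsonVersion || ""
}
```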

@@ -25,17 +25,12 @@ import {
 import {
   getAccountHolderFromUserIds,
   isAdmin,
-  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"

-type QuotaUpdateFn = (
-  change: number,
-  creatorsChange: number,
-  cb?: () => Promise<any>
-) => Promise<any>
+type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<Boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
@@ -164,14 +159,14 @@
     }
   }

-  static async getUsersByAppAccess(appId?: string) {
-    const opts: any = {
+  static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
+    const params: any = {
       include_docs: true,
-      limit: 50,
+      limit: opts.limit || 50,
     }
     let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
-      appId,
-      opts
+      opts.appId,
+      params
     )
     return response
   }
@@ -250,8 +245,7 @@
     }

     const change = dbUser ? 0 : 1 // no change if there is existing user
-    const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
-    return UserDB.quotas.addUsers(change, creatorsChange, async () => {
+    return UserDB.quotas.addUsers(change, async () => {
       await validateUniqueUser(email, tenantId)

       let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@@ -313,7 +307,6 @@
     let usersToSave: any[] = []
     let newUsers: any[] = []
-    let newCreators: any[] = []

     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)
@@ -334,66 +327,59 @@
       }
       newUser.userGroups = groups
       newUsers.push(newUser)
-      if (isCreator(newUser)) {
-        newCreators.push(newUser)
-      }
     }

     const account = await accountSdk.getAccountByTenantId(tenantId)
-    return UserDB.quotas.addUsers(
-      newUsers.length,
-      newCreators.length,
-      async () => {
-        // create the promises array that will be called by bulkDocs
-        newUsers.forEach((user: any) => {
-          usersToSave.push(
-            UserDB.buildUser(
-              user,
-              {
-                hashPassword: true,
-                requirePassword: user.requirePassword,
-              },
-              tenantId,
-              undefined, // no dbUser
-              account
-            )
-          )
-        })
+    return UserDB.quotas.addUsers(newUsers.length, async () => {
+      // create the promises array that will be called by bulkDocs
+      newUsers.forEach((user: any) => {
+        usersToSave.push(
+          UserDB.buildUser(
+            user,
+            {
+              hashPassword: true,
+              requirePassword: user.requirePassword,
+            },
+            tenantId,
+            undefined, // no dbUser
+            account
+          )
+        )
+      })

-        const usersToBulkSave = await Promise.all(usersToSave)
-        await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
+      const usersToBulkSave = await Promise.all(usersToSave)
+      await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)

-        // Post-processing of bulk added users, e.g. events and cache operations
-        for (const user of usersToBulkSave) {
-          // TODO: Refactor to bulk insert users into the info db
-          // instead of relying on looping tenant creation
-          await platform.users.addUser(tenantId, user._id, user.email)
-          await eventHelpers.handleSaveEvents(user, undefined)
-        }
+      // Post-processing of bulk added users, e.g. events and cache operations
+      for (const user of usersToBulkSave) {
+        // TODO: Refactor to bulk insert users into the info db
+        // instead of relying on looping tenant creation
+        await platform.users.addUser(tenantId, user._id, user.email)
+        await eventHelpers.handleSaveEvents(user, undefined)
+      }

-        const saved = usersToBulkSave.map(user => {
-          return {
-            _id: user._id,
-            email: user.email,
-          }
-        })
+      const saved = usersToBulkSave.map(user => {
+        return {
+          _id: user._id,
+          email: user.email,
+        }
+      })

-        // now update the groups
-        if (Array.isArray(saved) && groups) {
-          const groupPromises = []
-          const createdUserIds = saved.map(user => user._id)
-          for (let groupId of groups) {
-            groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
-          }
-          await Promise.all(groupPromises)
-        }
+      // now update the groups
+      if (Array.isArray(saved) && groups) {
+        const groupPromises = []
+        const createdUserIds = saved.map(user => user._id)
+        for (let groupId of groups) {
+          groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
+        }
+        await Promise.all(groupPromises)
+      }

-        return {
-          successful: saved,
-          unsuccessful,
-        }
-      }
-    )
+      return {
+        successful: saved,
+        unsuccessful,
+      }
+    })
   }

   static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@@ -433,12 +419,11 @@
       _deleted: true,
     }))
     const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)

-    const creatorsToDelete = usersToDelete.filter(isCreator)
+    await UserDB.quotas.removeUsers(toDelete.length)
     for (let user of usersToDelete) {
       await bulkDeleteProcessing(user)
     }
-    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)

     // Build Response
     // index users by id
@@ -487,8 +472,7 @@
     await db.remove(userId, dbUser._rev)

-    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
-    await UserDB.quotas.removeUsers(1, creatorsToDelete)
+    await UserDB.quotas.removeUsers(1)
     await eventHelpers.handleDeleteEvents(dbUser)
     await cache.user.invalidateUser(userId)
     await sessions.invalidateSessions(userId, { reason: "deletion" })

@@ -14,11 +14,12 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
+  ContextUser,
   SearchQuery,
   SearchQueryOperators,
   SearchUsersRequest,
   User,
-  ContextUser,
+  DatabaseQueryOpts,
 } from "@budibase/types"
 import { getGlobalDB } from "../context"
 import * as context from "../context"
@@ -241,12 +242,14 @@
   bookmark,
   query,
   appId,
+  limit,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()
+  const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
   // get one extra document, to have the next page
-  const opts: any = {
+  const opts: DatabaseQueryOpts = {
     include_docs: true,
-    limit: PAGE_LIMIT + 1,
+    limit: pageLimit,
   }
   // add a startkey if the page was specified (anchor)
   if (bookmark) {
@@ -269,7 +272,7 @@
     const response = await db.allDocs(getGlobalUserParams(null, opts))
     userList = response.rows.map((row: any) => row.doc)
   }
-  return pagination(userList, PAGE_LIMIT, {
+  return pagination(userList, pageLimit, {
     paginate: true,
     property,
     getKey,
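The `limit + 1` trick fetches one document beyond the requested page size; the presence of that extra row is what signals a next page, and it is dropped from the returned data. A self-contained sketch of the pattern (names illustrative, not the real `pagination` helper):

```typescript
// Fetch-one-extra pagination, as used above (illustrative sketch).
function paginate<T>(rows: T[], pageLimit: number) {
  // rows were queried with limit = pageLimit, where pageLimit = pageSize + 1
  const hasNextPage = rows.length === pageLimit
  const data = hasNextPage ? rows.slice(0, pageLimit - 1) : rows
  return { data, hasNextPage }
}
```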

@@ -1,54 +0,0 @@
-const _ = require('lodash/fp')
-const {structures} = require("../../../tests")
-
-jest.mock("../../../src/context")
-jest.mock("../../../src/db")
-
-const context = require("../../../src/context")
-const db = require("../../../src/db")
-
-const {getCreatorCount} = require('../../../src/users/users')
-
-describe("Users", () => {
-  let getGlobalDBMock
-  let getGlobalUserParamsMock
-  let paginationMock
-
-  beforeEach(() => {
-    jest.resetAllMocks()
-
-    getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
-    getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
-    paginationMock = jest.spyOn(db, "pagination")
-  })
-
-  it("Retrieves the number of creators", async () => {
-    const getUsers = (offset, limit, creators = false) => {
-      const range = _.range(offset, limit)
-      const opts = creators ? {builder: {global: true}} : undefined
-      return range.map(() => structures.users.user(opts))
-    }
-    const page1Data = getUsers(0, 8)
-    const page2Data = getUsers(8, 12, true)
-    getGlobalDBMock.mockImplementation(() => ({
-      name : "fake-db",
-      allDocs: () => ({
-        rows: [...page1Data, ...page2Data]
-      })
-    }))
-
-    paginationMock.mockImplementationOnce(() => ({
-      data: page1Data,
-      hasNextPage: true,
-      nextPage: "1"
-    }))
-
-    paginationMock.mockImplementation(() => ({
-      data: page2Data,
-      hasNextPage: false,
-      nextPage: undefined
-    }))
-
-    const creatorsCount = await getCreatorCount()
-    expect(creatorsCount).toBe(4)
-    expect(paginationMock).toHaveBeenCalledTimes(2)
-  })
-})

@@ -123,10 +123,6 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
-    amounts: {
-      user: 10000,
-      creator: 0,
-    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,
@@ -135,10 +131,6 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
-    quantities: {
-      user: 0,
-      creator: 0,
-    },
     status: "active",
   }
 }

@@ -114,8 +114,9 @@
       query: {
         appId: query || !filterByAppAccess ? null : prodAppId,
         email: query,
-        paginated: query || !filterByAppAccess ? null : false,
       },
+      limit: 50,
+      paginate: query || !filterByAppAccess ? null : false,
     })
     await usersFetch.refresh()

@@ -81,9 +81,9 @@ export function createDatasourcesStore() {
     }))
   }

-  const updateDatasource = response => {
+  const updateDatasource = (response, { ignoreErrors } = {}) => {
     const { datasource, errors } = response
-    if (errors && Object.keys(errors).length > 0) {
+    if (!ignoreErrors && errors && Object.keys(errors).length > 0) {
       throw new TableImportError(errors)
     }
     replaceDatasource(datasource._id, datasource)
@@ -137,7 +137,7 @@ export function createDatasourcesStore() {
       fetchSchema: integration.plus,
     })

-    return updateDatasource(response)
+    return updateDatasource(response, { ignoreErrors: true })
   }

   const update = async ({ integration, datasource }) => {

@@ -1 +1 @@
-Subproject commit 570d14aa44aa88f4d053856322210f0008ba5c76
+Subproject commit d24c0dc3a30014cbe61860252aa48104cad36376

@@ -67,6 +67,11 @@ COPY packages/server/docker_run.sh .
 COPY packages/server/builder/ builder/
 COPY packages/server/client/ client/

+ARG BUDIBASE_VERSION
+# Ensuring the version argument is sent
+RUN test -n "$BUDIBASE_VERSION"
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
+
 EXPOSE 4001

 # have to add node environment production after install

@@ -18,7 +18,7 @@
     "test": "bash scripts/test.sh",
     "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
     "test:watch": "jest --watch",
-    "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
+    "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
     "run:docker": "node dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
     "dev:stack:up": "node scripts/dev/manage.js up",

@@ -47,6 +47,7 @@ async function init() {
     TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
     HTTP_MIGRATIONS: "0",
     HTTP_LOGGING: "0",
+    VERSION: "0.0.0+local",
   }
   let envFile = ""
   Object.keys(envFileJson).forEach(key => {

@@ -41,7 +41,7 @@ describe("/component", () => {
       .expect("Content-Type", /json/)
       .expect(200)
     expect(res.body).toEqual({
-      budibaseVersion: "0.0.0",
+      budibaseVersion: "0.0.0+jest",
       cpuArch: "arm64",
       cpuCores: 1,
       cpuInfo: "test",

@@ -1,6 +1,6 @@
 const setup = require("./utilities")
 const { events } = require("@budibase/backend-core")
-const version = require("../../../../package.json").version

 describe("/dev", () => {
   let request = setup.getRequest()
@@ -32,9 +32,9 @@ describe("/dev", () => {
       .expect("Content-Type", /json/)
       .expect(200)

-    expect(res.body.version).toBe(version)
+    expect(res.body.version).toBe('0.0.0+jest')
     expect(events.installation.versionChecked).toBeCalledTimes(1)
-    expect(events.installation.versionChecked).toBeCalledWith(version)
+    expect(events.installation.versionChecked).toBeCalledWith('0.0.0+jest')
   })
 })
 })

@@ -9,3 +9,4 @@ process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error"
 process.env.MOCK_REDIS = "1"
 process.env.PLATFORM_URL = "http://localhost:10000"
 process.env.REDIS_PASSWORD = "budibase"
+process.env.BUDIBASE_VERSION = "0.0.0+jest"

@@ -55,6 +55,7 @@ export interface SearchUsersRequest {
   bookmark?: string
   query?: SearchQuery
   appId?: string
+  limit?: number
   paginate?: boolean
 }

@@ -1,8 +1,5 @@
 export enum FeatureFlag {
   LICENSING = "LICENSING",
-  // Feature IDs in Posthog
-  PER_CREATOR_PER_USER_PRICE = "18873",
-  PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
 }

 export interface TenantFeatureFlags {
@@ -5,17 +5,10 @@ export interface Customer {
   currency: string | null | undefined
 }

-export interface SubscriptionItems {
-  user: number | undefined
-  creator: number | undefined
-}
-
 export interface Subscription {
   amount: number
-  amounts: SubscriptionItems | undefined
   currency: string
   quantity: number
-  quantities: SubscriptionItems | undefined
   duration: PriceDuration
   cancelAt: number | null | undefined
   currentPeriodStart: number

@@ -4,9 +4,7 @@ export enum PlanType {
   PRO = "pro",
   /** @deprecated */
   TEAM = "team",
-  /** @deprecated */
   PREMIUM = "premium",
-  PREMIUM_PLUS = "premium_plus",
   BUSINESS = "business",
   ENTERPRISE = "enterprise",
 }
@@ -28,12 +26,10 @@ export interface AvailablePrice {
   currency: string
   duration: PriceDuration
   priceId: string
-  type?: string
 }

 export enum PlanModel {
   PER_USER = "perUser",
-  PER_CREATOR_PER_USER = "per_creator_per_user",
   DAY_PASS = "dayPass",
 }

@@ -50,4 +50,9 @@ ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
 ENV TENANT_FEATURE_FLAGS=*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR
 ENV ACCOUNT_PORTAL_URL=https://account.budibase.app

+ARG BUDIBASE_VERSION
+# Ensuring the version argument is sent
+RUN test -n "$BUDIBASE_VERSION"
+ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
+
 CMD ["./docker_run.sh"]

@@ -20,7 +20,7 @@
     "run:docker": "node dist/index.js",
     "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
-    "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION",
+    "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
     "dev:stack:init": "node ./scripts/dev/manage.js init",
     "dev:builder": "npm run dev:stack:init && nodemon",
     "dev:built": "yarn run dev:stack:init && yarn run run:docker",

@@ -31,6 +31,7 @@ async function init() {
     TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
     ENABLE_EMAIL_TEST_MODE: 1,
     HTTP_LOGGING: 0,
+    VERSION: "0.0.0+local",
   }
   let envFile = ""
   Object.keys(envFileJson).forEach(key => {

@@ -189,7 +189,10 @@ export const destroy = async (ctx: any) => {
 export const getAppUsers = async (ctx: Ctx<SearchUsersRequest>) => {
   const body = ctx.request.body
-  const users = await userSdk.db.getUsersByAppAccess(body?.appId)
+  const users = await userSdk.db.getUsersByAppAccess({
+    appId: body.appId,
+    limit: body.limit,
+  })

   ctx.body = { data: users }
 }

@@ -569,9 +569,13 @@ describe("/api/global/users", () => {
         {
           query: { equal: { firstName: user.firstName } },
         },
-        501
+        { status: 501 }
       )
     })
+
+    it("should throw an error if public query performed", async () => {
+      await config.api.users.searchUsers({}, { status: 403, noHeaders: true })
+    })
   })

   describe("DELETE /api/global/users/:userId", () => {

@@ -72,7 +72,8 @@ router
   )
   .get("/api/global/users", auth.builderOrAdmin, controller.fetch)
-  .post("/api/global/users/search", auth.builderOrAdmin, controller.search)
+  // search can be used by any user now, to retrieve users for user column
+  .post("/api/global/users/search", controller.search)
   .delete("/api/global/users/:id", auth.adminOnly, controller.destroy)
   .get(
     "/api/global/users/count/:appId",

@@ -134,13 +134,19 @@ export class UserAPI extends TestAPI {
       .expect(status ? status : 200)
   }

-  searchUsers = ({ query }: { query?: SearchQuery }, status = 200) => {
-    return this.request
+  searchUsers = (
+    { query }: { query?: SearchQuery },
+    opts?: { status?: number; noHeaders?: boolean }
+  ) => {
+    const req = this.request
       .post("/api/global/users/search")
-      .set(this.config.defaultHeaders())
       .send({ query })
       .expect("Content-Type", /json/)
-      .expect(status ? status : 200)
+      .expect(opts?.status ? opts.status : 200)
+    if (!opts?.noHeaders) {
+      req.set(this.config.defaultHeaders())
+    }
+    return req
   }

   getUser = (userId: string, opts?: TestAPIOpts) => {

@@ -10,3 +10,4 @@ process.env.PLATFORM_URL = "http://localhost:10000"
 process.env.INTERNAL_API_KEY = "tet"
 process.env.DISABLE_ACCOUNT_PORTAL = "0"
 process.env.MOCK_REDIS = "1"
+process.env.BUDIBASE_VERSION = "0.0.0+jest"

@@ -1,3 +1,4 @@
 #!/bin/bash
 yarn build --scope @budibase/server --scope @budibase/worker
-docker build -f hosting/single/Dockerfile.v2 -t budibase:latest .
+version=$(./scripts/getCurrentVersion.sh)
+docker build -f hosting/single/Dockerfile.v2 -t budibase:latest --build-arg BUDIBASE_VERSION=$version .
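A quick way to verify the wiring end to end: because the Dockerfile persists the build arg with `ENV`, the version should be readable inside the built image (verification command assumed, not part of the script):

```bash
# Prints the version baked in at build time, e.g. 2.11.43
docker run --rm --entrypoint sh budibase:latest -c 'echo $BUDIBASE_VERSION'
```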