
Merge remote-tracking branch 'origin/master' into feat/automation-naming-ux-updates

Peter Clement 2023-11-02 09:19:19 +00:00
commit 6a4c71cd67
35 changed files with 433 additions and 130 deletions

@ -66,14 +66,21 @@ jobs:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: BUDIBASE_VERSION=$BUDIBASE_VERSION
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
build-args: |
TARGETBUILD=aas
BUDIBASE_VERSION=$BUDIBASE_VERSION
tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}

@ -1,10 +0,0 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio

@ -42,6 +42,7 @@ COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV NODE_MAJOR 18
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@ -49,10 +50,10 @@ ENV TARGETBUILD $TARGETBUILD
# install base dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server libaio1
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
&& curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
@ -61,10 +62,8 @@ RUN apt install software-properties-common apt-transport-https gpg -y \
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
COPY scripts/install-node.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx

@ -77,7 +77,7 @@ mkdir -p ${DATA_DIR}/minio
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
# Add monthly cron job to renew certbot certificate

@ -1,5 +1,5 @@
{
"version": "2.12.1",
"version": "2.12.4",
"npmClient": "yarn",
"packages": [
"packages/*"

@ -3,6 +3,7 @@ const mockS3 = {
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`

@ -30,6 +30,7 @@ export * as timers from "./timers"
export { default as env } from "./environment"
export * as blacklist from "./blacklist"
export * as docUpdates from "./docUpdates"
export * from "./utils/Duration"
export { SearchParams } from "./db"
// Add context to tenancy for backwards compatibility
// only do this for external usages to prevent internal

@ -36,7 +36,7 @@ class InMemoryQueue {
* @param opts This is not used by the in memory queue as there is no real use
* case when in memory, but is the same API as Bull
*/
constructor(name: string, opts = null) {
constructor(name: string, opts?: any) {
this._name = name
this._opts = opts
this._messages = []

@ -2,11 +2,18 @@ import env from "../environment"
import { getRedisOptions } from "../redis/utils"
import { JobQueue } from "./constants"
import InMemoryQueue from "./inMemoryQueue"
import BullQueue from "bull"
import BullQueue, { QueueOptions } from "bull"
import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"
import * as Redis from "ioredis"
const CLEANUP_PERIOD_MS = 60 * 1000
// the queue lock is held for 5 minutes
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
// queue lock is refreshed every 30 seconds
const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
// cleanup the queue every 60 seconds
const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
let cleanupInterval: NodeJS.Timeout
@ -21,7 +28,14 @@ export function createQueue<T>(
opts: { removeStalledCb?: StalledFn } = {}
): BullQueue.Queue<T> {
const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
const queueConfig: QueueOptions = {
redis: redisProtocolUrl! || (redisOpts as Redis.RedisOptions),
settings: {
maxStalledCount: 0,
lockDuration: QUEUE_LOCK_MS,
lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,
},
}
let queue: any
if (!env.isTest()) {
queue = new BullQueue(jobQueue, queueConfig)
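The new settings tighten Bull's stalled-job handling: each job lock is held for five minutes and renewed every 30 seconds, and maxStalledCount: 0 means a job whose lock lapses is failed rather than re-processed. A minimal standalone sketch of the same configuration (queue name and Redis host are assumptions, not from the commit):

import BullQueue, { QueueOptions } from "bull"

// assumed local Redis; the settings values mirror the constants above
const opts: QueueOptions = {
  redis: { host: "localhost", port: 6379 },
  settings: {
    maxStalledCount: 0, // a stalled job is marked failed, never re-run
    lockDuration: 5 * 60 * 1000, // hold each job lock for 5 minutes
    lockRenewTime: 30 * 1000, // renew the lock every 30 seconds
  },
}
const queue = new BullQueue("example-queue", opts)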

@ -249,7 +249,8 @@ export const paginatedUsers = async ({
limit,
}: SearchUsersRequest = {}) => {
const db = getGlobalDB()
const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
const pageSize = limit ?? PAGE_LIMIT
const pageLimit = pageSize + 1
// get one extra document, to have the next page
const opts: DatabaseQueryOpts = {
include_docs: true,
@ -276,7 +277,7 @@ export const paginatedUsers = async ({
const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc)
}
return pagination(userList, pageLimit, {
return pagination(userList, pageSize, {
paginate: true,
property,
getKey,
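The fix separates the requested page size from the query limit: limit + 1 documents are fetched so the paginator can detect a next page, while only pageSize rows go back to the caller. A minimal sketch of the pattern (hypothetical helper, not the committed pagination util):

// fetch pageSize + 1 docs; the extra doc only signals that a next page exists
function paginate<T>(docs: T[], pageSize: number) {
  const hasNextPage = docs.length > pageSize
  return { data: docs.slice(0, pageSize), hasNextPage }
}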

@ -0,0 +1,49 @@
export enum DurationType {
MILLISECONDS = "milliseconds",
SECONDS = "seconds",
MINUTES = "minutes",
HOURS = "hours",
DAYS = "days",
}
const conversion: Record<DurationType, number> = {
milliseconds: 1,
seconds: 1000,
minutes: 60 * 1000,
hours: 60 * 60 * 1000,
days: 24 * 60 * 60 * 1000,
}
export class Duration {
static convert(from: DurationType, to: DurationType, duration: number) {
const milliseconds = duration * conversion[from]
return milliseconds / conversion[to]
}
static from(from: DurationType, duration: number) {
return {
to: (to: DurationType) => {
return Duration.convert(from, to, duration)
},
toMs: () => {
return Duration.convert(from, DurationType.MILLISECONDS, duration)
},
}
}
static fromSeconds(duration: number) {
return Duration.from(DurationType.SECONDS, duration)
}
static fromMinutes(duration: number) {
return Duration.from(DurationType.MINUTES, duration)
}
static fromHours(duration: number) {
return Duration.from(DurationType.HOURS, duration)
}
static fromDays(duration: number) {
return Duration.from(DurationType.DAYS, duration)
}
}

@ -1,3 +1,4 @@
export * from "./hashing"
export * from "./utils"
export * from "./stringUtils"
export * from "./Duration"

@ -0,0 +1,19 @@
import { Duration, DurationType } from "../Duration"
describe("duration", () => {
it("should convert minutes to milliseconds", () => {
expect(Duration.fromMinutes(5).toMs()).toBe(300000)
})
it("should convert seconds to milliseconds", () => {
expect(Duration.fromSeconds(30).toMs()).toBe(30000)
})
it("should convert days to milliseconds", () => {
expect(Duration.fromDays(1).toMs()).toBe(86400000)
})
it("should convert minutes to days", () => {
expect(Duration.fromMinutes(1440).to(DurationType.DAYS)).toBe(1)
})
})

@ -1,2 +1,3 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000

@ -3,7 +3,6 @@
Heading,
Body,
Button,
ButtonGroup,
Table,
Layout,
Modal,
@ -46,6 +45,10 @@
datasource: {
type: "user",
},
options: {
paginate: true,
limit: 10,
},
})
let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
@ -65,10 +68,12 @@
{ column: "role", component: RoleTableRenderer },
]
let userData = []
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
$: isOwner = $auth.accountPortalAccess && $admin.cloud
$: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled
$: debouncedUpdateFetch(searchEmail)
$: schema = {
email: {
@ -88,16 +93,6 @@
width: "1fr",
},
}
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
$: pendingSchema = getPendingSchema(schema)
$: userData = []
$: inviteUsersResponse = { successful: [], unsuccessful: [] }
@ -121,9 +116,15 @@
}
})
}
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
const invitesToSchema = invites => {
return invites.map(invite => {
@ -143,7 +144,9 @@
const updateFetch = email => {
fetch.update({
query: {
email,
string: {
email,
},
},
})
}
@ -296,7 +299,7 @@
{/if}
<div class="controls">
{#if !readonly}
<ButtonGroup>
<div class="buttons">
<Button
disabled={readonly}
on:click={$licensing.userLimitReached
@ -315,7 +318,7 @@
>
Import
</Button>
</ButtonGroup>
</div>
{:else}
<ScimBanner />
{/if}
@ -390,12 +393,15 @@
</Modal>
<style>
.buttons {
display: flex;
gap: 10px;
}
.pagination {
display: flex;
flex-direction: row;
justify-content: flex-end;
}
.controls {
display: flex;
flex-direction: row;
@ -403,7 +409,6 @@
align-items: center;
gap: var(--spacing-xl);
}
.controls-right {
display: flex;
flex-direction: row;
@ -411,7 +416,6 @@
align-items: center;
gap: var(--spacing-xl);
}
.controls-right :global(.spectrum-Search) {
width: 200px;
}

@ -5467,17 +5467,17 @@
},
"settings": [
{
"type": "select",
"type": "table",
"label": "Data",
"key": "dataSource"
},
{
"type": "radio",
"label": "Type",
"key": "actionType",
"options": ["Create", "Update", "View"],
"defaultValue": "Create"
},
{
"type": "table",
"label": "Data",
"key": "dataSource"
},
{
"type": "text",
"label": "Title",
@ -5508,13 +5508,37 @@
},
{
"type": "text",
"label": "Empty text",
"label": "No rows found",
"key": "noRowsMessage",
"defaultValue": "We couldn't find a row to display",
"nested": true
}
]
},
{
"section": true,
"name": "Fields",
"settings": [
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
}
]
},
{
"section": true,
"name": "Buttons",
@ -5566,30 +5590,6 @@
}
]
},
{
"section": true,
"name": "Fields",
"settings": [
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
}
]
},
{
"tag": "style",
"type": "select",
@ -5924,4 +5924,4 @@
}
]
}
}
}

@ -220,15 +220,11 @@
</BlockComponent>
{/if}
</BlockComponent>
{#if description}
<BlockComponent
type="text"
props={{ text: description }}
order={1}
/>
{/if}
</BlockComponent>
{/if}
{#if description}
<BlockComponent type="text" props={{ text: description }} order={1} />
{/if}
{#key fields}
<BlockComponent type="fieldgroup" props={{ labelPosition }} order={1}>
{#each fields as field, idx}

@ -70,6 +70,13 @@ module AwsMock {
Contents: {},
})
)
// @ts-ignore
this.getObject = jest.fn(
response({
Body: "",
})
)
}
aws.DynamoDB = { DocumentClient }

@ -32,11 +32,8 @@ import {
tenancy,
users,
} from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants"
import {
buildDefaultDocs,
DEFAULT_BB_DATASOURCE_ID,
} from "../../db/defaultData/datasource_bb_default"
import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { stringToReadStream } from "../../utilities"
import { doesUserHaveLock } from "../../utilities/redis"

@ -5,6 +5,8 @@ import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { mocks } from "@budibase/backend-core/tests"
mocks.licenses.useBackups()
describe("/backups", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@ -12,16 +14,17 @@ describe("/backups", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
tk.reset()
await config.init()
})
describe("exportAppDump", () => {
describe("/api/backups/export", () => {
it("should be able to export app", async () => {
const res = await request
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
.expect(200)
expect(res.headers["content-type"]).toEqual("application/gzip")
const { body, headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)
expect(body instanceof Buffer).toBe(true)
expect(headers["content-type"]).toEqual("application/gzip")
expect(events.app.exported).toBeCalledTimes(1)
})
@ -36,11 +39,11 @@ describe("/backups", () => {
it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE)
const res = await request
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
const { headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)
expect(res.headers["content-disposition"]).toEqual(
expect(headers["content-disposition"]).toEqual(
`attachment; filename="${
config.getApp()!.name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
@ -48,6 +51,21 @@ describe("/backups", () => {
})
})
describe("/api/backups/import", () => {
it("should be able to import an app", async () => {
const appId = config.getAppId()!
const automation = await config.createAutomation()
await config.createAutomationLog(automation, appId)
await config.createScreen()
const exportRes = await config.api.backup.createBackup(appId)
expect(exportRes.backupId).toBeDefined()
const importRes = await config.api.backup.importBackup(
appId,
exportRes.backupId
)
})
})
describe("calculateBackupStats", () => {
it("should be able to calculate the backup statistics", async () => {
await config.createAutomation()

@ -172,3 +172,8 @@ export enum AutomationErrors {
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
export const MAX_AUTOMATION_RECURRING_ERRORS = 5
export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"

@ -1,4 +1,12 @@
import { AutoFieldSubTypes, FieldTypes } from "../../constants"
import {
AutoFieldSubTypes,
FieldTypes,
DEFAULT_BB_DATASOURCE_ID,
DEFAULT_INVENTORY_TABLE_ID,
DEFAULT_EMPLOYEE_TABLE_ID,
DEFAULT_EXPENSES_TABLE_ID,
DEFAULT_JOBS_TABLE_ID,
} from "../../constants"
import { importToRows } from "../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp"
import LinkDocument from "../linkedRows/LinkDocument"
@ -16,12 +24,6 @@ import {
TableSourceType,
} from "@budibase/types"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
const defaultDatasource = {
_id: DEFAULT_BB_DATASOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE,

@ -7,10 +7,12 @@ import {
TableSourceType,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import { InvalidColumns, NoEmptyFilterStrings } from "../constants"
import {
InvalidColumns,
NoEmptyFilterStrings,
DEFAULT_BB_DATASOURCE_ID,
} from "../constants"
import { helpers } from "@budibase/shared-core"
import * as external from "../api/controllers/table/external"
import * as internal from "../api/controllers/table/internal"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
@ -96,7 +98,8 @@ export function isInternalTableID(tableId: string) {
export function isExternalTable(table: Table) {
if (
table?.sourceId &&
table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR)
table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR) &&
table?.sourceId !== DEFAULT_BB_DATASOURCE_ID
) {
return true
} else if (table?.sourceType === TableSourceType.EXTERNAL) {

@ -26,7 +26,6 @@ export interface DBDumpOpts {
export interface ExportOpts extends DBDumpOpts {
tar?: boolean
excludeRows?: boolean
excludeLogs?: boolean
encryptPassword?: string
}
@ -83,14 +82,15 @@ export async function exportDB(
})
}
function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) {
const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
function defineFilter(excludeRows?: boolean) {
const ids = [
USER_METDATA_PREFIX,
LINK_USER_METADATA_PREFIX,
AUTOMATION_LOG_PREFIX,
]
if (excludeRows) {
ids.push(TABLE_ROW_PREFIX)
}
if (excludeLogs) {
ids.push(AUTOMATION_LOG_PREFIX)
}
return (doc: any) =>
!ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
}
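The returned predicate keeps a document only when its _id contains none of the excluded prefixes; the map/reduce chain is equivalent to this shorter form (a sketch, not the committed code):

// equivalent predicate: reject any doc whose _id contains an excluded prefix
const filter = (doc: { _id: string }) => !ids.some(key => doc._id.includes(key))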
@ -118,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
fs.writeFileSync(join(tmpPath, path), contents)
}
}
// get all of the files
// get all the files
else {
tmpPath = await objectStore.retrieveDirectory(
ObjectStoreBuckets.APPS,
@ -141,7 +141,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
// enforce an export of app DB to the tmp path
const dbPath = join(tmpPath, DB_EXPORT_FILE)
await exportDB(appId, {
filter: defineFilter(config?.excludeRows, config?.excludeLogs),
filter: defineFilter(config?.excludeRows),
exportPath: dbPath,
})
@ -191,7 +191,6 @@ export async function streamExportApp({
}) {
const tmpPath = await exportApp(appId, {
excludeRows,
excludeLogs: true,
tar: true,
encryptPassword,
})

@ -805,8 +805,9 @@ class TestConfiguration {
// AUTOMATION LOG
async createAutomationLog(automation: Automation) {
return await context.doInAppContext(this.getProdAppId(), async () => {
async createAutomationLog(automation: Automation, appId?: string) {
appId = appId || this.getProdAppId()
return await context.doInAppContext(appId!, async () => {
return await pro.sdk.automations.logs.storeLog(
automation,
basicAutomationResults(automation._id!)

@ -0,0 +1,45 @@
import {
CreateAppBackupResponse,
ImportAppBackupResponse,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
export class BackupAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
exportBasicBackup = async (appId: string) => {
const result = await this.request
.post(`/api/backups/export?appId=${appId}`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /application\/gzip/)
.expect(200)
return {
body: result.body as Buffer,
headers: result.headers,
}
}
createBackup = async (appId: string) => {
const result = await this.request
.post(`/api/apps/${appId}/backups`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body as CreateAppBackupResponse
}
importBackup = async (
appId: string,
backupId: string
): Promise<ImportAppBackupResponse> => {
const result = await this.request
.post(`/api/apps/${appId}/backups/${backupId}/import`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body as ImportAppBackupResponse
}
}

@ -7,6 +7,7 @@ import { DatasourceAPI } from "./datasource"
import { LegacyViewAPI } from "./legacyView"
import { ScreenAPI } from "./screen"
import { ApplicationAPI } from "./application"
import { BackupAPI } from "./backup"
import { AttachmentAPI } from "./attachment"
export default class API {
@ -18,6 +19,7 @@ export default class API {
datasource: DatasourceAPI
screen: ScreenAPI
application: ApplicationAPI
backup: BackupAPI
attachment: AttachmentAPI
constructor(config: TestConfiguration) {
@ -29,6 +31,7 @@ export default class API {
this.datasource = new DatasourceAPI(config)
this.screen = new ScreenAPI(config)
this.application = new ApplicationAPI(config)
this.backup = new BackupAPI(config)
this.attachment = new AttachmentAPI(config)
}
}

@ -20,3 +20,8 @@ export interface CreateAppBackupResponse {
export interface UpdateAppBackupRequest {
name: string
}
export interface ImportAppBackupResponse {
restoreId: string
message: string
}

@ -17,7 +17,7 @@
"test:notify": "node scripts/testResultsWebhook",
"test:cloud:prod": "yarn run test --testPathIgnorePatterns=\\.integration\\.",
"test:cloud:qa": "yarn run test",
"test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.license\\.",
"test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.licensing\\.",
"serve:test:self:ci": "start-server-and-test dev:built http://localhost:4001/health test:self:ci",
"serve": "start-server-and-test dev:built http://localhost:4001/health",
"dev:built": "cd ../ && yarn dev:built"

@ -99,9 +99,11 @@ export default class LicenseAPI extends BaseAPI {
}, opts)
}
async updatePlan(opts: APIRequestOpts = { status: 200 }) {
async updatePlan(priceId: string, opts: APIRequestOpts = { status: 200 }) {
return this.doRequest(() => {
return this.client.put(`/api/license/plan`)
return this.client.put(`/api/license/plan`, {
body: { priceId },
})
}, opts)
}

@ -38,9 +38,19 @@ export default class StripeAPI extends BaseAPI {
}, opts)
}
async linkStripeCustomer(opts: APIRequestOpts = { status: 200 }) {
async linkStripeCustomer(
accountId: string,
stripeCustomerId: string,
opts: APIRequestOpts = { status: 200 }
) {
return this.doRequest(() => {
return this.client.post(`/api/stripe/link`)
return this.client.post(`/api/stripe/link`, {
body: {
accountId,
stripeCustomerId,
},
internal: true,
})
}, opts)
}

@ -0,0 +1,114 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Hosting, PlanType } from "@budibase/types"
const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY)
describe("license management", () => {
const config = new TestConfiguration()
beforeAll(async () => {
await config.beforeAll()
})
afterAll(async () => {
await config.afterAll()
})
it("retrieves plans, creates checkout session, and updates license", async () => {
// Create cloud account
const createAccountRequest = fixtures.accounts.generateAccount({
hosting: Hosting.CLOUD,
})
const [createAccountRes, account] =
await config.accountsApi.accounts.create(createAccountRequest, {
autoVerify: true,
})
// Self response has free license
await config.doInNewState(async () => {
await config.loginAsAccount(createAccountRequest)
const [selfRes, selfBody] = await config.api.accounts.self()
expect(selfBody.license.plan.type).toBe(PlanType.FREE)
})
// Retrieve plans
const [plansRes, planBody] = await config.api.licenses.getPlans()
// Select priceId from premium plan
let premiumPriceId = null
let businessPriceId = ""
for (const plan of planBody) {
if (plan.type === PlanType.PREMIUM) {
premiumPriceId = plan.prices[0].priceId
}
if (plan.type === PlanType.BUSINESS) {
businessPriceId = plan.prices[0].priceId
}
}
// Create checkout session for price
const checkoutSessionRes = await config.api.stripe.createCheckoutSession(
premiumPriceId
)
const checkoutSessionUrl = checkoutSessionRes[1].url
expect(checkoutSessionUrl).toContain("checkout.stripe.com")
// Create stripe customer
const customer = await stripe.customers.create({
email: createAccountRequest.email,
})
// Create payment method
const paymentMethod = await stripe.paymentMethods.create({
type: "card",
card: {
token: "tok_visa", // Test Visa Card
},
})
// Attach payment method to customer
await stripe.paymentMethods.attach(paymentMethod.id, {
customer: customer.id,
})
// Update customer
await stripe.customers.update(customer.id, {
invoice_settings: {
default_payment_method: paymentMethod.id,
},
})
// Create subscription for premium plan
const subscription = await stripe.subscriptions.create({
customer: customer.id,
items: [
{
price: premiumPriceId,
quantity: 1,
},
],
default_payment_method: paymentMethod.id,
collection_method: "charge_automatically",
})
await config.doInNewState(async () => {
// License updated from Free to Premium
await config.loginAsAccount(createAccountRequest)
await config.api.stripe.linkStripeCustomer(account.accountId, customer.id)
const [_, selfBodyPremium] = await config.api.accounts.self()
expect(selfBodyPremium.license.plan.type).toBe(PlanType.PREMIUM)
// Create portal session - Check URL
const [portalRes, portalSessionBody] =
await config.api.stripe.createPortalSession(customer.id)
expect(portalSessionBody.url).toContain("billing.stripe.com")
// Update subscription from premium to business license
await config.api.licenses.updatePlan(businessPriceId)
// License updated to Business
const [selfRes, selfBodyBusiness] = await config.api.accounts.self()
expect(selfBodyBusiness.license.plan.type).toBe(PlanType.BUSINESS)
})
})
})

@ -28,6 +28,7 @@ const env = {
MARIADB_DB: process.env.MARIADB_DB,
MARIADB_USER: process.env.MARIADB_USER,
MARIADB_PASSWORD: process.env.MARIADB_PASSWORD,
STRIPE_SECRET_KEY: process.env.STRIPE_SECRET_KEY,
}
export = env

@ -2,9 +2,9 @@
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
wget https://dl.min.io/server/minio/release/linux-arm64/archive/minio.deb -O minio.deb
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
wget https://dl.min.io/server/minio/release/linux-amd64/archive/minio.deb -O minio.deb
fi
chmod +x minio
dpkg -i minio.deb

scripts/install-node.sh (new file, 8 additions)

@ -0,0 +1,8 @@
#!/bin/bash
apt-get install -y gnupg
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor | tee /usr/share/keyrings/nodesource.gpg > /dev/null
echo "deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
apt-get update
echo "INSTALLING NODE $NODE_MAJOR"
apt-get install -y --no-install-recommends nodejs
npm install --global yarn pm2