
Merge branch 'master' into new-branding

Andrew Kingston 2023-11-06 09:11:54 +00:00 committed by GitHub
commit fb1258072c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
52 changed files with 712 additions and 374 deletions

View file

@ -11,10 +11,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: peter-evans/repository-dispatch@v2
env:
PAYLOAD_VERSION: ${{ github.sha }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event-type: budicloud-qa-deploy
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"VERSION": "${{ github.sha }}",
"REF_NAME": "${{ github.ref_name}}"
}

View file

@ -165,17 +165,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
REF_NAME: ${{ github.ref_name}}
- uses: peter-evans/repository-dispatch@v2
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
event-type: budicloud-qa-deploy
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"VERSION": "${{ github.ref_name }}",
"REF_NAME": "${{ github.ref_name}}"
}

View file

@ -66,14 +66,21 @@ jobs:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
build-args: |
TARGETBUILD=aas
BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}

View file

@ -1,10 +0,0 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio

View file

@ -42,6 +42,7 @@ COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV NODE_MAJOR 18
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@ -49,10 +50,10 @@ ENV TARGETBUILD $TARGETBUILD
# install base dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server libaio1
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
@ -61,10 +62,8 @@ RUN apt install software-properties-common apt-transport-https gpg -y \
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
COPY scripts/install-node.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx

View file

@ -1,5 +1,5 @@
{
"version": "2.12.1",
"version": "2.12.11",
"npmClient": "yarn",
"packages": [
"packages/*"

View file

@ -27,7 +27,7 @@
"scripts": {
"preinstall": "node scripts/syncProPackage.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "lerna run build --stream",
"build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types",
"build:sdk": "lerna run --stream build:sdk",

View file

@ -3,6 +3,7 @@ const mockS3 = {
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`

View file

@ -18,8 +18,12 @@ export const ObjectStoreBuckets = {
}
const bbTmp = join(tmpdir(), ".budibase")
if (!fs.existsSync(bbTmp)) {
try {
fs.mkdirSync(bbTmp)
} catch (e: any) {
if (e.code !== "EEXIST") {
throw e
}
}
export function budibaseTempDir() {

View file

@ -249,7 +249,8 @@ export const paginatedUsers = async ({
limit,
}: SearchUsersRequest = {}) => {
const db = getGlobalDB()
const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
const pageSize = limit ?? PAGE_LIMIT
const pageLimit = pageSize + 1
// get one extra document, to have the next page
const opts: DatabaseQueryOpts = {
include_docs: true,
@ -276,7 +277,7 @@ export const paginatedUsers = async ({
const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc)
}
return pagination(userList, pageLimit, {
return pagination(userList, pageSize, {
paginate: true,
property,
getKey,
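
The paginatedUsers change above separates the requested page size from the query limit: the query always asks for one extra document, and the presence of that probe row is what signals a further page, while pagination itself is driven by pageSize. A minimal standalone sketch of the technique (hypothetical helper, not Budibase's pagination utility):

// Ask the datastore for pageSize + 1 rows; the final row is only a probe.
function pageOf<T>(rows: T[], pageSize: number) {
  const hasNextPage = rows.length > pageSize
  return { data: rows.slice(0, pageSize), hasNextPage }
}
// e.g. limit = 8 means querying 9 docs; if 9 come back, hasNextPage is true and the 9th is dropped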

View file

@ -1,2 +1,3 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000
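
As a quick sanity check on the new constant, the timestamp is exactly the millisecond value of MOCK_DATE:

// 2020-01-01T00:00:00.000Z is 18262 days after the Unix epoch: 18262 * 86400 * 1000
new Date("2020-01-01T00:00:00.000Z").getTime() === 1577836800000 // true, i.e. MOCK_DATE_TIMESTAMP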

View file

@ -30,15 +30,15 @@
part2: PrettyRelationshipDefinitions.MANY,
},
[RelationshipType.MANY_TO_ONE]: {
part1: PrettyRelationshipDefinitions.ONE,
part2: PrettyRelationshipDefinitions.MANY,
part1: PrettyRelationshipDefinitions.MANY,
part2: PrettyRelationshipDefinitions.ONE,
},
}
let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)
let relationshipPart1 = PrettyRelationshipDefinitions.MANY
let relationshipPart2 = PrettyRelationshipDefinitions.ONE
let relationshipPart1 = PrettyRelationshipDefinitions.ONE
let relationshipPart2 = PrettyRelationshipDefinitions.MANY
let originalFromColumnName = toRelationship.name,
originalToColumnName = fromRelationship.name

View file

@ -1,5 +1,5 @@
import { Checkbox, Select, RadioGroup, Stepper, Input } from "@budibase/bbui"
import DataSourceSelect from "./controls/DataSourceSelect.svelte"
import DataSourceSelect from "./controls/DataSourceSelect/DataSourceSelect.svelte"
import S3DataSourceSelect from "./controls/S3DataSourceSelect.svelte"
import DataProviderSelect from "./controls/DataProviderSelect.svelte"
import ButtonActionEditor from "./controls/ButtonActionEditor/ButtonActionEditor.svelte"

View file

@ -0,0 +1,55 @@
<script>
import { Divider, Heading } from "@budibase/bbui"
export let dividerState
export let heading
export let dataSet
export let value
export let onSelect
</script>
{#if dividerState}
<Divider />
{/if}
{#if heading}
<div class="title">
<Heading size="XS">{heading}</Heading>
</div>
{/if}
<ul class="spectrum-Menu" role="listbox">
{#each dataSet as data}
<li
class="spectrum-Menu-item"
class:is-selected={value?.label === data.label &&
value?.type === data.type}
role="option"
aria-selected="true"
tabindex="0"
on:click={() => onSelect(data)}
>
<span class="spectrum-Menu-itemLabel">
{data.label}
</span>
<svg
class="spectrum-Icon spectrum-UIIcon-Checkmark100 spectrum-Menu-checkmark spectrum-Menu-itemIcon"
focusable="false"
aria-hidden="true"
>
<use xlink:href="#spectrum-css-icon-Checkmark100" />
</svg>
</li>
{/each}
</ul>
<style>
.title {
padding: 0 var(--spacing-m) var(--spacing-s) var(--spacing-m);
}
ul {
list-style: none;
padding-left: 0px;
margin: 0px;
width: 100%;
}
</style>

View file

@ -7,10 +7,8 @@
import {
Button,
Popover,
Divider,
Select,
Layout,
Heading,
Drawer,
DrawerContent,
Icon,
@ -32,6 +30,7 @@
import IntegrationQueryEditor from "components/integration/index.svelte"
import { makePropSafe as safe } from "@budibase/string-templates"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import DataSourceCategory from "components/design/settings/controls/DataSourceSelect/DataSourceCategory.svelte"
import { API } from "api"
export let value = {}
@ -279,102 +278,81 @@
</div>
<Popover bind:this={dropdownRight} anchor={anchorRight}>
<div class="dropdown">
<div class="title">
<Heading size="XS">Tables</Heading>
</div>
<ul>
{#each tables as table}
<li on:click={() => handleSelected(table)}>{table.label}</li>
{/each}
</ul>
<DataSourceCategory
heading="Tables"
dataSet={tables}
{value}
onSelect={handleSelected}
/>
{#if views?.length}
<Divider />
<div class="title">
<Heading size="XS">Views</Heading>
</div>
<ul>
{#each views as view}
<li on:click={() => handleSelected(view)}>{view.label}</li>
{/each}
</ul>
<DataSourceCategory
dividerState={true}
heading="Views"
dataSet={views}
{value}
onSelect={handleSelected}
/>
{/if}
{#if queries?.length}
<Divider />
<div class="title">
<Heading size="XS">Queries</Heading>
</div>
<ul>
{#each queries as query}
<li
class:selected={value === query}
on:click={() => handleSelected(query)}
>
{query.label}
</li>
{/each}
</ul>
<DataSourceCategory
dividerState={true}
heading="Queries"
dataSet={queries}
{value}
onSelect={handleSelected}
/>
{/if}
{#if links?.length}
<Divider />
<div class="title">
<Heading size="XS">Relationships</Heading>
</div>
<ul>
{#each links as link}
<li on:click={() => handleSelected(link)}>{link.label}</li>
{/each}
</ul>
<DataSourceCategory
dividerState={true}
heading="Links"
dataSet={links}
{value}
onSelect={handleSelected}
/>
{/if}
{#if fields?.length}
<Divider />
<div class="title">
<Heading size="XS">Fields</Heading>
</div>
<ul>
{#each fields as field}
<li on:click={() => handleSelected(field)}>{field.label}</li>
{/each}
</ul>
<DataSourceCategory
dividerState={true}
heading="Fields"
dataSet={fields}
{value}
onSelect={handleSelected}
/>
{/if}
{#if jsonArrays?.length}
<Divider />
<div class="title">
<Heading size="XS">JSON Arrays</Heading>
</div>
<ul>
{#each jsonArrays as field}
<li on:click={() => handleSelected(field)}>{field.label}</li>
{/each}
</ul>
<DataSourceCategory
dividerState={true}
heading="JSON Arrays"
dataSet={jsonArrays}
{value}
onSelect={handleSelected}
/>
{/if}
{#if showDataProviders && dataProviders?.length}
<Divider />
<div class="title">
<Heading size="XS">Data Providers</Heading>
</div>
<ul>
{#each dataProviders as provider}
<li
class:selected={value === provider}
on:click={() => handleSelected(provider)}
>
{provider.label}
</li>
{/each}
</ul>
<DataSourceCategory
dividerState={true}
heading="Data Providers"
dataSet={dataProviders}
{value}
onSelect={handleSelected}
/>
{/if}
<DataSourceCategory
dividerState={true}
heading="Other"
dataSet={[custom]}
{value}
onSelect={handleSelected}
/>
{#if otherSources?.length}
<DataSourceCategory
dividerState={false}
dataSet={otherSources}
{value}
onSelect={handleSelected}
/>
{/if}
<Divider />
<div class="title">
<Heading size="XS">Other</Heading>
</div>
<ul>
<li on:click={() => handleSelected(custom)}>{custom.label}</li>
{#if otherSources?.length}
{#each otherSources as source}
<li on:click={() => handleSelected(source)}>{source.label}</li>
{/each}
{/if}
</ul>
</div>
</Popover>
@ -398,31 +376,6 @@
.dropdown {
padding: var(--spacing-m) 0;
z-index: 99999999;
overflow-y: scroll;
}
.title {
padding: 0 var(--spacing-m) var(--spacing-s) var(--spacing-m);
}
ul {
list-style: none;
padding-left: 0px;
margin: 0px;
}
li {
cursor: pointer;
margin: 0px;
padding: var(--spacing-s) var(--spacing-m);
font-size: var(--font-size-m);
}
.selected {
color: var(--spectrum-global-color-blue-600);
}
li:hover {
background-color: var(--spectrum-global-color-gray-200);
}
.icon {

View file

@ -1,5 +1,5 @@
<script>
import DataSourceSelect from "./DataSourceSelect.svelte"
import DataSourceSelect from "./DataSourceSelect/DataSourceSelect.svelte"
const otherSources = [{ name: "Custom", label: "Custom" }]
</script>

View file

@ -3,7 +3,6 @@
Heading,
Body,
Button,
ButtonGroup,
Table,
Layout,
Modal,
@ -46,6 +45,10 @@
datasource: {
type: "user",
},
options: {
paginate: true,
limit: 10,
},
})
let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
@ -65,10 +68,12 @@
{ column: "role", component: RoleTableRenderer },
]
let userData = []
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
$: isOwner = $auth.accountPortalAccess && $admin.cloud
$: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled
$: debouncedUpdateFetch(searchEmail)
$: schema = {
email: {
@ -88,16 +93,6 @@
width: "1fr",
},
}
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
$: pendingSchema = getPendingSchema(schema)
$: userData = []
$: inviteUsersResponse = { successful: [], unsuccessful: [] }
@ -121,9 +116,15 @@
}
})
}
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
const invitesToSchema = invites => {
return invites.map(invite => {
@ -143,7 +144,9 @@
const updateFetch = email => {
fetch.update({
query: {
email,
string: {
email,
},
},
})
}
@ -296,7 +299,7 @@
{/if}
<div class="controls">
{#if !readonly}
<ButtonGroup>
<div class="buttons">
<Button
disabled={readonly}
on:click={$licensing.userLimitReached
@ -315,7 +318,7 @@
>
Import
</Button>
</ButtonGroup>
</div>
{:else}
<ScimBanner />
{/if}
@ -390,12 +393,15 @@
</Modal>
<style>
.buttons {
display: flex;
gap: 10px;
}
.pagination {
display: flex;
flex-direction: row;
justify-content: flex-end;
}
.controls {
display: flex;
flex-direction: row;
@ -403,7 +409,6 @@
align-items: center;
gap: var(--spacing-xl);
}
.controls-right {
display: flex;
flex-direction: row;
@ -411,7 +416,6 @@
align-items: center;
gap: var(--spacing-xl);
}
.controls-right :global(.spectrum-Search) {
width: 200px;
}

View file

@ -5467,17 +5467,17 @@
},
"settings": [
{
"type": "select",
"type": "table",
"label": "Data",
"key": "dataSource"
},
{
"type": "radio",
"label": "Type",
"key": "actionType",
"options": ["Create", "Update", "View"],
"defaultValue": "Create"
},
{
"type": "table",
"label": "Data",
"key": "dataSource"
},
{
"type": "text",
"label": "Title",
@ -5508,13 +5508,37 @@
},
{
"type": "text",
"label": "Empty text",
"label": "No rows found",
"key": "noRowsMessage",
"defaultValue": "We couldn't find a row to display",
"nested": true
}
]
},
{
"section": true,
"name": "Fields",
"settings": [
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
}
]
},
{
"section": true,
"name": "Buttons",
@ -5566,30 +5590,6 @@
}
]
},
{
"section": true,
"name": "Fields",
"settings": [
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
}
]
},
{
"tag": "style",
"type": "select",
@ -5924,4 +5924,4 @@
}
]
}
}
}

View file

@ -220,15 +220,11 @@
</BlockComponent>
{/if}
</BlockComponent>
{#if description}
<BlockComponent
type="text"
props={{ text: description }}
order={1}
/>
{/if}
</BlockComponent>
{/if}
{#if description}
<BlockComponent type="text" props={{ text: description }} order={1} />
{/if}
{#key fields}
<BlockComponent type="fieldgroup" props={{ labelPosition }} order={1}>
{#each fields as field, idx}

View file

@ -1,27 +1,10 @@
<script context="module">
// We can create a module level cache for all relationship cells to avoid
// having to fetch the table definition one time for each cell
let primaryDisplayCache = {}
const getPrimaryDisplayForTableId = async (API, tableId) => {
if (primaryDisplayCache[tableId]) {
return primaryDisplayCache[tableId]
}
const definition = await API.fetchTableDefinition(tableId)
const primaryDisplay =
definition?.primaryDisplay || definition?.schema?.[0]?.name
primaryDisplayCache[tableId] = primaryDisplay
return primaryDisplay
}
</script>
<script>
import { getColor } from "../lib/utils"
import { onMount, getContext } from "svelte"
import { Icon, Input, ProgressCircle, clickOutside } from "@budibase/bbui"
import { debounce } from "../../../utils/utils"
const { API, dispatch } = getContext("grid")
const { API, dispatch, cache } = getContext("grid")
export let value
export let api
@ -147,7 +130,9 @@
// Find the primary display for the related table
if (!primaryDisplay) {
searching = true
primaryDisplay = await getPrimaryDisplayForTableId(API, schema.tableId)
primaryDisplay = await cache.actions.getPrimaryDisplayForTableId(
schema.tableId
)
}
// Show initial list of results
@ -195,7 +180,7 @@
const toggleRow = async row => {
if (value?.some(x => x._id === row._id)) {
// If the row is already included, remove it and update the candidate
// row to be the the same position if possible
// row to be the same position if possible
if (oneRowOnly) {
await onChange([])
} else {
@ -260,31 +245,29 @@
class:wrap={editable || contentLines > 1}
on:wheel={e => (focused ? e.stopPropagation() : null)}
>
{#if Array.isArray(value) && value.length}
{#each value as relationship}
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
<div class="badge">
<span
on:click={editable
? () => showRelationship(relationship._id)
: null}
>
{readable(
relationship[primaryDisplay] || relationship.primaryDisplay
)}
</span>
{#if editable}
<Icon
name="Close"
size="XS"
hoverable
on:click={() => toggleRow(relationship)}
/>
{/if}
</div>
{/if}
{/each}
{/if}
{#each value || [] as relationship}
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
<div class="badge">
<span
on:click={editable
? () => showRelationship(relationship._id)
: null}
>
{readable(
relationship[primaryDisplay] || relationship.primaryDisplay
)}
</span>
{#if editable}
<Icon
name="Close"
size="XS"
hoverable
on:click={() => toggleRow(relationship)}
/>
{/if}
</div>
{/if}
{/each}
{#if editable}
<div class="add" on:click={open}>
<Icon name="Add" size="S" />
@ -320,7 +303,7 @@
<div class="searching">
<ProgressCircle size="S" />
</div>
{:else if Array.isArray(searchResults) && searchResults.length}
{:else if searchResults?.length}
<div class="results">
{#each searchResults as row, idx}
<div

View file

@ -0,0 +1,47 @@
export const createActions = context => {
const { API } = context
// Cache for the primary display columns of different tables.
// If we ever need to cache table definitions for other purposes then we can
// expand this to be a more generic cache.
let primaryDisplayCache = {}
const resetPrimaryDisplayCache = () => {
primaryDisplayCache = {}
}
const getPrimaryDisplayForTableId = async tableId => {
// If we've never encountered this tableId before then store a promise that
// resolves to the primary display so that subsequent invocations before the
// promise completes can reuse this promise
if (!primaryDisplayCache[tableId]) {
primaryDisplayCache[tableId] = new Promise(resolve => {
API.fetchTableDefinition(tableId).then(def => {
const display = def?.primaryDisplay || def?.schema?.[0]?.name
primaryDisplayCache[tableId] = display
resolve(display)
})
})
}
// We await the result so that we account for both promises and primitives
return await primaryDisplayCache[tableId]
}
return {
cache: {
actions: {
getPrimaryDisplayForTableId,
resetPrimaryDisplayCache,
},
},
}
}
export const initialise = context => {
const { datasource, cache } = context
// Wipe the caches whenever the datasource changes to ensure we aren't
// storing any stale information
datasource.subscribe(cache.actions.resetPrimaryDisplayCache)
}
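
Worth noting about the cache above: it stores the pending promise itself, so concurrent cells asking for the same table before the first fetch resolves all share a single request. A usage sketch (the table id is made up):

// Both calls start before fetchTableDefinition has resolved, but only one
// request is made; the second call awaits the same cached promise.
const [a, b] = await Promise.all([
  cache.actions.getPrimaryDisplayForTableId("ta_example"),
  cache.actions.getPrimaryDisplayForTableId("ta_example"),
])
// a === b, and API.fetchTableDefinition ran once for "ta_example"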

View file

@ -160,11 +160,6 @@ export const createActions = context => {
return getAPI()?.actions.canUseColumn(name)
}
// Gets the default number of rows for a single page
const getFeatures = () => {
return getAPI()?.actions.getFeatures()
}
return {
datasource: {
...datasource,
@ -177,7 +172,6 @@ export const createActions = context => {
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}

View file

@ -35,11 +35,6 @@ export const createActions = context => {
return $columns.some(col => col.name === name) || $sticky?.name === name
}
const getFeatures = () => {
// We don't support any features
return {}
}
return {
nonPlus: {
actions: {
@ -50,7 +45,6 @@ export const createActions = context => {
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}

View file

@ -1,5 +1,4 @@
import { get } from "svelte/store"
import TableFetch from "../../../../fetch/TableFetch"
const SuppressErrors = true
@ -46,10 +45,6 @@ export const createActions = context => {
return $columns.some(col => col.name === name) || $sticky?.name === name
}
const getFeatures = () => {
return new TableFetch({ API }).determineFeatureFlags()
}
return {
table: {
actions: {
@ -60,7 +55,6 @@ export const createActions = context => {
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}

View file

@ -1,5 +1,4 @@
import { get } from "svelte/store"
import ViewV2Fetch from "../../../../fetch/ViewV2Fetch"
const SuppressErrors = true
@ -46,10 +45,6 @@ export const createActions = context => {
)
}
const getFeatures = () => {
return new ViewV2Fetch({ API }).determineFeatureFlags()
}
return {
viewV2: {
actions: {
@ -60,7 +55,6 @@ export const createActions = context => {
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}

View file

@ -19,6 +19,7 @@ import * as Datasource from "./datasource"
import * as Table from "./datasources/table"
import * as ViewV2 from "./datasources/viewV2"
import * as NonPlus from "./datasources/nonPlus"
import * as Cache from "./cache"
const DependencyOrderedStores = [
Sort,
@ -42,6 +43,7 @@ const DependencyOrderedStores = [
Clipboard,
Config,
Notifications,
Cache,
]
export const attachStores = context => {

View file

@ -114,10 +114,6 @@ export const createActions = context => {
const $allFilters = get(allFilters)
const $sort = get(sort)
// Determine how many rows to fetch per page
const features = datasource.actions.getFeatures()
const limit = features?.supportsPagination ? RowPageSize : null
// Create new fetch model
const newFetch = fetchData({
API,
@ -126,8 +122,12 @@ export const createActions = context => {
filter: $allFilters,
sortColumn: $sort.column,
sortOrder: $sort.order,
limit,
limit: RowPageSize,
paginate: true,
// Disable client side limiting, so that for queries and custom data
// sources we don't impose fake row limits. We want all the data.
clientSideLimiting: false,
},
})

View file

@ -43,6 +43,11 @@ export default class DataFetch {
// Pagination config
paginate: true,
// Client side feature customisation
clientSideSearching: true,
clientSideSorting: true,
clientSideLimiting: true,
}
// State of the fetch
@ -208,24 +213,32 @@ export default class DataFetch {
* Fetches some filtered, sorted and paginated data
*/
async getPage() {
const { sortColumn, sortOrder, sortType, limit } = this.options
const {
sortColumn,
sortOrder,
sortType,
limit,
clientSideSearching,
clientSideSorting,
clientSideLimiting,
} = this.options
const { query } = get(this.store)
// Get the actual data
let { rows, info, hasNextPage, cursor, error } = await this.getData()
// If we don't support searching, do a client search
if (!this.features.supportsSearch) {
if (!this.features.supportsSearch && clientSideSearching) {
rows = runLuceneQuery(rows, query)
}
// If we don't support sorting, do a client-side sort
if (!this.features.supportsSort) {
if (!this.features.supportsSort && clientSideSorting) {
rows = luceneSort(rows, sortColumn, sortOrder, sortType)
}
// If we don't support pagination, do a client-side limit
if (!this.features.supportsPagination) {
if (!this.features.supportsPagination && clientSideLimiting) {
rows = luceneLimit(rows, limit)
}
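
These flags let callers opt out of each client-side fallback individually; the grid rows store above does exactly this for limiting. A sketch of the call shape (assuming the usual fetchData({ API, datasource, options }) signature):

const fetch = fetchData({
  API,
  datasource,
  options: {
    limit: 100,
    paginate: true,
    // The source can't paginate server-side, but we still want every row
    // rather than a client-side luceneLimit cut-off.
    clientSideLimiting: false,
  },
})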

View file

@ -70,6 +70,13 @@ module AwsMock {
Contents: {},
})
)
// @ts-ignore
this.getObject = jest.fn(
response({
Body: "",
})
)
}
aws.DynamoDB = { DocumentClient }

View file

@ -32,11 +32,8 @@ import {
tenancy,
users,
} from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants"
import {
buildDefaultDocs,
DEFAULT_BB_DATASOURCE_ID,
} from "../../db/defaultData/datasource_bb_default"
import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { stringToReadStream } from "../../utilities"
import { doesUserHaveLock } from "../../utilities/redis"

View file

@ -5,6 +5,8 @@ import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { mocks } from "@budibase/backend-core/tests"
mocks.licenses.useBackups()
describe("/backups", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@ -12,16 +14,17 @@ describe("/backups", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
tk.reset()
await config.init()
})
describe("exportAppDump", () => {
describe("/api/backups/export", () => {
it("should be able to export app", async () => {
const res = await request
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
.expect(200)
expect(res.headers["content-type"]).toEqual("application/gzip")
const { body, headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)
expect(body instanceof Buffer).toBe(true)
expect(headers["content-type"]).toEqual("application/gzip")
expect(events.app.exported).toBeCalledTimes(1)
})
@ -36,11 +39,11 @@ describe("/backups", () => {
it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE)
const res = await request
.post(`/api/backups/export?appId=${config.getAppId()}`)
.set(config.defaultHeaders())
const { headers } = await config.api.backup.exportBasicBackup(
config.getAppId()!
)
expect(res.headers["content-disposition"]).toEqual(
expect(headers["content-disposition"]).toEqual(
`attachment; filename="${
config.getApp()!.name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
@ -48,6 +51,21 @@ describe("/backups", () => {
})
})
describe("/api/backups/import", () => {
it("should be able to import an app", async () => {
const appId = config.getAppId()!
const automation = await config.createAutomation()
await config.createAutomationLog(automation, appId)
await config.createScreen()
const exportRes = await config.api.backup.createBackup(appId)
expect(exportRes.backupId).toBeDefined()
const importRes = await config.api.backup.importBackup(
appId,
exportRes.backupId
)
})
})
describe("calculateBackupStats", () => {
it("should be able to calculate the backup statistics", async () => {
await config.createAutomation()

View file

@ -49,7 +49,12 @@ describe.each([
let table: Table
let tableId: string
afterAll(setup.afterAll)
afterAll(async () => {
if (dsProvider) {
await dsProvider.stopContainer()
}
setup.afterAll()
})
beforeAll(async () => {
await config.init()
@ -521,20 +526,17 @@ describe.each([
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.patch(table._id!, {
const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
name: "Updated Name",
})
expect((res as any).res.statusMessage).toEqual(
`${table.name} updated successfully.`
)
expect(res.body.name).toEqual("Updated Name")
expect(res.body.description).toEqual(existing.description)
expect(row.name).toEqual("Updated Name")
expect(row.description).toEqual(existing.description)
const savedRow = await loadRow(res.body._id, table._id!)
const savedRow = await loadRow(row._id!, table._id!)
expect(savedRow.body.description).toEqual(existing.description)
expect(savedRow.body.name).toEqual("Updated Name")

View file

@ -492,6 +492,67 @@ describe("/tables", () => {
}
})
it("should succeed when the row is created from the other side of the relationship", async () => {
// We found a bug just after releasing this feature where if the row was created from the
// users table, not the table linking to it, the migration would succeed but lose the data.
// This happened because the order of the documents in the link was reversed.
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: InternalTable.USER_METADATA,
},
},
})
let testRow = await config.api.row.save(table._id!, {})
await Promise.all(
users.map(u =>
config.api.row.patch(InternalTable.USER_METADATA, {
tableId: InternalTable.USER_METADATA,
_rev: u._rev!,
_id: u._id!,
test: [testRow],
})
)
)
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const resp = await config.api.row.get(table._id!, testRow._id!)
const migratedRow = resp.body as Row
expect(migratedRow["user column"]).toBeDefined()
expect(migratedRow["user relationship"]).not.toBeDefined()
expect(migratedRow["user column"]).toHaveLength(3)
expect(migratedRow["user column"].map((u: Row) => u._id)).toEqual(
expect.arrayContaining(users.map(u => u._id))
)
})
it("should successfully migrate a many-to-many user relationship to a users column", async () => {
const table = await config.api.table.create({
name: "table",

View file

@ -172,3 +172,8 @@ export enum AutomationErrors {
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
export const MAX_AUTOMATION_RECURRING_ERRORS = 5
export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"

View file

@ -1,4 +1,12 @@
import { AutoFieldSubTypes, FieldTypes } from "../../constants"
import {
AutoFieldSubTypes,
FieldTypes,
DEFAULT_BB_DATASOURCE_ID,
DEFAULT_INVENTORY_TABLE_ID,
DEFAULT_EMPLOYEE_TABLE_ID,
DEFAULT_EXPENSES_TABLE_ID,
DEFAULT_JOBS_TABLE_ID,
} from "../../constants"
import { importToRows } from "../../api/controllers/table/utils"
import { cloneDeep } from "lodash/fp"
import LinkDocument from "../linkedRows/LinkDocument"
@ -16,12 +24,6 @@ import {
TableSourceType,
} from "@budibase/types"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
const defaultDatasource = {
_id: DEFAULT_BB_DATASOURCE_ID,
type: dbCore.BUDIBASE_DATASOURCE_TYPE,

View file

@ -7,10 +7,12 @@ import {
TableSourceType,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import { InvalidColumns, NoEmptyFilterStrings } from "../constants"
import {
InvalidColumns,
NoEmptyFilterStrings,
DEFAULT_BB_DATASOURCE_ID,
} from "../constants"
import { helpers } from "@budibase/shared-core"
import * as external from "../api/controllers/table/external"
import * as internal from "../api/controllers/table/internal"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
@ -96,7 +98,8 @@ export function isInternalTableID(tableId: string) {
export function isExternalTable(table: Table) {
if (
table?.sourceId &&
table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR)
table.sourceId.includes(DocumentType.DATASOURCE + SEPARATOR) &&
table?.sourceId !== DEFAULT_BB_DATASOURCE_ID
) {
return true
} else if (table?.sourceType === TableSourceType.EXTERNAL) {
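
The extra condition is needed because the default internal datasource id also starts with the datasource prefix, so prefix matching alone would misclassify the built-in sample tables as external. A simplified sketch (the external datasource id is made up):

const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
const looksExternal = (sourceId: string) =>
  sourceId.includes("datasource_") && sourceId !== DEFAULT_BB_DATASOURCE_ID

looksExternal("datasource_plus_abc123") // true - a hypothetical external datasource
looksExternal(DEFAULT_BB_DATASOURCE_ID) // false - the built-in sample data source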

View file

@ -26,7 +26,6 @@ export interface DBDumpOpts {
export interface ExportOpts extends DBDumpOpts {
tar?: boolean
excludeRows?: boolean
excludeLogs?: boolean
encryptPassword?: string
}
@ -83,14 +82,15 @@ export async function exportDB(
})
}
function defineFilter(excludeRows?: boolean, excludeLogs?: boolean) {
const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
function defineFilter(excludeRows?: boolean) {
const ids = [
USER_METDATA_PREFIX,
LINK_USER_METADATA_PREFIX,
AUTOMATION_LOG_PREFIX,
]
if (excludeRows) {
ids.push(TABLE_ROW_PREFIX)
}
if (excludeLogs) {
ids.push(AUTOMATION_LOG_PREFIX)
}
return (doc: any) =>
!ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
}
@ -118,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
fs.writeFileSync(join(tmpPath, path), contents)
}
}
// get all of the files
// get all the files
else {
tmpPath = await objectStore.retrieveDirectory(
ObjectStoreBuckets.APPS,
@ -141,7 +141,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
// enforce an export of app DB to the tmp path
const dbPath = join(tmpPath, DB_EXPORT_FILE)
await exportDB(appId, {
filter: defineFilter(config?.excludeRows, config?.excludeLogs),
filter: defineFilter(config?.excludeRows),
exportPath: dbPath,
})
@ -191,7 +191,6 @@ export async function streamExportApp({
}) {
const tmpPath = await exportApp(appId, {
excludeRows,
excludeLogs: true,
tar: true,
encryptPassword,
})

View file

@ -7,6 +7,7 @@ import {
isBBReferenceField,
isRelationshipField,
LinkDocument,
LinkInfo,
RelationshipFieldMetadata,
RelationshipType,
Row,
@ -125,7 +126,23 @@ abstract class UserColumnMigrator implements ColumnMigrator {
protected newColumn: BBReferenceFieldMetadata
) {}
abstract updateRow(row: Row, link: LinkDocument): void
abstract updateRow(row: Row, linkInfo: LinkInfo): void
pickUserTableLinkSide(link: LinkDocument): LinkInfo {
if (link.doc1.tableId === InternalTable.USER_METADATA) {
return link.doc1
} else {
return link.doc2
}
}
pickOtherTableLinkSide(link: LinkDocument): LinkInfo {
if (link.doc1.tableId === InternalTable.USER_METADATA) {
return link.doc2
} else {
return link.doc1
}
}
async doMigration(): Promise<MigrationResult> {
let oldTable = cloneDeep(this.table)
@ -137,15 +154,17 @@ abstract class UserColumnMigrator implements ColumnMigrator {
let links = await sdk.links.fetchWithDocument(this.table._id!)
for (let link of links) {
const userSide = this.pickUserTableLinkSide(link)
const otherSide = this.pickOtherTableLinkSide(link)
if (
link.doc1.tableId !== this.table._id ||
link.doc1.fieldName !== this.oldColumn.name ||
link.doc2.tableId !== InternalTable.USER_METADATA
otherSide.tableId !== this.table._id ||
otherSide.fieldName !== this.oldColumn.name ||
userSide.tableId !== InternalTable.USER_METADATA
) {
continue
}
let row = rowsById[link.doc1.rowId]
let row = rowsById[otherSide.rowId]
if (!row) {
// This can happen if the row has been deleted but the link hasn't,
// which was a state that was found during the initial testing of this
@ -153,7 +172,7 @@ abstract class UserColumnMigrator implements ColumnMigrator {
continue
}
this.updateRow(row, link)
this.updateRow(row, userSide)
}
let db = context.getAppDB()
@ -175,20 +194,20 @@ abstract class UserColumnMigrator implements ColumnMigrator {
}
class SingleUserColumnMigrator extends UserColumnMigrator {
updateRow(row: Row, link: LinkDocument): void {
updateRow(row: Row, linkInfo: LinkInfo): void {
row[this.newColumn.name] = dbCore.getGlobalIDFromUserMetadataID(
link.doc2.rowId
linkInfo.rowId
)
}
}
class MultiUserColumnMigrator extends UserColumnMigrator {
updateRow(row: Row, link: LinkDocument): void {
updateRow(row: Row, linkInfo: LinkInfo): void {
if (!row[this.newColumn.name]) {
row[this.newColumn.name] = []
}
row[this.newColumn.name].push(
dbCore.getGlobalIDFromUserMetadataID(link.doc2.rowId)
dbCore.getGlobalIDFromUserMetadataID(linkInfo.rowId)
)
}
}
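
The bug this fixes is that a user-relationship link stores its two sides in creation order, so the user metadata row can sit in either doc1 or doc2. pickUserTableLinkSide and pickOtherTableLinkSide normalise that before updateRow runs. An illustrative pair of link documents (ids made up, shape as per LinkInfo):

// The same relationship created from opposite sides; only the doc ordering differs.
const createdFromTable = {
  doc1: { tableId: "ta_mytable", rowId: "ro_1", fieldName: "user relationship" },
  doc2: { tableId: "ta_users", rowId: "ro_user_1", fieldName: "test" },
}
const createdFromUsersTable = {
  doc1: { tableId: "ta_users", rowId: "ro_user_1", fieldName: "test" },
  doc2: { tableId: "ta_mytable", rowId: "ro_1", fieldName: "user relationship" },
}
// pickUserTableLinkSide returns the user metadata side in both cases, so the
// migration no longer depends on which side the row was created from.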

View file

@ -805,8 +805,9 @@ class TestConfiguration {
// AUTOMATION LOG
async createAutomationLog(automation: Automation) {
return await context.doInAppContext(this.getProdAppId(), async () => {
async createAutomationLog(automation: Automation, appId?: string) {
appId = appId || this.getProdAppId()
return await context.doInAppContext(appId!, async () => {
return await pro.sdk.automations.logs.storeLog(
automation,
basicAutomationResults(automation._id!)

View file

@ -0,0 +1,45 @@
import {
CreateAppBackupResponse,
ImportAppBackupResponse,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
export class BackupAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
exportBasicBackup = async (appId: string) => {
const result = await this.request
.post(`/api/backups/export?appId=${appId}`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /application\/gzip/)
.expect(200)
return {
body: result.body as Buffer,
headers: result.headers,
}
}
createBackup = async (appId: string) => {
const result = await this.request
.post(`/api/apps/${appId}/backups`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body as CreateAppBackupResponse
}
importBackup = async (
appId: string,
backupId: string
): Promise<ImportAppBackupResponse> => {
const result = await this.request
.post(`/api/apps/${appId}/backups/${backupId}/import`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body as ImportAppBackupResponse
}
}

View file

@ -7,6 +7,7 @@ import { DatasourceAPI } from "./datasource"
import { LegacyViewAPI } from "./legacyView"
import { ScreenAPI } from "./screen"
import { ApplicationAPI } from "./application"
import { BackupAPI } from "./backup"
import { AttachmentAPI } from "./attachment"
export default class API {
@ -18,6 +19,7 @@ export default class API {
datasource: DatasourceAPI
screen: ScreenAPI
application: ApplicationAPI
backup: BackupAPI
attachment: AttachmentAPI
constructor(config: TestConfiguration) {
@ -29,6 +31,7 @@ export default class API {
this.datasource = new DatasourceAPI(config)
this.screen = new ScreenAPI(config)
this.application = new ApplicationAPI(config)
this.backup = new BackupAPI(config)
this.attachment = new AttachmentAPI(config)
}
}

View file

@ -55,7 +55,13 @@ export class RowAPI extends TestAPI {
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
if (resp.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
resp.status
}, body: ${JSON.stringify(resp.body)}`
)
}
return resp.body as Row
}
@ -77,13 +83,20 @@ export class RowAPI extends TestAPI {
sourceId: string,
row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 }
) => {
return this.request
): Promise<Row> => {
let resp = await this.request
.patch(`/api/${sourceId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
if (resp.status !== expectStatus) {
throw new Error(
`Expected status ${expectStatus} but got ${
resp.status
}, body: ${JSON.stringify(resp.body)}`
)
}
return resp.body as Row
}
delete = async (

View file

@ -20,3 +20,8 @@ export interface CreateAppBackupResponse {
export interface UpdateAppBackupRequest {
name: string
}
export interface ImportAppBackupResponse {
restoreId: string
message: string
}

View file

@ -1,17 +1,15 @@
import { Document } from "../document"
export interface LinkInfo {
rowId: string
fieldName: string
tableId: string
}
export interface LinkDocument extends Document {
type: string
doc1: {
rowId: string
fieldName: string
tableId: string
}
doc2: {
rowId: string
fieldName: string
tableId: string
}
doc1: LinkInfo
doc2: LinkInfo
}
export interface LinkDocumentValue {

View file

@ -1,7 +1,7 @@
## Description
_Describe the problem or feature in addition to a link to the relevant github issues._
Addresses:
### Addresses:
- `<Enter the Link to the issue(s) this PR addresses>`
- ...more if required
@ -10,9 +10,3 @@ Addresses:
## Screenshots
_If a UI facing feature, a short video of the happy path, and some screenshots of the new functionality._
## Documentation
- [ ] I have reviewed the budibase documentation to verify if this feature requires any changes. If changes or new docs are required I have written them.

View file

@ -17,7 +17,7 @@
"test:notify": "node scripts/testResultsWebhook",
"test:cloud:prod": "yarn run test --testPathIgnorePatterns=\\.integration\\.",
"test:cloud:qa": "yarn run test",
"test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.license\\.",
"test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.licensing\\.",
"serve:test:self:ci": "start-server-and-test dev:built http://localhost:4001/health test:self:ci",
"serve": "start-server-and-test dev:built http://localhost:4001/health",
"dev:built": "cd ../ && yarn dev:built"

View file

@ -99,9 +99,11 @@ export default class LicenseAPI extends BaseAPI {
}, opts)
}
async updatePlan(opts: APIRequestOpts = { status: 200 }) {
async updatePlan(priceId: string, opts: APIRequestOpts = { status: 200 }) {
return this.doRequest(() => {
return this.client.put(`/api/license/plan`)
return this.client.put(`/api/license/plan`, {
body: { priceId },
})
}, opts)
}

View file

@ -38,9 +38,19 @@ export default class StripeAPI extends BaseAPI {
}, opts)
}
async linkStripeCustomer(opts: APIRequestOpts = { status: 200 }) {
async linkStripeCustomer(
accountId: string,
stripeCustomerId: string,
opts: APIRequestOpts = { status: 200 }
) {
return this.doRequest(() => {
return this.client.post(`/api/stripe/link`)
return this.client.post(`/api/stripe/link`, {
body: {
accountId,
stripeCustomerId,
},
internal: true,
})
}, opts)
}

View file

@ -0,0 +1,114 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Hosting, PlanType } from "@budibase/types"
const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY)
describe("license management", () => {
const config = new TestConfiguration()
beforeAll(async () => {
await config.beforeAll()
})
afterAll(async () => {
await config.afterAll()
})
it("retrieves plans, creates checkout session, and updates license", async () => {
// Create cloud account
const createAccountRequest = fixtures.accounts.generateAccount({
hosting: Hosting.CLOUD,
})
const [createAccountRes, account] =
await config.accountsApi.accounts.create(createAccountRequest, {
autoVerify: true,
})
// Self response has free license
await config.doInNewState(async () => {
await config.loginAsAccount(createAccountRequest)
const [selfRes, selfBody] = await config.api.accounts.self()
expect(selfBody.license.plan.type).toBe(PlanType.FREE)
})
// Retrieve plans
const [plansRes, planBody] = await config.api.licenses.getPlans()
// Select priceId from premium plan
let premiumPriceId = null
let businessPriceId = ""
for (const plan of planBody) {
if (plan.type === PlanType.PREMIUM) {
premiumPriceId = plan.prices[0].priceId
}
if (plan.type === PlanType.BUSINESS) {
businessPriceId = plan.prices[0].priceId
}
}
// Create checkout session for price
const checkoutSessionRes = await config.api.stripe.createCheckoutSession(
premiumPriceId
)
const checkoutSessionUrl = checkoutSessionRes[1].url
expect(checkoutSessionUrl).toContain("checkout.stripe.com")
// Create stripe customer
const customer = await stripe.customers.create({
email: createAccountRequest.email,
})
// Create payment method
const paymentMethod = await stripe.paymentMethods.create({
type: "card",
card: {
token: "tok_visa", // Test Visa Card
},
})
// Attach payment method to customer
await stripe.paymentMethods.attach(paymentMethod.id, {
customer: customer.id,
})
// Update customer
await stripe.customers.update(customer.id, {
invoice_settings: {
default_payment_method: paymentMethod.id,
},
})
// Create subscription for premium plan
const subscription = await stripe.subscriptions.create({
customer: customer.id,
items: [
{
price: premiumPriceId,
quantity: 1,
},
],
default_payment_method: paymentMethod.id,
collection_method: "charge_automatically",
})
await config.doInNewState(async () => {
// License updated from Free to Premium
await config.loginAsAccount(createAccountRequest)
await config.api.stripe.linkStripeCustomer(account.accountId, customer.id)
const [_, selfBodyPremium] = await config.api.accounts.self()
expect(selfBodyPremium.license.plan.type).toBe(PlanType.PREMIUM)
// Create portal session - Check URL
const [portalRes, portalSessionBody] =
await config.api.stripe.createPortalSession(customer.id)
expect(portalSessionBody.url).toContain("billing.stripe.com")
// Update subscription from premium to business license
await config.api.licenses.updatePlan(businessPriceId)
// License updated to Business
const [selfRes, selfBodyBusiness] = await config.api.accounts.self()
expect(selfBodyBusiness.license.plan.type).toBe(PlanType.BUSINESS)
})
})
})

View file

@ -28,6 +28,7 @@ const env = {
MARIADB_DB: process.env.MARIADB_DB,
MARIADB_USER: process.env.MARIADB_USER,
MARIADB_PASSWORD: process.env.MARIADB_PASSWORD,
STRIPE_SECRET_KEY: process.env.STRIPE_SECRET_KEY,
}
export = env

View file

@ -7,4 +7,4 @@ else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio
chmod +x minio

scripts/install-node.sh (new file, 8 lines added)
View file

@ -0,0 +1,8 @@
#!/bin/bash
apt-get install -y gnupg
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor | tee /usr/share/keyrings/nodesource.gpg > /dev/null
echo "deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
apt-get update
echo "INSTALLING NODE $NODE_MAJOR"
apt-get install -y --no-install-recommends nodejs
npm install --global yarn pm2