diff --git a/lerna.json b/lerna.json index 54e106cd5a..623fbf6d43 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.20.10", + "version": "2.20.11", "npmClient": "yarn", "packages": [ "packages/*", diff --git a/packages/account-portal b/packages/account-portal index ab324e35d8..de6d44c372 160000 --- a/packages/account-portal +++ b/packages/account-portal @@ -1 +1 @@ -Subproject commit ab324e35d855012bd0f49caa53c6dd765223c6fa +Subproject commit de6d44c372a7f48ca0ce8c6c0c19311d4bc21646 diff --git a/packages/backend-core/src/objectStore/buckets/plugins.ts b/packages/backend-core/src/objectStore/buckets/plugins.ts index 6f1b7116ae..02be9345ab 100644 --- a/packages/backend-core/src/objectStore/buckets/plugins.ts +++ b/packages/backend-core/src/objectStore/buckets/plugins.ts @@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types" // URLS -export function enrichPluginURLs(plugins: Plugin[]) { +export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] { if (!plugins || !plugins.length) { return [] } diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 4d8b1bb9a4..7b93458b52 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -29,6 +29,7 @@ export enum Databases { WRITE_THROUGH = "writeThrough", LOCKS = "locks", SOCKET_IO = "socket_io", + BPM_EVENTS = "bpmEvents", } /** diff --git a/packages/builder/src/components/common/HelpMenu.svelte b/packages/builder/src/components/common/HelpMenu.svelte index f6e2f42c98..baff9a5a27 100644 --- a/packages/builder/src/components/common/HelpMenu.svelte +++ b/packages/builder/src/components/common/HelpMenu.svelte @@ -1,11 +1,11 @@ @@ -67,13 +95,29 @@ options={componentOptions} on:change={() => (parameters.columns = [])} /> + - { + const columns = e.detail + parameters.customHeaders = columns.reduce((headerMap, column) => { + return { + [column.name]: column.displayName, + ...headerMap, + } + }, {}) + }} /> @@ -97,8 +141,8 @@ .params { display: grid; column-gap: var(--spacing-xs); - row-gap: var(--spacing-s); - grid-template-columns: 90px 1fr; + row-gap: var(--spacing-m); + grid-template-columns: 90px 1fr 90px; align-items: center; } diff --git a/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte b/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte index 2b9fa573c2..742ab785a1 100644 --- a/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte +++ b/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte @@ -29,6 +29,12 @@ allowLinks: true, }) + $: { + value = (value || []).filter( + column => (schema || {})[column.name || column] !== undefined + ) + } + const getText = value => { if (!value?.length) { return "All columns" diff --git a/packages/builder/src/helpers/data/utils.js b/packages/builder/src/helpers/data/utils.js index a29ce8db6d..a592b57a26 100644 --- a/packages/builder/src/helpers/data/utils.js +++ b/packages/builder/src/helpers/data/utils.js @@ -17,6 +17,10 @@ export function breakQueryString(qs) { return paramObj } +function isEncoded(str) { + return typeof str == "string" && decodeURIComponent(str) !== str +} + export function buildQueryString(obj) { let str = "" if (obj) { @@ -35,7 +39,7 @@ export function buildQueryString(obj) { value = value.replace(binding, marker) bindingMarkers[marker] = binding }) - let encoded = encodeURIComponent(value || "") + let encoded = 
isEncoded(value) ? value : encodeURIComponent(value || "") Object.entries(bindingMarkers).forEach(([marker, binding]) => { encoded = encoded.replace(marker, binding) }) diff --git a/packages/builder/src/helpers/planTitle.js b/packages/builder/src/helpers/planTitle.js index 098bfb4529..79f2bc2382 100644 --- a/packages/builder/src/helpers/planTitle.js +++ b/packages/builder/src/helpers/planTitle.js @@ -25,3 +25,7 @@ export function getFormattedPlanName(userPlanType) { } return `${planName} Plan` } + +export function isPremiumOrAbove(userPlanType) { + return ![PlanType.PRO, PlanType.TEAM, PlanType.FREE].includes(userPlanType) +} diff --git a/packages/builder/src/helpers/tests/dataUtils.test.js b/packages/builder/src/helpers/tests/dataUtils.test.js index 8fc2d706d7..bd207ea339 100644 --- a/packages/builder/src/helpers/tests/dataUtils.test.js +++ b/packages/builder/src/helpers/tests/dataUtils.test.js @@ -39,4 +39,11 @@ describe("check query string utils", () => { expect(broken.key1).toBe(obj2.key1) expect(broken.key2).toBe(obj2.key2) }) + + it("should not encode a URL more than once when building the query string", () => { + const queryString = buildQueryString({ + values: "a%2Cb%2Cc", + }) + expect(queryString).toBe("values=a%2Cb%2Cc") + }) }) diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js index b2068ad152..68478b76ac 100644 --- a/packages/client/src/utils/buttonActions.js +++ b/packages/client/src/utils/buttonActions.js @@ -341,7 +341,11 @@ const exportDataHandler = async action => { tableId: selection.tableId, rows: selection.selectedRows, format: action.parameters.type, - columns: action.parameters.columns, + columns: action.parameters.columns?.map( + column => column.name || column + ), + delimiter: action.parameters.delimiter, + customHeaders: action.parameters.customHeaders, }) download( new Blob([data], { type: "text/plain" }), diff --git a/packages/frontend-core/src/api/rows.js b/packages/frontend-core/src/api/rows.js index 79f837e864..0a0d48da43 100644 --- a/packages/frontend-core/src/api/rows.js +++ b/packages/frontend-core/src/api/rows.js @@ -89,13 +89,24 @@ export const buildRowEndpoints = API => ({ * @param rows the array of rows to export * @param format the format to export (csv or json) * @param columns which columns to export (all if undefined) + * @param delimiter how values should be separated in a CSV (default is comma) */ - exportRows: async ({ tableId, rows, format, columns, search }) => { + exportRows: async ({ + tableId, + rows, + format, + columns, + search, + delimiter, + customHeaders, + }) => { return await API.post({ url: `/api/${tableId}/rows/exportRows?format=${format}`, body: { rows, columns, + delimiter, + customHeaders, ...search, }, parseResponse: async response => { diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 0fc14b03c4..ed625e10fa 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -11,9 +11,10 @@ "build:sdk": "yarn run generate && rollup -c" }, "devDependencies": { - "@rollup/plugin-commonjs": "^18.0.0", - "@rollup/plugin-node-resolve": "^11.2.1", - "rollup": "^2.44.0", - "rollup-plugin-terser": "^7.0.2" + "@rollup/plugin-commonjs": "^25.0.7", + "@rollup/plugin-node-resolve": "^15.2.3", + "rollup": "^4.9.6", + "rollup-plugin-terser": "^7.0.2", + "rollup-plugin-polyfill-node": "^0.13.0" } } diff --git a/packages/server/scripts/test.sh b/packages/server/scripts/test.sh index 9efef05526..3ecf8bb794 100644 --- a/packages/server/scripts/test.sh +++ 
b/packages/server/scripts/test.sh @@ -3,12 +3,12 @@ set -e if [[ -n $CI ]] then - export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot" + export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS" echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@" jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@ else # --maxWorkers performs better in development - export NODE_OPTIONS="--no-node-snapshot" + export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS" echo "jest --coverage --maxWorkers=2 --forceExit $@" jest --coverage --maxWorkers=2 --forceExit $@ fi \ No newline at end of file diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts index 33582cf656..0bc93888ae 100644 --- a/packages/server/src/api/controllers/application.ts +++ b/packages/server/src/api/controllers/application.ts @@ -47,6 +47,9 @@ import { PlanType, Screen, UserCtx, + CreateAppRequest, + FetchAppDefinitionResponse, + FetchAppPackageResponse, } from "@budibase/types" import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts" import sdk from "../../sdk" @@ -58,23 +61,23 @@ import * as appMigrations from "../../appMigrations" async function getLayouts() { const db = context.getAppDB() return ( - await db.allDocs( + await db.allDocs( getLayoutParams(null, { include_docs: true, }) ) - ).rows.map((row: any) => row.doc) + ).rows.map(row => row.doc!) } async function getScreens() { const db = context.getAppDB() return ( - await db.allDocs( + await db.allDocs( getScreenParams(null, { include_docs: true, }) ) - ).rows.map((row: any) => row.doc) + ).rows.map(row => row.doc!) } function getUserRoleId(ctx: UserCtx) { @@ -116,8 +119,8 @@ function checkAppName( } interface AppTemplate { - templateString: string - useTemplate: string + templateString?: string + useTemplate?: string file?: { type: string path: string @@ -174,14 +177,16 @@ export const addSampleData = async (ctx: UserCtx) => { ctx.status = 200 } -export async function fetch(ctx: UserCtx) { +export async function fetch(ctx: UserCtx) { ctx.body = await sdk.applications.fetch( ctx.query.status as AppStatus, ctx.user ) } -export async function fetchAppDefinition(ctx: UserCtx) { +export async function fetchAppDefinition( + ctx: UserCtx +) { const layouts = await getLayouts() const userRoleId = getUserRoleId(ctx) const accessController = new roles.AccessController() @@ -196,10 +201,12 @@ export async function fetchAppDefinition(ctx: UserCtx) { } } -export async function fetchAppPackage(ctx: UserCtx) { +export async function fetchAppPackage( + ctx: UserCtx +) { const db = context.getAppDB() const appId = context.getAppId() - let application = await db.get(DocumentType.APP_METADATA) + let application = await db.get(DocumentType.APP_METADATA) const layouts = await getLayouts() let screens = await getScreens() const license = await licensing.cache.getCachedLicense() @@ -231,17 +238,21 @@ export async function fetchAppPackage(ctx: UserCtx) { } } -async function performAppCreate(ctx: UserCtx) { +async function performAppCreate(ctx: UserCtx) { const apps = (await dbCore.getAllApps({ dev: true })) as App[] - const name = ctx.request.body.name, - possibleUrl = ctx.request.body.url, - encryptionPassword = ctx.request.body.encryptionPassword + const { + name, + url, + encryptionPassword, + useTemplate, + templateKey, + templateString, + } = ctx.request.body checkAppName(ctx, apps, name) - const url = 
sdk.applications.getAppUrl({ name, url: possibleUrl }) - checkAppUrl(ctx, apps, url) + const appUrl = sdk.applications.getAppUrl({ name, url }) + checkAppUrl(ctx, apps, appUrl) - const { useTemplate, templateKey, templateString } = ctx.request.body const instanceConfig: AppTemplate = { useTemplate, key: templateKey, @@ -268,7 +279,7 @@ async function performAppCreate(ctx: UserCtx) { version: envCore.VERSION, componentLibraries: ["@budibase/standard-components"], name: name, - url: url, + url: appUrl, template: templateKey, instance, tenantId: tenancy.getTenantId(), @@ -420,7 +431,9 @@ export async function create(ctx: UserCtx) { // This endpoint currently operates as a PATCH rather than a PUT // Thus name and url fields are handled only if present -export async function update(ctx: UserCtx) { +export async function update( + ctx: UserCtx<{ name?: string; url?: string }, App> +) { const apps = (await dbCore.getAllApps({ dev: true })) as App[] // validation const name = ctx.request.body.name, @@ -493,7 +506,7 @@ export async function revertClient(ctx: UserCtx) { const revertedToVersion = application.revertableVersion const appPackageUpdates = { version: revertedToVersion, - revertableVersion: null, + revertableVersion: undefined, } const app = await updateAppPackage(appPackageUpdates, ctx.params.appId) await events.app.versionReverted(app, currentVersion, revertedToVersion) @@ -613,12 +626,15 @@ export async function importToApp(ctx: UserCtx) { ctx.body = { message: "app updated" } } -export async function updateAppPackage(appPackage: any, appId: any) { +export async function updateAppPackage( + appPackage: Partial, + appId: string +) { return context.doInAppContext(appId, async () => { const db = context.getAppDB() const application = await db.get(DocumentType.APP_METADATA) - const newAppPackage = { ...application, ...appPackage } + const newAppPackage: App = { ...application, ...appPackage } if (appPackage._rev !== application._rev) { newAppPackage._rev = application._rev } diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts index 1ad8a2a695..ec56919d12 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -223,7 +223,8 @@ export const exportRows = async ( const format = ctx.query.format - const { rows, columns, query, sort, sortOrder } = ctx.request.body + const { rows, columns, query, sort, sortOrder, delimiter, customHeaders } = + ctx.request.body if (typeof format !== "string" || !exporters.isFormat(format)) { ctx.throw( 400, @@ -241,6 +242,8 @@ export const exportRows = async ( query, sort, sortOrder, + delimiter, + customHeaders, }) ctx.attachment(fileName) ctx.body = apiFileReturn(content) diff --git a/packages/server/src/api/controllers/script.ts b/packages/server/src/api/controllers/script.ts index 93e1ad7df9..b69fc430a6 100644 --- a/packages/server/src/api/controllers/script.ts +++ b/packages/server/src/api/controllers/script.ts @@ -1,13 +1,11 @@ import { Ctx } from "@budibase/types" import { IsolatedVM } from "../../jsRunner/vm" +import { iifeWrapper } from "../../jsRunner/utilities" export async function execute(ctx: Ctx) { const { script, context } = ctx.request.body const vm = new IsolatedVM() - const result = vm.withContext(context, () => - vm.execute(`(function(){\n${script}\n})();`) - ) - ctx.body = result + ctx.body = vm.withContext(context, () => vm.execute(iifeWrapper(script))) } export async function save(ctx: Ctx) { diff --git 
a/packages/server/src/api/controllers/view/exporters.ts b/packages/server/src/api/controllers/view/exporters.ts index d6caff6035..3b5f951dca 100644 --- a/packages/server/src/api/controllers/view/exporters.ts +++ b/packages/server/src/api/controllers/view/exporters.ts @@ -1,7 +1,19 @@ import { Row, TableSchema } from "@budibase/types" -export function csv(headers: string[], rows: Row[]) { - let csv = headers.map(key => `"${key}"`).join(",") +function getHeaders( + headers: string[], + customHeaders: { [key: string]: string } +) { + return headers.map(header => `"${customHeaders[header] || header}"`) +} + +export function csv( + headers: string[], + rows: Row[], + delimiter: string = ",", + customHeaders: { [key: string]: string } = {} +) { + let csv = getHeaders(headers, customHeaders).join(delimiter) for (let row of rows) { csv = `${csv}\n${headers @@ -15,7 +27,7 @@ export function csv(headers: string[], rows: Row[]) { : "" return val.trim() }) - .join(",")}` + .join(delimiter)}` } return csv } diff --git a/packages/server/src/api/routes/application.ts b/packages/server/src/api/routes/application.ts index babcb1b44b..7e01a3c2ef 100644 --- a/packages/server/src/api/routes/application.ts +++ b/packages/server/src/api/routes/application.ts @@ -4,7 +4,6 @@ import * as deploymentController from "../controllers/deploy" import authorized from "../../middleware/authorized" import { permissions } from "@budibase/backend-core" import { applicationValidator } from "./utils/validators" -import { importToApp } from "../controllers/application" const router: Router = new Router() diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index fa5cb0a983..dbe4eb51ae 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -11,65 +11,54 @@ jest.mock("../../../utilities/redis", () => ({ checkDebounce: jest.fn(), shutdown: jest.fn(), })) -import { clearAllApps, checkBuilderEndpoint } from "./utilities/TestFunctions" +import { checkBuilderEndpoint } from "./utilities/TestFunctions" import * as setup from "./utilities" import { AppStatus } from "../../../db/utils" import { events, utils, context } from "@budibase/backend-core" import env from "../../../environment" - -jest.setTimeout(15000) +import type { App } from "@budibase/types" +import tk from "timekeeper" describe("/applications", () => { - let request = setup.getRequest() let config = setup.getConfig() + let app: App afterAll(setup.afterAll) - - beforeAll(async () => { - await config.init() - }) + beforeAll(async () => await config.init()) beforeEach(async () => { + app = await config.api.application.create({ name: utils.newid() }) + const deployment = await config.api.application.publish(app.appId) + expect(deployment.status).toBe("SUCCESS") jest.clearAllMocks() }) describe("create", () => { it("creates empty app", async () => { - const res = await request - .post("/api/applications") - .field("name", utils.newid()) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() + const app = await config.api.application.create({ name: utils.newid() }) + expect(app._id).toBeDefined() expect(events.app.created).toBeCalledTimes(1) }) it("creates app from template", async () => { - const res = await request - .post("/api/applications") - .field("name", utils.newid()) - .field("useTemplate", "true") - .field("templateKey", "test") - 
.field("templateString", "{}") // override the file download - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() + const app = await config.api.application.create({ + name: utils.newid(), + useTemplate: "true", + templateKey: "test", + templateString: "{}", + }) + expect(app._id).toBeDefined() expect(events.app.created).toBeCalledTimes(1) expect(events.app.templateImported).toBeCalledTimes(1) }) it("creates app from file", async () => { - const res = await request - .post("/api/applications") - .field("name", utils.newid()) - .field("useTemplate", "true") - .set(config.defaultHeaders()) - .attach("templateFile", "src/api/routes/tests/data/export.txt") - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() + const app = await config.api.application.create({ + name: utils.newid(), + useTemplate: "true", + templateFile: "src/api/routes/tests/data/export.txt", + }) + expect(app._id).toBeDefined() expect(events.app.created).toBeCalledTimes(1) expect(events.app.fileImported).toBeCalledTimes(1) }) @@ -84,24 +73,21 @@ describe("/applications", () => { }) it("migrates navigation settings from old apps", async () => { - const res = await request - .post("/api/applications") - .field("name", "Old App") - .field("useTemplate", "true") - .set(config.defaultHeaders()) - .attach("templateFile", "src/api/routes/tests/data/old-app.txt") - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() - expect(res.body.navigation).toBeDefined() - expect(res.body.navigation.hideLogo).toBe(true) - expect(res.body.navigation.title).toBe("Custom Title") - expect(res.body.navigation.hideLogo).toBe(true) - expect(res.body.navigation.navigation).toBe("Left") - expect(res.body.navigation.navBackground).toBe( + const app = await config.api.application.create({ + name: utils.newid(), + useTemplate: "true", + templateFile: "src/api/routes/tests/data/old-app.txt", + }) + expect(app._id).toBeDefined() + expect(app.navigation).toBeDefined() + expect(app.navigation!.hideLogo).toBe(true) + expect(app.navigation!.title).toBe("Custom Title") + expect(app.navigation!.hideLogo).toBe(true) + expect(app.navigation!.navigation).toBe("Left") + expect(app.navigation!.navBackground).toBe( "var(--spectrum-global-color-blue-600)" ) - expect(res.body.navigation.navTextColor).toBe( + expect(app.navigation!.navTextColor).toBe( "var(--spectrum-global-color-gray-50)" ) expect(events.app.created).toBeCalledTimes(1) @@ -110,164 +96,106 @@ describe("/applications", () => { }) describe("fetch", () => { - beforeEach(async () => { - // Clean all apps but the onde from config - await clearAllApps(config.getTenantId(), [config.getAppId()!]) - }) - it("lists all applications", async () => { - await config.createApp("app1") - await config.createApp("app2") - - const res = await request - .get(`/api/applications?status=${AppStatus.DEV}`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - - // two created apps + the inited app - expect(res.body.length).toBe(3) + const apps = await config.api.application.fetch({ status: AppStatus.DEV }) + expect(apps.length).toBeGreaterThan(0) }) }) describe("fetchAppDefinition", () => { it("should be able to get an apps definition", async () => { - const res = await request - .get(`/api/applications/${config.getAppId()}/definition`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.libraries.length).toEqual(1) + const res 
= await config.api.application.getDefinition(app.appId) + expect(res.libraries.length).toEqual(1) }) }) describe("fetchAppPackage", () => { it("should be able to fetch the app package", async () => { - const res = await request - .get(`/api/applications/${config.getAppId()}/appPackage`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.application).toBeDefined() - expect(res.body.application.appId).toEqual(config.getAppId()) + const res = await config.api.application.getAppPackage(app.appId) + expect(res.application).toBeDefined() + expect(res.application.appId).toEqual(config.getAppId()) }) }) describe("update", () => { it("should be able to update the app package", async () => { - const res = await request - .put(`/api/applications/${config.getAppId()}`) - .send({ - name: "TEST_APP", - }) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._rev).toBeDefined() + const updatedApp = await config.api.application.update(app.appId, { + name: "TEST_APP", + }) + expect(updatedApp._rev).toBeDefined() expect(events.app.updated).toBeCalledTimes(1) }) }) describe("publish", () => { it("should publish app with dev app ID", async () => { - const appId = config.getAppId() - await request - .post(`/api/applications/${appId}/publish`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.publish(app.appId) expect(events.app.published).toBeCalledTimes(1) }) it("should publish app with prod app ID", async () => { - const appId = config.getProdAppId() - await request - .post(`/api/applications/${appId}/publish`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.publish(app.appId.replace("_dev", "")) expect(events.app.published).toBeCalledTimes(1) }) }) describe("manage client library version", () => { it("should be able to update the app client library version", async () => { - await request - .post(`/api/applications/${config.getAppId()}/client/update`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.updateClient(app.appId) expect(events.app.versionUpdated).toBeCalledTimes(1) }) it("should be able to revert the app client library version", async () => { - // We need to first update the version so that we can then revert - await request - .post(`/api/applications/${config.getAppId()}/client/update`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - await request - .post(`/api/applications/${config.getAppId()}/client/revert`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.updateClient(app.appId) + await config.api.application.revertClient(app.appId) expect(events.app.versionReverted).toBeCalledTimes(1) }) }) describe("edited at", () => { - it("middleware should set edited at", async () => { - const headers = config.defaultHeaders() - headers["referer"] = `/${config.getAppId()}/test` - const res = await request - .put(`/api/applications/${config.getAppId()}`) - .send({ - name: "UPDATED_NAME", - }) - .set(headers) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._rev).toBeDefined() - // retrieve the app to check it - const getRes = await request - .get(`/api/applications/${config.getAppId()}/appPackage`) - .set(headers) - .expect("Content-Type", /json/) - .expect(200) - 
expect(getRes.body.application.updatedAt).toBeDefined() + it("middleware should set updatedAt", async () => { + const app = await tk.withFreeze( + "2021-01-01", + async () => await config.api.application.create({ name: utils.newid() }) + ) + expect(app.updatedAt).toEqual("2021-01-01T00:00:00.000Z") + + const updatedApp = await tk.withFreeze( + "2021-02-01", + async () => + await config.api.application.update(app.appId, { + name: "UPDATED_NAME", + }) + ) + expect(updatedApp._rev).toBeDefined() + expect(updatedApp.updatedAt).toEqual("2021-02-01T00:00:00.000Z") + + const fetchedApp = await config.api.application.get(app.appId) + expect(fetchedApp.updatedAt).toEqual("2021-02-01T00:00:00.000Z") }) }) describe("sync", () => { it("app should sync correctly", async () => { - const res = await request - .post(`/api/applications/${config.getAppId()}/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.message).toEqual("App sync completed successfully.") + const { message } = await config.api.application.sync(app.appId) + expect(message).toEqual("App sync completed successfully.") }) it("app should not sync if production", async () => { - const res = await request - .post(`/api/applications/app_123456/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(400) - expect(res.body.message).toEqual( + const { message } = await config.api.application.sync( + app.appId.replace("_dev", ""), + { statusCode: 400 } + ) + + expect(message).toEqual( "This action cannot be performed for production apps" ) }) it("app should not sync if sync is disabled", async () => { env._set("DISABLE_AUTO_PROD_APP_SYNC", true) - const res = await request - .post(`/api/applications/${config.getAppId()}/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.message).toEqual( + const { message } = await config.api.application.sync(app.appId) + expect(message).toEqual( "App sync disabled. You can reenable with the DISABLE_AUTO_PROD_APP_SYNC environment variable." 
) env._set("DISABLE_AUTO_PROD_APP_SYNC", false) @@ -275,51 +203,26 @@ describe("/applications", () => { }) describe("unpublish", () => { - beforeEach(async () => { - // We want to republish as the unpublish will delete the prod app - await config.publish() - }) - it("should unpublish app with dev app ID", async () => { - const appId = config.getAppId() - await request - .post(`/api/applications/${appId}/unpublish`) - .set(config.defaultHeaders()) - .expect(204) + await config.api.application.unpublish(app.appId) expect(events.app.unpublished).toBeCalledTimes(1) }) it("should unpublish app with prod app ID", async () => { - const appId = config.getProdAppId() - await request - .post(`/api/applications/${appId}/unpublish`) - .set(config.defaultHeaders()) - .expect(204) + await config.api.application.unpublish(app.appId.replace("_dev", "")) expect(events.app.unpublished).toBeCalledTimes(1) }) }) describe("delete", () => { it("should delete published app and dev apps with dev app ID", async () => { - await config.createApp("to-delete") - const appId = config.getAppId() - await request - .delete(`/api/applications/${appId}`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.delete(app.appId) expect(events.app.deleted).toBeCalledTimes(1) expect(events.app.unpublished).toBeCalledTimes(1) }) it("should delete published app and dev app with prod app ID", async () => { - await config.createApp("to-delete") - const appId = config.getProdAppId() - await request - .delete(`/api/applications/${appId}`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.delete(app.appId.replace("_dev", "")) expect(events.app.deleted).toBeCalledTimes(1) expect(events.app.unpublished).toBeCalledTimes(1) }) @@ -327,28 +230,18 @@ describe("/applications", () => { describe("POST /api/applications/:appId/sync", () => { it("should not sync automation logs", async () => { - // setup the apps - await config.createApp("testing-auto-logs") const automation = await config.createAutomation() - await config.publish() - await context.doInAppContext(config.getProdAppId(), () => { - return config.createAutomationLog(automation) - }) + await context.doInAppContext(app.appId, () => + config.createAutomationLog(automation) + ) - // do the sync - const appId = config.getAppId() - await request - .post(`/api/applications/${appId}/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.sync(app.appId) // does exist in prod const prodLogs = await config.getAutomationLogs() expect(prodLogs.data.length).toBe(1) - // delete prod app so we revert to dev log search - await config.unpublish() + await config.api.application.unpublish(app.appId) // doesn't exist in dev const devLogs = await config.getAutomationLogs() diff --git a/packages/server/src/jsRunner/index.ts b/packages/server/src/jsRunner/index.ts index 0fbfed3b66..0c9f5d9f01 100644 --- a/packages/server/src/jsRunner/index.ts +++ b/packages/server/src/jsRunner/index.ts @@ -7,7 +7,6 @@ import { } from "@budibase/string-templates" import { context, logging } from "@budibase/backend-core" import tracer from "dd-trace" - import { IsolatedVM } from "./vm" export function init() { diff --git a/packages/server/src/jsRunner/tests/isolatedVM.spec.ts b/packages/server/src/jsRunner/tests/isolatedVM.spec.ts new file mode 100644 index 0000000000..5296598ef1 --- /dev/null +++ 
b/packages/server/src/jsRunner/tests/isolatedVM.spec.ts @@ -0,0 +1,110 @@ +import fs from "fs" +import path from "path" +import { IsolatedVM } from "../vm" +import { iifeWrapper } from "../utilities" + +function runJSWithIsolatedVM(script: string, context: Record) { + const runner = new IsolatedVM() + return runner.withContext(context, () => { + return runner.execute(iifeWrapper(script)) + }) +} + +describe("Test isolated vm directly", () => { + it("should handle a very large file", () => { + const marked = fs.readFileSync( + path.join(__dirname, "largeJSExample.txt"), + "utf-8" + ) + const result = runJSWithIsolatedVM(marked, { + trigger: { row: { Message: "dddd" } }, + }) + expect(result).toBe("

dddd

\n") + }) + + it("handle a mapping case", async () => { + const context = { + data: { + data: { + searchProducts: { + results: [{ imageLinks: ["_S/"] }], + }, + }, + }, + } + const result = await runJSWithIsolatedVM( + ` + const dataUnnested = data.data.searchProducts.results + const emptyLink = "https://budibase.com" + let pImage = emptyLink + let sImage = emptyLink + let uImage = emptyLink + let lImage = emptyLink + let b1Image = emptyLink + let b2Image = emptyLink + + const dataTransformed = dataUnnested.map(x=> { + let imageLinks = x.imageLinks + for (let i = 0; i < imageLinks.length; i++){ + if(imageLinks[i].includes("_P/") || imageLinks[i].includes("_p/")){ + pImage = imageLinks[i] + } else if (imageLinks[i].includes("_S/") || imageLinks[i].includes("_s/")){ + sImage = imageLinks[i] + } else if (imageLinks[i].includes("_U/") || imageLinks[i].includes("_u/")){ + uImage = imageLinks[i] + } else if (imageLinks[i].includes("_L/") || imageLinks[i].includes("_l/")){ + lImage = imageLinks[i] + } else if (imageLinks[i].includes("_B/") || imageLinks[i].includes("_b/")){ + b1Image = imageLinks[i] + } else if (imageLinks[i].includes("_B2/") || imageLinks[i].includes("_b2/")){ + b2Image = imageLinks[i] + } + } + + const arrangedLinks = [pImage, sImage, uImage, lImage, b1Image, b2Image] + x.imageLinks = arrangedLinks + + return x + }) + + return dataTransformed + `, + context + ) + expect(result).toBeDefined() + expect(result.length).toBe(1) + expect(result[0].imageLinks).toEqual([ + "https://budibase.com", + "_S/", + "https://budibase.com", + "https://budibase.com", + "https://budibase.com", + "https://budibase.com", + ]) + }) + + it("should handle automation script example", () => { + const context = { + steps: [{}, { response: "hello" }, { items: [{ rows: [{ a: 1 }] }] }], + } + const result = runJSWithIsolatedVM( + `const queryResults = steps[2].items; + + const intervals = steps[1].response; + const whereNoItemsReturned = []; + let index = 0; + + for (let queryResult of queryResults) { + if (queryResult.rows.length === 0) { + whereNoItemsReturned.push(intervals[index]); + } + index++; + } + + return whereNoItemsReturned; + `, + context + ) + expect(result).toEqual([]) + }) +}) diff --git a/packages/server/src/jsRunner/tests/jsRunner.spec.ts b/packages/server/src/jsRunner/tests/jsRunner.spec.ts index 30e29885b1..54983aa470 100644 --- a/packages/server/src/jsRunner/tests/jsRunner.spec.ts +++ b/packages/server/src/jsRunner/tests/jsRunner.spec.ts @@ -7,7 +7,9 @@ import tk from "timekeeper" import { init } from ".." 
import TestConfiguration from "../../tests/utilities/TestConfiguration" -tk.freeze("2021-01-21T12:00:00") +const DATE = "2021-01-21T12:00:00" + +tk.freeze(DATE) describe("jsRunner (using isolated-vm)", () => { const config = new TestConfiguration() @@ -70,4 +72,278 @@ describe("jsRunner (using isolated-vm)", () => { }) }) }) + + // the test cases here were extracted from templates/real world examples of JS in Budibase + describe("real test cases from Budicloud", () => { + const context = { + "Unit Value": 2, + Quantity: 1, + } + it("handle test case 1", async () => { + const result = await processJS( + ` + var Gross = $("[Unit Value]") * $("[Quantity]") + return Gross.toFixed(2)`, + context + ) + expect(result).toBeDefined() + expect(result).toBe("2.00") + }) + + it("handle test case 2", async () => { + const context = { + "Purchase Date": DATE, + } + const result = await processJS( + ` + var purchase = new Date($("[Purchase Date]")); + let purchaseyear = purchase.getFullYear(); + let purchasemonth = purchase.getMonth(); + + var today = new Date (); + let todayyear = today.getFullYear(); + let todaymonth = today.getMonth(); + + var age = todayyear - purchaseyear + + if (((todaymonth - purchasemonth) < 6) == true){ + return age + } + `, + context + ) + expect(result).toBeDefined() + expect(result).toBe(3) + }) + + it("should handle test case 3", async () => { + const context = { + Escalate: true, + "Budget ($)": 1100, + } + const result = await processJS( + ` + if ($("[Escalate]") == true) { + if ($("Budget ($)") <= 1000) + {return 2;} + if ($("Budget ($)") > 1000) + {return 3;} + } + else { + if ($("Budget ($)") <= 1000) + {return 1;} + if ($("Budget ($)") > 1000) + if ($("Budget ($)") < 10000) + {return 2;} + else + {return 3} + } + `, + context + ) + expect(result).toBeDefined() + expect(result).toBe(3) + }) + + it("should handle test case 4", async () => { + const context = { + "Time Sheets": ["a", "b"], + } + const result = await processJS( + ` + let hours = 0 + if (($("[Time Sheets]") != null) == true){ + for (i = 0; i < $("[Time Sheets]").length; i++){ + let hoursLogged = "Time Sheets." + i + ".Hours" + hours += $(hoursLogged) + } + return hours + } + if (($("[Time Sheets]") != null) == false){ + return hours + } + `, + context + ) + expect(result).toBeDefined() + expect(result).toBe("0ab") + }) + + it("should handle test case 5", async () => { + const context = { + change: JSON.stringify({ a: 1, primaryDisplay: "a" }), + previous: JSON.stringify({ a: 2, primaryDisplay: "b" }), + } + const result = await processJS( + ` + let change = $("[change]") ? JSON.parse($("[change]")) : {} + let previous = $("[previous]") ? 
JSON.parse($("[previous]")) : {} + + function simplifyLink(originalKey, value, parent) { + if (Array.isArray(value)) { + if (value.filter(item => Object.keys(item || {}).includes("primaryDisplay")).length > 0) { + parent[originalKey] = value.map(link => link.primaryDisplay) + } + } + } + + for (let entry of Object.entries(change)) { + simplifyLink(entry[0], entry[1], change) + } + for (let entry of Object.entries(previous)) { + simplifyLink(entry[0], entry[1], previous) + } + + let diff = Object.fromEntries(Object.entries(change).filter(([k, v]) => previous[k]?.toString() !== v?.toString())) + + delete diff.audit_change + delete diff.audit_previous + delete diff._id + delete diff._rev + delete diff.tableId + delete diff.audit + + for (let entry of Object.entries(diff)) { + simplifyLink(entry[0], entry[1], diff) + } + + return JSON.stringify(change)?.replaceAll(",\\"", ",\\n\\t\\"").replaceAll("{\\"", "{\\n\\t\\"").replaceAll("}", "\\n}") + `, + context + ) + expect(result).toBe(`{\n\t"a":1,\n\t"primaryDisplay":"a"\n}`) + }) + + it("should handle test case 6", async () => { + const context = { + "Join Date": DATE, + } + const result = await processJS( + ` + var rate = 5; + var today = new Date(); + + // comment + function monthDiff(dateFrom, dateTo) { + return dateTo.getMonth() - dateFrom.getMonth() + + (12 * (dateTo.getFullYear() - dateFrom.getFullYear())) + } + var serviceMonths = monthDiff( new Date($("[Join Date]")), today); + var serviceYears = serviceMonths / 12; + + if (serviceYears >= 1 && serviceYears < 5){ + rate = 10; + } + if (serviceYears >= 5 && serviceYears < 10){ + rate = 15; + } + if (serviceYears >= 10){ + rate = 15; + rate += 0.5 * (Number(serviceYears.toFixed(0)) - 10); + } + return rate; + `, + context + ) + expect(result).toBe(10) + }) + + it("should handle test case 7", async () => { + const context = { + "P I": "Pass", + "PA I": "Pass", + "F I": "Fail", + "V I": "Pass", + } + const result = await processJS( + `if (($("[P I]") == "Pass") == true) + if (($("[ P I]") == "Pass") == true) + if (($("[F I]") == "Pass") == true) + if (($("[V I]") == "Pass") == true) + {return "Pass"} + + if (($("[PA I]") == "Fail") == true) + {return "Fail"} + if (($("[ P I]") == "Fail") == true) + {return "Fail"} + if (($("[F I]") == "Fail") == true) + {return "Fail"} + if (($("[V I]") == "Fail") == true) + {return "Fail"} + + else + {return ""}`, + context + ) + expect(result).toBe("Fail") + }) + + it("should handle test case 8", async () => { + const context = { + "T L": [{ Hours: 10 }], + "B H": 50, + } + const result = await processJS( + `var totalHours = 0; + if (($("[T L]") != null) == true){ + for (let i = 0; i < ($("[T L]").length); i++){ + var individualHours = "T L." + i + ".Hours"; + var hoursNum = Number($(individualHours)); + totalHours += hoursNum; + } + return totalHours.toFixed(2); + } + if (($("[T L]") != null) == false) { + return totalHours.toFixed(2); + } + `, + context + ) + expect(result).toBe("10.00") + }) + + it("should handle test case 9", async () => { + const context = { + "T L": [{ Hours: 10 }], + "B H": 50, + } + const result = await processJS( + `var totalHours = 0; + if (($("[T L]") != null) == true){ + for (let i = 0; i < ($("[T L]").length); i++){ + var individualHours = "T L." 
+ i + ".Hours"; + var hoursNum = Number($(individualHours)); + totalHours += hoursNum; + } + return ($("[B H]") - totalHours).toFixed(2); + } + if (($("[T L]") != null) == false) { + return ($("[B H]") - totalHours).toFixed(2); + }`, + context + ) + expect(result).toBe("40.00") + }) + + it("should handle test case 10", async () => { + const context = { + "F F": [{ "F S": 10 }], + } + const result = await processJS( + `var rating = 0; + + if ($("[F F]") != null){ + for (i = 0; i < $("[F F]").length; i++){ + var individualRating = $("F F." + i + ".F S"); + rating += individualRating; + } + rating = (rating / $("[F F]").length); + } + return rating; + `, + context + ) + expect(result).toBe(10) + }) + }) }) diff --git a/packages/server/src/jsRunner/tests/largeJSExample.txt b/packages/server/src/jsRunner/tests/largeJSExample.txt new file mode 100644 index 0000000000..c5c01d7f0f --- /dev/null +++ b/packages/server/src/jsRunner/tests/largeJSExample.txt @@ -0,0 +1,3018 @@ +/** + * marked - a markdown parser + * Copyright (c) 2011-2022, Christopher Jeffrey. (MIT Licensed) + * https://github.com/markedjs/marked + */ + +/** + * DO NOT EDIT THIS FILE + * The code in this file is generated from files in ./src/ + */ + +function getDefaults() { + return { + baseUrl: null, + breaks: false, + extensions: null, + gfm: true, + headerIds: true, + headerPrefix: "", + highlight: null, + langPrefix: "language-", + mangle: true, + pedantic: false, + renderer: null, + sanitize: false, + sanitizer: null, + silent: false, + smartLists: false, + smartypants: false, + tokenizer: null, + walkTokens: null, + xhtml: false, + } +} + +let defaults = getDefaults() + +function changeDefaults(newDefaults) { + defaults = newDefaults +} + +/** + * Helpers + */ +const escapeTest = /[&<>"']/ +const escapeReplace = /[&<>"']/g +const escapeTestNoEncode = /[<>"']|&(?!#?\w+;)/ +const escapeReplaceNoEncode = /[<>"']|&(?!#?\w+;)/g +const escapeReplacements = { + "&": "&", + "<": "<", + ">": ">", + '"': """, + "'": "'", +} +const getEscapeReplacement = ch => escapeReplacements[ch] +function escape(html, encode) { + if (encode) { + if (escapeTest.test(html)) { + return html.replace(escapeReplace, getEscapeReplacement) + } + } else { + if (escapeTestNoEncode.test(html)) { + return html.replace(escapeReplaceNoEncode, getEscapeReplacement) + } + } + + return html +} + +const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/gi + +/** + * @param {string} html + */ +function unescape(html) { + // explicitly match decimal, hex, and named HTML entities + return html.replace(unescapeTest, (_, n) => { + n = n.toLowerCase() + if (n === "colon") return ":" + if (n.charAt(0) === "#") { + return n.charAt(1) === "x" + ? String.fromCharCode(parseInt(n.substring(2), 16)) + : String.fromCharCode(+n.substring(1)) + } + return "" + }) +} + +const caret = /(^|[^\[])\^/g + +/** + * @param {string | RegExp} regex + * @param {string} opt + */ +function edit(regex, opt) { + regex = typeof regex === "string" ? 
regex : regex.source + opt = opt || "" + const obj = { + replace: (name, val) => { + val = val.source || val + val = val.replace(caret, "$1") + regex = regex.replace(name, val) + return obj + }, + getRegex: () => { + return new RegExp(regex, opt) + }, + } + return obj +} + +const nonWordAndColonTest = /[^\w:]/g +const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i + +/** + * @param {boolean} sanitize + * @param {string} base + * @param {string} href + */ +function cleanUrl(sanitize, base, href) { + if (sanitize) { + let prot + try { + prot = decodeURIComponent(unescape(href)) + .replace(nonWordAndColonTest, "") + .toLowerCase() + } catch (e) { + return null + } + if ( + prot.indexOf("javascript:") === 0 || + prot.indexOf("vbscript:") === 0 || + prot.indexOf("data:") === 0 + ) { + return null + } + } + if (base && !originIndependentUrl.test(href)) { + href = resolveUrl(base, href) + } + try { + href = encodeURI(href).replace(/%25/g, "%") + } catch (e) { + return null + } + return href +} + +const baseUrls = {} +const justDomain = /^[^:]+:\/*[^/]*$/ +const protocol = /^([^:]+:)[\s\S]*$/ +const domain = /^([^:]+:\/*[^/]*)[\s\S]*$/ + +/** + * @param {string} base + * @param {string} href + */ +function resolveUrl(base, href) { + if (!baseUrls[" " + base]) { + // we can ignore everything in base after the last slash of its path component, + // but we might need to add _that_ + // https://tools.ietf.org/html/rfc3986#section-3 + if (justDomain.test(base)) { + baseUrls[" " + base] = base + "/" + } else { + baseUrls[" " + base] = rtrim(base, "/", true) + } + } + base = baseUrls[" " + base] + const relativeBase = base.indexOf(":") === -1 + + if (href.substring(0, 2) === "//") { + if (relativeBase) { + return href + } + return base.replace(protocol, "$1") + href + } else if (href.charAt(0) === "/") { + if (relativeBase) { + return href + } + return base.replace(domain, "$1") + href + } else { + return base + href + } +} + +const noopTest = { exec: function noopTest() {} } + +function merge(obj) { + let i = 1, + target, + key + + for (; i < arguments.length; i++) { + target = arguments[i] + for (key in target) { + if (Object.prototype.hasOwnProperty.call(target, key)) { + obj[key] = target[key] + } + } + } + + return obj +} + +function splitCells(tableRow, count) { + // ensure that every cell-delimiting pipe has a space + // before it to distinguish it from an escaped pipe + const row = tableRow.replace(/\|/g, (match, offset, str) => { + let escaped = false, + curr = offset + while (--curr >= 0 && str[curr] === "\\") escaped = !escaped + if (escaped) { + // odd number of slashes means | is escaped + // so we leave it alone + return "|" + } else { + // add space before unescaped | + return " |" + } + }), + cells = row.split(/ \|/) + let i = 0 + + // First/last cell in a row cannot be empty if it has no leading/trailing pipe + if (!cells[0].trim()) { + cells.shift() + } + if (cells.length > 0 && !cells[cells.length - 1].trim()) { + cells.pop() + } + + if (cells.length > count) { + cells.splice(count) + } else { + while (cells.length < count) cells.push("") + } + + for (; i < cells.length; i++) { + // leading or trailing whitespace is ignored per the gfm spec + cells[i] = cells[i].trim().replace(/\\\|/g, "|") + } + return cells +} + +/** + * Remove trailing 'c's. Equivalent to str.replace(/c*$/, ''). + * /c*$/ is vulnerable to REDOS. + * + * @param {string} str + * @param {string} c + * @param {boolean} invert Remove suffix of non-c chars instead. Default falsey. 
+ */ +function rtrim(str, c, invert) { + const l = str.length + if (l === 0) { + return "" + } + + // Length of suffix matching the invert condition. + let suffLen = 0 + + // Step left until we fail to match the invert condition. + while (suffLen < l) { + const currChar = str.charAt(l - suffLen - 1) + if (currChar === c && !invert) { + suffLen++ + } else if (currChar !== c && invert) { + suffLen++ + } else { + break + } + } + + return str.slice(0, l - suffLen) +} + +function findClosingBracket(str, b) { + if (str.indexOf(b[1]) === -1) { + return -1 + } + const l = str.length + let level = 0, + i = 0 + for (; i < l; i++) { + if (str[i] === "\\") { + i++ + } else if (str[i] === b[0]) { + level++ + } else if (str[i] === b[1]) { + level-- + if (level < 0) { + return i + } + } + } + return -1 +} + +function checkSanitizeDeprecation(opt) { + if (opt && opt.sanitize && !opt.silent) { + console.warn( + "marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options" + ) + } +} + +// copied from https://stackoverflow.com/a/5450113/806777 +/** + * @param {string} pattern + * @param {number} count + */ +function repeatString(pattern, count) { + if (count < 1) { + return "" + } + let result = "" + while (count > 1) { + if (count & 1) { + result += pattern + } + count >>= 1 + pattern += pattern + } + return result + pattern +} + +function outputLink(cap, link, raw, lexer) { + const href = link.href + const title = link.title ? escape(link.title) : null + const text = cap[1].replace(/\\([\[\]])/g, "$1") + + if (cap[0].charAt(0) !== "!") { + lexer.state.inLink = true + const token = { + type: "link", + raw, + href, + title, + text, + tokens: lexer.inlineTokens(text, []), + } + lexer.state.inLink = false + return token + } + return { + type: "image", + raw, + href, + title, + text: escape(text), + } +} + +function indentCodeCompensation(raw, text) { + const matchIndentToCode = raw.match(/^(\s+)(?:```)/) + + if (matchIndentToCode === null) { + return text + } + + const indentToCode = matchIndentToCode[1] + + return text + .split("\n") + .map(node => { + const matchIndentInNode = node.match(/^\s+/) + if (matchIndentInNode === null) { + return node + } + + const [indentInNode] = matchIndentInNode + + if (indentInNode.length >= indentToCode.length) { + return node.slice(indentToCode.length) + } + + return node + }) + .join("\n") +} + +/** + * Tokenizer + */ +class Tokenizer { + constructor(options) { + this.options = options || defaults + } + + space(src) { + const cap = this.rules.block.newline.exec(src) + if (cap && cap[0].length > 0) { + return { + type: "space", + raw: cap[0], + } + } + } + + code(src) { + const cap = this.rules.block.code.exec(src) + if (cap) { + const text = cap[0].replace(/^ {1,4}/gm, "") + return { + type: "code", + raw: cap[0], + codeBlockStyle: "indented", + text: !this.options.pedantic ? rtrim(text, "\n") : text, + } + } + } + + fences(src) { + const cap = this.rules.block.fences.exec(src) + if (cap) { + const raw = cap[0] + const text = indentCodeCompensation(raw, cap[3] || "") + + return { + type: "code", + raw, + lang: cap[2] ? 
cap[2].trim() : cap[2], + text, + } + } + } + + heading(src) { + const cap = this.rules.block.heading.exec(src) + if (cap) { + let text = cap[2].trim() + + // remove trailing #s + if (/#$/.test(text)) { + const trimmed = rtrim(text, "#") + if (this.options.pedantic) { + text = trimmed.trim() + } else if (!trimmed || / $/.test(trimmed)) { + // CommonMark requires space before trailing #s + text = trimmed.trim() + } + } + + const token = { + type: "heading", + raw: cap[0], + depth: cap[1].length, + text, + tokens: [], + } + this.lexer.inline(token.text, token.tokens) + return token + } + } + + hr(src) { + const cap = this.rules.block.hr.exec(src) + if (cap) { + return { + type: "hr", + raw: cap[0], + } + } + } + + blockquote(src) { + const cap = this.rules.block.blockquote.exec(src) + if (cap) { + const text = cap[0].replace(/^ *>[ \t]?/gm, "") + + return { + type: "blockquote", + raw: cap[0], + tokens: this.lexer.blockTokens(text, []), + text, + } + } + } + + list(src) { + let cap = this.rules.block.list.exec(src) + if (cap) { + let raw, + istask, + ischecked, + indent, + i, + blankLine, + endsWithBlankLine, + line, + nextLine, + rawLine, + itemContents, + endEarly + + let bull = cap[1].trim() + const isordered = bull.length > 1 + + const list = { + type: "list", + raw: "", + ordered: isordered, + start: isordered ? +bull.slice(0, -1) : "", + loose: false, + items: [], + } + + bull = isordered ? `\\d{1,9}\\${bull.slice(-1)}` : `\\${bull}` + + if (this.options.pedantic) { + bull = isordered ? bull : "[*+-]" + } + + // Get next list item + const itemRegex = new RegExp( + `^( {0,3}${bull})((?:[\t ][^\\n]*)?(?:\\n|$))` + ) + + // Check if current bullet point can start a new List Item + while (src) { + endEarly = false + if (!(cap = itemRegex.exec(src))) { + break + } + + if (this.rules.block.hr.test(src)) { + // End list if bullet was actually HR (possibly move into itemRegex?) + break + } + + raw = cap[0] + src = src.substring(raw.length) + + line = cap[2].split("\n", 1)[0] + nextLine = src.split("\n", 1)[0] + + if (this.options.pedantic) { + indent = 2 + itemContents = line.trimLeft() + } else { + indent = cap[2].search(/[^ ]/) // Find first non-space char + indent = indent > 4 ? 
1 : indent // Treat indented code blocks (> 4 spaces) as having only 1 indent + itemContents = line.slice(indent) + indent += cap[1].length + } + + blankLine = false + + if (!line && /^ *$/.test(nextLine)) { + // Items begin with at most one blank line + raw += nextLine + "\n" + src = src.substring(nextLine.length + 1) + endEarly = true + } + + if (!endEarly) { + const nextBulletRegex = new RegExp( + `^ {0,${Math.min( + 3, + indent - 1 + )}}(?:[*+-]|\\d{1,9}[.)])((?: [^\\n]*)?(?:\\n|$))` + ) + const hrRegex = new RegExp( + `^ {0,${Math.min( + 3, + indent - 1 + )}}((?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$)` + ) + + // Check if following lines should be included in List Item + while (src) { + rawLine = src.split("\n", 1)[0] + line = rawLine + + // Re-align to follow commonmark nesting rules + if (this.options.pedantic) { + line = line.replace(/^ {1,4}(?=( {4})*[^ ])/g, " ") + } + + // End list item if found start of new bullet + if (nextBulletRegex.test(line)) { + break + } + + // Horizontal rule found + if (hrRegex.test(src)) { + break + } + + if (line.search(/[^ ]/) >= indent || !line.trim()) { + // Dedent if possible + itemContents += "\n" + line.slice(indent) + } else if (!blankLine) { + // Until blank line, item doesn't need indentation + itemContents += "\n" + line + } else { + // Otherwise, improper indentation ends this item + break + } + + if (!blankLine && !line.trim()) { + // Check if current line is blank + blankLine = true + } + + raw += rawLine + "\n" + src = src.substring(rawLine.length + 1) + } + } + + if (!list.loose) { + // If the previous item ended with a blank line, the list is loose + if (endsWithBlankLine) { + list.loose = true + } else if (/\n *\n *$/.test(raw)) { + endsWithBlankLine = true + } + } + + // Check for task list items + if (this.options.gfm) { + istask = /^\[[ xX]\] /.exec(itemContents) + if (istask) { + ischecked = istask[0] !== "[ ] " + itemContents = itemContents.replace(/^\[[ xX]\] +/, "") + } + } + + list.items.push({ + type: "list_item", + raw, + task: !!istask, + checked: ischecked, + loose: false, + text: itemContents, + }) + + list.raw += raw + } + + // Do not consume newlines at end of final item. Alternatively, make itemRegex *start* with any newlines to simplify/speed up endsWithBlankLine logic + list.items[list.items.length - 1].raw = raw.trimRight() + list.items[list.items.length - 1].text = itemContents.trimRight() + list.raw = list.raw.trimRight() + + const l = list.items.length + + // Item child tokens handled here at end because we needed to have the final item to trim it first + for (i = 0; i < l; i++) { + this.lexer.state.top = false + list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []) + const spacers = list.items[i].tokens.filter(t => t.type === "space") + const hasMultipleLineBreaks = spacers.every(t => { + const chars = t.raw.split("") + let lineBreaks = 0 + for (const char of chars) { + if (char === "\n") { + lineBreaks += 1 + } + if (lineBreaks > 1) { + return true + } + } + + return false + }) + + if (!list.loose && spacers.length && hasMultipleLineBreaks) { + // Having a single line break doesn't mean a list is loose. 
A single line break is terminating the last list item + list.loose = true + list.items[i].loose = true + } + } + + return list + } + } + + html(src) { + const cap = this.rules.block.html.exec(src) + if (cap) { + const token = { + type: "html", + raw: cap[0], + pre: + !this.options.sanitizer && + (cap[1] === "pre" || cap[1] === "script" || cap[1] === "style"), + text: cap[0], + } + if (this.options.sanitize) { + token.type = "paragraph" + token.text = this.options.sanitizer + ? this.options.sanitizer(cap[0]) + : escape(cap[0]) + token.tokens = [] + this.lexer.inline(token.text, token.tokens) + } + return token + } + } + + def(src) { + const cap = this.rules.block.def.exec(src) + if (cap) { + if (cap[3]) cap[3] = cap[3].substring(1, cap[3].length - 1) + const tag = cap[1].toLowerCase().replace(/\s+/g, " ") + return { + type: "def", + tag, + raw: cap[0], + href: cap[2], + title: cap[3], + } + } + } + + table(src) { + const cap = this.rules.block.table.exec(src) + if (cap) { + const item = { + type: "table", + header: splitCells(cap[1]).map(c => { + return { text: c } + }), + align: cap[2].replace(/^ *|\| *$/g, "").split(/ *\| */), + rows: + cap[3] && cap[3].trim() + ? cap[3].replace(/\n[ \t]*$/, "").split("\n") + : [], + } + + if (item.header.length === item.align.length) { + item.raw = cap[0] + + let l = item.align.length + let i, j, k, row + for (i = 0; i < l; i++) { + if (/^ *-+: *$/.test(item.align[i])) { + item.align[i] = "right" + } else if (/^ *:-+: *$/.test(item.align[i])) { + item.align[i] = "center" + } else if (/^ *:-+ *$/.test(item.align[i])) { + item.align[i] = "left" + } else { + item.align[i] = null + } + } + + l = item.rows.length + for (i = 0; i < l; i++) { + item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => { + return { text: c } + }) + } + + // parse child tokens inside headers and cells + + // header child tokens + l = item.header.length + for (j = 0; j < l; j++) { + item.header[j].tokens = [] + this.lexer.inline(item.header[j].text, item.header[j].tokens) + } + + // cell child tokens + l = item.rows.length + for (j = 0; j < l; j++) { + row = item.rows[j] + for (k = 0; k < row.length; k++) { + row[k].tokens = [] + this.lexer.inline(row[k].text, row[k].tokens) + } + } + + return item + } + } + } + + lheading(src) { + const cap = this.rules.block.lheading.exec(src) + if (cap) { + const token = { + type: "heading", + raw: cap[0], + depth: cap[2].charAt(0) === "=" ? 1 : 2, + text: cap[1], + tokens: [], + } + this.lexer.inline(token.text, token.tokens) + return token + } + } + + paragraph(src) { + const cap = this.rules.block.paragraph.exec(src) + if (cap) { + const token = { + type: "paragraph", + raw: cap[0], + text: + cap[1].charAt(cap[1].length - 1) === "\n" + ? 
cap[1].slice(0, -1) + : cap[1], + tokens: [], + } + this.lexer.inline(token.text, token.tokens) + return token + } + } + + text(src) { + const cap = this.rules.block.text.exec(src) + if (cap) { + const token = { + type: "text", + raw: cap[0], + text: cap[0], + tokens: [], + } + this.lexer.inline(token.text, token.tokens) + return token + } + } + + escape(src) { + const cap = this.rules.inline.escape.exec(src) + if (cap) { + return { + type: "escape", + raw: cap[0], + text: escape(cap[1]), + } + } + } + + tag(src) { + const cap = this.rules.inline.tag.exec(src) + if (cap) { + if (!this.lexer.state.inLink && /^/i.test(cap[0])) { + this.lexer.state.inLink = false + } + if ( + !this.lexer.state.inRawBlock && + /^<(pre|code|kbd|script)(\s|>)/i.test(cap[0]) + ) { + this.lexer.state.inRawBlock = true + } else if ( + this.lexer.state.inRawBlock && + /^<\/(pre|code|kbd|script)(\s|>)/i.test(cap[0]) + ) { + this.lexer.state.inRawBlock = false + } + + return { + type: this.options.sanitize ? "text" : "html", + raw: cap[0], + inLink: this.lexer.state.inLink, + inRawBlock: this.lexer.state.inRawBlock, + text: this.options.sanitize + ? this.options.sanitizer + ? this.options.sanitizer(cap[0]) + : escape(cap[0]) + : cap[0], + } + } + } + + link(src) { + const cap = this.rules.inline.link.exec(src) + if (cap) { + const trimmedUrl = cap[2].trim() + if (!this.options.pedantic && /^$/.test(trimmedUrl)) { + return + } + + // ending angle bracket cannot be escaped + const rtrimSlash = rtrim(trimmedUrl.slice(0, -1), "\\") + if ((trimmedUrl.length - rtrimSlash.length) % 2 === 0) { + return + } + } else { + // find closing parenthesis + const lastParenIndex = findClosingBracket(cap[2], "()") + if (lastParenIndex > -1) { + const start = cap[0].indexOf("!") === 0 ? 5 : 4 + const linkLen = start + cap[1].length + lastParenIndex + cap[2] = cap[2].substring(0, lastParenIndex) + cap[0] = cap[0].substring(0, linkLen).trim() + cap[3] = "" + } + } + let href = cap[2] + let title = "" + if (this.options.pedantic) { + // split pedantic href and title + const link = /^([^'"]*[^\s])\s+(['"])(.*)\2/.exec(href) + + if (link) { + href = link[1] + title = link[3] + } + } else { + title = cap[3] ? cap[3].slice(1, -1) : "" + } + + href = href.trim() + if (/^$/.test(trimmedUrl)) { + // pedantic allows starting angle bracket without ending angle bracket + href = href.slice(1) + } else { + href = href.slice(1, -1) + } + } + return outputLink( + cap, + { + href: href ? href.replace(this.rules.inline._escapes, "$1") : href, + title: title + ? title.replace(this.rules.inline._escapes, "$1") + : title, + }, + cap[0], + this.lexer + ) + } + } + + reflink(src, links) { + let cap + if ( + (cap = this.rules.inline.reflink.exec(src)) || + (cap = this.rules.inline.nolink.exec(src)) + ) { + let link = (cap[2] || cap[1]).replace(/\s+/g, " ") + link = links[link.toLowerCase()] + if (!link || !link.href) { + const text = cap[0].charAt(0) + return { + type: "text", + raw: text, + text, + } + } + return outputLink(cap, link, cap[0], this.lexer) + } + } + + emStrong(src, maskedSrc, prevChar = "") { + let match = this.rules.inline.emStrong.lDelim.exec(src) + if (!match) return + + // _ can't be between two alphanumerics. 
\p{L}\p{N} includes non-english alphabet/numbers as well + if (match[3] && prevChar.match(/[\p{L}\p{N}]/u)) return + + const nextChar = match[1] || match[2] || "" + + if ( + !nextChar || + (nextChar && + (prevChar === "" || this.rules.inline.punctuation.exec(prevChar))) + ) { + const lLength = match[0].length - 1 + let rDelim, + rLength, + delimTotal = lLength, + midDelimTotal = 0 + + const endReg = + match[0][0] === "*" + ? this.rules.inline.emStrong.rDelimAst + : this.rules.inline.emStrong.rDelimUnd + endReg.lastIndex = 0 + + // Clip maskedSrc to same section of string as src (move to lexer?) + maskedSrc = maskedSrc.slice(-1 * src.length + lLength) + + while ((match = endReg.exec(maskedSrc)) != null) { + rDelim = + match[1] || match[2] || match[3] || match[4] || match[5] || match[6] + + if (!rDelim) continue // skip single * in __abc*abc__ + + rLength = rDelim.length + + if (match[3] || match[4]) { + // found another Left Delim + delimTotal += rLength + continue + } else if (match[5] || match[6]) { + // either Left or Right Delim + if (lLength % 3 && !((lLength + rLength) % 3)) { + midDelimTotal += rLength + continue // CommonMark Emphasis Rules 9-10 + } + } + + delimTotal -= rLength + + if (delimTotal > 0) continue // Haven't found enough closing delimiters + + // Remove extra characters. *a*** -> *a* + rLength = Math.min(rLength, rLength + delimTotal + midDelimTotal) + + // Create `em` if smallest delimiter has odd char count. *a*** + if (Math.min(lLength, rLength) % 2) { + const text = src.slice(1, lLength + match.index + rLength) + return { + type: "em", + raw: src.slice(0, lLength + match.index + rLength + 1), + text, + tokens: this.lexer.inlineTokens(text, []), + } + } + + // Create 'strong' if smallest delimiter has even char count. **a*** + const text = src.slice(2, lLength + match.index + rLength - 1) + return { + type: "strong", + raw: src.slice(0, lLength + match.index + rLength + 1), + text, + tokens: this.lexer.inlineTokens(text, []), + } + } + } + } + + codespan(src) { + const cap = this.rules.inline.code.exec(src) + if (cap) { + let text = cap[2].replace(/\n/g, " ") + const hasNonSpaceChars = /[^ ]/.test(text) + const hasSpaceCharsOnBothEnds = /^ /.test(text) && / $/.test(text) + if (hasNonSpaceChars && hasSpaceCharsOnBothEnds) { + text = text.substring(1, text.length - 1) + } + text = escape(text, true) + return { + type: "codespan", + raw: cap[0], + text, + } + } + } + + br(src) { + const cap = this.rules.inline.br.exec(src) + if (cap) { + return { + type: "br", + raw: cap[0], + } + } + } + + del(src) { + const cap = this.rules.inline.del.exec(src) + if (cap) { + return { + type: "del", + raw: cap[0], + text: cap[2], + tokens: this.lexer.inlineTokens(cap[2], []), + } + } + } + + autolink(src, mangle) { + const cap = this.rules.inline.autolink.exec(src) + if (cap) { + let text, href + if (cap[2] === "@") { + text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]) + href = "mailto:" + text + } else { + text = escape(cap[1]) + href = text + } + + return { + type: "link", + raw: cap[0], + text, + href, + tokens: [ + { + type: "text", + raw: text, + text, + }, + ], + } + } + } + + url(src, mangle) { + let cap + if ((cap = this.rules.inline.url.exec(src))) { + let text, href + if (cap[2] === "@") { + text = escape(this.options.mangle ? 
mangle(cap[0]) : cap[0]) + href = "mailto:" + text + } else { + // do extended autolink path validation + let prevCapZero + do { + prevCapZero = cap[0] + cap[0] = this.rules.inline._backpedal.exec(cap[0])[0] + } while (prevCapZero !== cap[0]) + text = escape(cap[0]) + if (cap[1] === "www.") { + href = "http://" + text + } else { + href = text + } + } + return { + type: "link", + raw: cap[0], + text, + href, + tokens: [ + { + type: "text", + raw: text, + text, + }, + ], + } + } + } + + inlineText(src, smartypants) { + const cap = this.rules.inline.text.exec(src) + if (cap) { + let text + if (this.lexer.state.inRawBlock) { + text = this.options.sanitize + ? this.options.sanitizer + ? this.options.sanitizer(cap[0]) + : escape(cap[0]) + : cap[0] + } else { + text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]) + } + return { + type: "text", + raw: cap[0], + text, + } + } + } +} + +/** + * Block-Level Grammar + */ +const block = { + newline: /^(?: *(?:\n|$))+/, + code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/, + fences: + /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/, + hr: /^ {0,3}((?:-[\t ]*){3,}|(?:_[ \t]*){3,}|(?:\*[ \t]*){3,})(?:\n+|$)/, + heading: /^ {0,3}(#{1,6})(?=\s|$)(.*)(?:\n+|$)/, + blockquote: /^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/, + list: /^( {0,3}bull)([ \t][^\n]+?)?(?:\n|$)/, + html: + "^ {0,3}(?:" + // optional indentation + "<(script|pre|style|textarea)[\\s>][\\s\\S]*?(?:[^\\n]*\\n+|$)" + // (1) + "|comment[^\\n]*(\\n+|$)" + // (2) + "|<\\?[\\s\\S]*?(?:\\?>\\n*|$)" + // (3) + "|\\n*|$)" + // (4) + "|\\n*|$)" + // (5) + "|)[\\s\\S]*?(?:(?:\\n *)+\\n|$)" + // (6) + "|<(?!script|pre|style|textarea)([a-z][\\w-]*)(?:attribute)*? */?>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)" + // (7) open tag + "|(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)" + // (7) closing tag + ")", + def: /^ {0,3}\[(label)\]: *(?:\n *)?]+)>?(?:(?: +(?:\n *)?| *\n *)(title))? 
*(?:\n+|$)/, + table: noopTest, + lheading: /^([^\n]+)\n {0,3}(=+|-+) *(?:\n+|$)/, + // regex template, placeholders will be replaced according to different paragraph + // interruption rules of commonmark and the original markdown spec: + _paragraph: + /^([^\n]+(?:\n(?!hr|heading|lheading|blockquote|fences|list|html|table| +\n)[^\n]+)*)/, + text: /^[^\n]+/, +} + +block._label = /(?!\s*\])(?:\\.|[^\[\]\\])+/ +block._title = /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/ +block.def = edit(block.def) + .replace("label", block._label) + .replace("title", block._title) + .getRegex() + +block.bullet = /(?:[*+-]|\d{1,9}[.)])/ +block.listItemStart = edit(/^( *)(bull) */) + .replace("bull", block.bullet) + .getRegex() + +block.list = edit(block.list) + .replace(/bull/g, block.bullet) + .replace( + "hr", + "\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))" + ) + .replace("def", "\\n+(?=" + block.def.source + ")") + .getRegex() + +block._tag = + "address|article|aside|base|basefont|blockquote|body|caption" + + "|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption" + + "|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe" + + "|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option" + + "|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr" + + "|track|ul" +block._comment = /|$)/ +block.html = edit(block.html, "i") + .replace("comment", block._comment) + .replace("tag", block._tag) + .replace( + "attribute", + / +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/ + ) + .getRegex() + +block.paragraph = edit(block._paragraph) + .replace("hr", block.hr) + .replace("heading", " {0,3}#{1,6} ") + .replace("|lheading", "") // setex headings don't interrupt commonmark paragraphs + .replace("|table", "") + .replace("blockquote", " {0,3}>") + .replace("fences", " {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n") + .replace("list", " {0,3}(?:[*+-]|1[.)]) ") // only lists starting from 1 can interrupt + .replace( + "html", + ")|<(?:script|pre|style|textarea|!--)" + ) + .replace("tag", block._tag) // pars can be interrupted by type (6) html blocks + .getRegex() + +block.blockquote = edit(block.blockquote) + .replace("paragraph", block.paragraph) + .getRegex() + +/** + * Normal Block Grammar + */ + +block.normal = merge({}, block) + +/** + * GFM Block Grammar + */ + +block.gfm = merge({}, block.normal, { + table: + "^ *([^\\n ].*\\|.*)\\n" + // Header + " {0,3}(?:\\| *)?(:?-+:? *(?:\\| *:?-+:? *)*)(?:\\| *)?" + // Align + "(?:\\n((?:(?! 
*\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)", // Cells +}) + +block.gfm.table = edit(block.gfm.table) + .replace("hr", block.hr) + .replace("heading", " {0,3}#{1,6} ") + .replace("blockquote", " {0,3}>") + .replace("code", " {4}[^\\n]") + .replace("fences", " {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n") + .replace("list", " {0,3}(?:[*+-]|1[.)]) ") // only lists starting from 1 can interrupt + .replace( + "html", + ")|<(?:script|pre|style|textarea|!--)" + ) + .replace("tag", block._tag) // tables can be interrupted by type (6) html blocks + .getRegex() + +block.gfm.paragraph = edit(block._paragraph) + .replace("hr", block.hr) + .replace("heading", " {0,3}#{1,6} ") + .replace("|lheading", "") // setex headings don't interrupt commonmark paragraphs + .replace("table", block.gfm.table) // interrupt paragraphs with table + .replace("blockquote", " {0,3}>") + .replace("fences", " {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n") + .replace("list", " {0,3}(?:[*+-]|1[.)]) ") // only lists starting from 1 can interrupt + .replace( + "html", + ")|<(?:script|pre|style|textarea|!--)" + ) + .replace("tag", block._tag) // pars can be interrupted by type (6) html blocks + .getRegex() +/** + * Pedantic grammar (original John Gruber's loose markdown specification) + */ + +block.pedantic = merge({}, block.normal, { + html: edit( + "^ *(?:comment *(?:\\n|\\s*$)" + + "|<(tag)[\\s\\S]+? *(?:\\n{2,}|\\s*$)" + // closed tag + "|\\s]*)*?/?> *(?:\\n{2,}|\\s*$))" + ) + .replace("comment", block._comment) + .replace( + /tag/g, + "(?!(?:" + + "a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub" + + "|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)" + + "\\b)\\w+(?!:|[^\\w\\s@]*@)\\b" + ) + .getRegex(), + def: /^ *\[([^\]]+)\]: *]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/, + heading: /^(#{1,6})(.*)(?:\n+|$)/, + fences: noopTest, // fences not supported + paragraph: edit(block.normal._paragraph) + .replace("hr", block.hr) + .replace("heading", " *#{1,6} *[^\n]") + .replace("lheading", block.lheading) + .replace("blockquote", " {0,3}>") + .replace("|fences", "") + .replace("|list", "") + .replace("|html", "") + .getRegex(), +}) + +/** + * Inline-Level Grammar + */ +const inline = { + escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/, + autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/, + url: noopTest, + tag: + "^comment" + + "|^" + // self-closing tag + "|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>" + // open tag + "|^<\\?[\\s\\S]*?\\?>" + // processing instruction, e.g. + "|^" + // declaration, e.g. + "|^", // CDATA section + link: /^!?\[(label)\]\(\s*(href)(?:\s+(title))?\s*\)/, + reflink: /^!?\[(label)\]\[(ref)\]/, + nolink: /^!?\[(ref)\](?:\[\])?/, + reflinkSearch: "reflink|nolink(?!\\()", + emStrong: { + lDelim: /^(?:\*+(?:([punct_])|[^\s*]))|^_+(?:([punct*])|([^\s_]))/, + // (1) and (2) can only be a Right Delimiter. (3) and (4) can only be Left. (5) and (6) can be either Left or Right. 
+ // () Skip orphan inside strong () Consume to delim (1) #*** (2) a***#, a*** (3) #***a, ***a (4) ***# (5) #***# (6) a***a + rDelimAst: + /^[^_*]*?\_\_[^_*]*?\*[^_*]*?(?=\_\_)|[^*]+(?=[^*])|[punct_](\*+)(?=[\s]|$)|[^punct*_\s](\*+)(?=[punct_\s]|$)|[punct_\s](\*+)(?=[^punct*_\s])|[\s](\*+)(?=[punct_])|[punct_](\*+)(?=[punct_])|[^punct*_\s](\*+)(?=[^punct*_\s])/, + rDelimUnd: + /^[^_*]*?\*\*[^_*]*?\_[^_*]*?(?=\*\*)|[^_]+(?=[^_])|[punct*](\_+)(?=[\s]|$)|[^punct*_\s](\_+)(?=[punct*\s]|$)|[punct*\s](\_+)(?=[^punct*_\s])|[\s](\_+)(?=[punct*])|[punct*](\_+)(?=[punct*])/, // ^- Not allowed for _ + }, + code: /^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/, + br: /^( {2,}|\\)\n(?!\s*$)/, + del: noopTest, + text: /^(`+|[^`])(?:(?= {2,}\n)|[\s\S]*?(?:(?=[\\?@\\[\\]`^{|}~" +inline.punctuation = edit(inline.punctuation) + .replace(/punctuation/g, inline._punctuation) + .getRegex() + +// sequences em should skip over [title](link), `code`, +inline.blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g +inline.escapedEmSt = /\\\*|\\_/g + +inline._comment = edit(block._comment).replace("(?:-->|$)", "-->").getRegex() + +inline.emStrong.lDelim = edit(inline.emStrong.lDelim) + .replace(/punct/g, inline._punctuation) + .getRegex() + +inline.emStrong.rDelimAst = edit(inline.emStrong.rDelimAst, "g") + .replace(/punct/g, inline._punctuation) + .getRegex() + +inline.emStrong.rDelimUnd = edit(inline.emStrong.rDelimUnd, "g") + .replace(/punct/g, inline._punctuation) + .getRegex() + +inline._escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g + +inline._scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/ +inline._email = + /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/ +inline.autolink = edit(inline.autolink) + .replace("scheme", inline._scheme) + .replace("email", inline._email) + .getRegex() + +inline._attribute = + /\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/ + +inline.tag = edit(inline.tag) + .replace("comment", inline._comment) + .replace("attribute", inline._attribute) + .getRegex() + +inline._label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/ +inline._href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/ +inline._title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/ + +inline.link = edit(inline.link) + .replace("label", inline._label) + .replace("href", inline._href) + .replace("title", inline._title) + .getRegex() + +inline.reflink = edit(inline.reflink) + .replace("label", inline._label) + .replace("ref", block._label) + .getRegex() + +inline.nolink = edit(inline.nolink).replace("ref", block._label).getRegex() + +inline.reflinkSearch = edit(inline.reflinkSearch, "g") + .replace("reflink", inline.reflink) + .replace("nolink", inline.nolink) + .getRegex() + +/** + * Normal Inline Grammar + */ + +inline.normal = merge({}, inline) + +/** + * Pedantic Inline Grammar + */ + +inline.pedantic = merge({}, inline.normal, { + strong: { + start: /^__|\*\*/, + middle: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/, + endAst: /\*\*(?!\*)/g, + endUnd: /__(?!_)/g, + }, + em: { + start: /^_|\*/, + middle: /^()\*(?=\S)([\s\S]*?\S)\*(?!\*)|^_(?=\S)([\s\S]*?\S)_(?!_)/, + endAst: /\*(?!\*)/g, + endUnd: /_(?!_)/g, + }, + link: edit(/^!?\[(label)\]\((.*?)\)/) + .replace("label", inline._label) + .getRegex(), + reflink: edit(/^!?\[(label)\]\s*\[([^\]]*)\]/) + .replace("label", inline._label) + .getRegex(), +}) + +/** + * GFM Inline Grammar + */ + +inline.gfm = merge({}, 
inline.normal, { + escape: edit(inline.escape).replace("])", "~|])").getRegex(), + _extended_email: + /[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/, + url: /^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/, + _backpedal: + /(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/, + del: /^(~~?)(?=[^\s~])([\s\S]*?[^\s~])\1(?=[^~]|$)/, + text: /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\ 0.5) { + ch = "x" + ch.toString(16) + } + out += "&#" + ch + ";" + } + + return out +} + +/** + * Block Lexer + */ +class Lexer { + constructor(options) { + this.tokens = [] + this.tokens.links = Object.create(null) + this.options = options || defaults + this.options.tokenizer = this.options.tokenizer || new Tokenizer() + this.tokenizer = this.options.tokenizer + this.tokenizer.options = this.options + this.tokenizer.lexer = this + this.inlineQueue = [] + this.state = { + inLink: false, + inRawBlock: false, + top: true, + } + + const rules = { + block: block.normal, + inline: inline.normal, + } + + if (this.options.pedantic) { + rules.block = block.pedantic + rules.inline = inline.pedantic + } else if (this.options.gfm) { + rules.block = block.gfm + if (this.options.breaks) { + rules.inline = inline.breaks + } else { + rules.inline = inline.gfm + } + } + this.tokenizer.rules = rules + } + + /** + * Expose Rules + */ + static get rules() { + return { + block, + inline, + } + } + + /** + * Static Lex Method + */ + static lex(src, options) { + const lexer = new Lexer(options) + return lexer.lex(src) + } + + /** + * Static Lex Inline Method + */ + static lexInline(src, options) { + const lexer = new Lexer(options) + return lexer.inlineTokens(src) + } + + /** + * Preprocessing + */ + lex(src) { + src = src.replace(/\r\n|\r/g, "\n") + + this.blockTokens(src, this.tokens) + + let next + while ((next = this.inlineQueue.shift())) { + this.inlineTokens(next.src, next.tokens) + } + + return this.tokens + } + + /** + * Lexing + */ + blockTokens(src, tokens = []) { + if (this.options.pedantic) { + src = src.replace(/\t/g, " ").replace(/^ +$/gm, "") + } else { + src = src.replace(/^( *)(\t+)/gm, (_, leading, tabs) => { + return leading + " ".repeat(tabs.length) + }) + } + + let token, lastToken, cutSrc, lastParagraphClipped + + while (src) { + if ( + this.options.extensions && + this.options.extensions.block && + this.options.extensions.block.some(extTokenizer => { + if ((token = extTokenizer.call({ lexer: this }, src, tokens))) { + src = src.substring(token.raw.length) + tokens.push(token) + return true + } + return false + }) + ) { + continue + } + + // newline + if ((token = this.tokenizer.space(src))) { + src = src.substring(token.raw.length) + if (token.raw.length === 1 && tokens.length > 0) { + // if there's a single \n as a spacer, it's terminating the last line, + // so move it there so that we don't get unecessary paragraph tags + tokens[tokens.length - 1].raw += "\n" + } else { + tokens.push(token) + } + continue + } + + // code + if ((token = this.tokenizer.code(src))) { + src = src.substring(token.raw.length) + lastToken = tokens[tokens.length - 1] + // An indented code block cannot interrupt a paragraph. 
+ if ( + lastToken && + (lastToken.type === "paragraph" || lastToken.type === "text") + ) { + lastToken.raw += "\n" + token.raw + lastToken.text += "\n" + token.text + this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text + } else { + tokens.push(token) + } + continue + } + + // fences + if ((token = this.tokenizer.fences(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // heading + if ((token = this.tokenizer.heading(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // hr + if ((token = this.tokenizer.hr(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // blockquote + if ((token = this.tokenizer.blockquote(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // list + if ((token = this.tokenizer.list(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // html + if ((token = this.tokenizer.html(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // def + if ((token = this.tokenizer.def(src))) { + src = src.substring(token.raw.length) + lastToken = tokens[tokens.length - 1] + if ( + lastToken && + (lastToken.type === "paragraph" || lastToken.type === "text") + ) { + lastToken.raw += "\n" + token.raw + lastToken.text += "\n" + token.raw + this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text + } else if (!this.tokens.links[token.tag]) { + this.tokens.links[token.tag] = { + href: token.href, + title: token.title, + } + } + continue + } + + // table (gfm) + if ((token = this.tokenizer.table(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // lheading + if ((token = this.tokenizer.lheading(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // top-level paragraph + // prevent paragraph consuming extensions by clipping 'src' to extension start + cutSrc = src + if (this.options.extensions && this.options.extensions.startBlock) { + let startIndex = Infinity + const tempSrc = src.slice(1) + let tempStart + this.options.extensions.startBlock.forEach(function (getStartIndex) { + tempStart = getStartIndex.call({ lexer: this }, tempSrc) + if (typeof tempStart === "number" && tempStart >= 0) { + startIndex = Math.min(startIndex, tempStart) + } + }) + if (startIndex < Infinity && startIndex >= 0) { + cutSrc = src.substring(0, startIndex + 1) + } + } + if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) { + lastToken = tokens[tokens.length - 1] + if (lastParagraphClipped && lastToken.type === "paragraph") { + lastToken.raw += "\n" + token.raw + lastToken.text += "\n" + token.text + this.inlineQueue.pop() + this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text + } else { + tokens.push(token) + } + lastParagraphClipped = cutSrc.length !== src.length + src = src.substring(token.raw.length) + continue + } + + // text + if ((token = this.tokenizer.text(src))) { + src = src.substring(token.raw.length) + lastToken = tokens[tokens.length - 1] + if (lastToken && lastToken.type === "text") { + lastToken.raw += "\n" + token.raw + lastToken.text += "\n" + token.text + this.inlineQueue.pop() + this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text + } else { + tokens.push(token) + } + continue + } + + if (src) { + const errMsg = "Infinite loop on byte: " + src.charCodeAt(0) + if (this.options.silent) { + console.error(errMsg) + break + } else { + throw 
new Error(errMsg) + } + } + } + + this.state.top = true + return tokens + } + + inline(src, tokens) { + this.inlineQueue.push({ src, tokens }) + } + + /** + * Lexing/Compiling + */ + inlineTokens(src, tokens = []) { + let token, lastToken, cutSrc + + // String with links masked to avoid interference with em and strong + let maskedSrc = src + let match + let keepPrevChar, prevChar + + // Mask out reflinks + if (this.tokens.links) { + const links = Object.keys(this.tokens.links) + if (links.length > 0) { + while ( + (match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != + null + ) { + if ( + links.includes(match[0].slice(match[0].lastIndexOf("[") + 1, -1)) + ) { + maskedSrc = + maskedSrc.slice(0, match.index) + + "[" + + repeatString("a", match[0].length - 2) + + "]" + + maskedSrc.slice( + this.tokenizer.rules.inline.reflinkSearch.lastIndex + ) + } + } + } + } + // Mask out other blocks + while ( + (match = this.tokenizer.rules.inline.blockSkip.exec(maskedSrc)) != null + ) { + maskedSrc = + maskedSrc.slice(0, match.index) + + "[" + + repeatString("a", match[0].length - 2) + + "]" + + maskedSrc.slice(this.tokenizer.rules.inline.blockSkip.lastIndex) + } + + // Mask out escaped em & strong delimiters + while ( + (match = this.tokenizer.rules.inline.escapedEmSt.exec(maskedSrc)) != null + ) { + maskedSrc = + maskedSrc.slice(0, match.index) + + "++" + + maskedSrc.slice(this.tokenizer.rules.inline.escapedEmSt.lastIndex) + } + + while (src) { + if (!keepPrevChar) { + prevChar = "" + } + keepPrevChar = false + + // extensions + if ( + this.options.extensions && + this.options.extensions.inline && + this.options.extensions.inline.some(extTokenizer => { + if ((token = extTokenizer.call({ lexer: this }, src, tokens))) { + src = src.substring(token.raw.length) + tokens.push(token) + return true + } + return false + }) + ) { + continue + } + + // escape + if ((token = this.tokenizer.escape(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // tag + if ((token = this.tokenizer.tag(src))) { + src = src.substring(token.raw.length) + lastToken = tokens[tokens.length - 1] + if (lastToken && token.type === "text" && lastToken.type === "text") { + lastToken.raw += token.raw + lastToken.text += token.text + } else { + tokens.push(token) + } + continue + } + + // link + if ((token = this.tokenizer.link(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // reflink, nolink + if ((token = this.tokenizer.reflink(src, this.tokens.links))) { + src = src.substring(token.raw.length) + lastToken = tokens[tokens.length - 1] + if (lastToken && token.type === "text" && lastToken.type === "text") { + lastToken.raw += token.raw + lastToken.text += token.text + } else { + tokens.push(token) + } + continue + } + + // em & strong + if ((token = this.tokenizer.emStrong(src, maskedSrc, prevChar))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // code + if ((token = this.tokenizer.codespan(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // br + if ((token = this.tokenizer.br(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // del (gfm) + if ((token = this.tokenizer.del(src))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // autolink + if ((token = this.tokenizer.autolink(src, mangle))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // url (gfm) + if 
(!this.state.inLink && (token = this.tokenizer.url(src, mangle))) { + src = src.substring(token.raw.length) + tokens.push(token) + continue + } + + // text + // prevent inlineText consuming extensions by clipping 'src' to extension start + cutSrc = src + if (this.options.extensions && this.options.extensions.startInline) { + let startIndex = Infinity + const tempSrc = src.slice(1) + let tempStart + this.options.extensions.startInline.forEach(function (getStartIndex) { + tempStart = getStartIndex.call({ lexer: this }, tempSrc) + if (typeof tempStart === "number" && tempStart >= 0) { + startIndex = Math.min(startIndex, tempStart) + } + }) + if (startIndex < Infinity && startIndex >= 0) { + cutSrc = src.substring(0, startIndex + 1) + } + } + if ((token = this.tokenizer.inlineText(cutSrc, smartypants))) { + src = src.substring(token.raw.length) + if (token.raw.slice(-1) !== "_") { + // Track prevChar before string of ____ started + prevChar = token.raw.slice(-1) + } + keepPrevChar = true + lastToken = tokens[tokens.length - 1] + if (lastToken && lastToken.type === "text") { + lastToken.raw += token.raw + lastToken.text += token.text + } else { + tokens.push(token) + } + continue + } + + if (src) { + const errMsg = "Infinite loop on byte: " + src.charCodeAt(0) + if (this.options.silent) { + console.error(errMsg) + break + } else { + throw new Error(errMsg) + } + } + } + + return tokens + } +} + +/** + * Renderer + */ +class Renderer { + constructor(options) { + this.options = options || defaults + } + + code(code, infostring, escaped) { + const lang = (infostring || "").match(/\S*/)[0] + if (this.options.highlight) { + const out = this.options.highlight(code, lang) + if (out != null && out !== code) { + escaped = true + code = out + } + } + + code = code.replace(/\n$/, "") + "\n" + + if (!lang) { + return ( + "
<pre><code>" +
+        (escaped ? code : escape(code, true)) +
+        "</code></pre>
\n" + ) + } + + return ( + '
<pre><code class="' +
+      this.options.langPrefix +
+      escape(lang, true) +
+      '">' +
+      (escaped ? code : escape(code, true)) +
+      "</code></pre>
\n" + ) + } + + /** + * @param {string} quote + */ + blockquote(quote) { + return `
<blockquote>\n${quote}</blockquote>
\n` + } + + html(html) { + return html + } + + /** + * @param {string} text + * @param {string} level + * @param {string} raw + * @param {any} slugger + */ + heading(text, level, raw, slugger) { + if (this.options.headerIds) { + const id = this.options.headerPrefix + slugger.slug(raw) + return `<h${level} id="${id}">${text}</h${level}>\n` + } + + // ignore IDs + return `<h${level}>${text}</h${level}>\n` + } + + hr() { + return this.options.xhtml ? "
<hr/>\n" : "<hr>
\n" + } + + list(body, ordered, start) { + const type = ordered ? "ol" : "ul", + startatt = ordered && start !== 1 ? ' start="' + start + '"' : "" + return "<" + type + startatt + ">\n" + body + "</" + type + ">\n" + } + + /** + * @param {string} text + */ + listitem(text) { + return `
<li>${text}</li>\n` + } + + checkbox(checked) { + return ( + "<input " + (checked ? 'checked="" ' : "") + 'disabled="" type="checkbox"' + (this.options.xhtml ? " /" : "") + "> " + ) + } + + /** + * @param {string} text + */ + paragraph(text) { + return `
<p>${text}</p>\n` + } + + /** + * @param {string} header + * @param {string} body + */ + table(header, body) { + if (body) body = `<tbody>${body}</tbody>` + + return ( + "<table>\n" + "<thead>\n" + header + "</thead>\n" + body + "
</table>\n" + ) + } + + /** + * @param {string} content + */ + tablerow(content) { + return `<tr>\n${content}</tr>\n` + } + + tablecell(content, flags) { + const type = flags.header ? "th" : "td" + const tag = flags.align ? `<${type} align="${flags.align}">` : `<${type}>` + return tag + content + `</${type}>\n` + } + + /** + * span level renderer + * @param {string} text + */ + strong(text) { + return `<strong>${text}</strong>` + } + + /** + * @param {string} text + */ + em(text) { + return `<em>${text}</em>` + } + + /** + * @param {string} text + */ + codespan(text) { + return `<code>${text}</code>` + } + + br() { + return this.options.xhtml ? "<br/>
" : "<br>
" + } + + /** + * @param {string} text + */ + del(text) { + return `<del>${text}</del>` + } + + /** + * @param {string} href + * @param {string} title + * @param {string} text + */ + link(href, title, text) { + href = cleanUrl(this.options.sanitize, this.options.baseUrl, href) + if (href === null) { + return text + } + let out = '
    " + return out + } + + /** + * @param {string} href + * @param {string} title + * @param {string} text + */ + image(href, title, text) { + href = cleanUrl(this.options.sanitize, this.options.baseUrl, href) + if (href === null) { + return text + } + + let out = `${text}" : ">" + return out + } + + text(text) { + return text + } +} + +/** + * TextRenderer + * returns only the textual part of the token + */ +class TextRenderer { + // no need for block level renderers + strong(text) { + return text + } + + em(text) { + return text + } + + codespan(text) { + return text + } + + del(text) { + return text + } + + html(text) { + return text + } + + text(text) { + return text + } + + link(href, title, text) { + return "" + text + } + + image(href, title, text) { + return "" + text + } + + br() { + return "" + } +} + +/** + * Slugger generates header id + */ +class Slugger { + constructor() { + this.seen = {} + } + + /** + * @param {string} value + */ + serialize(value) { + return ( + value + .toLowerCase() + .trim() + // remove html tags + .replace(/<[!\/a-z].*?>/gi, "") + // remove unwanted chars + .replace( + /[\u2000-\u206F\u2E00-\u2E7F\\'!"#$%&()*+,./:;<=>?@[\]^`{|}~]/g, + "" + ) + .replace(/\s/g, "-") + ) + } + + /** + * Finds the next safe (unique) slug to use + * @param {string} originalSlug + * @param {boolean} isDryRun + */ + getNextSafeSlug(originalSlug, isDryRun) { + let slug = originalSlug + let occurenceAccumulator = 0 + if (this.seen.hasOwnProperty(slug)) { + occurenceAccumulator = this.seen[originalSlug] + do { + occurenceAccumulator++ + slug = originalSlug + "-" + occurenceAccumulator + } while (this.seen.hasOwnProperty(slug)) + } + if (!isDryRun) { + this.seen[originalSlug] = occurenceAccumulator + this.seen[slug] = 0 + } + return slug + } + + /** + * Convert string to unique id + * @param {object} [options] + * @param {boolean} [options.dryrun] Generates the next unique slug without + * updating the internal accumulator. 
+ */ + slug(value, options = {}) { + const slug = this.serialize(value) + return this.getNextSafeSlug(slug, options.dryrun) + } +} + +/** + * Parsing & Compiling + */ +class Parser { + constructor(options) { + this.options = options || defaults + this.options.renderer = this.options.renderer || new Renderer() + this.renderer = this.options.renderer + this.renderer.options = this.options + this.textRenderer = new TextRenderer() + this.slugger = new Slugger() + } + + /** + * Static Parse Method + */ + static parse(tokens, options) { + const parser = new Parser(options) + return parser.parse(tokens) + } + + /** + * Static Parse Inline Method + */ + static parseInline(tokens, options) { + const parser = new Parser(options) + return parser.parseInline(tokens) + } + + /** + * Parse Loop + */ + parse(tokens, top = true) { + let out = "", + i, + j, + k, + l2, + l3, + row, + cell, + header, + body, + token, + ordered, + start, + loose, + itemBody, + item, + checked, + task, + checkbox, + ret + + const l = tokens.length + for (i = 0; i < l; i++) { + token = tokens[i] + + // Run any renderer extensions + if ( + this.options.extensions && + this.options.extensions.renderers && + this.options.extensions.renderers[token.type] + ) { + ret = this.options.extensions.renderers[token.type].call( + { parser: this }, + token + ) + if ( + ret !== false || + ![ + "space", + "hr", + "heading", + "code", + "table", + "blockquote", + "list", + "html", + "paragraph", + "text", + ].includes(token.type) + ) { + out += ret || "" + continue + } + } + + switch (token.type) { + case "space": { + continue + } + case "hr": { + out += this.renderer.hr() + continue + } + case "heading": { + out += this.renderer.heading( + this.parseInline(token.tokens), + token.depth, + unescape(this.parseInline(token.tokens, this.textRenderer)), + this.slugger + ) + continue + } + case "code": { + out += this.renderer.code(token.text, token.lang, token.escaped) + continue + } + case "table": { + header = "" + + // header + cell = "" + l2 = token.header.length + for (j = 0; j < l2; j++) { + cell += this.renderer.tablecell( + this.parseInline(token.header[j].tokens), + { header: true, align: token.align[j] } + ) + } + header += this.renderer.tablerow(cell) + + body = "" + l2 = token.rows.length + for (j = 0; j < l2; j++) { + row = token.rows[j] + + cell = "" + l3 = row.length + for (k = 0; k < l3; k++) { + cell += this.renderer.tablecell(this.parseInline(row[k].tokens), { + header: false, + align: token.align[k], + }) + } + + body += this.renderer.tablerow(cell) + } + out += this.renderer.table(header, body) + continue + } + case "blockquote": { + body = this.parse(token.tokens) + out += this.renderer.blockquote(body) + continue + } + case "list": { + ordered = token.ordered + start = token.start + loose = token.loose + l2 = token.items.length + + body = "" + for (j = 0; j < l2; j++) { + item = token.items[j] + checked = item.checked + task = item.task + + itemBody = "" + if (item.task) { + checkbox = this.renderer.checkbox(checked) + if (loose) { + if ( + item.tokens.length > 0 && + item.tokens[0].type === "paragraph" + ) { + item.tokens[0].text = checkbox + " " + item.tokens[0].text + if ( + item.tokens[0].tokens && + item.tokens[0].tokens.length > 0 && + item.tokens[0].tokens[0].type === "text" + ) { + item.tokens[0].tokens[0].text = + checkbox + " " + item.tokens[0].tokens[0].text + } + } else { + item.tokens.unshift({ + type: "text", + text: checkbox, + }) + } + } else { + itemBody += checkbox + } + } + + itemBody += 
this.parse(item.tokens, loose) + body += this.renderer.listitem(itemBody, task, checked) + } + + out += this.renderer.list(body, ordered, start) + continue + } + case "html": { + // TODO parse inline content if parameter markdown=1 + out += this.renderer.html(token.text) + continue + } + case "paragraph": { + out += this.renderer.paragraph(this.parseInline(token.tokens)) + continue + } + case "text": { + body = token.tokens ? this.parseInline(token.tokens) : token.text + while (i + 1 < l && tokens[i + 1].type === "text") { + token = tokens[++i] + body += + "\n" + + (token.tokens ? this.parseInline(token.tokens) : token.text) + } + out += top ? this.renderer.paragraph(body) : body + continue + } + + default: { + const errMsg = 'Token with "' + token.type + '" type was not found.' + if (this.options.silent) { + console.error(errMsg) + return + } else { + throw new Error(errMsg) + } + } + } + } + + return out + } + + /** + * Parse Inline Tokens + */ + parseInline(tokens, renderer) { + renderer = renderer || this.renderer + let out = "", + i, + token, + ret + + const l = tokens.length + for (i = 0; i < l; i++) { + token = tokens[i] + + // Run any renderer extensions + if ( + this.options.extensions && + this.options.extensions.renderers && + this.options.extensions.renderers[token.type] + ) { + ret = this.options.extensions.renderers[token.type].call( + { parser: this }, + token + ) + if ( + ret !== false || + ![ + "escape", + "html", + "link", + "image", + "strong", + "em", + "codespan", + "br", + "del", + "text", + ].includes(token.type) + ) { + out += ret || "" + continue + } + } + + switch (token.type) { + case "escape": { + out += renderer.text(token.text) + break + } + case "html": { + out += renderer.html(token.text) + break + } + case "link": { + out += renderer.link( + token.href, + token.title, + this.parseInline(token.tokens, renderer) + ) + break + } + case "image": { + out += renderer.image(token.href, token.title, token.text) + break + } + case "strong": { + out += renderer.strong(this.parseInline(token.tokens, renderer)) + break + } + case "em": { + out += renderer.em(this.parseInline(token.tokens, renderer)) + break + } + case "codespan": { + out += renderer.codespan(token.text) + break + } + case "br": { + out += renderer.br() + break + } + case "del": { + out += renderer.del(this.parseInline(token.tokens, renderer)) + break + } + case "text": { + out += renderer.text(token.text) + break + } + default: { + const errMsg = 'Token with "' + token.type + '" type was not found.' 
+ if (this.options.silent) { + console.error(errMsg) + return + } else { + throw new Error(errMsg) + } + } + } + } + return out + } +} + +/** + * Marked + */ +function marked(src, opt, callback) { + // throw error in case of non string input + if (typeof src === "undefined" || src === null) { + throw new Error("marked(): input parameter is undefined or null") + } + if (typeof src !== "string") { + throw new Error( + "marked(): input parameter is of type " + + Object.prototype.toString.call(src) + + ", string expected" + ) + } + + if (typeof opt === "function") { + callback = opt + opt = null + } + + opt = merge({}, marked.defaults, opt || {}) + checkSanitizeDeprecation(opt) + + if (callback) { + const highlight = opt.highlight + let tokens + + try { + tokens = Lexer.lex(src, opt) + } catch (e) { + return callback(e) + } + + const done = function (err) { + let out + + if (!err) { + try { + if (opt.walkTokens) { + marked.walkTokens(tokens, opt.walkTokens) + } + out = Parser.parse(tokens, opt) + } catch (e) { + err = e + } + } + + opt.highlight = highlight + + return err ? callback(err) : callback(null, out) + } + + if (!highlight || highlight.length < 3) { + return done() + } + + delete opt.highlight + + if (!tokens.length) return done() + + let pending = 0 + marked.walkTokens(tokens, function (token) { + if (token.type === "code") { + pending++ + setTimeout(() => { + highlight(token.text, token.lang, function (err, code) { + if (err) { + return done(err) + } + if (code != null && code !== token.text) { + token.text = code + token.escaped = true + } + + pending-- + if (pending === 0) { + done() + } + }) + }, 0) + } + }) + + if (pending === 0) { + done() + } + + return + } + + try { + const tokens = Lexer.lex(src, opt) + if (opt.walkTokens) { + marked.walkTokens(tokens, opt.walkTokens) + } + return Parser.parse(tokens, opt) + } catch (e) { + e.message += "\nPlease report this to https://github.com/markedjs/marked." + if (opt.silent) { + return ( + "
<p>An error occurred:</p><pre>" +
+        escape(e.message + "", true) +
+        "</pre>
    " + ) + } + throw e + } +} + +/** + * Options + */ + +marked.options = marked.setOptions = function (opt) { + merge(marked.defaults, opt) + changeDefaults(marked.defaults) + return marked +} + +marked.getDefaults = getDefaults + +marked.defaults = defaults + +/** + * Use Extension + */ + +marked.use = function (...args) { + const opts = merge({}, ...args) + const extensions = marked.defaults.extensions || { + renderers: {}, + childTokens: {}, + } + let hasExtensions + + args.forEach(pack => { + // ==-- Parse "addon" extensions --== // + if (pack.extensions) { + hasExtensions = true + pack.extensions.forEach(ext => { + if (!ext.name) { + throw new Error("extension name required") + } + if (ext.renderer) { + // Renderer extensions + const prevRenderer = extensions.renderers + ? extensions.renderers[ext.name] + : null + if (prevRenderer) { + // Replace extension with func to run new extension but fall back if false + extensions.renderers[ext.name] = function (...args) { + let ret = ext.renderer.apply(this, args) + if (ret === false) { + ret = prevRenderer.apply(this, args) + } + return ret + } + } else { + extensions.renderers[ext.name] = ext.renderer + } + } + if (ext.tokenizer) { + // Tokenizer Extensions + if (!ext.level || (ext.level !== "block" && ext.level !== "inline")) { + throw new Error("extension level must be 'block' or 'inline'") + } + if (extensions[ext.level]) { + extensions[ext.level].unshift(ext.tokenizer) + } else { + extensions[ext.level] = [ext.tokenizer] + } + if (ext.start) { + // Function to check for start of token + if (ext.level === "block") { + if (extensions.startBlock) { + extensions.startBlock.push(ext.start) + } else { + extensions.startBlock = [ext.start] + } + } else if (ext.level === "inline") { + if (extensions.startInline) { + extensions.startInline.push(ext.start) + } else { + extensions.startInline = [ext.start] + } + } + } + } + if (ext.childTokens) { + // Child tokens to be visited by walkTokens + extensions.childTokens[ext.name] = ext.childTokens + } + }) + } + + // ==-- Parse "overwrite" extensions --== // + if (pack.renderer) { + const renderer = marked.defaults.renderer || new Renderer() + for (const prop in pack.renderer) { + const prevRenderer = renderer[prop] + // Replace renderer with func to run extension, but fall back if false + renderer[prop] = (...args) => { + let ret = pack.renderer[prop].apply(renderer, args) + if (ret === false) { + ret = prevRenderer.apply(renderer, args) + } + return ret + } + } + opts.renderer = renderer + } + if (pack.tokenizer) { + const tokenizer = marked.defaults.tokenizer || new Tokenizer() + for (const prop in pack.tokenizer) { + const prevTokenizer = tokenizer[prop] + // Replace tokenizer with func to run extension, but fall back if false + tokenizer[prop] = (...args) => { + let ret = pack.tokenizer[prop].apply(tokenizer, args) + if (ret === false) { + ret = prevTokenizer.apply(tokenizer, args) + } + return ret + } + } + opts.tokenizer = tokenizer + } + + // ==-- Parse WalkTokens extensions --== // + if (pack.walkTokens) { + const walkTokens = marked.defaults.walkTokens + opts.walkTokens = function (token) { + pack.walkTokens.call(this, token) + if (walkTokens) { + walkTokens.call(this, token) + } + } + } + + if (hasExtensions) { + opts.extensions = extensions + } + + marked.setOptions(opts) + }) +} + +/** + * Run callback for every token + */ + +marked.walkTokens = function (tokens, callback) { + for (const token of tokens) { + callback.call(marked, token) + switch (token.type) { + case "table": { + for 
(const cell of token.header) { + marked.walkTokens(cell.tokens, callback) + } + for (const row of token.rows) { + for (const cell of row) { + marked.walkTokens(cell.tokens, callback) + } + } + break + } + case "list": { + marked.walkTokens(token.items, callback) + break + } + default: { + if ( + marked.defaults.extensions && + marked.defaults.extensions.childTokens && + marked.defaults.extensions.childTokens[token.type] + ) { + // Walk any extensions + marked.defaults.extensions.childTokens[token.type].forEach(function ( + childTokens + ) { + marked.walkTokens(token[childTokens], callback) + }) + } else if (token.tokens) { + marked.walkTokens(token.tokens, callback) + } + } + } + } +} + +/** + * Parse Inline + * @param {string} src + */ +marked.parseInline = function (src, opt) { + // throw error in case of non string input + if (typeof src === "undefined" || src === null) { + throw new Error( + "marked.parseInline(): input parameter is undefined or null" + ) + } + if (typeof src !== "string") { + throw new Error( + "marked.parseInline(): input parameter is of type " + + Object.prototype.toString.call(src) + + ", string expected" + ) + } + + opt = merge({}, marked.defaults, opt || {}) + checkSanitizeDeprecation(opt) + + try { + const tokens = Lexer.lexInline(src, opt) + if (opt.walkTokens) { + marked.walkTokens(tokens, opt.walkTokens) + } + return Parser.parseInline(tokens, opt) + } catch (e) { + e.message += "\nPlease report this to https://github.com/markedjs/marked." + if (opt.silent) { + return ( + "
<p>An error occurred:</p><pre>" +
+        escape(e.message + "", true) +
+        "</pre>
    " + ) + } + throw e + } +} + +/** + * Expose + */ +marked.Parser = Parser +marked.parser = Parser.parse +marked.Renderer = Renderer +marked.TextRenderer = TextRenderer +marked.Lexer = Lexer +marked.lexer = Lexer.lex +marked.Tokenizer = Tokenizer +marked.Slugger = Slugger +marked.parse = marked + +const options = marked.options +const setOptions = marked.setOptions +const use = marked.use +const walkTokens = marked.walkTokens +const parseInline = marked.parseInline +const parse = marked +const parser = Parser.parse +const lexer = Lexer.lex + +const email = trigger.row +return marked(email.Message) diff --git a/packages/server/src/jsRunner/utilities.ts b/packages/server/src/jsRunner/utilities.ts new file mode 100644 index 0000000000..fa398ec239 --- /dev/null +++ b/packages/server/src/jsRunner/utilities.ts @@ -0,0 +1,3 @@ +export function iifeWrapper(script: string) { + return `(function(){\n${script}\n})();` +} diff --git a/packages/server/src/jsRunner/vm/isolated-vm.ts b/packages/server/src/jsRunner/vm/isolated-vm.ts index e5c431666d..b0692f0fd1 100644 --- a/packages/server/src/jsRunner/vm/isolated-vm.ts +++ b/packages/server/src/jsRunner/vm/isolated-vm.ts @@ -7,6 +7,7 @@ import querystring from "querystring" import { BundleType, loadBundle } from "../bundles" import { VM } from "@budibase/types" +import { iifeWrapper } from "../utilities" import environment from "../../environment" class ExecutionTimeoutError extends Error { @@ -118,11 +119,11 @@ export class IsolatedVM implements VM { // 3. Process script // 4. Stringify the result in order to convert the result from BSON to json this.codeWrapper = code => - `(function(){ - const data = bson.deserialize(bsonData, { validation: { utf8: false } }).data; - const result = ${code} - return bson.toJson(result); - })();` + iifeWrapper(` + const data = bson.deserialize(bsonData, { validation: { utf8: false } }).data; + const result = ${code} + return bson.toJson(result); + `) const bsonSource = loadBundle(BundleType.BSON) diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts index 4b71179839..8b24f9bc5f 100644 --- a/packages/server/src/sdk/app/rows/search.ts +++ b/packages/server/src/sdk/app/rows/search.ts @@ -36,11 +36,13 @@ export async function search(options: SearchParams): Promise<{ export interface ExportRowsParams { tableId: string format: Format + delimiter?: string rowIds?: string[] columns?: string[] query?: SearchFilters sort?: string sortOrder?: SortOrder + customHeaders?: { [key: string]: string } } export interface ExportRowsResult { diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 8465f997e3..e2d1a1b32c 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -101,7 +101,17 @@ export async function search(options: SearchParams) { export async function exportRows( options: ExportRowsParams ): Promise { - const { tableId, format, columns, rowIds, query, sort, sortOrder } = options + const { + tableId, + format, + columns, + rowIds, + query, + sort, + sortOrder, + delimiter, + customHeaders, + } = options const { datasourceId, tableName } = breakExternalTableId(tableId) let requestQuery: SearchFilters = {} @@ -153,12 +163,17 @@ export async function exportRows( rows = result.rows } - let exportRows = cleanExportRows(rows, schema, format, columns) + let exportRows = cleanExportRows(rows, schema, format, columns, customHeaders) let content: 
string switch (format) { case exporters.Format.CSV: - content = exporters.csv(headers ?? Object.keys(schema), exportRows) + content = exporters.csv( + headers ?? Object.keys(schema), + exportRows, + delimiter, + customHeaders + ) break case exporters.Format.JSON: content = exporters.json(exportRows) diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts index 22cb3985b7..2d3c32e02e 100644 --- a/packages/server/src/sdk/app/rows/search/internal.ts +++ b/packages/server/src/sdk/app/rows/search/internal.ts @@ -84,7 +84,17 @@ export async function search(options: SearchParams) { export async function exportRows( options: ExportRowsParams ): Promise { - const { tableId, format, rowIds, columns, query, sort, sortOrder } = options + const { + tableId, + format, + rowIds, + columns, + query, + sort, + sortOrder, + delimiter, + customHeaders, + } = options const db = context.getAppDB() const table = await sdk.tables.getTable(tableId) @@ -124,11 +134,16 @@ export async function exportRows( rows = result } - let exportRows = cleanExportRows(rows, schema, format, columns) + let exportRows = cleanExportRows(rows, schema, format, columns, customHeaders) if (format === Format.CSV) { return { fileName: "export.csv", - content: csv(headers ?? Object.keys(rows[0]), exportRows), + content: csv( + headers ?? Object.keys(rows[0]), + exportRows, + delimiter, + customHeaders + ), } } else if (format === Format.JSON) { return { diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index 14868a4013..0ff85f40ac 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -16,7 +16,8 @@ export function cleanExportRows( rows: any[], schema: TableSchema, format: string, - columns?: string[] + columns?: string[], + customHeaders: { [key: string]: string } = {} ) { let cleanRows = [...rows] @@ -44,11 +45,27 @@ export function cleanExportRows( } } } + } else if (format === Format.JSON) { + // Replace row keys with custom headers + for (let row of cleanRows) { + renameKeys(customHeaders, row) + } } return cleanRows } +function renameKeys(keysMap: { [key: string]: any }, row: any) { + for (const key in keysMap) { + Object.defineProperty( + row, + keysMap[key], + Object.getOwnPropertyDescriptor(row, key) || {} + ) + delete row[key] + } +} + function isForeignKey(key: string, table: Table) { const relationships = Object.values(table.schema).filter(isRelationshipColumn) return relationships.some( diff --git a/packages/server/src/tests/utilities/api/application.ts b/packages/server/src/tests/utilities/api/application.ts index 9c784bade1..3951bba667 100644 --- a/packages/server/src/tests/utilities/api/application.ts +++ b/packages/server/src/tests/utilities/api/application.ts @@ -1,17 +1,96 @@ import { Response } from "supertest" -import { App } from "@budibase/types" +import { + App, + type CreateAppRequest, + type FetchAppDefinitionResponse, + type FetchAppPackageResponse, +} from "@budibase/types" import TestConfiguration from "../TestConfiguration" import { TestAPI } from "./base" +import { AppStatus } from "../../../db/utils" +import { constants } from "@budibase/backend-core" export class ApplicationAPI extends TestAPI { constructor(config: TestConfiguration) { super(config) } + create = async (app: CreateAppRequest): Promise => { + const request = this.request + .post("/api/applications") + .set(this.config.defaultHeaders()) + .expect("Content-Type", /json/) + + for 
(const key of Object.keys(app)) { + request.field(key, (app as any)[key]) + } + + if (app.templateFile) { + request.attach("templateFile", app.templateFile) + } + + const result = await request + + if (result.statusCode !== 200) { + throw new Error(JSON.stringify(result.body)) + } + + return result.body as App + } + + delete = async (appId: string): Promise => { + await this.request + .delete(`/api/applications/${appId}`) + .set(this.config.defaultHeaders()) + .expect(200) + } + + publish = async ( + appId: string + ): Promise<{ _id: string; status: string; appUrl: string }> => { + // While the publish endpoint does take an :appId parameter, it doesn't + // use it. It uses the appId from the context. + let headers = { + ...this.config.defaultHeaders(), + [constants.Header.APP_ID]: appId, + } + const result = await this.request + .post(`/api/applications/${appId}/publish`) + .set(headers) + .expect("Content-Type", /json/) + .expect(200) + return result.body as { _id: string; status: string; appUrl: string } + } + + unpublish = async (appId: string): Promise => { + await this.request + .post(`/api/applications/${appId}/unpublish`) + .set(this.config.defaultHeaders()) + .expect(204) + } + + sync = async ( + appId: string, + { statusCode }: { statusCode: number } = { statusCode: 200 } + ): Promise<{ message: string }> => { + const result = await this.request + .post(`/api/applications/${appId}/sync`) + .set(this.config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(statusCode) + return result.body + } + getRaw = async (appId: string): Promise => { + // While the appPackage endpoint does take an :appId parameter, it doesn't + // use it. It uses the appId from the context. + let headers = { + ...this.config.defaultHeaders(), + [constants.Header.APP_ID]: appId, + } const result = await this.request .get(`/api/applications/${appId}/appPackage`) - .set(this.config.defaultHeaders()) + .set(headers) .expect("Content-Type", /json/) .expect(200) return result @@ -21,4 +100,94 @@ export class ApplicationAPI extends TestAPI { const result = await this.getRaw(appId) return result.body.application as App } + + getDefinition = async ( + appId: string + ): Promise => { + const result = await this.request + .get(`/api/applications/${appId}/definition`) + .set(this.config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + return result.body as FetchAppDefinitionResponse + } + + getAppPackage = async (appId: string): Promise => { + const result = await this.request + .get(`/api/applications/${appId}/appPackage`) + .set(this.config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + return result.body + } + + update = async ( + appId: string, + app: { name?: string; url?: string } + ): Promise => { + const request = this.request + .put(`/api/applications/${appId}`) + .set(this.config.defaultHeaders()) + .expect("Content-Type", /json/) + + for (const key of Object.keys(app)) { + request.field(key, (app as any)[key]) + } + + const result = await request + + if (result.statusCode !== 200) { + throw new Error(JSON.stringify(result.body)) + } + + return result.body as App + } + + updateClient = async (appId: string): Promise => { + // While the updateClient endpoint does take an :appId parameter, it doesn't + // use it. It uses the appId from the context. 
diff --git a/packages/server/src/threads/query.ts b/packages/server/src/threads/query.ts
index 6cdccc7868..9d7b7062a5 100644
--- a/packages/server/src/threads/query.ts
+++ b/packages/server/src/threads/query.ts
@@ -8,6 +8,7 @@ import {
   QueryResponse,
 } from "./definitions"
 import { IsolatedVM } from "../jsRunner/vm"
+import { iifeWrapper } from "../jsRunner/utilities"
 import { getIntegration } from "../integrations"
 import { processStringSync } from "@budibase/string-templates"
 import { context, cache, auth } from "@budibase/backend-core"
@@ -127,7 +128,7 @@ class QueryRunner {

     // transform as required
     if (transformer) {
-      transformer = `(function(){\n${transformer}\n})();`
+      transformer = iifeWrapper(transformer)
       let vm = new IsolatedVM()
       if (datasource.source === SourceName.MONGODB) {
         vm = vm.withParsingBson(rows)
diff --git a/packages/types/src/api/web/app/rows.ts b/packages/types/src/api/web/app/rows.ts
index dad3286754..14e28e4a01 100644
--- a/packages/types/src/api/web/app/rows.ts
+++ b/packages/types/src/api/web/app/rows.ts
@@ -37,6 +37,8 @@ export interface ExportRowsRequest {
   query?: SearchFilters
   sort?: string
   sortOrder?: SortOrder
+  delimiter?: string
+  customHeaders?: { [key: string]: string }
 }

 export type ExportRowsResponse = ReadStream
diff --git a/packages/types/src/api/web/application.ts b/packages/types/src/api/web/application.ts
new file mode 100644
index 0000000000..87a0bd6ef9
--- /dev/null
+++ b/packages/types/src/api/web/application.ts
@@ -0,0 +1,29 @@
+import type { PlanType } from "../../sdk"
+import type { Layout, App, Screen } from "../../documents"
+
+export interface CreateAppRequest {
+  name: string
+  url?: string
+  useTemplate?: string
+  templateName?: string
+  templateKey?: string
+  templateFile?: string
+  includeSampleData?: boolean
+  encryptionPassword?: string
+  templateString?: string
+}
+
+export interface FetchAppDefinitionResponse {
+  layouts: Layout[]
+  screens: Screen[]
+  libraries: string[]
+}
+
+export interface FetchAppPackageResponse {
+  application: App
+  licenseType: PlanType
+  screens: Screen[]
+  layouts: Layout[]
+  clientLibPath: string
+  hasLock: boolean
+}
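For context, the two new ExportRowsRequest fields sit alongside the existing search options. A rough sketch of a request body, restricted to the fields visible in this diff (all values are placeholders, and Partial<> is used so the fields not shown here can be omitted):

import type { ExportRowsRequest, SearchFilters } from "@budibase/types"

// Placeholder values; only the field names come from the interface above
const query: SearchFilters = { equal: { status: "active" } }
const exportRequest: Partial<ExportRowsRequest> = {
  query,
  sort: "firstName",
  delimiter: ";",                              // e.g. produce a semicolon-separated CSV
  customHeaders: { firstName: "First Name" },  // rename the column in the export
}
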
diff --git a/packages/types/src/api/web/index.ts b/packages/types/src/api/web/index.ts
index 75c246ab9b..ab18add208 100644
--- a/packages/types/src/api/web/index.ts
+++ b/packages/types/src/api/web/index.ts
@@ -1,3 +1,4 @@
+export * from "./application"
 export * from "./analytics"
 export * from "./auth"
 export * from "./user"
diff --git a/packages/types/src/documents/app/app.ts b/packages/types/src/documents/app/app.ts
index 08aafc6527..ae4f3fa6da 100644
--- a/packages/types/src/documents/app/app.ts
+++ b/packages/types/src/documents/app/app.ts
@@ -1,4 +1,4 @@
-import { User, Document } from "../"
+import { User, Document, Plugin } from "../"
 import { SocketSession } from "../../sdk"

 export type AppMetadataErrors = { [key: string]: string[] }
@@ -24,6 +24,8 @@ export interface App extends Document {
   icon?: AppIcon
   features?: AppFeatures
   automations?: AutomationSettings
+  usedPlugins?: Plugin[]
+  upgradableVersion?: string
 }

 export interface AppInstance {
diff --git a/packages/worker/scripts/test.sh b/packages/worker/scripts/test.sh
index eba95c4916..17b3ee17f4 100644
--- a/packages/worker/scripts/test.sh
+++ b/packages/worker/scripts/test.sh
@@ -4,10 +4,10 @@ set -e
 if [[ -n $CI ]]
 then
   # Running in ci, where resources are limited
-  echo "jest --coverage --maxWorkers=2 --forceExit --bail"
-  jest --coverage --maxWorkers=2 --forceExit --bail
+  echo "jest --coverage --maxWorkers=2 --forceExit --bail $@"
+  jest --coverage --maxWorkers=2 --forceExit --bail $@
 else
   # --maxWorkers performs better in development
-  echo "jest --coverage --maxWorkers=2 --forceExit"
-  jest --coverage --maxWorkers=2 --forceExit
+  echo "jest --coverage --maxWorkers=2 --forceExit $@"
+  jest --coverage --maxWorkers=2 --forceExit $@
 fi
\ No newline at end of file
diff --git a/qa-core/src/internal-api/api/apis/AppAPI.ts b/qa-core/src/internal-api/api/apis/AppAPI.ts
index a9f9a6a841..8b291a628e 100644
--- a/qa-core/src/internal-api/api/apis/AppAPI.ts
+++ b/qa-core/src/internal-api/api/apis/AppAPI.ts
@@ -1,11 +1,10 @@
-import { App } from "@budibase/types"
+import { App, CreateAppRequest } from "@budibase/types"
 import { Response } from "node-fetch"
 import {
   RouteConfig,
   AppPackageResponse,
   DeployConfig,
   MessageResponse,
-  CreateAppRequest,
 } from "../../../types"
 import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
 import BaseAPI from "./BaseAPI"
diff --git a/qa-core/src/internal-api/fixtures/applications.ts b/qa-core/src/internal-api/fixtures/applications.ts
index 01dd18fc6a..59f73ba863 100644
--- a/qa-core/src/internal-api/fixtures/applications.ts
+++ b/qa-core/src/internal-api/fixtures/applications.ts
@@ -1,5 +1,5 @@
 import { generator } from "../../shared"
-import { CreateAppRequest } from "../../types"
+import { CreateAppRequest } from "@budibase/types"

 function uniqueWord() {
   return generator.word() + generator.hash()
diff --git a/qa-core/src/internal-api/tests/tables/tables.spec.ts b/qa-core/src/internal-api/tests/tables/tables.spec.ts
index 09d8f68e86..a38b8e6059 100644
--- a/qa-core/src/internal-api/tests/tables/tables.spec.ts
+++ b/qa-core/src/internal-api/tests/tables/tables.spec.ts
@@ -13,17 +13,6 @@ describe("Internal API - Table Operations", () => {
     await config.afterAll()
   })

-  async function createAppFromTemplate() {
-    return config.api.apps.create({
-      name: generator.word(),
-      url: `/${generator.word()}`,
-      useTemplate: "true",
-      templateName: "Near Miss Register",
-      templateKey: "app/near-miss-register",
-      templateFile: undefined,
-    })
-  }
-
   it("Create and delete table, columns and rows", async () => {
     // create the app
     await config.createApp(fixtures.apps.appFromTemplate())
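With CreateAppRequest now exported from @budibase/types, template-based fixtures can be typed directly against it. A sketch equivalent to the inline helper removed from tables.spec.ts above, assuming it lives beside the existing qa-core fixtures:

import { CreateAppRequest } from "@budibase/types"
import { generator } from "../../shared"

// Mirrors the removed createAppFromTemplate() helper as a reusable fixture
export function appFromNearMissTemplate(): CreateAppRequest {
  const name = generator.word() + generator.hash()
  return {
    name,
    url: `/${name}`,
    useTemplate: "true",
    templateName: "Near Miss Register",
    templateKey: "app/near-miss-register",
  }
}
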
diff --git a/qa-core/src/shared/BudibaseTestConfiguration.ts b/qa-core/src/shared/BudibaseTestConfiguration.ts
index 18b7c89ec8..9a12f3e65d 100644
--- a/qa-core/src/shared/BudibaseTestConfiguration.ts
+++ b/qa-core/src/shared/BudibaseTestConfiguration.ts
@@ -1,8 +1,8 @@
 import { BudibaseInternalAPI } from "../internal-api"
 import { AccountInternalAPI } from "../account-api"
-import { APIRequestOpts, CreateAppRequest, State } from "../types"
+import { APIRequestOpts, State } from "../types"
 import * as fixtures from "../internal-api/fixtures"
-import { CreateAccountRequest } from "@budibase/types"
+import { CreateAccountRequest, CreateAppRequest } from "@budibase/types"

 export default class BudibaseTestConfiguration {
   // apis
diff --git a/qa-core/src/types/app.ts b/qa-core/src/types/app.ts
deleted file mode 100644
index 7159112024..0000000000
--- a/qa-core/src/types/app.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-// TODO: Integrate with budibase
-export interface CreateAppRequest {
-  name: string
-  url: string
-  useTemplate?: string
-  templateName?: string
-  templateKey?: string
-  templateFile?: string
-  includeSampleData?: boolean
-}
diff --git a/qa-core/src/types/index.ts b/qa-core/src/types/index.ts
index 9bde46c66e..a44df4ef3c 100644
--- a/qa-core/src/types/index.ts
+++ b/qa-core/src/types/index.ts
@@ -1,6 +1,5 @@
 export * from "./api"
 export * from "./apiKeyResponse"
-export * from "./app"
 export * from "./appPackage"
 export * from "./deploy"
 export * from "./newAccount"
diff --git a/yarn.lock b/yarn.lock
index 21d3c5a76b..2c367e469b 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3902,19 +3902,6 @@
     magic-string "^0.25.7"
     resolve "^1.17.0"

-"@rollup/plugin-commonjs@^18.0.0":
-  version "18.1.0"
-  resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-18.1.0.tgz#5a760d757af168a50727c0ae080251fbfcc5eb02"
-  integrity sha512-h3e6T9rUxVMAQswpDIobfUHn/doMzM9sgkMrsMWCFLmB84PSoC8mV8tOloAJjSRwdqhXBqstlX2BwBpHJvbhxg==
-  dependencies:
-    "@rollup/pluginutils" "^3.1.0"
-    commondir "^1.0.1"
-    estree-walker "^2.0.1"
-    glob "^7.1.6"
-    is-reference "^1.2.1"
-    magic-string "^0.25.7"
-    resolve "^1.17.0"
-
 "@rollup/plugin-commonjs@^25.0.7":
   version "25.0.7"
   resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-25.0.7.tgz#145cec7589ad952171aeb6a585bbeabd0fd3b4cf"
@@ -19257,7 +19244,7 @@ rollup@2.45.2:
   optionalDependencies:
     fsevents "~2.3.1"

-rollup@^2.36.2, rollup@^2.44.0, rollup@^2.45.2:
+rollup@^2.36.2, rollup@^2.45.2:
   version "2.79.1"
   resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7"
   integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==