From a4c0328c53610fab0209be6e25af502609d9e38d Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Mon, 22 Apr 2024 16:30:57 +0100 Subject: [PATCH] REST file handling and SMTP automation block attachments (#13403) * handle files in rest connector * fetch presigned url and return * further updates to handle files in rest connector * remove unused important and fix extension bug * wrong expiry param * tests * add const for temp bucket * handle ttl on bucket * more bucket ttl work * split out fileresponse and xmlresponse into utils * lint * remove log * fix tests * some pr comments * update function naming and lint * adding back needed response for frontend * use fsp * handle different content-disposition and potential path traversal * add test container for s3 / minio * add test case for filename* and ascii filenames * move tests into separate describe * remove log * up timeout * switch to minio image instead of localstack * use minio image instead of s3 for testing * stream file upload instead * use streamUpload and update signatures * update bucketcreate return * throw real error * tidy up * pro * pro ref fix? * pro fix * pro fix? * move minio test provider to backend-core * update email builder to allow attachments * testing for sending files via smtp * use backend-core minio test container in server * handle different types of url * fix minio test provider * test with container host * lint * try different hostname? * Revert "try different hostname?" This reverts commit cfefdb8ded2b49462604053cf140e7292771c651. 
* fix issue with fetching of signed url with test minio * update autoamtion attachments to take filename and url * fix tests * pro ref * fix parsing of url object * pr comments and linting * pro ref * fix pro again * fix pro * account-portal * fix null issue * fix ref * ref * When sending a file attachment in email fetch it directly from our object store * add more checks to ensure we're working with a signed url * update test to account for direct object store read * formatting * fix time issues within test * update bucket and path extraction to regex * use const in regex * pro * Updating TTL handling in upload functions (#13539) * Updating TTL handling in upload functions * describe ttl type * account for ttl creation in existing buckets and update types * fix tests * pro * pro --- packages/backend-core/src/environment.ts | 2 + .../src/objectStore/objectStore.ts | 121 ++++++++++++++---- .../backend-core/src/objectStore/utils.ts | 26 ++++ .../tests/core/utilities/index.ts | 3 + .../tests/core/utilities/minio.ts | 34 +++++ .../SetupPanel/AutomationBlockSetup.svelte | 56 +++++++- .../integration/KeyValueBuilder.svelte | 29 +++-- packages/cli/src/backups/objectStore.ts | 4 +- packages/pro | 2 +- packages/server/package.json | 3 + .../src/automations/steps/sendSmtpEmail.ts | 12 +- .../automations/tests/sendSmtpEmail.spec.ts | 8 ++ packages/server/src/integrations/rest.ts | 68 +++++----- .../src/integrations/tests/rest.spec.ts | 116 ++++++++++++++++- .../server/src/integrations/utils/utils.ts | 78 ++++++++++- .../src/utilities/fileSystem/clientLibrary.ts | 28 ++-- .../server/src/utilities/workerRequests.ts | 5 +- .../types/src/documents/app/automation.ts | 8 ++ .../src/api/controllers/global/email.ts | 2 + .../api/routes/global/tests/realEmail.spec.ts | 47 ++++++- packages/worker/src/tests/api/email.ts | 4 +- packages/worker/src/tests/jestEnv.ts | 4 +- .../worker/src/tests/structures/configs.ts | 4 +- packages/worker/src/utilities/email.ts | 40 +++++- yarn.lock | 19 
++- 25 files changed, 619 insertions(+), 104 deletions(-) create mode 100644 packages/backend-core/tests/core/utilities/minio.ts diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 8dbc904643..9ade81b9d7 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -29,6 +29,7 @@ const DefaultBucketName = { TEMPLATES: "templates", GLOBAL: "global", PLUGINS: "plugins", + TEMP: "tmp-file-attachments", } const selfHosted = !!parseInt(process.env.SELF_HOSTED || "") @@ -146,6 +147,7 @@ const environment = { process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL, PLUGIN_BUCKET_NAME: process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS, + TEMP_BUCKET_NAME: process.env.TEMP_BUCKET_NAME || DefaultBucketName.TEMP, USE_COUCH: process.env.USE_COUCH || true, MOCK_REDIS: process.env.MOCK_REDIS, DEFAULT_LICENSE: process.env.DEFAULT_LICENSE, diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts index 8d18fb97fd..aa5365c5c3 100644 --- a/packages/backend-core/src/objectStore/objectStore.ts +++ b/packages/backend-core/src/objectStore/objectStore.ts @@ -7,31 +7,41 @@ import tar from "tar-fs" import zlib from "zlib" import { promisify } from "util" import { join } from "path" -import fs, { ReadStream } from "fs" +import fs, { PathLike, ReadStream } from "fs" import env from "../environment" -import { budibaseTempDir } from "./utils" +import { bucketTTLConfig, budibaseTempDir } from "./utils" import { v4 } from "uuid" import { APP_PREFIX, APP_DEV_PREFIX } from "../db" +import fsp from "fs/promises" const streamPipeline = promisify(stream.pipeline) // use this as a temporary store of buckets that are being created const STATE = { bucketCreationPromises: {}, } +const signedFilePrefix = "/files/signed" type ListParams = { ContinuationToken?: string } -type UploadParams = { +type BaseUploadParams = { bucket: string 
filename: string - path: string type?: string | null - // can be undefined, we will remove it - metadata?: { - [key: string]: string | undefined - } + metadata?: { [key: string]: string | undefined } + body?: ReadableStream | Buffer + ttl?: number + addTTL?: boolean + extra?: any +} + +type UploadParams = BaseUploadParams & { + path?: string | PathLike +} + +type StreamUploadParams = BaseUploadParams & { + stream: ReadStream } const CONTENT_TYPE_MAP: any = { @@ -41,6 +51,8 @@ const CONTENT_TYPE_MAP: any = { js: "application/javascript", json: "application/json", gz: "application/gzip", + svg: "image/svg+xml", + form: "multipart/form-data", } const STRING_CONTENT_TYPES = [ @@ -105,7 +117,10 @@ export function ObjectStore( * Given an object store and a bucket name this will make sure the bucket exists, * if it does not exist then it will create it. */ -export async function makeSureBucketExists(client: any, bucketName: string) { +export async function createBucketIfNotExists( + client: any, + bucketName: string +): Promise<{ created: boolean; exists: boolean }> { bucketName = sanitizeBucket(bucketName) try { await client @@ -113,15 +128,16 @@ export async function makeSureBucketExists(client: any, bucketName: string) { Bucket: bucketName, }) .promise() + return { created: false, exists: true } } catch (err: any) { const promises: any = STATE.bucketCreationPromises const doesntExist = err.statusCode === 404, noAccess = err.statusCode === 403 if (promises[bucketName]) { await promises[bucketName] + return { created: false, exists: true } } else if (doesntExist || noAccess) { if (doesntExist) { - // bucket doesn't exist create it promises[bucketName] = client .createBucket({ Bucket: bucketName, @@ -129,13 +145,15 @@ export async function makeSureBucketExists(client: any, bucketName: string) { .promise() await promises[bucketName] delete promises[bucketName] + return { created: true, exists: false } + } else { + throw new Error("Access denied to object store bucket." 
+ err) } } else { throw new Error("Unable to write to object store bucket.") } } } - /** * Uploads the contents of a file given the required parameters, useful when * temp files in use (for example file uploaded as an attachment). @@ -146,12 +164,22 @@ export async function upload({ path, type, metadata, + body, + ttl, }: UploadParams) { const extension = filename.split(".").pop() - const fileBytes = fs.readFileSync(path) + + const fileBytes = path ? (await fsp.open(path)).createReadStream() : body const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) + const bucketCreated = await createBucketIfNotExists(objectStore, bucketName) + + if (ttl && (bucketCreated.created || bucketCreated.exists)) { + let ttlConfig = bucketTTLConfig(bucketName, ttl) + if (objectStore.putBucketLifecycleConfiguration) { + await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise() + } + } let contentType = type if (!contentType) { @@ -174,6 +202,7 @@ export async function upload({ } config.Metadata = metadata } + return objectStore.upload(config).promise() } @@ -181,14 +210,24 @@ export async function upload({ * Similar to the upload function but can be used to send a file stream * through to the object store. 
*/ -export async function streamUpload( - bucketName: string, - filename: string, - stream: ReadStream | ReadableStream, - extra = {} -) { +export async function streamUpload({ + bucket: bucketName, + stream, + filename, + type, + extra, + ttl, +}: StreamUploadParams) { + const extension = filename.split(".").pop() const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) + const bucketCreated = await createBucketIfNotExists(objectStore, bucketName) + + if (ttl && (bucketCreated.created || bucketCreated.exists)) { + let ttlConfig = bucketTTLConfig(bucketName, ttl) + if (objectStore.putBucketLifecycleConfiguration) { + await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise() + } + } // Set content type for certain known extensions if (filename?.endsWith(".js")) { @@ -203,10 +242,18 @@ export async function streamUpload( } } + let contentType = type + if (!contentType) { + contentType = extension + ? CONTENT_TYPE_MAP[extension.toLowerCase()] + : CONTENT_TYPE_MAP.txt + } + const params = { Bucket: sanitizeBucket(bucketName), Key: sanitizeKey(filename), Body: stream, + ContentType: contentType, ...extra, } return objectStore.upload(params).promise() @@ -286,7 +333,7 @@ export function getPresignedUrl( const signedUrl = new URL(url) const path = signedUrl.pathname const query = signedUrl.search - return `/files/signed${path}${query}` + return `${signedFilePrefix}${path}${query}` } } @@ -341,7 +388,7 @@ export async function retrieveDirectory(bucketName: string, path: string) { */ export async function deleteFile(bucketName: string, filepath: string) { const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) + await createBucketIfNotExists(objectStore, bucketName) const params = { Bucket: bucketName, Key: sanitizeKey(filepath), @@ -351,7 +398,7 @@ export async function deleteFile(bucketName: string, filepath: string) { export async function deleteFiles(bucketName: string, 
filepaths: string[]) { const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) + await createBucketIfNotExists(objectStore, bucketName) const params = { Bucket: bucketName, Delete: { @@ -412,7 +459,13 @@ export async function uploadDirectory( if (file.isDirectory()) { uploads.push(uploadDirectory(bucketName, local, path)) } else { - uploads.push(streamUpload(bucketName, path, fs.createReadStream(local))) + uploads.push( + streamUpload({ + bucket: bucketName, + filename: path, + stream: fs.createReadStream(local), + }) + ) } } await Promise.all(uploads) @@ -467,3 +520,23 @@ export async function getReadStream( } return client.getObject(params).createReadStream() } + +/* +Given a signed url like '/files/signed/tmp-files-attachments/app_123456/myfile.txt' extract +the bucket and the path from it +*/ +export function extractBucketAndPath( + url: string +): { bucket: string; path: string } | null { + const baseUrl = url.split("?")[0] + + const regex = new RegExp(`^${signedFilePrefix}/(?[^/]+)/(?.+)$`) + const match = baseUrl.match(regex) + + if (match && match.groups) { + const { bucket, path } = match.groups + return { bucket, path } + } + + return null +} diff --git a/packages/backend-core/src/objectStore/utils.ts b/packages/backend-core/src/objectStore/utils.ts index 4c3a84ba91..08b5238ff6 100644 --- a/packages/backend-core/src/objectStore/utils.ts +++ b/packages/backend-core/src/objectStore/utils.ts @@ -2,6 +2,7 @@ import { join } from "path" import { tmpdir } from "os" import fs from "fs" import env from "../environment" +import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3" /**************************************************** * NOTE: When adding a new bucket - name * @@ -15,6 +16,7 @@ export const ObjectStoreBuckets = { TEMPLATES: env.TEMPLATES_BUCKET_NAME, GLOBAL: env.GLOBAL_BUCKET_NAME, PLUGINS: env.PLUGIN_BUCKET_NAME, + TEMP: env.TEMP_BUCKET_NAME, } const bbTmp = join(tmpdir(), ".budibase") @@ 
-29,3 +31,27 @@ try { export function budibaseTempDir() { return bbTmp } + +export const bucketTTLConfig = ( + bucketName: string, + days: number +): PutBucketLifecycleConfigurationRequest => { + const lifecycleRule = { + ID: `${bucketName}-ExpireAfter${days}days`, + Prefix: "", + Status: "Enabled", + Expiration: { + Days: days, + }, + } + const lifecycleConfiguration = { + Rules: [lifecycleRule], + } + + const params = { + Bucket: bucketName, + LifecycleConfiguration: lifecycleConfiguration, + } + + return params +} diff --git a/packages/backend-core/tests/core/utilities/index.ts b/packages/backend-core/tests/core/utilities/index.ts index 787d69be2c..b2f19a0286 100644 --- a/packages/backend-core/tests/core/utilities/index.ts +++ b/packages/backend-core/tests/core/utilities/index.ts @@ -4,3 +4,6 @@ export { generator } from "./structures" export * as testContainerUtils from "./testContainerUtils" export * as utils from "./utils" export * from "./jestUtils" +import * as minio from "./minio" + +export const objectStoreTestProviders = { minio } diff --git a/packages/backend-core/tests/core/utilities/minio.ts b/packages/backend-core/tests/core/utilities/minio.ts new file mode 100644 index 0000000000..cef33daa91 --- /dev/null +++ b/packages/backend-core/tests/core/utilities/minio.ts @@ -0,0 +1,34 @@ +import { GenericContainer, Wait, StartedTestContainer } from "testcontainers" +import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy" +import env from "../../../src/environment" + +let container: StartedTestContainer | undefined + +class ObjectStoreWaitStrategy extends AbstractWaitStrategy { + async waitUntilReady(container: any, boundPorts: any, startTime?: Date) { + const logs = Wait.forListeningPorts() + await logs.waitUntilReady(container, boundPorts, startTime) + } +} + +export async function start(): Promise { + container = await new GenericContainer("minio/minio") + .withExposedPorts(9000) + .withCommand(["server", "/data"]) + 
.withEnvironment({ + MINIO_ACCESS_KEY: "budibase", + MINIO_SECRET_KEY: "budibase", + }) + .withWaitStrategy(new ObjectStoreWaitStrategy().withStartupTimeout(30000)) + .start() + + const port = container.getMappedPort(9000) + env._set("MINIO_URL", `http://0.0.0.0:${port}`) +} + +export async function stop() { + if (container) { + await container.stop() + container = undefined + } +} diff --git a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte index 6434c7710d..2d2022299c 100644 --- a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte +++ b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte @@ -32,6 +32,7 @@ import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte" import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte" import BindingSidePanel from "components/common/bindings/BindingSidePanel.svelte" + import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte" import { BindingHelpers, BindingType } from "components/common/bindings/utils" import { bindingsToCompletions, @@ -356,7 +357,8 @@ value.customType !== "queryParams" && value.customType !== "cron" && value.customType !== "triggerSchema" && - value.customType !== "automationFields" + value.customType !== "automationFields" && + value.type !== "attachment" ) } @@ -372,6 +374,15 @@ console.error(error) } }) + const handleAttachmentParams = keyValuObj => { + let params = {} + if (keyValuObj?.length) { + for (let param of keyValuObj) { + params[param.url] = param.filename + } + } + return params + }
@@ -437,6 +448,33 @@
             value={inputData[key]}
             options={Object.keys(table?.schema || {})}
           />
+        {:else if value.type === "attachment"}
+          <div class="attachment-field-wrapper">
+            <div class="label-wrapper">
+              <Label>{label}</Label>
+            </div>
+            <div class="attachment-field-width">
+              <KeyValueBuilder
+                on:change={e =>
+                  onChange(
+                    {
+                      detail: e.detail.map(({ name, value }) => ({
+                        url: name,
+                        filename: value,
+                      })),
+                    },
+                    key
+                  )}
+                object={handleAttachmentParams(inputData[key])}
+                allowJS
+                {bindings}
+                keyBindings
+                customButtonText={"Add attachment"}
+                keyPlaceholder={"URL"}
+                valuePlaceholder={"Filename"}
+              />
+            </div>
+          </div>
{:else if value.customType === "filters"} Define filters @@ -651,14 +689,22 @@ } .block-field { - display: flex; /* Use Flexbox */ + display: flex; justify-content: space-between; - flex-direction: row; /* Arrange label and field side by side */ - align-items: center; /* Align vertically in the center */ - gap: 10px; /* Add some space between label and field */ + flex-direction: row; + align-items: center; + gap: 10px; flex: 1; } + .attachment-field-width { + margin-top: var(--spacing-xs); + } + + .label-wrapper { + margin-top: var(--spacing-s); + } + .test :global(.drawer) { width: 10000px !important; } diff --git a/packages/builder/src/components/integration/KeyValueBuilder.svelte b/packages/builder/src/components/integration/KeyValueBuilder.svelte index 74636fc50c..5ed18a970a 100644 --- a/packages/builder/src/components/integration/KeyValueBuilder.svelte +++ b/packages/builder/src/components/integration/KeyValueBuilder.svelte @@ -35,6 +35,8 @@ export let bindingDrawerLeft export let allowHelpers = true export let customButtonText = null + export let keyBindings = false + export let allowJS = false export let compare = (option, value) => option === value let fields = Object.entries(object || {}).map(([name, value]) => ({ @@ -116,12 +118,23 @@ class:readOnly-menu={readOnly && showMenu} > {#each fields as field, idx} - + {#if keyBindings} + { + field.name = e.detail + changed() + }} + disabled={readOnly} + value={field.name} + {allowJS} + {allowHelpers} + drawerLeft={bindingDrawerLeft} + /> + {:else} + + {/if} {#if isJsonArray(field.value)}