2021-03-20 08:07:47 +13:00
|
|
|
const sanitize = require("sanitize-s3-objectkey")
|
2022-04-29 09:39:21 +12:00
|
|
|
import AWS from "aws-sdk"
|
|
|
|
import stream from "stream"
|
|
|
|
import fetch from "node-fetch"
|
|
|
|
import tar from "tar-fs"
|
2021-03-20 08:07:47 +13:00
|
|
|
const zlib = require("zlib")
|
2022-04-29 09:39:21 +12:00
|
|
|
import { promisify } from "util"
|
|
|
|
import { join } from "path"
|
|
|
|
import fs from "fs"
|
|
|
|
import env from "../environment"
|
|
|
|
import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
|
|
|
|
import { v4 } from "uuid"
|
|
|
|
import { APP_PREFIX, APP_DEV_PREFIX } from "../db/utils"
|
2021-03-20 08:07:47 +13:00
|
|
|
|
|
|
|
// promisified stream.pipeline so tarball download/extract chains can be awaited
const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
// (maps bucket name -> in-flight createBucket promise so concurrent callers
// can await the same creation instead of racing to create the bucket twice)
const STATE = {
  bucketCreationPromises: {},
}
|
2021-03-20 08:07:47 +13:00
|
|
|
|
2022-10-11 08:08:59 +13:00
|
|
|
// subset of S3 listObjectsV2 parameters used by listAllObjects
type ListParams = {
  // pagination token returned by a previous listObjectsV2 call
  ContinuationToken?: string
}
|
|
|
|
|
2022-11-09 00:49:07 +13:00
|
|
|
// parameters accepted by the upload() function
type UploadParams = {
  // destination bucket (sanitized before use)
  bucket: string
  // object key to store under (windows back slashes are normalised)
  filename: string
  // local filesystem path of the file contents to upload
  path: string
  // explicit content type; derived from the file extension when omitted
  type?: string
  // can be undefined, we will remove it
  metadata?: {
    [key: string]: string | undefined
  }
}
|
|
|
|
|
2022-05-04 09:58:19 +12:00
|
|
|
const CONTENT_TYPE_MAP: any = {
|
2022-11-09 00:49:07 +13:00
|
|
|
txt: "text/plain",
|
2021-03-23 07:06:10 +13:00
|
|
|
html: "text/html",
|
|
|
|
css: "text/css",
|
|
|
|
js: "application/javascript",
|
2021-07-08 23:55:51 +12:00
|
|
|
json: "application/json",
|
2022-10-15 07:24:03 +13:00
|
|
|
gz: "application/gzip",
|
2021-03-23 07:06:10 +13:00
|
|
|
}
|
2021-03-24 06:54:02 +13:00
|
|
|
// content types whose bodies should be returned as utf8 strings by retrieve();
// anything else comes back as the raw buffer
const STRING_CONTENT_TYPES = [
  CONTENT_TYPE_MAP.html,
  CONTENT_TYPE_MAP.css,
  CONTENT_TYPE_MAP.js,
  CONTENT_TYPE_MAP.json,
]
|
|
|
|
|
2021-05-14 00:29:53 +12:00
|
|
|
// does normal sanitization and then swaps dev apps to apps
|
2022-10-12 08:25:22 +13:00
|
|
|
export function sanitizeKey(input: string) {
|
2021-05-14 00:29:53 +12:00
|
|
|
return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
|
|
|
|
}
|
|
|
|
|
|
|
|
// simply handles the dev app to app conversion
|
2022-10-12 08:25:22 +13:00
|
|
|
export function sanitizeBucket(input: string) {
|
2021-05-14 00:29:53 +12:00
|
|
|
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
|
|
|
|
}
|
|
|
|
|
2022-10-12 08:25:22 +13:00
|
|
|
function publicPolicy(bucketName: string) {
|
2021-03-24 06:54:02 +13:00
|
|
|
return {
|
|
|
|
Version: "2012-10-17",
|
|
|
|
Statement: [
|
|
|
|
{
|
|
|
|
Effect: "Allow",
|
|
|
|
Principal: {
|
|
|
|
AWS: ["*"],
|
|
|
|
},
|
|
|
|
Action: "s3:GetObject",
|
|
|
|
Resource: [`arn:aws:s3:::${bucketName}/*`],
|
|
|
|
},
|
|
|
|
],
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-11 07:01:48 +12:00
|
|
|
// buckets which are given a public-read policy by makeSureBucketExists
// when they are created
const PUBLIC_BUCKETS = [
  ObjectStoreBuckets.APPS,
  ObjectStoreBuckets.GLOBAL,
  ObjectStoreBuckets.PLUGINS,
]
|
2021-03-23 07:06:10 +13:00
|
|
|
|
2021-03-20 08:07:47 +13:00
|
|
|
/**
|
|
|
|
* Gets a connection to the object store using the S3 SDK.
|
|
|
|
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
|
|
|
|
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
|
|
|
|
* @constructor
|
|
|
|
*/
|
2022-10-12 08:25:22 +13:00
|
|
|
export const ObjectStore = (bucket: string) => {
|
2022-05-04 09:58:19 +12:00
|
|
|
const config: any = {
|
2021-03-25 01:54:59 +13:00
|
|
|
s3ForcePathStyle: true,
|
2021-03-23 07:06:10 +13:00
|
|
|
signatureVersion: "v4",
|
2022-02-26 04:55:19 +13:00
|
|
|
apiVersion: "2006-03-01",
|
2022-08-26 04:01:12 +12:00
|
|
|
accessKeyId: env.MINIO_ACCESS_KEY,
|
|
|
|
secretAccessKey: env.MINIO_SECRET_KEY,
|
|
|
|
region: env.AWS_REGION,
|
2022-07-01 07:26:49 +12:00
|
|
|
}
|
|
|
|
if (bucket) {
|
|
|
|
config.params = {
|
2021-05-14 00:29:53 +12:00
|
|
|
Bucket: sanitizeBucket(bucket),
|
2022-07-01 07:26:49 +12:00
|
|
|
}
|
2021-03-24 06:54:02 +13:00
|
|
|
}
|
|
|
|
if (env.MINIO_URL) {
|
|
|
|
config.endpoint = env.MINIO_URL
|
|
|
|
}
|
|
|
|
return new AWS.S3(config)
|
2021-03-20 08:07:47 +13:00
|
|
|
}
|
|
|
|
|
2021-03-24 06:54:02 +13:00
|
|
|
/**
 * Given an object store and a bucket name this will make sure the bucket exists,
 * if it does not exist then it will create it.
 */
export const makeSureBucketExists = async (client: any, bucketName: string) => {
  bucketName = sanitizeBucket(bucketName)
  try {
    // cheap existence probe - throws 404 when missing, 403 when inaccessible
    await client
      .headBucket({
        Bucket: bucketName,
      })
      .promise()
  } catch (err: any) {
    // shared map of in-flight bucket creations, so concurrent callers
    // await the same createBucket request rather than racing
    const promises: any = STATE.bucketCreationPromises
    const doesntExist = err.statusCode === 404,
      noAccess = err.statusCode === 403
    if (promises[bucketName]) {
      // another caller is already creating this bucket - wait for it
      await promises[bucketName]
    } else if (doesntExist || noAccess) {
      if (doesntExist) {
        // bucket doesn't exist create it
        promises[bucketName] = client
          .createBucket({
            Bucket: bucketName,
          })
          .promise()
        await promises[bucketName]
        delete promises[bucketName]
      }
      // NOTE: on a 403 no creation happens - only the policy step below runs
      // public buckets are quite hidden in the system, make sure
      // no bucket is set accidentally
      if (PUBLIC_BUCKETS.includes(bucketName)) {
        await client
          .putBucketPolicy({
            Bucket: bucketName,
            Policy: JSON.stringify(publicPolicy(bucketName)),
          })
          .promise()
      }
    } else {
      // any other error (network, 5xx, ...) is treated as fatal
      throw new Error("Unable to write to object store bucket.")
    }
  }
}
|
|
|
|
|
2021-03-23 07:06:10 +13:00
|
|
|
/**
|
|
|
|
* Uploads the contents of a file given the required parameters, useful when
|
|
|
|
* temp files in use (for example file uploaded as an attachment).
|
|
|
|
*/
|
2022-05-04 09:58:19 +12:00
|
|
|
export const upload = async ({
|
2021-05-15 02:43:41 +12:00
|
|
|
bucket: bucketName,
|
|
|
|
filename,
|
|
|
|
path,
|
|
|
|
type,
|
|
|
|
metadata,
|
2022-11-09 00:49:07 +13:00
|
|
|
}: UploadParams) => {
|
2022-10-15 07:24:03 +13:00
|
|
|
const extension = filename.split(".").pop()
|
2021-03-23 07:06:10 +13:00
|
|
|
const fileBytes = fs.readFileSync(path)
|
|
|
|
|
2022-05-04 09:58:19 +12:00
|
|
|
const objectStore = ObjectStore(bucketName)
|
|
|
|
await makeSureBucketExists(objectStore, bucketName)
|
2021-03-25 07:31:13 +13:00
|
|
|
|
2022-11-09 00:49:07 +13:00
|
|
|
let contentType = type
|
|
|
|
if (!contentType) {
|
|
|
|
contentType = extension
|
|
|
|
? CONTENT_TYPE_MAP[extension.toLowerCase()]
|
|
|
|
: CONTENT_TYPE_MAP.txt
|
|
|
|
}
|
2022-05-04 09:58:19 +12:00
|
|
|
const config: any = {
|
2021-03-23 07:06:10 +13:00
|
|
|
// windows file paths need to be converted to forward slashes for s3
|
2021-05-14 00:29:53 +12:00
|
|
|
Key: sanitizeKey(filename),
|
2021-03-23 07:06:10 +13:00
|
|
|
Body: fileBytes,
|
2022-11-09 00:49:07 +13:00
|
|
|
ContentType: contentType,
|
2021-03-23 07:06:10 +13:00
|
|
|
}
|
2022-11-09 00:49:07 +13:00
|
|
|
if (metadata && typeof metadata === "object") {
|
2022-11-09 00:34:16 +13:00
|
|
|
// remove any nullish keys from the metadata object, as these may be considered invalid
|
|
|
|
for (let key of Object.keys(metadata)) {
|
|
|
|
if (!metadata[key] || typeof metadata[key] !== "string") {
|
|
|
|
delete metadata[key]
|
|
|
|
}
|
|
|
|
}
|
2021-03-23 07:06:10 +13:00
|
|
|
config.Metadata = metadata
|
|
|
|
}
|
|
|
|
return objectStore.upload(config).promise()
|
|
|
|
}
|
|
|
|
|
2021-03-24 06:54:02 +13:00
|
|
|
/**
|
|
|
|
* Similar to the upload function but can be used to send a file stream
|
|
|
|
* through to the object store.
|
|
|
|
*/
|
2022-05-04 09:58:19 +12:00
|
|
|
export const streamUpload = async (
|
2022-10-11 08:08:59 +13:00
|
|
|
bucketName: string,
|
|
|
|
filename: string,
|
2022-05-04 09:58:19 +12:00
|
|
|
stream: any,
|
|
|
|
extra = {}
|
|
|
|
) => {
|
|
|
|
const objectStore = ObjectStore(bucketName)
|
|
|
|
await makeSureBucketExists(objectStore, bucketName)
|
2021-03-20 08:07:47 +13:00
|
|
|
|
2022-08-12 02:25:04 +12:00
|
|
|
// Set content type for certain known extensions
|
|
|
|
if (filename?.endsWith(".js")) {
|
|
|
|
extra = {
|
|
|
|
...extra,
|
|
|
|
ContentType: "application/javascript",
|
|
|
|
}
|
2022-09-30 07:30:53 +13:00
|
|
|
} else if (filename?.endsWith(".svg")) {
|
|
|
|
extra = {
|
|
|
|
...extra,
|
|
|
|
ContentType: "image",
|
|
|
|
}
|
2022-08-12 02:25:04 +12:00
|
|
|
}
|
|
|
|
|
2021-03-20 08:07:47 +13:00
|
|
|
const params = {
|
2021-05-14 00:29:53 +12:00
|
|
|
Bucket: sanitizeBucket(bucketName),
|
|
|
|
Key: sanitizeKey(filename),
|
2021-03-20 08:07:47 +13:00
|
|
|
Body: stream,
|
2021-06-04 08:33:44 +12:00
|
|
|
...extra,
|
2021-03-20 08:07:47 +13:00
|
|
|
}
|
|
|
|
return objectStore.upload(params).promise()
|
|
|
|
}
|
|
|
|
|
2021-03-24 06:54:02 +13:00
|
|
|
/**
|
|
|
|
* retrieves the contents of a file from the object store, if it is a known content type it
|
|
|
|
* will be converted, otherwise it will be returned as a buffer stream.
|
|
|
|
*/
|
2022-10-11 08:08:59 +13:00
|
|
|
export const retrieve = async (bucketName: string, filepath: string) => {
|
2022-05-04 09:58:19 +12:00
|
|
|
const objectStore = ObjectStore(bucketName)
|
2021-03-24 06:54:02 +13:00
|
|
|
const params = {
|
2021-05-14 00:29:53 +12:00
|
|
|
Bucket: sanitizeBucket(bucketName),
|
|
|
|
Key: sanitizeKey(filepath),
|
2021-03-24 06:54:02 +13:00
|
|
|
}
|
2022-05-04 09:58:19 +12:00
|
|
|
const response: any = await objectStore.getObject(params).promise()
|
2021-03-24 06:54:02 +13:00
|
|
|
// currently these are all strings
|
|
|
|
if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
|
|
|
|
return response.Body.toString("utf8")
|
|
|
|
} else {
|
|
|
|
return response.Body
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-11 08:08:59 +13:00
|
|
|
export const listAllObjects = async (bucketName: string, path: string) => {
|
|
|
|
const objectStore = ObjectStore(bucketName)
|
|
|
|
const list = (params: ListParams = {}) => {
|
|
|
|
return objectStore
|
|
|
|
.listObjectsV2({
|
|
|
|
...params,
|
|
|
|
Bucket: sanitizeBucket(bucketName),
|
|
|
|
Prefix: sanitizeKey(path),
|
|
|
|
})
|
|
|
|
.promise()
|
|
|
|
}
|
|
|
|
let isTruncated = false,
|
|
|
|
token,
|
|
|
|
objects: AWS.S3.Types.Object[] = []
|
|
|
|
do {
|
|
|
|
let params: ListParams = {}
|
|
|
|
if (token) {
|
|
|
|
params.ContinuationToken = token
|
|
|
|
}
|
|
|
|
const response = await list(params)
|
|
|
|
if (response.Contents) {
|
|
|
|
objects = objects.concat(response.Contents)
|
|
|
|
}
|
|
|
|
isTruncated = !!response.IsTruncated
|
|
|
|
} while (isTruncated)
|
|
|
|
return objects
|
|
|
|
}
|
|
|
|
|
2021-03-26 07:03:58 +13:00
|
|
|
/**
|
|
|
|
* Same as retrieval function but puts to a temporary file.
|
|
|
|
*/
|
2022-10-11 08:08:59 +13:00
|
|
|
export const retrieveToTmp = async (bucketName: string, filepath: string) => {
|
2021-05-14 00:29:53 +12:00
|
|
|
bucketName = sanitizeBucket(bucketName)
|
|
|
|
filepath = sanitizeKey(filepath)
|
2022-05-04 09:58:19 +12:00
|
|
|
const data = await retrieve(bucketName, filepath)
|
2021-05-08 00:55:30 +12:00
|
|
|
const outputPath = join(budibaseTempDir(), v4())
|
2021-03-26 07:03:58 +13:00
|
|
|
fs.writeFileSync(outputPath, data)
|
|
|
|
return outputPath
|
|
|
|
}
|
|
|
|
|
2022-10-11 08:08:59 +13:00
|
|
|
export const retrieveDirectory = async (bucketName: string, path: string) => {
|
|
|
|
let writePath = join(budibaseTempDir(), v4())
|
2022-10-12 07:28:13 +13:00
|
|
|
fs.mkdirSync(writePath)
|
2022-10-11 08:08:59 +13:00
|
|
|
const objects = await listAllObjects(bucketName, path)
|
|
|
|
let fullObjects = await Promise.all(
|
|
|
|
objects.map(obj => retrieve(bucketName, obj.Key!))
|
|
|
|
)
|
|
|
|
let count = 0
|
|
|
|
for (let obj of objects) {
|
|
|
|
const filename = obj.Key!
|
|
|
|
const data = fullObjects[count++]
|
|
|
|
const possiblePath = filename.split("/")
|
|
|
|
if (possiblePath.length > 1) {
|
|
|
|
const dirs = possiblePath.slice(0, possiblePath.length - 1)
|
|
|
|
fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
|
|
|
|
}
|
|
|
|
fs.writeFileSync(join(writePath, ...possiblePath), data)
|
|
|
|
}
|
|
|
|
return writePath
|
|
|
|
}
|
|
|
|
|
2022-01-07 06:59:29 +13:00
|
|
|
/**
|
|
|
|
* Delete a single file.
|
|
|
|
*/
|
2022-10-11 08:08:59 +13:00
|
|
|
export const deleteFile = async (bucketName: string, filepath: string) => {
|
2022-05-04 09:58:19 +12:00
|
|
|
const objectStore = ObjectStore(bucketName)
|
|
|
|
await makeSureBucketExists(objectStore, bucketName)
|
2022-01-07 06:59:29 +13:00
|
|
|
const params = {
|
|
|
|
Bucket: bucketName,
|
|
|
|
Key: filepath,
|
|
|
|
}
|
|
|
|
return objectStore.deleteObject(params)
|
|
|
|
}
|
|
|
|
|
2022-10-12 08:25:22 +13:00
|
|
|
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
|
2022-05-04 09:58:19 +12:00
|
|
|
const objectStore = ObjectStore(bucketName)
|
|
|
|
await makeSureBucketExists(objectStore, bucketName)
|
2022-01-07 06:59:29 +13:00
|
|
|
const params = {
|
|
|
|
Bucket: bucketName,
|
|
|
|
Delete: {
|
2022-05-04 09:58:19 +12:00
|
|
|
Objects: filepaths.map((path: any) => ({ Key: path })),
|
2022-01-07 06:59:29 +13:00
|
|
|
},
|
|
|
|
}
|
|
|
|
return objectStore.deleteObjects(params).promise()
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Delete a path, including everything within.
 */
export const deleteFolder = async (
  bucketName: string,
  folder: string
): Promise<any> => {
  bucketName = sanitizeBucket(bucketName)
  folder = sanitizeKey(folder)
  const client = ObjectStore(bucketName)
  const listParams = {
    Bucket: bucketName,
    Prefix: folder,
  }

  let response: any = await client.listObjects(listParams).promise()
  if (response.Contents.length === 0) {
    // nothing under this prefix - done
    return
  }
  const deleteParams: any = {
    Bucket: bucketName,
    Delete: {
      Objects: [],
    },
  }

  // delete every key returned by the listing in one batch request
  response.Contents.forEach((content: any) => {
    deleteParams.Delete.Objects.push({ Key: content.Key })
  })

  response = await client.deleteObjects(deleteParams).promise()
  // can only empty 1000 items at once
  // (a full batch of 1000 deletions is taken to mean more keys may remain,
  // so recurse until a partial batch comes back)
  if (response.Deleted.length === 1000) {
    return deleteFolder(bucketName, folder)
  }
}
|
|
|
|
|
2022-05-04 09:58:19 +12:00
|
|
|
export const uploadDirectory = async (
|
2022-10-12 08:25:22 +13:00
|
|
|
bucketName: string,
|
|
|
|
localPath: string,
|
|
|
|
bucketPath: string
|
2022-05-04 09:58:19 +12:00
|
|
|
) => {
|
2021-05-14 00:29:53 +12:00
|
|
|
bucketName = sanitizeBucket(bucketName)
|
2021-03-20 08:07:47 +13:00
|
|
|
let uploads = []
|
|
|
|
const files = fs.readdirSync(localPath, { withFileTypes: true })
|
|
|
|
for (let file of files) {
|
2021-05-14 00:29:53 +12:00
|
|
|
const path = sanitizeKey(join(bucketPath, file.name))
|
2021-03-20 08:07:47 +13:00
|
|
|
const local = join(localPath, file.name)
|
|
|
|
if (file.isDirectory()) {
|
2022-05-04 09:58:19 +12:00
|
|
|
uploads.push(uploadDirectory(bucketName, local, path))
|
2021-03-20 08:07:47 +13:00
|
|
|
} else {
|
2022-05-04 09:58:19 +12:00
|
|
|
uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
|
2021-03-20 08:07:47 +13:00
|
|
|
}
|
|
|
|
}
|
|
|
|
await Promise.all(uploads)
|
2022-07-01 07:26:49 +12:00
|
|
|
return files
|
2021-03-20 08:07:47 +13:00
|
|
|
}
|
|
|
|
|
2022-09-07 02:46:11 +12:00
|
|
|
exports.downloadTarballDirect = async (
|
|
|
|
url: string,
|
|
|
|
path: string,
|
|
|
|
headers = {}
|
|
|
|
) => {
|
2022-06-07 10:30:36 +12:00
|
|
|
path = sanitizeKey(path)
|
2022-09-07 02:46:11 +12:00
|
|
|
const response = await fetch(url, { headers })
|
2022-06-07 10:30:36 +12:00
|
|
|
if (!response.ok) {
|
|
|
|
throw new Error(`unexpected response ${response.statusText}`)
|
|
|
|
}
|
|
|
|
|
|
|
|
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
|
|
|
|
}
|
|
|
|
|
2022-10-12 08:25:22 +13:00
|
|
|
export const downloadTarball = async (
|
|
|
|
url: string,
|
|
|
|
bucketName: string,
|
|
|
|
path: string
|
|
|
|
) => {
|
2021-05-14 00:29:53 +12:00
|
|
|
bucketName = sanitizeBucket(bucketName)
|
|
|
|
path = sanitizeKey(path)
|
2021-03-20 08:07:47 +13:00
|
|
|
const response = await fetch(url)
|
|
|
|
if (!response.ok) {
|
|
|
|
throw new Error(`unexpected response ${response.statusText}`)
|
|
|
|
}
|
|
|
|
|
|
|
|
const tmpPath = join(budibaseTempDir(), path)
|
|
|
|
await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
|
2021-10-07 03:36:40 +13:00
|
|
|
if (!env.isTest() && env.SELF_HOSTED) {
|
2022-05-04 09:58:19 +12:00
|
|
|
await uploadDirectory(bucketName, tmpPath, path)
|
2021-03-26 03:46:32 +13:00
|
|
|
}
|
2021-03-20 08:07:47 +13:00
|
|
|
// return the temporary path incase there is a use for it
|
|
|
|
return tmpPath
|
|
|
|
}
|