
Merge branch 'develop' of github.com:Budibase/budibase into develop

mike12345567 2022-07-29 16:21:26 +01:00
commit 76f3339a31
30 changed files with 515 additions and 229 deletions

View file

@ -15,4 +15,4 @@
]
}
}
}
}

View file

@ -26,7 +26,7 @@
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop && yarn release:pro:develop",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
"release:pro:develop": "bash scripts/pro/release.sh develop",
"restore": "yarn run clean && yarn run bootstrap && yarn run build",
@ -85,4 +85,4 @@
"install:pro": "bash scripts/pro/install.sh",
"dep:clean": "yarn clean && yarn bootstrap"
}
}
}

View file

@ -80,4 +80,4 @@
"typescript": "4.7.3"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}

View file

@ -15,11 +15,22 @@ export function logAlert(message: string, e?: any) {
console.error(`bb-alert: ${message} ${errorJson}`)
}
export function logAlertWithInfo(
message: string,
db: string,
id: string,
error: any
) {
message = `${message} - db: ${db} - doc: ${id} - error: `
logAlert(message, error)
}
export function logWarn(message: string) {
console.warn(`bb-warn: ${message}`)
}
export default {
logAlert,
logAlertWithInfo,
logWarn,
}
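
The new logAlertWithInfo helper simply prefixes an alert with the database and document that produced it before delegating to logAlert. A small usage sketch with made-up IDs (the import path matches how the server consumes this module later in the commit):

```ts
// Hypothetical call site; "app_dev_123" and "au_456" are placeholder IDs.
import { logAlertWithInfo } from "@budibase/backend-core/logging"

logAlertWithInfo(
  "Failed to write automation metadata",
  "app_dev_123", // db name
  "au_456",      // document ID
  new Error("conflict")
)
// logs roughly: bb-alert: Failed to write automation metadata - db: app_dev_123 - doc: au_456 - error: <serialised error>
```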

View file

@ -86,4 +86,4 @@
"svelte-portal": "^1.0.0"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}

View file

@ -121,4 +121,4 @@
"vite": "^2.1.5"
},
"gitHead": "115189f72a850bfb52b65ec61d932531bf327072"
}
}

View file

@ -32,7 +32,8 @@
if (!results) {
return {}
}
if (results.outputs?.status?.toLowerCase() === "stopped") {
const lcStatus = results.outputs?.status?.toLowerCase()
if (lcStatus === "stopped" || lcStatus === "stopped_error") {
return { yellow: true, message: "Stopped" }
} else if (results.outputs?.success || isTrigger) {
return { positive: true, message: "Success" }
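
The new stopped_error status is folded into the existing yellow "Stopped" badge here. A condensed, standalone version of the mapping (the trailing "Error" branch is an assumption, since the hunk is cut off before it):

```ts
// Condensed sketch of the badge mapping above; not the builder's actual helper.
function badgeForStep(
  outputs: { status?: string; success?: boolean } | undefined,
  isTrigger = false
) {
  if (!outputs) {
    return {}
  }
  const lcStatus = outputs.status?.toLowerCase()
  if (lcStatus === "stopped" || lcStatus === "stopped_error") {
    return { yellow: true, message: "Stopped" }
  } else if (outputs.success || isTrigger) {
    return { positive: true, message: "Success" }
  }
  // assumed fallback – the diff ends before the error branch
  return { negative: true, message: "Error" }
}

badgeForStep({ status: "stopped_error" }) // { yellow: true, message: "Stopped" }
```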

View file

@ -1,5 +1,5 @@
<script>
import { Layout, Icon, ActionButton } from "@budibase/bbui"
import { Layout, Icon, ActionButton, InlineAlert } from "@budibase/bbui"
import StatusRenderer from "./StatusRenderer.svelte"
import DateTimeRenderer from "components/common/renderers/DateTimeRenderer.svelte"
import TestDisplay from "components/automation/AutomationBuilder/TestDisplay.svelte"
@ -9,6 +9,7 @@
export let history
export let appId
export let close
const STOPPED_ERROR = "stopped_error"
$: exists = $automationStore.automations?.find(
auto => auto._id === history?.automationId
@ -32,6 +33,15 @@
<Icon name="JourneyVoyager" />
<div>{history.automationName}</div>
</div>
{#if history.status === STOPPED_ERROR}
<div class="cron-error">
<InlineAlert
type="error"
header="CRON automation disabled"
message="Fix the error and re-publish your app to re-activate."
/>
</div>
{/if}
<div>
{#if exists}
<ActionButton
@ -87,4 +97,10 @@
grid-template-columns: 1fr auto;
gap: var(--spacing-s);
}
.cron-error {
display: flex;
width: 100%;
justify-content: center;
}
</style>

View file

@ -3,7 +3,8 @@
export let value
$: isError = !value || value.toLowerCase() === "error"
$: isStopped = value?.toLowerCase() === "stopped"
$: isStoppedError = value?.toLowerCase() === "stopped_error"
$: isStopped = value?.toLowerCase() === "stopped" || isStoppedError
$: status = getStatus(isError, isStopped)
function getStatus(error, stopped) {

View file

@ -48,4 +48,4 @@
"eslint": "^7.20.0",
"renamer": "^4.0.0"
}
}
}

View file

@ -58,4 +58,4 @@
"rollup-plugin-visualizer": "^5.5.4"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}

View file

@ -10,4 +10,4 @@
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}
}
}

View file

@ -196,4 +196,4 @@
"oracledb": "5.3.0"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}

View file

@ -5,7 +5,11 @@ import {
getDevelopmentAppID,
} from "@budibase/backend-core/db"
import { DocumentTypes, getAutomationParams } from "../../../db/utils"
import { disableAllCrons, enableCronTrigger } from "../../../automations/utils"
import {
disableAllCrons,
enableCronTrigger,
clearMetadata,
} from "../../../automations/utils"
import { app as appCache } from "@budibase/backend-core/cache"
import {
getAppId,
@ -80,6 +84,7 @@ async function initDeployedApp(prodAppId: any) {
})
)
).rows.map((row: any) => row.doc)
await clearMetadata()
console.log("You have " + automations.length + " automations")
const promises = []
console.log("Disabling prod crons..")

View file

@ -31,7 +31,7 @@ describe("Run through some parts of the automations system", () => {
it("should be able to init in builder", async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1 })
await wait(100)
expect(thread).toHaveBeenCalled()
expect(thread.execute).toHaveBeenCalled()
})
it("should be able to init in prod", async () => {
@ -52,7 +52,7 @@ describe("Run through some parts of the automations system", () => {
}
})
await wait(100)
expect(thread).toHaveBeenCalledWith(makePartial({
expect(thread.execute).toHaveBeenCalledWith(makePartial({
data: {
event: {
fields: {

View file

@ -6,10 +6,16 @@ import newid from "../db/newid"
import { updateEntityMetadata } from "../utilities"
import { MetadataTypes, WebhookType } from "../constants"
import { getProdAppID, doWithDB } from "@budibase/backend-core/db"
import { getAutomationMetadataParams } from "../db/utils"
import { cloneDeep } from "lodash/fp"
import { getAppDB, getAppId } from "@budibase/backend-core/context"
import {
getAppDB,
getAppId,
getProdAppDB,
} from "@budibase/backend-core/context"
import { tenancy } from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import { Automation } from "@budibase/types"
const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
@ -82,6 +88,26 @@ export async function disableAllCrons(appId: any) {
return Promise.all(promises)
}
export async function disableCron(jobId: string, jobKey: string) {
await queue.removeRepeatableByKey(jobKey)
await queue.removeJobs(jobId)
}
export async function clearMetadata() {
const db = getProdAppDB()
const automationMetadata = (
await db.allDocs(
getAutomationMetadataParams({
include_docs: true,
})
)
).rows.map((row: any) => row.doc)
for (let metadata of automationMetadata) {
metadata._deleted = true
}
await db.bulkDocs(automationMetadata)
}
/**
* This function handles checking of any cron jobs that need to be enabled/updated.
* @param {string} appId The ID of the app in which we are checking for webhooks
@ -204,3 +230,30 @@ export async function checkForWebhooks({ oldAuto, newAuto }: any) {
export async function cleanupAutomations(appId: any) {
await disableAllCrons(appId)
}
/**
* Checks if the supplied automation is of a recurring type.
* @param automation The automation to check.
* @return {boolean} whether it is recurring (cron).
*/
export function isRecurring(automation: Automation) {
return automation.definition.trigger.stepId === definitions.CRON.stepId
}
export function isErrorInOutput(output: {
steps: { outputs?: { success: boolean } }[]
}) {
let first = true,
error = false
for (let step of output.steps) {
// skip the trigger, it's always successful if the automation ran
if (first) {
first = false
continue
}
if (!step.outputs?.success) {
error = true
}
}
return error
}
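
isRecurring and isErrorInOutput are the two predicates the orchestrator relies on below: the first keys off the CRON trigger step, the second ignores the trigger entry and flags any later step whose outputs.success is falsy. A quick usage sketch (the relative path mirrors how thread/automation.ts imports these helpers):

```ts
// Usage sketch only; output values are invented.
import { isErrorInOutput, isRecurring } from "../automations/utils"

const output = {
  steps: [
    { outputs: { success: true } },  // trigger entry – always skipped
    { outputs: { success: true } },
    { outputs: { success: false } }, // one failed step marks the whole run as an error
  ],
}

isErrorInOutput(output) // true
// isRecurring(automation) is true only when the trigger's stepId is the CRON step
```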

View file

@ -208,10 +208,7 @@ exports.AutomationErrors = {
FAILURE_CONDITION: "FAILURE_CONDITION_MET",
}
exports.LoopStepTypes = {
ARRAY: "Array",
STRING: "String",
}
// pass through the list from the auth/core lib
exports.ObjectStoreBuckets = ObjectStoreBuckets
exports.MAX_AUTOMATION_RECURRING_ERRORS = 5

View file

@ -41,6 +41,7 @@ const DocumentTypes = {
METADATA: "metadata",
MEM_VIEW: "view",
USER_FLAG: "flag",
AUTOMATION_METADATA: "meta_au",
}
const InternalTables = {
@ -311,6 +312,21 @@ exports.generateQueryID = datasourceId => {
}${SEPARATOR}${datasourceId}${SEPARATOR}${newid()}`
}
/**
* Generates a metadata ID for automations, used to track errors in recurring
* automations etc.
*/
exports.generateAutomationMetadataID = automationId => {
return `${DocumentTypes.AUTOMATION_METADATA}${SEPARATOR}${automationId}`
}
/**
* Retrieve all automation metadata in an app database.
*/
exports.getAutomationMetadataParams = (otherProps = {}) => {
return getDocParams(DocumentTypes.AUTOMATION_METADATA, null, otherProps)
}
/**
* Gets parameters for retrieving a query, this is a utility function for the getDocParams function.
*/
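
Each automation gets exactly one metadata document, keyed by the automation ID under the new meta_au prefix; getAutomationMetadataParams is what clearMetadata uses to list them all. The resulting ID shape, assuming the usual underscore SEPARATOR (the automation ID below is a placeholder):

```ts
// ID shape only – reimplemented here for illustration, assuming SEPARATOR is "_".
const AUTOMATION_METADATA = "meta_au"
const SEPARATOR = "_"

const generateAutomationMetadataID = (automationId: string) =>
  `${AUTOMATION_METADATA}${SEPARATOR}${automationId}`

console.log(generateAutomationMetadataID("au_b6af1283"))
// -> "meta_au_au_b6af1283"
```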

View file

@ -0,0 +1,49 @@
import {
Automation,
AutomationResults,
AutomationStep,
Document,
} from "@budibase/types"
export enum LoopStepTypes {
ARRAY = "Array",
STRING = "String",
}
export interface LoopStep extends AutomationStep {
inputs: {
option: LoopStepTypes
[key: string]: any
}
}
export interface LoopInput {
binding: string[] | string
}
export interface TriggerOutput {
metadata?: any
appId?: string
timestamp?: number
}
export interface AutomationEvent {
data: {
automation: Automation
event: any
}
opts?: {
repeat?: {
jobId: string
}
}
}
export interface AutomationContext extends AutomationResults {
steps: any[]
trigger: any
}
export interface AutomationMetadata extends Document {
errorCount?: number
}
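
For reference, example values for the two new interfaces the orchestrator uses most. The IDs, the empty automation cast, and the payload are placeholders; only the shapes come from the file above:

```ts
import { Automation } from "@budibase/types"
import { AutomationMetadata, AutomationEvent } from "../definitions/automations"

// Illustrative values only.
const metadata: AutomationMetadata = {
  _id: "meta_au_au_b6af1283", // one doc per automation, see generateAutomationMetadataID
  errorCount: 2,              // consecutive failed runs recorded so far
}

const event: AutomationEvent = {
  data: { automation: {} as Automation, event: { appId: "app_prod_123" } },
  opts: { repeat: { jobId: "repeat:au_b6af1283" } }, // only present for repeatable (CRON) jobs
}
```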

View file

@ -1,36 +1,47 @@
require("./utils").threadSetup()
const actions = require("../automations/actions")
const automationUtils = require("../automations/automationUtils")
const AutomationEmitter = require("../events/AutomationEmitter")
const { processObject } = require("@budibase/string-templates")
const { DocumentTypes } = require("../db/utils")
const { definitions: triggerDefs } = require("../automations/triggerInfo")
import { default as threadUtils } from "./utils"
threadUtils.threadSetup()
import { isRecurring, disableCron, isErrorInOutput } from "../automations/utils"
import { default as actions } from "../automations/actions"
import { default as automationUtils } from "../automations/automationUtils"
import { default as AutomationEmitter } from "../events/AutomationEmitter"
import { generateAutomationMetadataID, isProdAppID } from "../db/utils"
import { definitions as triggerDefs } from "../automations/triggerInfo"
import { AutomationErrors, MAX_AUTOMATION_RECURRING_ERRORS } from "../constants"
import { storeLog } from "../automations/logging"
import { Automation, AutomationStep, AutomationStatus } from "@budibase/types"
import {
LoopStep,
LoopStepTypes,
LoopInput,
AutomationEvent,
TriggerOutput,
AutomationContext,
AutomationMetadata,
} from "../definitions/automations"
import { WorkerCallback } from "./definitions"
const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
const { AutomationErrors, LoopStepTypes } = require("../constants")
const { storeLog } = require("../automations/logging")
const { logAlertWithInfo, logWarn } = require("@budibase/backend-core/logging")
const { processObject } = require("@budibase/string-templates")
const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId
const LOOP_STEP_ID = actions.ACTION_DEFINITIONS.LOOP.stepId
const CRON_STEP_ID = triggerDefs.CRON.stepId
const STOPPED_STATUS = { success: true, status: "STOPPED" }
const STOPPED_STATUS = { success: true, status: AutomationStatus.STOPPED }
const { cloneDeep } = require("lodash/fp")
const env = require("../environment")
function typecastForLooping(loopStep, input) {
function typecastForLooping(loopStep: LoopStep, input: LoopInput) {
if (!input || !input.binding) {
return null
}
const isArray = Array.isArray(input.binding),
isString = typeof input.binding === "string"
try {
switch (loopStep.inputs.option) {
case LoopStepTypes.ARRAY:
if (isString) {
if (typeof input.binding === "string") {
return JSON.parse(input.binding)
}
break
case LoopStepTypes.STRING:
if (isArray) {
if (Array.isArray(input.binding)) {
return input.binding.join(",")
}
break
@ -41,7 +52,7 @@ function typecastForLooping(loopStep, input) {
return input.binding
}
function getLoopIterations(loopStep, input) {
function getLoopIterations(loopStep: LoopStep, input: LoopInput) {
const binding = typecastForLooping(loopStep, input)
if (!loopStep || !binding) {
return 1
@ -57,11 +68,24 @@ function getLoopIterations(loopStep, input) {
* inputs and handles any outputs.
*/
class Orchestrator {
constructor(automation, triggerOutput = {}) {
this._metadata = triggerOutput.metadata
this._chainCount = this._metadata ? this._metadata.automationChainCount : 0
this._appId = triggerOutput.appId
this._app = null
_chainCount: number
_appId: string
_automation: Automation
_emitter: any
_context: AutomationContext
_repeat?: { jobId: string; jobKey: string }
executionOutput: AutomationContext
constructor(automation: Automation, triggerOutput: TriggerOutput, opts: any) {
const metadata = triggerOutput.metadata
this._chainCount = metadata ? metadata.automationChainCount : 0
this._appId = triggerOutput.appId as string
if (opts?.repeat) {
this._repeat = {
jobId: opts.repeat.jobId,
jobKey: opts.repeat.key,
}
}
const triggerStepId = automation.definition.trigger.stepId
triggerOutput = this.cleanupTriggerOutputs(triggerStepId, triggerOutput)
// remove from context
@ -79,14 +103,14 @@ class Orchestrator {
this.updateExecutionOutput(triggerId, triggerStepId, null, triggerOutput)
}
cleanupTriggerOutputs(stepId, triggerOutput) {
cleanupTriggerOutputs(stepId: string, triggerOutput: TriggerOutput) {
if (stepId === CRON_STEP_ID) {
triggerOutput.timestamp = Date.now()
}
return triggerOutput
}
async getStepFunctionality(stepId) {
async getStepFunctionality(stepId: string) {
let step = await actions.getAction(stepId)
if (step == null) {
throw `Cannot find automation step by name ${stepId}`
@ -94,25 +118,107 @@ class Orchestrator {
return step
}
async getApp() {
if (this._app) {
return this._app
}
async getMetadata(): Promise<AutomationMetadata> {
const metadataId = generateAutomationMetadataID(this._automation._id)
const db = getAppDB()
this._app = await db.get(DocumentTypes.APP_METADATA)
return this._app
let metadata: AutomationMetadata
try {
metadata = await db.get(metadataId)
} catch (err) {
metadata = {
_id: metadataId,
errorCount: 0,
}
}
return metadata
}
updateExecutionOutput(id, stepId, inputs, outputs) {
async checkIfShouldStop(metadata: AutomationMetadata): Promise<boolean> {
if (!metadata.errorCount || !this._repeat) {
return false
}
const automation = this._automation
const trigger = automation.definition.trigger
if (metadata.errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) {
logWarn(
`CRON disabled due to errors - ${this._appId}/${this._automation._id}`
)
await disableCron(this._repeat?.jobId, this._repeat?.jobKey)
this.updateExecutionOutput(
trigger.id,
trigger.stepId,
{},
{
status: AutomationStatus.STOPPED_ERROR,
success: false,
}
)
await storeLog(automation, this.executionOutput)
return true
}
return false
}
async updateMetadata(metadata: AutomationMetadata) {
const output = this.executionOutput,
automation = this._automation
if (!output || !isRecurring(automation)) {
return
}
const count = metadata.errorCount
const isError = isErrorInOutput(output)
// nothing to do in this scenario, escape
if (!count && !isError) {
return
}
if (isError) {
metadata.errorCount = count ? count + 1 : 1
} else {
metadata.errorCount = 0
}
const db = getAppDB()
try {
await db.put(metadata)
} catch (err) {
logAlertWithInfo(
"Failed to write automation metadata",
db.name,
automation._id,
err
)
}
}
updateExecutionOutput(id: string, stepId: string, inputs: any, outputs: any) {
const stepObj = { id, stepId, inputs, outputs }
// replacing trigger when disabling CRON
if (
stepId === CRON_STEP_ID &&
outputs.status === AutomationStatus.STOPPED_ERROR
) {
this.executionOutput.trigger = stepObj
this.executionOutput.steps = [stepObj]
return
}
// first entry is always the trigger (constructor)
if (this.executionOutput.steps.length === 0) {
if (
this.executionOutput.steps.length === 0 ||
this.executionOutput.trigger.id === id
) {
this.executionOutput.trigger = stepObj
}
this.executionOutput.steps.push(stepObj)
}
updateContextAndOutput(loopStepNumber, step, output, result) {
updateContextAndOutput(
loopStepNumber: number | undefined,
step: AutomationStep,
output: any,
result: { success: boolean; status: string }
) {
if (!loopStepNumber) {
throw new Error("No loop step number provided.")
}
this.executionOutput.steps.splice(loopStepNumber, 0, {
id: step.id,
stepId: step.stepId,
@ -133,11 +239,22 @@ class Orchestrator {
async execute() {
let automation = this._automation
let stopped = false
let loopStep = null
let loopStep: AutomationStep | undefined = undefined
let stepCount = 0
let loopStepNumber = null
let loopSteps = []
let loopStepNumber: any = undefined
let loopSteps: LoopStep[] | undefined = []
let metadata
// check if this is a recurring automation,
if (isProdAppID(this._appId) && isRecurring(automation)) {
metadata = await this.getMetadata()
const shouldStop = await this.checkIfShouldStop(metadata)
if (shouldStop) {
return
}
}
for (let step of automation.definition.steps) {
stepCount++
let input,
@ -151,7 +268,7 @@ class Orchestrator {
if (loopStep) {
input = await processObject(loopStep.inputs, this._context)
iterations = getLoopIterations(loopStep, input)
iterations = getLoopIterations(loopStep as LoopStep, input)
}
for (let index = 0; index < iterations; index++) {
@ -166,14 +283,17 @@ class Orchestrator {
let tempOutput = { items: loopSteps, iterations: iterationCount }
try {
newInput.binding = typecastForLooping(loopStep, newInput)
newInput.binding = typecastForLooping(
loopStep as LoopStep,
newInput
)
} catch (err) {
this.updateContextAndOutput(loopStepNumber, step, tempOutput, {
status: AutomationErrors.INCORRECT_TYPE,
success: false,
})
loopSteps = null
loopStep = null
loopSteps = undefined
loopStep = undefined
break
}
@ -223,8 +343,8 @@ class Orchestrator {
status: AutomationErrors.MAX_ITERATIONS,
success: true,
})
loopSteps = null
loopStep = null
loopSteps = undefined
loopStep = undefined
break
}
@ -232,7 +352,7 @@ class Orchestrator {
const currentItem = this._context.steps[loopStepNumber]?.currentItem
if (currentItem && typeof currentItem === "object") {
isFailure = Object.keys(currentItem).some(value => {
return currentItem[value] === loopStep.inputs.failure
return currentItem[value] === loopStep?.inputs.failure
})
} else {
isFailure = currentItem && currentItem === loopStep.inputs.failure
@ -243,8 +363,8 @@ class Orchestrator {
status: AutomationErrors.FAILURE_CONDITION,
success: false,
})
loopSteps = null
loopStep = null
loopSteps = undefined
loopStep = undefined
break
}
}
@ -295,7 +415,7 @@ class Orchestrator {
if (loopStep) {
iterationCount++
if (index === iterations - 1) {
loopStep = null
loopStep = undefined
this._context.steps.splice(loopStepNumber, 1)
break
}
@ -316,22 +436,26 @@ class Orchestrator {
})
this._context.steps.splice(loopStepNumber, 0, tempOutput)
loopSteps = null
loopSteps = undefined
}
}
// store the logs for the automation run
await storeLog(this._automation, this.executionOutput)
if (isProdAppID(this._appId) && isRecurring(automation) && metadata) {
await this.updateMetadata(metadata)
}
return this.executionOutput
}
}
module.exports = (input, callback) => {
export function execute(input: AutomationEvent, callback: WorkerCallback) {
const appId = input.data.event.appId
doInAppContext(appId, async () => {
const automationOrchestrator = new Orchestrator(
input.data.automation,
input.data.event
input.data.event,
input.opts
)
try {
const response = await automationOrchestrator.execute()
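
The net effect of the orchestrator changes: a production CRON automation that fails MAX_AUTOMATION_RECURRING_ERRORS (5) runs in a row is disabled rather than retried forever, and its final log entry carries the new stopped_error status. A self-contained illustration of just the counting policy (this mirrors getMetadata/updateMetadata/checkIfShouldStop above but is not the real implementation):

```ts
// Standalone illustration only – not Budibase code.
const MAX_AUTOMATION_RECURRING_ERRORS = 5

interface AutomationMetadata {
  _id: string
  errorCount?: number
}

// true when the next run should disable the CRON job instead of executing steps
const shouldStop = (meta: AutomationMetadata) =>
  (meta.errorCount || 0) >= MAX_AUTOMATION_RECURRING_ERRORS

// reset on a clean run, increment when any step failed
const afterRun = (meta: AutomationMetadata, runFailed: boolean): AutomationMetadata => ({
  ...meta,
  errorCount: runFailed ? (meta.errorCount || 0) + 1 : 0,
})

let meta: AutomationMetadata = { _id: "meta_au_au_b6af1283" }
for (let i = 0; i < 5; i++) {
  meta = afterRun(meta, true)
}
console.log(shouldStop(meta)) // true – the orchestrator would now call disableCron()
```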

View file

@ -0,0 +1,18 @@
export type WorkerCallback = (error: any, response?: any) => void
export interface QueryEvent {
appId?: string
datasource: any
queryVerb: string
fields: { [key: string]: any }
parameters: { [key: string]: any }
pagination?: any
transformer: any
queryId: string
ctx?: any
}
export interface QueryVariable {
queryId: string
name: string
}

View file

@ -18,26 +18,23 @@ function typeToFile(type: any) {
default:
throw "Unknown thread type"
}
// have to use require here, to make it work with worker-farm
return require.resolve(filename)
}
export class Thread {
type: any
count: any
disableThreading: any
workers: any
timeoutMs: any
disableThreading: boolean
static workerRefs: any[] = []
constructor(type: any, opts: any = { timeoutMs: null, count: 1 }) {
this.type = type
this.count = opts.count ? opts.count : 1
this.disableThreading =
env.isTest() ||
env.DISABLE_THREADING ||
this.count === 0 ||
env.isInThread()
this.disableThreading = this.shouldDisableThreading()
if (!this.disableThreading) {
const workerOpts: any = {
autoStart: true,
@ -47,33 +44,44 @@ export class Thread {
this.timeoutMs = opts.timeoutMs
workerOpts.maxCallTime = opts.timeoutMs
}
this.workers = workerFarm(workerOpts, typeToFile(type))
this.workers = workerFarm(workerOpts, typeToFile(type), ["execute"])
Thread.workerRefs.push(this.workers)
}
}
shouldDisableThreading(): boolean {
return !!(
env.isTest() ||
env.DISABLE_THREADING ||
this.count === 0 ||
env.isInThread()
)
}
run(data: any) {
const timeout = this.timeoutMs
return new Promise((resolve, reject) => {
let fncToCall
function fire(worker: any) {
worker.execute(data, (err: any, response: any) => {
if (err && err.type === "TimeoutError") {
reject(
new Error(`Query response time exceeded ${timeout}ms timeout.`)
)
} else if (err) {
reject(err)
} else {
resolve(response)
}
})
}
// if in test then don't use threading
if (this.disableThreading) {
fncToCall = require(typeToFile(this.type))
import(typeToFile(this.type)).then((thread: any) => {
fire(thread)
})
} else {
fncToCall = this.workers
fire(this.workers)
}
fncToCall(data, (err: any, response: any) => {
if (err && err.type === "TimeoutError") {
reject(
new Error(
`Query response time exceeded ${this.timeoutMs}ms timeout.`
)
)
} else if (err) {
reject(err)
} else {
resolve(response)
}
})
})
}
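
workerFarm's third argument is the list of exported method names to proxy, so passing ["execute"] exposes the thread modules' new named export; when threading is disabled, run() dynamically imports the same module and calls execute on it directly, which lets both paths share the fire() helper. A consumer-side sketch (the relative import path and the "AUTOMATION" thread-type value are assumptions, and the payload stands in for a real automation event):

```ts
import { Thread } from "../threads"

async function runAutomationJob(payload: any) {
  const thread = new Thread("AUTOMATION", { timeoutMs: 12000 })
  // Resolves with whatever the thread module's execute(data, callback) produces,
  // whether it ran in a worker-farm child or in-process via the dynamic import.
  return thread.run(payload)
}
```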

View file

@ -1,5 +1,6 @@
const threadUtils = require("./utils")
import { default as threadUtils } from "./utils"
threadUtils.threadSetup()
import { WorkerCallback, QueryEvent, QueryVariable } from "./definitions"
const ScriptRunner = require("../utilities/scriptRunner")
const { integrations } = require("../integrations")
const { processStringSync } = require("@budibase/string-templates")
@ -19,7 +20,22 @@ const {
} = require("../integrations/queries/sql")
class QueryRunner {
constructor(input, flags = { noRecursiveQuery: false }) {
datasource: any
queryVerb: string
queryId: string
fields: any
parameters: any
pagination: any
transformer: any
cachedVariables: any[]
ctx: any
queryResponse: any
noRecursiveQuery: boolean
hasRerun: boolean
hasRefreshedOAuth: boolean
hasDynamicVariables: boolean
constructor(input: QueryEvent, flags = { noRecursiveQuery: false }) {
this.datasource = input.datasource
this.queryVerb = input.queryVerb
this.fields = input.fields
@ -37,9 +53,10 @@ class QueryRunner {
this.queryResponse = {}
this.hasRerun = false
this.hasRefreshedOAuth = false
this.hasDynamicVariables = false
}
async execute() {
async execute(): Promise<any> {
let { datasource, fields, queryVerb, transformer } = this
let datasourceClone = cloneDeep(datasource)
@ -52,7 +69,7 @@ class QueryRunner {
if (datasourceClone.config.authConfigs) {
datasourceClone.config.authConfigs =
datasourceClone.config.authConfigs.map(config => {
datasourceClone.config.authConfigs.map((config: any) => {
return enrichQueryFields(config, this.ctx)
})
}
@ -138,8 +155,8 @@ class QueryRunner {
}
// map into JSON if just raw primitive here
if (rows.find(row => typeof row !== "object")) {
rows = rows.map(value => ({ value }))
if (rows.find((row: any) => typeof row !== "object")) {
rows = rows.map((value: any) => ({ value }))
}
// get all the potential fields in the schema
@ -152,7 +169,7 @@ class QueryRunner {
return { rows, keys, info, extra, pagination }
}
async runAnotherQuery(queryId, parameters) {
async runAnotherQuery(queryId: string, parameters: any) {
const db = getAppDB()
const query = await db.get(queryId)
const datasource = await db.get(query.datasourceId)
@ -163,12 +180,13 @@ class QueryRunner {
fields: query.fields,
parameters,
transformer: query.transformer,
queryId,
},
{ noRecursiveQuery: true }
).execute()
}
async refreshOAuth2(ctx) {
async refreshOAuth2(ctx: any) {
const { oauth2, providerType, _id } = ctx.user
const { configId } = ctx.auth
@ -200,7 +218,7 @@ class QueryRunner {
return resp
}
async getDynamicVariable(variable) {
async getDynamicVariable(variable: QueryVariable) {
let { parameters } = this
const queryId = variable.queryId,
name = variable.name
@ -233,7 +251,7 @@ class QueryRunner {
if (!this.noRecursiveQuery) {
// need to see if this uses any variables
const stringFields = JSON.stringify(fields)
const foundVars = dynamicVars.filter(variable => {
const foundVars = dynamicVars.filter((variable: QueryVariable) => {
// don't allow a query to use its own dynamic variable (loop)
if (variable.queryId === this.queryId) {
return false
@ -242,7 +260,9 @@ class QueryRunner {
const regex = new RegExp(`{{[ ]*${variable.name}[ ]*}}`)
return regex.test(stringFields)
})
const dynamics = foundVars.map(dynVar => this.getDynamicVariable(dynVar))
const dynamics = foundVars.map((dynVar: QueryVariable) =>
this.getDynamicVariable(dynVar)
)
const responses = await Promise.all(dynamics)
for (let i = 0; i < foundVars.length; i++) {
const variable = foundVars[i]
@ -264,7 +284,7 @@ class QueryRunner {
}
}
module.exports = (input, callback) => {
export function execute(input: QueryEvent, callback: WorkerCallback) {
doInAppContext(input.appId, async () => {
const Runner = new QueryRunner(input)
try {

View file

@ -1,10 +1,11 @@
import { QueryVariable } from "./definitions"
const env = require("../environment")
const db = require("../db")
const redis = require("@budibase/backend-core/redis")
const { SEPARATOR } = require("@budibase/backend-core/db")
const VARIABLE_TTL_SECONDS = 3600
let client
let client: any
async function getClient() {
if (!client) {
@ -14,10 +15,16 @@ async function getClient() {
}
process.on("exit", async () => {
if (client) await client.finish()
if (client) {
await client.finish()
}
})
exports.threadSetup = () => {
function makeVariableKey(queryId: string, variable: string) {
return `${queryId}${SEPARATOR}${variable}`
}
export function threadSetup() {
// don't run this if not threading
if (env.isTest() || env.DISABLE_THREADING) {
return
@ -27,16 +34,15 @@ exports.threadSetup = () => {
db.init()
}
function makeVariableKey(queryId, variable) {
return `${queryId}${SEPARATOR}${variable}`
}
exports.checkCacheForDynamicVariable = async (queryId, variable) => {
export async function checkCacheForDynamicVariable(
queryId: string,
variable: string
) {
const cache = await getClient()
return cache.get(makeVariableKey(queryId, variable))
}
exports.invalidateDynamicVariables = async cachedVars => {
export async function invalidateDynamicVariables(cachedVars: QueryVariable[]) {
const cache = await getClient()
let promises = []
for (let variable of cachedVars) {
@ -47,7 +53,11 @@ exports.invalidateDynamicVariables = async cachedVars => {
await Promise.all(promises)
}
exports.storeDynamicVariable = async (queryId, variable, value) => {
export async function storeDynamicVariable(
queryId: string,
variable: string,
value: any
) {
const cache = await getClient()
await cache.store(
makeVariableKey(queryId, variable),
@ -56,7 +66,7 @@ exports.storeDynamicVariable = async (queryId, variable, value) => {
)
}
exports.formatResponse = resp => {
export function formatResponse(resp: any) {
if (typeof resp === "string") {
try {
resp = JSON.parse(resp)
@ -67,7 +77,7 @@ exports.formatResponse = resp => {
return resp
}
exports.hasExtraData = response => {
export function hasExtraData(response: any) {
return (
typeof response === "object" &&
!Array.isArray(response) &&
@ -76,3 +86,12 @@ exports.hasExtraData = response => {
response.info != null
)
}
export default {
hasExtraData,
formatResponse,
storeDynamicVariable,
invalidateDynamicVariables,
checkCacheForDynamicVariable,
threadSetup,
}
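
Dynamic variables are cached in Redis under a queryId + SEPARATOR + name key for VARIABLE_TTL_SECONDS (one hour), and the new default export keeps the older require-style consumers working. A usage sketch with placeholder IDs and values:

```ts
// Usage sketch; "query_123" and "abc123" are placeholders.
import {
  storeDynamicVariable,
  checkCacheForDynamicVariable,
  invalidateDynamicVariables,
} from "./utils"

async function demo() {
  await storeDynamicVariable("query_123", "authToken", "abc123") // cached for 3600s
  const cached = await checkCacheForDynamicVariable("query_123", "authToken") // "abc123" until it expires
  await invalidateDynamicVariables([{ queryId: "query_123", name: "authToken" }])
  return cached
}
```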

View file

@ -109,6 +109,10 @@ class InMemoryQueue {
async clean() {
return []
}
async getJob() {
return {}
}
}
module.exports = InMemoryQueue

View file

@ -156,9 +156,9 @@
adal-node "^0.2.2"
"@azure/storage-blob@^12.5.0":
version "12.11.0"
resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.11.0.tgz#2e27902ab293715411ab1f7c8fae422ad0b4b827"
integrity sha512-na+FisoARuaOWaHWpmdtk3FeuTWf2VWamdJ9/TJJzj5ZdXPLC3juoDgFs6XVuJIoK30yuBpyFBEDXVRK4pB7Tg==
version "12.10.0"
resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.10.0.tgz#b92269f45a1765700a900b41ca81a474a6e36ea4"
integrity sha512-FBEPKGnvtQJS8V8Tg1P9obgmVD9AodrIfwtwhBpsjenClhFyugMp3HPJY0tF7rInUB/CivKBCbnQKrUnKxqxzw==
dependencies:
"@azure/abort-controller" "^1.0.0"
"@azure/core-http" "^2.0.0"
@ -1964,24 +1964,29 @@
"@jridgewell/sourcemap-codec" "^1.4.10"
"@jridgewell/gen-mapping@^0.3.0":
version "0.3.1"
resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz#cf92a983c83466b8c0ce9124fadeaf09f7c66ea9"
integrity sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg==
version "0.3.2"
resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9"
integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==
dependencies:
"@jridgewell/set-array" "^1.0.0"
"@jridgewell/set-array" "^1.0.1"
"@jridgewell/sourcemap-codec" "^1.4.10"
"@jridgewell/trace-mapping" "^0.3.9"
"@jridgewell/resolve-uri@^3.0.3":
version "3.0.7"
resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe"
integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==
version "3.1.0"
resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78"
integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==
"@jridgewell/set-array@^1.0.0":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea"
integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ==
"@jridgewell/set-array@^1.0.1":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72"
integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==
"@jridgewell/source-map@^0.3.2":
version "0.3.2"
resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb"
@ -1991,11 +1996,11 @@
"@jridgewell/trace-mapping" "^0.3.9"
"@jridgewell/sourcemap-codec@^1.4.10":
version "1.4.13"
resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c"
integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==
version "1.4.14"
resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24"
integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==
"@jridgewell/trace-mapping@^0.3.7", "@jridgewell/trace-mapping@^0.3.9":
"@jridgewell/trace-mapping@^0.3.7":
version "0.3.13"
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz#dcfe3e95f224c8fe97a87a5235defec999aa92ea"
integrity sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w==
@ -2003,6 +2008,14 @@
"@jridgewell/resolve-uri" "^3.0.3"
"@jridgewell/sourcemap-codec" "^1.4.10"
"@jridgewell/trace-mapping@^0.3.9":
version "0.3.14"
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed"
integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==
dependencies:
"@jridgewell/resolve-uri" "^3.0.3"
"@jridgewell/sourcemap-codec" "^1.4.10"
"@jsdevtools/ono@^7.1.3":
version "7.1.3"
resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
@ -2847,7 +2860,7 @@
"@types/bson" "*"
"@types/node" "*"
"@types/node-fetch@2.6.1":
"@types/node-fetch@2.6.1", "@types/node-fetch@^2.5.0":
version "2.6.1"
resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.1.tgz#8f127c50481db65886800ef496f20bbf15518975"
integrity sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA==
@ -2855,14 +2868,6 @@
"@types/node" "*"
form-data "^3.0.0"
"@types/node-fetch@^2.5.0":
version "2.6.2"
resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da"
integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==
dependencies:
"@types/node" "*"
form-data "^3.0.0"
"@types/node@*", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0":
version "17.0.41"
resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.41.tgz#1607b2fd3da014ae5d4d1b31bc792a39348dfb9b"
@ -3731,11 +3736,6 @@ atomic-sleep@^1.0.0:
resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b"
integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==
available-typed-arrays@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7"
integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==
aws-sdk@2.1030.0:
version "2.1030.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82"
@ -3752,9 +3752,9 @@ aws-sdk@2.1030.0:
xml2js "0.4.19"
aws-sdk@^2.878.0:
version "2.1174.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1174.0.tgz#3e2acb1ee29229cc5d97015b2d1a18c41e967979"
integrity sha512-t/Cwbdunmoj3WAI+u+hw/kr6mla1sYCn+VncxxIjkACStA47+ZTsfd7cQfpoVMit5KubkHaJ3SHX4/qvmt0Jfg==
version "2.1152.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1152.0.tgz#73e4fb81b3a9c289234b5d6848bcdb854f169bdf"
integrity sha512-Lqwk0bDhm3vzpYb3AAM9VgGHeDpbB8+o7UJnP9R+CO23kJfi/XRpKihAcbyKDD/AUQ+O1LJaUVpvaJYLS9Am7w==
dependencies:
buffer "4.9.2"
events "1.1.1"
@ -3763,7 +3763,6 @@ aws-sdk@^2.878.0:
querystring "0.2.0"
sax "1.2.1"
url "0.10.3"
util "^0.12.4"
uuid "8.0.0"
xml2js "0.4.19"
@ -5504,7 +5503,7 @@ error-inject@^1.0.0:
resolved "https://registry.yarnpkg.com/error-inject/-/error-inject-1.0.0.tgz#e2b3d91b54aed672f309d950d154850fa11d4f37"
integrity sha512-JM8N6PytDbmIYm1IhPWlo8vr3NtfjhDY/1MhD/a5b/aad/USE8a0+NsqE9d5n+GVGmuNkPQWm4bFQWv18d8tMg==
es-abstract@^1.17.5, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.0, es-abstract@^1.20.1:
es-abstract@^1.17.5, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1:
version "1.20.1"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814"
integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==
@ -7454,14 +7453,6 @@ is-accessor-descriptor@^1.0.0:
dependencies:
kind-of "^6.0.0"
is-arguments@^1.0.4:
version "1.1.1"
resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b"
integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==
dependencies:
call-bind "^1.0.2"
has-tostringtag "^1.0.0"
is-arrayish@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
@ -7768,17 +7759,6 @@ is-type-of@^1.0.0:
is-class-hotfix "~0.0.6"
isstream "~0.1.2"
is-typed-array@^1.1.3, is-typed-array@^1.1.9:
version "1.1.9"
resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.9.tgz#246d77d2871e7d9f5aeb1d54b9f52c71329ece67"
integrity sha512-kfrlnTTn8pZkfpJMUgYD7YZ3qzeJgWUn8XfVYBARc4wnmNOmLbmuuaAs3q5fvB0UJOn6yHAKaGTPM7d6ezoD/A==
dependencies:
available-typed-arrays "^1.0.5"
call-bind "^1.0.2"
es-abstract "^1.20.0"
for-each "^0.3.3"
has-tostringtag "^1.0.0"
is-typedarray@^1.0.0, is-typedarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
@ -9974,16 +9954,11 @@ moment-timezone@^0.5.15, moment-timezone@^0.5.31:
dependencies:
moment ">= 2.9.0"
"moment@>= 2.9.0":
"moment@>= 2.9.0", moment@^2.29.3:
version "2.29.3"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3"
integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw==
moment@^2.29.3:
version "2.29.4"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==
mongodb@3.6.3:
version "3.6.3"
resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-3.6.3.tgz#eddaed0cc3598474d7a15f0f2a5b04848489fd05"
@ -12317,7 +12292,7 @@ signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3:
simple-lru-cache@^0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/simple-lru-cache/-/simple-lru-cache-0.0.2.tgz#d59cc3a193c1a5d0320f84ee732f6e4713e511dd"
integrity sha512-uEv/AFO0ADI7d99OHDmh1QfYzQk/izT1vCmu/riQfh7qjBVUUgRT87E5s5h7CxWCA/+YoZerykpEthzVrW3LIw==
integrity sha1-1ZzDoZPBpdAyD4Tucy9uRxPlEd0=
simple-swizzle@^0.2.2:
version "0.2.2"
@ -12388,9 +12363,9 @@ snowflake-promise@^4.5.0:
snowflake-sdk "^1.6.0"
snowflake-sdk@^1.6.0:
version "1.6.11"
resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.11.tgz#2797c816d0d2af6d56180949e1364e53df8a9c13"
integrity sha512-w4oCXjNQ1peAJjhnrwihr+epYw1pSxbe5/+PdxexYb2rzowyOn0RA5PFbir90q/dx0jzM2gvPiHDjnSBEZ1/zA==
version "1.6.10"
resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.10.tgz#c6c4f267edbc50d3c1ef6fcc2651188bb8545dce"
integrity sha512-kguQQSGhmNqZfmN/yZNDaIaMMktTcrTYBjtyx+szJzV69b5F+5b77btpYp+bCFqao69otVM+IPUtb3sugvCVnQ==
dependencies:
"@azure/storage-blob" "^12.5.0"
"@techteamer/ocsp" "1.0.0"
@ -13134,9 +13109,9 @@ terser-webpack-plugin@^5.1.3:
terser "^5.7.2"
terser@^5.7.2:
version "5.14.0"
resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.0.tgz#eefeec9af5153f55798180ee2617f390bdd285e2"
integrity sha512-JC6qfIEkPBd9j1SMO3Pfn+A6w2kQV54tv+ABQLgZr7dA3k/DL/OBoYSWxzVpZev3J+bUHXfr55L8Mox7AaNo6g==
version "5.14.2"
resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10"
integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==
dependencies:
"@jridgewell/source-map" "^0.3.2"
acorn "^8.5.0"
@ -13776,18 +13751,6 @@ util.promisify@^1.0.0, util.promisify@^1.0.1:
has-symbols "^1.0.1"
object.getownpropertydescriptors "^2.1.1"
util@^0.12.4:
version "0.12.4"
resolved "https://registry.yarnpkg.com/util/-/util-0.12.4.tgz#66121a31420df8f01ca0c464be15dfa1d1850253"
integrity sha512-bxZ9qtSlGUWSOy9Qa9Xgk11kSslpuZwaxCg4sNIDj6FLucDab2JxnHwyNTCpHMtK1MjoQiWQ6DiUMZYbSrO+Sw==
dependencies:
inherits "^2.0.3"
is-arguments "^1.0.4"
is-generator-function "^1.0.7"
is-typed-array "^1.1.3"
safe-buffer "^5.1.2"
which-typed-array "^1.1.2"
utils-merge@1.x.x:
version "1.0.1"
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
@ -14071,18 +14034,6 @@ which-module@^2.0.0:
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
which-typed-array@^1.1.2:
version "1.1.8"
resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.8.tgz#0cfd53401a6f334d90ed1125754a42ed663eb01f"
integrity sha512-Jn4e5PItbcAHyLoRDwvPj1ypu27DJbtdYXUa5zsinrUx77Uvfb0cXwwnGMTn7cjUfhhqgVQnVJCwF+7cgU7tpw==
dependencies:
available-typed-arrays "^1.0.5"
call-bind "^1.0.2"
es-abstract "^1.20.0"
for-each "^0.3.3"
has-tostringtag "^1.0.0"
is-typed-array "^1.1.9"
which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
@ -14125,23 +14076,7 @@ winston-transport@^4.5.0:
readable-stream "^3.6.0"
triple-beam "^1.3.0"
winston@^3.1.0:
version "3.8.1"
resolved "https://registry.yarnpkg.com/winston/-/winston-3.8.1.tgz#76f15b3478cde170b780234e0c4cf805c5a7fb57"
integrity sha512-r+6YAiCR4uI3N8eQNOg8k3P3PqwAm20cLKlzVD9E66Ch39+LZC+VH1UKf9JemQj2B3QoUHfKD7Poewn0Pr3Y1w==
dependencies:
"@dabh/diagnostics" "^2.0.2"
async "^3.2.3"
is-stream "^2.0.0"
logform "^2.4.0"
one-time "^1.0.0"
readable-stream "^3.4.0"
safe-stable-stringify "^2.3.1"
stack-trace "0.0.x"
triple-beam "^1.3.0"
winston-transport "^4.5.0"
winston@^3.3.3:
winston@^3.1.0, winston@^3.3.3:
version "3.7.2"
resolved "https://registry.yarnpkg.com/winston/-/winston-3.7.2.tgz#95b4eeddbec902b3db1424932ac634f887c400b1"
integrity sha512-QziIqtojHBoyzUOdQvQiar1DH0Xp9nF1A1y7NVy2DGEsz82SBDtOalS0ulTRGVT14xPX3WRWkCsdcJKqNflKng==

View file

@ -46,4 +46,4 @@
"typescript": "^4.5.5"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}

View file

@ -17,4 +17,4 @@
"rimraf": "3.0.2",
"typescript": "4.7.3"
}
}
}

View file

@ -12,6 +12,14 @@ export interface Automation extends Document {
export interface AutomationStep {
id: string
stepId: string
inputs: {
[key: string]: any
}
schema: {
inputs: {
[key: string]: any
}
}
}
export interface AutomationTrigger {
@ -23,11 +31,12 @@ export enum AutomationStatus {
SUCCESS = "success",
ERROR = "error",
STOPPED = "stopped",
STOPPED_ERROR = "stopped_error",
}
export interface AutomationResults {
automationId: string
status: string
automationId?: string
status?: AutomationStatus
trigger?: any
steps: {
stepId: string
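
AutomationResults is loosened so the orchestrator can build it up incrementally, and the status enum gains the stopped_error value that the builder renders as a yellow "Stopped" badge. For example:

```ts
import { AutomationResults, AutomationStatus } from "@budibase/types"

// Illustrative value; automationId and trigger can now be filled in after construction.
const results: AutomationResults = {
  status: AutomationStatus.STOPPED_ERROR,
  steps: [],
}
```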

View file

@ -102,4 +102,4 @@
]
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}