
Merge branch 'master' into budi-8608-ai-platform-level-config-pt-2

Commit dadce14a71 by Martin McKeaveney, 2024-09-19 10:48:01 +01:00 (committed via GitHub)
19 changed files with 1105 additions and 281 deletions

View file

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.32.4",
+  "version": "2.32.5",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View file

@@ -40,7 +40,6 @@ import { dataFilters, helpers } from "@budibase/shared-core"
 import { cloneDeep } from "lodash"
 type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
-const MAX_SQS_RELATIONSHIP_FIELDS = 63
 function getBaseLimit() {
   const envLimit = environment.SQL_MAX_ROWS
@@ -56,6 +55,20 @@ function getRelationshipLimit() {
   return envLimit || 500
 }
function prioritisedArraySort(toSort: string[], priorities: string[]) {
return toSort.sort((a, b) => {
const aPriority = priorities.find(field => field && a.endsWith(field))
const bPriority = priorities.find(field => field && b.endsWith(field))
if (aPriority && !bPriority) {
return -1
}
if (!aPriority && bPriority) {
return 1
}
return a.localeCompare(b)
})
}
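A quick standalone sketch of the sort above (field names are illustrative, and the helper is assumed to be in scope): entries ending with one of the priority fields move to the front, so a later slice cannot drop them.
const fields = ["products.name", "products.price", "products.product_id"]
// priorities would typically be the related table's primary key / primary display
console.log(prioritisedArraySort(fields, ["product_id"]))
// => ["products.product_id", "products.name", "products.price"]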
function getTableName(table?: Table): string | undefined { function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name // SQS uses the table ID rather than the table name
if ( if (
@ -877,6 +890,22 @@ class InternalBuilder {
return `'${unaliased}'${separator}${tableField}` return `'${unaliased}'${separator}${tableField}`
} }
maxFunctionParameters() {
// functions like json_build_object() in SQL have a limit on how many parameters they accept -
// this limit exists in Postgres and SQLite. It matters here because JSON columns built as part
// of relationships pass many parameters. We also apply a default limit for other databases to
// avoid building overly complex functions - this may not be strictly necessary or the best approach.
switch (this.client) {
case SqlClient.SQL_LITE:
return 127
case SqlClient.POSTGRES:
return 100
// other DBs don't have a limit, but set some sort of limit
default:
return 200
}
}
 addJsonRelationships(
   query: Knex.QueryBuilder,
   fromTable: string,
@@ -884,7 +913,7 @@
 ): Knex.QueryBuilder {
   const sqlClient = this.client
   const knex = this.knex
-  const { resource, tableAliases: aliases, endpoint } = this.query
+  const { resource, tableAliases: aliases, endpoint, meta } = this.query
   const fields = resource?.fields || []
   for (let relationship of relationships) {
     const {
@@ -899,21 +928,27 @@
     if (!toTable || !fromTable) {
       continue
     }
+    const relatedTable = meta.tables?.[toTable]
     const toAlias = aliases?.[toTable] || toTable,
       fromAlias = aliases?.[fromTable] || fromTable
     let toTableWithSchema = this.tableNameWithSchema(toTable, {
       alias: toAlias,
       schema: endpoint.schema,
     })
-    let relationshipFields = fields.filter(
-      field => field.split(".")[0] === toAlias
-    )
-    if (this.client === SqlClient.SQL_LITE) {
-      relationshipFields = relationshipFields.slice(
-        0,
-        MAX_SQS_RELATIONSHIP_FIELDS
-      )
-    }
+    const requiredFields = [
+      ...(relatedTable?.primary || []),
+      relatedTable?.primaryDisplay,
+    ].filter(field => field) as string[]
+    // sort the required fields to first in the list, so they don't get sliced out
+    let relationshipFields = prioritisedArraySort(
+      fields.filter(field => field.split(".")[0] === toAlias),
+      requiredFields
+    )
+    relationshipFields = relationshipFields.slice(
+      0,
+      Math.floor(this.maxFunctionParameters() / 2)
+    )
     const fieldList: string = relationshipFields
       .map(field => this.buildJsonField(field))
       .join(",")
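A rough sketch of why the slice above divides by two (figures assumed from the maxFunctionParameters switch, not measured): every relationship field becomes a name/value pair of arguments to json_build_object, so capping the field list at half the parameter limit keeps the generated call within bounds.
const maxParams = 100                        // assumed Postgres limit, per maxFunctionParameters()
const maxFields = Math.floor(maxParams / 2)  // at most 50 relationship fields
const fields = Array.from({ length: 80 }, (_, i) => `related.column${i}`)
const kept = fields.slice(0, maxFields)
console.log(kept.length * 2)                 // 100 arguments - at the limit, never over it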

View file

@ -595,9 +595,13 @@
let loopBlockCount = 0 let loopBlockCount = 0
   const addBinding = (name, value, icon, idx, isLoopBlock, bindingName) => {
     if (!name) return
-    const runtimeBinding = determineRuntimeBinding(name, idx, isLoopBlock)
+    const runtimeBinding = determineRuntimeBinding(
+      name,
+      idx,
+      isLoopBlock,
+      bindingName
+    )
     const categoryName = determineCategoryName(idx, isLoopBlock, bindingName)
bindings.push( bindings.push(
createBindingObject( createBindingObject(
name, name,
@ -613,7 +617,7 @@
) )
} }
-  const determineRuntimeBinding = (name, idx, isLoopBlock) => {
+  const determineRuntimeBinding = (name, idx, isLoopBlock, bindingName) => {
let runtimeName let runtimeName
/* Begin special cases for generating custom schemas based on triggers */ /* Begin special cases for generating custom schemas based on triggers */
@ -634,12 +638,17 @@
} }
/* End special cases for generating custom schemas based on triggers */ /* End special cases for generating custom schemas based on triggers */
+    let hasUserDefinedName = automation.stepNames?.[allSteps[idx]?.id]
     if (isLoopBlock) {
       runtimeName = `loop.${name}`
     } else if (block.name.startsWith("JS")) {
-      runtimeName = `steps[${idx - loopBlockCount}].${name}`
+      runtimeName = hasUserDefinedName
+        ? `stepsByName[${bindingName}].${name}`
+        : `steps[${idx - loopBlockCount}].${name}`
     } else {
-      runtimeName = `steps.${idx - loopBlockCount}.${name}`
+      runtimeName = hasUserDefinedName
+        ? `stepsByName.${bindingName}.${name}`
+        : `steps.${idx - loopBlockCount}.${name}`
     }
     return idx === 0 ? `trigger.${name}` : runtimeName
} }
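For illustration only (step index and names are invented), the bindings produced by determineRuntimeBinding above come out as:
// trigger output (idx === 0):            trigger.rows
// unnamed step at index 2:               steps.2.rows  (steps[2].rows inside JS blocks)
// step the user renamed "MyQueryStep":   stepsByName.MyQueryStep.rows
//                                        (stepsByName[MyQueryStep].rows inside JS blocks)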
@ -666,10 +675,10 @@
     const field = Object.values(FIELDS).find(
       field => field.type === value.type && field.subtype === value.subtype
     )
     return {
-      readableBinding: bindingName
-        ? `${bindingName}.${name}`
+      readableBinding:
+        bindingName && !isLoopBlock
+          ? `steps.${bindingName}.${name}`
           : runtimeBinding,
       runtimeBinding,
       type: value.type,
@ -690,8 +699,12 @@
       allSteps[idx]?.stepId === ActionStepID.LOOP &&
       allSteps.some(x => x.blockToLoop === block.id)
     let schema = cloneDeep(allSteps[idx]?.schema?.outputs?.properties) ?? {}
+    if (allSteps[idx]?.name.includes("Looping")) {
+      isLoopBlock = true
+      loopBlockCount++
+    }
     let bindingName =
-      automation.stepNames?.[allSteps[idx - loopBlockCount].id]
+      automation.stepNames?.[allSteps[idx].id] || allSteps[idx].name
     if (isLoopBlock) {
       schema = {
@ -740,13 +753,12 @@
     if (wasLoopBlock) {
       loopBlockCount++
-      continue
+      schema = cloneDeep(allSteps[idx - 1]?.schema?.outputs?.properties)
     }
-    Object.entries(schema).forEach(([name, value]) =>
+    Object.entries(schema).forEach(([name, value]) => {
       addBinding(name, value, icon, idx, isLoopBlock, bindingName)
-    )
+    })
   }
return bindings return bindings
} }

View file

@ -0,0 +1,117 @@
import { AutomationActionStepId } from "@budibase/types"
export const updateBindingsInInputs = (inputs, oldName, newName, stepIndex) => {
if (typeof inputs === "string") {
return inputs
.replace(
new RegExp(`stepsByName\\.${oldName}\\.`, "g"),
`stepsByName.${newName}.`
)
.replace(
new RegExp(`steps\\.${stepIndex}\\.`, "g"),
`stepsByName.${newName}.`
)
}
if (Array.isArray(inputs)) {
return inputs.map(item =>
updateBindingsInInputs(item, oldName, newName, stepIndex)
)
}
if (typeof inputs === "object" && inputs !== null) {
const updatedInputs = {}
for (const [key, value] of Object.entries(inputs)) {
const updatedKey = updateBindingsInInputs(
key,
oldName,
newName,
stepIndex
)
updatedInputs[updatedKey] = updateBindingsInInputs(
value,
oldName,
newName,
stepIndex
)
}
return updatedInputs
}
return inputs
}
export const updateBindingsInSteps = (
steps,
oldName,
newName,
changedStepIndex
) => {
return steps.map(step => {
const updatedStep = {
...step,
inputs: updateBindingsInInputs(
step.inputs,
oldName,
newName,
changedStepIndex
),
}
if ("branches" in updatedStep.inputs) {
updatedStep.inputs.branches = updatedStep.inputs.branches.map(branch => ({
...branch,
condition: updateBindingsInInputs(
branch.condition,
oldName,
newName,
changedStepIndex
),
}))
if (updatedStep.inputs.children) {
for (const [key, childSteps] of Object.entries(
updatedStep.inputs.children
)) {
updatedStep.inputs.children[key] = updateBindingsInSteps(
childSteps,
oldName,
newName,
changedStepIndex
)
}
}
}
return updatedStep
})
}
export const getNewStepName = (automation, step) => {
const baseName = step.name
const countExistingSteps = steps => {
return steps.reduce((count, currentStep) => {
if (currentStep.name && currentStep.name.startsWith(baseName)) {
count++
}
if (
currentStep.stepId === AutomationActionStepId.BRANCH &&
currentStep.inputs &&
currentStep.inputs.children
) {
Object.values(currentStep.inputs.children).forEach(branchSteps => {
count += countExistingSteps(branchSteps)
})
}
return count
}, 0)
}
let existingCount = 0
if (automation?.definition) {
existingCount = countExistingSteps(automation.definition.steps)
}
if (existingCount === 0) {
return baseName
}
return `${baseName} ${existingCount + 1}`
}

View file

@ -0,0 +1,177 @@
import { cloneDeep } from "lodash"
import {
updateBindingsInInputs,
updateBindingsInSteps,
} from "../automations/nameHelpers"
describe("Automation Binding Update Functions", () => {
const sampleAutomation = {
definition: {
steps: [
{
name: "First Step",
inputs: {
text: "Starting automation",
},
id: "step1",
},
{
name: "Second Step",
inputs: {
text: "{{ steps.0.success }} and {{ stepsByName.First Step.message }}",
},
id: "step2",
},
{
name: "Branch",
inputs: {
branches: [
{
name: "branch1",
condition: {
equal: {
"steps.1.success": true,
},
},
},
],
children: {
branch1: [
{
name: "Nested Step",
inputs: {
text: "{{ stepsByName.Second Step.message }} and {{ steps.1.success }}",
},
id: "nestedStep",
},
],
},
},
id: "branchStep",
},
],
stepNames: {
step1: "First Step",
step2: "Second Step",
branchStep: "Branch",
},
},
}
it("updateBindingsInInputs updates string bindings correctly", () => {
const input = "{{ stepsByName.oldName.success }} and {{ steps.1.message }}"
const result = updateBindingsInInputs(input, "oldName", "newName", 1)
expect(result).toBe(
"{{ stepsByName.newName.success }} and {{ stepsByName.newName.message }}"
)
})
it("updateBindingsInInputs handles nested objects", () => {
const input = {
text: "{{ stepsByName.oldName.success }}",
nested: {
value: "{{ steps.1.message }}",
},
}
const result = updateBindingsInInputs(input, "oldName", "newName", 1)
expect(result).toEqual({
text: "{{ stepsByName.newName.success }}",
nested: {
value: "{{ stepsByName.newName.message }}",
},
})
})
it("updateBindingsInSteps updates bindings in all steps", () => {
const steps = cloneDeep(sampleAutomation.definition.steps)
const result = updateBindingsInSteps(
steps,
"Second Step",
"Renamed Step",
1
)
expect(result[1].name).toBe("Second Step")
expect(result[2].inputs.branches[0].condition.equal).toEqual({
"stepsByName.Renamed Step.success": true,
})
const nestedStepText = result[2].inputs.children.branch1[0].inputs.text
expect(nestedStepText).toBe(
"{{ stepsByName.Renamed Step.message }} and {{ stepsByName.Renamed Step.success }}"
)
})
it("updateBindingsInSteps handles steps with no bindings", () => {
const steps = [
{
name: "No Binding Step",
inputs: {
text: "Plain text",
},
id: "noBindingStep",
},
]
const result = updateBindingsInSteps(steps, "Old Name", "New Name", 0)
expect(result).toEqual(steps)
})
it("updateBindingsInSteps updates bindings in deeply nested branches", () => {
const deeplyNestedStep = {
name: "Deep Branch",
inputs: {
branches: [
{
name: "deepBranch",
condition: {
equal: {
"stepsByName.Second Step.success": true,
},
},
},
],
children: {
deepBranch: [
{
name: "Deep Log",
inputs: {
text: "{{ steps.1.message }}",
},
},
],
},
},
}
const steps = [...sampleAutomation.definition.steps, deeplyNestedStep]
const result = updateBindingsInSteps(
steps,
"Second Step",
"Renamed Step",
1
)
expect(
result[3].inputs.branches[0].condition.equal[
"stepsByName.Renamed Step.success"
]
).toBe(true)
expect(result[3].inputs.children.deepBranch[0].inputs.text).toBe(
"{{ stepsByName.Renamed Step.message }}"
)
})
it("updateBindingsInSteps does not affect unrelated bindings", () => {
const steps = cloneDeep(sampleAutomation.definition.steps)
const result = updateBindingsInSteps(
steps,
"Second Step",
"Renamed Step",
1
)
expect(result[1].inputs.text).toBe(
"{{ steps.0.success }} and {{ stepsByName.First Step.message }}"
)
})
})

View file

@ -6,6 +6,10 @@ import { createHistoryStore } from "stores/builder/history"
import { notifications } from "@budibase/bbui" import { notifications } from "@budibase/bbui"
import { updateReferencesInObject } from "dataBinding" import { updateReferencesInObject } from "dataBinding"
import { AutomationTriggerStepId } from "@budibase/types" import { AutomationTriggerStepId } from "@budibase/types"
import {
updateBindingsInSteps,
getNewStepName,
} from "helpers/automations/nameHelpers"
const initialAutomationState = { const initialAutomationState = {
automations: [], automations: [],
@ -275,13 +279,17 @@ const automationActions = store => ({
await store.actions.save(newAutomation) await store.actions.save(newAutomation)
}, },
   constructBlock(type, stepId, blockDefinition) {
-    return {
+    let newName
+    const newStep = {
       ...blockDefinition,
       inputs: blockDefinition.inputs || {},
       stepId,
       type,
       id: generate(),
     }
+    newName = getNewStepName(get(selectedAutomation), newStep)
+    newStep.name = newName
+    return newStep
   },
addBlockToAutomation: async (block, blockIdx) => { addBlockToAutomation: async (block, blockIdx) => {
const automation = get(selectedAutomation) const automation = get(selectedAutomation)
@ -301,15 +309,34 @@ const automationActions = store => ({
   saveAutomationName: async (blockId, name) => {
     const automation = get(selectedAutomation)
     let newAutomation = cloneDeep(automation)
-    if (!automation) {
+    if (!newAutomation) {
       return
     }
+    const stepIndex = newAutomation.definition.steps.findIndex(
+      step => step.id === blockId
+    )
+    if (stepIndex !== -1) {
+      const oldName = newAutomation.definition.steps[stepIndex].name
+      const newName = name.trim()
       newAutomation.definition.stepNames = {
         ...newAutomation.definition.stepNames,
-        [blockId]: name.trim(),
+        [blockId]: newName,
       }
+      newAutomation.definition.steps[stepIndex].name = newName
+      newAutomation.definition.steps = updateBindingsInSteps(
+        newAutomation.definition.steps,
+        oldName,
+        newName,
+        stepIndex
+      )
       await store.actions.save(newAutomation)
+    }
   },
deleteAutomationName: async blockId => { deleteAutomationName: async blockId => {
const automation = get(selectedAutomation) const automation = get(selectedAutomation)

View file

@ -12,6 +12,7 @@ import {
OneToManyRelationshipFieldMetadata, OneToManyRelationshipFieldMetadata,
Operation, Operation,
PaginationJson, PaginationJson,
QueryJson,
RelationshipFieldMetadata, RelationshipFieldMetadata,
Row, Row,
SearchFilters, SearchFilters,
@ -161,7 +162,6 @@ export class ExternalRequest<T extends Operation> {
private readonly tableId: string private readonly tableId: string
private datasource?: Datasource private datasource?: Datasource
private tables: { [key: string]: Table } = {} private tables: { [key: string]: Table } = {}
private tableList: Table[]
constructor(operation: T, tableId: string, datasource?: Datasource) { constructor(operation: T, tableId: string, datasource?: Datasource) {
this.operation = operation this.operation = operation
@ -170,7 +170,6 @@ export class ExternalRequest<T extends Operation> {
if (datasource && datasource.entities) { if (datasource && datasource.entities) {
this.tables = datasource.entities this.tables = datasource.entities
} }
this.tableList = Object.values(this.tables)
} }
private prepareFilters( private prepareFilters(
@ -301,7 +300,6 @@ export class ExternalRequest<T extends Operation> {
throw "No tables found, fetch tables before query." throw "No tables found, fetch tables before query."
} }
this.tables = this.datasource.entities this.tables = this.datasource.entities
this.tableList = Object.values(this.tables)
} }
return { tables: this.tables, datasource: this.datasource } return { tables: this.tables, datasource: this.datasource }
} }
@ -463,7 +461,7 @@ export class ExternalRequest<T extends Operation> {
breakExternalTableId(relatedTableId) breakExternalTableId(relatedTableId)
// @ts-ignore // @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0] const linkPrimaryKey = this.tables[relatedTableName].primary[0]
-      if (!lookupField || !row[lookupField]) {
+      if (!lookupField || !row?.[lookupField] == null) {
continue continue
} }
const endpoint = getEndpoint(relatedTableId, Operation.READ) const endpoint = getEndpoint(relatedTableId, Operation.READ)
@ -631,7 +629,8 @@ export class ExternalRequest<T extends Operation> {
const { datasource: ds } = await this.retrieveMetadata(datasourceId) const { datasource: ds } = await this.retrieveMetadata(datasourceId)
datasource = ds datasource = ds
} }
-    const table = this.tables[tableName]
+    const tables = this.tables
+    const table = tables[tableName]
let isSql = isSQL(datasource) let isSql = isSQL(datasource)
if (!table) { if (!table) {
throw new Error( throw new Error(
@ -686,7 +685,7 @@ export class ExternalRequest<T extends Operation> {
) { ) {
throw "Deletion must be filtered" throw "Deletion must be filtered"
} }
-    let json = {
+    let json: QueryJson = {
endpoint: { endpoint: {
datasourceId: datasourceId!, datasourceId: datasourceId!,
entityId: tableName, entityId: tableName,
@ -715,7 +714,7 @@ export class ExternalRequest<T extends Operation> {
}, },
meta: { meta: {
table, table,
-        id: config.id,
+        tables: tables,
}, },
} }

View file

@ -3080,4 +3080,46 @@ describe.each([
}).toHaveLength(4) }).toHaveLength(4)
}) })
}) })
isSql &&
describe("max related columns", () => {
let relatedRows: Row[]
beforeAll(async () => {
const relatedSchema: TableSchema = {}
const row: Row = {}
for (let i = 0; i < 100; i++) {
const name = `column${i}`
relatedSchema[name] = { name, type: FieldType.NUMBER }
row[name] = i
}
const relatedTable = await createTable(relatedSchema)
table = await createTable({
name: { name: "name", type: FieldType.STRING },
related1: {
type: FieldType.LINK,
name: "related1",
fieldName: "main1",
tableId: relatedTable._id!,
relationshipType: RelationshipType.MANY_TO_MANY,
},
})
relatedRows = await Promise.all([
config.api.row.save(relatedTable._id!, row),
])
await config.api.row.save(table._id!, {
name: "foo",
related1: [relatedRows[0]._id],
})
})
it("retrieve the row with relationships", async () => {
await expectQuery({}).toContainExactly([
{
name: "foo",
related1: [{ _id: relatedRows[0]._id }],
},
])
})
})
}) })

View file

@ -242,4 +242,31 @@ describe("Loop automations", () => {
expect(results.steps[1].outputs.message).toContain("- 3") expect(results.steps[1].outputs.message).toContain("- 3")
expect(results.steps[3].outputs.message).toContain("- 3") expect(results.steps[3].outputs.message).toContain("- 3")
}) })
it("should use automation names to loop with", async () => {
const builder = createAutomationBuilder({
name: "Test Trigger with Loop and Create Row",
})
const results = await builder
.appAction({ fields: {} })
.loop(
{
option: LoopStepType.ARRAY,
binding: [1, 2, 3],
},
{ stepName: "FirstLoopStep" }
)
.serverLog(
{ text: "Message {{loop.currentItem}}" },
{ stepName: "FirstLoopLog" }
)
.serverLog(
{ text: "{{steps.FirstLoopLog.iterations}}" },
{ stepName: "FirstLoopIterationLog" }
)
.run()
expect(results.steps[1].outputs.message).toContain("- 3")
})
}) })

View file

@ -49,7 +49,6 @@ describe("Automation Scenarios", () => {
}, },
}) })
}) })
})
it("should trigger an automation which querys the database", async () => { it("should trigger an automation which querys the database", async () => {
const table = await config.createTable() const table = await config.createTable()
@ -196,4 +195,42 @@ describe("Automation Scenarios", () => {
) )
}) })
}) })
})
describe("Name Based Automations", () => {
  it("should fetch and delete a row using automation naming", async () => {
const table = await config.createTable()
const row = {
name: "DFN",
description: "original description",
tableId: table._id,
}
await config.createRow(row)
await config.createRow(row)
const builder = createAutomationBuilder({
name: "Test Query and Delete Row",
})
const results = await builder
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
},
{ stepName: "InitialQueryStep" }
)
.deleteRow({
tableId: table._id!,
id: "{{ steps.InitialQueryStep.rows.0._id }}",
})
.queryRows({
tableId: table._id!,
})
.run()
expect(results.steps).toHaveLength(3)
expect(results.steps[1].outputs.success).toBeTruthy()
expect(results.steps[2].outputs.rows).toHaveLength(1)
})
})
}) })

View file

@ -57,21 +57,27 @@ type BranchConfig = {
class BaseStepBuilder { class BaseStepBuilder {
protected steps: AutomationStep[] = [] protected steps: AutomationStep[] = []
protected stepNames: { [key: string]: string } = {}
protected step<TStep extends AutomationActionStepId>( protected step<TStep extends AutomationActionStepId>(
stepId: TStep, stepId: TStep,
stepSchema: Omit<AutomationStep, "id" | "stepId" | "inputs">, stepSchema: Omit<AutomationStep, "id" | "stepId" | "inputs">,
inputs: AutomationStepInputs<TStep> inputs: AutomationStepInputs<TStep>,
stepName?: string
): this { ): this {
const id = uuidv4()
this.steps.push({ this.steps.push({
...stepSchema, ...stepSchema,
inputs: inputs as any, inputs: inputs as any,
id: uuidv4(), id,
stepId, stepId,
name: stepName || stepSchema.name,
}) })
if (stepName) {
this.stepNames[id] = stepName
}
return this return this
} }
protected addBranchStep(branchConfig: BranchConfig): void { protected addBranchStep(branchConfig: BranchConfig): void {
const branchStepInputs: BranchStepInputs = { const branchStepInputs: BranchStepInputs = {
branches: [] as Branch[], branches: [] as Branch[],
@ -99,66 +105,80 @@ class BaseStepBuilder {
} }
// STEPS // STEPS
createRow(inputs: CreateRowStepInputs): this { createRow(inputs: CreateRowStepInputs, opts?: { stepName?: string }): this {
return this.step( return this.step(
AutomationActionStepId.CREATE_ROW, AutomationActionStepId.CREATE_ROW,
BUILTIN_ACTION_DEFINITIONS.CREATE_ROW, BUILTIN_ACTION_DEFINITIONS.CREATE_ROW,
inputs inputs,
opts?.stepName
) )
} }
updateRow(inputs: UpdateRowStepInputs): this { updateRow(inputs: UpdateRowStepInputs, opts?: { stepName?: string }): this {
return this.step( return this.step(
AutomationActionStepId.UPDATE_ROW, AutomationActionStepId.UPDATE_ROW,
BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW, BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW,
inputs inputs,
opts?.stepName
) )
} }
deleteRow(inputs: DeleteRowStepInputs): this { deleteRow(inputs: DeleteRowStepInputs, opts?: { stepName?: string }): this {
return this.step( return this.step(
AutomationActionStepId.DELETE_ROW, AutomationActionStepId.DELETE_ROW,
BUILTIN_ACTION_DEFINITIONS.DELETE_ROW, BUILTIN_ACTION_DEFINITIONS.DELETE_ROW,
inputs inputs,
opts?.stepName
) )
} }
sendSmtpEmail(inputs: SmtpEmailStepInputs): this { sendSmtpEmail(
inputs: SmtpEmailStepInputs,
opts?: { stepName?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.SEND_EMAIL_SMTP, AutomationActionStepId.SEND_EMAIL_SMTP,
BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP, BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP,
inputs inputs,
opts?.stepName
) )
} }
executeQuery(inputs: ExecuteQueryStepInputs): this { executeQuery(
inputs: ExecuteQueryStepInputs,
opts?: { stepName?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.EXECUTE_QUERY, AutomationActionStepId.EXECUTE_QUERY,
BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY, BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY,
inputs inputs,
opts?.stepName
) )
} }
queryRows(inputs: QueryRowsStepInputs): this { queryRows(inputs: QueryRowsStepInputs, opts?: { stepName?: string }): this {
return this.step( return this.step(
AutomationActionStepId.QUERY_ROWS, AutomationActionStepId.QUERY_ROWS,
BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS, BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS,
inputs inputs,
opts?.stepName
) )
} }
loop(inputs: LoopStepInputs): this { loop(inputs: LoopStepInputs, opts?: { stepName?: string }): this {
return this.step( return this.step(
AutomationActionStepId.LOOP, AutomationActionStepId.LOOP,
BUILTIN_ACTION_DEFINITIONS.LOOP, BUILTIN_ACTION_DEFINITIONS.LOOP,
inputs inputs,
opts?.stepName
) )
} }
serverLog(input: ServerLogStepInputs): this { serverLog(input: ServerLogStepInputs, opts?: { stepName?: string }): this {
return this.step( return this.step(
AutomationActionStepId.SERVER_LOG, AutomationActionStepId.SERVER_LOG,
BUILTIN_ACTION_DEFINITIONS.SERVER_LOG, BUILTIN_ACTION_DEFINITIONS.SERVER_LOG,
input input,
opts?.stepName
) )
} }
} }
@ -186,6 +206,7 @@ class AutomationBuilder extends BaseStepBuilder {
definition: { definition: {
steps: [], steps: [],
trigger: {} as AutomationTrigger, trigger: {} as AutomationTrigger,
stepNames: {},
}, },
type: "automation", type: "automation",
appId: options.appId ?? setup.getConfig().getAppId(), appId: options.appId ?? setup.getConfig().getAppId(),
@ -268,6 +289,7 @@ class AutomationBuilder extends BaseStepBuilder {
build(): Automation { build(): Automation {
this.automationConfig.definition.steps = this.steps this.automationConfig.definition.steps = this.steps
this.automationConfig.definition.stepNames = this.stepNames
return this.automationConfig return this.automationConfig
} }

View file

@ -15,6 +15,7 @@ export interface TriggerOutput {
export interface AutomationContext extends AutomationResults { export interface AutomationContext extends AutomationResults {
steps: any[] steps: any[]
stepsByName?: Record<string, any>
env?: Record<string, string> env?: Record<string, string>
trigger: any trigger: any
} }

View file

@ -551,11 +551,16 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
await this.connect() await this.connect()
const hasFilters = dataFilters.hasFilters(query.filters) const hasFilters = dataFilters.hasFilters(query.filters)
const limit = query.paginate?.limit || 100 const limit = query.paginate?.limit || 100
-    const page: number =
-      typeof query.paginate?.page === "number"
-        ? query.paginate.page
-        : parseInt(query.paginate?.page || "1")
-    const offset = (page - 1) * limit
+    let offset = query.paginate?.offset || 0
+    let page = query.paginate?.page
+    if (typeof page === "string") {
+      page = parseInt(page)
+    }
+    if (page !== undefined) {
+      offset = page * limit
+    }
const sheet = this.client.sheetsByTitle[query.sheet] const sheet = this.client.sheetsByTitle[query.sheet]
let rows: GoogleSpreadsheetRow[] = [] let rows: GoogleSpreadsheetRow[] = []
if (query.paginate && !hasFilters) { if (query.paginate && !hasFilters) {
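A sketch of the new offset behaviour with made-up numbers: pages are zero-indexed and multiplied directly by the limit, replacing the previous one-indexed (page - 1) * limit calculation.
const limit = 100
let offset = 0            // query.paginate?.offset || 0
let page: number | string | undefined = "2"
if (typeof page === "string") {
  page = parseInt(page)
}
if (page !== undefined) {
  offset = page * limit   // page 2 -> rows 200-299
}
console.log(offset)       // 200 (the old calculation would have given 100)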

View file

@ -208,6 +208,42 @@ describe("Google Sheets Integration", () => {
expect(row2.name).toEqual("Test Contact 2") expect(row2.name).toEqual("Test Contact 2")
expect(row2.description).toEqual("original description 2") expect(row2.description).toEqual("original description 2")
}) })
it("can paginate correctly", async () => {
await config.api.row.bulkImport(table._id!, {
rows: Array.from({ length: 248 }, (_, i) => ({
name: `${i}`,
description: "",
})),
})
let resp = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {},
paginate: true,
limit: 10,
})
let rows = resp.rows
while (resp.hasNextPage) {
resp = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {},
paginate: true,
limit: 10,
bookmark: resp.bookmark,
})
rows = rows.concat(resp.rows)
if (rows.length > 250) {
throw new Error("Too many rows returned")
}
}
expect(rows.length).toEqual(250)
expect(rows.map(row => row.name)).toEqual(
expect.arrayContaining(Array.from({ length: 248 }, (_, i) => `${i}`))
)
})
}) })
describe("update", () => { describe("update", () => {
@ -299,5 +335,160 @@ describe("Google Sheets Integration", () => {
expect(mock.cell("A2")).toEqual("Test Contact Updated") expect(mock.cell("A2")).toEqual("Test Contact Updated")
expect(mock.cell("B2")).toEqual("original description updated") expect(mock.cell("B2")).toEqual("original description updated")
}) })
it("should be able to rename a column", async () => {
const row = await config.api.row.save(table._id!, {
name: "Test Contact",
description: "original description",
})
const { name, ...otherColumns } = table.schema
const renamedTable = await config.api.table.save({
...table,
schema: {
...otherColumns,
renamed: {
...table.schema.name,
},
},
_rename: {
old: "name",
updated: "renamed",
},
})
expect(renamedTable.schema.name).not.toBeDefined()
expect(renamedTable.schema.renamed).toBeDefined()
expect(mock.cell("A1")).toEqual("renamed")
expect(mock.cell("B1")).toEqual("description")
expect(mock.cell("A2")).toEqual("Test Contact")
expect(mock.cell("B2")).toEqual("original description")
expect(mock.cell("A3")).toEqual(null)
expect(mock.cell("B3")).toEqual(null)
const renamedRow = await config.api.row.get(table._id!, row._id!)
expect(renamedRow.renamed).toEqual("Test Contact")
expect(renamedRow.description).toEqual("original description")
expect(renamedRow.name).not.toBeDefined()
})
// TODO: this gets the error "Sheet is not large enough to fit 27 columns. Resize the sheet first."
// eslint-disable-next-line jest/no-commented-out-tests
// it("should be able to add a new column", async () => {
// const updatedTable = await config.api.table.save({
// ...table,
// schema: {
// ...table.schema,
// newColumn: {
// name: "newColumn",
// type: FieldType.STRING,
// },
// },
// })
// expect(updatedTable.schema.newColumn).toBeDefined()
// expect(mock.cell("A1")).toEqual("name")
// expect(mock.cell("B1")).toEqual("description")
// expect(mock.cell("C1")).toEqual("newColumn")
// })
it("should be able to delete a column", async () => {
const row = await config.api.row.save(table._id!, {
name: "Test Contact",
description: "original description",
})
const updatedTable = await config.api.table.save({
...table,
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
expect(updatedTable.schema.name).toBeDefined()
expect(updatedTable.schema.description).not.toBeDefined()
// TODO: we don't delete data in deleted columns yet, should we?
// expect(mock.cell("A1")).toEqual("name")
// expect(mock.cell("B1")).toEqual(null)
const updatedRow = await config.api.row.get(table._id!, row._id!)
expect(updatedRow.name).toEqual("Test Contact")
expect(updatedRow.description).not.toBeDefined()
})
})
describe("delete", () => {
let table: Table
beforeEach(async () => {
table = await config.api.table.save({
name: "Test Table",
type: "table",
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
type: "string",
},
},
description: {
name: "description",
type: FieldType.STRING,
constraints: {
type: "string",
},
},
},
})
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Test Contact 1",
description: "original description 1",
},
{
name: "Test Contact 2",
description: "original description 2",
},
],
})
})
it("can delete a table", async () => {
expect(mock.sheet(table.name)).toBeDefined()
await config.api.table.destroy(table._id!, table._rev!)
expect(mock.sheet(table.name)).toBeUndefined()
})
it("can delete a row", async () => {
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)
// Because row IDs in Google Sheets are sequential and determined by the
// actual row in the sheet, deleting a row will shift the row IDs down by
// one. This is why we reverse the rows before deleting them.
for (const row of rows.reverse()) {
await config.api.row.delete(table._id!, { _id: row._id! })
}
expect(mock.cell("A1")).toEqual("name")
expect(mock.cell("B1")).toEqual("description")
expect(mock.cell("A2")).toEqual(null)
expect(mock.cell("B2")).toEqual(null)
expect(mock.cell("A3")).toEqual(null)
expect(mock.cell("B3")).toEqual(null)
const emptyRows = await config.api.row.fetch(table._id!)
expect(emptyRows.length).toEqual(0)
})
}) })
}) })

View file

@ -162,7 +162,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateRelationshipJson({ schema: "production" })) const query = sql._query(generateRelationshipJson({ schema: "production" }))
expect(query).toEqual({ expect(query).toEqual({
bindings: [limit, relationshipLimit], bindings: [limit, relationshipLimit],
sql: `with "paginated" as (select "brands".* from "production"."brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name",'brand_id',"products"."brand_id")) from (select "products".* from "production"."products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`, sql: `with "paginated" as (select "brands".* from "production"."brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('brand_id',"products"."brand_id",'product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "production"."products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`,
}) })
}) })
@ -170,7 +170,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateRelationshipJson()) const query = sql._query(generateRelationshipJson())
expect(query).toEqual({ expect(query).toEqual({
bindings: [limit, relationshipLimit], bindings: [limit, relationshipLimit],
sql: `with "paginated" as (select "brands".* from "brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name",'brand_id',"products"."brand_id")) from (select "products".* from "products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`, sql: `with "paginated" as (select "brands".* from "brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('brand_id',"products"."brand_id",'product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`,
}) })
}) })

View file

@ -63,7 +63,7 @@ describe("Captures of real examples", () => {
bindings: [primaryLimit, relationshipLimit, relationshipLimit], bindings: [primaryLimit, relationshipLimit, relationshipLimit],
sql: expect.stringContaining( sql: expect.stringContaining(
multiline( multiline(
`select json_agg(json_build_object('executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid",'executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid")` `select json_agg(json_build_object('completed',"b"."completed",'completed',"b"."completed",'executorid',"b"."executorid",'executorid',"b"."executorid",'qaid',"b"."qaid",'qaid',"b"."qaid",'taskid',"b"."taskid",'taskid',"b"."taskid",'taskname',"b"."taskname",'taskname',"b"."taskname")`
) )
), ),
}) })
@ -95,7 +95,7 @@ describe("Captures of real examples", () => {
sql: expect.stringContaining( sql: expect.stringContaining(
multiline( multiline(
`with "paginated" as (select "a".* from "products" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc limit $1) `with "paginated" as (select "a".* from "products" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc limit $1)
select "a".*, (select json_agg(json_build_object('executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid")) select "a".*, (select json_agg(json_build_object('completed',"b"."completed",'executorid',"b"."executorid",'qaid',"b"."qaid",'taskid',"b"."taskid",'taskname',"b"."taskname"))
from (select "b".* from "tasks" as "b" inner join "products_tasks" as "c" on "b"."taskid" = "c"."taskid" where "c"."productid" = "a"."productid" order by "b"."taskid" asc limit $2) as "b") as "tasks" from (select "b".* from "tasks" as "b" inner join "products_tasks" as "c" on "b"."taskid" = "c"."taskid" where "c"."productid" = "a"."productid" order by "b"."taskid" asc limit $2) as "b") as "tasks"
from "paginated" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc` from "paginated" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc`
) )
@ -113,7 +113,7 @@ describe("Captures of real examples", () => {
bindings: [...filters, relationshipLimit, relationshipLimit], bindings: [...filters, relationshipLimit, relationshipLimit],
sql: multiline( sql: multiline(
`with "paginated" as (select "a".* from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) `with "paginated" as (select "a".* from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3)
select "a".*, (select json_agg(json_build_object('productname',"b"."productname",'productid',"b"."productid")) select "a".*, (select json_agg(json_build_object('productid',"b"."productid",'productname',"b"."productname"))
from (select "b".* from "products" as "b" inner join "products_tasks" as "c" on "b"."productid" = "c"."productid" from (select "b".* from "products" as "b" inner join "products_tasks" as "c" on "b"."productid" = "c"."productid"
where "c"."taskid" = "a"."taskid" order by "b"."productid" asc limit $4) as "b") as "products" from "paginated" as "a" order by "a"."taskid" asc` where "c"."taskid" = "a"."taskid" order by "b"."productid" asc limit $4) as "b") as "products" from "paginated" as "a" order by "a"."taskid" asc`
), ),

View file

@ -21,6 +21,7 @@ import type {
CellFormat, CellFormat,
CellPadding, CellPadding,
Color, Color,
GridRange,
} from "google-spreadsheet/src/lib/types/sheets-types" } from "google-spreadsheet/src/lib/types/sheets-types"
const BLACK: Color = { red: 0, green: 0, blue: 0 } const BLACK: Color = { red: 0, green: 0, blue: 0 }
@ -88,11 +89,38 @@ interface UpdateValuesResponse {
updatedData: ValueRange updatedData: ValueRange
} }
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request#AddSheetRequest
interface AddSheetRequest {
properties: WorksheetProperties
}
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/response#AddSheetResponse // https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/response#AddSheetResponse
interface AddSheetResponse { interface AddSheetResponse {
properties: WorksheetProperties properties: WorksheetProperties
} }
interface DeleteRangeRequest {
range: GridRange
shiftDimension: WorksheetDimension
}
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request#DeleteSheetRequest
interface DeleteSheetRequest {
sheetId: number
}
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request
interface BatchUpdateRequest {
requests: {
addSheet?: AddSheetRequest
deleteRange?: DeleteRangeRequest
deleteSheet?: DeleteSheetRequest
}[]
includeSpreadsheetInResponse: boolean
responseRanges: string[]
responseIncludeGridData: boolean
}
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/response // https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/response
interface BatchUpdateResponse { interface BatchUpdateResponse {
spreadsheetId: string spreadsheetId: string
@ -102,23 +130,6 @@ interface BatchUpdateResponse {
updatedSpreadsheet: Spreadsheet updatedSpreadsheet: Spreadsheet
} }
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request#AddSheetRequest
interface AddSheetRequest {
properties: WorksheetProperties
}
interface Request {
addSheet?: AddSheetRequest
}
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request
interface BatchUpdateRequest {
requests: Request[]
includeSpreadsheetInResponse: boolean
responseRanges: string[]
responseIncludeGridData: boolean
}
// https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#RowData // https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/sheets#RowData
interface RowData { interface RowData {
values: CellData[] values: CellData[]
@ -369,13 +380,17 @@ export class GoogleSheetsMock {
private handleValueAppend(request: AppendRequest): AppendResponse { private handleValueAppend(request: AppendRequest): AppendResponse {
const { range, params, body } = request const { range, params, body } = request
const { sheet, bottomRight } = this.parseA1Notation(range) const { sheetId, endRowIndex } = this.parseA1Notation(range)
const sheet = this.getSheetById(sheetId)
if (!sheet) {
throw new Error(`Sheet ${sheetId} not found`)
}
const newRows = body.values.map(v => this.valuesToRowData(v)) const newRows = body.values.map(v => this.valuesToRowData(v))
const toDelete = const toDelete =
params.insertDataOption === "INSERT_ROWS" ? newRows.length : 0 params.insertDataOption === "INSERT_ROWS" ? newRows.length : 0
sheet.data[0].rowData.splice(bottomRight.row + 1, toDelete, ...newRows) sheet.data[0].rowData.splice(endRowIndex + 1, toDelete, ...newRows)
sheet.data[0].rowMetadata.splice(bottomRight.row + 1, toDelete, { sheet.data[0].rowMetadata.splice(endRowIndex + 1, toDelete, {
hiddenByUser: false, hiddenByUser: false,
hiddenByFilter: false, hiddenByFilter: false,
pixelSize: 100, pixelSize: 100,
@ -384,17 +399,13 @@ export class GoogleSheetsMock {
// It's important to give back a correct updated range because the API // It's important to give back a correct updated range because the API
// library we use makes use of it to assign the correct row IDs to rows. // library we use makes use of it to assign the correct row IDs to rows.
const updatedRange = this.createA1FromRanges( const updatedRange = this.createA1({
sheet, sheetId,
{ startRowIndex: endRowIndex + 1,
row: bottomRight.row + 1, startColumnIndex: 0,
column: 0, endRowIndex: endRowIndex + newRows.length,
}, endColumnIndex: 0,
{ })
row: bottomRight.row + newRows.length,
column: 0,
}
)
return { return {
spreadsheetId: this.spreadsheet.spreadsheetId, spreadsheetId: this.spreadsheet.spreadsheetId,
@ -438,6 +449,14 @@ export class GoogleSheetsMock {
addSheet: this.handleAddSheet(request.addSheet), addSheet: this.handleAddSheet(request.addSheet),
}) })
} }
if (request.deleteRange) {
this.handleDeleteRange(request.deleteRange)
response.replies.push({})
}
if (request.deleteSheet) {
this.handleDeleteSheet(request.deleteSheet)
response.replies.push({})
}
} }
return response return response
@ -474,12 +493,29 @@ export class GoogleSheetsMock {
return { properties: properties as WorksheetProperties } return { properties: properties as WorksheetProperties }
} }
private handleDeleteRange(request: DeleteRangeRequest) {
const { range, shiftDimension } = request
if (shiftDimension !== "ROWS") {
throw new Error("Only row-based deletes are supported")
}
this.iterateRange(range, cell => {
cell.userEnteredValue = this.createValue(null)
})
}
private handleDeleteSheet(request: DeleteSheetRequest) {
const { sheetId } = request
this.spreadsheet.sheets.splice(sheetId, 1)
}
private handleGetSpreadsheet(): Spreadsheet { private handleGetSpreadsheet(): Spreadsheet {
return this.spreadsheet return this.spreadsheet
} }
private handleValueUpdate(valueRange: ValueRange): UpdateValuesResponse { private handleValueUpdate(valueRange: ValueRange): UpdateValuesResponse {
this.iterateCells(valueRange, (cell, value) => { this.iterateValueRange(valueRange, (cell, value) => {
cell.userEnteredValue = this.createValue(value) cell.userEnteredValue = this.createValue(value)
}) })
@ -494,7 +530,27 @@ export class GoogleSheetsMock {
return response return response
} }
private iterateCells( private iterateRange(range: GridRange, cb: (cell: CellData) => void) {
const {
sheetId,
startRowIndex,
endRowIndex,
startColumnIndex,
endColumnIndex,
} = this.ensureGridRange(range)
for (let row = startRowIndex; row <= endRowIndex; row++) {
for (let col = startColumnIndex; col <= endColumnIndex; col++) {
const cell = this.getCellNumericIndexes(sheetId, row, col)
if (!cell) {
throw new Error("Cell not found")
}
cb(cell)
}
}
}
private iterateValueRange(
valueRange: ValueRange, valueRange: ValueRange,
cb: (cell: CellData, value: Value) => void cb: (cell: CellData, value: Value) => void
) { ) {
@ -502,33 +558,46 @@ export class GoogleSheetsMock {
throw new Error("Only row-major updates are supported") throw new Error("Only row-major updates are supported")
} }
const { sheet, topLeft, bottomRight } = this.parseA1Notation( const {
valueRange.range sheetId,
) startColumnIndex,
for (let row = topLeft.row; row <= bottomRight.row; row++) { startRowIndex,
for (let col = topLeft.column; col <= bottomRight.column; col++) { endColumnIndex,
const cell = this.getCellNumericIndexes(sheet, row, col) endRowIndex,
} = this.parseA1Notation(valueRange.range)
for (let row = startRowIndex; row <= endRowIndex; row++) {
for (let col = startColumnIndex; col <= endColumnIndex; col++) {
const cell = this.getCellNumericIndexes(sheetId, row, col)
if (!cell) { if (!cell) {
throw new Error("Cell not found") throw new Error("Cell not found")
} }
const value = valueRange.values[row - topLeft.row][col - topLeft.column] const value =
valueRange.values[row - startRowIndex][col - startColumnIndex]
cb(cell, value) cb(cell, value)
} }
} }
} }
private getValueRange(range: string): ValueRange { private getValueRange(range: string): ValueRange {
const { sheet, topLeft, bottomRight } = this.parseA1Notation(range) const {
sheetId,
startRowIndex,
endRowIndex,
startColumnIndex,
endColumnIndex,
} = this.parseA1Notation(range)
const valueRange: ValueRange = { const valueRange: ValueRange = {
range, range,
majorDimension: "ROWS", majorDimension: "ROWS",
values: [], values: [],
} }
for (let row = topLeft.row; row <= bottomRight.row; row++) { for (let row = startRowIndex; row <= endRowIndex; row++) {
const values: Value[] = [] const values: Value[] = []
for (let col = topLeft.column; col <= bottomRight.column; col++) { for (let col = startColumnIndex; col <= endColumnIndex; col++) {
const cell = this.getCellNumericIndexes(sheet, row, col) const cell = this.getCellNumericIndexes(sheetId, row, col)
if (!cell) { if (!cell) {
throw new Error("Cell not found") throw new Error("Cell not found")
} }
@ -693,14 +762,12 @@ export class GoogleSheetsMock {
} }
private cellData(cell: string): CellData | undefined { private cellData(cell: string): CellData | undefined {
const { const { sheetId, startColumnIndex, startRowIndex } =
sheet, this.parseA1Notation(cell)
topLeft: { row, column }, return this.getCellNumericIndexes(sheetId, startRowIndex, startColumnIndex)
} = this.parseA1Notation(cell)
return this.getCellNumericIndexes(sheet, row, column)
} }
cell(cell: string): Value | undefined { public cell(cell: string): Value | undefined {
const cellData = this.cellData(cell) const cellData = this.cellData(cell)
if (!cellData) { if (!cellData) {
return undefined return undefined
@ -708,11 +775,26 @@ export class GoogleSheetsMock {
return this.cellValue(cellData) return this.cellValue(cellData)
} }
public sheet(name: string | number): Sheet | undefined {
if (typeof name === "number") {
return this.getSheetById(name)
}
return this.getSheetByName(name)
}
private getCellNumericIndexes( private getCellNumericIndexes(
sheet: Sheet, sheet: Sheet | number,
row: number, row: number,
column: number column: number
): CellData | undefined { ): CellData | undefined {
if (typeof sheet === "number") {
const foundSheet = this.getSheetById(sheet)
if (!foundSheet) {
return undefined
}
sheet = foundSheet
}
const data = sheet.data[0] const data = sheet.data[0]
const rowData = data.rowData[row] const rowData = data.rowData[row]
if (!rowData) { if (!rowData) {
@ -751,11 +833,7 @@ export class GoogleSheetsMock {
// "Sheet1!A:B" -> { topLeft: { row: 0, column: 0 }, bottomRight: { row: 99, column: 1 } } // "Sheet1!A:B" -> { topLeft: { row: 0, column: 0 }, bottomRight: { row: 99, column: 1 } }
// "Sheet1!1:1" -> { topLeft: { row: 0, column: 0 }, bottomRight: { row: 0, column: 25 } } // "Sheet1!1:1" -> { topLeft: { row: 0, column: 0 }, bottomRight: { row: 0, column: 25 } }
// "Sheet1!1:2" -> { topLeft: { row: 0, column: 0 }, bottomRight: { row: 1, column: 25 } } // "Sheet1!1:2" -> { topLeft: { row: 0, column: 0 }, bottomRight: { row: 1, column: 25 } }
private parseA1Notation(range: string): { private parseA1Notation(range: string): Required<GridRange> {
sheet: Sheet
topLeft: Range
bottomRight: Range
} {
let sheet: Sheet let sheet: Sheet
let rest: string let rest: string
if (!range.includes("!")) { if (!range.includes("!")) {
@ -793,35 +871,54 @@ export class GoogleSheetsMock {
parsedBottomRight = parsedTopLeft parsedBottomRight = parsedTopLeft
} }
if (parsedTopLeft && parsedTopLeft.row === undefined) { return this.ensureGridRange({
parsedTopLeft.row = 0 sheetId: sheet.properties.sheetId,
startRowIndex: parsedTopLeft.row,
endRowIndex: parsedBottomRight.row,
startColumnIndex: parsedTopLeft.column,
endColumnIndex: parsedBottomRight.column,
})
} }
if (parsedTopLeft && parsedTopLeft.column === undefined) {
parsedTopLeft.column = 0 private ensureGridRange(range: GridRange): Required<GridRange> {
} const sheet = this.getSheetById(range.sheetId)
if (parsedBottomRight && parsedBottomRight.row === undefined) { if (!sheet) {
parsedBottomRight.row = sheet.properties.gridProperties.rowCount - 1 throw new Error(`Sheet ${range.sheetId} not found`)
}
if (parsedBottomRight && parsedBottomRight.column === undefined) {
parsedBottomRight.column = sheet.properties.gridProperties.columnCount - 1
} }
return { return {
sheet, sheetId: range.sheetId,
topLeft: parsedTopLeft as Range, startRowIndex: range.startRowIndex ?? 0,
bottomRight: parsedBottomRight as Range, endRowIndex:
range.endRowIndex ?? sheet.properties.gridProperties.rowCount - 1,
startColumnIndex: range.startColumnIndex ?? 0,
endColumnIndex:
range.endColumnIndex ?? sheet.properties.gridProperties.columnCount - 1,
} }
} }
private createA1FromRanges(sheet: Sheet, topLeft: Range, bottomRight: Range) { private createA1(range: Required<GridRange>) {
const {
sheetId,
startColumnIndex,
startRowIndex,
endColumnIndex,
endRowIndex,
} = range
const sheet = this.getSheetById(sheetId)
if (!sheet) {
throw new Error(`Sheet ${range.sheetId} not found`)
}
let title = sheet.properties.title let title = sheet.properties.title
if (title.includes(" ")) { if (title.includes(" ")) {
title = `'${title}'` title = `'${title}'`
} }
const topLeftLetter = this.numberToLetter(topLeft.column) const topLeftLetter = this.numberToLetter(startColumnIndex)
const bottomRightLetter = this.numberToLetter(bottomRight.column) const bottomRightLetter = this.numberToLetter(endColumnIndex)
const topLeftRow = topLeft.row + 1 const topLeftRow = startRowIndex + 1
const bottomRightRow = bottomRight.row + 1 const bottomRightRow = endRowIndex + 1
return `${title}!${topLeftLetter}${topLeftRow}:${bottomRightLetter}${bottomRightRow}` return `${title}!${topLeftLetter}${topLeftRow}:${bottomRightLetter}${bottomRightRow}`
} }
@ -860,4 +957,10 @@ export class GoogleSheetsMock {
sheet => sheet.properties.title === name sheet => sheet.properties.title === name
) )
} }
private getSheetById(id: number): Sheet | undefined {
return this.spreadsheet.sheets.find(
sheet => sheet.properties.sheetId === id
)
}
} }

View file

@ -89,7 +89,12 @@ class Orchestrator {
delete triggerOutput.appId delete triggerOutput.appId
delete triggerOutput.metadata delete triggerOutput.metadata
// step zero is never used as the template string is zero indexed for customer facing // step zero is never used as the template string is zero indexed for customer facing
-    this.context = { steps: [{}], trigger: triggerOutput }
+    this.context = {
+      steps: [{}],
+      stepsByName: {},
+      trigger: triggerOutput,
+    }
this.automation = automation this.automation = automation
// create an emitter which has the chain count for this automation run in it, so it can block // create an emitter which has the chain count for this automation run in it, so it can block
// excessive chaining if required // excessive chaining if required
@ -451,6 +456,9 @@ class Orchestrator {
outputs: tempOutput, outputs: tempOutput,
inputs: steps[stepToLoopIndex].inputs, inputs: steps[stepToLoopIndex].inputs,
}) })
const stepName = steps[stepToLoopIndex].name || steps[stepToLoopIndex].id
this.context.stepsByName![stepName] = tempOutput
this.context.steps[this.context.steps.length] = tempOutput this.context.steps[this.context.steps.length] = tempOutput
this.context.steps = this.context.steps.filter( this.context.steps = this.context.steps.filter(
item => !item.hasOwnProperty.call(item, "currentItem") item => !item.hasOwnProperty.call(item, "currentItem")
@ -555,8 +563,13 @@ class Orchestrator {
loopIteration loopIteration
) )
} }
const stepFn = await this.getStepFunctionality(step.stepId) const stepFn = await this.getStepFunctionality(step.stepId)
-    let inputs = await processObject(originalStepInput, this.context)
+    let inputs = await this.addContextAndProcess(
+      originalStepInput,
+      this.context
+    )
inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs) inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs)
const outputs = await stepFn({ const outputs = await stepFn({
@ -583,6 +596,18 @@ class Orchestrator {
return null return null
} }
private async addContextAndProcess(inputs: any, context: any) {
const processContext = {
...context,
steps: {
...context.steps,
...context.stepsByName,
},
}
return processObject(inputs, processContext)
}
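Illustrative only (values invented): the processing context spreads the positional steps array and the stepsByName map into a single object, so both binding styles resolve against the same outputs.
const context = {
  steps: [{}, { rows: [{ _id: "ro_1" }] }],
  stepsByName: { InitialQueryStep: { rows: [{ _id: "ro_1" }] } },
  trigger: {},
}
const processContext = {
  ...context,
  steps: {
    ...context.steps,        // array spread into object keys "0", "1"
    ...context.stepsByName,  // plus the user-defined step names
  },
}
// {{ steps.1.rows.0._id }} and {{ steps.InitialQueryStep.rows.0._id }}
// now both resolve to "ro_1"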
private handleStepOutput( private handleStepOutput(
step: AutomationStep, step: AutomationStep,
outputs: any, outputs: any,
@ -600,6 +625,8 @@ class Orchestrator {
} else { } else {
this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs) this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs)
this.context.steps[this.context.steps.length] = outputs this.context.steps[this.context.steps.length] = outputs
const stepName = step.name || step.id
this.context.stepsByName![stepName] = outputs
} }
} }
} }

View file

@ -124,6 +124,8 @@ export interface Automation extends Document {
definition: { definition: {
steps: AutomationStep[] steps: AutomationStep[]
trigger: AutomationTrigger trigger: AutomationTrigger
    // stepNames is used to look up step names from their corresponding step IDs.
stepNames?: Record<string, string>
} }
screenId?: string screenId?: string
uiTree?: any uiTree?: any
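For illustration (IDs and names invented), a definition carrying stepNames might look like this; the builder and binding code above use the map to expose stepsByName bindings:
const definition = {
  steps: [
    { id: "abc123", stepId: "QUERY_ROWS", name: "InitialQueryStep", inputs: {} },
  ],
  // maps a step's ID to the user's chosen display name
  stepNames: { abc123: "InitialQueryStep" },
}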