
Updating rows API so that it is broken into an internal and external API - this is a breaking change, as it changes how bulk deletion is handled.

This commit is contained in:
mike12345567 2021-06-11 18:56:30 +01:00
parent 7737ac149d
commit 7ba21329eb
7 changed files with 281 additions and 153 deletions
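The breaking part of this change is how bulk deletion reaches the rows API. Previously the save handler doubled as a bulk delete endpoint whenever the request body carried type: "delete"; after this commit the shared destroy handler looks for a rows array in the body and dispatches to bulkDestroy instead (the parsedMethods addition to the body parser further down, which makes the server parse DELETE request bodies, fits the same change). A minimal caller-side sketch of the difference follows; the route paths and IDs are illustrative assumptions, only the request bodies and handler behaviour come from this diff.

// Hypothetical client-side helpers - URLs and IDs are placeholders, not taken from this commit.
async function bulkDeleteRowsOld(tableId, rows) {
  // Before: bulk delete was tunnelled through the save endpoint.
  return fetch(`/api/${tableId}/rows`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ type: "delete", rows }),
  })
}

async function bulkDeleteRowsNew(tableId, rows) {
  // After: the destroy handler sees a `rows` array in the body and dispatches
  // to bulkDestroy, so builder/client code has to send a delete request instead.
  return fetch(`/api/${tableId}/rows`, {
    method: "DELETE",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ rows }),
  })
}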

View file

@@ -0,0 +1,38 @@
const CouchDB = require("../../../db")
exports.patch = async (ctx) => {
ctx.body = {}
}
exports.save = async (ctx) => {
ctx.body = {}
}
exports.fetchView = async (ctx) => {
ctx.body = {}
}
exports.fetchTableRows = async (ctx) => {
ctx.body = {}
}
exports.find = async (ctx) => {
ctx.body = {}
}
exports.destroy = async (ctx) => {
ctx.body = {}
}
exports.bulkDestroy = async (ctx) => {
ctx.body = {}
}
exports.validate = async (ctx) => {
ctx.body = {}
}
exports.fetchEnrichedRow = async (ctx) => {
// TODO: should this join?
ctx.body = {}
}

View file

@@ -0,0 +1,114 @@
const internal = require("./internal")
const external = require("./external")
function pickApi(tableId) {
// TODO: go to external
return internal
}
function getTableId(ctx) {
if (ctx.request.body && ctx.request.body.tableId) {
return ctx.request.body.tableId
}
if (ctx.params && ctx.params.tableId) {
return ctx.params.tableId
}
if (ctx.params && ctx.params.viewName) {
return ctx.params.viewName
}
}
exports.patch = async ctx => {
const appId = ctx.appId
const tableId = getTableId(ctx)
try {
const { row, table } = await pickApi(tableId).patch(ctx)
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:update`, appId, row, table)
ctx.message = `${table.name} updated successfully`
ctx.body = row
} catch (err) {
ctx.throw(400, err)
}
}
exports.save = async function (ctx) {
// TODO: this used to handle bulk delete, need to update builder/client
const appId = ctx.appId
const tableId = getTableId(ctx)
try {
const { row, table } = await pickApi(tableId).save(ctx)
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
ctx.message = `${table.name} saved successfully`
ctx.body = row
} catch (err) {
ctx.throw(400, err)
}
}
exports.fetchView = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetchView(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.fetchTableRows = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetchTableRows(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.find = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).find(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const inputs = ctx.request.body
const tableId = getTableId(ctx)
let response, row
if (inputs.rows) {
let { rows } = await pickApi(tableId).bulkDestroy(ctx)
response = rows
for (let row of rows) {
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
} else {
let resp = await pickApi(tableId).destroy(ctx)
response = resp.response
row = resp.row
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
ctx.status = 200
// for automations include the row that was deleted
ctx.row = row || {}
ctx.body = response
}
exports.validate = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).validate(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
exports.fetchEnrichedRow = async function (ctx) {
const tableId = getTableId(ctx)
try {
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
} catch (err) {
ctx.throw(400, err)
}
}
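
Every handler in this new dispatcher file follows the same shape: resolve the table ID, pick the internal or external implementation, copy its return value onto the Koa ctx, and keep event emission at this layer only. A condensed sketch of that pattern, using a hypothetical wrapper purely for illustration:

// Sketch only - a wrapper like this is not in the commit, it just restates the
// shape shared by the pass-through handlers above.
function wrap(operation) {
  return async ctx => {
    const tableId = getTableId(ctx)
    try {
      ctx.body = await pickApi(tableId)[operation](ctx)
    } catch (err) {
      ctx.throw(400, err)
    }
  }
}

// e.g. exports.find, exports.validate and exports.fetchView above are all this shape:
exports.find = wrap("find")

patch, save and destroy differ only in that they also emit row events and set ctx.message from the { row, table } value the implementation returns.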

View file

@@ -1,21 +1,20 @@
const CouchDB = require("../../db")
const validateJs = require("validate.js")
const linkRows = require("../../db/linkedRows")
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const {
getRowParams,
generateRowID,
DocumentTypes,
SEPARATOR,
InternalTables,
} = require("../../db/utils")
const userController = require("./user")
} = require("../../../db/utils")
const userController = require("../user")
const {
inputProcessing,
outputProcessing,
} = require("../../utilities/rowProcessor")
const { FieldTypes } = require("../../constants")
} = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash")
const { cloneDeep } = require("lodash/fp")
const { validate, findRow } = require("./utils")
const TABLE_VIEW_BEGINS_WITH = `all${SEPARATOR}${DocumentTypes.TABLE}${SEPARATOR}`
@@ -25,35 +24,7 @@ const CALCULATION_TYPES = {
STATS: "stats",
}
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
return new Date(value).getTime()
},
// Input is a unix timestamp
format: function (value) {
return new Date(value).toISOString()
},
})
async function findRow(ctx, db, tableId, rowId) {
let row
// TODO remove special user case in future
if (tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: rowId,
}
await userController.findMetadata(ctx)
row = ctx.body
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the rows tableId"
}
return row
}
exports.patch = async function (ctx) {
exports.patch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
const inputs = ctx.request.body
@@ -70,7 +41,7 @@ exports.patch = async function (ctx) {
_id: inputs._id,
}
} else {
ctx.throw(400, "Row does not exist")
throw "Row does not exist"
}
}
let dbTable = await db.get(tableId)
@@ -88,12 +59,7 @@ exports.patch = async function (ctx) {
})
if (!validateResult.valid) {
ctx.status = 400
ctx.body = {
status: 400,
errors: validateResult.errors,
}
return
throw validateResult.errors
}
// returned row is cleaned and prepared for writing to DB
@@ -109,7 +75,7 @@ exports.patch = async function (ctx) {
// the row has been updated, need to put it into the ctx
ctx.request.body = row
await userController.updateMetadata(ctx)
return
return { row: ctx.body, table }
}
const response = await db.put(row)
@@ -119,10 +85,7 @@
}
row._rev = response.rev
row.type = "row"
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:update`, appId, row, table)
ctx.body = row
ctx.status = 200
ctx.message = `${table.name} updated successfully.`
return { row, table }
}
exports.save = async function (ctx) {
@@ -131,18 +94,10 @@ exports.save = async function (ctx) {
let inputs = ctx.request.body
inputs.tableId = ctx.params.tableId
// TODO: find usage of this and break out into own endpoint
if (inputs.type === "delete") {
await bulkDelete(ctx)
ctx.body = inputs.rows
return
}
// if the row obj had an _id then it will have been retrieved
if (inputs._id && inputs._rev) {
ctx.params.rowId = inputs._id
await exports.patch(ctx)
return
return exports.patch(ctx)
}
if (!inputs._rev && !inputs._id) {
@@ -158,12 +113,7 @@ exports.save = async function (ctx) {
})
if (!validateResult.valid) {
ctx.status = 400
ctx.body = {
status: 400,
errors: validateResult.errors,
}
return
throw validateResult.errors
}
// make sure link rows are up to date
@@ -182,13 +132,10 @@ exports.save = async function (ctx) {
await db.put(table)
}
row._rev = response.rev
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
ctx.body = row
ctx.status = 200
ctx.message = `${table.name} saved successfully`
return { row, table }
}
exports.fetchView = async function (ctx) {
exports.fetchView = async (ctx) => {
const appId = ctx.appId
const viewName = ctx.params.viewName
@@ -204,13 +151,14 @@ exports.fetchView = async function (ctx) {
const designDoc = await db.get("_design/database")
const viewInfo = designDoc.views[viewName]
if (!viewInfo) {
ctx.throw(400, "View does not exist.")
throw "View does not exist."
}
const response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})
let rows
if (!calculation) {
response.rows = response.rows.map(row => row.doc)
let table
@@ -222,7 +170,7 @@ exports.fetchView = async function (ctx) {
schema: {},
}
}
ctx.body = await outputProcessing(appId, table, response.rows)
rows = await outputProcessing(appId, table, response.rows)
}
if (calculation === CALCULATION_TYPES.STATS) {
@@ -232,26 +180,26 @@ exports.fetchView = async function (ctx) {
...row.value,
avg: row.value.sum / row.value.count,
}))
ctx.body = response.rows
rows = response.rows
}
if (
calculation === CALCULATION_TYPES.COUNT ||
calculation === CALCULATION_TYPES.SUM
) {
ctx.body = response.rows.map(row => ({
rows = response.rows.map(row => ({
group: row.key,
field,
value: row.value,
}))
}
return rows
}
exports.fetchTableRows = async function (ctx) {
exports.fetchTableRows = async (ctx) => {
const appId = ctx.appId
const db = new CouchDB(appId)
// TODO remove special user case in future
let rows,
table = await db.get(ctx.params.tableId)
if (ctx.params.tableId === InternalTables.USER_METADATA) {
@@ -265,27 +213,25 @@ exports.fetchTableRows = async function (ctx) {
)
rows = response.rows.map(row => row.doc)
}
ctx.body = await outputProcessing(appId, table, rows)
return outputProcessing(appId, table, rows)
}
exports.find = async function (ctx) {
exports.find = async (ctx) => {
const appId = ctx.appId
const db = new CouchDB(appId)
try {
const table = await db.get(ctx.params.tableId)
const row = await findRow(ctx, db, ctx.params.tableId, ctx.params.rowId)
ctx.body = await outputProcessing(appId, table, row)
} catch (err) {
ctx.throw(400, err)
}
const table = await db.get(ctx.params.tableId)
let row = await findRow(ctx, db, ctx.params.tableId, ctx.params.rowId)
row = await outputProcessing(appId, table, row)
return row
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const row = await db.get(ctx.params.rowId)
if (row.tableId !== ctx.params.tableId) {
ctx.throw(400, "Supplied tableId doesn't match the row's tableId")
throw "Supplied tableId doesn't match the row's tableId"
}
await linkRows.updateLinks({
appId,
@@ -293,54 +239,57 @@ exports.destroy = async function (ctx) {
row,
tableId: row.tableId,
})
// TODO remove special user case in future
if (ctx.params.tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: ctx.params.rowId,
}
await userController.destroyMetadata(ctx)
return { response: ctx.body, row }
} else {
ctx.body = await db.remove(ctx.params.rowId, ctx.params.revId)
const response = await db.remove(ctx.params.rowId, ctx.params.revId)
return { response, row }
}
// for automations include the row that was deleted
ctx.row = row
ctx.status = 200
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
}
exports.validate = async function (ctx) {
const errors = await validate({
exports.bulkDestroy = async ctx => {
const appId = ctx.appId
const { rows } = ctx.request.body
const db = new CouchDB(appId)
let updates = rows.map(row =>
linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_DELETE,
row,
tableId: row.tableId,
})
)
// TODO remove special user case in future
if (ctx.params.tableId === InternalTables.USER_METADATA) {
updates = updates.concat(
rows.map(row => {
ctx.params = {
id: row._id,
}
return userController.destroyMetadata(ctx)
})
)
} else {
await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true })))
}
await Promise.all(updates)
return { response: { ok: true }, rows }
}
exports.validate = async (ctx) => {
return validate({
appId: ctx.appId,
tableId: ctx.params.tableId,
row: ctx.request.body,
})
ctx.status = 200
ctx.body = errors
}
async function validate({ appId, tableId, row, table }) {
if (!table) {
const db = new CouchDB(appId)
table = await db.get(tableId)
}
const errors = {}
for (let fieldName of Object.keys(table.schema)) {
const constraints = cloneDeep(table.schema[fieldName].constraints)
// special case for options, need to always allow unselected (null)
if (
table.schema[fieldName].type === FieldTypes.OPTIONS &&
constraints.inclusion
) {
constraints.inclusion.push(null)
}
const res = validateJs.single(row[fieldName], constraints)
if (res) errors[fieldName] = res
}
return { valid: Object.keys(errors).length === 0, errors }
}
exports.fetchEnrichedRow = async function (ctx) {
exports.fetchEnrichedRow = async (ctx) => {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableId = ctx.params.tableId
@@ -381,39 +330,5 @@ exports.fetchEnrichedRow = async function (ctx) {
)
}
}
ctx.body = row
ctx.status = 200
}
async function bulkDelete(ctx) {
const appId = ctx.appId
const { rows } = ctx.request.body
const db = new CouchDB(appId)
let updates = rows.map(row =>
linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_DELETE,
row,
tableId: row.tableId,
})
)
// TODO remove special user case in future
if (ctx.params.tableId === InternalTables.USER_METADATA) {
updates = updates.concat(
rows.map(row => {
ctx.params = {
id: row._id,
}
return userController.destroyMetadata(ctx)
})
)
} else {
await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true })))
}
await Promise.all(updates)
rows.forEach(row => {
ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
})
return row
}

View file

@@ -0,0 +1,55 @@
const validateJs = require("validate.js")
const { cloneDeep } = require("lodash/fp")
const CouchDB = require("../../../db")
const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
return new Date(value).getTime()
},
// Input is a unix timestamp
format: function (value) {
return new Date(value).toISOString()
},
})
exports.findRow = async (ctx, db, tableId, rowId) => {
let row
// TODO remove special user case in future
if (tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: rowId,
}
await userController.findMetadata(ctx)
row = ctx.body
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the rows tableId"
}
return row
}
exports.validate = async ({ appId, tableId, row, table }) => {
if (!table) {
const db = new CouchDB(appId)
table = await db.get(tableId)
}
const errors = {}
for (let fieldName of Object.keys(table.schema)) {
const constraints = cloneDeep(table.schema[fieldName].constraints)
// special case for options, need to always allow unselected (null)
if (
table.schema[fieldName].type === FieldTypes.OPTIONS &&
constraints.inclusion
) {
constraints.inclusion.push(null)
}
const res = validateJs.single(row[fieldName], constraints)
if (res) errors[fieldName] = res
}
return { valid: Object.keys(errors).length === 0, errors }
}
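
The extracted validate helper returns { valid, errors } instead of writing to the response, which is what lets the internal patch and save handlers simply throw validateResult.errors and leave status codes to the dispatcher. A minimal usage sketch, with a made-up schema and row (passing table directly skips the CouchDB lookup):

// Illustrative values only - the appId, tableId and schema below are invented for the example.
const { validate } = require("./utils") // relative to the internal rows controller

async function exampleValidation() {
  const result = await validate({
    appId: "app_123",
    tableId: "ta_people",
    row: { name: null },
    table: {
      schema: {
        name: { type: "string", constraints: { presence: true } },
      },
    },
  })
  // result -> { valid: false, errors: { name: ["can't be blank"] } }
  return result
}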

View file

@@ -25,6 +25,7 @@ app.use(
jsonLimit: "10mb",
textLimit: "10mb",
enableTypes: ["json", "form", "text"],
parsedMethods: ["POST", "PUT", "PATCH", "DELETE"],
})
)

View file

@@ -50,6 +50,10 @@ function addFilters(query, filters) {
return query
}
function buildRelationships() {
}
function buildCreate(knex, json) {
const { endpoint, body } = json
let query = knex(endpoint.entityId)

View file

@@ -104,6 +104,7 @@ class PostgresIntegration extends Sql {
}
async query(json) {
// TODO: get the schema
const operation = this._operation(json).toLowerCase()
const input = this._query(json)
const response = await internalQuery(this.client, input)