
Merge pull request #2698 from Budibase/feature/view-cloud-replacement

Cloud View replacement
Michael Drury 2021-09-22 18:21:38 +01:00 committed by GitHub
commit f45c780d99
20 changed files with 428 additions and 213 deletions

View file

@@ -104,7 +104,7 @@ describe("third party common", () => {
      _id: id,
      email: email,
    }
-    const response = await db.post(dbUser)
+    const response = await db.put(dbUser)
    dbUser._rev = response.rev
  }

View file

@@ -71,7 +71,7 @@ exports.authenticateThirdParty = async function (
    dbUser = await syncUser(dbUser, thirdPartyUser)
    // create or sync the user
-    const response = await db.post(dbUser)
+    const response = await db.put(dbUser)
    dbUser._rev = response.rev
    // authenticate

View file

@@ -105,6 +105,7 @@
    "pouchdb-all-dbs": "1.0.2",
    "pouchdb-find": "^7.2.2",
    "pouchdb-replication-stream": "1.2.9",
+    "pouchdb-adapter-memory": "^7.2.1",
    "server-destroy": "1.0.1",
    "svelte": "^3.38.2",
    "to-json-schema": "0.2.5",
@@ -131,7 +132,6 @@
    "express": "^4.17.1",
    "jest": "^27.0.5",
    "nodemon": "^2.0.4",
-    "pouchdb-adapter-memory": "^7.2.1",
    "prettier": "^2.3.1",
    "rimraf": "^3.0.2",
    "supertest": "^4.0.2",

View file

@@ -51,7 +51,7 @@ exports.buildSchemaFromDb = async function (ctx) {
  await connector.buildSchema(datasource._id, datasource.entities)
  datasource.entities = connector.tables

-  const response = await db.post(datasource)
+  const response = await db.put(datasource)
  datasource._rev = response.rev

  ctx.body = datasource
@@ -89,7 +89,7 @@ exports.save = async function (ctx) {
    ...ctx.request.body,
  }

-  const response = await db.post(datasource)
+  const response = await db.put(datasource)
  datasource._rev = response.rev

  // Drain connection pools when configuration is changed

View file

@@ -5,17 +5,22 @@ const {
  generateRowID,
  DocumentTypes,
  InternalTables,
+  generateMemoryViewID,
} = require("../../../db/utils")
const userController = require("../user")
const {
  inputProcessing,
  outputProcessing,
+  processAutoColumn,
} = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash")
const { validate, findRow } = require("./utils")
const { fullSearch, paginatedSearch } = require("./internalSearch")
const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
+const inMemoryViews = require("../../../db/inMemoryView")
+const env = require("../../../environment")
+const { migrateToInMemoryView } = require("../view/utils")

const CALCULATION_TYPES = {
  SUM: "sum",
@@ -25,17 +30,84 @@ const CALCULATION_TYPES = {

async function storeResponse(ctx, db, row, oldTable, table) {
  row.type = "row"
-  const response = await db.put(row)
  // don't worry about rev, tables handle rev/lastID updates
+  // if another row has been written since processing this will
+  // handle the auto ID clash
  if (!isEqual(oldTable, table)) {
-    await db.put(table)
+    try {
+      await db.put(table)
+    } catch (err) {
+      if (err.status === 409) {
+        const updatedTable = await db.get(table._id)
+        let response = processAutoColumn(null, updatedTable, row, {
+          reprocessing: true,
+        })
+        await db.put(response.table)
+        row = response.row
+      } else {
+        throw err
+      }
+    }
  }
+  const response = await db.put(row)
  row._rev = response.rev
  // process the row before return, to include relationships
  row = await outputProcessing(ctx, table, row, { squash: false })
  return { row, table }
}

+// doesn't do the outputProcessing
+async function getRawTableData(ctx, db, tableId) {
+  let rows
+  if (tableId === InternalTables.USER_METADATA) {
+    await userController.fetchMetadata(ctx)
+    rows = ctx.body
+  } else {
+    const response = await db.allDocs(
+      getRowParams(tableId, null, {
+        include_docs: true,
+      })
+    )
+    rows = response.rows.map(row => row.doc)
+  }
+  return rows
+}
+
+async function getView(db, viewName) {
+  let viewInfo
+  async function getFromDesignDoc() {
+    const designDoc = await db.get("_design/database")
+    viewInfo = designDoc.views[viewName]
+    return viewInfo
+  }
+  let migrate = false
+  if (env.SELF_HOSTED) {
+    viewInfo = await getFromDesignDoc()
+  } else {
+    try {
+      viewInfo = await db.get(generateMemoryViewID(viewName))
+      if (viewInfo) {
+        viewInfo = viewInfo.view
+      }
+    } catch (err) {
+      // check if it can be retrieved from design doc (needs migrated)
+      if (err.status !== 404) {
+        viewInfo = null
+      } else {
+        viewInfo = await getFromDesignDoc()
+        migrate = !!viewInfo
+      }
+    }
+  }
+  if (migrate) {
+    await migrateToInMemoryView(db, viewName)
+  }
+  if (!viewInfo) {
+    throw "View does not exist."
+  }
+  return viewInfo
+}
+
exports.patch = async ctx => {
  const appId = ctx.appId
  const db = new CouchDB(appId)
@@ -139,15 +211,18 @@ exports.fetchView = async ctx => {
  const db = new CouchDB(appId)
  const { calculation, group, field } = ctx.query
-  const designDoc = await db.get("_design/database")
-  const viewInfo = designDoc.views[viewName]
-  if (!viewInfo) {
-    throw "View does not exist."
-  }
-  const response = await db.query(`database/${viewName}`, {
-    include_docs: !calculation,
-    group: !!group,
-  })
+  const viewInfo = await getView(db, viewName)
+  let response
+  if (env.SELF_HOSTED) {
+    response = await db.query(`database/${viewName}`, {
+      include_docs: !calculation,
+      group: !!group,
+    })
+  } else {
+    const tableId = viewInfo.meta.tableId
+    const data = await getRawTableData(ctx, db, tableId)
+    response = await inMemoryViews.runView(viewInfo, calculation, group, data)
+  }

  let rows
  if (!calculation) {
@@ -191,19 +266,9 @@ exports.fetch = async ctx => {
  const appId = ctx.appId
  const db = new CouchDB(appId)
-  let rows,
-    table = await db.get(ctx.params.tableId)
-  if (ctx.params.tableId === InternalTables.USER_METADATA) {
-    await userController.fetchMetadata(ctx)
-    rows = ctx.body
-  } else {
-    const response = await db.allDocs(
-      getRowParams(ctx.params.tableId, null, {
-        include_docs: true,
-      })
-    )
-    rows = response.rows.map(row => row.doc)
-  }
+  const tableId = ctx.params.tableId
+  let table = await db.get(tableId)
+  let rows = await getRawTableData(ctx, db, tableId)
  return outputProcessing(ctx, table, rows)
}

View file

@@ -145,7 +145,7 @@ exports.save = async function (ctx) {
  if (updatedRows && updatedRows.length !== 0) {
    await db.bulkDocs(updatedRows)
  }
-  const result = await db.post(tableToSave)
+  const result = await db.put(tableToSave)
  tableToSave._rev = result.rev
  tableToSave = await tableSaveFunctions.after(tableToSave)

View file

@@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
  // Populate the table with rows imported from CSV in a bulk update
  const data = await csvParser.transform(dataImport)

+  let finalData = []
  for (let i = 0; i < data.length; i++) {
    let row = data[i]
    row._id = generateRowID(table._id)
    row.tableId = table._id
-    const processed = inputProcessing(user, table, row)
+    const processed = inputProcessing(user, table, row, {
+      noAutoRelationships: true,
+    })
    table = processed.table
    row = processed.row
-    // make sure link rows are up to date
-    row = await linkRows.updateLinks({
-      appId,
-      eventType: linkRows.EventType.ROW_SAVE,
-      row,
-      tableId: row.tableId,
-      table,
-    })

    for (let [fieldName, schema] of Object.entries(table.schema)) {
      // check whether the options need to be updated for inclusion as part of the data import
      if (
@@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
        ]
      }
    }
-    data[i] = row
+    // make sure link rows are up to date
+    finalData.push(
+      linkRows.updateLinks({
+        appId,
+        eventType: linkRows.EventType.ROW_SAVE,
+        row,
+        tableId: row.tableId,
+        table,
+      })
+    )
  }

-  await db.bulkDocs(data)
+  await db.bulkDocs(await Promise.all(finalData))
  let response = await db.put(table)
  table._rev = response._rev
}
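
The import loop above no longer awaits linkRows.updateLinks per row; it queues each promise in finalData and resolves the whole batch with a single Promise.all before the bulk write. A minimal sketch of that queue-then-await pattern, with a stub standing in for updateLinks:

// processRow is a stand-in for linkRows.updateLinks - any async per-row work
const processRow = async row => ({ ...row, processed: true })

async function importRows(rows) {
  const pending = []
  for (const row of rows) {
    // queue instead of awaiting inside the loop, so rows process concurrently
    pending.push(processRow(row))
  }
  // resolve every queued promise; the result can then go to one bulkDocs call
  return Promise.all(pending)
}

importRows([{ _id: "a" }, { _id: "b" }]).then(console.log)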

View file

@@ -2,127 +2,93 @@ const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder")
const { apiFileReturn } = require("../../../utilities/fileSystem")
const exporters = require("./exporters")
+const { saveView, getView, getViews, deleteView } = require("./utils")
const { fetchView } = require("../row")
-const { ViewNames } = require("../../../db/utils")

-const controller = {
-  fetch: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const designDoc = await db.get("_design/database")
-    const response = []
-    for (let name of Object.keys(designDoc.views)) {
-      // Only return custom views, not built ins
-      if (Object.values(ViewNames).indexOf(name) !== -1) {
-        continue
-      }
-      response.push({
-        name,
-        ...designDoc.views[name],
-      })
-    }
-    ctx.body = response
-  },
-  save: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const { originalName, ...viewToSave } = ctx.request.body
-    const designDoc = await db.get("_design/database")
-    const view = viewTemplate(viewToSave)
-    if (!viewToSave.name) {
-      ctx.throw(400, "Cannot create view without a name")
-    }
-    designDoc.views = {
-      ...designDoc.views,
-      [viewToSave.name]: view,
-    }
-    // view has been renamed
-    if (originalName) {
-      delete designDoc.views[originalName]
-    }
-    await db.put(designDoc)
-    // add views to table document
-    const table = await db.get(ctx.request.body.tableId)
-    if (!table.views) table.views = {}
-    if (!view.meta.schema) {
-      view.meta.schema = table.schema
-    }
-    table.views[viewToSave.name] = view.meta
-    if (originalName) {
-      delete table.views[originalName]
-    }
-    await db.put(table)
-    ctx.body = {
-      ...table.views[viewToSave.name],
-      name: viewToSave.name,
-    }
-  },
-  destroy: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const designDoc = await db.get("_design/database")
-    const viewName = decodeURI(ctx.params.viewName)
-    const view = designDoc.views[viewName]
-    delete designDoc.views[viewName]
-    await db.put(designDoc)
-    const table = await db.get(view.meta.tableId)
-    delete table.views[viewName]
-    await db.put(table)
-    ctx.body = view
-  },
-  exportView: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const designDoc = await db.get("_design/database")
-    const viewName = decodeURI(ctx.query.view)
-    const view = designDoc.views[viewName]
-    const format = ctx.query.format
-    if (!format) {
-      ctx.throw(400, "Format must be specified, either csv or json")
-    }
-    if (view) {
-      ctx.params.viewName = viewName
-      // Fetch view rows
-      ctx.query = {
-        group: view.meta.groupBy,
-        calculation: view.meta.calculation,
-        stats: !!view.meta.field,
-        field: view.meta.field,
-      }
-    } else {
-      // table all_ view
-      /* istanbul ignore next */
-      ctx.params.viewName = viewName
-    }
-    await fetchView(ctx)
-    let schema = view && view.meta && view.meta.schema
-    if (!schema) {
-      const tableId = ctx.params.tableId || view.meta.tableId
-      const table = await db.get(tableId)
-      schema = table.schema
-    }
-    // Export part
-    let headers = Object.keys(schema)
-    const exporter = exporters[format]
-    const filename = `${viewName}.${format}`
-    // send down the file
-    ctx.attachment(filename)
-    ctx.body = apiFileReturn(exporter(headers, ctx.body))
-  },
-}
-module.exports = controller
+exports.fetch = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  ctx.body = await getViews(db)
+}
+
+exports.save = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  const { originalName, ...viewToSave } = ctx.request.body
+  const view = viewTemplate(viewToSave)
+  if (!viewToSave.name) {
+    ctx.throw(400, "Cannot create view without a name")
+  }
+  await saveView(db, originalName, viewToSave.name, view)
+  // add views to table document
+  const table = await db.get(ctx.request.body.tableId)
+  if (!table.views) table.views = {}
+  if (!view.meta.schema) {
+    view.meta.schema = table.schema
+  }
+  table.views[viewToSave.name] = view.meta
+  if (originalName) {
+    delete table.views[originalName]
+  }
+  await db.put(table)
+  ctx.body = {
+    ...table.views[viewToSave.name],
+    name: viewToSave.name,
+  }
+}
+
+exports.destroy = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  const viewName = decodeURI(ctx.params.viewName)
+  const view = await deleteView(db, viewName)
+  const table = await db.get(view.meta.tableId)
+  delete table.views[viewName]
+  await db.put(table)
+  ctx.body = view
+}
+
+exports.exportView = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  const viewName = decodeURI(ctx.query.view)
+  const view = await getView(db, viewName)
+  const format = ctx.query.format
+  if (!format) {
+    ctx.throw(400, "Format must be specified, either csv or json")
+  }
+  if (view) {
+    ctx.params.viewName = viewName
+    // Fetch view rows
+    ctx.query = {
+      group: view.meta.groupBy,
+      calculation: view.meta.calculation,
+      stats: !!view.meta.field,
+      field: view.meta.field,
+    }
+  } else {
+    // table all_ view
+    /* istanbul ignore next */
+    ctx.params.viewName = viewName
+  }
+  await fetchView(ctx)
+  let schema = view && view.meta && view.meta.schema
+  if (!schema) {
+    const tableId = ctx.params.tableId || view.meta.tableId
+    const table = await db.get(tableId)
+    schema = table.schema
+  }
+  // Export part
+  let headers = Object.keys(schema)
+  const exporter = exporters[format]
+  const filename = `${viewName}.${format}`
+  // send down the file
+  ctx.attachment(filename)
+  ctx.body = apiFileReturn(exporter(headers, ctx.body))
+}

View file

@@ -0,0 +1,109 @@
+const {
+  ViewNames,
+  generateMemoryViewID,
+  getMemoryViewParams,
+} = require("../../../db/utils")
+const env = require("../../../environment")
+
+exports.getView = async (db, viewName) => {
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    return designDoc.views[viewName]
+  } else {
+    const viewDoc = await db.get(generateMemoryViewID(viewName))
+    return viewDoc.view
+  }
+}
+
+exports.getViews = async db => {
+  const response = []
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    for (let name of Object.keys(designDoc.views)) {
+      // Only return custom views, not built ins
+      if (Object.values(ViewNames).indexOf(name) !== -1) {
+        continue
+      }
+      response.push({
+        name,
+        ...designDoc.views[name],
+      })
+    }
+  } else {
+    const views = (
+      await db.allDocs(
+        getMemoryViewParams({
+          include_docs: true,
+        })
+      )
+    ).rows.map(row => row.doc)
+    for (let viewDoc of views) {
+      response.push({
+        name: viewDoc.name,
+        ...viewDoc.view,
+      })
+    }
+  }
+  return response
+}
+
+exports.saveView = async (db, originalName, viewName, viewTemplate) => {
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    designDoc.views = {
+      ...designDoc.views,
+      [viewName]: viewTemplate,
+    }
+    // view has been renamed
+    if (originalName) {
+      delete designDoc.views[originalName]
+    }
+    await db.put(designDoc)
+  } else {
+    const id = generateMemoryViewID(viewName)
+    const originalId = originalName ? generateMemoryViewID(originalName) : null
+    const viewDoc = {
+      _id: id,
+      view: viewTemplate,
+      name: viewName,
+      tableId: viewTemplate.meta.tableId,
+    }
+    try {
+      const old = await db.get(id)
+      if (originalId) {
+        const originalDoc = await db.get(originalId)
+        await db.remove(originalDoc._id, originalDoc._rev)
+      }
+      if (old && old._rev) {
+        viewDoc._rev = old._rev
+      }
+    } catch (err) {
+      // didn't exist, just skip
+    }
+    await db.put(viewDoc)
+  }
+}
+
+exports.deleteView = async (db, viewName) => {
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    const view = designDoc.views[viewName]
+    delete designDoc.views[viewName]
+    await db.put(designDoc)
+    return view
+  } else {
+    const id = generateMemoryViewID(viewName)
+    const viewDoc = await db.get(id)
+    await db.remove(viewDoc._id, viewDoc._rev)
+    return viewDoc.view
+  }
+}
+
+exports.migrateToInMemoryView = async (db, viewName) => {
+  // delete the view initially
+  const designDoc = await db.get("_design/database")
+  const view = designDoc.views[viewName]
+  delete designDoc.views[viewName]
+  await db.put(designDoc)
+  await exports.saveView(db, null, viewName, view)
+}
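
A hedged usage sketch for the helpers above: renaming a view by fetching its stored template and re-saving it under the new name. The helper names are real, but this rename flow is an assumption about how they compose; saveView handles both backends (design doc when self-hosted, standalone "view" doc in cloud) and removes the old name either way.

// Illustrative only - a rename built from the exported helpers
const { getView, saveView } = require("./utils")

async function renameView(db, oldName, newName) {
  const template = await getView(db, oldName) // stored map/reduce + meta
  await saveView(db, oldName, newName, template) // drops the old entry too
  return getView(db, newName)
}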

View file

@@ -205,7 +205,7 @@ describe("/views", () => {
  })

  describe("exportView", () => {
-    it("should be able to delete a view", async () => {
+    it("should be able to export a view", async () => {
      await config.createTable(priceTable())
      await config.createRow()
      const view = await config.createView()

View file

@@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")

exports.definition = {
  name: "Create Row",
@@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
    }
  }
  // have to clean up the row, remove the table from it
-  const ctx = {
+  const ctx = buildCtx(appId, emitter, {
+    body: inputs.row,
    params: {
      tableId: inputs.row.tableId,
    },
-    request: {
-      body: inputs.row,
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })

  try {
    inputs.row = await automationUtils.cleanUpRow(
@@ -86,7 +83,7 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
      inputs.row.tableId,
      inputs.row
    )
-    if (env.isProd()) {
+    if (env.USE_QUOTAS) {
      await usage.update(apiKey, usage.Properties.ROW, 1)
    }
    await rowController.save(ctx)
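
buildCtx lives in ./utils and is not part of this diff; judging from the call sites in these automation steps, it assembles the minimal Koa-style context the row and table controllers expect. A hypothetical sketch only, the real helper may differ:

// Hypothetical reconstruction of buildCtx (the actual ./utils code is not in this PR)
exports.buildCtx = (appId, emitter, { body, params } = {}) => {
  const ctx = {
    appId,
    params: params || {},
    eventEmitter: emitter,
    request: {},
  }
  if (body) {
    ctx.request.body = body
  }
  return ctx
}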

View file

@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")

exports.definition = {
  description: "Delete a row from your database",
@@ -60,19 +61,16 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
      },
    }
  }
-  let ctx = {
+  let ctx = buildCtx(appId, emitter, {
+    body: {
+      _id: inputs.id,
+      _rev: inputs.revision,
+    },
    params: {
      tableId: inputs.tableId,
    },
-    request: {
-      body: {
-        _id: inputs.id,
-        _rev: inputs.revision,
-      },
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })

  try {
    if (env.isProd()) {

View file

@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const tableController = require("../../api/controllers/table")
const { FieldTypes } = require("../../constants")
+const { buildCtx } = require("./utils")

const SortOrders = {
  ASCENDING: "ascending",
@@ -70,12 +71,11 @@ exports.definition = {
}

async function getTable(appId, tableId) {
-  const ctx = {
+  const ctx = buildCtx(appId, null, {
    params: {
      id: tableId,
    },
-    appId,
-  }
+  })
  await tableController.find(ctx)
  return ctx.body
}
@@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
    sortType =
      fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
  }
-  const ctx = {
+  const ctx = buildCtx(appId, null, {
    params: {
      tableId,
    },
-    request: {
-      body: {
-        sortOrder,
-        sortType,
-        sort: sortColumn,
-        query: filters || {},
-        limit,
-      },
-    },
-    appId,
-  }
+    body: {
+      sortOrder,
+      sortType,
+      sort: sortColumn,
+      query: filters || {},
+      limit,
+    },
+  })

  try {
    await rowController.search(ctx)
    return {

View file

@@ -1,5 +1,6 @@
const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
+const { buildCtx } = require("./utils")

exports.definition = {
  name: "Update Row",
@@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
  }

  // have to clean up the row, remove the table from it
-  const ctx = {
+  const ctx = buildCtx(appId, emitter, {
+    body: {
+      ...inputs.row,
+      _id: inputs.rowId,
+    },
    params: {
      rowId: inputs.rowId,
    },
-    request: {
-      body: {
-        ...inputs.row,
-        _id: inputs.rowId,
-      },
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })

  try {
    inputs.row = await automationUtils.cleanUpRowById(

View file

@ -0,0 +1,48 @@
const PouchDB = require("pouchdb")
const memory = require("pouchdb-adapter-memory")
const newid = require("./newid")
PouchDB.plugin(memory)
const Pouch = PouchDB.defaults({
prefix: undefined,
adapter: "memory",
})
exports.runView = async (view, calculation, group, data) => {
// use a different ID each time for the DB, make sure they
// are always unique for each query, don't want overlap
// which could cause 409s
const db = new Pouch(newid())
// write all the docs to the in memory Pouch (remove revs)
await db.bulkDocs(
data.map(row => ({
...row,
_rev: undefined,
}))
)
let fn = (doc, emit) => emit(doc._id)
eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
const queryFns = {
meta: view.meta,
map: fn,
}
if (view.reduce) {
queryFns.reduce = view.reduce
}
const response = await db.query(queryFns, {
include_docs: !calculation,
group: !!group,
})
// need to fix the revs to be totally accurate
for (let row of response.rows) {
if (!row._rev || !row._id) {
continue
}
const found = data.find(possible => possible._id === row._id)
if (found) {
row._rev = found._rev
}
}
await db.destroy()
return response
}
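
A hedged usage sketch for runView: the view object mirrors what the view builder stores (a stringified map function plus meta), and the field values here are made up for illustration.

// Illustrative only - run a stored view over rows already fetched from the app DB
const inMemoryViews = require("./inMemoryView")

async function example(rows) {
  const view = {
    meta: { tableId: "ta_example" }, // hypothetical table ID
    map: `function (doc) {
      if (doc.tableId === "ta_example") {
        emit(doc._id)
      }
    }`,
  }
  // no calculation and no grouping, so the matching docs come back directly
  const response = await inMemoryViews.runView(view, null, null, rows)
  return response.rows
}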

View file

@@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
  // create DBs
  const db = new CouchDB(appId)
  const linkedRowIds = links.map(link => link.id)
-  let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+  const uniqueRowIds = [...new Set(linkedRowIds)]
+  let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
    row => row.doc
  )
+  // convert the unique db rows back to a full list of linked rows
+  const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
  // need to handle users as specific cases
  let [users, other] = partition(linked, linkRow =>
    linkRow._id.startsWith(USER_METDATA_PREFIX)
@@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
  let linkController = new LinkController(args)
  try {
    if (
-      !(await linkController.doesTableHaveLinkedFields()) &&
+      !(await linkController.doesTableHaveLinkedFields(table)) &&
      (oldTable == null ||
        !(await linkController.doesTableHaveLinkedFields(oldTable)))
    ) {

View file

@@ -39,6 +39,7 @@ const DocumentTypes = {
  QUERY: "query",
  DEPLOYMENTS: "deployments",
  METADATA: "metadata",
+  MEM_VIEW: "view",
}

const ViewNames = {
@@ -348,6 +349,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
  return getDocParams(DocumentTypes.METADATA, docId, otherProps)
}

+exports.generateMemoryViewID = viewName => {
+  return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
+}
+
+exports.getMemoryViewParams = (otherProps = {}) => {
+  return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
+}
+
/**
 * This can be used with the db.allDocs to get a list of IDs
 */
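
Assuming SEPARATOR is the underscore used by the other ID helpers in this file (an assumption - the constant is defined outside this hunk), the new IDs come out as a simple prefix:

// Illustrative only - SEPARATOR's value is assumed to be "_"
generateMemoryViewID("Active Users") // -> "view_Active Users"
// getMemoryViewParams({ include_docs: true }) then matches every "view_" doc via allDocs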

View file

@@ -26,7 +26,7 @@ module.exports = {
  COUCH_DB_URL: process.env.COUCH_DB_URL,
  MINIO_URL: process.env.MINIO_URL,
  WORKER_URL: process.env.WORKER_URL,
-  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
+  SELF_HOSTED: process.env.SELF_HOSTED,
  AWS_REGION: process.env.AWS_REGION,
  ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
@@ -66,3 +66,10 @@ module.exports = {
    return !isDev()
  },
}
+
+// convert any strings to numbers if required, like "0" would be true otherwise
+for (let [key, value] of Object.entries(module.exports)) {
+  if (typeof value === "string" && !isNaN(parseInt(value))) {
+    module.exports[key] = parseInt(value)
+  }
+}
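
The module-level loop exists because every process.env value is a string, and any non-empty string - including "0" - is truthy. Coercing numeric strings once at load time lets flags like SELF_HOSTED be tested with plain truthiness:

// why the coercion matters: env vars are always strings
const raw = "0"
console.log(Boolean(raw)) // true - an un-coerced "0" would enable the flag
console.log(Boolean(parseInt(raw, 10))) // false - after coercion, 0 is falsy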

View file

@@ -89,10 +89,16 @@ const TYPE_TRANSFORM_MAP = {
 * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
 * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
 * @param {Object} row The row which is to be updated with information for the auto columns.
+ * @param {Object} opts specific options for function to carry out optional features.
 * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
 * for automatic ID purposes.
 */
-function processAutoColumn(user, table, row) {
+function processAutoColumn(
+  user,
+  table,
+  row,
+  opts = { reprocessing: false, noAutoRelationships: false }
+) {
  let now = new Date().toISOString()
  // if a row doesn't have a revision then it doesn't exist yet
  const creating = !row._rev
@@ -102,7 +108,7 @@ function processAutoColumn(user, table, row) {
    }
    switch (schema.subtype) {
      case AutoFieldSubTypes.CREATED_BY:
-        if (creating) {
+        if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
          row[key] = [user.userId]
        }
        break
@@ -112,7 +118,9 @@ function processAutoColumn(user, table, row) {
        }
        break
      case AutoFieldSubTypes.UPDATED_BY:
-        row[key] = [user.userId]
+        if (!opts.reprocessing && !opts.noAutoRelationships) {
+          row[key] = [user.userId]
+        }
        break
      case AutoFieldSubTypes.UPDATED_AT:
        row[key] = now
@@ -127,6 +135,7 @@ function processAutoColumn(user, table, row) {
  }
  return { table, row }
}
+exports.processAutoColumn = processAutoColumn

/**
 * This will coerce a value to the correct types based on the type transform map
@@ -151,9 +160,15 @@ exports.coerce = (row, type) => {
 * @param {object} user the user which is performing the input.
 * @param {object} row the row which is being created/updated.
 * @param {object} table the table which the row is being saved to.
+ * @param {object} opts some input processing options (like disabling auto-column relationships).
 * @returns {object} the row which has been prepared to be written to the DB.
 */
-exports.inputProcessing = (user = {}, table, row) => {
+exports.inputProcessing = (
+  user = {},
+  table,
+  row,
+  opts = { noAutoRelationships: false }
+) => {
  let clonedRow = cloneDeep(row)
  // need to copy the table so it can be differenced on way out
  const copiedTable = cloneDeep(table)
@@ -176,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
    }
  }
  // handle auto columns - this returns an object like {table, row}
-  return processAutoColumn(user, copiedTable, clonedRow)
+  return processAutoColumn(user, copiedTable, clonedRow, opts)
}

/**

View file

@@ -11,7 +11,7 @@ exports.save = async function (ctx) {
  }

  try {
-    const response = await db.post(workspaceDoc)
+    const response = await db.put(workspaceDoc)
    ctx.body = {
      _id: response.id,
      _rev: response.rev,
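
The post-to-put swap repeated throughout this PR matters because db.post asks CouchDB to mint a random ID, while db.put writes the document under the _id it already carries - and Budibase documents arrive with pre-generated IDs. A minimal sketch of the difference, with an illustrative doc shape:

// put() creates or updates the doc at its own _id; post() would invent a new one
const doc = { _id: "workspace_main", name: "Main workspace" } // hypothetical ID

async function save(db) {
  const response = await db.put(doc) // 409s on a stale _rev instead of duplicating
  doc._rev = response.rev
  return doc
}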