diff --git a/packages/auth/src/middleware/passport/tests/third-party-common.spec.js b/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
index ff38a01fbb..1ace65ba40 100644
--- a/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
+++ b/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
@@ -104,7 +104,7 @@ describe("third party common", () => {
       _id: id,
       email: email,
     }
-    const response = await db.post(dbUser)
+    const response = await db.put(dbUser)
     dbUser._rev = response.rev
   }
 
diff --git a/packages/auth/src/middleware/passport/third-party-common.js b/packages/auth/src/middleware/passport/third-party-common.js
index 7c03944232..c25aa3e0b0 100644
--- a/packages/auth/src/middleware/passport/third-party-common.js
+++ b/packages/auth/src/middleware/passport/third-party-common.js
@@ -71,7 +71,7 @@ exports.authenticateThirdParty = async function (
   dbUser = await syncUser(dbUser, thirdPartyUser)
 
   // create or sync the user
-  const response = await db.post(dbUser)
+  const response = await db.put(dbUser)
   dbUser._rev = response.rev
 
   // authenticate
diff --git a/packages/server/package.json b/packages/server/package.json
index 420e7082a1..671c062fcd 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -105,6 +105,7 @@
     "pouchdb-all-dbs": "1.0.2",
     "pouchdb-find": "^7.2.2",
     "pouchdb-replication-stream": "1.2.9",
+    "pouchdb-adapter-memory": "^7.2.1",
     "server-destroy": "1.0.1",
     "svelte": "^3.38.2",
     "to-json-schema": "0.2.5",
@@ -131,7 +132,6 @@
     "express": "^4.17.1",
     "jest": "^27.0.5",
     "nodemon": "^2.0.4",
-    "pouchdb-adapter-memory": "^7.2.1",
     "prettier": "^2.3.1",
     "rimraf": "^3.0.2",
     "supertest": "^4.0.2",
diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js
index 38b6e68932..4a2fd7d86a 100644
--- a/packages/server/src/api/controllers/datasource.js
+++ b/packages/server/src/api/controllers/datasource.js
@@ -51,7 +51,7 @@ exports.buildSchemaFromDb = async function (ctx) {
   await connector.buildSchema(datasource._id, datasource.entities)
   datasource.entities = connector.tables
 
-  const response = await db.post(datasource)
+  const response = await db.put(datasource)
   datasource._rev = response.rev
 
   ctx.body = datasource
@@ -89,7 +89,7 @@ exports.save = async function (ctx) {
     ...ctx.request.body,
  }
 
-  const response = await db.post(datasource)
+  const response = await db.put(datasource)
   datasource._rev = response.rev
 
   // Drain connection pools when configuration is changed
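A note on the recurring post-to-put swap above (and in the table, workspace and view files below): these documents all appear to arrive with a preset _id, and db.put() writes to exactly that _id, while db.post() is PouchDB's API for creating a document under a generated id. A minimal sketch of the intended usage, with hypothetical document contents rather than code from this patch:

const PouchDB = require("pouchdb")

async function demo() {
  const db = new PouchDB("put-vs-post-demo")
  // the document already knows its own identity, as datasources,
  // tables and users do in the changes above
  const doc = { _id: "datasource_abc123", source: "postgres" }

  // put() writes to the preset _id and returns the new revision;
  // put() on a doc with no _id is an error, post() would invent one
  const response = await db.put(doc)
  doc._rev = response.rev

  // a later put() carrying the stored _rev updates the same document
  await db.put({ ...doc, name: "updated" })
  await db.destroy()
}

demo()
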
diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js
index 2299a20580..d429c14cc7 100644
--- a/packages/server/src/api/controllers/row/internal.js
+++ b/packages/server/src/api/controllers/row/internal.js
@@ -5,17 +5,22 @@ const {
   generateRowID,
   DocumentTypes,
   InternalTables,
+  generateMemoryViewID,
 } = require("../../../db/utils")
 const userController = require("../user")
 const {
   inputProcessing,
   outputProcessing,
+  processAutoColumn,
 } = require("../../../utilities/rowProcessor")
 const { FieldTypes } = require("../../../constants")
 const { isEqual } = require("lodash")
 const { validate, findRow } = require("./utils")
 const { fullSearch, paginatedSearch } = require("./internalSearch")
 const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
+const inMemoryViews = require("../../../db/inMemoryView")
+const env = require("../../../environment")
+const { migrateToInMemoryView } = require("../view/utils")
 
 const CALCULATION_TYPES = {
   SUM: "sum",
@@ -25,17 +30,84 @@ const CALCULATION_TYPES = {
 async function storeResponse(ctx, db, row, oldTable, table) {
   row.type = "row"
-  const response = await db.put(row)
   // don't worry about rev, tables handle rev/lastID updates
+  // if another row has been written since processing began, this
+  // handles the auto ID clash
   if (!isEqual(oldTable, table)) {
-    await db.put(table)
+    try {
+      await db.put(table)
+    } catch (err) {
+      if (err.status === 409) {
+        const updatedTable = await db.get(table._id)
+        let response = processAutoColumn(null, updatedTable, row, {
+          reprocessing: true,
+        })
+        await db.put(response.table)
+        row = response.row
+      } else {
+        throw err
+      }
+    }
   }
+  const response = await db.put(row)
   row._rev = response.rev
   // process the row before return, to include relationships
   row = await outputProcessing(ctx, table, row, { squash: false })
   return { row, table }
 }
 
+// gets the raw rows back, without running outputProcessing on them
+async function getRawTableData(ctx, db, tableId) {
+  let rows
+  if (tableId === InternalTables.USER_METADATA) {
+    await userController.fetchMetadata(ctx)
+    rows = ctx.body
+  } else {
+    const response = await db.allDocs(
+      getRowParams(tableId, null, {
+        include_docs: true,
+      })
+    )
+    rows = response.rows.map(row => row.doc)
+  }
+  return rows
+}
+
+async function getView(db, viewName) {
+  let viewInfo
+  async function getFromDesignDoc() {
+    const designDoc = await db.get("_design/database")
+    viewInfo = designDoc.views[viewName]
+    return viewInfo
+  }
+  let migrate = false
+  if (env.SELF_HOSTED) {
+    viewInfo = await getFromDesignDoc()
+  } else {
+    try {
+      viewInfo = await db.get(generateMemoryViewID(viewName))
+      if (viewInfo) {
+        viewInfo = viewInfo.view
+      }
+    } catch (err) {
+      // check if it can be retrieved from the design doc (needs migrating)
+      if (err.status !== 404) {
+        viewInfo = null
+      } else {
+        viewInfo = await getFromDesignDoc()
+        migrate = !!viewInfo
+      }
+    }
+  }
+  if (migrate) {
+    await migrateToInMemoryView(db, viewName)
+  }
+  if (!viewInfo) {
+    throw "View does not exist."
+  }
+  return viewInfo
+}
+
 exports.patch = async ctx => {
   const appId = ctx.appId
   const db = new CouchDB(appId)
@@ -139,15 +211,18 @@ exports.fetchView = async ctx => {
   const db = new CouchDB(appId)
   const { calculation, group, field } = ctx.query
 
-  const designDoc = await db.get("_design/database")
-  const viewInfo = designDoc.views[viewName]
-  if (!viewInfo) {
-    throw "View does not exist."
+  const viewInfo = await getView(db, viewName)
+  let response
+  if (env.SELF_HOSTED) {
+    response = await db.query(`database/${viewName}`, {
+      include_docs: !calculation,
+      group: !!group,
+    })
+  } else {
+    const tableId = viewInfo.meta.tableId
+    const data = await getRawTableData(ctx, db, tableId)
+    response = await inMemoryViews.runView(viewInfo, calculation, group, data)
   }
-  const response = await db.query(`database/${viewName}`, {
-    include_docs: !calculation,
-    group: !!group,
-  })
 
   let rows
   if (!calculation) {
@@ -191,19 +266,9 @@ exports.fetch = async ctx => {
   const appId = ctx.appId
   const db = new CouchDB(appId)
 
-  let rows,
-    table = await db.get(ctx.params.tableId)
-  if (ctx.params.tableId === InternalTables.USER_METADATA) {
-    await userController.fetchMetadata(ctx)
-    rows = ctx.body
-  } else {
-    const response = await db.allDocs(
-      getRowParams(ctx.params.tableId, null, {
-        include_docs: true,
-      })
-    )
-    rows = response.rows.map(row => row.doc)
-  }
+  const tableId = ctx.params.tableId
+  let table = await db.get(tableId)
+  let rows = await getRawTableData(ctx, db, tableId)
   return outputProcessing(ctx, table, rows)
 }
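The try/catch added to storeResponse is a standard CouchDB conflict retry: when two rows are written close together, both try to bump the table's auto ID counter, the second table write 409s, and the row is then reprocessed against a freshly read table. Reduced to its general shape in a sketch with assumed names (putWithConflictRetry and reapply are illustrative, not from the patch):

// optimistic write; on a 409 conflict, re-read the latest revision and
// reapply the change on top of it before writing again
async function putWithConflictRetry(db, doc, reapply) {
  try {
    return await db.put(doc)
  } catch (err) {
    if (err.status !== 409) {
      throw err
    }
    const latest = await db.get(doc._id)
    return db.put(reapply(latest))
  }
}

// usage: putWithConflictRetry(db, table, latest => ({ ...latest, ...changes }))
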
diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js
index 60b5167f66..c7b72cf1c8 100644
--- a/packages/server/src/api/controllers/table/index.js
+++ b/packages/server/src/api/controllers/table/index.js
@@ -145,7 +145,7 @@ exports.save = async function (ctx) {
   if (updatedRows && updatedRows.length !== 0) {
     await db.bulkDocs(updatedRows)
   }
-  const result = await db.post(tableToSave)
+  const result = await db.put(tableToSave)
   tableToSave._rev = result.rev
 
   tableToSave = await tableSaveFunctions.after(tableToSave)
diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js
index 154a9ba8f5..d263002da6 100644
--- a/packages/server/src/api/controllers/table/utils.js
+++ b/packages/server/src/api/controllers/table/utils.js
@@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
   // Populate the table with rows imported from CSV in a bulk update
   const data = await csvParser.transform(dataImport)
 
+  let finalData = []
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
     row._id = generateRowID(table._id)
     row.tableId = table._id
-    const processed = inputProcessing(user, table, row)
+    const processed = inputProcessing(user, table, row, {
+      noAutoRelationships: true,
+    })
     table = processed.table
     row = processed.row
 
-    // make sure link rows are up to date
-    row = await linkRows.updateLinks({
-      appId,
-      eventType: linkRows.EventType.ROW_SAVE,
-      row,
-      tableId: row.tableId,
-      table,
-    })
-
     for (let [fieldName, schema] of Object.entries(table.schema)) {
       // check whether the options need to be updated for inclusion as part of the data import
       if (
@@ -98,10 +92,20 @@
         ]
       }
     }
-    data[i] = row
+
+    // make sure link rows are up to date
+    finalData.push(
+      linkRows.updateLinks({
+        appId,
+        eventType: linkRows.EventType.ROW_SAVE,
+        row,
+        tableId: row.tableId,
+        table,
+      })
+    )
   }
 
-  await db.bulkDocs(data)
+  await db.bulkDocs(await Promise.all(finalData))
   let response = await db.put(table)
   table._rev = response._rev
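The import loop above no longer awaits each linkRows.updateLinks call in turn; it queues the promises and resolves them together, so the link updates run concurrently and the rows land in one bulk write. A sketch of just that shape (updateRow stands in for linkRows.updateLinks):

async function importRows(db, rows, updateRow) {
  const pending = []
  for (let row of rows) {
    // start the async update without blocking the loop
    pending.push(updateRow(row))
  }
  // Promise.all preserves input order, so the bulk write matches the rows
  await db.bulkDocs(await Promise.all(pending))
}
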
diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js
index 3d0f236fce..ecaee0f32f 100644
--- a/packages/server/src/api/controllers/view/index.js
+++ b/packages/server/src/api/controllers/view/index.js
@@ -2,127 +2,93 @@
 const CouchDB = require("../../../db")
 const viewTemplate = require("./viewBuilder")
 const { apiFileReturn } = require("../../../utilities/fileSystem")
 const exporters = require("./exporters")
+const { saveView, getView, getViews, deleteView } = require("./utils")
 const { fetchView } = require("../row")
-const { ViewNames } = require("../../../db/utils")
 
-const controller = {
-  fetch: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const designDoc = await db.get("_design/database")
-    const response = []
-
-    for (let name of Object.keys(designDoc.views)) {
-      // Only return custom views, not built ins
-      if (Object.values(ViewNames).indexOf(name) !== -1) {
-        continue
-      }
-      response.push({
-        name,
-        ...designDoc.views[name],
-      })
-    }
-
-    ctx.body = response
-  },
-  save: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const { originalName, ...viewToSave } = ctx.request.body
-    const designDoc = await db.get("_design/database")
-    const view = viewTemplate(viewToSave)
-
-    if (!viewToSave.name) {
-      ctx.throw(400, "Cannot create view without a name")
-    }
-
-    designDoc.views = {
-      ...designDoc.views,
-      [viewToSave.name]: view,
-    }
-
-    // view has been renamed
-    if (originalName) {
-      delete designDoc.views[originalName]
-    }
-
-    await db.put(designDoc)
-
-    // add views to table document
-    const table = await db.get(ctx.request.body.tableId)
-    if (!table.views) table.views = {}
-    if (!view.meta.schema) {
-      view.meta.schema = table.schema
-    }
-    table.views[viewToSave.name] = view.meta
-
-    if (originalName) {
-      delete table.views[originalName]
-    }
-
-    await db.put(table)
-
-    ctx.body = {
-      ...table.views[viewToSave.name],
-      name: viewToSave.name,
-    }
-  },
-  destroy: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const designDoc = await db.get("_design/database")
-    const viewName = decodeURI(ctx.params.viewName)
-    const view = designDoc.views[viewName]
-    delete designDoc.views[viewName]
-
-    await db.put(designDoc)
-
-    const table = await db.get(view.meta.tableId)
-    delete table.views[viewName]
-    await db.put(table)
-
-    ctx.body = view
-  },
-  exportView: async ctx => {
-    const db = new CouchDB(ctx.appId)
-    const designDoc = await db.get("_design/database")
-    const viewName = decodeURI(ctx.query.view)
-
-    const view = designDoc.views[viewName]
-    const format = ctx.query.format
-    if (!format) {
-      ctx.throw(400, "Format must be specified, either csv or json")
-    }
-
-    if (view) {
-      ctx.params.viewName = viewName
-      // Fetch view rows
-      ctx.query = {
-        group: view.meta.groupBy,
-        calculation: view.meta.calculation,
-        stats: !!view.meta.field,
-        field: view.meta.field,
-      }
-    } else {
-      // table all_ view
-      /* istanbul ignore next */
-      ctx.params.viewName = viewName
-    }
-
-    await fetchView(ctx)
-
-    let schema = view && view.meta && view.meta.schema
-    if (!schema) {
-      const tableId = ctx.params.tableId || view.meta.tableId
-      const table = await db.get(tableId)
-      schema = table.schema
-    }
-
-    // Export part
-    let headers = Object.keys(schema)
-    const exporter = exporters[format]
-    const filename = `${viewName}.${format}`
-    // send down the file
-    ctx.attachment(filename)
-    ctx.body = apiFileReturn(exporter(headers, ctx.body))
-  },
+exports.fetch = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  ctx.body = await getViews(db)
 }
 
-module.exports = controller
+exports.save = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  const { originalName, ...viewToSave } = ctx.request.body
+  const view = viewTemplate(viewToSave)
+
+  if (!viewToSave.name) {
+    ctx.throw(400, "Cannot create view without a name")
+  }
+
+  await saveView(db, originalName, viewToSave.name, view)
+
+  // add views to table document
+  const table = await db.get(ctx.request.body.tableId)
+  if (!table.views) table.views = {}
+  if (!view.meta.schema) {
+    view.meta.schema = table.schema
+  }
+  table.views[viewToSave.name] = view.meta
+  if (originalName) {
+    delete table.views[originalName]
+  }
+  await db.put(table)
+
+  ctx.body = {
+    ...table.views[viewToSave.name],
+    name: viewToSave.name,
+  }
+}
+
+exports.destroy = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  const viewName = decodeURI(ctx.params.viewName)
+  const view = await deleteView(db, viewName)
+  const table = await db.get(view.meta.tableId)
+  delete table.views[viewName]
+  await db.put(table)
+
+  ctx.body = view
+}
+
+exports.exportView = async ctx => {
+  const db = new CouchDB(ctx.appId)
+  const viewName = decodeURI(ctx.query.view)
+  const view = await getView(db, viewName)
+
+  const format = ctx.query.format
+  if (!format) {
+    ctx.throw(400, "Format must be specified, either csv or json")
+  }
+
+  if (view) {
+    ctx.params.viewName = viewName
+    // Fetch view rows
+    ctx.query = {
+      group: view.meta.groupBy,
+      calculation: view.meta.calculation,
+      stats: !!view.meta.field,
+      field: view.meta.field,
+    }
+  } else {
+    // table all_ view
+    /* istanbul ignore next */
+    ctx.params.viewName = viewName
+  }
+
+  await fetchView(ctx)
+
+  let schema = view && view.meta && view.meta.schema
+  if (!schema) {
+    const tableId = ctx.params.tableId || view.meta.tableId
+    const table = await db.get(tableId)
+    schema = table.schema
+  }
+
+  // Export part
+  let headers = Object.keys(schema)
+  const exporter = exporters[format]
+  const filename = `${viewName}.${format}`
+  // send down the file
+  ctx.attachment(filename)
+  ctx.body = apiFileReturn(exporter(headers, ctx.body))
+}
diff --git a/packages/server/src/api/controllers/view/utils.js b/packages/server/src/api/controllers/view/utils.js
new file mode 100644
index 0000000000..c93604177f
--- /dev/null
+++ b/packages/server/src/api/controllers/view/utils.js
@@ -0,0 +1,109 @@
+const {
+  ViewNames,
+  generateMemoryViewID,
+  getMemoryViewParams,
+} = require("../../../db/utils")
+const env = require("../../../environment")
+
+exports.getView = async (db, viewName) => {
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    return designDoc.views[viewName]
+  } else {
+    const viewDoc = await db.get(generateMemoryViewID(viewName))
+    return viewDoc.view
+  }
+}
+
+exports.getViews = async db => {
+  const response = []
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    for (let name of Object.keys(designDoc.views)) {
+      // Only return custom views, not built ins
+      if (Object.values(ViewNames).indexOf(name) !== -1) {
+        continue
+      }
+      response.push({
+        name,
+        ...designDoc.views[name],
+      })
+    }
+  } else {
+    const views = (
+      await db.allDocs(
+        getMemoryViewParams({
+          include_docs: true,
+        })
+      )
+    ).rows.map(row => row.doc)
+    for (let viewDoc of views) {
+      response.push({
+        name: viewDoc.name,
+        ...viewDoc.view,
+      })
+    }
+  }
+  return response
+}
+
+exports.saveView = async (db, originalName, viewName, viewTemplate) => {
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    designDoc.views = {
+      ...designDoc.views,
+      [viewName]: viewTemplate,
+    }
+    // view has been renamed
+    if (originalName) {
+      delete designDoc.views[originalName]
+    }
+    await db.put(designDoc)
+  } else {
+    const id = generateMemoryViewID(viewName)
+    const originalId = originalName ? generateMemoryViewID(originalName) : null
+    const viewDoc = {
+      _id: id,
+      view: viewTemplate,
+      name: viewName,
+      tableId: viewTemplate.meta.tableId,
+    }
+    try {
+      const old = await db.get(id)
+      if (originalId) {
+        const originalDoc = await db.get(originalId)
+        await db.remove(originalDoc._id, originalDoc._rev)
+      }
+      if (old && old._rev) {
+        viewDoc._rev = old._rev
+      }
+    } catch (err) {
+      // didn't exist, just skip
+    }
+    await db.put(viewDoc)
+  }
+}
+
+exports.deleteView = async (db, viewName) => {
+  if (env.SELF_HOSTED) {
+    const designDoc = await db.get("_design/database")
+    const view = designDoc.views[viewName]
+    delete designDoc.views[viewName]
+    await db.put(designDoc)
+    return view
+  } else {
+    const id = generateMemoryViewID(viewName)
+    const viewDoc = await db.get(id)
+    await db.remove(viewDoc._id, viewDoc._rev)
+    return viewDoc.view
+  }
+}
+
+exports.migrateToInMemoryView = async (db, viewName) => {
+  // delete the view initially
+  const designDoc = await db.get("_design/database")
+  const view = designDoc.views[viewName]
+  delete designDoc.views[viewName]
+  await db.put(designDoc)
+  await exports.saveView(db, null, viewName, view)
+}
diff --git a/packages/server/src/api/routes/tests/view.spec.js b/packages/server/src/api/routes/tests/view.spec.js
index 458da6e023..b1c5f655c6 100644
--- a/packages/server/src/api/routes/tests/view.spec.js
+++ b/packages/server/src/api/routes/tests/view.spec.js
@@ -205,7 +205,7 @@ describe("/views", () => {
   })
 
   describe("exportView", () => {
-    it("should be able to delete a view", async () => {
+    it("should be able to export a view", async () => {
       await config.createTable(priceTable())
       await config.createRow()
       const view = await config.createView()
diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js
index 9706126438..9033004578 100644
--- a/packages/server/src/automations/steps/createRow.js
+++ b/packages/server/src/automations/steps/createRow.js
@@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
 const automationUtils = require("../automationUtils")
 const env = require("../../environment")
 const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
 
 exports.definition = {
   name: "Create Row",
@@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
     }
   }
   // have to clean up the row, remove the table from it
-  const ctx = {
+  const ctx = buildCtx(appId, emitter, {
+    body: inputs.row,
     params: {
       tableId: inputs.row.tableId,
     },
-    request: {
-      body: inputs.row,
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })
 
   try {
     inputs.row = await automationUtils.cleanUpRow(
@@ -86,7 +83,7 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
       inputs.row.tableId,
       inputs.row
     )
-    if (env.isProd()) {
+    if (env.USE_QUOTAS) {
       await usage.update(apiKey, usage.Properties.ROW, 1)
     }
     await rowController.save(ctx)
diff --git a/packages/server/src/automations/steps/deleteRow.js b/packages/server/src/automations/steps/deleteRow.js
index 26623d628b..0f9648cc51 100644
--- a/packages/server/src/automations/steps/deleteRow.js
+++ b/packages/server/src/automations/steps/deleteRow.js
@@ -1,6 +1,7 @@
 const rowController = require("../../api/controllers/row")
 const env = require("../../environment")
 const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
 
 exports.definition = {
   description: "Delete a row from your database",
@@ -60,19 +61,16 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
       },
     }
   }
-  let ctx = {
+
+  let ctx = buildCtx(appId, emitter, {
+    body: {
+      _id: inputs.id,
+      _rev: inputs.revision,
+    },
     params: {
       tableId: inputs.tableId,
     },
-    request: {
-      body: {
-        _id: inputs.id,
-        _rev: inputs.revision,
-      },
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })
 
   try {
     if (env.isProd()) {
diff --git a/packages/server/src/automations/steps/queryRows.js b/packages/server/src/automations/steps/queryRows.js
index 64b757418e..3c4bb422a0 100644
--- a/packages/server/src/automations/steps/queryRows.js
+++ b/packages/server/src/automations/steps/queryRows.js
@@ -1,6 +1,7 @@
 const rowController = require("../../api/controllers/row")
 const tableController = require("../../api/controllers/table")
 const { FieldTypes } = require("../../constants")
+const { buildCtx } = require("./utils")
 
 const SortOrders = {
   ASCENDING: "ascending",
@@ -70,12 +71,11 @@ exports.definition = {
 }
 
 async function getTable(appId, tableId) {
-  const ctx = {
+  const ctx = buildCtx(appId, null, {
     params: {
       id: tableId,
     },
-    appId,
-  }
+  })
   await tableController.find(ctx)
   return ctx.body
 }
@@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
     sortType =
       fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
   }
-  const ctx = {
+  const ctx = buildCtx(appId, null, {
     params: {
       tableId,
     },
-    request: {
-      body: {
-        sortOrder,
-        sortType,
-        sort: sortColumn,
-        query: filters || {},
-        limit,
-      },
+    body: {
+      sortOrder,
+      sortType,
+      sort: sortColumn,
+      query: filters || {},
+      limit,
     },
-    appId,
-  }
+  })
   try {
     await rowController.search(ctx)
     return {
diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.js
index ac5eb16fcd..94f77bc801 100644
--- a/packages/server/src/automations/steps/updateRow.js
+++ b/packages/server/src/automations/steps/updateRow.js
@@ -1,5 +1,6 @@
 const rowController = require("../../api/controllers/row")
 const automationUtils = require("../automationUtils")
+const { buildCtx } = require("./utils")
 
 exports.definition = {
   name: "Update Row",
@@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
   }
 
   // have to clean up the row, remove the table from it
-  const ctx = {
+  const ctx = buildCtx(appId, emitter, {
+    body: {
+      ...inputs.row,
+      _id: inputs.rowId,
+    },
     params: {
       rowId: inputs.rowId,
    },
-    request: {
-      body: {
-        ...inputs.row,
-        _id: inputs.rowId,
-      },
-    },
-    appId,
-    eventEmitter: emitter,
-  }
+  })
 
   try {
     inputs.row = await automationUtils.cleanUpRowById(
diff --git a/packages/server/src/db/inMemoryView.js b/packages/server/src/db/inMemoryView.js
new file mode 100644
index 0000000000..892617e068
--- /dev/null
+++ b/packages/server/src/db/inMemoryView.js
@@ -0,0 +1,48 @@
+const PouchDB = require("pouchdb")
+const memory = require("pouchdb-adapter-memory")
+const newid = require("./newid")
+
+PouchDB.plugin(memory)
+const Pouch = PouchDB.defaults({
+  prefix: undefined,
+  adapter: "memory",
+})
+
+exports.runView = async (view, calculation, group, data) => {
+  // use a different DB name for every query so that concurrent
+  // queries never overlap, as overlap could cause 409s
+  const db = new Pouch(newid())
+  // write all the docs to the in memory Pouch (remove revs)
+  await db.bulkDocs(
+    data.map(row => ({
+      ...row,
+      _rev: undefined,
+    }))
+  )
+  let fn = (doc, emit) => emit(doc._id)
+  eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
+  const queryFns = {
+    meta: view.meta,
+    map: fn,
+  }
+  if (view.reduce) {
+    queryFns.reduce = view.reduce
+  }
+  const response = await db.query(queryFns, {
+    include_docs: !calculation,
+    group: !!group,
+  })
+  // restore the original revs on the returned docs so they are accurate;
+  // query rows carry id/doc, the stripped revs live on row.doc
+  for (let row of response.rows) {
+    if (!row.doc || !row.doc._id) {
+      continue
+    }
+    const found = data.find(possible => possible._id === row.doc._id)
+    if (found) {
+      row.doc._rev = found._rev
+    }
+  }
+  await db.destroy()
+  return response
+}
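For context, this is how the runView technique behaves end to end: rows are copied into a throwaway in-memory PouchDB, the stored map string is rehydrated into a (doc, emit) function (PouchDB calls two-argument map functions with emit as the second parameter), and the query then runs like any design-doc view. A self-contained sketch with hypothetical view and row data:

const PouchDB = require("pouchdb")
PouchDB.plugin(require("pouchdb-adapter-memory"))
const Pouch = PouchDB.defaults({ adapter: "memory" })

async function demoRunView() {
  // stored exactly like a design-doc view definition: a string
  const view = {
    map: `function (doc) {
      if (doc.tableId === "ta_demo") {
        emit(doc._id)
      }
    }`,
  }
  const data = [
    { _id: "ro_1", tableId: "ta_demo", name: "first" },
    { _id: "ro_2", tableId: "other", name: "second" },
  ]

  const db = new Pouch("demo-" + Date.now()) // unique name, avoids clashes
  await db.bulkDocs(data)

  // rehydrate the map string so emit is in scope, as runView does
  let fn = (doc, emit) => emit(doc._id)
  eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))

  const response = await db.query({ map: fn }, { include_docs: true })
  await db.destroy()
  return response.rows.map(row => row.doc) // only the ta_demo row
}

demoRunView().then(rows => console.log(rows))
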
diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js
index 67412e7e89..303cd085c1 100644
--- a/packages/server/src/db/linkedRows/index.js
+++ b/packages/server/src/db/linkedRows/index.js
@@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
   // create DBs
   const db = new CouchDB(appId)
   const linkedRowIds = links.map(link => link.id)
-  let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+  const uniqueRowIds = [...new Set(linkedRowIds)]
+  let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
     row => row.doc
   )
+  // convert the unique db rows back to a full list of linked rows
+  const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
   // need to handle users as specific cases
   let [users, other] = partition(linked, linkRow =>
     linkRow._id.startsWith(USER_METDATA_PREFIX)
   )
@@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
   let linkController = new LinkController(args)
   try {
     if (
-      !(await linkController.doesTableHaveLinkedFields()) &&
+      !(await linkController.doesTableHaveLinkedFields(table)) &&
       (oldTable == null ||
         !(await linkController.doesTableHaveLinkedFields(oldTable)))
     ) {
diff --git a/packages/server/src/db/utils.js b/packages/server/src/db/utils.js
index ec1c267fa2..3e20b30869 100644
--- a/packages/server/src/db/utils.js
+++ b/packages/server/src/db/utils.js
@@ -39,6 +39,7 @@ const DocumentTypes = {
   QUERY: "query",
   DEPLOYMENTS: "deployments",
   METADATA: "metadata",
+  MEM_VIEW: "view",
 }
 
 const ViewNames = {
@@ -348,6 +349,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
   return getDocParams(DocumentTypes.METADATA, docId, otherProps)
 }
 
+exports.generateMemoryViewID = viewName => {
+  return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
+}
+
+exports.getMemoryViewParams = (otherProps = {}) => {
+  return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
+}
+
 /**
  * This can be used with the db.allDocs to get a list of IDs
  */
diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js
index c5739a37e1..89e015b6f5 100644
--- a/packages/server/src/environment.js
+++ b/packages/server/src/environment.js
@@ -26,7 +26,7 @@ module.exports = {
   COUCH_DB_URL: process.env.COUCH_DB_URL,
   MINIO_URL: process.env.MINIO_URL,
   WORKER_URL: process.env.WORKER_URL,
-  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
+  SELF_HOSTED: process.env.SELF_HOSTED,
   AWS_REGION: process.env.AWS_REGION,
   ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
@@ -66,3 +66,10 @@ module.exports = {
     return !isDev()
   },
 }
+
+// convert numeric strings to numbers, otherwise a value like "0" would be truthy
+for (let [key, value] of Object.entries(module.exports)) {
+  if (typeof value === "string" && !isNaN(parseInt(value))) {
+    module.exports[key] = parseInt(value)
+  }
+}
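The environment change is subtle: process.env values are always strings, so the old inline `!!parseInt(...)` handled SELF_HOSTED="0" but any other string flag read as truthy. The new trailing loop normalises every numeric string on the exports object instead. Its effect in isolation, with example values:

// standalone illustration of the coercion loop above
const env = {
  SELF_HOSTED: "0", // process.env values are always strings
  WORKER_URL: "http://localhost:4002",
}

for (let [key, value] of Object.entries(env)) {
  if (typeof value === "string" && !isNaN(parseInt(value))) {
    env[key] = parseInt(value)
  }
}

console.log(!!env.SELF_HOSTED) // false: "0" is now the number 0
console.log(env.WORKER_URL) // unchanged: parseInt gives NaN
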
diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js
index bb4ac98bb7..07549dd8a8 100644
--- a/packages/server/src/utilities/rowProcessor/index.js
+++ b/packages/server/src/utilities/rowProcessor/index.js
@@ -89,10 +89,16 @@
  * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
  * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
  * @param {Object} row The row which is to be updated with information for the auto columns.
+ * @param {Object} opts options which enable the optional behaviours of this function.
  * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
  * for automatic ID purposes.
  */
-function processAutoColumn(user, table, row) {
+function processAutoColumn(
+  user,
+  table,
+  row,
+  opts = { reprocessing: false, noAutoRelationships: false }
+) {
   let now = new Date().toISOString()
   // if a row doesn't have a revision then it doesn't exist yet
   const creating = !row._rev
@@ -102,7 +108,7 @@
     }
     switch (schema.subtype) {
       case AutoFieldSubTypes.CREATED_BY:
-        if (creating) {
+        if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
           row[key] = [user.userId]
         }
         break
@@ -112,7 +118,9 @@
         }
         break
       case AutoFieldSubTypes.UPDATED_BY:
-        row[key] = [user.userId]
+        if (!opts.reprocessing && !opts.noAutoRelationships) {
+          row[key] = [user.userId]
+        }
         break
       case AutoFieldSubTypes.UPDATED_AT:
         row[key] = now
@@ -127,6 +135,7 @@
   }
   return { table, row }
 }
+exports.processAutoColumn = processAutoColumn
 
 /**
  * This will coerce a value to the correct types based on the type transform map
@@ -151,9 +160,15 @@
  * @param {object} user the user which is performing the input.
  * @param {object} row the row which is being created/updated.
  * @param {object} table the table which the row is being saved to.
+ * @param {object} opts some input processing options (like disabling auto-column relationships).
  * @returns {object} the row which has been prepared to be written to the DB.
  */
-exports.inputProcessing = (user = {}, table, row) => {
+exports.inputProcessing = (
+  user = {},
+  table,
+  row,
+  opts = { noAutoRelationships: false }
+) => {
   let clonedRow = cloneDeep(row)
   // need to copy the table so it can be differenced on way out
   const copiedTable = cloneDeep(table)
@@ -176,7 +191,7 @@
     }
   }
   // handle auto columns - this returns an object like {table, row}
-  return processAutoColumn(user, copiedTable, clonedRow)
+  return processAutoColumn(user, copiedTable, clonedRow, opts)
 }
 
 /**
diff --git a/packages/worker/src/api/controllers/global/workspaces.js b/packages/worker/src/api/controllers/global/workspaces.js
index 95a1ec296d..48a710c92d 100644
--- a/packages/worker/src/api/controllers/global/workspaces.js
+++ b/packages/worker/src/api/controllers/global/workspaces.js
@@ -11,7 +11,7 @@ exports.save = async function (ctx) {
   }
 
   try {
-    const response = await db.post(workspaceDoc)
+    const response = await db.put(workspaceDoc)
     ctx.body = {
       _id: response.id,
       _rev: response.rev,
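The automation steps above all lean on a new buildCtx helper from steps/utils.js, which this diff does not include. Inferred purely from the call sites, it presumably assembles the same Koa-style ctx the steps used to build inline; something like the following reconstruction (hypothetical, not the actual source):

// hypothetical reconstruction of buildCtx, based only on the call
// sites above: buildCtx(appId, emitter, { body, params })
exports.buildCtx = (appId, emitter, { body, params } = {}) => {
  const ctx = {
    appId,
    eventEmitter: emitter,
  }
  if (params) {
    ctx.params = params
  }
  if (body) {
    ctx.request = { body }
  }
  return ctx
}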