
Merge branch 'master' of github.com:Budibase/budibase into endpoint-renaming

mike12345567 2020-10-12 17:45:11 +01:00
commit 408eda81ff
12 changed files with 130 additions and 39 deletions

View file

@@ -90,6 +90,8 @@ const contextToBindables = (tables, walkResult) => context => {
runtimeBinding: `${contextParentPath}data.${key}`,
// how the binding expression looks to the user of the builder
readableBinding: `${context.instance._instanceName}.${table.name}.${key}`,
// table / view info
table: context.table,
})
// see TableViewSelect.svelte for the format of context.table
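
For reference, a hedged sketch of one bindable entry that contextToBindables now emits; the names and IDs below are made-up examples, and the table field may be either a table ID string or a view-style object (see TableViewSelect.svelte):

const exampleBindable = {
  type: "context",
  runtimeBinding: "data.name",
  readableBinding: "People Screen.People.name",
  // either a table ID string or a view-style object such as { tableId: "ta_people" }
  table: "ta_people",
}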

View file

@@ -79,7 +79,7 @@
}
function fieldOptions(field) {
return viewTable.schema[field].type === "string"
return viewTable.schema[field].type === "options"
? viewTable.schema[field].constraints.inclusion
: [true, false]
}
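
The fix above switches the match from the "string" type to the "options" type, so option fields surface their constrained inclusion list while every other field falls back to a boolean pair. A minimal sketch, assuming a hypothetical viewTable shape:

const viewTable = {
  schema: {
    status: { type: "options", constraints: { inclusion: ["open", "closed"] } },
    active: { type: "boolean" },
  },
}

function fieldOptions(field) {
  return viewTable.schema[field].type === "options"
    ? viewTable.schema[field].constraints.inclusion
    : [true, false]
}

fieldOptions("status") // ["open", "closed"]
fieldOptions("active") // [true, false]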

View file

@@ -3,6 +3,7 @@
import { Input, Button, Spacer, Select, ModalContent } from "@budibase/bbui"
import getTemplates from "builderStore/store/screenTemplates"
import { some } from "lodash/fp"
import analytics from "analytics"
const CONTAINER = "@budibase/standard-components/container"
@@ -29,7 +30,7 @@
const templateChanged = newTemplateIndex => {
if (newTemplateIndex === undefined) return
const template = templates[newTemplateIndex]
draftScreen = templates[newTemplateIndex].create()
if (draftScreen.props._instanceName) {
name = draftScreen.props._instanceName
@@ -63,6 +64,13 @@
store.createScreen(draftScreen)
if (templateIndex !== undefined) {
const template = templates[templateIndex]
analytics.captureEvent("Screen Created", {
template: template.id || template.name,
})
}
finished()
}

View file

@@ -1,18 +1,75 @@
<script>
import { DataList } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import { store } from "builderStore"
import { store, backendUiStore } from "builderStore"
import fetchBindableProperties from "builderStore/fetchBindableProperties"
const dispatch = createEventDispatcher()
export let value = ""
$: urls = getUrls()
const handleBlur = () => dispatch("change", value)
// this will get the URLs of all screens, but only
// include detail screens that are usable in the current context,
// substituting the :id param with the actual {{ ._id }} binding
const getUrls = () => {
const urls = [
...$store.screens
.filter(screen => !screen.props._component.endsWith("/rowdetail"))
.map(screen => ({
name: screen.props._instanceName,
url: screen.route,
sort: screen.props._component,
})),
]
const bindableProperties = fetchBindableProperties({
componentInstanceId: $store.currentComponentInfo._id,
components: $store.components,
screen: $store.currentPreviewItem,
tables: $backendUiStore.tables,
})
const detailScreens = $store.screens.filter(screen =>
screen.props._component.endsWith("/rowdetail")
)
for (let detailScreen of detailScreens) {
const idBinding = bindableProperties.find(p => {
if (
p.type === "context" &&
p.runtimeBinding.endsWith("._id") &&
p.table
) {
const tableId =
typeof p.table === "string" ? p.table : p.table.tableId
return tableId === detailScreen.props.table
}
return false
})
if (idBinding) {
urls.push({
name: detailScreen.props._instanceName,
url: detailScreen.route.replace(
":id",
`{{ ${idBinding.runtimeBinding} }}`
),
sort: detailScreen.props._component,
})
}
}
return urls
}
</script>
<DataList editable secondary on:blur={handleBlur} on:change bind:value>
<option value="" />
{#each $store.allScreens as screen}
<option value={screen.route}>{screen.props._instanceName}</option>
{#each urls as url}
<option value={url.url}>{url.name}</option>
{/each}
</DataList>
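
The interesting part of the new getUrls is the :id substitution for /rowdetail screens: once a context binding ending in ._id is found for the screen's table, the route parameter is swapped for that runtime binding. A small sketch of just that substitution, with hypothetical route and binding values:

const detailScreenRoute = "/people/:id"          // hypothetical detail screen route
const idBinding = { runtimeBinding: "data._id" } // hypothetical binding found for the table

const url = detailScreenRoute.replace(":id", `{{ ${idBinding.runtimeBinding} }}`)
// url === "/people/{{ data._id }}"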

View file

@@ -1,4 +1,4 @@
import { isString, isUndefined } from "lodash/fp"
import { isString, isUndefined, cloneDeep } from "lodash/fp"
import { TYPE_MAP } from "./types"
import { assign } from "lodash"
import { uuid } from "builderStore/uuid"
@@ -83,13 +83,13 @@ const parsePropDef = propDef => {
if (isString(propDef)) {
if (!TYPE_MAP[propDef]) return error(`Type ${propDef} is not recognised.`)
return TYPE_MAP[propDef].default
return cloneDeep(TYPE_MAP[propDef].default)
}
const type = TYPE_MAP[propDef.type]
if (!type) return error(`Type ${propDef.type} is not recognised.`)
return propDef.default
return cloneDeep(propDef.default)
}
export const arrayElementComponentName = (parentComponentName, arrayPropName) =>
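
Returning cloneDeep(...) instead of the raw default means each component instance gets its own copy rather than a reference to the shared TYPE_MAP value. A minimal sketch of why that matters, using a hypothetical options entry:

import { cloneDeep } from "lodash/fp"

const TYPE_MAP = { options: { default: [] } } // hypothetical, mirrors the shape in types.js

// without cloning, every caller shares the same array instance,
// so one component's edits leak into everyone else's defaults
const shared = TYPE_MAP.options.default
shared.push("leaked") // TYPE_MAP.options.default is now ["leaked"]

// with cloneDeep, each caller receives an independent copy
const copy = cloneDeep(TYPE_MAP.options.default)
copy.push("isolated") // the shared default is not affected by this push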

View file

@@ -10,7 +10,6 @@ export const TYPE_MAP = {
},
options: {
default: [],
options: [],
},
event: {
default: [],

View file

@@ -356,7 +356,7 @@ export default {
{
label: "destinationUrl",
key: "destinationUrl",
control: Input,
control: ScreenSelect,
placeholder: "/table/_id",
},
],
@@ -405,7 +405,7 @@
{
label: "Link Url",
key: "linkUrl",
control: Input,
control: ScreenSelect,
placeholder: "Link URL",
},
{
@@ -480,7 +480,7 @@
{
label: "Link Url",
key: "linkUrl",
control: Input,
control: ScreenSelect,
placeholder: "Link URL",
},
{

View file

@@ -15,7 +15,7 @@ export const FIELDS = {
type: "options",
constraints: {
type: "string",
presence: { allowEmpty: true },
presence: false,
inclusion: [],
},
},
@@ -67,7 +67,7 @@
type: "link",
constraints: {
type: "array",
presence: { allowEmpty: true },
presence: false,
},
},
}

View file

@@ -33,6 +33,7 @@ exports.save = async function(ctx) {
views: {},
...rest,
}
let renameDocs = []
// if the table obj had an _id then it will have been retrieved
const oldTable = ctx.preExisting
@@ -49,14 +50,11 @@
include_docs: true,
})
)
const docs = rows.rows.map(({ doc }) => {
renameDocs = rows.rows.map(({ doc }) => {
doc[_rename.updated] = doc[_rename.old]
delete doc[_rename.old]
return doc
})
await db.bulkDocs(docs)
delete tableToSave._rename
}
@@ -69,9 +67,6 @@
tableView.schema = tableToSave.schema
}
const result = await db.post(tableToSave)
tableToSave._rev = result.rev
// update linked rows
await linkRows.updateLinks({
instanceId,
@@ -82,6 +77,14 @@
oldTable: oldTable,
})
// don't perform any updates until relationships have been
// checked by the updateLinks function
if (renameDocs.length !== 0) {
await db.bulkDocs(renameDocs)
}
const result = await db.post(tableToSave)
tableToSave._rev = result.rev
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, instanceId, tableToSave)
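
Taken together, the hunks above defer both the renamed-row write and the table post until after updateLinks has succeeded. A rough sketch of the reordered save flow, not the full handler:

async function saveTable(db, linkRows, tableToSave, renameDocs, linkArgs) {
  // relationship changes are validated first, so a failure here leaves rows untouched
  await linkRows.updateLinks(linkArgs)

  // only once the links are updated are the renamed row docs written...
  if (renameDocs.length !== 0) {
    await db.bulkDocs(renameDocs)
  }

  // ...and the table document itself persisted
  const result = await db.post(tableToSave)
  tableToSave._rev = result.rev
  return tableToSave
}
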
@@ -105,18 +108,17 @@ exports.save = async function(ctx) {
exports.destroy = async function(ctx) {
const instanceId = ctx.user.instanceId
const db = new CouchDB(instanceId)
const tableToDelete = await db.get(ctx.params.tableId)
await db.remove(tableToDelete)
// Delete all rows for that table
const rows = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ _id: row.id, _deleted: true })))
await db.bulkDocs(
rows.rows.map(row => ({ ...row.doc, _deleted: true }))
)
// update linked rows
await linkRows.updateLinks({
@@ -125,6 +127,9 @@ exports.destroy = async function(ctx) {
table: tableToDelete,
})
// don't remove the table itself until very end
await db.remove(tableToDelete)
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:delete`, instanceId, tableToDelete)
ctx.status = 200
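
The destroy handler gets the same treatment: row deletions now carry the full doc, so CouchDB has the _rev it needs to accept each deletion, and the table document itself is only removed once the rows and links have been dealt with. A sketch of the new ordering, assuming the same CouchDB-style db API and that getRowParams is in scope as it is in the controller:

async function destroyTable(db, linkRows, tableToDelete, linkArgs) {
  // delete every row, spreading the full doc so each deletion includes its _rev
  const rows = await db.allDocs(
    getRowParams(tableToDelete._id, null, { include_docs: true })
  )
  await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))

  // clean up any link documents that referenced those rows
  await linkRows.updateLinks(linkArgs)

  // the table itself is removed last, so nothing is lost if an earlier step fails
  await db.remove(tableToDelete)
}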

View file

@@ -161,7 +161,7 @@ class LinkController {
})
// now add the docs to be deleted to the bulk operation
operations.push(...toDeleteDocs)
// replace this field with a simple entry to denote there are links
// remove the field from this row, link doc will be added to row on way out
delete row[fieldName]
}
}
@@ -234,8 +234,16 @@
for (let fieldName of Object.keys(schema)) {
const field = schema[fieldName]
if (field.type === "link") {
// handle this in a separate try/catch, as we still want
// the put below to bubble up as an error if the table
// can't be updated for some reason
let linkedTable
try {
linkedTable = await this._db.get(field.tableId)
} catch (err) {
continue
}
// create the link field in the other table
const linkedTable = await this._db.get(field.tableId)
linkedTable.schema[field.fieldName] = {
name: field.fieldName,
type: "link",

View file

@@ -42,6 +42,7 @@ exports.updateLinks = async function({
table,
oldTable,
}) {
const baseReturnObj = row == null ? table : row
if (instanceId == null) {
throw "Cannot operate without an instance ID."
}
@@ -50,12 +51,16 @@
arguments[0].tableId = table._id
}
let linkController = new LinkController(arguments[0])
if (
!(await linkController.doesTableHaveLinkedFields()) &&
(oldTable == null ||
!(await linkController.doesTableHaveLinkedFields(oldTable)))
) {
return row
try {
if (
!(await linkController.doesTableHaveLinkedFields()) &&
(oldTable == null ||
!(await linkController.doesTableHaveLinkedFields(oldTable)))
) {
return baseReturnObj
}
} catch (err) {
return baseReturnObj
}
switch (eventType) {
case EventType.ROW_SAVE:
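
With baseReturnObj, updateLinks now hands back something sensible for table events too (the table when no row was passed), and it returns early instead of throwing when the link-field check itself fails. A hypothetical caller from inside the table save handler, where the EventType name is assumed rather than taken from this diff:

const updatedTable = await linkRows.updateLinks({
  instanceId,
  eventType: linkRows.EventType.TABLE_SAVE, // assumed event name
  table: tableToSave,
  oldTable,
}) // no row supplied, so the table comes back even when there are no link fields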

View file

@@ -57,19 +57,26 @@ exports.generateTableID = () => {
/**
* Gets the DB allDocs/query params for retrieving a row.
* @param {string} tableId The table in which the rows have been stored.
* @param {string|null} tableId The table in which the rows have been stored.
* @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
* left null to get all the rows in the table.
* @param {object} otherProps Any other properties to add to the request.
* @returns {object} Parameters which can then be used with an allDocs request.
*/
exports.getRowParams = (tableId, rowId = null, otherProps = {}) => {
exports.getRowParams = (
tableId = null,
rowId = null,
otherProps = {}
) => {
if (tableId == null) {
throw "Cannot build params for rows without a table ID"
return getDocParams(DocumentTypes.ROW, null, otherProps)
} else {
const endOfKey =
rowId == null
? `${tableId}${SEPARATOR}`
: `${tableId}${SEPARATOR}${rowId}`
return getDocParams(DocumentTypes.ROW, endOfKey, otherProps)
}
const endOfKey =
rowId == null ? `${tableId}${SEPARATOR}` : `${tableId}${SEPARATOR}${rowId}`
return getDocParams(DocumentTypes.ROW, endOfKey, otherProps)
}
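
Instead of throwing when no table ID is given, getRowParams now builds params that span every row document. Some hypothetical calls to illustrate the relaxed signature (the IDs are made up):

getRowParams()                       // params matching every row in the database
getRowParams("ta_people")            // params matching all rows in one table
getRowParams("ta_people", "ro_1234") // params matching a single row
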
/**