
Updating docs and removing old documentation pathway.

mike12345567 2023-10-11 17:49:25 +01:00
parent bda67b1aca
commit d1b7ccd86f
8 changed files with 15 additions and 433 deletions

View file

@@ -74,7 +74,6 @@
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"build:docs": "lerna run --stream build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",
"env:multi:disable": "lerna run --stream env:multi:disable",

View file

@@ -20,7 +20,6 @@
"test:watch": "jest --watch",
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
"build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docs": "node ./scripts/docs/generate.js open",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",

View file

@@ -1,31 +0,0 @@
### Documentation
This directory contains the scripts required to generate the APIDoc-based documentation.
You can find documentation about the comment structure at the [APIDocs page](https://apidocjs.com/).
In general, most API endpoint doc comments will look like this:
```js
/**
* @api {post} /api/:param/url Give it a name
* @apiName Give it a name
* @apiGroup group
* @apiPermission permission
* @apiDescription Describe what the endpoint does and any special cases the user
* should be aware of.
*
* @apiParam {string} param describe a URL parameter.
*
* @apiParam (Body) input describe a field on the body.
*
* @apiSuccess {object} output describe the output.
*/
```
There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Always include an `@api` definition at the start; it must contain the
HTTP verb, the endpoint URL and the name.
3. There are three ways to specify parameters used as inputs for your endpoint:
`@apiParam` for a URL parameter, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
for query string parameters (see the example after this list).
4. The `@apiGroup` should be the same for all API Doc comments in a route file.
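
For illustration, a hypothetical doc comment combining all three parameter forms might look like the following (the endpoint, group and field names here are made up):
```js
/**
* @api {post} /api/:tableId/rows Save a row
* @apiName Save a row
* @apiGroup rows
* @apiPermission builder
* @apiDescription Illustrative only - saves or updates a row in the given table.
*
* @apiParam {string} tableId the ID of the table the row belongs to (URL parameter).
*
* @apiParam (Body) _id the ID of an existing row, supplied when updating (body field).
*
* @apiParam (Query) includeRelationships whether related rows should be included in the response (query string parameter).
*/
```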

View file

@@ -1,74 +0,0 @@
const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")
const config = {
name: "Budibase API",
version: packageJson.version,
description: "Documenting the Budibase backend API",
title: "Budibase app service API",
}
const shouldOpen = process.argv[2]
const disallowed = []
function filter(parsedRouteFiles) {
const tagToSearch = "url"
for (let routeFile of parsedRouteFiles) {
for (let route of routeFile) {
let routeInfo = route["local"]
if (disallowed.includes(routeInfo[tagToSearch])) {
const idx = routeFile.indexOf(route)
routeFile.splice(idx, 1)
}
}
}
}
async function generate() {
// start by writing a config file
const configPath = join(__dirname, "config.json")
fs.writeFileSync(configPath, JSON.stringify(config))
const mainPath = join(__dirname, "..", "..")
const srcPath = join(mainPath, "src", "api", "routes")
const assetsPath = join(mainPath, "builder", "assets", "docs")
if (!fs.existsSync(assetsPath)) {
fs.mkdirSync(assetsPath, { recursive: true })
}
const options = {
src: [srcPath],
dest: assetsPath,
filters: {
main: {
postFilter: filter,
},
},
config: configPath,
}
const doc = createDoc(options)
if (typeof doc !== "boolean") {
const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
console.log(
`Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
)
} else {
throw "Unable to generate docs."
}
// delete the temporary config file
fs.unlinkSync(configPath)
setTimeout(async () => {
if (shouldOpen === "open") {
await open(join(assetsPath, "index.html"), { wait: false })
}
}, 2000)
}
generate().catch(err => {
console.error(err)
})

View file

@@ -1,320 +0,0 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")
/********************************************************
* Based on: https://github.com/fsbahman/apidoc-swagger *
********************************************************/
let swagger = {
swagger: "2.0",
info: {},
paths: {},
definitions: {},
}
function toSwagger(apidocJson, projectJson) {
swagger.info = addInfo(projectJson)
swagger.paths = extractPaths(apidocJson)
return swagger
}
let tagsRegex = /(<([^>]+)>)/gi
// Removes HTML tags such as <p> </p> from text
function removeTags(text) {
return text ? text.replace(tagsRegex, "") : text
}
function addInfo(projectJson) {
let info = {}
info["title"] = projectJson.title || projectJson.name
info["version"] = projectJson.version
info["description"] = projectJson.description
return info
}
/**
* Extracts the paths provided in JSON format.
* POST, PATCH and PUT request parameters are extracted into the request body;
* GET and DELETE parameters are extracted as path parameters.
* @param apidocJson
* @returns {{}}
*/
function extractPaths(apidocJson) {
let apiPaths = groupByUrl(apidocJson)
let paths = {}
for (let i = 0; i < apiPaths.length; i++) {
let verbs = apiPaths[i].verbs
let url = verbs[0].url
let pattern = pathToRegexp(url, null)
let matches = pattern.exec(url)
// Surrounds URL parameters with curly brackets, e.g. :email becomes {email}
let pathKeys = []
for (let j = 1; j < matches.length; j++) {
let key = matches[j].slice(1)
url = url.replace(matches[j], "{" + key + "}")
pathKeys.push(key)
}
for (let j = 0; j < verbs.length; j++) {
let verb = verbs[j]
let type = verb.type
let obj = (paths[url] = paths[url] || {})
if (type === "post" || type === "patch" || type === "put") {
_.extend(
obj,
createPostPushPutOutput(verb, swagger.definitions, pathKeys)
)
} else {
_.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
}
}
}
return paths
}
function createPostPushPutOutput(verbs, definitions, pathKeys) {
let pathItemObject = {}
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
let params = []
let pathParams = createPathParameters(verbs, pathKeys)
pathParams = _.filter(pathParams, function (param) {
let hasKey = pathKeys.indexOf(param.name) !== -1
return !(param.in === "path" && !hasKey)
})
params = params.concat(pathParams)
let required =
verbs.parameter &&
verbs.parameter.fields &&
verbs.parameter.fields.Parameter &&
verbs.parameter.fields.Parameter.length > 0
params.push({
in: "body",
name: "body",
description: removeTags(verbs.description),
required: required,
schema: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
},
})
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: params,
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
function createVerbDefinitions(verbs, definitions) {
let result = {
topLevelParametersRef: null,
topLevelSuccessRef: null,
topLevelSuccessRefType: null,
}
let defaultObjectName = verbs.name
let fieldArrayResult = {}
if (verbs && verbs.parameter && verbs.parameter.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.parameter.fields.Parameter,
definitions,
verbs.name,
defaultObjectName
)
result.topLevelParametersRef = fieldArrayResult.topLevelRef
}
if (verbs && verbs.success && verbs.success.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.success.fields["Success 200"],
definitions,
verbs.name,
defaultObjectName
)
result.topLevelSuccessRef = fieldArrayResult.topLevelRef
result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
}
return result
}
function createFieldArrayDefinitions(
fieldArray,
definitions,
topLevelRef,
defaultObjectName
) {
let result = {
topLevelRef: topLevelRef,
topLevelRefType: null,
}
if (!fieldArray) {
return result
}
for (let i = 0; i < fieldArray.length; i++) {
let parameter = fieldArray[i]
let nestedName = createNestedName(parameter.field)
let objectName = nestedName.objectName
if (!objectName) {
objectName = defaultObjectName
}
let type = parameter.type
if (i === 0) {
result.topLevelRefType = type
if (parameter.type === "Object") {
objectName = nestedName.propertyName
nestedName.propertyName = null
} else if (parameter.type === "Array") {
objectName = nestedName.propertyName
nestedName.propertyName = null
result.topLevelRefType = "array"
}
result.topLevelRef = objectName
}
definitions[objectName] = definitions[objectName] || {
properties: {},
required: [],
}
if (nestedName.propertyName) {
let prop = {
type: (parameter.type || "").toLowerCase(),
description: removeTags(parameter.description),
}
if (parameter.type === "Object") {
prop.$ref = "#/definitions/" + parameter.field
}
let typeIndex = type.indexOf("[]")
if (typeIndex !== -1 && typeIndex === type.length - 2) {
prop.type = "array"
prop.items = {
type: type.slice(0, type.length - 2),
}
}
definitions[objectName]["properties"][nestedName.propertyName] = prop
if (!parameter.optional) {
let arr = definitions[objectName]["required"]
if (arr.indexOf(nestedName.propertyName) === -1) {
arr.push(nestedName.propertyName)
}
}
}
}
return result
}
function createNestedName(field) {
let propertyName = field
let objectName
let propertyNames = field.split(".")
if (propertyNames && propertyNames.length > 1) {
propertyName = propertyNames[propertyNames.length - 1]
propertyNames.pop()
objectName = propertyNames.join(".")
}
return {
propertyName: propertyName,
objectName: objectName,
}
}
/**
* Generates the output for GET and DELETE methods
* @param verbs
* @param definitions
* @returns {{}}
*/
function createGetDeleteOutput(verbs, definitions) {
let pathItemObject = {}
verbs.type = verbs.type === "del" ? "delete" : verbs.type
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: createPathParameters(verbs),
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
/**
* Iterates through all method parameters and creates an array of parameter objects, stored as path parameters
* @param verbs
* @returns {Array}
*/
function createPathParameters(verbs) {
let pathItemObject = []
if (verbs.parameter && verbs.parameter.fields.Parameter) {
for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
let param = verbs.parameter.fields.Parameter[i]
let field = param.field
let type = param.type
pathItemObject.push({
name: field,
in: type === "file" ? "formData" : "path",
required: !param.optional,
type: param.type.toLowerCase(),
description: removeTags(param.description),
})
}
}
return pathItemObject
}
function groupByUrl(apidocJson) {
return _.chain(apidocJson)
.groupBy("url")
.toPairs()
.map(function (element) {
return _.zipObject(["url", "verbs"], element)
})
.value()
}
module.exports = toSwagger

View file

@@ -859,7 +859,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1064,7 +1065,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1280,7 +1282,8 @@
"json",
"internal",
"barcodeqr",
"bigint"
"bigint",
"bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},

View file

@@ -782,6 +782,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -946,6 +947,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:
@@ -1117,6 +1119,7 @@ components:
- internal
- barcodeqr
- bigint
- bb_reference
description: Defines the type of the column, most explain themselves, a link
column is a relationship.
constraints:

View file

@@ -279,7 +279,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@@ -386,7 +387,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@@ -495,7 +497,8 @@ export interface components {
| "json"
| "internal"
| "barcodeqr"
| "bigint";
| "bigint"
| "bb_reference";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */