
Updating migrations to correctly cover all the required elements.

mike12345567 2024-06-04 17:24:11 +01:00
parent c509da1f22
commit cf6f86cb2f
6 changed files with 84 additions and 36 deletions

View file

@@ -8,6 +8,7 @@ import {
   DatabaseOpts,
   DatabasePutOpts,
   DatabaseQueryOpts,
+  DBError,
   Document,
   isDocument,
   RowResponse,
@@ -41,7 +42,7 @@ function buildNano(couchInfo: { url: string; cookie: string }) {
 
 type DBCall<T> = () => Promise<T>
 
-class CouchDBError extends Error {
+class CouchDBError extends Error implements DBError {
   status: number
   statusCode: number
   reason: string

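For orientation, here is a minimal, self-contained sketch of what the new contract buys: any error class declaring `implements DBError` is forced to expose the fields callers rely on. The interface is redeclared locally so the sketch compiles on its own, and the constructor logic is illustrative, not Budibase's actual code.

// Local redeclaration of the DBError shape added later in this commit.
interface DBError extends Error {
  status: number
  statusCode: number
  reason: string
  errid: string
  error: string
  description: string
}

// Illustrative only: the field values assigned here are assumptions.
class CouchDBError extends Error implements DBError {
  status: number
  statusCode: number
  reason: string
  errid: string
  error: string
  description: string

  constructor(message: string, info: { status?: number; reason?: string } = {}) {
    super(message)
    this.status = info.status ?? 500
    this.statusCode = this.status
    this.reason = info.reason ?? "unknown"
    this.errid = "db_error"
    this.error = "db_error"
    this.description = message
  }
}

Because DBError extends Error, the inherited name and message properties satisfy the rest of the contract for free.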
View file

@@ -1,32 +0,0 @@
-import { context } from "@budibase/backend-core"
-import { allLinkDocs } from "../../db/utils"
-import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
-
-const migration = async () => {
-  const linkDocs = await allLinkDocs()
-  const docsToUpdate = []
-
-  for (const linkDoc of linkDocs) {
-    if (linkDoc.tableId) {
-      // It already had the required data
-      continue
-    }
-
-    linkDoc.tableId = new LinkDocumentImpl(
-      linkDoc.doc1.tableId,
-      linkDoc.doc1.fieldName,
-      linkDoc.doc1.rowId,
-      linkDoc.doc2.tableId,
-      linkDoc.doc2.fieldName,
-      linkDoc.doc2.rowId
-    ).tableId
-
-    docsToUpdate.push(linkDoc)
-  }
-
-  if (docsToUpdate.length) {
-    const db = context.getAppDB()
-    await db.bulkDocs(docsToUpdate)
-  }
-}
-
-export default migration

View file

@@ -0,0 +1,57 @@
+import { context } from "@budibase/backend-core"
+import { allLinkDocs } from "../../db/utils"
+import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
+import sdk from "../../sdk"
+import env from "../../environment"
+import { DBError } from "@budibase/types"
+
+const migration = async () => {
+  const linkDocs = await allLinkDocs()
+  const docsToUpdate = []
+
+  for (const linkDoc of linkDocs) {
+    // it already has the junction table ID - no need to migrate
+    if (linkDoc.tableId) {
+      continue
+    }
+    linkDoc.tableId = new LinkDocumentImpl(
+      linkDoc.doc1.tableId,
+      linkDoc.doc1.fieldName,
+      linkDoc.doc1.rowId,
+      linkDoc.doc2.tableId,
+      linkDoc.doc2.fieldName,
+      linkDoc.doc2.rowId
+    ).tableId
+    docsToUpdate.push(linkDoc)
+  }
+
+  const db = context.getAppDB()
+  if (docsToUpdate.length) {
+    await db.bulkDocs(docsToUpdate)
+  }
+
+  // at the end make sure design doc is ready
+  await sdk.tables.sqs.syncDefinition()
+
+  // only do initial search if environment is using SQS already
+  if (env.SQS_SEARCH_ENABLE) {
+    const tables = await sdk.tables.getAllInternalTables()
+    // do these one by one - running in parallel could cause problems
+    for (const table of tables) {
+      try {
+        await db.sql(`select * from ${table._id} limit 1`)
+      } catch (err) {
+        if (typeof err === "object") {
+          const dbErr = err as DBError
+          throw new Error(`Failed to run initial SQS search - ${dbErr.message}`)
+        }
+        throw err
+      }
+    }
+  }
+}
+
+export default migration

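The limit-1 query above is a cheap smoke test: it forces each table to resolve against the freshly synced design doc, so a broken definition fails the migration rather than the first user search. A standalone sketch of that probe pattern, with a hypothetical runQuery callback standing in for db.sql:

// Hypothetical helper; `runQuery` stands in for db.sql in the migration above.
async function probeTables(
  tableIds: string[],
  runQuery: (sql: string) => Promise<unknown>
): Promise<void> {
  // One by one - parallel probes could contend on the same underlying store.
  for (const tableId of tableIds) {
    try {
      await runQuery(`select * from ${tableId} limit 1`)
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err)
      throw new Error(`Failed to run initial SQS search - ${message}`)
    }
  }
}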
View file

@@ -107,8 +107,17 @@ async function buildBaseDefinition(): Promise<PreSaveSQLiteDefinition> {
 
 export async function syncDefinition(): Promise<void> {
   const db = context.getAppDB()
-  const definition = await buildBaseDefinition()
-  await db.put(definition)
+  let rev: string | undefined
+  try {
+    const existing = await db.get(SQLITE_DESIGN_DOC_ID)
+    rev = existing._rev
+  } finally {
+    const definition = await buildBaseDefinition()
+    if (rev) {
+      definition._rev = rev
+    }
+    await db.put(definition)
+  }
 }
 
 export async function addTable(table: Table) {

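The rev handling in syncDefinition is the usual CouchDB upsert dance: fetch the current _rev if the doc exists, attach it, then put. A minimal standalone sketch of the same pattern - note it uses try/catch around the get rather than the commit's try/finally, and the Db type is a structural stand-in, not Budibase's Database interface:

// Structural stand-in for a CouchDB-style client.
interface Db {
  get(id: string): Promise<{ _rev: string }>
  put(doc: { _id: string; _rev?: string }): Promise<unknown>
}

async function upsert(db: Db, doc: { _id: string; _rev?: string }) {
  try {
    const existing = await db.get(doc._id)
    doc._rev = existing._rev // reuse the rev so the put isn't a conflict
  } catch (err) {
    // assume get throws when the doc doesn't exist yet - first write needs no rev
  }
  await db.put(doc)
}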
View file

@@ -30,4 +30,7 @@ export interface SQLiteDefinition {
   }
 }
 
-export type PreSaveSQLiteDefinition = Omit<SQLiteDefinition, "_rev">
+export interface PreSaveSQLiteDefinition
+  extends Omit<SQLiteDefinition, "_rev"> {
+  _rev?: string
+}

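Replacing the plain Omit alias with an interface that re-adds _rev as optional is what lets syncDefinition assign a rev only when one exists. A small illustration, using a simplified stand-in for the real SQLiteDefinition:

// Simplified stand-in; the real SQLiteDefinition carries the SQLite schema.
interface SQLiteDefinition {
  _id: string
  _rev: string
  sql: Record<string, unknown>
}

interface PreSaveSQLiteDefinition extends Omit<SQLiteDefinition, "_rev"> {
  _rev?: string
}

const definition: PreSaveSQLiteDefinition = { _id: "_design/sqlite", sql: {} }
definition._rev = "1-abc" // fine now; the old Omit-only alias rejected this line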
View file

@@ -165,3 +165,13 @@ export interface Database {
   deleteIndex(...args: any[]): Promise<any>
   getIndexes(...args: any[]): Promise<any>
 }
+
+export interface DBError extends Error {
+  status: number
+  statusCode: number
+  reason: string
+  name: string
+  errid: string
+  error: string
+  description: string
+}
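The migration narrows errors with a blanket typeof check and an as cast; a structural type guard is a stricter alternative. A hedged sketch (isDBError is not part of this commit), with the interface trimmed to the fields the guard inspects:

// Trimmed mirror of the DBError interface above.
interface DBError extends Error {
  status: number
  statusCode: number
  reason: string
}

function isDBError(err: unknown): err is DBError {
  if (!(err instanceof Error)) {
    return false
  }
  const candidate = err as Partial<DBError>
  return typeof candidate.status === "number" && typeof candidate.reason === "string"
}

In the migration above, a guard like this would replace the typeof err === "object" check before reading dbErr.message.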