
Merge remote-tracking branch 'origin/develop' into chore/esbuild

Adria Navarro 2023-05-24 14:17:32 +02:00
commit b264708af6
15 changed files with 213 additions and 43 deletions

View file

@@ -1,5 +1,5 @@
{
"version": "2.6.19-alpha.2",
"version": "2.6.19-alpha.5",
"npmClient": "yarn",
"packages": [
"packages/backend-core",

View file

@@ -25,6 +25,8 @@ export function createDatasourcesStore() {
store.update(state => ({
...state,
selectedDatasourceId: id,
// Remove any possible schema error
schemaError: null,
}))
}

View file

@@ -21,6 +21,7 @@ import {
CreateDatasourceRequest,
VerifyDatasourceRequest,
VerifyDatasourceResponse,
FetchDatasourceInfoResponse,
IntegrationBase,
DatasourcePlus,
} from "@budibase/types"
@@ -153,6 +154,21 @@ export async function verify(
}
}
export async function information(
ctx: UserCtx<void, FetchDatasourceInfoResponse>
) {
const datasourceId = ctx.params.datasourceId
const datasource = await sdk.datasources.get(datasourceId, { enriched: true })
const connector = (await getConnector(datasource)) as DatasourcePlus
if (!connector.getTableNames) {
ctx.throw(400, "Table name fetching not supported by datasource")
}
const tableNames = await connector.getTableNames()
ctx.body = {
tableNames,
}
}
export async function buildSchemaFromDb(ctx: UserCtx) {
const db = context.getAppDB()
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
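
Note on the new endpoint: the information handler above is registered under GET /api/datasources/:datasourceId/info in the router change below and responds with a { tableNames } body. A rough sketch of how a client could call it — the fetch wrapper and error handling are illustrative assumptions, not Budibase code, and authentication is assumed to be handled by the surrounding app:

// Hypothetical helper for the new info endpoint.
async function fetchDatasourceTableNames(datasourceId: string): Promise<string[]> {
  const res = await fetch(`/api/datasources/${datasourceId}/info`, {
    headers: { Accept: "application/json" },
  })
  if (!res.ok) {
    // e.g. 400 when the connector does not implement getTableNames
    throw new Error(`datasource info request failed: ${res.status}`)
  }
  const body: { tableNames: string[] } = await res.json()
  return body.tableNames
}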

View file

@@ -20,6 +20,11 @@ router
authorized(permissions.BUILDER),
datasourceController.verify
)
.get(
"/api/datasources/:datasourceId/info",
authorized(permissions.BUILDER),
datasourceController.information
)
.get(
"/api/datasources/:datasourceId",
authorized(

View file

@@ -87,7 +87,7 @@ describe("/datasources", () => {
expect(contents.rows.length).toEqual(1)
// update the datasource to remove the variables
datasource.config.dynamicVariables = []
datasource.config!.dynamicVariables = []
const res = await request
.put(`/api/datasources/${datasource._id}`)
.send(datasource)

View file

@@ -26,7 +26,7 @@ jest.setTimeout(30000)
jest.unmock("pg")
describe("row api - postgres", () => {
describe("postgres integrations", () => {
let makeRequest: MakeRequestResponse,
postgresDatasource: Datasource,
primaryPostgresTable: Table,
@@ -52,8 +52,8 @@ describe("row api - postgres", () => {
makeRequest = generateMakeRequest(apiKey, true)
})
beforeEach(async () => {
postgresDatasource = await config.createDatasource({
function pgDatasourceConfig() {
return {
datasource: {
type: "datasource",
source: SourceName.POSTGRES,
@@ -70,7 +70,11 @@ describe("row api - postgres", () => {
ca: false,
},
},
})
}
}
beforeEach(async () => {
postgresDatasource = await config.createDatasource(pgDatasourceConfig())
async function createAuxTable(prefix: string) {
return await config.createTable({
@@ -1024,4 +1028,43 @@ describe("row api - postgres", () => {
})
})
})
describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => {
const config = pgDatasourceConfig()
const response = await makeRequest(
"post",
"/api/datasources/verify",
config
)
expect(response.status).toBe(200)
expect(response.body.connected).toBe(true)
})
it("should state an invalid datasource cannot connect", async () => {
const config = pgDatasourceConfig()
config.datasource.config.password = "wrongpassword"
const response = await makeRequest(
"post",
"/api/datasources/verify",
config
)
expect(response.status).toBe(200)
expect(response.body.connected).toBe(false)
expect(response.body.error).toBeDefined()
})
})
describe("GET /api/datasources/:datasourceId/info", () => {
it("should fetch information about postgres datasource", async () => {
const primaryName = primaryPostgresTable.name
const response = await makeRequest(
"get",
`/api/datasources/${postgresDatasource._id}/info`
)
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
})
})
})

View file

@@ -63,10 +63,13 @@ const SCHEMA: Integration = {
relationships: false,
docs: "https://developers.google.com/sheets/api/quickstart/nodejs",
description:
"Create and collaborate on online spreadsheets in real-time and from any device. ",
"Create and collaborate on online spreadsheets in real-time and from any device.",
friendlyName: "Google Sheets",
type: "Spreadsheet",
features: [DatasourceFeature.CONNECTION_CHECKING],
features: [
DatasourceFeature.CONNECTION_CHECKING,
DatasourceFeature.FETCH_TABLE_NAMES,
],
datasource: {
spreadsheetId: {
display: "Google Sheet URL",
@@ -145,7 +148,6 @@ class GoogleSheetsIntegration implements DatasourcePlus {
async testConnection(): Promise<ConnectionInfo> {
try {
await this.connect()
await this.client.loadInfo()
return { connected: true }
} catch (e: any) {
return {
@@ -240,6 +242,12 @@ class GoogleSheetsIntegration implements DatasourcePlus {
}
}
async getTableNames(): Promise<string[]> {
await this.connect()
const sheets = this.client.sheetsByIndex
return sheets.map(s => s.title)
}
getTableSchema(title: string, headerValues: string[], id?: string) {
// base table
const table: Table = {
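
For reference, the sheet-title lookup performed by the new getTableNames can be sketched on its own with the google-spreadsheet client the integration wraps. Auth setup is omitted here and is an assumption; the integration itself handles it via useOAuth2Client:

import { GoogleSpreadsheet } from "google-spreadsheet"

// Standalone sketch: list worksheet titles the way getTableNames does.
async function listSheetTitles(spreadsheetId: string): Promise<string[]> {
  const doc = new GoogleSpreadsheet(spreadsheetId)
  // authentication (OAuth2 client or service account) would be configured here
  await doc.loadInfo() // populates sheetsByIndex
  return doc.sheetsByIndex.map(sheet => sheet.title)
}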

View file

@@ -20,7 +20,6 @@ import {
} from "./utils"
import Sql from "./base/sql"
import { MSSQLTablesResponse, MSSQLColumn } from "./base/types"
const sqlServer = require("mssql")
const DEFAULT_SCHEMA = "dbo"
@@ -41,7 +40,10 @@ const SCHEMA: Integration = {
"Microsoft SQL Server is a relational database management system developed by Microsoft. ",
friendlyName: "MS SQL Server",
type: "Relational",
features: [DatasourceFeature.CONNECTION_CHECKING],
features: [
DatasourceFeature.CONNECTION_CHECKING,
DatasourceFeature.FETCH_TABLE_NAMES,
],
datasource: {
user: {
type: DatasourceFieldType.STRING,
@@ -284,6 +286,20 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
this.schemaErrors = final.errors
}
async queryTableNames() {
let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
const schema = this.config.schema || DEFAULT_SCHEMA
return tableInfo
.filter((record: any) => record.TABLE_SCHEMA === schema)
.map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
}
async getTableNames() {
await this.connect()
return this.queryTableNames()
}
async read(query: SqlQuery | string) {
await this.connect()
const response = await this.internalQuery(getSqlQuery(query))
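
TABLES_SQL and MASTER_TABLES are defined elsewhere in this integration and are not shown in this hunk. For orientation only, a query of roughly the following shape — an assumption, not the file's actual constant — yields the TABLE_SCHEMA and TABLE_NAME columns that queryTableNames filters on:

// Illustrative only: the integration's real TABLES_SQL may differ.
const EXAMPLE_TABLES_SQL = `
  SELECT TABLE_SCHEMA, TABLE_NAME
  FROM INFORMATION_SCHEMA.TABLES
  WHERE TABLE_TYPE = 'BASE TABLE'
`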

View file

@@ -36,7 +36,10 @@ const SCHEMA: Integration = {
type: "Relational",
description:
"MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
features: [DatasourceFeature.CONNECTION_CHECKING],
features: [
DatasourceFeature.CONNECTION_CHECKING,
DatasourceFeature.FETCH_TABLE_NAMES,
],
datasource: {
host: {
type: DatasourceFieldType.STRING,
@@ -214,20 +217,11 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
const tables: { [key: string]: Table } = {}
const database = this.config.database
await this.connect()
try {
// get the tables first
const tablesResp: Record<string, string>[] = await this.internalQuery(
{ sql: "SHOW TABLES;" },
{ connect: false }
)
const tableNames: string[] = tablesResp.map(
(obj: any) =>
obj[`Tables_in_${database}`] ||
obj[`Tables_in_${database.toLowerCase()}`]
)
const tableNames = await this.queryTableNames()
for (let tableName of tableNames) {
const primaryKeys = []
const schema: TableSchema = {}
@@ -274,6 +268,28 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
this.schemaErrors = final.errors
}
async queryTableNames() {
const database = this.config.database
const tablesResp: Record<string, string>[] = await this.internalQuery(
{ sql: "SHOW TABLES;" },
{ connect: false }
)
return tablesResp.map(
(obj: any) =>
obj[`Tables_in_${database}`] ||
obj[`Tables_in_${database.toLowerCase()}`]
)
}
async getTableNames() {
await this.connect()
try {
return this.queryTableNames()
} finally {
await this.disconnect()
}
}
async create(query: SqlQuery | string) {
const results = await this.internalQuery(getSqlQuery(query))
return results.length ? results : [{ created: true }]
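
The Tables_in_${database} lookup in queryTableNames exists because SHOW TABLES returns a single column whose name embeds the database name, and its casing may not match the configured value. A small self-contained illustration of the row shape being mapped (the sample data is invented):

// Example rows as SHOW TABLES would return them for a database named "mydb".
const database = "mydb"
const tablesResp: Record<string, string>[] = [
  { Tables_in_mydb: "users" },
  { Tables_in_mydb: "orders" },
]
// Same mapping as queryTableNames above.
const tableNames = tablesResp.map(
  obj => obj[`Tables_in_${database}`] || obj[`Tables_in_${database.toLowerCase()}`]
)
// tableNames -> ["users", "orders"]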

View file

@@ -50,7 +50,10 @@ const SCHEMA: Integration = {
type: "Relational",
description:
"Oracle Database is an object-relational database management system developed by Oracle Corporation",
features: [DatasourceFeature.CONNECTION_CHECKING],
features: [
DatasourceFeature.CONNECTION_CHECKING,
DatasourceFeature.FETCH_TABLE_NAMES,
],
datasource: {
host: {
type: DatasourceFieldType.STRING,
@@ -323,6 +326,13 @@ class OracleIntegration extends Sql implements DatasourcePlus {
this.schemaErrors = final.errors
}
async getTableNames() {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL,
})
return (columnsResponse.rows || []).map(row => row.TABLE_NAME)
}
async testConnection() {
const response: ConnectionInfo = {
connected: false,

View file

@@ -52,7 +52,10 @@ const SCHEMA: Integration = {
type: "Relational",
description:
"PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
features: [DatasourceFeature.CONNECTION_CHECKING],
features: [
DatasourceFeature.CONNECTION_CHECKING,
DatasourceFeature.FETCH_TABLE_NAMES,
],
datasource: {
host: {
type: DatasourceFieldType.STRING,
@@ -126,14 +129,15 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
COLUMNS_SQL!: string
PRIMARY_KEYS_SQL = `
select tc.table_schema, tc.table_name, kc.column_name as primary_key
from information_schema.table_constraints tc
join
information_schema.key_column_usage kc on kc.table_name = tc.table_name
and kc.table_schema = tc.table_schema
and kc.constraint_name = tc.constraint_name
where tc.constraint_type = 'PRIMARY KEY';
PRIMARY_KEYS_SQL = () => `
SELECT pg_namespace.nspname table_schema
, pg_class.relname table_name
, pg_attribute.attname primary_key
FROM pg_class
JOIN pg_index ON pg_class.oid = pg_index.indrelid AND pg_index.indisprimary
JOIN pg_attribute ON pg_attribute.attrelid = pg_class.oid AND pg_attribute.attnum = ANY(pg_index.indkey)
JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
WHERE pg_namespace.nspname = '${this.config.schema}';
`
constructor(config: PostgresConfig) {
@@ -239,7 +243,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
let tableKeys: { [key: string]: string[] } = {}
await this.openConnection()
try {
const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
const primaryKeysResponse = await this.client.query(
this.PRIMARY_KEYS_SQL()
)
for (let table of primaryKeysResponse.rows) {
const tableName = table.table_name
if (!tableKeys[tableName]) {
@@ -311,6 +317,17 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
}
}
async getTableNames() {
try {
await this.openConnection()
const columnsResponse: { rows: PostgresColumn[] } =
await this.client.query(this.COLUMNS_SQL)
return columnsResponse.rows.map(row => row.table_name)
} finally {
await this.closeConnection()
}
}
async create(query: SqlQuery | string) {
const response = await this.internalQuery(getSqlQuery(query))
return response.rows.length ? response.rows : [{ created: true }]
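
A likely reason PRIMARY_KEYS_SQL becomes a function in this change (an inference, not stated in the commit): the new query interpolates this.config.schema, and class field initializers run before the constructor body assigns config, so a plain template-string field would read schema from an undefined config. A minimal sketch of the distinction, outside the Budibase codebase:

// Sketch only: deferred vs eager interpolation of a config-dependent query.
class Example {
  config: { schema: string }

  // A plain field like the following would evaluate while this.config is still
  // undefined, because field initializers run before the constructor body:
  // BROKEN_SQL = `... where nspname = '${this.config.schema}'`

  // Wrapping it in a function defers interpolation until the query is built.
  PRIMARY_KEYS_SQL = () => `... where nspname = '${this.config.schema}'`

  constructor(config: { schema: string }) {
    this.config = config
  }
}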

View file

@@ -17,14 +17,15 @@ jest.mock("google-spreadsheet")
const { GoogleSpreadsheet } = require("google-spreadsheet")
const sheetsByTitle: { [title: string]: GoogleSpreadsheetWorksheet } = {}
const sheetsByIndex: GoogleSpreadsheetWorksheet[] = []
const mockGoogleIntegration = {
useOAuth2Client: jest.fn(),
loadInfo: jest.fn(),
sheetsByTitle,
sheetsByIndex,
}
GoogleSpreadsheet.mockImplementation(() => {
return {
useOAuth2Client: jest.fn(),
loadInfo: jest.fn(),
sheetsByTitle,
}
})
GoogleSpreadsheet.mockImplementation(() => mockGoogleIntegration)
import { structures } from "@budibase/backend-core/tests"
import TestConfiguration from "../../tests/utilities/TestConfiguration"
@@ -53,6 +54,8 @@ describe("Google Sheets Integration", () => {
},
})
await config.init()
jest.clearAllMocks()
})
function createBasicTable(name: string, columns: string[]): Table {
@@ -88,7 +91,7 @@ describe("Google Sheets Integration", () => {
}
describe("update table", () => {
test("adding a new field will be adding a new header row", async () => {
it("adding a new field will be adding a new header row", async () => {
await config.doInContext(structures.uuid(), async () => {
const tableColumns = ["name", "description", "new field"]
const table = createBasicTable(structures.uuid(), tableColumns)
@@ -103,7 +106,7 @@ describe("Google Sheets Integration", () => {
})
})
test("removing an existing field will remove the header from the google sheet", async () => {
it("removing an existing field will remove the header from the google sheet", async () => {
const sheet = await config.doInContext(structures.uuid(), async () => {
const tableColumns = ["name"]
const table = createBasicTable(structures.uuid(), tableColumns)
@@ -123,4 +126,33 @@ describe("Google Sheets Integration", () => {
expect((sheet.setHeaderRow as any).mock.calls[0][0]).toHaveLength(1)
})
})
describe("getTableNames", () => {
it("can fetch table names", async () => {
await config.doInContext(structures.uuid(), async () => {
const sheetNames: string[] = []
for (let i = 0; i < 5; i++) {
const sheet = createSheet({ headerValues: [] })
sheetsByIndex.push(sheet)
sheetNames.push(sheet.title)
}
const res = await integration.getTableNames()
expect(mockGoogleIntegration.loadInfo).toBeCalledTimes(1)
expect(res).toEqual(sheetNames)
})
})
})
describe("testConnection", () => {
it("can test successful connections", async () => {
await config.doInContext(structures.uuid(), async () => {
const res = await integration.testConnection()
expect(mockGoogleIntegration.loadInfo).toBeCalledTimes(1)
expect(res).toEqual({ connected: true })
})
})
})
})

View file

@@ -23,6 +23,10 @@ export interface VerifyDatasourceResponse {
error?: string
}
export interface FetchDatasourceInfoResponse {
tableNames: string[]
}
export interface UpdateDatasourceRequest extends Datasource {
datasource: Datasource
}

View file

@@ -75,6 +75,7 @@ export enum FilterType {
export enum DatasourceFeature {
CONNECTION_CHECKING = "connection",
FETCH_TABLE_NAMES = "fetch_table_names",
}
export interface StepDefinition {
@@ -150,4 +151,5 @@ export interface DatasourcePlus extends IntegrationBase {
getBindingIdentifier(): string
getStringConcat(parts: string[]): string
buildSchema(datasourceId: string, entities: Record<string, Table>): any
getTableNames(): Promise<string[]>
}
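
Since getTableNames is now a required member of DatasourcePlus while FETCH_TABLE_NAMES remains an opt-in feature flag, callers outside the controller shown earlier would likely keep the same defensive guard. A sketch under that assumption (the helper name is hypothetical):

import { IntegrationBase, DatasourcePlus } from "@budibase/types"

// Guard before using the new interface member, mirroring the information controller.
async function listTableNames(connector: IntegrationBase): Promise<string[]> {
  const plus = connector as DatasourcePlus
  if (typeof plus.getTableNames !== "function") {
    throw new Error("Table name fetching not supported by datasource")
  }
  return plus.getTableNames()
}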

View file

@@ -3,5 +3,4 @@ if [ -d "packages/pro/packages" ]; then
yarn
lerna bootstrap
yarn setup
fi