diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 94d78e94ff..29ca4123f5 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -12,7 +12,7 @@ import { isDocument, } from "@budibase/types" import { getCouchInfo } from "./connections" -import { directCouchCall } from "./utils" +import { directCouchUrlCall } from "./utils" import { getPouchDB } from "./pouchDB" import { WriteStream, ReadStream } from "fs" import { newid } from "../../docIds/newid" @@ -46,6 +46,8 @@ export class DatabaseImpl implements Database { private readonly instanceNano?: Nano.ServerScope private readonly pouchOpts: DatabaseOpts + private readonly couchInfo = getCouchInfo() + constructor(dbName?: string, opts?: DatabaseOpts, connection?: string) { if (dbName == null) { throw new Error("Database name cannot be undefined.") @@ -53,8 +55,8 @@ export class DatabaseImpl implements Database { this.name = dbName this.pouchOpts = opts || {} if (connection) { - const couchInfo = getCouchInfo(connection) - this.instanceNano = buildNano(couchInfo) + this.couchInfo = getCouchInfo(connection) + this.instanceNano = buildNano(this.couchInfo) } if (!DatabaseImpl.nano) { DatabaseImpl.init() @@ -67,7 +69,11 @@ export class DatabaseImpl implements Database { } async exists() { - let response = await directCouchCall(`/${this.name}`, "HEAD") + const response = await directCouchUrlCall({ + url: `${this.couchInfo.url}/${this.name}`, + method: "HEAD", + cookie: this.couchInfo.cookie, + }) return response.status === 200 } diff --git a/packages/backend-core/src/db/couch/connections.ts b/packages/backend-core/src/db/couch/connections.ts index 06c661f350..4214c7cdc6 100644 --- a/packages/backend-core/src/db/couch/connections.ts +++ b/packages/backend-core/src/db/couch/connections.ts @@ -4,21 +4,21 @@ export const getCouchInfo = (connection?: string) => { const urlInfo = getUrlInfo(connection) let username let password - if (env.COUCH_DB_USERNAME) { - // set from env - username = env.COUCH_DB_USERNAME - } else if (urlInfo.auth.username) { + if (urlInfo.auth?.username) { // set from url username = urlInfo.auth.username + } else if (env.COUCH_DB_USERNAME) { + // set from env + username = env.COUCH_DB_USERNAME } else if (!env.isTest()) { throw new Error("CouchDB username not set") } - if (env.COUCH_DB_PASSWORD) { - // set from env - password = env.COUCH_DB_PASSWORD - } else if (urlInfo.auth.password) { + if (urlInfo.auth?.password) { // set from url password = urlInfo.auth.password + } else if (env.COUCH_DB_PASSWORD) { + // set from env + password = env.COUCH_DB_PASSWORD } else if (!env.isTest()) { throw new Error("CouchDB password not set") } diff --git a/packages/backend-core/src/db/couch/utils.ts b/packages/backend-core/src/db/couch/utils.ts index 426bf92158..51b2a38998 100644 --- a/packages/backend-core/src/db/couch/utils.ts +++ b/packages/backend-core/src/db/couch/utils.ts @@ -9,6 +9,20 @@ export async function directCouchCall( ) { let { url, cookie } = getCouchInfo() const couchUrl = `${url}/${path}` + return await directCouchUrlCall({ url: couchUrl, cookie, method, body }) +} + +export async function directCouchUrlCall({ + url, + cookie, + method, + body, +}: { + url: string + cookie: string + method: string + body?: any +}) { const params: any = { method: method, headers: { @@ -19,7 +33,7 @@ export async function directCouchCall( params.body = JSON.stringify(body) params.headers["Content-Type"] = 
"application/json" } - return await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params) + return await fetch(checkSlashesInUrl(encodeURI(url)), params) } export async function directCouchQuery( diff --git a/packages/server/package.json b/packages/server/package.json index f0ece87bed..c65469f5cb 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -99,7 +99,7 @@ "mysql2": "2.3.3", "node-fetch": "2.6.7", "open": "8.4.0", - "pg": "8.5.1", + "pg": "8.10.0", "posthog-node": "1.3.0", "pouchdb": "7.3.0", "pouchdb-adapter-memory": "7.2.2", @@ -141,6 +141,7 @@ "@types/node": "14.18.20", "@types/node-fetch": "2.6.1", "@types/oracledb": "5.2.2", + "@types/pg": "8.6.6", "@types/pouchdb": "6.4.0", "@types/redis": "4.0.11", "@types/server-destroy": "1.0.1", diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts index b61b168980..8f13e0e618 100644 --- a/packages/server/src/api/controllers/datasource.ts +++ b/packages/server/src/api/controllers/datasource.ts @@ -18,11 +18,71 @@ import { Row, CreateDatasourceResponse, UpdateDatasourceResponse, - UpdateDatasourceRequest, CreateDatasourceRequest, + VerifyDatasourceRequest, + VerifyDatasourceResponse, + IntegrationBase, + DatasourcePlus, } from "@budibase/types" import sdk from "../../sdk" +function getErrorTables(errors: any, errorType: string) { + return Object.entries(errors) + .filter(entry => entry[1] === errorType) + .map(([name]) => name) +} + +function updateError(error: any, newError: any, tables: string[]) { + if (!error) { + error = "" + } + if (error.length > 0) { + error += "\n" + } + error += `${newError} ${tables.join(", ")}` + return error +} + +async function getConnector( + datasource: Datasource +): Promise { + const Connector = await getIntegration(datasource.source) + // can't enrich if it doesn't have an ID yet + if (datasource._id) { + datasource = await sdk.datasources.enrich(datasource) + } + // Connect to the DB and build the schema + return new Connector(datasource.config) +} + +async function buildSchemaHelper(datasource: Datasource) { + const connector = (await getConnector(datasource)) as DatasourcePlus + await connector.buildSchema(datasource._id!, datasource.entities!) 
+ + const errors = connector.schemaErrors + let error = null + if (errors && Object.keys(errors).length > 0) { + const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY) + const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN) + if (noKey.length) { + error = updateError( + error, + "No primary key constraint found for the following:", + noKey + ) + } + if (invalidCol.length) { + const invalidCols = Object.values(InvalidColumns).join(", ") + error = updateError( + error, + `Cannot use columns ${invalidCols} found in following:`, + invalidCol + ) + } + } + return { tables: connector.tables, error } +} + export async function fetch(ctx: UserCtx) { // Get internal tables const db = context.getAppDB() @@ -66,6 +126,33 @@ export async function fetch(ctx: UserCtx) { ctx.body = [bbInternalDb, ...datasources] } +export async function verify( + ctx: UserCtx +) { + const { datasource } = ctx.request.body + let existingDatasource: undefined | Datasource + if (datasource._id) { + existingDatasource = await sdk.datasources.get(datasource._id) + } + let enrichedDatasource = datasource + if (existingDatasource) { + enrichedDatasource = sdk.datasources.mergeConfigs( + datasource, + existingDatasource + ) + } + const connector = await getConnector(enrichedDatasource) + if (!connector.testConnection) { + ctx.throw(400, "Connection information verification not supported") + } + const response = await connector.testConnection() + + ctx.body = { + connected: response.connected, + error: response.error, + } +} + export async function buildSchemaFromDb(ctx: UserCtx) { const db = context.getAppDB() const datasource = await sdk.datasources.get(ctx.params.datasourceId) @@ -311,51 +398,3 @@ export async function query(ctx: UserCtx) { ctx.throw(400, err) } } - -function getErrorTables(errors: any, errorType: string) { - return Object.entries(errors) - .filter(entry => entry[1] === errorType) - .map(([name]) => name) -} - -function updateError(error: any, newError: any, tables: string[]) { - if (!error) { - error = "" - } - if (error.length > 0) { - error += "\n" - } - error += `${newError} ${tables.join(", ")}` - return error -} - -async function buildSchemaHelper(datasource: Datasource) { - const Connector = await getIntegration(datasource.source) - datasource = await sdk.datasources.enrich(datasource) - // Connect to the DB and build the schema - const connector = new Connector(datasource.config) - await connector.buildSchema(datasource._id, datasource.entities) - - const errors = connector.schemaErrors - let error = null - if (errors && Object.keys(errors).length > 0) { - const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY) - const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN) - if (noKey.length) { - error = updateError( - error, - "No primary key constraint found for the following:", - noKey - ) - } - if (invalidCol.length) { - const invalidCols = Object.values(InvalidColumns).join(", ") - error = updateError( - error, - `Cannot use columns ${invalidCols} found in following:`, - invalidCol - ) - } - } - return { tables: connector.tables, error } -} diff --git a/packages/server/src/api/controllers/integration.ts b/packages/server/src/api/controllers/integration.ts index 743d216da7..23defac831 100644 --- a/packages/server/src/api/controllers/integration.ts +++ b/packages/server/src/api/controllers/integration.ts @@ -1,4 +1,4 @@ -import { getDefinitions } from "../../integrations" +import { getDefinition, getDefinitions } from "../../integrations" import { 
BBContext } from "@budibase/types" export async function fetch(ctx: BBContext) { @@ -7,7 +7,7 @@ export async function fetch(ctx: BBContext) { } export async function find(ctx: BBContext) { - const defs = await getDefinitions() + const def = await getDefinition(ctx.params.type) + ctx.body = def ctx.status = 200 - ctx.body = defs[ctx.params.type] } diff --git a/packages/server/src/api/routes/datasource.ts b/packages/server/src/api/routes/datasource.ts index 85929d2180..654fb794e3 100644 --- a/packages/server/src/api/routes/datasource.ts +++ b/packages/server/src/api/routes/datasource.ts @@ -15,6 +15,11 @@ router authorized(permissions.BUILDER), datasourceController.fetch ) + .post( + "/api/datasources/verify", + authorized(permissions.BUILDER), + datasourceController.verify + ) .get( "/api/datasources/:datasourceId", authorized( diff --git a/packages/server/src/db/dynamoClient.ts b/packages/server/src/db/dynamoClient.ts index cb045b7d6f..597be2b21e 100644 --- a/packages/server/src/db/dynamoClient.ts +++ b/packages/server/src/db/dynamoClient.ts @@ -140,7 +140,7 @@ export function init(endpoint: string) { docClient = new AWS.DynamoDB.DocumentClient(docClientParams) } -if (!env.isProd()) { +if (!env.isProd() && !env.isJest()) { env._set("AWS_ACCESS_KEY_ID", "KEY_ID") env._set("AWS_SECRET_ACCESS_KEY", "SECRET_KEY") init("http://localhost:8333") diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts index 78075b4e54..79f6db5cd1 100644 --- a/packages/server/src/integration-test/postgres.spec.ts +++ b/packages/server/src/integration-test/postgres.spec.ts @@ -19,7 +19,6 @@ import _ from "lodash" import { generator } from "@budibase/backend-core/tests" import { utils } from "@budibase/backend-core" import { GenericContainer } from "testcontainers" -import { generateRowIdField } from "../integrations/utils" const config = setup.getConfig()! 
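// Usage sketch (not part of the patch): how a caller might exercise the new
// POST /api/datasources/verify route added above. The base URL and the auth
// cookie are assumptions for illustration only; the request and response shapes
// follow the VerifyDatasourceRequest / VerifyDatasourceResponse types introduced
// in @budibase/types by this change.
import fetch from "node-fetch"
import { Datasource, VerifyDatasourceResponse } from "@budibase/types"

async function verifyDatasource(
  baseUrl: string,
  datasource: Datasource,
  authCookie: string
): Promise<VerifyDatasourceResponse> {
  const res = await fetch(`${baseUrl}/api/datasources/verify`, {
    method: "POST",
    headers: { "Content-Type": "application/json", cookie: authCookie },
    // body matches VerifyDatasourceRequest: { datasource }
    body: JSON.stringify({ datasource }),
  })
  // response matches VerifyDatasourceResponse: { connected: boolean, error?: string }
  return (await res.json()) as VerifyDatasourceResponse
}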
diff --git a/packages/server/src/integrations/airtable.ts b/packages/server/src/integrations/airtable.ts index 1f56f0619b..a102caab76 100644 --- a/packages/server/src/integrations/airtable.ts +++ b/packages/server/src/integrations/airtable.ts @@ -1,11 +1,13 @@ import { - Integration, + ConnectionInfo, + DatasourceFeature, DatasourceFieldType, - QueryType, + Integration, IntegrationBase, + QueryType, } from "@budibase/types" -const Airtable = require("airtable") +import Airtable from "airtable" interface AirtableConfig { apiKey: string @@ -18,6 +20,7 @@ const SCHEMA: Integration = { "Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.", friendlyName: "Airtable", type: "Spreadsheet", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { apiKey: { type: DatasourceFieldType.PASSWORD, @@ -81,13 +84,37 @@ const SCHEMA: Integration = { class AirtableIntegration implements IntegrationBase { private config: AirtableConfig - private client: any + private client constructor(config: AirtableConfig) { this.config = config this.client = new Airtable(config).base(config.base) } + async testConnection(): Promise { + const mockTable = Date.now().toString() + try { + await this.client.makeRequest({ + path: `/${mockTable}`, + }) + + return { connected: true } + } catch (e: any) { + if ( + e.message === + `Could not find table ${mockTable} in application ${this.config.base}` + ) { + // The request managed to check the application, so the credentials are valid + return { connected: true } + } + + return { + connected: false, + error: e.message as string, + } + } + } + async create(query: { table: any; json: any }) { const { table, json } = query diff --git a/packages/server/src/integrations/arangodb.ts b/packages/server/src/integrations/arangodb.ts index e28940f36e..b486748a68 100644 --- a/packages/server/src/integrations/arangodb.ts +++ b/packages/server/src/integrations/arangodb.ts @@ -3,9 +3,11 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" -const { Database, aql } = require("arangojs") +import { Database, aql } from "arangojs" interface ArangodbConfig { url: string @@ -21,6 +23,7 @@ const SCHEMA: Integration = { type: "Non-relational", description: "ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. 
", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { url: { type: DatasourceFieldType.STRING, @@ -58,7 +61,7 @@ const SCHEMA: Integration = { class ArangoDBIntegration implements IntegrationBase { private config: ArangodbConfig - private client: any + private client constructor(config: ArangodbConfig) { const newConfig = { @@ -74,6 +77,19 @@ class ArangoDBIntegration implements IntegrationBase { this.client = new Database(newConfig) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.client.get() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + async read(query: { sql: any }) { try { const result = await this.client.query(query.sql) diff --git a/packages/server/src/integrations/couchdb.ts b/packages/server/src/integrations/couchdb.ts index 257b84ca13..4ccbd5456d 100644 --- a/packages/server/src/integrations/couchdb.ts +++ b/packages/server/src/integrations/couchdb.ts @@ -1,4 +1,6 @@ import { + ConnectionInfo, + DatasourceFeature, DatasourceFieldType, Document, Integration, @@ -18,6 +20,7 @@ const SCHEMA: Integration = { type: "Non-relational", description: "Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { url: { type: DatasourceFieldType.STRING, @@ -61,21 +64,32 @@ const SCHEMA: Integration = { } class CouchDBIntegration implements IntegrationBase { - private config: CouchDBConfig - private readonly client: any + private readonly client: dbCore.DatabaseImpl constructor(config: CouchDBConfig) { - this.config = config this.client = dbCore.DatabaseWithConnection(config.database, config.url) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + const result = await this.query("exists", "validation error", {}) + response.connected = result === true + } catch (e: any) { + response.error = e.message as string + } + return response + } + async query( command: string, errorMsg: string, query: { json?: object; id?: string } ) { try { - return await this.client[command](query.id || query.json) + return await (this.client as any)[command](query.id || query.json) } catch (err) { console.error(errorMsg, err) throw err diff --git a/packages/server/src/integrations/dynamodb.ts b/packages/server/src/integrations/dynamodb.ts index 28c1c7b52b..28b42c7a54 100644 --- a/packages/server/src/integrations/dynamodb.ts +++ b/packages/server/src/integrations/dynamodb.ts @@ -3,10 +3,13 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import AWS from "aws-sdk" import { AWS_REGION } from "../db/dynamoClient" +import { DocumentClient } from "aws-sdk/clients/dynamodb" interface DynamoDBConfig { region: string @@ -22,6 +25,7 @@ const SCHEMA: Integration = { "Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.", friendlyName: "DynamoDB", type: "Non-relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { region: { type: DatasourceFieldType.STRING, @@ -128,7 +132,7 @@ const SCHEMA: Integration = { class DynamoDBIntegration implements IntegrationBase { private config: DynamoDBConfig - private client: any + private client constructor(config: DynamoDBConfig) { this.config = config @@ -148,7 +152,23 @@ class DynamoDBIntegration implements IntegrationBase { this.client = new 
AWS.DynamoDB.DocumentClient(this.config) } - async create(query: { table: string; json: object }) { + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + const scanRes = await new AWS.DynamoDB(this.config).listTables().promise() + response.connected = !!scanRes.$response + } catch (e: any) { + response.error = e.message as string + } + return response + } + + async create(query: { + table: string + json: Omit + }) { const params = { TableName: query.table, ...query.json, @@ -189,7 +209,10 @@ class DynamoDBIntegration implements IntegrationBase { return new AWS.DynamoDB(this.config).describeTable(params).promise() } - async get(query: { table: string; json: object }) { + async get(query: { + table: string + json: Omit + }) { const params = { TableName: query.table, ...query.json, @@ -197,7 +220,10 @@ class DynamoDBIntegration implements IntegrationBase { return this.client.get(params).promise() } - async update(query: { table: string; json: object }) { + async update(query: { + table: string + json: Omit + }) { const params = { TableName: query.table, ...query.json, @@ -205,7 +231,10 @@ class DynamoDBIntegration implements IntegrationBase { return this.client.update(params).promise() } - async delete(query: { table: string; json: object }) { + async delete(query: { + table: string + json: Omit + }) { const params = { TableName: query.table, ...query.json, diff --git a/packages/server/src/integrations/elasticsearch.ts b/packages/server/src/integrations/elasticsearch.ts index aeba628d30..af52799c51 100644 --- a/packages/server/src/integrations/elasticsearch.ts +++ b/packages/server/src/integrations/elasticsearch.ts @@ -3,6 +3,8 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { Client, ClientOptions } from "@elastic/elasticsearch" @@ -20,6 +22,7 @@ const SCHEMA: Integration = { "Elasticsearch is a search engine based on the Lucene library. 
It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.", friendlyName: "ElasticSearch", type: "Non-relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { url: { type: DatasourceFieldType.STRING, @@ -95,7 +98,7 @@ const SCHEMA: Integration = { class ElasticSearchIntegration implements IntegrationBase { private config: ElasticsearchConfig - private client: any + private client constructor(config: ElasticsearchConfig) { this.config = config @@ -114,6 +117,18 @@ class ElasticSearchIntegration implements IntegrationBase { this.client = new Client(clientConfig) } + async testConnection(): Promise { + try { + await this.client.info() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + async create(query: { index: string; json: object }) { const { index, json } = query diff --git a/packages/server/src/integrations/firebase.ts b/packages/server/src/integrations/firebase.ts index a82b3be782..3907275f41 100644 --- a/packages/server/src/integrations/firebase.ts +++ b/packages/server/src/integrations/firebase.ts @@ -3,6 +3,8 @@ import { Integration, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { Firestore, WhereFilterOp } from "@google-cloud/firestore" @@ -18,6 +20,7 @@ const SCHEMA: Integration = { type: "Non-relational", description: "Cloud Firestore is a flexible, scalable database for mobile, web, and server development from Firebase and Google Cloud.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { email: { type: DatasourceFieldType.STRING, @@ -99,6 +102,18 @@ class FirebaseIntegration implements IntegrationBase { }) } + async testConnection(): Promise { + try { + await this.client.listCollections() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + async create(query: { json: object; extra: { [key: string]: string } }) { try { const documentReference = this.client diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 644f650ea3..eea9cc4176 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -1,4 +1,6 @@ import { + ConnectionInfo, + DatasourceFeature, DatasourceFieldType, DatasourcePlus, FieldType, @@ -64,6 +66,7 @@ const SCHEMA: Integration = { "Create and collaborate on online spreadsheets in real-time and from any device. 
", friendlyName: "Google Sheets", type: "Spreadsheet", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { spreadsheetId: { display: "Google Sheet URL", @@ -139,6 +142,19 @@ class GoogleSheetsIntegration implements DatasourcePlus { this.client = new GoogleSpreadsheet(spreadsheetId) } + async testConnection(): Promise { + try { + await this.connect() + await this.client.loadInfo() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + getBindingIdentifier() { return "" } diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts index f3285e441f..90dd7cfcd6 100644 --- a/packages/server/src/integrations/index.ts +++ b/packages/server/src/integrations/index.ts @@ -20,7 +20,7 @@ import env from "../environment" import { cloneDeep } from "lodash" import sdk from "../sdk" -const DEFINITIONS: { [key: string]: Integration } = { +const DEFINITIONS: Record = { [SourceName.POSTGRES]: postgres.schema, [SourceName.DYNAMODB]: dynamodb.schema, [SourceName.MONGODB]: mongodb.schema, @@ -36,9 +36,10 @@ const DEFINITIONS: { [key: string]: Integration } = { [SourceName.GOOGLE_SHEETS]: googlesheets.schema, [SourceName.REDIS]: redis.schema, [SourceName.SNOWFLAKE]: snowflake.schema, + [SourceName.ORACLE]: undefined, } -const INTEGRATIONS: { [key: string]: any } = { +const INTEGRATIONS: Record = { [SourceName.POSTGRES]: postgres.integration, [SourceName.DYNAMODB]: dynamodb.integration, [SourceName.MONGODB]: mongodb.integration, @@ -55,6 +56,7 @@ const INTEGRATIONS: { [key: string]: any } = { [SourceName.REDIS]: redis.integration, [SourceName.FIRESTORE]: firebase.integration, [SourceName.SNOWFLAKE]: snowflake.integration, + [SourceName.ORACLE]: undefined, } // optionally add oracle integration if the oracle binary can be installed @@ -67,10 +69,13 @@ if ( INTEGRATIONS[SourceName.ORACLE] = oracle.integration } -export async function getDefinition(source: SourceName): Promise { +export async function getDefinition( + source: SourceName +): Promise { // check if its integrated, faster - if (DEFINITIONS[source]) { - return DEFINITIONS[source] + const definition = DEFINITIONS[source] + if (definition) { + return definition } const allDefinitions = await getDefinitions() return allDefinitions[source] @@ -98,7 +103,7 @@ export async function getDefinitions() { } } -export async function getIntegration(integration: string) { +export async function getIntegration(integration: SourceName) { if (INTEGRATIONS[integration]) { return INTEGRATIONS[integration] } @@ -107,7 +112,7 @@ export async function getIntegration(integration: string) { for (let plugin of plugins) { if (plugin.name === integration) { // need to use commonJS require due to its dynamic runtime nature - const retrieved: any = await getDatasourcePlugin(plugin) + const retrieved = await getDatasourcePlugin(plugin) if (retrieved.integration) { return retrieved.integration } else { diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index eb87c1ccf1..47f36f60e9 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -8,6 +8,8 @@ import { QueryType, SqlQuery, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { getSqlQuery, @@ -39,6 +41,7 @@ const SCHEMA: Integration = { "Microsoft SQL Server is a relational database management system developed by Microsoft. 
", friendlyName: "MS SQL Server", type: "Relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { user: { type: DatasourceFieldType.STRING, @@ -121,6 +124,19 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { } } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.connect() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + getBindingIdentifier(): string { return `@p${this.index++}` } diff --git a/packages/server/src/integrations/mongodb.ts b/packages/server/src/integrations/mongodb.ts index 38b3891fe4..ee7302c501 100644 --- a/packages/server/src/integrations/mongodb.ts +++ b/packages/server/src/integrations/mongodb.ts @@ -3,6 +3,8 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { MongoClient, @@ -38,6 +40,7 @@ const getSchema = () => { type: "Non-relational", description: "MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { connectionString: { type: DatasourceFieldType.STRING, @@ -358,6 +361,19 @@ class MongoIntegration implements IntegrationBase { this.client = new MongoClient(config.connectionString, options) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.connect() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + async connect() { return this.client.connect() } diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index 8d984ed402..eb721a6e0f 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -7,6 +7,8 @@ import { Table, TableSchema, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { getSqlQuery, @@ -20,18 +22,11 @@ import { NUMBER_REGEX } from "../utilities" import Sql from "./base/sql" import { MySQLColumn } from "./base/types" -const mysql = require("mysql2/promise") +import mysql from "mysql2/promise" -interface MySQLConfig { - host: string - port: number - user: string - password: string +interface MySQLConfig extends mysql.ConnectionOptions { database: string - ssl?: { [key: string]: any } rejectUnauthorized: boolean - typeCast: Function - multipleStatements: boolean } const SCHEMA: Integration = { @@ -41,6 +36,7 @@ const SCHEMA: Integration = { type: "Relational", description: "MySQL Database Service is a fully managed database service to deploy cloud-native applications. 
", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: DatasourceFieldType.STRING, @@ -92,8 +88,6 @@ const SCHEMA: Integration = { }, } -const TimezoneAwareDateTypes = ["timestamp"] - function bindingTypeCoerce(bindings: any[]) { for (let i = 0; i < bindings.length; i++) { const binding = bindings[i] @@ -120,7 +114,7 @@ function bindingTypeCoerce(bindings: any[]) { class MySQLIntegration extends Sql implements DatasourcePlus { private config: MySQLConfig - private client: any + private client?: mysql.Connection public tables: Record = {} public schemaErrors: Record = {} @@ -134,7 +128,8 @@ class MySQLIntegration extends Sql implements DatasourcePlus { if ( config.rejectUnauthorized != null && !config.rejectUnauthorized && - config.ssl + config.ssl && + typeof config.ssl !== "string" ) { config.ssl.rejectUnauthorized = config.rejectUnauthorized } @@ -160,6 +155,22 @@ class MySQLIntegration extends Sql implements DatasourcePlus { } } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + const [result] = await this.internalQuery( + { sql: "SELECT 1+1 AS checkRes" }, + { connect: true } + ) + response.connected = result?.checkRes == 2 + } catch (e: any) { + response.error = e.message as string + } + return response + } + getBindingIdentifier(): string { return "?" } @@ -173,7 +184,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { } async disconnect() { - await this.client.end() + await this.client!.end() } async internalQuery( @@ -192,10 +203,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus { ? baseBindings : bindingTypeCoerce(baseBindings) // Node MySQL is callback based, so we must wrap our call in a promise - const response = await this.client.query(query.sql, bindings) + const response = await this.client!.query(query.sql, bindings) return response[0] } finally { - if (opts?.connect) { + if (opts?.connect && this.client) { await this.disconnect() } } diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 65e0829905..f8ec6e8bae 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -7,6 +7,8 @@ import { SqlQuery, Table, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { buildExternalTableId, @@ -24,12 +26,7 @@ import { ExecuteOptions, Result, } from "oracledb" -import { - OracleTable, - OracleColumn, - OracleColumnsResponse, - OracleConstraint, -} from "./base/types" +import { OracleTable, OracleColumn, OracleColumnsResponse } from "./base/types" let oracledb: any try { oracledb = require("oracledb") @@ -53,6 +50,7 @@ const SCHEMA: Integration = { type: "Relational", description: "Oracle Database is an object-relational database management system developed by Oracle Corporation", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: DatasourceFieldType.STRING, @@ -325,6 +323,30 @@ class OracleIntegration extends Sql implements DatasourcePlus { this.schemaErrors = final.errors } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + let connection + try { + connection = await this.getConnection() + response.connected = true + } catch (err: any) { + response.connected = false + response.error = err.message + } finally { + if (connection) { + try { + await connection.close() + } catch (err: any) { + response.connected = false + response.error = err.message + } + } + } + return response 
+ } + private async internalQuery(query: SqlQuery): Promise> { let connection try { diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index c981c3acc5..bf77ec08c6 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -6,6 +6,8 @@ import { SqlQuery, Table, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { getSqlQuery, @@ -18,7 +20,7 @@ import Sql from "./base/sql" import { PostgresColumn } from "./base/types" import { escapeDangerousCharacters } from "../utilities" -const { Client, types } = require("pg") +import { Client, types } from "pg" // Return "date" and "timestamp" types as plain strings. // This lets us reference the original stored timezone. @@ -50,6 +52,7 @@ const SCHEMA: Integration = { type: "Relational", description: "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: DatasourceFieldType.STRING, @@ -114,7 +117,7 @@ const SCHEMA: Integration = { } class PostgresIntegration extends Sql implements DatasourcePlus { - private readonly client: any + private readonly client: Client private readonly config: PostgresConfig private index: number = 1 private open: boolean @@ -150,6 +153,21 @@ class PostgresIntegration extends Sql implements DatasourcePlus { this.open = false } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.openConnection() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } finally { + await this.closeConnection() + } + return response + } + getBindingIdentifier(): string { return `$${this.index++}` } @@ -163,7 +181,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus { if (!this.config.schema) { this.config.schema = "public" } - this.client.query(`SET search_path TO ${this.config.schema}`) + await this.client.query(`SET search_path TO ${this.config.schema}`) this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'` this.open = true } diff --git a/packages/server/src/integrations/redis.ts b/packages/server/src/integrations/redis.ts index 73ef2bb55c..d71f66edc1 100644 --- a/packages/server/src/integrations/redis.ts +++ b/packages/server/src/integrations/redis.ts @@ -1,4 +1,10 @@ -import { DatasourceFieldType, Integration, QueryType } from "@budibase/types" +import { + ConnectionInfo, + DatasourceFeature, + DatasourceFieldType, + Integration, + QueryType, +} from "@budibase/types" import Redis from "ioredis" interface RedisConfig { @@ -11,9 +17,11 @@ interface RedisConfig { const SCHEMA: Integration = { docs: "https://redis.io/docs/", - description: "", + description: + "Redis is a caching tool, providing powerful key-value store capabilities.", friendlyName: "Redis", type: "Non-relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: "string", @@ -86,7 +94,7 @@ const SCHEMA: Integration = { class RedisIntegration { private readonly config: RedisConfig - private client: any + private client constructor(config: RedisConfig) { this.config = config @@ -99,6 +107,21 @@ class RedisIntegration { }) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.client.ping() + response.connected = true + } 
catch (e: any) { + response.error = e.message as string + } finally { + await this.disconnect() + } + return response + } + async disconnect() { return this.client.quit() } diff --git a/packages/server/src/integrations/s3.ts b/packages/server/src/integrations/s3.ts index ad3bb09109..0f9848ed59 100644 --- a/packages/server/src/integrations/s3.ts +++ b/packages/server/src/integrations/s3.ts @@ -3,10 +3,12 @@ import { QueryType, IntegrationBase, DatasourceFieldType, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" -const AWS = require("aws-sdk") -const csv = require("csvtojson") +import AWS from "aws-sdk" +import csv from "csvtojson" interface S3Config { region: string @@ -22,6 +24,7 @@ const SCHEMA: Integration = { "Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.", friendlyName: "Amazon S3", type: "Object store", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { region: { type: "string", @@ -152,7 +155,7 @@ const SCHEMA: Integration = { class S3Integration implements IntegrationBase { private readonly config: S3Config - private client: any + private client constructor(config: S3Config) { this.config = config @@ -165,6 +168,19 @@ class S3Integration implements IntegrationBase { this.client = new AWS.S3(this.config) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.client.listBuckets().promise() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + async create(query: { bucket: string location: string diff --git a/packages/server/src/integrations/snowflake.ts b/packages/server/src/integrations/snowflake.ts index db702520f9..9b743131ae 100644 --- a/packages/server/src/integrations/snowflake.ts +++ b/packages/server/src/integrations/snowflake.ts @@ -1,4 +1,10 @@ -import { Integration, QueryType, SqlQuery } from "@budibase/types" +import { + ConnectionInfo, + DatasourceFeature, + Integration, + QueryType, + SqlQuery, +} from "@budibase/types" import { Snowflake } from "snowflake-promise" interface SnowflakeConfig { @@ -16,6 +22,7 @@ const SCHEMA: Integration = { "Snowflake is a solution for data warehousing, data lakes, data engineering, data science, data application development, and securely sharing and consuming shared data.", friendlyName: "Snowflake", type: "Relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { account: { type: "string", @@ -65,6 +72,18 @@ class SnowflakeIntegration { this.client = new Snowflake(config) } + async testConnection(): Promise { + try { + await this.client.connect() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + async internalQuery(query: SqlQuery) { await this.client.connect() try { diff --git a/packages/server/src/migrations/functions/backfill/app/queries.ts b/packages/server/src/migrations/functions/backfill/app/queries.ts index e66c7af841..e028721bce 100644 --- a/packages/server/src/migrations/functions/backfill/app/queries.ts +++ b/packages/server/src/migrations/functions/backfill/app/queries.ts @@ -33,7 +33,7 @@ export const backfill = async (appDb: any, timestamp: string | number) => { datasource = { type: "unknown", _id: query.datasourceId, - source: SourceName.UNKNOWN, + source: "unknown" as SourceName, } } else { throw e diff --git a/packages/server/src/sdk/app/datasources/datasources.ts 
b/packages/server/src/sdk/app/datasources/datasources.ts index b3fe5bcdf1..c886e6a15f 100644 --- a/packages/server/src/sdk/app/datasources/datasources.ts +++ b/packages/server/src/sdk/app/datasources/datasources.ts @@ -13,6 +13,7 @@ import { import { cloneDeep } from "lodash/fp" import { getEnvironmentVariables } from "../../utils" import { getDefinitions, getDefinition } from "../../../integrations" +import _ from "lodash" const ENV_VAR_PREFIX = "env." @@ -41,7 +42,7 @@ async function enrichDatasourceWithValues(datasource: Datasource) { { onlyFound: true } ) as Datasource const definition = await getDefinition(processed.source) - processed.config = checkDatasourceTypes(definition, processed.config) + processed.config = checkDatasourceTypes(definition!, processed.config) return { datasource: processed, envVars: env as Record, @@ -147,6 +148,11 @@ export function mergeConfigs(update: Datasource, old: Datasource) { } } } + + if (old.config?.auth) { + update.config = _.merge(old.config, update.config) + } + // update back to actual passwords for everything else for (let [key, value] of Object.entries(update.config)) { if (value !== PASSWORD_REPLACEMENT) { diff --git a/packages/types/src/api/web/app/datasource.ts b/packages/types/src/api/web/app/datasource.ts index d54259eab5..983fd45b92 100644 --- a/packages/types/src/api/web/app/datasource.ts +++ b/packages/types/src/api/web/app/datasource.ts @@ -14,6 +14,15 @@ export interface CreateDatasourceRequest { fetchSchema?: boolean } +export interface VerifyDatasourceRequest { + datasource: Datasource +} + +export interface VerifyDatasourceResponse { + connected: boolean + error?: string +} + export interface UpdateDatasourceRequest extends Datasource { datasource: Datasource } diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts index 605b431d9e..9df9670877 100644 --- a/packages/types/src/sdk/datasources.ts +++ b/packages/types/src/sdk/datasources.ts @@ -55,7 +55,6 @@ export enum SourceName { FIRESTORE = "FIRESTORE", REDIS = "REDIS", SNOWFLAKE = "SNOWFLAKE", - UNKNOWN = "unknown", } export enum IncludeRelationship { @@ -74,6 +73,10 @@ export enum FilterType { ONE_OF = "oneOf", } +export enum DatasourceFeature { + CONNECTION_CHECKING = "connection", +} + export interface StepDefinition { key: string template: string @@ -112,6 +115,7 @@ export interface Integration { docs: string plus?: boolean auth?: { type: string } + features?: DatasourceFeature[] relationships?: boolean description: string friendlyName: string @@ -124,11 +128,17 @@ export interface Integration { extra?: ExtraQueryConfig } +export type ConnectionInfo = { + connected: boolean + error?: string +} + export interface IntegrationBase { create?(query: any): Promise read?(query: any): Promise update?(query: any): Promise delete?(query: any): Promise + testConnection?(): Promise } export interface DatasourcePlus extends IntegrationBase { diff --git a/qa-core/package.json b/qa-core/package.json index 2cfc8e2865..987fe36d7c 100644 --- a/qa-core/package.json +++ b/qa-core/package.json @@ -16,10 +16,12 @@ "test:notify": "node scripts/testResultsWebhook", "test:smoke": "yarn run test --testPathIgnorePatterns=/.+\\.integration\\.spec\\.ts", "test:ci": "start-server-and-test dev:built http://localhost:4001/health test:smoke", + "serve": "start-server-and-test dev:built http://localhost:4001/health", "dev:built": "cd ../ && yarn dev:built" }, "devDependencies": { "@budibase/types": "^2.3.17", + "@trendyol/jest-testcontainers": "^2.1.1", "@types/jest": 
"29.0.0", "@types/node-fetch": "2.6.2", "chance": "1.1.8", diff --git a/qa-core/src/integrations/validators/arango.integration.spec.ts b/qa-core/src/integrations/validators/arango.integration.spec.ts new file mode 100644 index 0000000000..7c0faafd61 --- /dev/null +++ b/qa-core/src/integrations/validators/arango.integration.spec.ts @@ -0,0 +1,77 @@ +import { GenericContainer, Wait } from "testcontainers" +import arangodb from "../../../../packages/server/src/integrations/arangodb" +import { generator } from "../../shared" + +jest.unmock("arangojs") + +describe("datasource validators", () => { + describe("arangodb", () => { + let connectionSettings: { + user: string + password: string + url: string + } + + beforeAll(async () => { + const user = "root" + const password = generator.hash() + const container = await new GenericContainer("arangodb") + .withExposedPorts(8529) + .withEnv("ARANGO_ROOT_PASSWORD", password) + .withWaitStrategy( + Wait.forLogMessage("is ready for business. Have fun!") + ) + .start() + + connectionSettings = { + user, + password, + url: `http://${container.getContainerIpAddress()}:${container.getMappedPort( + 8529 + )}`, + } + }) + + it("test valid connection string", async () => { + const integration = new arangodb.integration({ + url: connectionSettings.url, + username: connectionSettings.user, + password: connectionSettings.password, + databaseName: "", + collection: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong password", async () => { + const integration = new arangodb.integration({ + url: connectionSettings.url, + username: connectionSettings.user, + password: "wrong", + databaseName: "", + collection: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "not authorized to execute this request", + }) + }) + + it("test wrong url", async () => { + const integration = new arangodb.integration({ + url: "http://not.here", + username: connectionSettings.user, + password: connectionSettings.password, + databaseName: "", + collection: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "getaddrinfo ENOTFOUND not.here", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/couch.integration.spec.ts b/qa-core/src/integrations/validators/couch.integration.spec.ts new file mode 100644 index 0000000000..b0f4254610 --- /dev/null +++ b/qa-core/src/integrations/validators/couch.integration.spec.ts @@ -0,0 +1,67 @@ +import { GenericContainer } from "testcontainers" + +import couchdb from "../../../../packages/server/src/integrations/couchdb" +import { generator } from "../../shared" + +describe("datasource validators", () => { + describe("couchdb", () => { + let url: string + + beforeAll(async () => { + const user = generator.first() + const password = generator.hash() + + const container = await new GenericContainer("budibase/couchdb") + .withExposedPorts(5984) + .withEnv("COUCHDB_USER", user) + .withEnv("COUCHDB_PASSWORD", password) + .start() + + const host = container.getContainerIpAddress() + const port = container.getMappedPort(5984) + + await container.exec([ + `curl`, + `-u`, + `${user}:${password}`, + `-X`, + `PUT`, + `localhost:5984/db`, + ]) + url = `http://${user}:${password}@${host}:${port}` + }) + + it("test valid connection string", async () => { + const integration = new couchdb.integration({ + url, + database: "db", + }) + const result = 
await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid database", async () => { + const integration = new couchdb.integration({ + url, + database: "random_db", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + }) + }) + + it("test invalid url", async () => { + const integration = new couchdb.integration({ + url: "http://invalid:123", + database: "any", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "request to http://invalid:123/any failed, reason: getaddrinfo ENOTFOUND invalid", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/dynamodb.integration.spec.ts b/qa-core/src/integrations/validators/dynamodb.integration.spec.ts new file mode 100644 index 0000000000..c885f64213 --- /dev/null +++ b/qa-core/src/integrations/validators/dynamodb.integration.spec.ts @@ -0,0 +1,63 @@ +import { GenericContainer } from "testcontainers" +import { env } from "@budibase/backend-core" + +import dynamodb from "../../../../packages/server/src/integrations/dynamodb" +import { generator } from "../../shared" + +jest.unmock("aws-sdk") + +describe("datasource validators", () => { + describe("dynamodb", () => { + let connectionSettings: { + user: string + password: string + url: string + } + + beforeAll(async () => { + const user = "root" + const password = generator.hash() + const container = await new GenericContainer("amazon/dynamodb-local") + .withExposedPorts(8000) + .start() + + connectionSettings = { + user, + password, + url: `http://${container.getContainerIpAddress()}:${container.getMappedPort( + 8000 + )}`, + } + env._set("AWS_ACCESS_KEY_ID", "mocked_key") + env._set("AWS_SECRET_ACCESS_KEY", "mocked_secret") + }) + + it("test valid connection string", async () => { + const integration = new dynamodb.integration({ + endpoint: connectionSettings.url, + region: "", + accessKeyId: "", + secretAccessKey: "", + }) + + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong endpoint", async () => { + const integration = new dynamodb.integration({ + endpoint: "http://wrong.url:2880", + region: "", + accessKeyId: "", + secretAccessKey: "", + }) + + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "Inaccessible host: `wrong.url' at port `undefined'. 
This service may not be available in the `eu-west-1' region.", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/elastic.integration.spec.ts b/qa-core/src/integrations/validators/elastic.integration.spec.ts new file mode 100644 index 0000000000..39fd732744 --- /dev/null +++ b/qa-core/src/integrations/validators/elastic.integration.spec.ts @@ -0,0 +1,34 @@ +import { ElasticsearchContainer } from "testcontainers" +import elastic from "../../../../packages/server/src/integrations/elasticsearch" + +jest.unmock("@elastic/elasticsearch") + +describe("datasource validators", () => { + describe("elastic search", () => { + let url: string + + beforeAll(async () => { + const container = await new ElasticsearchContainer().start() + url = container.getHttpUrl() + }) + + it("test valid connection string", async () => { + const integration = new elastic.integration({ + url, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong connection string", async () => { + const integration = new elastic.integration({ + url: `http://localhost:5656`, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "connect ECONNREFUSED 127.0.0.1:5656", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/mongo.integration.spec.ts b/qa-core/src/integrations/validators/mongo.integration.spec.ts new file mode 100644 index 0000000000..a20b7cd7fa --- /dev/null +++ b/qa-core/src/integrations/validators/mongo.integration.spec.ts @@ -0,0 +1,100 @@ +import { GenericContainer } from "testcontainers" +import mongo from "../../../../packages/server/src/integrations/mongodb" +import { generator } from "../../shared" + +jest.unmock("mongodb") + +describe("datasource validators", () => { + describe("mongo", () => { + let connectionSettings: { + user: string + password: string + host: string + port: number + } + + function getConnectionString( + settings: Partial = {} + ) { + const { user, password, host, port } = { + ...connectionSettings, + ...settings, + } + return `mongodb://${user}:${password}@${host}:${port}` + } + + beforeAll(async () => { + const user = generator.name() + const password = generator.hash() + const container = await new GenericContainer("mongo") + .withExposedPorts(27017) + .withEnv("MONGO_INITDB_ROOT_USERNAME", user) + .withEnv("MONGO_INITDB_ROOT_PASSWORD", password) + .start() + + connectionSettings = { + user, + password, + host: container.getContainerIpAddress(), + port: container.getMappedPort(27017), + } + }) + + it("test valid connection string", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString(), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid password", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString({ password: "wrong" }), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Authentication failed.", + }) + }) + + it("test invalid username", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString({ user: "wrong" }), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + 
const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Authentication failed.", + }) + }) + + it("test invalid connection", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString({ host: "http://nothinghere" }), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Error: getaddrinfo ENOTFOUND http", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/mssql.integration.spec.ts b/qa-core/src/integrations/validators/mssql.integration.spec.ts new file mode 100644 index 0000000000..17f79d86ec --- /dev/null +++ b/qa-core/src/integrations/validators/mssql.integration.spec.ts @@ -0,0 +1,65 @@ +import { GenericContainer, Wait } from "testcontainers" +import { Duration, TemporalUnit } from "node-duration" + +import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer" + +jest.unmock("mssql") + +describe("datasource validators", () => { + describe("mssql", () => { + let host: string, port: number + + const password = "Str0Ng_p@ssW0rd!" + + beforeAll(async () => { + const container = await new GenericContainer( + "mcr.microsoft.com/mssql/server" + ) + .withExposedPorts(1433) + .withEnv("ACCEPT_EULA", "Y") + .withEnv("MSSQL_SA_PASSWORD", password) + .withEnv("MSSQL_PID", "Developer") + .withWaitStrategy(Wait.forHealthCheck()) + .withHealthCheck({ + test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`, + interval: new Duration(1000, TemporalUnit.MILLISECONDS), + timeout: new Duration(3, TemporalUnit.SECONDS), + retries: 20, + startPeriod: new Duration(100, TemporalUnit.MILLISECONDS), + }) + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(1433) + }) + + it("test valid connection string", async () => { + const integration = new mssql.integration({ + user: "sa", + password, + server: host, + port: port, + database: "master", + schema: "dbo", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid password", async () => { + const integration = new mssql.integration({ + user: "sa", + password: "wrong_pwd", + server: host, + port: port, + database: "master", + schema: "dbo", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "ConnectionError: Login failed for user 'sa'.", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/mysql.integration.spec.ts b/qa-core/src/integrations/validators/mysql.integration.spec.ts new file mode 100644 index 0000000000..6ee39731fa --- /dev/null +++ b/qa-core/src/integrations/validators/mysql.integration.spec.ts @@ -0,0 +1,70 @@ +import { GenericContainer } from "testcontainers" +import mysql from "../../../../packages/server/src/integrations/mysql" + +jest.unmock("mysql2/promise") + +describe("datasource validators", () => { + describe("mysql", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("mysql") + .withExposedPorts(3306) + .withEnv("MYSQL_ROOT_PASSWORD", "admin") + .withEnv("MYSQL_DATABASE", "db") + .withEnv("MYSQL_USER", "user") + .withEnv("MYSQL_PASSWORD", "password") + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(3306) + }) + + it("test valid 
connection string", async () => { + const integration = new mysql.integration({ + host, + port, + user: "user", + database: "db", + password: "password", + rejectUnauthorized: true, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid database", async () => { + const integration = new mysql.integration({ + host, + port, + user: "user", + database: "test", + password: "password", + rejectUnauthorized: true, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Access denied for user 'user'@'%' to database 'test'", + }) + }) + + it("test invalid password", async () => { + const integration = new mysql.integration({ + host, + port, + user: "root", + database: "test", + password: "wrong", + rejectUnauthorized: true, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "Access denied for user 'root'@'172.17.0.1' (using password: YES)", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/postgres.integration.spec.ts b/qa-core/src/integrations/validators/postgres.integration.spec.ts new file mode 100644 index 0000000000..029d929df0 --- /dev/null +++ b/qa-core/src/integrations/validators/postgres.integration.spec.ts @@ -0,0 +1,53 @@ +import { GenericContainer } from "testcontainers" + +jest.unmock("pg") + +describe("datasource validators", () => { + describe("postgres", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("postgres") + .withExposedPorts(5432) + .withEnv("POSTGRES_PASSWORD", "password") + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(5432) + }) + + it("test valid connection string", async () => { + const integration = new postgres.integration({ + host, + port, + database: "postgres", + user: "postgres", + password: "password", + schema: "public", + ssl: false, + rejectUnauthorized: false, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid connection string", async () => { + const integration = new postgres.integration({ + host, + port, + database: "postgres", + user: "wrong", + password: "password", + schema: "public", + ssl: false, + rejectUnauthorized: false, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: 'password authentication failed for user "wrong"', + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/redis.integration.spec.ts b/qa-core/src/integrations/validators/redis.integration.spec.ts new file mode 100644 index 0000000000..89ada2fe2d --- /dev/null +++ b/qa-core/src/integrations/validators/redis.integration.spec.ts @@ -0,0 +1,72 @@ +import redis from "../../../../packages/server/src/integrations/redis" +import { GenericContainer } from "testcontainers" +import { generator } from "../../shared" + +describe("datasource validators", () => { + describe("redis", () => { + describe("unsecured", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("redis") + .withExposedPorts(6379) + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(6379) + }) + + it("test valid connection", async () => { + const integration = new redis.integration({ + host, + port, + username: "", + }) + const result = await 
integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid connection even with wrong user/password", async () => { + const integration = new redis.integration({ + host, + port, + username: generator.name(), + password: generator.hash(), + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "WRONGPASS invalid username-password pair or user is disabled.", + }) + }) + }) + + describe("secured", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("redis") + .withExposedPorts(6379) + .withCmd(["redis-server", "--requirepass", "P@ssW0rd!"]) + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(6379) + }) + + it("test valid connection", async () => { + const integration = new redis.integration({ + host, + port, + username: "", + password: "P@ssW0rd!", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/s3.integration.spec.ts b/qa-core/src/integrations/validators/s3.integration.spec.ts new file mode 100644 index 0000000000..7bb415ee3d --- /dev/null +++ b/qa-core/src/integrations/validators/s3.integration.spec.ts @@ -0,0 +1,52 @@ +import s3 from "../../../../packages/server/src/integrations/s3" +import { GenericContainer } from "testcontainers" + +jest.unmock("aws-sdk") + +describe("datasource validators", () => { + describe("s3", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("localstack/localstack") + .withExposedPorts(4566) + .withEnv("SERVICES", "s3") + .withEnv("DEFAULT_REGION", "eu-west-1") + .withEnv("AWS_ACCESS_KEY_ID", "testkey") + .withEnv("AWS_SECRET_ACCESS_KEY", "testsecret") + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(4566) + }) + + it("test valid connection", async () => { + const integration = new s3.integration({ + region: "eu-west-1", + accessKeyId: "testkey", + secretAccessKey: "testsecret", + s3ForcePathStyle: false, + endpoint: `http://${host}:${port}`, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong endpoint", async () => { + const integration = new s3.integration({ + region: "eu-west-2", + accessKeyId: "testkey", + secretAccessKey: "testsecret", + s3ForcePathStyle: false, + endpoint: `http://wrong:123`, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "Inaccessible host: `wrong' at port `undefined'. 
This service may not be available in the `eu-west-2' region.", + }) + }) + }) +}) diff --git a/qa-core/src/shared/generator.ts b/qa-core/src/shared/generator.ts index c9395f7e47..1789fc0f75 100644 --- a/qa-core/src/shared/generator.ts +++ b/qa-core/src/shared/generator.ts @@ -1,3 +1,3 @@ -const Chance = require("chance") +import Chance from "chance" export default new Chance() diff --git a/qa-core/yarn.lock b/qa-core/yarn.lock index 42beb07108..272b4cc03a 100644 --- a/qa-core/yarn.lock +++ b/qa-core/yarn.lock @@ -304,6 +304,11 @@ "@babel/helper-validator-identifier" "^7.18.6" to-fast-properties "^2.0.0" +"@balena/dockerignore@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@balena/dockerignore/-/dockerignore-1.0.2.tgz#9ffe4726915251e8eb69f44ef3547e0da2c03e0d" + integrity sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q== + "@bcoe/v8-coverage@^0.2.3": version "0.2.3" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" @@ -779,6 +784,15 @@ request "^2.88.0" webfinger "^0.4.2" +"@trendyol/jest-testcontainers@^2.1.1": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@trendyol/jest-testcontainers/-/jest-testcontainers-2.1.1.tgz#dced95cf9c37b75efe0a65db9b75ae8912f2f14a" + integrity sha512-4iAc2pMsev4BTUzoA7jO1VvbTOU2N3juQUYa8TwiSPXPuQtxKwV9WB9ZEP+JQ+Pj15YqfGOXp5H0WNMPtapjiA== + dependencies: + cwd "^0.10.0" + node-duration "^1.0.4" + testcontainers "4.7.0" + "@tsconfig/node10@^1.0.7": version "1.0.9" resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" @@ -832,6 +846,13 @@ dependencies: "@babel/types" "^7.3.0" +"@types/dockerode@^2.5.34": + version "2.5.34" + resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-2.5.34.tgz#9adb884f7cc6c012a6eb4b2ad794cc5d01439959" + integrity sha512-LcbLGcvcBwBAvjH9UrUI+4qotY+A5WCer5r43DR5XHv2ZIEByNXFdPLo1XxR+v/BjkGjlggW8qUiXuVEhqfkpA== + dependencies: + "@types/node" "*" + "@types/graceful-fs@^4.1.3": version "4.1.5" resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" @@ -1006,6 +1027,11 @@ ansi-styles@^5.0.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== +any-promise@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + anymatch@^3.0.3: version "3.1.2" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" @@ -1044,7 +1070,7 @@ argsarray@0.0.1: resolved "https://registry.yarnpkg.com/argsarray/-/argsarray-0.0.1.tgz#6e7207b4ecdb39b0af88303fa5ae22bda8df61cb" integrity sha512-u96dg2GcAKtpTrBdDoFIM7PjcBA+6rSP0OR94MOReNRyUECL6MtQt5XXmRr4qrftYaef9+l5hcpO5te7sML1Cg== -asn1@~0.2.3: +asn1@^0.2.6, asn1@~0.2.3: version "0.2.6" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== @@ -1199,7 +1225,7 @@ base64url@3.x.x, base64url@^3.0.1: resolved 
"https://registry.yarnpkg.com/base64url/-/base64url-3.0.1.tgz#6399d572e2bc3f90a9a8b22d5dbb0a32d33f788d" integrity sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== -bcrypt-pbkdf@^1.0.0: +bcrypt-pbkdf@^1.0.0, bcrypt-pbkdf@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== @@ -1304,6 +1330,11 @@ buffer@^5.5.0, buffer@^5.6.0: base64-js "^1.3.1" ieee754 "^1.1.13" +buildcheck@~0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.6.tgz#89aa6e417cfd1e2196e3f8fe915eb709d2fe4238" + integrity sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A== + bull@4.10.1: version "4.10.1" resolved "https://registry.yarnpkg.com/bull/-/bull-4.10.1.tgz#f14974b6089358b62b495a2cbf838aadc098e43f" @@ -1319,6 +1350,11 @@ bull@4.10.1: semver "^7.3.2" uuid "^8.3.0" +byline@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1" + integrity sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q== + cache-content-type@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-content-type/-/cache-content-type-1.0.1.tgz#035cde2b08ee2129f4a8315ea8f00a00dba1453c" @@ -1546,6 +1582,14 @@ correlation-id@4.0.0: dependencies: uuid "^8.3.1" +cpu-features@~0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.7.tgz#81ba93e1d0a729fd25132a54c3ff689c37b542f7" + integrity sha512-fjzFmsUKKCrC9GrM1eQTvQx18e+kjXFzjRLvJPNEDjk31+bJ6ZiV6uchv/hzbzXVIgbWdrEyyX1IFKwse65+8w== + dependencies: + buildcheck "~0.0.6" + nan "^2.17.0" + create-require@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" @@ -1572,6 +1616,14 @@ crypt@0.0.2: resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b" integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow== +cwd@^0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/cwd/-/cwd-0.10.0.tgz#172400694057c22a13b0cf16162c7e4b7a7fe567" + integrity sha512-YGZxdTTL9lmLkCUTpg4j0zQ7IhRB5ZmqNBbGCl3Tg6MP/d5/6sY7L5mmTjzbc6JKgVZYiqTQTNhPFsbXNGlRaA== + dependencies: + find-pkg "^0.1.2" + fs-exists-sync "^0.1.0" + dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" @@ -1676,6 +1728,32 @@ diff@^4.0.1: resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== +docker-compose@^0.23.5: + version "0.23.19" + resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.23.19.tgz#9947726e2fe67bdfa9e8efe1ff15aa0de2e10eb8" + integrity sha512-v5vNLIdUqwj4my80wxFDkNH+4S85zsRuH29SO7dCWVWPCMt/ohZBsGN6g6KXWifT0pzQ7uOxqEKCYCDPJ8Vz4g== + dependencies: + yaml "^1.10.2" + +docker-modem@^3.0.0: + version "3.0.8" + resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-3.0.8.tgz#ef62c8bdff6e8a7d12f0160988c295ea8705e77a" + integrity 
sha512-f0ReSURdM3pcKPNS30mxOHSbaFLcknGmQjwSfmbcdOw1XWKXVhukM3NJHhr7NpY9BIyyWQb0EBo3KQvvuU5egQ== + dependencies: + debug "^4.1.1" + readable-stream "^3.5.0" + split-ca "^1.0.1" + ssh2 "^1.11.0" + +dockerode@^3.2.1: + version "3.3.5" + resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.5.tgz#7ae3f40f2bec53ae5e9a741ce655fff459745629" + integrity sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA== + dependencies: + "@balena/dockerignore" "^1.0.2" + docker-modem "^3.0.0" + tar-fs "~2.0.1" + dotenv@16.0.1: version "16.0.1" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.0.1.tgz#8f8f9d94876c35dac989876a5d3a82a267fdce1d" @@ -1844,6 +1922,13 @@ exit@^0.1.2: resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== +expand-tilde@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-1.2.2.tgz#0b81eba897e5a3d31d1c3d102f8f01441e559449" + integrity sha512-rtmc+cjLZqnu9dSYosX9EWmSJhTwpACgJQTfj4hgg2JjOD/6SIQalZrt4a3aQeh++oNxkazcaxrhPUj6+g5G/Q== + dependencies: + os-homedir "^1.0.1" + expect@^29.0.0: version "29.0.2" resolved "https://registry.yarnpkg.com/expect/-/expect-29.0.2.tgz#22c7132400f60444b427211f1d6bb604a9ab2420" @@ -1919,6 +2004,21 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" +find-file-up@^0.1.2: + version "0.1.3" + resolved "https://registry.yarnpkg.com/find-file-up/-/find-file-up-0.1.3.tgz#cf68091bcf9f300a40da411b37da5cce5a2fbea0" + integrity sha512-mBxmNbVyjg1LQIIpgO8hN+ybWBgDQK8qjht+EbrTCGmmPV/sc7RF1i9stPTD6bpvXZywBdrwRYxhSdJv867L6A== + dependencies: + fs-exists-sync "^0.1.0" + resolve-dir "^0.1.0" + +find-pkg@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/find-pkg/-/find-pkg-0.1.2.tgz#1bdc22c06e36365532e2a248046854b9788da557" + integrity sha512-0rnQWcFwZr7eO0513HahrWafsc3CTFioEB7DRiEYCUM/70QXSY8f3mCST17HXLcPvEhzH/Ty/Bxd72ZZsr/yvw== + dependencies: + find-file-up "^0.1.2" + find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" @@ -1984,6 +2084,11 @@ fs-constants@^1.0.0: resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== +fs-exists-sync@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add" + integrity sha512-cR/vflFyPZtrN6b38ZyWxpWdhlXrzZEBawlpBQMq7033xVY7/kg0GDMBK5jg8lDYQckdJ5x/YC88lM3C7VMsLg== + fs-minipass@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" @@ -2062,7 +2167,7 @@ getpass@^0.1.1: dependencies: assert-plus "^1.0.0" -glob@^7.1.3, glob@^7.1.4: +glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -2074,6 +2179,24 @@ glob@^7.1.3, glob@^7.1.4: once "^1.3.0" path-is-absolute "^1.0.0" +global-modules@^0.2.3: + version "0.2.3" + resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-0.2.3.tgz#ea5a3bed42c6d6ce995a4f8a1269b5dae223828d" + integrity sha512-JeXuCbvYzYXcwE6acL9V2bAOeSIGl4dD+iwLY9iUx2VBJJ80R18HCn+JCwHM9Oegdfya3lEkGCdaRkSyc10hDA== + dependencies: + global-prefix "^0.1.4" + is-windows "^0.2.0" + +global-prefix@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-0.1.5.tgz#8d3bc6b8da3ca8112a160d8d496ff0462bfef78f" + integrity sha512-gOPiyxcD9dJGCEArAhF4Hd0BAqvAe/JzERP7tYumE4yIkmIedPUVXcJFWbV3/p/ovIIvKjkrTk+f1UVkq7vvbw== + dependencies: + homedir-polyfill "^1.0.0" + ini "^1.3.4" + is-windows "^0.2.0" + which "^1.2.12" + globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" @@ -2131,6 +2254,13 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +homedir-polyfill@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" + integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== + dependencies: + parse-passwd "^1.0.0" + html-escaper@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" @@ -2230,6 +2360,11 @@ inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, i resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== +ini@^1.3.4: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + ioredis@4.28.0: version "4.28.0" resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.0.tgz#5a2be3f37ff2075e2332f280eaeb02ab4d9ff0d3" @@ -2318,6 +2453,11 @@ is-typedarray@~1.0.0: resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== +is-windows@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-0.2.0.tgz#de1aa6d63ea29dd248737b69f1ff8b8002d2108c" + integrity sha512-n67eJYmXbniZB7RF4I/FTjK1s6RPOCTxhYrVYLRaCt3lF0mpWZPKr3T2LSZAqyjQsxR2qMmGYXXzK0YWwcPM1Q== + isarray@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" @@ -3332,6 +3472,11 @@ msgpackr@^1.5.2: optionalDependencies: msgpackr-extract "^3.0.0" +nan@^2.17.0: + version "2.17.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb" + integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ== + napi-macros@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" @@ -3367,6 +3512,11 @@ node-addon-api@^3.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161" integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A== +node-duration@^1.0.4: + version "1.0.4" + 
resolved "https://registry.yarnpkg.com/node-duration/-/node-duration-1.0.4.tgz#3e94ecc0e473691c89c4560074503362071cecac" + integrity sha512-eUXYNSY7DL53vqfTosggWkvyIW3bhAcqBDIlolgNYlZhianXTrCL50rlUJWD1eRqkIxMppXTfiFbp+9SjpPrgA== + node-fetch@2, node-fetch@2.6.7, node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -3490,6 +3640,11 @@ only@~0.0.2: resolved "https://registry.yarnpkg.com/only/-/only-0.0.2.tgz#2afde84d03e50b9a8edc444e30610a70295edfb4" integrity sha512-Fvw+Jemq5fjjyWz6CpKx6w9s7xxqo3+JCyM0WXWeCSOboZ8ABkyvP8ID4CZuChA/wxSx+XSJmdOm8rGVyJ1hdQ== +os-homedir@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ== + p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" @@ -3543,6 +3698,11 @@ parse-json@^5.2.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" +parse-passwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" + integrity sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q== + parseurl@^1.3.2, parseurl@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" @@ -3982,6 +4142,15 @@ readable-stream@1.1.14, readable-stream@^1.0.27-1: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^3.5.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + readable-stream@~0.0.2: version "0.0.4" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-0.0.4.tgz#f32d76e3fb863344a548d79923007173665b3b8d" @@ -4077,6 +4246,14 @@ resolve-cwd@^3.0.0: dependencies: resolve-from "^5.0.0" +resolve-dir@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-0.1.1.tgz#b219259a5602fac5c5c496ad894a6e8cc430261e" + integrity sha512-QxMPqI6le2u0dCLyiGzgy92kjkkL6zO0XyvHzjdTNH3zM6e5Hz3BwG6+aEyNgiQ5Xz6PwTwgQEj3U50dByPKIA== + dependencies: + expand-tilde "^1.2.2" + global-modules "^0.2.3" + resolve-from@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" @@ -4238,6 +4415,11 @@ spark-md5@3.0.2: resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.2.tgz#7952c4a30784347abcee73268e473b9c0167e3fc" integrity sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw== +split-ca@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/split-ca/-/split-ca-1.0.1.tgz#6c83aff3692fa61256e0cd197e05e9de157691a6" + integrity sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ== + split2@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/split2/-/split2-2.2.0.tgz#186b2575bcf83e85b7d18465756238ee4ee42493" @@ -4257,6 +4439,17 @@ sprintf-js@~1.0.2: resolved 
"https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== +ssh2@^1.11.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.13.0.tgz#9b53a07534fa72283ada471b82395a3b3c875934" + integrity sha512-CIZBFRRY1y9mAZSqBGFE4EB4dNJad2ysT2PqO8OpkiI3UTB/gUZwE5EaN16qVyQ6s/M7EgC/iaV/MnjdlvnuzA== + dependencies: + asn1 "^0.2.6" + bcrypt-pbkdf "^1.0.2" + optionalDependencies: + cpu-features "~0.0.7" + nan "^2.17.0" + sshpk@^1.7.0: version "1.17.0" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" @@ -4314,6 +4507,13 @@ stream-combiner@~0.0.4: dependencies: duplexer "~0.1.1" +stream-to-array@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/stream-to-array/-/stream-to-array-2.3.0.tgz#bbf6b39f5f43ec30bc71babcb37557acecf34353" + integrity sha512-UsZtOYEn4tWU2RGLOXr/o/xjRBftZRlG3dEWoaHr8j4GuypJ3isitGbVyjQKAuMu+xbiop8q224TjiZWc4XTZA== + dependencies: + any-promise "^1.1.0" + string-length@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" @@ -4403,7 +4603,7 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== -tar-fs@2.1.1: +tar-fs@2.1.1, tar-fs@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== @@ -4413,7 +4613,17 @@ tar-fs@2.1.1: pump "^3.0.0" tar-stream "^2.1.4" -tar-stream@^2.1.4: +tar-fs@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.0.1.tgz#e44086c1c60d31a4f0cf893b1c4e155dabfae9e2" + integrity sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^2.0.0" + +tar-stream@^2.0.0, tar-stream@^2.1.4: version "2.2.0" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== @@ -4445,6 +4655,23 @@ test-exclude@^6.0.0: glob "^7.1.4" minimatch "^3.0.4" +testcontainers@4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-4.7.0.tgz#5a9a864b1b0cc86984086dcc737c2f5e73490cf3" + integrity sha512-5SrG9RMfDRRZig34fDZeMcGD5i3lHCOJzn0kjouyK4TiEWjZB3h7kCk8524lwNRHROFE1j6DGjceonv/5hl5ag== + dependencies: + "@types/dockerode" "^2.5.34" + byline "^5.0.0" + debug "^4.1.1" + docker-compose "^0.23.5" + dockerode "^3.2.1" + get-port "^5.1.1" + glob "^7.1.6" + node-duration "^1.0.4" + slash "^3.0.0" + stream-to-array "^2.3.0" + tar-fs "^2.1.0" + through2@3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/through2/-/through2-3.0.2.tgz#99f88931cfc761ec7678b41d5d7336b5b6a07bf4" @@ -4746,6 +4973,13 @@ whatwg-url@^5.0.0: tr46 "~0.0.3" webidl-conversions "^3.0.0" +which@^1.2.12: + version "1.3.1" + resolved 
"https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + which@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" @@ -4824,6 +5058,11 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== +yaml@^1.10.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + yargs-parser@^21.0.0, yargs-parser@^21.0.1: version "21.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" diff --git a/yarn.lock b/yarn.lock index 1e59d0a579..f8357eeeda 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5211,6 +5211,15 @@ resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== +"@types/pg@8.6.6": + version "8.6.6" + resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.6.6.tgz#21cdf873a3e345a6e78f394677e3b3b1b543cb80" + integrity sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw== + dependencies: + "@types/node" "*" + pg-protocol "*" + pg-types "^2.2.0" + "@types/pouchdb-adapter-cordova-sqlite@*": version "1.0.1" resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-cordova-sqlite/-/pouchdb-adapter-cordova-sqlite-1.0.1.tgz#49e5ee6df7cc0c23196fcb340f43a560e74eb1d6" @@ -19085,7 +19094,7 @@ performance-now@^2.1.0: resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== -pg-connection-string@2.5.0, pg-connection-string@^2.4.0: +pg-connection-string@2.5.0, pg-connection-string@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== @@ -19095,17 +19104,17 @@ pg-int8@1.0.1: resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw== -pg-pool@^3.2.2: +pg-pool@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.0.tgz#3190df3e4747a0d23e5e9e8045bcd99bda0a712e" integrity sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ== -pg-protocol@^1.4.0: +pg-protocol@*, pg-protocol@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833" integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q== -pg-types@^2.1.0: +pg-types@^2.1.0, pg-types@^2.2.0: version "2.2.0" resolved 
"https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA== @@ -19116,16 +19125,16 @@ pg-types@^2.1.0: postgres-date "~1.0.4" postgres-interval "^1.1.0" -pg@8.5.1: - version "8.5.1" - resolved "https://registry.yarnpkg.com/pg/-/pg-8.5.1.tgz#34dcb15f6db4a29c702bf5031ef2e1e25a06a120" - integrity sha512-9wm3yX9lCfjvA98ybCyw2pADUivyNWT/yIP4ZcDVpMN0og70BUWYEGXPCTAQdGTAqnytfRADb7NERrY1qxhIqw== +pg@8.10.0: + version "8.10.0" + resolved "https://registry.yarnpkg.com/pg/-/pg-8.10.0.tgz#5b8379c9b4a36451d110fc8cd98fc325fe62ad24" + integrity sha512-ke7o7qSTMb47iwzOSaZMfeR7xToFdkE71ifIipOAAaLIM0DYzfOAXlgFFmYUIE2BcJtvnVlGCID84ZzCegE8CQ== dependencies: buffer-writer "2.0.0" packet-reader "1.0.0" - pg-connection-string "^2.4.0" - pg-pool "^3.2.2" - pg-protocol "^1.4.0" + pg-connection-string "^2.5.0" + pg-pool "^3.6.0" + pg-protocol "^1.6.0" pg-types "^2.1.0" pgpass "1.x"