diff --git a/packages/builder/cypress/integration/createTable.spec.js b/packages/builder/cypress/integration/createTable.spec.js
index 214727ab7d..6958eb5d37 100644
--- a/packages/builder/cypress/integration/createTable.spec.js
+++ b/packages/builder/cypress/integration/createTable.spec.js
@@ -8,7 +8,7 @@ context('Create a Table', () => {
     cy.createTable('dog')
 
     // Check if Table exists
-    cy.get('.title').should('have.text', 'dog')
+    cy.get('.title').should('contain.text', 'dog')
   })
 
   it('adds a new column to the table', () => {
diff --git a/packages/builder/src/components/database/DataTable/ModelDataTable.svelte b/packages/builder/src/components/database/DataTable/ModelDataTable.svelte
index c0d5ad6959..b06e31a344 100644
--- a/packages/builder/src/components/database/DataTable/ModelDataTable.svelte
+++ b/packages/builder/src/components/database/DataTable/ModelDataTable.svelte
@@ -1,5 +1,6 @@
+  {#if schema}
+    {#each Object.keys(schema).filter(key => schema[key].type !== 'omit') as columnName}
+      {columnName}
+      {schema[columnName].success ? 'Success' : 'Failure'}
+      omitColumn(columnName)} />
+    {/each}
+  {/if}
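The Svelte markup above only survives in fragments, but together with the new server route it implies a builder-side flow along these lines. This is a minimal sketch, not code from this PR: the helper name is invented, and it assumes the server's multipart parsing exposes the upload as `file` on the request body, which is what `validateCSVSchema` reads below.

// Hypothetical builder helper: upload the CSV and get back per-column results.
async function validateCsv(csvFile) {
  const form = new FormData()
  form.append("file", csvFile)

  const response = await fetch("/api/models/csv/validate", {
    method: "POST",
    body: form,
  })

  // Response shape comes from the controller below:
  // { schema: { [column]: { type, success } }, path: "<temp path of the upload>" }
  return await response.json()
}

Each column's `success` flag is what drives the Success/Failure label in the template, and `omitColumn` presumably just sets that column's type to "omit" so the import skips it.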
diff --git a/packages/server/build/Hosting Portal-2.png b/packages/server/build/Hosting Portal-2.png
new file mode 100644
index 0000000000..c42db9bd8f
Binary files /dev/null and b/packages/server/build/Hosting Portal-2.png differ
diff --git a/packages/server/package.json b/packages/server/package.json
index 1875e44285..384be7a3d1 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -49,6 +49,7 @@
     "aws-sdk": "^2.706.0",
     "bcryptjs": "^2.4.3",
     "chmodr": "^1.2.0",
+    "csvtojson": "^2.0.10",
     "dotenv": "^8.2.0",
     "download": "^8.0.0",
     "electron-is-dev": "^1.2.0",
diff --git a/packages/server/src/api/controllers/model.js b/packages/server/src/api/controllers/model.js
index 182b2089da..c2e2c2e236 100644
--- a/packages/server/src/api/controllers/model.js
+++ b/packages/server/src/api/controllers/model.js
@@ -1,8 +1,10 @@
 const CouchDB = require("../../db")
+const csvParser = require("../../utilities/csvParser")
 const {
   getRecordParams,
   getModelParams,
   generateModelID,
+  generateRecordID,
 } = require("../../db/utils")
 
 exports.fetch = async function(ctx) {
@@ -22,11 +24,12 @@ exports.find = async function(ctx) {
 
 exports.save = async function(ctx) {
   const db = new CouchDB(ctx.user.instanceId)
+  const { dataImport, ...rest } = ctx.request.body
   const modelToSave = {
     type: "model",
     _id: generateModelID(),
     views: {},
-    ...ctx.request.body,
+    ...rest,
   }
 
   // rename record fields when table column is renamed
@@ -77,6 +80,18 @@
     }
   }
 
+  if (dataImport && dataImport.path) {
+    // Populate the table with records imported from CSV in a bulk update
+    const data = await csvParser.transform(dataImport)
+
+    for (let row of data) {
+      row._id = generateRecordID(modelToSave._id)
+      row.modelId = modelToSave._id
+    }
+
+    await db.bulkDocs(data)
+  }
+
   ctx.status = 200
   ctx.message = `Model ${ctx.request.body.name} saved successfully.`
   ctx.body = modelToSave
@@ -112,3 +127,12 @@
   ctx.status = 200
   ctx.message = `Model ${ctx.params.modelId} deleted.`
 }
+
+exports.validateCSVSchema = async function(ctx) {
+  const { file, schema = {} } = ctx.request.body
+  const result = await csvParser.parse(file.path, schema)
+  ctx.body = {
+    schema: result,
+    path: file.path,
+  }
+}
diff --git a/packages/server/src/api/routes/model.js b/packages/server/src/api/routes/model.js
index 00eb46d515..fe782d4cf5 100644
--- a/packages/server/src/api/routes/model.js
+++ b/packages/server/src/api/routes/model.js
@@ -13,6 +13,11 @@ router
     modelController.find
   )
   .post("/api/models", authorized(BUILDER), modelController.save)
+  .post(
+    "/api/models/csv/validate",
+    authorized(BUILDER),
+    modelController.validateCSVSchema
+  )
   .delete(
     "/api/models/:modelId/:revId",
     authorized(BUILDER),
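With the route and controller in place, a table save that carries imported data would look roughly like this. The payload is illustrative, not taken from this PR; only the `dataImport` field and its `{ path, schema }` shape are defined by the controller and by `csvParser.transform` below.

// Illustrative POST /api/models body; other model fields are sent as before.
const saveModelRequest = {
  name: "people",                      // example table name
  dataImport: {
    // Returned verbatim by /api/models/csv/validate:
    path: "/tmp/upload-people.csv",    // temp location of the uploaded CSV
    schema: {
      Name: { type: "string", success: true },
      Age: { type: "number", success: true },
    },
  },
}

On the server, `save` strips `dataImport` out of the model document, runs `csvParser.transform` on the file, stamps each row with a generated record `_id` and the new `modelId`, and writes the whole batch with a single `db.bulkDocs` call.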
diff --git a/packages/server/src/utilities/csvParser.js b/packages/server/src/utilities/csvParser.js
new file mode 100644
index 0000000000..872ee5f8a4
--- /dev/null
+++ b/packages/server/src/utilities/csvParser.js
@@ -0,0 +1,73 @@
+const csv = require("csvtojson")
+
+const VALIDATORS = {
+  string: () => true,
+  number: attribute => !isNaN(Number(attribute)),
+  datetime: attribute => !isNaN(new Date(attribute).getTime()),
+}
+
+const PARSERS = {
+  datetime: attribute => new Date(attribute).toISOString(),
+}
+
+function parse(path, parsers) {
+  const result = csv().fromFile(path)
+
+  const schema = {}
+
+  return new Promise((resolve, reject) => {
+    result.on("header", headers => {
+      for (let header of headers) {
+        schema[header] = {
+          type: parsers[header] ? parsers[header].type : "string",
+          success: true,
+        }
+      }
+    })
+    result.fromFile(path).subscribe(row => {
+      // For each CSV row, parse all the columns that need to be parsed
+      for (let key in parsers) {
+        if (!schema[key] || schema[key].success) {
+          // get the validator for the column type
+          const validator = VALIDATORS[parsers[key].type]
+
+          try {
+            // allow null/undefined values
+            schema[key].success = !row[key] || validator(row[key])
+          } catch (err) {
+            schema[key].success = false
+          }
+        }
+      }
+    })
+    result.on("done", error => {
+      if (error) {
+        console.error(error)
+        reject(error)
+      }
+
+      resolve(schema)
+    })
+  })
+}
+
+async function transform({ schema, path }) {
+  const colParser = {}
+
+  for (let key in schema) {
+    colParser[key] = PARSERS[schema[key].type] || schema[key].type
+  }
+
+  try {
+    const json = await csv({ colParser }).fromFile(path)
+    return json
+  } catch (err) {
+    console.error(`Error transforming CSV to JSON for data import`, err)
+    throw err
+  }
+}
+
+module.exports = {
+  parse,
+  transform,
+}
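Used on their own, the two exports form a pair: `parse` checks that a proposed column typing holds for every row, and `transform` then applies it. A small usage sketch follows; the file path is illustrative, and the schema shape matches the tests below.

const { parse, transform } = require("./csvParser")

async function importCsv() {
  const path = "/tmp/people.csv"               // illustrative path to an uploaded CSV
  const schema = { Age: { type: "number" } }   // columns not listed default to "string"

  // Resolves to { [column]: { type, success } }; success is false if any row
  // fails the column's validator.
  const validated = await parse(path, schema)

  // Applies the same schema through csvtojson's colParser and returns plain JSON
  // rows; columns typed "omit" are dropped entirely.
  const rows = await transform({ schema, path })

  return { validated, rows }
}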
Bird" + } + ]); + }); + + it("throws an error on invalid schema", async () => { + await expect(csvParser.transform({ schema: SCHEMAS.BROKEN, path: CSV_PATH })).rejects.toThrow() + }); + }); +}); diff --git a/packages/server/src/utilities/tests/test.csv b/packages/server/src/utilities/tests/test.csv new file mode 100644 index 0000000000..2266b348cd --- /dev/null +++ b/packages/server/src/utilities/tests/test.csv @@ -0,0 +1,4 @@ +"Name","Age","Address" +"Bert","4324","5 Sesame Street" +"Ernie","34","1 World Trade Center" +"Big Bird","23423","44 Second Avenue" \ No newline at end of file diff --git a/packages/server/yarn.lock b/packages/server/yarn.lock index de283385a8..72c3f38b80 100644 --- a/packages/server/yarn.lock +++ b/packages/server/yarn.lock @@ -1057,7 +1057,7 @@ bluebird-lst@^1.0.9: dependencies: bluebird "^3.5.5" -bluebird@^3.5.5: +bluebird@^3.5.1, bluebird@^3.5.5: version "3.7.2" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" @@ -1596,6 +1596,15 @@ cssstyle@^1.0.0: dependencies: cssom "0.3.x" +csvtojson@^2.0.10: + version "2.0.10" + resolved "https://registry.yarnpkg.com/csvtojson/-/csvtojson-2.0.10.tgz#11e7242cc630da54efce7958a45f443210357574" + integrity sha512-lUWFxGKyhraKCW8Qghz6Z0f2l/PqB1W3AO0HKJzGIQ5JRSlR651ekJDiGJbBT4sRNNv5ddnSGVEnsxP9XRCVpQ== + dependencies: + bluebird "^3.5.1" + lodash "^4.17.3" + strip-bom "^2.0.0" + dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" @@ -3303,6 +3312,11 @@ is-typedarray@^1.0.0, is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" +is-utf8@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= + is-windows@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" @@ -4250,6 +4264,11 @@ lodash@^4.17.10, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15: version "4.17.19" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" +lodash@^4.17.3: + version "4.17.20" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" + integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== + loose-envify@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" @@ -6051,6 +6070,13 @@ strip-ansi@^6.0.0: dependencies: ansi-regex "^5.0.0" +strip-bom@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= + dependencies: + is-utf8 "^0.2.0" + strip-bom@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"