From 246d08c804be28b21260ef777cde74e8662e8a88 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 28 Oct 2021 19:39:42 +0100 Subject: [PATCH] SQL table building. --- .../backend/DataTable/DataTable.svelte | 4 +- .../components/backend/DataTable/Table.svelte | 2 +- .../DataTable/modals/CreateEditColumn.svelte | 29 +++- .../popovers/EditTablePopover.svelte | 11 +- .../[selectedDatasource]/index.svelte | 21 +++ .../modals/CreateExternalTableModal.svelte | 44 +++++ packages/builder/src/stores/backend/tables.js | 3 + .../server/src/api/controllers/datasource.js | 4 +- .../api/controllers/row/ExternalRequest.ts | 22 +-- .../server/src/api/controllers/row/utils.js | 13 +- .../src/api/controllers/table/external.js | 116 +++++++++++++ .../server/src/api/controllers/table/index.js | 162 +++--------------- .../src/api/controllers/table/internal.js | 138 +++++++++++++++ packages/server/src/constants/index.js | 3 + packages/server/src/definitions/common.ts | 2 +- packages/server/src/definitions/datasource.ts | 5 + packages/server/src/integrations/base/sql.ts | 33 ++-- .../server/src/integrations/base/sqlTable.ts | 132 ++++++++++++++ .../server/src/integrations/base/utils.ts | 19 ++ packages/server/src/integrations/postgres.ts | 12 +- 20 files changed, 584 insertions(+), 191 deletions(-) create mode 100644 packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/modals/CreateExternalTableModal.svelte create mode 100644 packages/server/src/api/controllers/table/external.js create mode 100644 packages/server/src/api/controllers/table/internal.js create mode 100644 packages/server/src/integrations/base/sqlTable.ts create mode 100644 packages/server/src/integrations/base/utils.ts diff --git a/packages/builder/src/components/backend/DataTable/DataTable.svelte b/packages/builder/src/components/backend/DataTable/DataTable.svelte index 336bb51670..1af703800f 100644 --- a/packages/builder/src/components/backend/DataTable/DataTable.svelte +++ b/packages/builder/src/components/backend/DataTable/DataTable.svelte @@ -98,9 +98,7 @@ on:updatecolumns={onUpdateColumns} on:updaterows={onUpdateRows} > - {#if isInternal} - - {/if} + {#if schema && Object.keys(schema).length > 0} {#if !isUsersTable} editColumn(e.detail)} on:editrow={e => editRow(e.detail)} diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index cd437bcad2..221d391cbf 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -56,7 +56,7 @@ let deletion $: tableOptions = $tables.list.filter( - table => table._id !== $tables.draft._id && table.type !== "external" + opt => opt._id !== $tables.draft._id && opt.type === table.type ) $: required = !!field?.constraints?.presence || primaryDisplay $: uneditable = @@ -83,6 +83,7 @@ $: canBeRequired = field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_TYPE $: relationshipOptions = getRelationshipOptions(field) + $: external = table.type === "external" async function saveColumn() { if (field.type === AUTO_TYPE) { @@ -193,6 +194,27 @@ }, ] } + + function getAllowedTypes() { + if (!external) { + return [ + ...Object.values(fieldDefinitions), + { name: "Auto Column", type: AUTO_TYPE }, + ] + } else { + return [ + FIELDS.STRING, + FIELDS.LONGFORM, + FIELDS.OPTIONS, + FIELDS.DATETIME, + FIELDS.NUMBER, + FIELDS.BOOLEAN, + 
FIELDS.ARRAY, + FIELDS.FORMULA, + FIELDS.LINK, + ] + } + } field.name} getOptionValue={field => field.type} /> diff --git a/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte b/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte index 2513c6c7e5..04094b881a 100644 --- a/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte +++ b/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte @@ -1,7 +1,7 @@ @@ -130,6 +137,10 @@ + + + + {#if datasource && integration}
@@ -189,6 +200,11 @@
           />
         {/if}
+
+        <div class="add-table">
+          New table
+        </div>
+
       {#each plusTables as table}
         <div on:click={() => onClickTable(table)}>
           {table.name}
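For orientation, here is a minimal sketch of the table document this "New table" flow ends up posting to the server. The `type: "external"`, `sourceId` and `primary` fields follow the conventions the external table controller later in this patch expects, and the tables store re-fetches datasources after saving; the table name, the example column and the `datasource`/`tables` bindings are illustrative assumptions, not code from this change.

```ts
// Sketch only: the shape of the body sent to POST /api/tables when an
// external table is created from the datasource page.
const newExternalTable = {
  type: "external",            // routes the save to the external table controller
  name: "orders",              // hypothetical name typed into the modal
  sourceId: datasource._id,    // read by getDatasourceId() on the server
  primary: ["id"],
  schema: {
    id: { name: "id", type: "number", autocolumn: true },
  },
}
// Because type === "external", the tables store also re-fetches datasources
// so the new entity appears under its datasource in the builder.
await tables.save(newExternalTable)
```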
@@ -325,4 +341,9 @@ .table-buttons div { grid-column-end: -1; } + + .add-table { + margin-right: 0; + margin-left: auto; + } diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/modals/CreateExternalTableModal.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/modals/CreateExternalTableModal.svelte new file mode 100644 index 0000000000..c7a040d89a --- /dev/null +++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/modals/CreateExternalTableModal.svelte @@ -0,0 +1,44 @@ + + + + Provide a name for your new table; you can add columns once it is created. + + diff --git a/packages/builder/src/stores/backend/tables.js b/packages/builder/src/stores/backend/tables.js index 161877f660..7f90a04a05 100644 --- a/packages/builder/src/stores/backend/tables.js +++ b/packages/builder/src/stores/backend/tables.js @@ -62,6 +62,9 @@ export function createTablesStore() { const response = await api.post(`/api/tables`, updatedTable) const savedTable = await response.json() await fetch() + if (table.type === "external") { + await datasources.fetch() + } await select(savedTable) return savedTable } diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js index d75a8f8ced..604fee004e 100644 --- a/packages/server/src/api/controllers/datasource.js +++ b/packages/server/src/api/controllers/datasource.js @@ -9,7 +9,7 @@ const { } = require("../../db/utils") const { BuildSchemaErrors } = require("../../constants") const { integrations } = require("../../integrations") -const { makeExternalQuery } = require("./row/utils") +const { getDatasourceAndQuery } = require("./row/utils") exports.fetch = async function (ctx) { const database = new CouchDB(ctx.appId) @@ -138,7 +138,7 @@ exports.find = async function (ctx) { exports.query = async function (ctx) { const queryJson = ctx.request.body try { - ctx.body = await makeExternalQuery(ctx.appId, queryJson) + ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson) } catch (err) { ctx.throw(400, err) } diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index c58364af07..f538e01f73 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -36,7 +36,7 @@ interface RunConfig { } module External { - const { makeExternalQuery } = require("./utils") + const { getDatasourceAndQuery } = require("./utils") const { DataSourceOperation, FieldTypes, @@ -46,6 +46,7 @@ module External { const { processObjectSync } = require("@budibase/string-templates") const { cloneDeep } = require("lodash/fp") const CouchDB = require("../../../db") + const { processFormulas } = require("../../../utilities/rowProcessor/utils") function buildFilters( id: string | undefined, @@ -225,7 +226,7 @@ module External { manyRelationships: ManyRelationship[] = [] for (let [key, field] of Object.entries(table.schema)) { // if set already, or not set just skip it - if ((!row[key] && row[key] !== "") || newRow[key] || field.autocolumn) { + if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) { continue } // if its an empty string then it means return the column to null (if possible) @@ -361,7 +362,7 @@ module External { relationships ) } - return Object.values(finalRows) + return processFormulas(table, 
Object.values(finalRows)) } /** @@ -428,7 +429,7 @@ module External { const tableId = isMany ? field.through : field.tableId const manyKey = field.throughFrom || primaryKey const fieldName = isMany ? manyKey : field.fieldName - const response = await makeExternalQuery(this.appId, { + const response = await getDatasourceAndQuery(this.appId, { endpoint: getEndpoint(tableId, DataSourceOperation.READ), filters: { equal: { @@ -479,7 +480,7 @@ module External { : DataSourceOperation.CREATE if (!found) { promises.push( - makeExternalQuery(appId, { + getDatasourceAndQuery(appId, { endpoint: getEndpoint(tableId, operation), // if we're doing many relationships then we're writing, only one response body, @@ -509,7 +510,7 @@ module External { : DataSourceOperation.UPDATE const body = isMany ? null : { [colName]: null } promises.push( - makeExternalQuery(this.appId, { + getDatasourceAndQuery(this.appId, { endpoint: getEndpoint(tableId, op), body, filters, @@ -532,16 +533,17 @@ module External { table: Table, includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE ) { - function extractNonLinkFieldNames(table: Table, existing: string[] = []) { + function extractRealFields(table: Table, existing: string[] = []) { return Object.entries(table.schema) .filter( column => column[1].type !== FieldTypes.LINK && + column[1].type !== FieldTypes.FORMULA && !existing.find((field: string) => field === column[0]) ) .map(column => `${table.name}.${column[0]}`) } - let fields = extractNonLinkFieldNames(table) + let fields = extractRealFields(table) for (let field of Object.values(table.schema)) { if (field.type !== FieldTypes.LINK || !includeRelations) { continue @@ -549,7 +551,7 @@ module External { const { tableName: linkTableName } = breakExternalTableId(field.tableId) const linkTable = this.tables[linkTableName] if (linkTable) { - const linkedFields = extractNonLinkFieldNames(linkTable, fields) + const linkedFields = extractRealFields(linkTable, fields) fields = fields.concat(linkedFields) } } @@ -609,7 +611,7 @@ module External { }, } // can't really use response right now - const response = await makeExternalQuery(appId, json) + const response = await getDatasourceAndQuery(appId, json) // handle many to many relationships now if we know the ID (could be auto increment) if ( operation !== DataSourceOperation.READ && diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js index ca6c782713..bd2df084a3 100644 --- a/packages/server/src/api/controllers/row/utils.js +++ b/packages/server/src/api/controllers/row/utils.js @@ -4,8 +4,8 @@ const CouchDB = require("../../../db") const { InternalTables } = require("../../../db/utils") const userController = require("../user") const { FieldTypes } = require("../../../constants") -const { integrations } = require("../../../integrations") const { processStringSync } = require("@budibase/string-templates") +const { makeExternalQuery } = require("../../../integrations/base/utils") validateJs.extend(validateJs.validators.datetime, { parse: function (value) { @@ -17,18 +17,11 @@ validateJs.extend(validateJs.validators.datetime, { }, }) -exports.makeExternalQuery = async (appId, json) => { +exports.getDatasourceAndQuery = async (appId, json) => { const datasourceId = json.endpoint.datasourceId const db = new CouchDB(appId) const datasource = await db.get(datasourceId) - const Integration = integrations[datasource.source] - // query is the opinionated function - if (Integration.prototype.query) { - const 
integration = new Integration(datasource.config) - return integration.query(json) - } else { - throw "Datasource does not support query." - } + return makeExternalQuery(datasource, json) } exports.findRow = async (ctx, db, tableId, rowId) => { diff --git a/packages/server/src/api/controllers/table/external.js b/packages/server/src/api/controllers/table/external.js new file mode 100644 index 0000000000..42454fd2e8 --- /dev/null +++ b/packages/server/src/api/controllers/table/external.js @@ -0,0 +1,116 @@ +const CouchDB = require("../../../db") +const { + buildExternalTableId, + breakExternalTableId, +} = require("../../../integrations/utils") +const { getTable } = require("./utils") +const { DataSourceOperation, FieldTypes } = require("../../../constants") +const { makeExternalQuery } = require("../../../integrations/base/utils") +const { cloneDeep } = require("lodash/fp") + +async function makeTableRequest( + datasource, + operation, + table, + tables, + oldTable = null +) { + const json = { + endpoint: { + datasourceId: datasource._id, + entityId: table._id, + operation, + }, + meta: { + tables, + }, + table, + } + if (oldTable) { + json.meta.table = oldTable + } + return makeExternalQuery(datasource, json) +} + +function getDatasourceId(table) { + if (!table) { + throw "No table supplied" + } + if (table.sourceId) { + return table.sourceId + } + return breakExternalTableId(table._id).datasourceId +} + +exports.save = async function (ctx) { + const appId = ctx.appId + const table = ctx.request.body + // can't do this + delete table.dataImport + const datasourceId = getDatasourceId(ctx.request.body) + let tableToSave = { + type: "table", + _id: buildExternalTableId(datasourceId, table.name), + ...table, + } + + let oldTable + if (ctx.request.body && ctx.request.body._id) { + oldTable = await getTable(appId, ctx.request.body._id) + } + + const db = new CouchDB(appId) + const datasource = await db.get(datasourceId) + const tables = datasource.entities + + // check if relations need setup + for (let [key, schema] of Object.entries(tableToSave.schema)) { + // TODO: this assumes all relationships are the same, need to handle cardinality and many to many + if (schema.type === FieldTypes.LINK) { + const relatedTable = Object.values(tables).find( + table => table._id === schema.tableId + ) + const relatedField = schema.fieldName + const foreignKey = `fk_${relatedTable.name}_${schema.fieldName}` + // create foreign key + tableToSave.schema[foreignKey] = { type: FieldTypes.NUMBER } + // setup the relation in other table and this one + schema.foreignKey = foreignKey + schema.fieldName = foreignKey + schema.main = true + const relatedSchema = cloneDeep(schema) + relatedSchema.fieldName = key + delete relatedSchema.main + relatedTable.schema[relatedField] = relatedSchema + } + } + + const operation = oldTable + ? 
DataSourceOperation.UPDATE_TABLE + : DataSourceOperation.CREATE_TABLE + await makeTableRequest(datasource, operation, tableToSave, tables, oldTable) + + // store it into couch now for budibase reference + datasource.entities[tableToSave.name] = tableToSave + await db.put(datasource) + + return tableToSave +} + +exports.destroy = async function (ctx) { + const appId = ctx.appId + const tableToDelete = await getTable(appId, ctx.params.tableId) + const datasourceId = getDatasourceId(tableToDelete) + + const db = new CouchDB(appId) + const datasource = await db.get(datasourceId) + const tables = datasource.entities + + const operation = DataSourceOperation.DELETE_TABLE + await makeTableRequest(datasource, operation, tableToDelete, tables) + + delete datasource.entities[tableToDelete.name] + await db.put(datasource) + + return tableToDelete +} diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js index d4356c9c8b..97b48943b8 100644 --- a/packages/server/src/api/controllers/table/index.js +++ b/packages/server/src/api/controllers/table/index.js @@ -1,16 +1,28 @@ const CouchDB = require("../../../db") -const linkRows = require("../../../db/linkedRows") +const internal = require("./internal") +const external = require("./external") const csvParser = require("../../../utilities/csvParser") +const { isExternalTable } = require("../../../integrations/utils") const { - getRowParams, getTableParams, - generateTableID, getDatasourceParams, BudibaseInternalDB, } = require("../../../db/utils") -const { FieldTypes } = require("../../../constants") -const { TableSaveFunctions, getTable } = require("./utils") +const { getTable } = require("./utils") +function pickApi({ tableId, table }) { + if (table && !tableId) { + tableId = table._id + } + if (table && table.type === "external") { + return external + } else if (tableId && isExternalTable(tableId)) { + return external + } + return internal +} + +// covers both internal and external exports.fetch = async function (ctx) { const db = new CouchDB(ctx.appId) @@ -50,143 +62,23 @@ exports.find = async function (ctx) { exports.save = async function (ctx) { const appId = ctx.appId - const db = new CouchDB(appId) - const { dataImport, ...rest } = ctx.request.body - let tableToSave = { - type: "table", - _id: generateTableID(), - views: {}, - ...rest, - } - - // if the table obj had an _id then it will have been retrieved - let oldTable - if (ctx.request.body && ctx.request.body._id) { - oldTable = await db.get(ctx.request.body._id) - } - - // saving a table is a complex operation, involving many different steps, this - // has been broken out into a utility to make it more obvious/easier to manipulate - const tableSaveFunctions = new TableSaveFunctions({ - db, - ctx, - oldTable, - dataImport, - }) - tableToSave = await tableSaveFunctions.before(tableToSave) - - // make sure that types don't change of a column, have to remove - // the column if you want to change the type - if (oldTable && oldTable.schema) { - for (let propKey of Object.keys(tableToSave.schema)) { - let column = tableToSave.schema[propKey] - let oldColumn = oldTable.schema[propKey] - if (oldColumn && oldColumn.type === "internal") { - oldColumn.type = "auto" - } - if (oldColumn && oldColumn.type !== column.type) { - ctx.throw(400, "Cannot change the type of a column") - } - } - } - - // Don't rename if the name is the same - let { _rename } = tableToSave - /* istanbul ignore next */ - if (_rename && _rename.old === _rename.updated) { - 
_rename = null - delete tableToSave._rename - } - - // rename row fields when table column is renamed - /* istanbul ignore next */ - if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) { - ctx.throw(400, "Cannot rename a linked column.") - } - - tableToSave = await tableSaveFunctions.mid(tableToSave) - - // update schema of non-statistics views when new columns are added - for (let view in tableToSave.views) { - const tableView = tableToSave.views[view] - if (!tableView) continue - - if (tableView.schema.group || tableView.schema.field) continue - tableView.schema = tableToSave.schema - } - - // update linked rows - try { - const linkResp = await linkRows.updateLinks({ - appId, - eventType: oldTable - ? linkRows.EventType.TABLE_UPDATED - : linkRows.EventType.TABLE_SAVE, - table: tableToSave, - oldTable: oldTable, - }) - if (linkResp != null && linkResp._rev) { - tableToSave._rev = linkResp._rev - } - } catch (err) { - ctx.throw(400, err) - } - - // don't perform any updates until relationships have been - // checked by the updateLinks function - const updatedRows = tableSaveFunctions.getUpdatedRows() - if (updatedRows && updatedRows.length !== 0) { - await db.bulkDocs(updatedRows) - } - const result = await db.put(tableToSave) - tableToSave._rev = result.rev - - tableToSave = await tableSaveFunctions.after(tableToSave) - - ctx.eventEmitter && - ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave) - + const table = ctx.request.body + const savedTable = await pickApi({ table }).save(ctx) ctx.status = 200 - ctx.message = `Table ${ctx.request.body.name} saved successfully.` - ctx.body = tableToSave + ctx.message = `Table ${table.name} saved successfully.` + ctx.eventEmitter && + ctx.eventEmitter.emitTable(`table:save`, appId, savedTable) + ctx.body = savedTable } exports.destroy = async function (ctx) { const appId = ctx.appId - const db = new CouchDB(appId) - const tableToDelete = await db.get(ctx.params.tableId) - - // Delete all rows for that table - const rows = await db.allDocs( - getRowParams(ctx.params.tableId, null, { - include_docs: true, - }) - ) - await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true }))) - - // update linked rows - await linkRows.updateLinks({ - appId, - eventType: linkRows.EventType.TABLE_DELETE, - table: tableToDelete, - }) - - // don't remove the table itself until very end - await db.remove(tableToDelete) - - // remove table search index - const currentIndexes = await db.getIndexes() - const existingIndex = currentIndexes.indexes.find( - existing => existing.name === `search:${ctx.params.tableId}` - ) - if (existingIndex) { - await db.deleteIndex(existingIndex) - } - + const tableId = ctx.params.tableId + const deletedTable = await pickApi({ tableId }).destroy(ctx) ctx.eventEmitter && - ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete) + ctx.eventEmitter.emitTable(`table:delete`, appId, deletedTable) ctx.status = 200 - ctx.body = { message: `Table ${ctx.params.tableId} deleted.` } + ctx.body = { message: `Table ${tableId} deleted.` } } exports.validateCSVSchema = async function (ctx) { diff --git a/packages/server/src/api/controllers/table/internal.js b/packages/server/src/api/controllers/table/internal.js new file mode 100644 index 0000000000..898cd0593b --- /dev/null +++ b/packages/server/src/api/controllers/table/internal.js @@ -0,0 +1,138 @@ +const CouchDB = require("../../../db") +const linkRows = require("../../../db/linkedRows") +const { getRowParams, generateTableID } = 
require("../../../db/utils") +const { FieldTypes } = require("../../../constants") +const { TableSaveFunctions } = require("./utils") + +exports.save = async function (ctx) { + const appId = ctx.appId + const db = new CouchDB(appId) + const { dataImport, ...rest } = ctx.request.body + let tableToSave = { + type: "table", + _id: generateTableID(), + views: {}, + ...rest, + } + + // if the table obj had an _id then it will have been retrieved + let oldTable + if (ctx.request.body && ctx.request.body._id) { + oldTable = await db.get(ctx.request.body._id) + } + + // saving a table is a complex operation, involving many different steps, this + // has been broken out into a utility to make it more obvious/easier to manipulate + const tableSaveFunctions = new TableSaveFunctions({ + db, + ctx, + oldTable, + dataImport, + }) + tableToSave = await tableSaveFunctions.before(tableToSave) + + // make sure that types don't change of a column, have to remove + // the column if you want to change the type + if (oldTable && oldTable.schema) { + for (let propKey of Object.keys(tableToSave.schema)) { + let column = tableToSave.schema[propKey] + let oldColumn = oldTable.schema[propKey] + if (oldColumn && oldColumn.type === "internal") { + oldColumn.type = "auto" + } + if (oldColumn && oldColumn.type !== column.type) { + ctx.throw(400, "Cannot change the type of a column") + } + } + } + + // Don't rename if the name is the same + let { _rename } = tableToSave + /* istanbul ignore next */ + if (_rename && _rename.old === _rename.updated) { + _rename = null + delete tableToSave._rename + } + + // rename row fields when table column is renamed + /* istanbul ignore next */ + if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) { + ctx.throw(400, "Cannot rename a linked column.") + } + + tableToSave = await tableSaveFunctions.mid(tableToSave) + + // update schema of non-statistics views when new columns are added + for (let view in tableToSave.views) { + const tableView = tableToSave.views[view] + if (!tableView) continue + + if (tableView.schema.group || tableView.schema.field) continue + tableView.schema = tableToSave.schema + } + + // update linked rows + try { + const linkResp = await linkRows.updateLinks({ + appId, + eventType: oldTable + ? 
linkRows.EventType.TABLE_UPDATED + : linkRows.EventType.TABLE_SAVE, + table: tableToSave, + oldTable: oldTable, + }) + if (linkResp != null && linkResp._rev) { + tableToSave._rev = linkResp._rev + } + } catch (err) { + ctx.throw(400, err) + } + + // don't perform any updates until relationships have been + // checked by the updateLinks function + const updatedRows = tableSaveFunctions.getUpdatedRows() + if (updatedRows && updatedRows.length !== 0) { + await db.bulkDocs(updatedRows) + } + const result = await db.put(tableToSave) + tableToSave._rev = result.rev + + tableToSave = await tableSaveFunctions.after(tableToSave) + + return tableToSave +} + +exports.destroy = async function (ctx) { + const appId = ctx.appId + const db = new CouchDB(appId) + const tableToDelete = await db.get(ctx.params.tableId) + + // Delete all rows for that table + const rows = await db.allDocs( + getRowParams(ctx.params.tableId, null, { + include_docs: true, + }) + ) + await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true }))) + + // update linked rows + await linkRows.updateLinks({ + appId, + eventType: linkRows.EventType.TABLE_DELETE, + table: tableToDelete, + }) + + // don't remove the table itself until very end + await db.remove(tableToDelete) + + // remove table search index + const currentIndexes = await db.getIndexes() + const existingIndex = currentIndexes.indexes.find( + existing => existing.name === `search:${ctx.params.tableId}` + ) + if (existingIndex) { + await db.deleteIndex(existingIndex) + } + + return tableToDelete +} diff --git a/packages/server/src/constants/index.js b/packages/server/src/constants/index.js index 7a8958c36a..80c62cd02e 100644 --- a/packages/server/src/constants/index.js +++ b/packages/server/src/constants/index.js @@ -62,6 +62,9 @@ exports.DataSourceOperation = { READ: "READ", UPDATE: "UPDATE", DELETE: "DELETE", + CREATE_TABLE: "CREATE_TABLE", + UPDATE_TABLE: "UPDATE_TABLE", + DELETE_TABLE: "DELETE_TABLE", } exports.SortDirection = { diff --git a/packages/server/src/definitions/common.ts b/packages/server/src/definitions/common.ts index f439fc0d28..b2ab203bee 100644 --- a/packages/server/src/definitions/common.ts +++ b/packages/server/src/definitions/common.ts @@ -36,7 +36,7 @@ export interface TableSchema { export interface Table extends Base { type?: string views?: {} - name?: string + name: string primary?: string[] schema: TableSchema primaryDisplay?: string diff --git a/packages/server/src/definitions/datasource.ts b/packages/server/src/definitions/datasource.ts index c4b248fa17..a82e50b140 100644 --- a/packages/server/src/definitions/datasource.ts +++ b/packages/server/src/definitions/datasource.ts @@ -5,6 +5,9 @@ export enum Operation { READ = "READ", UPDATE = "UPDATE", DELETE = "DELETE", + CREATE_TABLE = "CREATE_TABLE", + UPDATE_TABLE = "UPDATE_TABLE", + DELETE_TABLE = "DELETE_TABLE", } export enum SortDirection { @@ -142,8 +145,10 @@ export interface QueryJson { sort?: SortJson paginate?: PaginationJson body?: object + table?: Table meta?: { table?: Table + tables?: Record } extra?: { idFilter?: SearchFilters diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 316e20e352..738b44afcc 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -1,19 +1,24 @@ import { Knex, knex } from "knex" -const BASE_LIMIT = 5000 import { - QueryJson, - SearchFilters, - QueryOptions, - SortDirection, Operation, + QueryJson, + QueryOptions, RelationshipsJson, + 
SearchFilters, + SortDirection, } from "../../definitions/datasource" import { isIsoDateString } from "../utils" +import SqlTableQueryBuilder from "./sqlTable" + +const BASE_LIMIT = 5000 type KnexQuery = Knex.QueryBuilder | Knex function parseBody(body: any) { for (let [key, value] of Object.entries(body)) { + if (Array.isArray(value)) { + body[key] = JSON.stringify(value) + } if (typeof value !== "string") { continue } @@ -243,23 +248,14 @@ function buildDelete( } } -class SqlQueryBuilder { - private readonly sqlClient: string +class SqlQueryBuilder extends SqlTableQueryBuilder { private readonly limit: number // pass through client to get flavour of SQL constructor(client: string, limit: number = BASE_LIMIT) { - this.sqlClient = client + super(client) this.limit = limit } - /** - * @param json the input JSON structure from which an SQL query will be built. - * @return {string} the operation that was found in the JSON. - */ - _operation(json: QueryJson): Operation { - return json.endpoint.operation - } - /** * @param json The JSON query DSL which is to be converted to SQL. * @param opts extra options which are to be passed into the query builder, e.g. disableReturning @@ -267,7 +263,8 @@ class SqlQueryBuilder { * @return {{ sql: string, bindings: object }} the query ready to be passed to the driver. */ _query(json: QueryJson, opts: QueryOptions = {}) { - const client = knex({ client: this.sqlClient }) + const sqlClient = this.getSqlClient() + const client = knex({ client: sqlClient }) let query switch (this._operation(json)) { case Operation.CREATE: @@ -282,6 +279,8 @@ class SqlQueryBuilder { case Operation.DELETE: query = buildDelete(client, json, opts) break + case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE: + return this._tableQuery(json) default: throw `Operation type is not supported by SQL query builder` } diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts new file mode 100644 index 0000000000..f2e727ce62 --- /dev/null +++ b/packages/server/src/integrations/base/sqlTable.ts @@ -0,0 +1,132 @@ +import { Knex, knex } from "knex" +import { Table } from "../../definitions/common" +import { Operation, QueryJson } from "../../definitions/datasource" +import { breakExternalTableId } from "../utils" +import SchemaBuilder = Knex.SchemaBuilder +import CreateTableBuilder = Knex.CreateTableBuilder +const { FieldTypes } = require("../../constants") + +function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record, oldTable: null | Table = null) { + let primaryKey = table && table.primary ? table.primary[0] : null + // can't change primary once its set for now + if (primaryKey && !oldTable) { + schema.increments(primaryKey).primary() + } + const foreignKeys = Object.values(table.schema).map(col => col.foreignKey) + for (let [key, column] of Object.entries(table.schema)) { + // skip things that are already correct + const oldColumn = oldTable ? 
oldTable.schema[key] : null + if ((oldColumn && oldColumn.type === column.type) || primaryKey === key) { + continue + } + switch (column.type) { + case FieldTypes.STRING: case FieldTypes.OPTIONS: case FieldTypes.LONGFORM: + schema.string(key) + break + case FieldTypes.NUMBER: + if (foreignKeys.indexOf(key) === -1) { + schema.float(key) + } + break + case FieldTypes.BOOLEAN: + schema.boolean(key) + break + case FieldTypes.DATETIME: + schema.datetime(key) + break + case FieldTypes.ARRAY: + schema.json(key) + break + case FieldTypes.LINK: + if (!column.foreignKey || !column.tableId) { + throw "Invalid relationship schema" + } + const { tableName } = breakExternalTableId(column.tableId) + // @ts-ignore + const relatedTable = tables[tableName] + if (!relatedTable) { + throw "Referenced table doesn't exist" + } + schema.integer(column.foreignKey).unsigned() + schema.foreign(column.foreignKey).references(`${tableName}.${relatedTable.primary[0]}`) + } + } + return schema +} + +function buildCreateTable( + knex: Knex, + table: Table, + tables: Record, +): SchemaBuilder { + return knex.schema.createTable(table.name, schema => { + generateSchema(schema, table, tables) + }) +} + +function buildUpdateTable( + knex: Knex, + table: Table, + tables: Record, + oldTable: Table, +): SchemaBuilder { + return knex.schema.alterTable(table.name, schema => { + generateSchema(schema, table, tables, oldTable) + }) +} + +function buildDeleteTable( + knex: Knex, + table: Table, +): SchemaBuilder { + return knex.schema.dropTable(table.name) +} + +class SqlTableQueryBuilder { + private readonly sqlClient: string + + // pass through client to get flavour of SQL + constructor(client: string) { + this.sqlClient = client + } + + getSqlClient(): string { + return this.sqlClient + } + + /** + * @param json the input JSON structure from which an SQL query will be built. + * @return {string} the operation that was found in the JSON. 
+ */ + _operation(json: QueryJson): Operation { + return json.endpoint.operation + } + + _tableQuery(json: QueryJson): any { + const client = knex({ client: this.sqlClient }) + let query + if (!json.table || !json.meta || !json.meta.tables) { + throw "Cannot execute without table being specified" + } + switch (this._operation(json)) { + case Operation.CREATE_TABLE: + query = buildCreateTable(client, json.table, json.meta.tables) + break + case Operation.UPDATE_TABLE: + if (!json.meta || !json.meta.table) { + throw "Must specify old table for update" + } + query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table) + break + case Operation.DELETE_TABLE: + query = buildDeleteTable(client, json.table) + break + default: + throw "Table operation is of unknown type" + } + return query.toSQL() + } +} + +export default SqlTableQueryBuilder +module.exports = SqlTableQueryBuilder \ No newline at end of file diff --git a/packages/server/src/integrations/base/utils.ts b/packages/server/src/integrations/base/utils.ts new file mode 100644 index 0000000000..5757232bc7 --- /dev/null +++ b/packages/server/src/integrations/base/utils.ts @@ -0,0 +1,19 @@ +import { QueryJson } from "../../definitions/datasource" +import { Datasource } from "../../definitions/common" + +module DatasourceUtils { + const { integrations } = require("../index") + + export async function makeExternalQuery(datasource: Datasource, json: QueryJson) { + const Integration = integrations[datasource.source] + // query is the opinionated function + if (Integration.prototype.query) { + const integration = new Integration(datasource.config) + return integration.query(json) + } else { + throw "Datasource does not support query." + } + } + + module.exports.makeExternalQuery = makeExternalQuery +} diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index dd7ecd5762..adae9b5fc1 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -263,8 +263,16 @@ module PostgresModule { async query(json: QueryJson) { const operation = this._operation(json).toLowerCase() const input = this._query(json) - const response = await internalQuery(this.client, input) - return response.rows.length ? response.rows : [{ [operation]: true }] + if (Array.isArray(input)) { + const responses = [] + for (let query of input) { + responses.push(await internalQuery(this.client, query)) + } + return responses + } else { + const response = await internalQuery(this.client, input) + return response.rows.length ? response.rows : [{ [operation]: true }] + } } }
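To see how the server-side pieces fit together end to end, here is a hedged sketch of driving the new `SqlTableQueryBuilder` directly. The `Operation.CREATE_TABLE` value and the `QueryJson`/`table`/`meta.tables` shape come from the definitions added in this patch; the client string, datasource id, table name and columns are invented for illustration, and the generated SQL in the comment is only approximate.

```ts
// Paths are relative to packages/server/src; ids and names are made up.
import SqlTableQueryBuilder from "./integrations/base/sqlTable"
import { Operation } from "./definitions/datasource"

const builder = new SqlTableQueryBuilder("pg") // knex client flavour, e.g. postgres

const json = {
  endpoint: {
    datasourceId: "datasource_example",        // hypothetical datasource _id
    entityId: "people",
    operation: Operation.CREATE_TABLE,
  },
  table: {
    _id: "datasource_example__people",         // in practice built by buildExternalTableId()
    name: "people",
    primary: ["id"],
    schema: {
      id: { name: "id", type: "number", autocolumn: true },
      name: { name: "name", type: "string" },
      dob: { name: "dob", type: "datetime" },
    },
  },
  meta: {
    tables: {},                                // the datasource's other tables, keyed by name
  },
}

// _tableQuery() hands the table off to knex's schema builder and returns its
// toSQL() output, an array of { sql, bindings } statements, roughly:
//   create table "people" ("id" serial primary key, "name" varchar(255), "dob" timestamptz)
const statements = builder._tableQuery(json)
```

This is also why the Postgres integration's `query()` now checks `Array.isArray(input)`: a single table operation can expand into several statements, and each one is executed in turn.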