SQL table building.
This commit is contained in: parent 7a6cf5d534 · commit 246d08c804
@@ -98,9 +98,7 @@
on:updatecolumns={onUpdateColumns}
on:updaterows={onUpdateRows}
>
{#if isInternal}
<CreateColumnButton on:updatecolumns={onUpdateColumns} />
{/if}
{#if schema && Object.keys(schema).length > 0}
{#if !isUsersTable}
<CreateRowButton

@@ -129,7 +129,7 @@
bind:selectedRows
allowSelectRows={allowEditing && !isUsersTable}
allowEditRows={allowEditing}
allowEditColumns={allowEditing && isInternal}
allowEditColumns={allowEditing}
showAutoColumns={!hideAutocolumns}
on:editcolumn={e => editColumn(e.detail)}
on:editrow={e => editRow(e.detail)}

@@ -56,7 +56,7 @@
let deletion

$: tableOptions = $tables.list.filter(
table => table._id !== $tables.draft._id && table.type !== "external"
opt => opt._id !== $tables.draft._id && opt.type === table.type
)
$: required = !!field?.constraints?.presence || primaryDisplay
$: uneditable =

@@ -83,6 +83,7 @@
$: canBeRequired =
field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_TYPE
$: relationshipOptions = getRelationshipOptions(field)
$: external = table.type === "external"

async function saveColumn() {
if (field.type === AUTO_TYPE) {

@@ -193,6 +194,27 @@
},
]
}

function getAllowedTypes() {
if (!external) {
return [
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
]
} else {
return [
FIELDS.STRING,
FIELDS.LONGFORM,
FIELDS.OPTIONS,
FIELDS.DATETIME,
FIELDS.NUMBER,
FIELDS.BOOLEAN,
FIELDS.ARRAY,
FIELDS.FORMULA,
FIELDS.LINK,
]
}
}
</script>

<ModalContent

@@ -215,10 +237,7 @@
label="Type"
bind:value={field.type}
on:change={handleTypeChange}
options={[
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
]}
options={getAllowedTypes()}
getOptionLabel={field => field.name}
getOptionValue={field => field.type}
/>
@@ -1,7 +1,7 @@
<script>
import { goto } from "@roxi/routify"
import { allScreens, store } from "builderStore"
import { tables } from "stores/backend"
import { tables, datasources } from "stores/backend"
import {
ActionMenu,
Icon,

@@ -40,7 +40,10 @@
store.actions.screens.delete(templateScreens)
await tables.fetch()
notifications.success("Table deleted")
if (wasSelectedTable._id === table._id) {
if (table.type === "external") {
await datasources.fetch()
}
if (wasSelectedTable && wasSelectedTable._id === table._id) {
$goto("./table")
}
}

@@ -64,9 +67,7 @@
<Icon s hoverable name="MoreSmallList" />
</div>
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
{#if !external}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
{/if}
</ActionMenu>

<Modal bind:this={editorModal}>
@@ -8,17 +8,20 @@
Layout,
Modal,
InlineAlert,
ActionButton,
} from "@budibase/bbui"
import { datasources, integrations, queries, tables } from "stores/backend"
import { notifications } from "@budibase/bbui"
import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
import CreateEditRelationship from "./CreateEditRelationship/CreateEditRelationship.svelte"
import CreateExternalTableModal from "./modals/CreateExternalTableModal.svelte"
import DisplayColumnModal from "./modals/EditDisplayColumnsModal.svelte"
import ICONS from "components/backend/DatasourceNavigator/icons"
import { capitalise } from "helpers"

let relationshipModal
let displayColumnModal
let createExternalTableModal
let selectedFromRelationship, selectedToRelationship

$: datasource = $datasources.list.find(ds => ds._id === $datasources.selected)

@@ -113,6 +116,10 @@
function openDisplayColumnModal() {
displayColumnModal.show()
}

function createNewTable() {
createExternalTableModal.show()
}
</script>

<Modal bind:this={relationshipModal}>

@@ -130,6 +137,10 @@
<DisplayColumnModal {datasource} {plusTables} save={saveDatasource} />
</Modal>

<Modal bind:this={createExternalTableModal}>
<CreateExternalTableModal {datasource} />
</Modal>

{#if datasource && integration}
<section>
<Layout>

@@ -189,6 +200,11 @@
/>
{/if}
<div class="query-list">
<div class="add-table">
<ActionButton quiet icon="TableAdd" on:click={createNewTable}>
New table
</ActionButton>
</div>
{#each plusTables as table}
<div class="query-list-item" on:click={() => onClickTable(table)}>
<p class="query-name">{table.name}</p>

@@ -325,4 +341,9 @@
.table-buttons div {
grid-column-end: -1;
}

.add-table {
margin-right: 0;
margin-left: auto;
}
</style>
@@ -0,0 +1,44 @@
<script>
import { ModalContent, Body, Input } from "@budibase/bbui"
import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify"

export let datasource

let name = ""

$: valid = name && name.length > 0

function buildDefaultTable(tableName, datasourceId) {
return {
name: tableName,
type: "external",
primary: ["id"],
sourceId: datasourceId,
schema: {
id: {
autocolumn: true,
type: "number",
},
},
}
}

async function saveTable() {
const table = await tables.save(buildDefaultTable(name, datasource._id))
await datasources.fetch()
$goto(`../../table/${table._id}`)
}
</script>

<ModalContent
title="Create new table"
confirmText="Create"
onConfirm={saveTable}
disabled={!valid}
>
<Body
>Provide a name for your new table; you can add columns once it is created.</Body
>
<Input label="Table Name" bind:value={name} />
</ModalContent>
@@ -62,6 +62,9 @@ export function createTablesStore() {
const response = await api.post(`/api/tables`, updatedTable)
const savedTable = await response.json()
await fetch()
if (table.type === "external") {
await datasources.fetch()
}
await select(savedTable)
return savedTable
}
@@ -9,7 +9,7 @@ const {
} = require("../../db/utils")
const { BuildSchemaErrors } = require("../../constants")
const { integrations } = require("../../integrations")
const { makeExternalQuery } = require("./row/utils")
const { getDatasourceAndQuery } = require("./row/utils")

exports.fetch = async function (ctx) {
const database = new CouchDB(ctx.appId)

@@ -138,7 +138,7 @@ exports.find = async function (ctx) {
exports.query = async function (ctx) {
const queryJson = ctx.request.body
try {
ctx.body = await makeExternalQuery(ctx.appId, queryJson)
ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson)
} catch (err) {
ctx.throw(400, err)
}
@@ -36,7 +36,7 @@ interface RunConfig {
}

module External {
const { makeExternalQuery } = require("./utils")
const { getDatasourceAndQuery } = require("./utils")
const {
DataSourceOperation,
FieldTypes,

@@ -46,6 +46,7 @@ module External {
const { processObjectSync } = require("@budibase/string-templates")
const { cloneDeep } = require("lodash/fp")
const CouchDB = require("../../../db")
const { processFormulas } = require("../../../utilities/rowProcessor/utils")

function buildFilters(
id: string | undefined,

@@ -225,7 +226,7 @@ module External {
manyRelationships: ManyRelationship[] = []
for (let [key, field] of Object.entries(table.schema)) {
// if set already, or not set just skip it
if ((!row[key] && row[key] !== "") || newRow[key] || field.autocolumn) {
if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) {
continue
}
// if its an empty string then it means return the column to null (if possible)

@@ -361,7 +362,7 @@ module External {
relationships
)
}
return Object.values(finalRows)
return processFormulas(table, Object.values(finalRows))
}

/**

@@ -428,7 +429,7 @@ module External {
const tableId = isMany ? field.through : field.tableId
const manyKey = field.throughFrom || primaryKey
const fieldName = isMany ? manyKey : field.fieldName
const response = await makeExternalQuery(this.appId, {
const response = await getDatasourceAndQuery(this.appId, {
endpoint: getEndpoint(tableId, DataSourceOperation.READ),
filters: {
equal: {

@@ -479,7 +480,7 @@ module External {
: DataSourceOperation.CREATE
if (!found) {
promises.push(
makeExternalQuery(appId, {
getDatasourceAndQuery(appId, {
endpoint: getEndpoint(tableId, operation),
// if we're doing many relationships then we're writing, only one response
body,

@@ -509,7 +510,7 @@ module External {
: DataSourceOperation.UPDATE
const body = isMany ? null : { [colName]: null }
promises.push(
makeExternalQuery(this.appId, {
getDatasourceAndQuery(this.appId, {
endpoint: getEndpoint(tableId, op),
body,
filters,

@@ -532,16 +533,17 @@ module External {
table: Table,
includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE
) {
function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema)
.filter(
column =>
column[1].type !== FieldTypes.LINK &&
column[1].type !== FieldTypes.FORMULA &&
!existing.find((field: string) => field === column[0])
)
.map(column => `${table.name}.${column[0]}`)
}
let fields = extractNonLinkFieldNames(table)
let fields = extractRealFields(table)
for (let field of Object.values(table.schema)) {
if (field.type !== FieldTypes.LINK || !includeRelations) {
continue

@@ -549,7 +551,7 @@ module External {
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
const linkTable = this.tables[linkTableName]
if (linkTable) {
const linkedFields = extractNonLinkFieldNames(linkTable, fields)
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
}
}

@@ -609,7 +611,7 @@ module External {
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
const response = await getDatasourceAndQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment)
if (
operation !== DataSourceOperation.READ &&
@@ -4,8 +4,8 @@ const CouchDB = require("../../../db")
const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
const { integrations } = require("../../../integrations")
const { processStringSync } = require("@budibase/string-templates")
const { makeExternalQuery } = require("../../../integrations/base/utils")

validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {

@@ -17,18 +17,11 @@ validateJs.extend(validateJs.validators.datetime, {
},
})

exports.makeExternalQuery = async (appId, json) => {
exports.getDatasourceAndQuery = async (appId, json) => {
const datasourceId = json.endpoint.datasourceId
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
throw "Datasource does not support query."
}
return makeExternalQuery(datasource, json)
}

exports.findRow = async (ctx, db, tableId, rowId) => {
@@ -0,0 +1,116 @@
const CouchDB = require("../../../db")
const {
buildExternalTableId,
breakExternalTableId,
} = require("../../../integrations/utils")
const { getTable } = require("./utils")
const { DataSourceOperation, FieldTypes } = require("../../../constants")
const { makeExternalQuery } = require("../../../integrations/base/utils")
const { cloneDeep } = require("lodash/fp")

async function makeTableRequest(
datasource,
operation,
table,
tables,
oldTable = null
) {
const json = {
endpoint: {
datasourceId: datasource._id,
entityId: table._id,
operation,
},
meta: {
tables,
},
table,
}
if (oldTable) {
json.meta.table = oldTable
}
return makeExternalQuery(datasource, json)
}

function getDatasourceId(table) {
if (!table) {
throw "No table supplied"
}
if (table.sourceId) {
return table.sourceId
}
return breakExternalTableId(table._id).datasourceId
}

exports.save = async function (ctx) {
const appId = ctx.appId
const table = ctx.request.body
// can't do this
delete table.dataImport
const datasourceId = getDatasourceId(ctx.request.body)
let tableToSave = {
type: "table",
_id: buildExternalTableId(datasourceId, table.name),
...table,
}

let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await getTable(appId, ctx.request.body._id)
}

const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const tables = datasource.entities

// check if relations need setup
for (let [key, schema] of Object.entries(tableToSave.schema)) {
// TODO: this assumes all relationships are the same, need to handle cardinality and many to many
if (schema.type === FieldTypes.LINK) {
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
)
const relatedField = schema.fieldName
const foreignKey = `fk_${relatedTable.name}_${schema.fieldName}`
// create foreign key
tableToSave.schema[foreignKey] = { type: FieldTypes.NUMBER }
// setup the relation in other table and this one
schema.foreignKey = foreignKey
schema.fieldName = foreignKey
schema.main = true
const relatedSchema = cloneDeep(schema)
relatedSchema.fieldName = key
delete relatedSchema.main
relatedTable.schema[relatedField] = relatedSchema
}
}

const operation = oldTable
? DataSourceOperation.UPDATE_TABLE
: DataSourceOperation.CREATE_TABLE
await makeTableRequest(datasource, operation, tableToSave, tables, oldTable)

// store it into couch now for budibase reference
datasource.entities[tableToSave.name] = tableToSave
await db.put(datasource)

return tableToSave
}

exports.destroy = async function (ctx) {
const appId = ctx.appId
const tableToDelete = await getTable(appId, ctx.params.tableId)
const datasourceId = getDatasourceId(tableToDelete)

const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const tables = datasource.entities

const operation = DataSourceOperation.DELETE_TABLE
await makeTableRequest(datasource, operation, tableToDelete, tables)

delete datasource.entities[tableToDelete.name]
await db.put(datasource)

return tableToDelete
}
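
Note on the relationship handling in exports.save above: when a column of type link is saved, the controller generates a numeric foreign-key column named fk_<relatedTable>_<fieldName>, rewrites the link's fieldName to point at it, and mirrors the link schema onto the related table. A minimal illustration follows; the table names, ids and resulting shape are invented for this sketch and are not taken from the commit.

// Illustration only: an "orders" table with a link column pointing at "customers".
const orders: any = {
  name: "orders",
  schema: {
    customer: {
      type: "link",
      tableId: "datasource_plus_example__customers", // hypothetical external table id
      fieldName: "orders", // the column the relationship exposes on "customers"
    },
  },
}
// After exports.save runs, roughly:
//   orders.schema.fk_customers_orders = { type: "number" }       (new foreign key column)
//   orders.schema.customer.foreignKey = "fk_customers_orders"    (link now carries the FK)
//   orders.schema.customer.fieldName  = "fk_customers_orders"    (and is marked main: true)
//   customers.schema.orders = cloned link schema with fieldName "customer" and no main flag
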
@@ -1,16 +1,28 @@
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const internal = require("./internal")
const external = require("./external")
const csvParser = require("../../../utilities/csvParser")
const { isExternalTable } = require("../../../integrations/utils")
const {
getRowParams,
getTableParams,
generateTableID,
getDatasourceParams,
BudibaseInternalDB,
} = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions, getTable } = require("./utils")
const { getTable } = require("./utils")

function pickApi({ tableId, table }) {
if (table && !tableId) {
tableId = table._id
}
if (table && table.type === "external") {
return external
} else if (tableId && isExternalTable(tableId)) {
return external
}
return internal
}

// covers both internal and external
exports.fetch = async function (ctx) {
const db = new CouchDB(ctx.appId)

@@ -50,143 +62,23 @@ exports.find = async function (ctx) {

exports.save = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const { dataImport, ...rest } = ctx.request.body
let tableToSave = {
type: "table",
_id: generateTableID(),
views: {},
...rest,
}

// if the table obj had an _id then it will have been retrieved
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await db.get(ctx.request.body._id)
}

// saving a table is a complex operation, involving many different steps, this
// has been broken out into a utility to make it more obvious/easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({
db,
ctx,
oldTable,
dataImport,
})
tableToSave = await tableSaveFunctions.before(tableToSave)

// make sure that types don't change of a column, have to remove
// the column if you want to change the type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
let column = tableToSave.schema[propKey]
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === "internal") {
oldColumn.type = "auto"
}
if (oldColumn && oldColumn.type !== column.type) {
ctx.throw(400, "Cannot change the type of a column")
}
}
}

// Don't rename if the name is the same
let { _rename } = tableToSave
/* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
delete tableToSave._rename
}

// rename row fields when table column is renamed
/* istanbul ignore next */
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
ctx.throw(400, "Cannot rename a linked column.")
}

tableToSave = await tableSaveFunctions.mid(tableToSave)

// update schema of non-statistics views when new columns are added
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView) continue

if (tableView.schema.group || tableView.schema.field) continue
tableView.schema = tableToSave.schema
}

// update linked rows
try {
const linkResp = await linkRows.updateLinks({
appId,
eventType: oldTable
? linkRows.EventType.TABLE_UPDATED
: linkRows.EventType.TABLE_SAVE,
table: tableToSave,
oldTable: oldTable,
})
if (linkResp != null && linkResp._rev) {
tableToSave._rev = linkResp._rev
}
} catch (err) {
ctx.throw(400, err)
}

// don't perform any updates until relationships have been
// checked by the updateLinks function
const updatedRows = tableSaveFunctions.getUpdatedRows()
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
const result = await db.put(tableToSave)
tableToSave._rev = result.rev

tableToSave = await tableSaveFunctions.after(tableToSave)

ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)

const table = ctx.request.body
const savedTable = await pickApi({ table }).save(ctx)
ctx.status = 200
ctx.message = `Table ${ctx.request.body.name} saved successfully.`
ctx.body = tableToSave
ctx.message = `Table ${table.name} saved successfully.`
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, savedTable)
ctx.body = savedTable
}

exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableToDelete = await db.get(ctx.params.tableId)

// Delete all rows for that table
const rows = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))

// update linked rows
await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.TABLE_DELETE,
table: tableToDelete,
})

// don't remove the table itself until very end
await db.remove(tableToDelete)

// remove table search index
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === `search:${ctx.params.tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
}

const tableId = ctx.params.tableId
const deletedTable = await pickApi({ tableId }).destroy(ctx)
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete)
ctx.eventEmitter.emitTable(`table:delete`, appId, deletedTable)
ctx.status = 200
ctx.body = { message: `Table ${ctx.params.tableId} deleted.` }
ctx.body = { message: `Table ${tableId} deleted.` }
}

exports.validateCSVSchema = async function (ctx) {
@@ -0,0 +1,138 @@
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const { getRowParams, generateTableID } = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions } = require("./utils")

exports.save = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const { dataImport, ...rest } = ctx.request.body
let tableToSave = {
type: "table",
_id: generateTableID(),
views: {},
...rest,
}

// if the table obj had an _id then it will have been retrieved
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await db.get(ctx.request.body._id)
}

// saving a table is a complex operation, involving many different steps, this
// has been broken out into a utility to make it more obvious/easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({
db,
ctx,
oldTable,
dataImport,
})
tableToSave = await tableSaveFunctions.before(tableToSave)

// make sure that types don't change of a column, have to remove
// the column if you want to change the type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
let column = tableToSave.schema[propKey]
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === "internal") {
oldColumn.type = "auto"
}
if (oldColumn && oldColumn.type !== column.type) {
ctx.throw(400, "Cannot change the type of a column")
}
}
}

// Don't rename if the name is the same
let { _rename } = tableToSave
/* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
delete tableToSave._rename
}

// rename row fields when table column is renamed
/* istanbul ignore next */
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
ctx.throw(400, "Cannot rename a linked column.")
}

tableToSave = await tableSaveFunctions.mid(tableToSave)

// update schema of non-statistics views when new columns are added
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView) continue

if (tableView.schema.group || tableView.schema.field) continue
tableView.schema = tableToSave.schema
}

// update linked rows
try {
const linkResp = await linkRows.updateLinks({
appId,
eventType: oldTable
? linkRows.EventType.TABLE_UPDATED
: linkRows.EventType.TABLE_SAVE,
table: tableToSave,
oldTable: oldTable,
})
if (linkResp != null && linkResp._rev) {
tableToSave._rev = linkResp._rev
}
} catch (err) {
ctx.throw(400, err)
}

// don't perform any updates until relationships have been
// checked by the updateLinks function
const updatedRows = tableSaveFunctions.getUpdatedRows()
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
const result = await db.put(tableToSave)
tableToSave._rev = result.rev

tableToSave = await tableSaveFunctions.after(tableToSave)

return tableToSave
}

exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableToDelete = await db.get(ctx.params.tableId)

// Delete all rows for that table
const rows = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))

// update linked rows
await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.TABLE_DELETE,
table: tableToDelete,
})

// don't remove the table itself until very end
await db.remove(tableToDelete)

// remove table search index
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === `search:${ctx.params.tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
}

return tableToDelete
}
@@ -62,6 +62,9 @@ exports.DataSourceOperation = {
READ: "READ",
UPDATE: "UPDATE",
DELETE: "DELETE",
CREATE_TABLE: "CREATE_TABLE",
UPDATE_TABLE: "UPDATE_TABLE",
DELETE_TABLE: "DELETE_TABLE",
}

exports.SortDirection = {

@@ -36,7 +36,7 @@ export interface TableSchema {
export interface Table extends Base {
type?: string
views?: {}
name?: string
name: string
primary?: string[]
schema: TableSchema
primaryDisplay?: string

@@ -5,6 +5,9 @@ export enum Operation {
READ = "READ",
UPDATE = "UPDATE",
DELETE = "DELETE",
CREATE_TABLE = "CREATE_TABLE",
UPDATE_TABLE = "UPDATE_TABLE",
DELETE_TABLE = "DELETE_TABLE",
}

export enum SortDirection {

@@ -142,8 +145,10 @@ export interface QueryJson {
sort?: SortJson
paginate?: PaginationJson
body?: object
table?: Table
meta?: {
table?: Table
tables?: Record<string, Table>
}
extra?: {
idFilter?: SearchFilters
@@ -1,19 +1,24 @@
import { Knex, knex } from "knex"
const BASE_LIMIT = 5000
import {
QueryJson,
SearchFilters,
QueryOptions,
SortDirection,
Operation,
QueryJson,
QueryOptions,
RelationshipsJson,
SearchFilters,
SortDirection,
} from "../../definitions/datasource"
import { isIsoDateString } from "../utils"
import SqlTableQueryBuilder from "./sqlTable"

const BASE_LIMIT = 5000

type KnexQuery = Knex.QueryBuilder | Knex

function parseBody(body: any) {
for (let [key, value] of Object.entries(body)) {
if (Array.isArray(value)) {
body[key] = JSON.stringify(value)
}
if (typeof value !== "string") {
continue
}

@@ -243,23 +248,14 @@ function buildDelete(
}
}

class SqlQueryBuilder {
private readonly sqlClient: string
class SqlQueryBuilder extends SqlTableQueryBuilder {
private readonly limit: number
// pass through client to get flavour of SQL
constructor(client: string, limit: number = BASE_LIMIT) {
this.sqlClient = client
super(client)
this.limit = limit
}

/**
* @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
}

/**
* @param json The JSON query DSL which is to be converted to SQL.
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning

@@ -267,7 +263,8 @@ class SqlQueryBuilder {
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
*/
_query(json: QueryJson, opts: QueryOptions = {}) {
const client = knex({ client: this.sqlClient })
const sqlClient = this.getSqlClient()
const client = knex({ client: sqlClient })
let query
switch (this._operation(json)) {
case Operation.CREATE:

@@ -282,6 +279,8 @@ class SqlQueryBuilder {
case Operation.DELETE:
query = buildDelete(client, json, opts)
break
case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE:
return this._tableQuery(json)
default:
throw `Operation type is not supported by SQL query builder`
}
@@ -0,0 +1,132 @@
import { Knex, knex } from "knex"
import { Table } from "../../definitions/common"
import { Operation, QueryJson } from "../../definitions/datasource"
import { breakExternalTableId } from "../utils"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
const { FieldTypes } = require("../../constants")

function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record<string, Table>, oldTable: null | Table = null) {
let primaryKey = table && table.primary ? table.primary[0] : null
// can't change primary once its set for now
if (primaryKey && !oldTable) {
schema.increments(primaryKey).primary()
}
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
if ((oldColumn && oldColumn.type === column.type) || primaryKey === key) {
continue
}
switch (column.type) {
case FieldTypes.STRING: case FieldTypes.OPTIONS: case FieldTypes.LONGFORM:
schema.string(key)
break
case FieldTypes.NUMBER:
if (foreignKeys.indexOf(key) === -1) {
schema.float(key)
}
break
case FieldTypes.BOOLEAN:
schema.boolean(key)
break
case FieldTypes.DATETIME:
schema.datetime(key)
break
case FieldTypes.ARRAY:
schema.json(key)
break
case FieldTypes.LINK:
if (!column.foreignKey || !column.tableId) {
throw "Invalid relationship schema"
}
const { tableName } = breakExternalTableId(column.tableId)
// @ts-ignore
const relatedTable = tables[tableName]
if (!relatedTable) {
throw "Referenced table doesn't exist"
}
schema.integer(column.foreignKey).unsigned()
schema.foreign(column.foreignKey).references(`${tableName}.${relatedTable.primary[0]}`)
}
}
return schema
}

function buildCreateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
): SchemaBuilder {
return knex.schema.createTable(table.name, schema => {
generateSchema(schema, table, tables)
})
}

function buildUpdateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
oldTable: Table,
): SchemaBuilder {
return knex.schema.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable)
})
}

function buildDeleteTable(
knex: Knex,
table: Table,
): SchemaBuilder {
return knex.schema.dropTable(table.name)
}

class SqlTableQueryBuilder {
private readonly sqlClient: string

// pass through client to get flavour of SQL
constructor(client: string) {
this.sqlClient = client
}

getSqlClient(): string {
return this.sqlClient
}

/**
* @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
}

_tableQuery(json: QueryJson): any {
const client = knex({ client: this.sqlClient })
let query
if (!json.table || !json.meta || !json.meta.tables) {
throw "Cannot execute without table being specified"
}
switch (this._operation(json)) {
case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables)
break
case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) {
throw "Must specify old table for update"
}
query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table)
break
case Operation.DELETE_TABLE:
query = buildDeleteTable(client, json.table)
break
default:
throw "Table operation is of unknown type"
}
return query.toSQL()
}
}

export default SqlTableQueryBuilder
module.exports = SqlTableQueryBuilder
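
To make the new table builder concrete, here is a hedged usage sketch: a hypothetical CREATE_TABLE QueryJson (the datasource id, table name and columns are invented) compiled through SqlTableQueryBuilder. Because the schema builder is used, toSQL() yields an array of compiled statements rather than a single { sql, bindings } object.

// Sketch only, not part of the commit: exercise the builder directly with made-up data.
import SqlTableQueryBuilder from "./sqlTable"
import { Operation } from "../../definitions/datasource"

const builder = new SqlTableQueryBuilder("pg") // assumes the Postgres flavour of SQL
const json: any = {
  endpoint: {
    datasourceId: "datasource_example", // hypothetical
    entityId: "people",
    operation: Operation.CREATE_TABLE,
  },
  meta: { tables: {} }, // no related tables needed for a link-free schema
  table: {
    _id: "datasource_plus_datasource_example__people", // hypothetical external table id
    name: "people",
    primary: ["id"],
    schema: {
      id: { type: "number", autocolumn: true },
      name: { type: "string" },
      premium: { type: "boolean" },
    },
  },
}
// Compiles to something like: create table "people" ("id" serial primary key, "name" varchar(255), "premium" boolean)
const statements = builder._tableQuery(json)
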
@@ -0,0 +1,19 @@
import { QueryJson } from "../../definitions/datasource"
import { Datasource } from "../../definitions/common"

module DatasourceUtils {
const { integrations } = require("../index")

export async function makeExternalQuery(datasource: Datasource, json: QueryJson) {
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
throw "Datasource does not support query."
}
}

module.exports.makeExternalQuery = makeExternalQuery
}
@@ -263,10 +263,18 @@ module PostgresModule {
async query(json: QueryJson) {
const operation = this._operation(json).toLowerCase()
const input = this._query(json)
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await internalQuery(this.client, query))
}
return responses
} else {
const response = await internalQuery(this.client, input)
return response.rows.length ? response.rows : [{ [operation]: true }]
}
}
}

module.exports = {
schema: SCHEMA,
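
One detail worth noting about the Array.isArray branch above: table operations routed through _tableQuery come back from knex's schema builder as an array of compiled statements (DDL can span several statements), while ordinary row queries compile to a single object, hence the loop. A minimal sketch follows; the helper is hypothetical and assumes a node-postgres style client, it is not part of the commit.

// Hypothetical helper, not from the commit: execute a batch of compiled statements in order.
interface CompiledStatement {
  sql: string
  bindings?: any[]
}

async function runBatch(
  client: { query: (sql: string, values?: any[]) => Promise<any> },
  statements: CompiledStatement[]
) {
  const responses = []
  for (let statement of statements) {
    // node-postgres style: client.query(text, values) returns a promise of the result
    responses.push(await client.query(statement.sql, statement.bindings))
  }
  return responses
}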