Merge pull request #3291 from Budibase/feature/mssql-plus
MS SQL Server data source plus
commit 94e952984d
@@ -8,7 +8,11 @@
   import CreateEditRow from "./modals/CreateEditRow.svelte"
   import CreateEditUser from "./modals/CreateEditUser.svelte"
   import CreateEditColumn from "./modals/CreateEditColumn.svelte"
-  import { TableNames, UNEDITABLE_USER_FIELDS } from "constants"
+  import {
+    TableNames,
+    UNEDITABLE_USER_FIELDS,
+    UNSORTABLE_TYPES,
+  } from "constants"
   import RoleCell from "./cells/RoleCell.svelte"

   export let schema = {}
@@ -33,6 +37,15 @@
   $: isUsersTable = tableId === TableNames.USERS
   $: data && resetSelectedRows()
   $: editRowComponent = isUsersTable ? CreateEditUser : CreateEditRow
+  $: {
+    UNSORTABLE_TYPES.forEach(type => {
+      Object.values(schema).forEach(col => {
+        if (col.type === type) {
+          col.sortable = false
+        }
+      })
+    })
+  }
   $: {
     if (isUsersTable) {
       customRenderers = [
@@ -59,9 +59,6 @@
   let deletion

   $: checkConstraints(field)
-  $: tableOptions = $tables.list.filter(
-    opt => opt._id !== $tables.draft._id && opt.type === table.type
-  )
   $: required = !!field?.constraints?.presence || primaryDisplay
   $: uneditable =
     $tables.selected?._id === TableNames.USERS &&
@@ -88,6 +85,13 @@
   field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_TYPE
   $: relationshipOptions = getRelationshipOptions(field)
   $: external = table.type === "external"
+  // in the case of internal tables the sourceId will just be undefined
+  $: tableOptions = $tables.list.filter(
+    opt =>
+      opt._id !== $tables.draft._id &&
+      opt.type === table.type &&
+      table.sourceId === opt.sourceId
+  )

   async function saveColumn() {
     if (field.type === AUTO_TYPE) {
@@ -174,7 +178,7 @@
   if (!field || !field.tableId) {
     return null
   }
-  const linkTable = tableOptions.find(table => table._id === field.tableId)
+  const linkTable = tableOptions?.find(table => table._id === field.tableId)
   if (!linkTable) {
     return null
   }
@@ -39,6 +39,8 @@ export const UNEDITABLE_USER_FIELDS = [
   "lastName",
 ]

+export const UNSORTABLE_TYPES = ["formula", "attachment", "array", "link"]
+
 export const LAYOUT_NAMES = {
   MASTER: {
     PRIVATE: "layout_private_master",
@@ -79,6 +79,10 @@
   try {
     // Create datasource
     await datasources.save(datasource)
+    if (datasource?.plus) {
+      await tables.fetch()
+    }
+    await datasources.fetch()
     notifications.success(`Datasource ${name} updated successfully.`)
   } catch (err) {
     notifications.error(`Error saving datasource: ${err}`)
@@ -2,6 +2,7 @@ import { cloneDeep } from "lodash/fp"
 import { fetchTableData, fetchTableDefinition } from "./tables"
 import { fetchViewData } from "./views"
 import { fetchRelationshipData } from "./relationships"
+import { FieldTypes } from "../constants"
 import { executeQuery, fetchQueryDefinition } from "./queries"

 /**
@@ -28,7 +29,7 @@ export const fetchDatasource = async dataSource => {
       }
     }
     rows = await executeQuery({ queryId: dataSource._id, parameters })
-  } else if (type === "link") {
+  } else if (type === FieldTypes.LINK) {
     rows = await fetchRelationshipData({
       rowId: dataSource.rowId,
       tableId: dataSource.rowTableId,
@@ -1,6 +1,7 @@
 import { notificationStore, dataSourceStore } from "stores"
 import API from "./api"
 import { fetchTableDefinition } from "./tables"
+import { FieldTypes } from "../constants"

 /**
  * Fetches data about a certain row in a table.
@@ -129,7 +130,7 @@ export const enrichRows = async (rows, tableId) => {
   const keys = Object.keys(schema)
   for (let key of keys) {
     const type = schema[key].type
-    if (type === "link" && Array.isArray(row[key])) {
+    if (type === FieldTypes.LINK && Array.isArray(row[key])) {
       // Enrich row a string join of relationship fields
       row[`${key}_text`] =
         row[key]
@@ -1,6 +1,7 @@
 <script>
   import { getContext } from "svelte"
   import { Heading, Icon } from "@budibase/bbui"
+  import { FieldTypes } from "../../constants"
   import active from "svelte-spa-router/active"

   const { routeStore, styleable, linkable, builderStore } = getContext("sdk")
@@ -108,7 +109,7 @@
     {#each validLinks as { text, url }}
       {#if isInternal(url)}
         <a
-          class="link"
+          class={FieldTypes.LINK}
           href={url}
           use:linkable
           on:click={close}
@@ -117,7 +118,11 @@
           {text}
         </a>
       {:else}
-        <a class="link" href={ensureExternal(url)} on:click={close}>
+        <a
+          class={FieldTypes.LINK}
+          href={ensureExternal(url)}
+          on:click={close}
+        >
           {text}
         </a>
       {/if}
@@ -2,6 +2,7 @@
   import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
   import { getContext } from "svelte"
   import Field from "./Field.svelte"
+  import { FieldTypes } from "../../../constants"

   const { API } = getContext("sdk")

@@ -68,7 +69,7 @@
   {field}
   {disabled}
   {validation}
-  type="link"
+  type={FieldTypes.LINK}
   bind:fieldState
   bind:fieldApi
   bind:fieldSchema
@@ -1,4 +1,5 @@
 import flatpickr from "flatpickr"
+import { FieldTypes } from "../../../constants"

 /**
  * Creates a validation function from a combination of schema-level constraints
@@ -154,7 +155,7 @@ const parseType = (value, type) => {
   }

   // Parse as string
-  if (type === "string") {
+  if (type === FieldTypes.STRING) {
     if (typeof value === "string" || Array.isArray(value)) {
       return value
     }
@@ -165,7 +166,7 @@ const parseType = (value, type) => {
   }

   // Parse as number
-  if (type === "number") {
+  if (type === FieldTypes.NUMBER) {
     if (isNaN(value)) {
       return null
     }
@@ -173,7 +174,7 @@ const parseType = (value, type) => {
   }

   // Parse as date
-  if (type === "datetime") {
+  if (type === FieldTypes.DATETIME) {
     if (value instanceof Date) {
       return value.getTime()
     }
@@ -182,7 +183,7 @@ const parseType = (value, type) => {
   }

   // Parse as boolean
-  if (type === "boolean") {
+  if (type === FieldTypes.BOOLEAN) {
     if (typeof value === "string") {
       return value.toLowerCase() === "true"
     }
@@ -190,7 +191,7 @@ const parseType = (value, type) => {
   }

   // Parse attachments, treating no elements as null
-  if (type === "attachment") {
+  if (type === FieldTypes.ATTACHMENT) {
     if (!Array.isArray(value) || !value.length) {
       return null
     }
@@ -198,14 +199,14 @@ const parseType = (value, type) => {
   }

   // Parse links, treating no elements as null
-  if (type === "link") {
+  if (type === FieldTypes.LINK) {
     if (!Array.isArray(value) || !value.length) {
       return null
     }
     return value
   }

-  if (type === "array") {
+  if (type === FieldTypes.ARRAY) {
     if (!Array.isArray(value) || !value.length) {
       return null
     }
@@ -2,6 +2,7 @@
   import { getContext } from "svelte"
   import { Table } from "@budibase/bbui"
   import SlotRenderer from "./SlotRenderer.svelte"
+  import { UnsortableTypes } from "../../../constants"

   export let dataProvider
   export let columns
@@ -73,8 +74,12 @@
         divider: true,
       }
     }

     fields.forEach(field => {
       newSchema[field] = schema[field]
+      if (schema[field] && UnsortableTypes.indexOf(schema[field].type) !== -1) {
+        newSchema[field].sortable = false
+      }
     })
     return newSchema
   }
@@ -2,6 +2,26 @@ export const TableNames = {
   USERS: "ta_users",
 }

+export const FieldTypes = {
+  STRING: "string",
+  LONGFORM: "longform",
+  OPTIONS: "options",
+  NUMBER: "number",
+  BOOLEAN: "boolean",
+  ARRAY: "array",
+  DATETIME: "datetime",
+  ATTACHMENT: "attachment",
+  LINK: "link",
+  FORMULA: "formula",
+}
+
+export const UnsortableTypes = [
+  FieldTypes.FORMULA,
+  FieldTypes.ATTACHMENT,
+  FieldTypes.ARRAY,
+  FieldTypes.LINK,
+]
+
 export const ActionTypes = {
   ValidateForm: "ValidateForm",
   RefreshDatasource: "RefreshDatasource",
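These constants give the client a single source of truth for field-type strings, mirroring the server-side `FieldTypes`. A minimal sketch of the resulting call sites (this is the pattern the surrounding hunks apply, not new PR code):

```ts
// comparisons now reference the shared constant rather than a bare literal
if (fieldSchema.type === FieldTypes.LINK) {
  // relationship handling
}
```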
@@ -1,5 +1,6 @@
 import { writable, get } from "svelte/store"
 import { fetchTableDefinition } from "../api"
+import { FieldTypes } from "../constants"

 export const createDataSourceStore = () => {
   const store = writable([])
@@ -20,7 +21,7 @@ export const createDataSourceStore = () => {

   // Only one side of the relationship is required as a trigger, as it will
   // automatically invalidate related table IDs
-  else if (dataSource.type === "link") {
+  else if (dataSource.type === FieldTypes.LINK) {
     dataSourceId = dataSource.tableId || dataSource.rowTableId
   }

@@ -72,7 +73,7 @@ export const createDataSourceStore = () => {
   if (schema) {
     Object.values(schema).forEach(fieldSchema => {
       if (
-        fieldSchema.type === "link" &&
+        fieldSchema.type === FieldTypes.LINK &&
         fieldSchema.tableId &&
         !fieldSchema.autocolumn
       ) {
@@ -15,7 +15,7 @@ module MsSqlMock {
   mssql.ConnectionPool = jest.fn(() => ({
     connect: jest.fn(() => ({
       request: jest.fn(() => ({
-        query: jest.fn(() => ({})),
+        query: jest.fn(sql => ({ recordset: [sql] })),
       })),
     })),
   }))
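Because the mocked `query()` now echoes its SQL back as the recordset, tests can assert on the exact statement an integration generated. A sketch (not part of the PR, written as it would appear inside an async jest test):

```ts
const mssql = require("mssql")

const pool = new mssql.ConnectionPool()
const client = await pool.connect()
// the mock returns the SQL string itself, so the assertion pins the query text
const result = await client.request().query("SELECT TOP 1 * FROM products")
expect(result.recordset).toEqual(["SELECT TOP 1 * FROM products"])
```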
@@ -0,0 +1,9 @@
+FROM mcr.microsoft.com/mssql/server
+
+ENV ACCEPT_EULA=Y
+ENV SA_PASSWORD=Passw0rd
+
+COPY ./data /
+
+ENTRYPOINT [ "/bin/bash", "entrypoint.sh" ]
+CMD [ "/opt/mssql/bin/sqlservr" ]
@@ -0,0 +1,24 @@
+#!/bin/bash
+set -e
+
+if [ "$1" = '/opt/mssql/bin/sqlservr' ]; then
+  # If this is the container's first run, initialize the application database
+  if [ ! -f /tmp/app-initialized ]; then
+    # Initialize the application database asynchronously in a background process. This allows a) the SQL Server process to be the main process in the container, which allows graceful shutdown and other goodies, and b) us to only start the SQL Server process once, as opposed to starting, stopping, then starting it again.
+    function initialize_app_database() {
+      # Wait a bit for SQL Server to start. SQL Server's process doesn't provide a clever way to check if it's up or not, and it needs to be up before we can import the application database
+      sleep 30s
+
+      echo "RUNNING BUDIBASE SETUP"
+
+      # run the setup script to create the DB and the schema in the DB
+      /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Passw0rd -i setup.sql
+
+      # Note that the container has been initialized so future starts won't wipe changes to the data
+      touch /tmp/app-initialized
+    }
+    initialize_app_database &
+  fi
+fi
+
+exec "$@"
@@ -0,0 +1,54 @@
+USE master;
+
+IF OBJECT_ID ('dbo.products', 'U') IS NOT NULL
+  DROP TABLE products;
+GO
+CREATE TABLE products
+(
+  id int IDENTITY(1,1),
+  name varchar (20),
+  description varchar(30),
+  CONSTRAINT pk_products PRIMARY KEY NONCLUSTERED (id)
+);
+
+IF OBJECT_ID ('dbo.tasks', 'U') IS NOT NULL
+  DROP TABLE tasks;
+GO
+CREATE TABLE tasks
+(
+  taskid int IDENTITY(1,1),
+  taskname varchar (20),
+  productid int,
+  CONSTRAINT pk_tasks PRIMARY KEY NONCLUSTERED (taskid),
+  CONSTRAINT fk_products FOREIGN KEY (productid) REFERENCES products (id),
+);
+
+IF OBJECT_ID ('dbo.people', 'U') IS NOT NULL
+  DROP TABLE people;
+GO
+CREATE TABLE people
+(
+  name varchar(30),
+  age varchar(20),
+  CONSTRAINT pk_people PRIMARY KEY NONCLUSTERED (name, age)
+);
+
+INSERT products
+  (name, description)
+VALUES
+  ('Bananas', 'Fruit thing');
+
+INSERT products
+  (name, description)
+VALUES
+  ('Meat', 'Animal thing');
+
+INSERT tasks
+  (taskname, productid)
+VALUES
+  ('Processing', 1);
+
+INSERT people
+  (name, age)
+VALUES
+  ('Bob', '30');
@@ -0,0 +1,12 @@
+version: "3.8"
+services:
+  # password: Passw0rd
+  # user: sa
+  # database: master
+  mssql:
+    image: bb/mssql
+    build:
+      context: .
+      dockerfile: data/Dockerfile
+    ports:
+      - "1433:1433"
@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f
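Together these files give a disposable SQL Server instance on `localhost:1433` seeded with the tables above. A hypothetical datasource config for pointing the new MS SQL plus integration at it; the credential values come from the Dockerfile/compose comments, while the field names follow node-mssql conventions and are illustrative only:

```ts
// sketch only - not a config object shipped in this PR
const testConfig = {
  server: "localhost",
  port: 1433,
  user: "sa",
  password: "Passw0rd", // SA_PASSWORD from the Dockerfile
  database: "master",   // setup.sql creates its tables in master
  encrypt: false,
}
```

The `reset.sh` script tears the container and its volumes down so the entrypoint's first-run initialization fires again on the next `docker-compose up`.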
@@ -163,8 +163,8 @@ module External {
     }
   }

-  function basicProcessing(row: Row, table: Table) {
-    const thisRow: { [key: string]: any } = {}
+  function basicProcessing(row: Row, table: Table): Row {
+    const thisRow: Row = {}
     // filter the row down to what is actually the row (not joined)
     for (let fieldName of Object.keys(table.schema)) {
       const value = row[`${table.name}.${fieldName}`] || row[fieldName]
@@ -179,6 +179,23 @@ module External {
     return thisRow
   }

+  function fixArrayTypes(row: Row, table: Table) {
+    for (let [fieldName, schema] of Object.entries(table.schema)) {
+      if (
+        schema.type === FieldTypes.ARRAY &&
+        typeof row[fieldName] === "string"
+      ) {
+        try {
+          row[fieldName] = JSON.parse(row[fieldName])
+        } catch (err) {
+          // couldn't convert back to array, ignore
+          delete row[fieldName]
+        }
+      }
+    }
+    return row
+  }
+
   function isMany(field: FieldSchema) {
     return (
       field.relationshipType && field.relationshipType.split("-")[0] === "many"
@@ -226,7 +243,12 @@ module External {
   manyRelationships: ManyRelationship[] = []
   for (let [key, field] of Object.entries(table.schema)) {
     // if set already, or not set just skip it
-    if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) {
+    if (
+      row[key] == null ||
+      newRow[key] ||
+      field.autocolumn ||
+      field.type === FieldTypes.FORMULA
+    ) {
       continue
     }
     // if its an empty string then it means return the column to null (if possible)
@@ -337,7 +359,7 @@ module External {
     table: Table,
     relationships: RelationshipsJson[]
   ) {
-    if (rows[0].read === true) {
+    if (!rows || rows.length === 0 || rows[0].read === true) {
       return []
     }
     let finalRows: { [key: string]: Row } = {}
@@ -353,7 +375,10 @@ module External {
     )
     continue
   }
-  const thisRow = basicProcessing(row, table)
+  const thisRow = fixArrayTypes(basicProcessing(row, table), table)
+  if (thisRow._id == null) {
+    throw "Unable to generate row ID for SQL rows"
+  }
   finalRows[thisRow._id] = thisRow
   // do this at end once its been added to the final rows
   finalRows = this.updateRelationshipColumns(
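The new `fixArrayTypes` step exists because SQL engines without a native array type hand array columns back as stringified JSON. A minimal illustration of the behavior (hypothetical table and row values, hedged against the function as added above):

```ts
// sketch: what fixArrayTypes does to a row coming back from SQL Server
const table = { name: "people", schema: { tags: { type: "array" } } } as any
const row = { tags: '["a","b"]' } as any

fixArrayTypes(row, table)
// row.tags is now ["a", "b"]; a value that fails JSON.parse is deleted
// from the row rather than being passed through as a broken string
```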
@@ -119,7 +119,7 @@ export interface SortJson {

 export interface PaginationJson {
   limit: number
-  page: string | number
+  page?: string | number
 }

 export interface RelationshipsJson {
@@ -7,7 +7,7 @@ import {
   SearchFilters,
   SortDirection,
 } from "../../definitions/datasource"
-import { isIsoDateString } from "../utils"
+import { isIsoDateString, SqlClients } from "../utils"
 import SqlTableQueryBuilder from "./sqlTable"

 const BASE_LIMIT = 5000
@@ -29,222 +29,236 @@ function parseBody(body: any) {
   return body
 }

-// right now we only do filters on the specific table being queried
-function addFilters(
-  tableName: string,
-  query: KnexQuery,
-  filters: SearchFilters | undefined
-): KnexQuery {
-  function iterate(
-    structure: { [key: string]: any },
-    fn: (key: string, value: any) => void
-  ) {
-    for (let [key, value] of Object.entries(structure)) {
-      fn(`${tableName}.${key}`, value)
-    }
-  }
-  if (!filters) {
-    return query
-  }
-  // if all or specified in filters, then everything is an or
-  const allOr = filters.allOr
-  if (filters.oneOf) {
-    iterate(filters.oneOf, (key, array) => {
-      const fnc = allOr ? "orWhereIn" : "whereIn"
-      query = query[fnc](key, array)
-    })
-  }
-  if (filters.string) {
-    iterate(filters.string, (key, value) => {
-      const fnc = allOr ? "orWhere" : "where"
-      query = query[fnc](key, "ilike", `${value}%`)
-    })
-  }
-  if (filters.fuzzy) {
-    iterate(filters.fuzzy, (key, value) => {
-      const fnc = allOr ? "orWhere" : "where"
-      query = query[fnc](key, "ilike", `%${value}%`)
-    })
-  }
-  if (filters.range) {
-    iterate(filters.range, (key, value) => {
-      if (!value.high || !value.low) {
-        return
-      }
-      const fnc = allOr ? "orWhereBetween" : "whereBetween"
-      query = query[fnc](key, [value.low, value.high])
-    })
-  }
-  if (filters.equal) {
-    iterate(filters.equal, (key, value) => {
-      const fnc = allOr ? "orWhere" : "where"
-      query = query[fnc]({ [key]: value })
-    })
-  }
-  if (filters.notEqual) {
-    iterate(filters.notEqual, (key, value) => {
-      const fnc = allOr ? "orWhereNot" : "whereNot"
-      query = query[fnc]({ [key]: value })
-    })
-  }
-  if (filters.empty) {
-    iterate(filters.empty, key => {
-      const fnc = allOr ? "orWhereNull" : "whereNull"
-      query = query[fnc](key)
-    })
-  }
-  if (filters.notEmpty) {
-    iterate(filters.notEmpty, key => {
-      const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
-      query = query[fnc](key)
-    })
-  }
-  return query
-}
-
-function addRelationships(
-  knex: Knex,
-  query: KnexQuery,
-  fields: string | string[],
-  fromTable: string,
-  relationships: RelationshipsJson[] | undefined
-): KnexQuery {
-  if (!relationships) {
-    return query
-  }
-  for (let relationship of relationships) {
-    const from = relationship.from,
-      to = relationship.to,
-      toTable = relationship.tableName
-    if (!relationship.through) {
-      // @ts-ignore
-      query = query.leftJoin(
-        toTable,
-        `${fromTable}.${from}`,
-        `${toTable}.${to}`
-      )
-    } else {
-      const throughTable = relationship.through
-      const fromPrimary = relationship.fromPrimary
-      const toPrimary = relationship.toPrimary
-      query = query
-        // @ts-ignore
-        .leftJoin(
-          throughTable,
-          `${fromTable}.${fromPrimary}`,
-          `${throughTable}.${from}`
-        )
-        .leftJoin(toTable, `${toTable}.${toPrimary}`, `${throughTable}.${to}`)
-    }
-  }
-  return query.limit(BASE_LIMIT)
-}
-
-function buildCreate(
-  knex: Knex,
-  json: QueryJson,
-  opts: QueryOptions
-): KnexQuery {
-  const { endpoint, body } = json
-  let query: KnexQuery = knex(endpoint.entityId)
-  const parsedBody = parseBody(body)
-  // make sure no null values in body for creation
-  for (let [key, value] of Object.entries(parsedBody)) {
-    if (value == null) {
-      delete parsedBody[key]
-    }
-  }
-  // mysql can't use returning
-  if (opts.disableReturning) {
-    return query.insert(parsedBody)
-  } else {
-    return query.insert(parsedBody).returning("*")
-  }
-}
-
-function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
-  let { endpoint, resource, filters, sort, paginate, relationships } = json
-  const tableName = endpoint.entityId
-  // select all if not specified
-  if (!resource) {
-    resource = { fields: [] }
-  }
-  let selectStatement: string | string[] = "*"
-  // handle select
-  if (resource.fields && resource.fields.length > 0) {
-    // select the resources as the format "table.columnName" - this is what is provided
-    // by the resource builder further up
-    selectStatement = resource.fields.map(field => `${field} as ${field}`)
-  }
-  let foundLimit = limit || BASE_LIMIT
-  // handle pagination
-  let foundOffset: number | null = null
-  if (paginate && paginate.page && paginate.limit) {
-    // @ts-ignore
-    const page = paginate.page <= 1 ? 0 : paginate.page - 1
-    const offset = page * paginate.limit
-    foundLimit = paginate.limit
-    foundOffset = offset
-  } else if (paginate && paginate.limit) {
-    foundLimit = paginate.limit
-  }
-  // start building the query
-  let query: KnexQuery = knex(tableName).limit(foundLimit)
-  if (foundOffset) {
-    query = query.offset(foundOffset)
-  }
-  if (sort) {
-    for (let [key, value] of Object.entries(sort)) {
-      const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
-      query = query.orderBy(key, direction)
-    }
-  }
-  query = addFilters(tableName, query, filters)
-  // @ts-ignore
-  let preQuery: KnexQuery = knex({
-    // @ts-ignore
-    [tableName]: query,
-  }).select(selectStatement)
-  // handle joins
-  return addRelationships(
-    knex,
-    preQuery,
-    selectStatement,
-    tableName,
-    relationships
-  )
-}
-
-function buildUpdate(
-  knex: Knex,
-  json: QueryJson,
-  opts: QueryOptions
-): KnexQuery {
-  const { endpoint, body, filters } = json
-  let query: KnexQuery = knex(endpoint.entityId)
-  const parsedBody = parseBody(body)
-  query = addFilters(endpoint.entityId, query, filters)
-  // mysql can't use returning
-  if (opts.disableReturning) {
-    return query.update(parsedBody)
-  } else {
-    return query.update(parsedBody).returning("*")
-  }
-}
-
-function buildDelete(
-  knex: Knex,
-  json: QueryJson,
-  opts: QueryOptions
-): KnexQuery {
-  const { endpoint, filters } = json
-  let query: KnexQuery = knex(endpoint.entityId)
-  query = addFilters(endpoint.entityId, query, filters)
-  // mysql can't use returning
-  if (opts.disableReturning) {
-    return query.delete()
-  } else {
-    return query.delete().returning("*")
-  }
-}
+class InternalBuilder {
+  private readonly client: string
+
+  constructor(client: string) {
+    this.client = client
+  }
+
+  // right now we only do filters on the specific table being queried
+  addFilters(
+    tableName: string,
+    query: KnexQuery,
+    filters: SearchFilters | undefined
+  ): KnexQuery {
+    function iterate(
+      structure: { [key: string]: any },
+      fn: (key: string, value: any) => void
+    ) {
+      for (let [key, value] of Object.entries(structure)) {
+        fn(`${tableName}.${key}`, value)
+      }
+    }
+    if (!filters) {
+      return query
+    }
+    // if all or specified in filters, then everything is an or
+    const allOr = filters.allOr
+    if (filters.oneOf) {
+      iterate(filters.oneOf, (key, array) => {
+        const fnc = allOr ? "orWhereIn" : "whereIn"
+        query = query[fnc](key, array)
+      })
+    }
+    if (filters.string) {
+      iterate(filters.string, (key, value) => {
+        const fnc = allOr ? "orWhere" : "where"
+        // postgres supports ilike, nothing else does
+        if (this.client === SqlClients.POSTGRES) {
+          query = query[fnc](key, "ilike", `${value}%`)
+        } else {
+          const rawFnc = `${fnc}Raw`
+          // @ts-ignore
+          query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`${value}%`])
+        }
+      })
+    }
+    if (filters.fuzzy) {
+      iterate(filters.fuzzy, (key, value) => {
+        const fnc = allOr ? "orWhere" : "where"
+        // postgres supports ilike, nothing else does
+        if (this.client === SqlClients.POSTGRES) {
+          query = query[fnc](key, "ilike", `%${value}%`)
+        } else {
+          const rawFnc = `${fnc}Raw`
+          // @ts-ignore
+          query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`%${value}%`])
+        }
+      })
+    }
+    if (filters.range) {
+      iterate(filters.range, (key, value) => {
+        if (!value.high || !value.low) {
+          return
+        }
+        const fnc = allOr ? "orWhereBetween" : "whereBetween"
+        query = query[fnc](key, [value.low, value.high])
+      })
+    }
+    if (filters.equal) {
+      iterate(filters.equal, (key, value) => {
+        const fnc = allOr ? "orWhere" : "where"
+        query = query[fnc]({ [key]: value })
+      })
+    }
+    if (filters.notEqual) {
+      iterate(filters.notEqual, (key, value) => {
+        const fnc = allOr ? "orWhereNot" : "whereNot"
+        query = query[fnc]({ [key]: value })
+      })
+    }
+    if (filters.empty) {
+      iterate(filters.empty, key => {
+        const fnc = allOr ? "orWhereNull" : "whereNull"
+        query = query[fnc](key)
+      })
+    }
+    if (filters.notEmpty) {
+      iterate(filters.notEmpty, key => {
+        const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
+        query = query[fnc](key)
+      })
+    }
+    return query
+  }
+
+  addRelationships(
+    knex: Knex,
+    query: KnexQuery,
+    fields: string | string[],
+    fromTable: string,
+    relationships: RelationshipsJson[] | undefined
+  ): KnexQuery {
+    if (!relationships) {
+      return query
+    }
+    for (let relationship of relationships) {
+      const from = relationship.from,
+        to = relationship.to,
+        toTable = relationship.tableName
+      if (!relationship.through) {
+        // @ts-ignore
+        query = query.leftJoin(
+          toTable,
+          `${fromTable}.${from}`,
+          `${toTable}.${to}`
+        )
+      } else {
+        const throughTable = relationship.through
+        const fromPrimary = relationship.fromPrimary
+        const toPrimary = relationship.toPrimary
+        query = query
+          // @ts-ignore
+          .leftJoin(
+            throughTable,
+            `${fromTable}.${fromPrimary}`,
+            `${throughTable}.${from}`
+          )
+          .leftJoin(toTable, `${toTable}.${toPrimary}`, `${throughTable}.${to}`)
+      }
+    }
+    return query.limit(BASE_LIMIT)
+  }
+
+  create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+    const { endpoint, body } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    const parsedBody = parseBody(body)
+    // make sure no null values in body for creation
+    for (let [key, value] of Object.entries(parsedBody)) {
+      if (value == null) {
+        delete parsedBody[key]
+      }
+    }
+    // mysql can't use returning
+    if (opts.disableReturning) {
+      return query.insert(parsedBody)
+    } else {
+      return query.insert(parsedBody).returning("*")
+    }
+  }
+
+  read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
+    let { endpoint, resource, filters, sort, paginate, relationships } = json
+    const tableName = endpoint.entityId
+    // select all if not specified
+    if (!resource) {
+      resource = { fields: [] }
+    }
+    let selectStatement: string | string[] = "*"
+    // handle select
+    if (resource.fields && resource.fields.length > 0) {
+      // select the resources as the format "table.columnName" - this is what is provided
+      // by the resource builder further up
+      selectStatement = resource.fields.map(field => `${field} as ${field}`)
+    }
+    let foundLimit = limit || BASE_LIMIT
+    // handle pagination
+    let foundOffset: number | null = null
+    if (paginate && paginate.page && paginate.limit) {
+      // @ts-ignore
+      const page = paginate.page <= 1 ? 0 : paginate.page - 1
+      const offset = page * paginate.limit
+      foundLimit = paginate.limit
+      foundOffset = offset
+    } else if (paginate && paginate.limit) {
+      foundLimit = paginate.limit
+    }
+    // start building the query
+    let query: KnexQuery = knex(tableName).limit(foundLimit)
+    if (foundOffset) {
+      query = query.offset(foundOffset)
+    }
+    if (sort) {
+      for (let [key, value] of Object.entries(sort)) {
+        const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
+        query = query.orderBy(key, direction)
+      }
+    }
+    if (this.client === SqlClients.MS_SQL && !sort && paginate?.limit) {
+      // @ts-ignore
+      query = query.orderBy(json.meta?.table?.primary[0])
+    }
+    query = this.addFilters(tableName, query, filters)
+    // @ts-ignore
+    let preQuery: KnexQuery = knex({
+      // @ts-ignore
+      [tableName]: query,
+    }).select(selectStatement)
+    // handle joins
+    return this.addRelationships(
+      knex,
+      preQuery,
+      selectStatement,
+      tableName,
+      relationships
+    )
+  }
+
+  update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+    const { endpoint, body, filters } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    const parsedBody = parseBody(body)
+    query = this.addFilters(endpoint.entityId, query, filters)
+    // mysql can't use returning
+    if (opts.disableReturning) {
+      return query.update(parsedBody)
+    } else {
+      return query.update(parsedBody).returning("*")
+    }
+  }
+
+  delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+    const { endpoint, filters } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    query = this.addFilters(endpoint.entityId, query, filters)
+    // mysql can't use returning
+    if (opts.disableReturning) {
+      return query.delete()
+    } else {
+      return query.delete().returning("*")
+    }
+  }
+}
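Folding the free-standing builder functions into `InternalBuilder` is what lets query construction branch per SQL dialect: `this.client` drives the `ilike`-on-Postgres vs `LOWER(col) LIKE ?` fallback, and the MS SQL-only ORDER BY before pagination. A minimal usage sketch (the exact `SqlClients` constant values live in `./utils` and are not shown in this diff):

```ts
// sketch of how the class is driven per dialect
const builder = new InternalBuilder(SqlClients.MS_SQL)
const query = builder.read(knex({ client: "mssql" }), json, 5000)
// read() orders by the table's primary key when MS SQL is paginated with no
// explicit sort, since OFFSET/FETCH requires an ORDER BY clause
```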
@@ -266,20 +280,23 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     const sqlClient = this.getSqlClient()
     const client = knex({ client: sqlClient })
     let query
+    const builder = new InternalBuilder(sqlClient)
     switch (this._operation(json)) {
       case Operation.CREATE:
-        query = buildCreate(client, json, opts)
+        query = builder.create(client, json, opts)
         break
       case Operation.READ:
-        query = buildRead(client, json, this.limit)
+        query = builder.read(client, json, this.limit)
         break
       case Operation.UPDATE:
-        query = buildUpdate(client, json, opts)
+        query = builder.update(client, json, opts)
         break
       case Operation.DELETE:
-        query = buildDelete(client, json, opts)
+        query = builder.delete(client, json, opts)
         break
-      case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE:
+      case Operation.CREATE_TABLE:
+      case Operation.UPDATE_TABLE:
+      case Operation.DELETE_TABLE:
         return this._tableQuery(json)
       default:
         throw `Operation type is not supported by SQL query builder`
@@ -288,6 +305,85 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     // @ts-ignore
     return query.toSQL().toNative()
   }
+
+  async getReturningRow(queryFn: Function, json: QueryJson) {
+    if (!json.extra || !json.extra.idFilter) {
+      return {}
+    }
+    const input = this._query({
+      endpoint: {
+        ...json.endpoint,
+        operation: Operation.READ,
+      },
+      resource: {
+        fields: [],
+      },
+      filters: json.extra.idFilter,
+      paginate: {
+        limit: 1,
+      },
+      meta: json.meta,
+    })
+    return queryFn(input, Operation.READ)
+  }
+
+  // when creating if an ID has been inserted need to make sure
+  // the id filter is enriched with it before trying to retrieve the row
+  checkLookupKeys(id: any, json: QueryJson) {
+    if (!id || !json.meta?.table || !json.meta.table.primary) {
+      return json
+    }
+    const primaryKey = json.meta.table.primary?.[0]
+    json.extra = {
+      idFilter: {
+        equal: {
+          [primaryKey]: id,
+        },
+      },
+    }
+    return json
+  }
+
+  // this function recreates the returning functionality of postgres
+  async queryWithReturning(
+    json: QueryJson,
+    queryFn: Function,
+    processFn: Function = (result: any) => result
+  ) {
+    const sqlClient = this.getSqlClient()
+    const operation = this._operation(json)
+    const input = this._query(json, { disableReturning: true })
+    if (Array.isArray(input)) {
+      const responses = []
+      for (let query of input) {
+        responses.push(await queryFn(query, operation))
+      }
+      return responses
+    }
+    let row
+    // need to manage returning, a feature mySQL can't do
+    if (operation === Operation.DELETE) {
+      row = processFn(await this.getReturningRow(queryFn, json))
+    }
+    const response = await queryFn(input, operation)
+    const results = processFn(response)
+    // same as delete, manage returning
+    if (operation === Operation.CREATE || operation === Operation.UPDATE) {
+      let id
+      if (sqlClient === SqlClients.MS_SQL) {
+        id = results?.[0].id
+      } else if (sqlClient === SqlClients.MY_SQL) {
+        id = results?.insertId
+      }
+      row = processFn(
+        await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
+      )
+    }
+    if (operation !== Operation.READ) {
+      return row
+    }
+    return results.length ? results : [{ [operation.toLowerCase()]: true }]
+  }
 }

 module.exports = SqlQueryBuilder
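The ordering here matters: a DELETE reads the row via the id filter before it disappears, while CREATE/UPDATE read afterwards, keyed by the identity the database handed back. A worked example with hypothetical values, assuming a table whose primary key column is `id` and a freshly inserted identity of 42:

```ts
// sketch of the lookup-key enrichment queryWithReturning performs
const enriched = sqlBuilder.checkLookupKeys(42, json)
// enriched.extra.idFilter is now { equal: { id: 42 } }
// getReturningRow() then issues a READ with limit 1 against that filter,
// emulating what RETURNING * would have produced on Postgres
```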
@@ -6,7 +6,12 @@ import SchemaBuilder = Knex.SchemaBuilder
 import CreateTableBuilder = Knex.CreateTableBuilder
 const { FieldTypes, RelationshipTypes } = require("../../constants")

-function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record<string, Table>, oldTable: null | Table = null) {
+function generateSchema(
+  schema: CreateTableBuilder,
+  table: Table,
+  tables: Record<string, Table>,
+  oldTable: null | Table = null
+) {
   let primaryKey = table && table.primary ? table.primary[0] : null
   const columns = Object.values(table.schema)
   // all columns in a junction table will be meta
@@ -19,17 +24,21 @@ function generateSchema(
     schema.primary(metaCols.map(col => col.name))
   }

   // check if any columns need added
   const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
   for (let [key, column] of Object.entries(table.schema)) {
     // skip things that are already correct
     const oldColumn = oldTable ? oldTable.schema[key] : null
-    if ((oldColumn && oldColumn.type === column.type) || (primaryKey === key && !isJunction)) {
+    if (
+      (oldColumn && oldColumn.type === column.type) ||
+      (primaryKey === key && !isJunction)
+    ) {
       continue
     }
     switch (column.type) {
-      case FieldTypes.STRING: case FieldTypes.OPTIONS: case FieldTypes.LONGFORM:
+      case FieldTypes.STRING:
+      case FieldTypes.OPTIONS:
+      case FieldTypes.LONGFORM:
         schema.string(key)
         break
       case FieldTypes.NUMBER:
@@ -67,7 +76,9 @@ function generateSchema(
         throw "Referenced table doesn't exist"
       }
       schema.integer(column.foreignKey).unsigned()
-      schema.foreign(column.foreignKey).references(`${tableName}.${relatedTable.primary[0]}`)
+      schema
+        .foreign(column.foreignKey)
+        .references(`${tableName}.${relatedTable.primary[0]}`)
     }
     break
   }
@@ -76,7 +87,10 @@ function generateSchema(
   // need to check if any columns have been deleted
   if (oldTable) {
     const deletedColumns = Object.entries(oldTable.schema)
-      .filter(([key, schema]) => schema.type !== FieldTypes.LINK && table.schema[key] == null)
+      .filter(
+        ([key, schema]) =>
+          schema.type !== FieldTypes.LINK && table.schema[key] == null
+      )
       .map(([key]) => key)
     deletedColumns.forEach(key => {
       if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {
@@ -92,7 +106,7 @@ function generateSchema(
 function buildCreateTable(
   knex: Knex,
   table: Table,
-  tables: Record<string, Table>,
+  tables: Record<string, Table>
 ): SchemaBuilder {
   return knex.schema.createTable(table.name, schema => {
     generateSchema(schema, table, tables)
@@ -103,17 +117,14 @@ function buildUpdateTable(
   knex: Knex,
   table: Table,
   tables: Record<string, Table>,
-  oldTable: Table,
+  oldTable: Table
 ): SchemaBuilder {
   return knex.schema.alterTable(table.name, schema => {
     generateSchema(schema, table, tables, oldTable)
   })
 }

-function buildDeleteTable(
-  knex: Knex,
-  table: Table,
-): SchemaBuilder {
+function buildDeleteTable(knex: Knex, table: Table): SchemaBuilder {
   return knex.schema.dropTable(table.name)
 }

@@ -151,7 +162,12 @@ class SqlTableQueryBuilder {
   if (!json.meta || !json.meta.table) {
     throw "Must specify old table for update"
   }
-  query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table)
+  query = buildUpdateTable(
+    client,
+    json.table,
+    json.meta.tables,
+    json.meta.table
+  )
   break
 case Operation.DELETE_TABLE:
   query = buildDeleteTable(client, json.table)
@@ -164,4 +180,4 @@ class SqlTableQueryBuilder {
 }

 export default SqlTableQueryBuilder
 module.exports = SqlTableQueryBuilder
@@ -4,7 +4,10 @@ import { Datasource } from "../../definitions/common"
 module DatasourceUtils {
   const { integrations } = require("../index")

-  export async function makeExternalQuery(datasource: Datasource, json: QueryJson) {
+  export async function makeExternalQuery(
+    datasource: Datasource,
+    json: QueryJson
+  ) {
     const Integration = integrations[datasource.source]
     // query is the opinionated function
     if (Integration.prototype.query) {
@@ -1,11 +1,20 @@
 import {
-  Integration,
   DatasourceFieldTypes,
-  QueryTypes,
+  Integration,
+  Operation,
   QueryJson,
+  QueryTypes,
   SqlQuery,
 } from "../definitions/datasource"
-import { getSqlQuery } from "./utils"
+import {
+  getSqlQuery,
+  buildExternalTableId,
+  convertSqlType,
+  finaliseExternalTables,
+  SqlClients,
+} from "./utils"
+import { DatasourcePlus } from "./base/datasourcePlus"
+import { Table, TableSchema } from "../definitions/common"

 module MSSQLModule {
   const sqlServer = require("mssql")
@@ -22,6 +31,7 @@ module MSSQLModule {

   const SCHEMA: Integration = {
     docs: "https://github.com/tediousjs/node-mssql",
+    plus: true,
     description:
       "Microsoft SQL Server is a relational database management system developed by Microsoft. ",
     friendlyName: "MS SQL Server",
@@ -69,26 +79,84 @@ module MSSQLModule {
     },
   }

-  async function internalQuery(client: any, query: SqlQuery) {
+  async function internalQuery(
+    client: any,
+    query: SqlQuery,
+    operation: string | undefined = undefined
+  ) {
+    const request = client.request()
     try {
-      return await client.query(query.sql, query.bindings || {})
+      if (Array.isArray(query.bindings)) {
+        let count = 0
+        for (let binding of query.bindings) {
+          request.input(`p${count++}`, binding)
+        }
+      }
+      // this is a hack to get the inserted ID back,
+      // no way to do this with Knex nicely
+      const sql =
+        operation === Operation.CREATE
+          ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
+          : query.sql
+      return await request.query(sql)
     } catch (err) {
       // @ts-ignore
       throw new Error(err)
     }
   }

-  class SqlServerIntegration extends Sql {
+  class SqlServerIntegration extends Sql implements DatasourcePlus {
     private readonly config: MSSQLConfig
     static pool: any
+    public tables: Record<string, Table> = {}
+    public schemaErrors: Record<string, string> = {}
+
+    MASTER_TABLES = [
+      "spt_fallback_db",
+      "spt_fallback_dev",
+      "spt_fallback_usg",
+      "spt_monitor",
+      "MSreplication_options",
+    ]
+    TABLES_SQL =
+      "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"
+
+    getDefinitionSQL(tableName: string) {
+      return `select *
+              from INFORMATION_SCHEMA.COLUMNS
+              where TABLE_NAME='${tableName}'`
+    }
+
+    getConstraintsSQL(tableName: string) {
+      return `SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
+              INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KU
+                ON TC.CONSTRAINT_TYPE = 'PRIMARY KEY'
+                AND TC.CONSTRAINT_NAME = KU.CONSTRAINT_NAME
+                AND KU.table_name='${tableName}'
+              ORDER BY
+                KU.TABLE_NAME,
+                KU.ORDINAL_POSITION;`
+    }
+
+    getAutoColumnsSQL(tableName: string) {
+      return `SELECT
+              COLUMNPROPERTY(OBJECT_ID(TABLE_SCHEMA+'.'+TABLE_NAME),COLUMN_NAME,'IsComputed')
+                AS IS_COMPUTED,
+              COLUMNPROPERTY(object_id(TABLE_SCHEMA+'.'+TABLE_NAME), COLUMN_NAME, 'IsIdentity')
+                AS IS_IDENTITY,
+              *
+              FROM INFORMATION_SCHEMA.COLUMNS
+              WHERE TABLE_NAME='${tableName}'`
+    }

     constructor(config: MSSQLConfig) {
-      super("mssql")
+      super(SqlClients.MS_SQL)
       this.config = config
       const clientCfg = {
         ...this.config,
         options: {
           encrypt: this.config.encrypt,
+          enableArithAbort: true,
         },
       }
       delete clientCfg.encrypt
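The `p0, p1, ...` naming is tied to how Knex renders native MS SQL placeholders. A sketch of why the binding loop works (the `@p0` output format is an assumption about Knex's `toSQL().toNative()` conversion for the mssql dialect):

```ts
// sketch: Knex produces named placeholders for mssql
const native = knex("products").where({ id: 1 }).toSQL().toNative()
// native.sql      -> "select * from [products] where [id] = @p0"
// native.bindings -> [1]
// internalQuery() then registers request.input("p0", 1) so node-mssql can
// resolve @p0 when request.query(native.sql) runs
```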
@ -99,14 +167,75 @@ module MSSQLModule {
|
||||||
|
|
||||||
async connect() {
|
async connect() {
|
||||||
try {
|
try {
|
||||||
const client = await this.pool.connect()
|
this.client = await this.pool.connect()
|
||||||
this.client = client.request()
|
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
throw new Error(err)
|
throw new Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async runSQL(sql: string) {
|
||||||
|
return (await internalQuery(this.client, getSqlQuery(sql))).recordset
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetches the tables from the sql server database and assigns them to the datasource.
|
||||||
|
* @param {*} datasourceId - datasourceId to fetch
|
||||||
|
* @param entities - the tables that are to be built
|
||||||
|
*/
|
||||||
|
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
|
||||||
|
await this.connect()
|
||||||
|
let tableNames = await this.runSQL(this.TABLES_SQL)
|
||||||
|
if (tableNames == null || !Array.isArray(tableNames)) {
|
||||||
|
throw "Unable to get list of tables in database"
|
||||||
|
}
|
||||||
|
tableNames = tableNames
|
||||||
|
.map((record: any) => record.TABLE_NAME)
|
||||||
|
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
|
||||||
|
|
||||||
|
const tables: Record<string, Table> = {}
|
||||||
|
for (let tableName of tableNames) {
|
||||||
|
// get the column definition (type)
|
||||||
|
const definition = await this.runSQL(this.getDefinitionSQL(tableName))
|
||||||
|
// find primary key constraints
|
||||||
|
const constraints = await this.runSQL(this.getConstraintsSQL(tableName))
|
||||||
|
// find the computed and identity columns (auto columns)
|
||||||
|
const columns = await this.runSQL(this.getAutoColumnsSQL(tableName))
|
||||||
|
const primaryKeys = constraints
|
||||||
|
.filter(
|
||||||
|
(constraint: any) => constraint.CONSTRAINT_TYPE === "PRIMARY KEY"
|
||||||
|
)
|
||||||
|
.map((constraint: any) => constraint.COLUMN_NAME)
|
||||||
|
const autoColumns = columns
|
||||||
|
.filter((col: any) => col.IS_COMPUTED || col.IS_IDENTITY)
|
||||||
|
.map((col: any) => col.COLUMN_NAME)
|
||||||
|
|
||||||
|
let schema: TableSchema = {}
|
||||||
|
for (let def of definition) {
|
||||||
|
const name = def.COLUMN_NAME
|
||||||
|
if (typeof name !== "string") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const type: string = convertSqlType(def.DATA_TYPE)
|
||||||
|
|
||||||
|
schema[name] = {
|
||||||
|
autocolumn: !!autoColumns.find((col: string) => col === name),
|
||||||
|
name: name,
|
||||||
|
type,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tables[tableName] = {
|
||||||
|
_id: buildExternalTableId(datasourceId, tableName),
|
||||||
|
primary: primaryKeys,
|
||||||
|
name: tableName,
|
||||||
|
schema,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const final = finaliseExternalTables(tables, entities)
|
||||||
|
this.tables = final.tables
|
||||||
|
this.schemaErrors = final.errors
|
||||||
|
}
|
||||||
|
|
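Note: buildSchema therefore costs one table-list query plus three metadata queries per table, and folds the results into Budibase's external Table shape. A hypothetical result for a users table with an identity primary key (the id string and the type values are illustrative assumptions):

    // Hypothetical output; _id is what buildExternalTableId(...) might produce.
    const usersTable = {
      _id: "datasource_abc123__users",
      name: "users",
      primary: ["id"],
      schema: {
        id: { name: "id", type: "number", autocolumn: true }, // identity column
        name: { name: "name", type: "string", autocolumn: false },
      },
    }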
     async read(query: SqlQuery | string) {
       await this.connect()
       const response = await internalQuery(this.client, getSqlQuery(query))
@@ -132,10 +261,13 @@ module MSSQLModule {
     }

     async query(json: QueryJson) {
-      const operation = this._operation(json).toLowerCase()
-      const input = this._query(json)
-      const response = await internalQuery(this.client, input)
-      return response.recordset ? response.recordset : [{ [operation]: true }]
+      await this.connect()
+      const operation = this._operation(json)
+      const queryFn = (query: any, op: string) =>
+        internalQuery(this.client, query, op)
+      const processFn = (result: any) =>
+        result.recordset ? result.recordset : [{ [operation]: true }]
+      return this.queryWithReturning(json, queryFn, processFn)
     }
   }
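Note: query() now hands the read-after-write bookkeeping to queryWithReturning on the shared Sql base class; the integration only supplies an executor and a response normaliser. The contract this implies, inferred from the call sites in this diff (the real signature lives in ./base/sql and is an assumption here):

    // Inferred contract; names and optionality are assumptions.
    type QueryFn = (
      query: { sql: string; bindings?: any[] },
      operation?: string
    ) => Promise<any>
    type ProcessFn = (result: any) => any
    // queryWithReturning(json, queryFn, processFn?) appears to run the
    // generated SQL and, for writes on engines without a usable RETURNING
    // clause, re-read the affected row so callers always get row data back.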
@@ -2,23 +2,22 @@ import {
   Integration,
   DatasourceFieldTypes,
   QueryTypes,
-  Operation,
   QueryJson,
   SqlQuery,
 } from "../definitions/datasource"
 import { Table, TableSchema } from "../definitions/common"
-import { getSqlQuery } from "./utils"
+import {
+  getSqlQuery,
+  SqlClients,
+  buildExternalTableId,
+  convertSqlType,
+  finaliseExternalTables,
+} from "./utils"
 import { DatasourcePlus } from "./base/datasourcePlus"

 module MySQLModule {
   const mysql = require("mysql2")
   const Sql = require("./base/sql")
-  const {
-    buildExternalTableId,
-    convertType,
-    finaliseExternalTables,
-  } = require("./utils")
-  const { FieldTypes } = require("../constants")

   interface MySQLConfig {
     host: string
@@ -29,30 +28,6 @@ module MySQLModule {
     ssl?: object
   }

-  const TYPE_MAP = {
-    text: FieldTypes.LONGFORM,
-    blob: FieldTypes.LONGFORM,
-    enum: FieldTypes.STRING,
-    varchar: FieldTypes.STRING,
-    float: FieldTypes.NUMBER,
-    int: FieldTypes.NUMBER,
-    numeric: FieldTypes.NUMBER,
-    bigint: FieldTypes.NUMBER,
-    mediumint: FieldTypes.NUMBER,
-    decimal: FieldTypes.NUMBER,
-    dec: FieldTypes.NUMBER,
-    double: FieldTypes.NUMBER,
-    real: FieldTypes.NUMBER,
-    fixed: FieldTypes.NUMBER,
-    smallint: FieldTypes.NUMBER,
-    timestamp: FieldTypes.DATETIME,
-    date: FieldTypes.DATETIME,
-    datetime: FieldTypes.DATETIME,
-    time: FieldTypes.DATETIME,
-    tinyint: FieldTypes.BOOLEAN,
-    json: DatasourceFieldTypes.JSON,
-  }
-
   const SCHEMA: Integration = {
     docs: "https://github.com/mysqljs/mysql",
     plus: true,
@@ -139,7 +114,7 @@ module MySQLModule {
     public schemaErrors: Record<string, string> = {}

     constructor(config: MySQLConfig) {
-      super("mysql")
+      super(SqlClients.MY_SQL)
       this.config = config
       if (config.ssl && Object.keys(config.ssl).length === 0) {
         delete config.ssl
@@ -184,7 +159,7 @@ module MySQLModule {
       schema[columnName] = {
         name: columnName,
         autocolumn: isAuto,
-        type: convertType(column.Type, TYPE_MAP),
+        type: convertSqlType(column.Type),
         constraints,
       }
     }
@@ -223,67 +198,12 @@ module MySQLModule {
       return results.length ? results : [{ deleted: true }]
     }

-    async getReturningRow(json: QueryJson) {
-      if (!json.extra || !json.extra.idFilter) {
-        return {}
-      }
-      const input = this._query({
-        endpoint: {
-          ...json.endpoint,
-          operation: Operation.READ,
-        },
-        fields: [],
-        filters: json.extra.idFilter,
-        paginate: {
-          limit: 1,
-        },
-      })
-      return internalQuery(this.client, input, false)
-    }
-
-    // when creating if an ID has been inserted need to make sure
-    // the id filter is enriched with it before trying to retrieve the row
-    checkLookupKeys(results: any, json: QueryJson) {
-      if (!results?.insertId || !json.meta?.table || !json.meta.table.primary) {
-        return json
-      }
-      const primaryKey = json.meta.table.primary?.[0]
-      json.extra = {
-        idFilter: {
-          equal: {
-            [primaryKey]: results.insertId,
-          },
-        },
-      }
-      return json
-    }
-
     async query(json: QueryJson) {
-      const operation = this._operation(json)
       this.client.connect()
-      const input = this._query(json, { disableReturning: true })
-      if (Array.isArray(input)) {
-        const responses = []
-        for (let query of input) {
-          responses.push(await internalQuery(this.client, query, false))
-        }
-        return responses
-      }
-      let row
-      // need to manage returning, a feature mySQL can't do
-      if (operation === Operation.DELETE) {
-        row = this.getReturningRow(json)
-      }
-      const results = await internalQuery(this.client, input, false)
-      // same as delete, manage returning
-      if (operation === Operation.CREATE || operation === Operation.UPDATE) {
-        row = this.getReturningRow(this.checkLookupKeys(results, json))
-      }
+      const queryFn = (query: any) => internalQuery(this.client, query, false)
+      const output = await this.queryWithReturning(json, queryFn)
       this.client.end()
-      if (operation !== Operation.READ) {
-        return row
-      }
-      return results.length ? results : [{ [operation.toLowerCase()]: true }]
+      return output
     }
   }
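Note: the MySQL-only returning-row helpers (getReturningRow, checkLookupKeys) are deleted rather than moved; the equivalent logic now lives once behind queryWithReturning in ./base/sql, and each engine differs only in its executor. A minimal sketch of the shared flow that replaced them, under the assumption that it mirrors the deleted code:

    // Assumed shape of the shared flow; the real version is in ./base/sql.
    async function returningFlowSketch(
      operation: "CREATE" | "READ" | "UPDATE" | "DELETE",
      runQuery: () => Promise<any>,
      readBackRow: () => Promise<any>
    ) {
      // DELETE must read the row back *before* it disappears
      const rowBefore = operation === "DELETE" ? await readBackRow() : undefined
      const results = await runQuery()
      if (operation === "CREATE" || operation === "UPDATE") {
        return readBackRow() // emulate RETURNING after the write
      }
      return operation === "DELETE" ? rowBefore : results
    }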
@@ -6,18 +6,18 @@ import {
   SqlQuery,
 } from "../definitions/datasource"
 import { Table } from "../definitions/common"
-import { getSqlQuery } from "./utils"
+import {
+  getSqlQuery,
+  buildExternalTableId,
+  convertSqlType,
+  finaliseExternalTables,
+  SqlClients,
+} from "./utils"
 import { DatasourcePlus } from "./base/datasourcePlus"

 module PostgresModule {
   const { Pool } = require("pg")
   const Sql = require("./base/sql")
-  const { FieldTypes } = require("../constants")
-  const {
-    buildExternalTableId,
-    convertType,
-    finaliseExternalTables,
-  } = require("./utils")
   const { escapeDangerousCharacters } = require("../utilities")

   const JSON_REGEX = /'{.*}'::json/s
@@ -97,22 +97,6 @@ module PostgresModule {
     },
   }

-  const TYPE_MAP = {
-    text: FieldTypes.LONGFORM,
-    varchar: FieldTypes.STRING,
-    integer: FieldTypes.NUMBER,
-    bigint: FieldTypes.NUMBER,
-    decimal: FieldTypes.NUMBER,
-    smallint: FieldTypes.NUMBER,
-    real: FieldTypes.NUMBER,
-    "double precision": FieldTypes.NUMBER,
-    timestamp: FieldTypes.DATETIME,
-    time: FieldTypes.DATETIME,
-    boolean: FieldTypes.BOOLEAN,
-    json: FieldTypes.JSON,
-    date: FieldTypes.DATETIME,
-  }
-
   async function internalQuery(client: any, query: SqlQuery) {
     // need to handle a specific issue with json data types in postgres,
     // new lines inside the JSON data will break it
@@ -154,7 +138,7 @@ module PostgresModule {
     `

     constructor(config: PostgresConfig) {
-      super("pg")
+      super(SqlClients.POSTGRES)
       this.config = config

       let newConfig = {
@@ -216,7 +200,7 @@ module PostgresModule {
        }
      }

-      const type: string = convertType(column.data_type, TYPE_MAP)
+      const type: string = convertSqlType(column.data_type)
       const identity = !!(
         column.identity_generation ||
         column.identity_start ||
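Note: swapping the bare client strings for the SqlClients enum gives the three integrations a single source of truth for the identifier handed to the Sql base class (these read like knex client names, though that is an inference rather than something this diff states):

    import { SqlClients } from "./utils"

    // The constructor changes are all of this form:
    //   super("pg")    -->  super(SqlClients.POSTGRES) // === "pg"
    //   super("mysql") -->  super(SqlClients.MY_SQL)   // === "mysql"
    //   super("mssql") -->  super(SqlClients.MS_SQL)   // === "mssql"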
@@ -9,32 +9,39 @@ class TestConfiguration {
 }

 describe("MS SQL Server Integration", () => {
   let config

-  beforeEach(() => {
+  beforeEach(async () => {
     config = new TestConfiguration()
   })

-  it("calls the create method with the correct params", async () => {
-    const sql = "insert into users (name, age) values ('Joe', 123);"
-    const response = await config.integration.create({
-      sql
-    })
-    expect(config.integration.client.query).toHaveBeenCalledWith(sql, {})
-  })
-
-  it("calls the read method with the correct params", async () => {
-    const sql = "select * from users;"
-    const response = await config.integration.read({
-      sql
-    })
-    expect(config.integration.client.query).toHaveBeenCalledWith(sql, {})
-  })
+  describe("check sql used", () => {
+    beforeEach(async () => {
+      await config.integration.connect()
+    })
+
+    it("calls the create method with the correct params", async () => {
+      const sql = "insert into users (name, age) values ('Joe', 123);"
+      const response = await config.integration.create({
+        sql
+      })
+      expect(config.integration.client.request).toHaveBeenCalledWith()
+      expect(response[0]).toEqual(sql)
+    })
+
+    it("calls the read method with the correct params", async () => {
+      const sql = "select * from users;"
+      const response = await config.integration.read({
+        sql
+      })
+      expect(config.integration.client.request).toHaveBeenCalledWith()
+      expect(response[0]).toEqual(sql)
+    })
+  })

   describe("no rows returned", () => {
     beforeEach(async () => {
       await config.integration.connect()
-      config.integration.client.query.mockImplementation(() => ({ rows: [] }))
     })

     it("returns the correct response when the create response has no rows", async () => {
@@ -42,7 +49,7 @@ describe("MS SQL Server Integration", () => {
       const response = await config.integration.create({
         sql
       })
-      expect(response).toEqual([{ created: true }])
+      expect(response[0]).toEqual(sql)
     })
   })
 })
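Note: the reworked spec follows the new client shape. connect() now stores a pooled connection whose request() is mocked, so assertions target client.request rather than client.query, and the mock evidently echoes the SQL back as the first recordset entry. A sketch of the stub TestConfiguration presumably wires up (inferred from the assertions, not shown in this diff):

    // Assumed mock, consistent with expect(response[0]).toEqual(sql).
    const client = {
      request: jest.fn(() => ({
        query: jest.fn(async (sql: string) => ({ recordset: [sql] })),
      })),
    }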
@@ -7,6 +7,39 @@ const { FieldTypes, BuildSchemaErrors } = require("../constants")
 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g

+const SQL_TYPE_MAP = {
+  text: FieldTypes.LONGFORM,
+  varchar: FieldTypes.STRING,
+  integer: FieldTypes.NUMBER,
+  bigint: FieldTypes.NUMBER,
+  decimal: FieldTypes.NUMBER,
+  smallint: FieldTypes.NUMBER,
+  real: FieldTypes.NUMBER,
+  "double precision": FieldTypes.NUMBER,
+  timestamp: FieldTypes.DATETIME,
+  time: FieldTypes.DATETIME,
+  boolean: FieldTypes.BOOLEAN,
+  json: FieldTypes.JSON,
+  date: FieldTypes.DATETIME,
+  blob: FieldTypes.LONGFORM,
+  enum: FieldTypes.STRING,
+  float: FieldTypes.NUMBER,
+  int: FieldTypes.NUMBER,
+  numeric: FieldTypes.NUMBER,
+  mediumint: FieldTypes.NUMBER,
+  dec: FieldTypes.NUMBER,
+  double: FieldTypes.NUMBER,
+  fixed: FieldTypes.NUMBER,
+  datetime: FieldTypes.DATETIME,
+  tinyint: FieldTypes.BOOLEAN,
+}
+
+export enum SqlClients {
+  MS_SQL = "mssql",
+  POSTGRES = "pg",
+  MY_SQL = "mysql",
+}
+
 export function isExternalTable(tableId: string) {
   return tableId.includes(DocumentTypes.DATASOURCE)
 }
@@ -68,8 +101,8 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
   }
 }

-export function convertType(type: string, map: { [key: string]: any }) {
-  for (let [external, internal] of Object.entries(map)) {
+export function convertSqlType(type: string) {
+  for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
     if (type.toLowerCase().includes(external)) {
       return internal
     }
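Note: convertSqlType now closes over the single shared SQL_TYPE_MAP (the union of the old per-engine maps) and matches case-insensitively by substring, so parameterised and vendor-suffixed types still resolve. Because matching is by substring, entry order matters when extending the map; for example "DATETIME2" actually hits the earlier time entry first, which happens to map to the same DATETIME field type:

    import { convertSqlType } from "./utils" // path as used in this diff

    convertSqlType("varchar(255)") // FieldTypes.STRING, via the "varchar" entry
    convertSqlType("DATETIME2") // FieldTypes.DATETIME, lower-cased, matches "time"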
@@ -84,8 +84,7 @@ exports.setInitInfo = ctx => {
 }

 exports.getInitInfo = ctx => {
-  const initInfo = getCookie(ctx, Cookies.Init)
-  ctx.body = initInfo
+  ctx.body = getCookie(ctx, Cookies.Init)
 }

 /**