Formatting, and fixing an issue with MySQL not being able to return a row that it has created, updated or deleted.
This commit is contained in: parent 65f08b27b5, commit 40e06cc5d1
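The core of the MySQL fix shows up in the SQL builder hunks further down: MySQL has no usable `RETURNING` clause, so the builder gains a `disableReturning` option and the integration reads the affected row back with a separate query. Below is a minimal standalone sketch of that builder behaviour, not the project's exact code; it assumes knex and a MySQL driver are installed locally, and the `speakers` table and its column are made up purely for illustration.

```js
// Sketch only: why the SQL builder needs a disableReturning flag.
// Postgres can append `returning("*")` to an insert; MySQL cannot, so the
// caller skips it and fetches the written row with a follow-up query.
const knex = require("knex")({ client: "mysql" })

function buildCreate(knexClient, json, opts = {}) {
  const { endpoint, body } = json
  const query = knexClient(endpoint.entityId).insert(body)
  // mysql can't use returning, so only add it when the caller allows it
  return opts.disableReturning ? query : query.returning("*")
}

const json = { endpoint: { entityId: "speakers" }, body: { name: "Jo" } }
// .toSQL() only builds the statement, it does not need a live connection
console.log(buildCreate(knex, json, { disableReturning: true }).toSQL().sql)
// -> insert into `speakers` (`name`) values (?)
console.log(buildCreate(knex, json).toSQL().sql)
// -> same SQL: the mysql dialect ignores returning(), which is why the
//    integration has to read the row back itself
```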
@@ -2,23 +2,21 @@ import { store } from "./index"
 import { get as svelteGet } from "svelte/store"
 import { removeCookie, Cookies } from "./cookies"
 
-const apiCall = method => async (
-  url,
-  body,
-  headers = { "Content-Type": "application/json" }
-) => {
-  headers["x-budibase-app-id"] = svelteGet(store).appId
-  const json = headers["Content-Type"] === "application/json"
-  const resp = await fetch(url, {
-    method: method,
-    body: json ? JSON.stringify(body) : body,
-    headers,
-  })
-  if (resp.status === 403) {
-    removeCookie(Cookies.Auth)
-  }
-  return resp
-}
+const apiCall =
+  method =>
+  async (url, body, headers = { "Content-Type": "application/json" }) => {
+    headers["x-budibase-app-id"] = svelteGet(store).appId
+    const json = headers["Content-Type"] === "application/json"
+    const resp = await fetch(url, {
+      method: method,
+      body: json ? JSON.stringify(body) : body,
+      headers,
+    })
+    if (resp.status === 403) {
+      removeCookie(Cookies.Auth)
+    }
+    return resp
+  }
 
 export const post = apiCall("POST")
 export const get = apiCall("GET")
@@ -100,9 +100,10 @@ const automationActions = store => ({
   },
   deleteAutomationBlock: block => {
     store.update(state => {
-      const idx = state.selectedAutomation.automation.definition.steps.findIndex(
-        x => x.id === block.id
-      )
+      const idx =
+        state.selectedAutomation.automation.definition.steps.findIndex(
+          x => x.id === block.id
+        )
       state.selectedAutomation.deleteBlock(block.id)
 
       // Select next closest step
@@ -59,9 +59,7 @@
     <section>
       <Heading size="XS">Columns</Heading>
       <ul>
-        {#each context.filter(context =>
-          context.readableBinding.match(searchRgx)
-        ) as { readableBinding }}
+        {#each context.filter( context => context.readableBinding.match(searchRgx) ) as { readableBinding }}
           <li
             on:click={() => {
               value = addToText(value, getCaretPosition(), readableBinding)
@@ -77,9 +75,7 @@
     <section>
       <Heading size="XS">Components</Heading>
       <ul>
-        {#each instance.filter(instance =>
-          instance.readableBinding.match(searchRgx)
-        ) as { readableBinding }}
+        {#each instance.filter( instance => instance.readableBinding.match(searchRgx) ) as { readableBinding }}
           <li on:click={() => addToText(readableBinding)}>
             {readableBinding}
           </li>
@@ -49,9 +49,7 @@
   <div class="section">
     {#each categories as [categoryName, bindings]}
       <Heading size="XS">{categoryName}</Heading>
-      {#each bindings.filter(binding =>
-        binding.label.match(searchRgx)
-      ) as binding}
+      {#each bindings.filter( binding => binding.label.match(searchRgx) ) as binding}
         <div
           class="binding"
           on:click={() => {
@@ -103,8 +103,9 @@
   }
 
   function fetchQueryDefinition(query) {
-    const source = $datasources.list.find(ds => ds._id === query.datasourceId)
-      .source
+    const source = $datasources.list.find(
+      ds => ds._id === query.datasourceId
+    ).source
     return $integrations[source].query[query.queryVerb]
   }
 </script>
@@ -18,8 +18,9 @@
   )
 
   function fetchQueryDefinition(query) {
-    const source = $datasources.list.find(ds => ds._id === query.datasourceId)
-      .source
+    const source = $datasources.list.find(
+      ds => ds._id === query.datasourceId
+    ).source
     return $integrations[source].query[query.queryVerb]
   }
 </script>
@@ -15,19 +15,21 @@
 <section>
   <Layout>
     <header>
-      <svelte:component
-        this={ICONS.BUDIBASE}
-        height="26"
-        width="26"
-      />
+      <svelte:component this={ICONS.BUDIBASE} height="26" width="26" />
       <Heading size="M">Budibase Internal</Heading>
     </header>
-    <Body size="S" grey lh>Budibase internal tables are part of your app, the data will be stored in your apps context.</Body>
+    <Body size="S" grey lh
+      >Budibase internal tables are part of your app, the data will be stored in
+      your apps context.</Body
+    >
     <Divider />
     <Heading size="S">Tables</Heading>
     <div class="table-list">
       {#each $tables.list.filter(table => table.type !== "external") as table}
-        <div class="table-list-item" on:click={$goto(`../../table/${table._id}`)}>
+        <div
+          class="table-list-item"
+          on:click={$goto(`../../table/${table._id}`)}
+        >
           <Body size="S">{table.name}</Body>
           {#if table.primaryDisplay}
             <Body size="S">display column: {table.primaryDisplay}</Body>
@@ -8,8 +8,7 @@
   // and this is the final url (i.e. no selectedTable)
   if (
     !$leftover &&
-    $tables.list.length > 0 &&
-    (!$tables.selected || !$tables.selected._id)
+    $tables.list.length > 0 && (!$tables.selected || !$tables.selected._id)
   ) {
     $goto(`./${$tables.list[0]._id}`)
   }
@@ -9,8 +9,7 @@ export const SOME_QUERY = {
   queryVerb: "read",
   schema: {},
   name: "Speakers",
-  _id:
-    "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
+  _id: "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
   _rev: "2-941f8699eb0adf995f8bd59c99203b26",
   readable: true,
 }
@@ -75,8 +74,7 @@ export const SAVE_QUERY_RESPONSE = {
     },
   },
   name: "Speakers",
-  _id:
-    "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
+  _id: "query_datasource_04b003a7b4a8428eadd3bb2f7eae0255_bcb8ffc6fcbc484e8d63121fc0bf986f",
   _rev: "3-5a64adef494b1e9c793dc91b51ce73c6",
   readable: true,
 }
@@ -59,7 +59,7 @@ async function checkForCronTriggers({ appId, oldAuto, newAuto }) {
 
   const cronTriggerActivated = isLive(newAuto) && !isLive(oldAuto)
 
-  if (cronTriggerRemoved || cronTriggerDeactivated && oldTrigger.cronJobId) {
+  if (cronTriggerRemoved || (cronTriggerDeactivated && oldTrigger.cronJobId)) {
     await triggers.automationQueue.removeRepeatableByKey(oldTrigger.cronJobId)
   }
   // need to create cron job
@@ -6,6 +6,7 @@ const {
   generateRowIdField,
   breakRowIdField,
 } = require("../../../integrations/utils")
+const { cloneDeep } = require("lodash/fp")
 
 function inputProcessing(row, table) {
   if (!row) {
@@ -42,6 +43,8 @@ function outputProcessing(rows, table) {
 
 function buildFilters(id, filters, table) {
   const primary = table.primary
+  // if passed in array need to copy for shifting etc
+  let idCopy = cloneDeep(id)
   if (filters) {
     // need to map over the filters and make sure the _id field isn't present
     for (let filter of Object.values(filters)) {
@@ -56,17 +59,17 @@ function buildFilters(id, filters, table) {
     }
   }
   // there is no id, just use the user provided filters
-  if (!id || !table) {
+  if (!idCopy || !table) {
     return filters
   }
   // if used as URL parameter it will have been joined
-  if (typeof id === "string") {
-    id = breakRowIdField(id)
+  if (typeof idCopy === "string") {
+    idCopy = breakRowIdField(idCopy)
   }
   const equal = {}
   for (let field of primary) {
     // work through the ID and get the parts
-    equal[field] = id.shift()
+    equal[field] = idCopy.shift()
   }
   return {
     equal,
@@ -86,6 +89,8 @@ async function handleRequest(
   }
   // clean up row on ingress using schema
   filters = buildFilters(id, filters, table)
+  // get the id after building filters, but before it is removed from the row
+  id = id || (row ? row._id : null)
   row = inputProcessing(row, table)
   if (
     operation === DataSourceOperation.DELETE &&
@@ -107,6 +112,10 @@ async function handleRequest(
     sort,
     paginate,
     body: row,
+    // pass an id filter into extra, purely for mysql/returning
+    extra: {
+      idFilter: buildFilters(id, {}, table),
+    },
   }
   // can't really use response right now
   const response = await makeExternalQuery(appId, json)
@@ -167,9 +176,14 @@ exports.destroy = async ctx => {
   const appId = ctx.appId
   const tableId = ctx.params.tableId
   const id = ctx.request.body._id
-  const { row } = await handleRequest(appId, DataSourceOperation.DELETE, tableId, {
-    id,
-  })
+  const { row } = await handleRequest(
+    appId,
+    DataSourceOperation.DELETE,
+    tableId,
+    {
+      id,
+    }
+  )
   return { response: { ok: true }, row }
 }
 
@@ -185,8 +199,8 @@ exports.bulkDestroy = async ctx => {
       })
     )
   }
-  await Promise.all(promises)
-  return { response: { ok: true }, rows }
+  const responses = await Promise.all(promises)
+  return { response: { ok: true }, rows: responses.map(resp => resp.row) }
 }
 
 exports.search = async ctx => {
@@ -227,14 +241,19 @@ exports.search = async ctx => {
   })
   let hasNextPage = false
   if (paginate && rows.length === limit) {
-    const nextRows = await handleRequest(appId, DataSourceOperation.READ, tableId, {
-      filters: query,
-      sort,
-      paginate: {
-        limit: 1,
-        page: (bookmark * limit) + 1,
-      },
-    })
+    const nextRows = await handleRequest(
+      appId,
+      DataSourceOperation.READ,
+      tableId,
+      {
+        filters: query,
+        sort,
+        paginate: {
+          limit: 1,
+          page: bookmark * limit + 1,
+        },
+      }
+    )
     hasNextPage = nextRows.length > 0
   }
   // need wrapper object for bookmarks etc when paginating
@@ -55,17 +55,24 @@ function addFilters(query, filters) {
   return query
 }
 
-// function buildRelationships() {}
-
-function buildCreate(knex, json) {
+function buildCreate(knex, json, opts) {
   const { endpoint, body } = json
   let query = knex(endpoint.entityId)
-  return query.insert(body).returning("*")
+  // mysql can't use returning
+  if (opts.disableReturning) {
+    return query.insert(body)
+  } else {
+    return query.insert(body).returning("*")
+  }
 }
 
 function buildRead(knex, json, limit) {
-  const { endpoint, resource, filters, sort, paginate } = json
+  let { endpoint, resource, filters, sort, paginate } = json
   let query = knex(endpoint.entityId)
+  // select all if not specified
+  if (!resource) {
+    resource = { fields: [] }
+  }
   // handle select
   if (resource.fields && resource.fields.length > 0) {
     query = query.select(resource.fields)
@@ -94,18 +101,28 @@ function buildRead(knex, json, limit) {
   return query
 }
 
-function buildUpdate(knex, json) {
+function buildUpdate(knex, json, opts) {
   const { endpoint, body, filters } = json
   let query = knex(endpoint.entityId)
   query = addFilters(query, filters)
-  return query.update(body).returning("*")
+  // mysql can't use returning
+  if (opts.disableReturning) {
+    return query.update(body)
+  } else {
+    return query.update(body).returning("*")
+  }
 }
 
-function buildDelete(knex, json) {
+function buildDelete(knex, json, opts) {
   const { endpoint, filters } = json
   let query = knex(endpoint.entityId)
   query = addFilters(query, filters)
-  return query.delete().returning("*")
+  // mysql can't use returning
+  if (opts.disableReturning) {
+    return query.delete()
+  } else {
+    return query.delete().returning("*")
+  }
 }
 
 class SqlQueryBuilder {
@@ -115,28 +132,38 @@ class SqlQueryBuilder {
     this._limit = limit
   }
 
+  /**
+   * @param json the input JSON structure from which an SQL query will be built.
+   * @return {string} the operation that was found in the JSON.
+   */
   _operation(json) {
     if (!json || !json.endpoint) {
-      return null
+      return ""
     }
     return json.endpoint.operation
   }
 
-  _query(json) {
+  /**
+   * @param json The JSON query DSL which is to be converted to SQL.
+   * @param opts extra options which are to be passed into the query builder, e.g. disableReturning
+   * which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
+   * @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
+   */
+  _query(json, opts = {}) {
     const knex = require("knex")({ client: this._client })
     let query
     switch (this._operation(json)) {
       case DataSourceOperation.CREATE:
-        query = buildCreate(knex, json)
+        query = buildCreate(knex, json, opts)
         break
       case DataSourceOperation.READ:
-        query = buildRead(knex, json, this._limit)
+        query = buildRead(knex, json, this._limit, opts)
         break
       case DataSourceOperation.UPDATE:
-        query = buildUpdate(knex, json)
+        query = buildUpdate(knex, json, opts)
        break
      case DataSourceOperation.DELETE:
-        query = buildDelete(knex, json)
+        query = buildDelete(knex, json, opts)
        break
      default:
        throw `Operation type is not supported by SQL query builder`
@@ -2,8 +2,7 @@ const { Client } = require("@elastic/elasticsearch")
 const { QUERY_TYPES, FIELD_TYPES } = require("./Integration")
 
 const SCHEMA = {
-  docs:
-    "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
+  docs: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
   description:
     "Elasticsearch is a search engine based on the Lucene library. It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.",
   friendlyName: "ElasticSearch",
@@ -3,6 +3,7 @@ const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
 const Sql = require("./base/sql")
 const { buildExternalTableId, convertType } = require("./utils")
 const { FieldTypes } = require("../constants")
+const { Operation } = require("./base/constants")
 
 const TYPE_MAP = {
   text: FieldTypes.LONGFORM,
@@ -101,19 +102,6 @@ function internalQuery(client, query, connect = true) {
 }
 
 class MySQLIntegration extends Sql {
-  GET_TABLES_SQL =
-    "select * from information_schema.columns where table_schema = 'public'"
-
-  PRIMARY_KEYS_SQL = `
-  select tc.table_schema, tc.table_name, kc.column_name as primary_key
-  from information_schema.table_constraints tc
-  join
-    information_schema.key_column_usage kc on kc.table_name = tc.table_name
-    and kc.table_schema = tc.table_schema
-    and kc.constraint_name = tc.constraint_name
-  where tc.constraint_type = 'PRIMARY KEY';
-  `
-
   constructor(config) {
     super("mysql")
     this.config = config
@@ -134,7 +122,11 @@ class MySQLIntegration extends Sql {
     for (let tableName of tableNames) {
       const primaryKeys = []
       const schema = {}
-      const descResp = await internalQuery(this.client, `DESCRIBE ${tableName};`, false)
+      const descResp = await internalQuery(
+        this.client,
+        `DESCRIBE ${tableName};`,
+        false
+      )
       for (let column of descResp) {
         const columnName = column.Field
         if (column.Key === "PRI") {
@@ -187,11 +179,40 @@ class MySQLIntegration extends Sql {
     return results.length ? results : [{ deleted: true }]
   }
 
+  async getReturningRow(json) {
+    const input = this._query({
+      endpoint: {
+        ...json.endpoint,
+        operation: Operation.READ,
+      },
+      fields: [],
+      filters: json.extra.idFilter,
+      paginate: {
+        limit: 1,
+      },
+    })
+    return internalQuery(this.client, input, false)
+  }
+
   async query(json) {
-    const operation = this._operation(json).toLowerCase()
-    const input = this._query(json)
-    const results = await internalQuery(this.client, input)
-    return results.length ? results : [{ [operation]: true }]
+    const operation = this._operation(json)
+    this.client.connect()
+    const input = this._query(json, { disableReturning: true })
+    let row
+    // need to manage returning, a feature mySQL can't do
+    if (operation === Operation.DELETE) {
+      row = this.getReturningRow(json)
+    }
+    const results = await internalQuery(this.client, input, false)
+    // same as delete, manage returning
+    if (operation === Operation.CREATE || operation === Operation.UPDATE) {
+      row = this.getReturningRow(json)
+    }
+    this.client.end()
+    if (operation !== Operation.READ) {
+      return row
+    }
+    return results.length ? results : [{ [operation.toLowerCase()]: true }]
   }
 }
 
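In plainer terms, the `query`/`getReturningRow` change above emulates `RETURNING` by issuing an extra limited `SELECT` keyed on the id filter that `handleRequest` now passes through `json.extra.idFilter`: before the write for deletes (the row is gone afterwards), after the write for creates and updates. A simplified, hypothetical sketch of that flow follows; `runSql` stands in for the real driver call and the bare string operations stand in for the `Operation` constants, neither is part of the codebase.

```js
// Hypothetical distillation of the returning-row workaround for MySQL.
async function writeWithReturning(runSql, sqlBuilder, json, operation) {
  const readBack = () =>
    runSql(
      sqlBuilder._query({
        endpoint: { ...json.endpoint, operation: "READ" },
        filters: json.extra.idFilter,
        paginate: { limit: 1 },
      })
    )

  let row
  if (operation === "DELETE") {
    // capture the row before it disappears
    row = await readBack()
  }
  await runSql(sqlBuilder._query(json, { disableReturning: true }))
  if (operation === "CREATE" || operation === "UPDATE") {
    // the row only exists (or has its new values) after the write
    row = await readBack()
  }
  return row
}
```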
@@ -14,50 +14,52 @@ const WEBHOOK_ENDPOINTS = new RegExp(
   ["webhooks/trigger", "webhooks/schema"].join("|")
 )
 
-module.exports = (permType, permLevel = null) => async (ctx, next) => {
-  // webhooks don't need authentication, each webhook unique
-  if (WEBHOOK_ENDPOINTS.test(ctx.request.url)) {
-    return next()
-  }
-
-  if (!ctx.user) {
-    return ctx.throw(403, "No user info found")
-  }
-
-  // check general builder stuff, this middleware is a good way
-  // to find API endpoints which are builder focused
-  await builderMiddleware(ctx, permType)
-
-  const isAuthed = ctx.isAuthenticated
-  const { basePermissions, permissions } = await getUserPermissions(
-    ctx.appId,
-    ctx.roleId
-  )
-
-  // builders for now have permission to do anything
-  // TODO: in future should consider separating permissions with an require("@budibase/auth").isClient check
-  let isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
-  const isBuilderApi = permType === PermissionTypes.BUILDER
-  if (isBuilder) {
-    return next()
-  } else if (isBuilderApi && !isBuilder) {
-    return ctx.throw(403, "Not Authorized")
-  }
-
-  if (
-    hasResource(ctx) &&
-    doesHaveResourcePermission(permissions, permLevel, ctx)
-  ) {
-    return next()
-  }
-
-  if (!isAuthed) {
-    ctx.throw(403, "Session not authenticated")
-  }
-
-  if (!doesHaveBasePermission(permType, permLevel, basePermissions)) {
-    ctx.throw(403, "User does not have permission")
-  }
-
-  return next()
-}
+module.exports =
+  (permType, permLevel = null) =>
+  async (ctx, next) => {
+    // webhooks don't need authentication, each webhook unique
+    if (WEBHOOK_ENDPOINTS.test(ctx.request.url)) {
+      return next()
+    }
+
+    if (!ctx.user) {
+      return ctx.throw(403, "No user info found")
+    }
+
+    // check general builder stuff, this middleware is a good way
+    // to find API endpoints which are builder focused
+    await builderMiddleware(ctx, permType)
+
+    const isAuthed = ctx.isAuthenticated
+    const { basePermissions, permissions } = await getUserPermissions(
+      ctx.appId,
+      ctx.roleId
+    )
+
+    // builders for now have permission to do anything
+    // TODO: in future should consider separating permissions with an require("@budibase/auth").isClient check
+    let isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
+    const isBuilderApi = permType === PermissionTypes.BUILDER
+    if (isBuilder) {
+      return next()
+    } else if (isBuilderApi && !isBuilder) {
+      return ctx.throw(403, "Not Authorized")
+    }
+
+    if (
+      hasResource(ctx) &&
+      doesHaveResourcePermission(permissions, permLevel, ctx)
+    ) {
+      return next()
+    }
+
+    if (!isAuthed) {
+      ctx.throw(403, "Session not authenticated")
+    }
+
+    if (!doesHaveBasePermission(permType, permLevel, basePermissions)) {
+      ctx.throw(403, "User does not have permission")
+    }
+
+    return next()
+  }
@@ -1,9 +1,5 @@
-const {
-  getAppId,
-  setCookie,
-  getCookie,
-  clearCookie,
-} = require("@budibase/auth").utils
+const { getAppId, setCookie, getCookie, clearCookie } =
+  require("@budibase/auth").utils
 const { Cookies } = require("@budibase/auth").constants
 const { getRole } = require("@budibase/auth/roles")
 const { getGlobalSelf } = require("../utilities/workerRequests")
@@ -90,15 +90,17 @@ const numericalConstraint = (constraint, error) => value => {
   return null
 }
 
-const inclusionConstraint = (options = []) => value => {
-  if (value == null || value === "") {
-    return null
-  }
-  if (!options.includes(value)) {
-    return "Invalid value"
-  }
-  return null
-}
+const inclusionConstraint =
+  (options = []) =>
+  value => {
+    if (value == null || value === "") {
+      return null
+    }
+    if (!options.includes(value)) {
+      return "Invalid value"
+    }
+    return null
+  }
 
 const dateConstraint = (dateString, isEarliest) => {
   const dateLimit = Date.parse(dateString)
@@ -5,15 +5,8 @@ const authPkg = require("@budibase/auth")
 const GLOBAL_DB = authPkg.StaticDatabases.GLOBAL.name
 
 exports.sendEmail = async ctx => {
-  const {
-    groupId,
-    email,
-    userId,
-    purpose,
-    contents,
-    from,
-    subject,
-  } = ctx.request.body
+  const { groupId, email, userId, purpose, contents, from, subject } =
+    ctx.request.body
   let user
   if (userId) {
     const db = new CouchDB(GLOBAL_DB)
@@ -1,9 +1,6 @@
 const CouchDB = require("../../../db")
-const {
-  getGroupParams,
-  generateGroupID,
-  StaticDatabases,
-} = require("@budibase/auth").db
+const { getGroupParams, generateGroupID, StaticDatabases } =
+  require("@budibase/auth").db
 
 const GLOBAL_DB = StaticDatabases.GLOBAL.name
 
@@ -1,9 +1,6 @@
 const CouchDB = require("../../../db")
-const {
-  generateGlobalUserID,
-  getGlobalUserParams,
-  StaticDatabases,
-} = require("@budibase/auth").db
+const { generateGlobalUserID, getGlobalUserParams, StaticDatabases } =
+  require("@budibase/auth").db
 const { hash, getGlobalUserByEmail } = require("@budibase/auth").utils
 const { UserStatus, EmailTemplatePurpose } = require("../../../constants")
 const { checkInviteCode } = require("../../../utilities/redis")