Merge branch 'develop' of github.com:Budibase/budibase into copy-paste-improvements
Commit: fe95844569
@@ -49,7 +49,7 @@ http {
     add_header X-Frame-Options SAMEORIGIN always;
     add_header X-Content-Type-Options nosniff always;
     add_header X-XSS-Protection "1; mode=block" always;
-    add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io; font-src 'self' data https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com; frame-src 'self' https:; img-src http: https: data; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;
+    add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io; style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com; object-src 'none'; base-uri 'self'; connect-src 'self' https://api-iam.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io ; font-src 'self' data https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com; frame-src 'self' https:; img-src http: https: data; manifest-src 'self'; media-src 'self'; worker-src 'none';" always;

     # upstreams
     set $apps {{ apps }};
@@ -1,5 +1,5 @@
 {
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/backend-core",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "description": "Budibase backend core libraries used in server and worker",
   "main": "src/index.js",
   "author": "Budibase",
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",

@@ -38,7 +38,7 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "^1.2.1",
-    "@budibase/string-templates": "^1.0.80-alpha.1",
+    "@budibase/string-templates": "^1.0.81-alpha.0",
     "@spectrum-css/actionbutton": "^1.0.1",
     "@spectrum-css/actiongroup": "^1.0.1",
     "@spectrum-css/avatar": "^3.0.2",
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {

@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.80-alpha.1",
-    "@budibase/client": "^1.0.80-alpha.1",
-    "@budibase/frontend-core": "^1.0.80-alpha.1",
-    "@budibase/string-templates": "^1.0.80-alpha.1",
+    "@budibase/bbui": "^1.0.81-alpha.0",
+    "@budibase/client": "^1.0.81-alpha.0",
+    "@budibase/frontend-core": "^1.0.81-alpha.0",
+    "@budibase/string-templates": "^1.0.81-alpha.0",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
@@ -331,7 +331,9 @@ const getSelectedRowsBindings = asset => {
     bindings = bindings.concat(
       tables.map(table => ({
         type: "context",
-        runtimeBinding: `${safeState}.${makePropSafe(table._id)}`,
+        runtimeBinding: `${safeState}.${makePropSafe(table._id)}.${makePropSafe(
+          "selectedRows"
+        )}`,
         readableBinding: `${table._instanceName}.Selected rows`,
       }))
     )

@@ -343,7 +345,9 @@ const getSelectedRowsBindings = asset => {
     bindings = bindings.concat(
       tableBlocks.map(block => ({
         type: "context",
-        runtimeBinding: `${safeState}.${makePropSafe(block._id + "-table")}`,
+        runtimeBinding: `${safeState}.${makePropSafe(
+          block._id + "-table"
+        )}.${makePropSafe("selectedRows")}`,
         readableBinding: `${block._instanceName}.Selected rows`,
       }))
     )
@@ -20,7 +20,9 @@
       $goto(`./datasource/${resp._id}`)
       notifications.success(`Datasource updated successfully.`)
     } catch (err) {
-      notifications.error("Error saving datasource")
+      notifications.error(err?.message ?? "Error saving datasource")
+      // prevent the modal from closing
+      return false
     }
   }

@@ -134,8 +134,9 @@

   // Remove all iframe event listeners on component destroy
   onDestroy(() => {
+    window.removeEventListener("message", receiveMessage)
+
     if (iframe.contentWindow) {
-      window.removeEventListener("message", receiveMessage)
       if (!$store.clientFeatures.messagePassing) {
         // Legacy - remove in later versions of BB
         iframe.contentWindow.removeEventListener(
@@ -0,0 +1,71 @@
+<script>
+  import { Label, Select, Body } from "@budibase/bbui"
+  import { tables } from "stores/backend"
+  import { onMount } from "svelte"
+
+  export let parameters
+  $: tableOptions = $tables.list || []
+
+  const FORMATS = [
+    {
+      label: "CSV",
+      value: "csv",
+    },
+    {
+      label: "JSON",
+      value: "json",
+    },
+  ]
+
+  onMount(() => {
+    if (!parameters.type) {
+      parameters.type = "csv"
+    }
+  })
+</script>
+
+<div class="root">
+  <Body size="S">
+    Choose the table that you would like to export your row selection from.
+    <br />
+    Please ensure you have enabled row selection in the table settings
+  </Body>
+
+  <div class="params">
+    <Label small>Table</Label>
+    <Select
+      bind:value={parameters.tableId}
+      options={tableOptions}
+      getOptionLabel={option => option.name}
+      getOptionValue={option => option._id}
+    />
+
+    <Label small>Type</Label>
+    <Select bind:value={parameters.type} options={FORMATS} />
+  </div>
+</div>
+
+<style>
+  .root {
+    width: 100%;
+    max-width: 800px;
+    margin: 0 auto;
+    display: flex;
+    flex-direction: column;
+    justify-content: flex-start;
+    align-items: stretch;
+    gap: var(--spacing-xl);
+  }
+
+  .root :global(p) {
+    line-height: 1.5;
+  }
+
+  .params {
+    display: grid;
+    column-gap: var(--spacing-l);
+    row-gap: var(--spacing-s);
+    grid-template-columns: 100px 1fr;
+    align-items: center;
+  }
+</style>
@@ -12,3 +12,4 @@ export { default as UpdateState } from "./UpdateState.svelte"
 export { default as RefreshDataProvider } from "./RefreshDataProvider.svelte"
 export { default as DuplicateRow } from "./DuplicateRow.svelte"
 export { default as S3Upload } from "./S3Upload.svelte"
+export { default as ExportData } from "./ExportData.svelte"
@@ -80,6 +80,10 @@
           "value": "publicUrl"
         }
       ]
+    },
+    {
+      "name": "Export Data",
+      "component": "ExportData"
     }
   ]
 }
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",

@@ -19,25 +19,12 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^1.0.80-alpha.1",
-    "@budibase/frontend-core": "^1.0.80-alpha.1",
-    "@budibase/string-templates": "^1.0.80-alpha.1",
-    "@spectrum-css/button": "^3.0.3",
-    "@spectrum-css/card": "^3.0.3",
-    "@spectrum-css/divider": "^1.0.3",
-    "@spectrum-css/link": "^3.1.3",
-    "@spectrum-css/page": "^3.0.1",
-    "@spectrum-css/tag": "^3.1.4",
-    "@spectrum-css/typography": "^3.0.2",
-    "@spectrum-css/vars": "^3.0.1",
-    "apexcharts": "^3.22.1",
-    "dayjs": "^1.10.5",
+    "@budibase/bbui": "^1.0.81-alpha.0",
+    "@budibase/frontend-core": "^1.0.81-alpha.0",
+    "@budibase/string-templates": "^1.0.81-alpha.0",
     "regexparam": "^1.3.0",
     "rollup-plugin-polyfill-node": "^0.8.0",
     "shortid": "^2.2.15",
-    "svelte": "^3.38.2",
-    "svelte-apexcharts": "^1.0.2",
-    "svelte-flatpickr": "^3.1.0",
     "svelte-spa-router": "^3.0.5"
   },
   "devDependencies": {
@@ -81,7 +81,10 @@
       loading = false
       return res
     } catch (error) {
-      notificationStore.actions.error(`Error uploading file: ${error}`)
+      notificationStore.actions.error(
+        `Error uploading file: ${error?.message || error}`
+      )
+      loading = false
     }
   }

@@ -42,6 +42,7 @@
   $: {
     rowSelectionStore.actions.updateSelection(
       $component.id,
+      selectedRows.length ? selectedRows[0].tableId : "",
       selectedRows.map(row => row._id)
     )
   }
@@ -1,20 +1,29 @@
-import { writable } from "svelte/store"
+import { get, writable } from "svelte/store"

 const createRowSelectionStore = () => {
   const store = writable({})

-  function updateSelection(componentId, selectedRows) {
+  function updateSelection(componentId, tableId, selectedRows) {
     store.update(state => {
-      state[componentId] = [...selectedRows]
+      state[componentId] = { tableId: tableId, selectedRows: selectedRows }
       return state
     })
   }

+  function getSelection(tableId) {
+    const selection = get(store)
+    const componentId = Object.keys(selection).find(
+      componentId => selection[componentId].tableId === tableId
+    )
+    return componentId ? selection[componentId] : {}
+  }
+
   return {
     subscribe: store.subscribe,
     set: store.set,
     actions: {
       updateSelection,
+      getSelection,
     },
   }
 }
@@ -1,4 +1,5 @@
 import { get } from "svelte/store"
+import download from "downloadjs"
 import {
   routeStore,
   builderStore,

@@ -8,6 +9,7 @@ import {
   notificationStore,
   dataSourceStore,
   uploadStore,
+  rowSelectionStore,
 } from "stores"
 import { API } from "api"
 import { ActionTypes } from "constants"

@@ -239,6 +241,26 @@ const s3UploadHandler = async action => {
   }
 }

+const exportDataHandler = async action => {
+  let selection = rowSelectionStore.actions.getSelection(
+    action.parameters.tableId
+  )
+  if (selection.selectedRows && selection.selectedRows.length > 0) {
+    try {
+      const data = await API.exportRows({
+        tableId: selection.tableId,
+        rows: selection.selectedRows,
+      })
+
+      download(JSON.stringify(data), `export.${action.parameters.type}`)
+    } catch (error) {
+      notificationStore.actions.error("There was an error exporting the data")
+    }
+  } else {
+    notificationStore.actions.error("Please select at least one row")
+  }
+}
+
 const handlerMap = {
   ["Save Row"]: saveRowHandler,
   ["Duplicate Row"]: duplicateRowHandler,

@@ -254,6 +276,7 @@ const handlerMap = {
   ["Change Form Step"]: changeFormStepHandler,
   ["Update State"]: updateStateHandler,
   ["Upload File to S3"]: s3UploadHandler,
+  ["Export Data"]: exportDataHandler,
 }

 const confirmTextMap = {
File diff suppressed because one or more lines are too long
@@ -464,6 +464,11 @@ domutils@^2.6.0:
     domelementtype "^2.2.0"
     domhandler "^4.2.0"

+downloadjs@1.4.7:
+  version "1.4.7"
+  resolved "https://registry.yarnpkg.com/downloadjs/-/downloadjs-1.4.7.tgz#f69f96f940e0d0553dac291139865a3cd0101e3c"
+  integrity sha1-9p+W+UDg0FU9rCkROYZaPNAQHjw=
+
 electron-to-chromium@^1.3.896:
   version "1.3.900"
   resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.900.tgz#5be2c5818a2a012c511b4b43e87b6ab7a296d4f5"
@@ -1,12 +1,12 @@
 {
   "name": "@budibase/frontend-core",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "description": "Budibase frontend core libraries used in builder and client",
   "author": "Budibase",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "dependencies": {
-    "@budibase/bbui": "^1.0.80-alpha.1",
+    "@budibase/bbui": "^1.0.81-alpha.0",
     "lodash": "^4.17.21",
     "svelte": "^3.46.2"
   }
@@ -1,61 +1,66 @@
-export const buildAttachmentEndpoints = API => ({
-  /**
-   * Uploads an attachment to the server.
-   * @param data the attachment to upload
-   * @param tableId the table ID to upload to
-   */
-  uploadAttachment: async ({ data, tableId }) => {
-    return await API.post({
-      url: `/api/attachments/${tableId}/upload`,
-      body: data,
-      json: false,
-    })
-  },
-
-  /**
-   * Uploads an attachment to the server as a builder user from the builder.
-   * @param data the data to upload
-   */
-  uploadBuilderAttachment: async data => {
-    return await API.post({
-      url: "/api/attachments/process",
-      body: data,
-      json: false,
-    })
-  },
-
+export const buildAttachmentEndpoints = API => {
   /**
    * Generates a signed URL to upload a file to an external datasource.
    * @param datasourceId the ID of the datasource to upload to
    * @param bucket the name of the bucket to upload to
    * @param key the name of the file to upload to
    */
-  getSignedDatasourceURL: async ({ datasourceId, bucket, key }) => {
+  const getSignedDatasourceURL = async ({ datasourceId, bucket, key }) => {
     return await API.post({
       url: `/api/attachments/${datasourceId}/url`,
       body: { bucket, key },
     })
-  },
+  }

-  /**
-   * Uploads a file to an external datasource.
-   * @param datasourceId the ID of the datasource to upload to
-   * @param bucket the name of the bucket to upload to
-   * @param key the name of the file to upload to
-   * @param data the file to upload
-   */
-  externalUpload: async ({ datasourceId, bucket, key, data }) => {
-    const { signedUrl, publicUrl } = await API.getSignedDatasourceURL({
-      datasourceId,
-      bucket,
-      key,
-    })
-    await API.put({
-      url: signedUrl,
-      body: data,
-      json: false,
-      external: true,
-    })
-    return { publicUrl }
-  },
-})
+  return {
+    getSignedDatasourceURL,
+
+    /**
+     * Uploads an attachment to the server.
+     * @param data the attachment to upload
+     * @param tableId the table ID to upload to
+     */
+    uploadAttachment: async ({ data, tableId }) => {
+      return await API.post({
+        url: `/api/attachments/${tableId}/upload`,
+        body: data,
+        json: false,
+      })
+    },
+
+    /**
+     * Uploads an attachment to the server as a builder user from the builder.
+     * @param data the data to upload
+     */
+    uploadBuilderAttachment: async data => {
+      return await API.post({
+        url: "/api/attachments/process",
+        body: data,
+        json: false,
+      })
+    },
+
+    /**
+     * Uploads a file to an external datasource.
+     * @param datasourceId the ID of the datasource to upload to
+     * @param bucket the name of the bucket to upload to
+     * @param key the name of the file to upload to
+     * @param data the file to upload
+     */
+    externalUpload: async ({ datasourceId, bucket, key, data }) => {
+      console.log(API)
+      const { signedUrl, publicUrl } = await getSignedDatasourceURL({
+        datasourceId,
+        bucket,
+        key,
+      })
+      await API.put({
+        url: signedUrl,
+        body: data,
+        json: false,
+        external: true,
+      })
+      return { publicUrl }
+    },
+  }
+}
@@ -60,4 +60,18 @@ export const buildRowEndpoints = API => ({
       },
     })
   },
+
+  /**
+   * Exports rows.
+   * @param tableId the table ID to export the rows from
+   * @param rows the array of rows to export
+   */
+  exportRows: async ({ tableId, rows }) => {
+    return await API.post({
+      url: `/api/${tableId}/rows/exportRows`,
+      body: {
+        rows,
+      },
+    })
+  },
 })
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "description": "Budibase Web Server",
   "main": "src/index.ts",
   "repository": {

@@ -71,9 +71,9 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "^10.0.3",
-    "@budibase/backend-core": "^1.0.80-alpha.1",
-    "@budibase/client": "^1.0.80-alpha.1",
-    "@budibase/string-templates": "^1.0.80-alpha.1",
+    "@budibase/backend-core": "^1.0.81-alpha.0",
+    "@budibase/client": "^1.0.81-alpha.0",
+    "@budibase/string-templates": "^1.0.81-alpha.0",
     "@bull-board/api": "^3.7.0",
     "@bull-board/koa": "^3.7.0",
     "@elastic/elasticsearch": "7.10.0",
@@ -1,5 +1,10 @@
 USE master;

+IF NOT EXISTS(SELECT 1 FROM sys.schemas WHERE name = 'Chains')
+BEGIN
+EXEC sys.sp_executesql N'CREATE SCHEMA Chains;'
+END
+
 IF OBJECT_ID ('dbo.products', 'U') IS NOT NULL
 DROP TABLE products;
 GO

@@ -61,3 +66,15 @@ VALUES ('Bob', '30'),
        ('Bobert', '99'),
        ('Jan', '22'),
        ('Megan', '11');
+
+
+IF OBJECT_ID ('Chains.sizes', 'U') IS NOT NULL
+DROP TABLE Chains.sizes;
+GO
+CREATE TABLE Chains.sizes
+(
+  sizeid int IDENTITY(1, 1),
+  name varchar(30),
+  CONSTRAINT pk_size PRIMARY KEY NONCLUSTERED (sizeid)
+);
+
@@ -1,5 +1,6 @@
 SELECT 'CREATE DATABASE main'
 WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE SCHEMA test;
 CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer');
 CREATE TABLE Persons (
   PersonID SERIAL PRIMARY KEY,

@@ -37,6 +38,10 @@ CREATE TABLE Products_Tasks (
   REFERENCES Tasks(TaskID),
   PRIMARY KEY (ProductID, TaskID)
 );
+CREATE TABLE test.table1 (
+  id SERIAL PRIMARY KEY,
+  Name varchar(255)
+);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa');
 INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer');
 INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE);

@@ -48,3 +53,4 @@ INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1);
 INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1);
 INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1);
 INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2);
+INSERT INTO test.table1 (Name) VALUES ('Test');
@@ -17,12 +17,23 @@ const options = {
   },
   servers: [
     {
-      url: "http://budibase.app/api/public/v1",
+      url: "https://budibase.app/api/public/v1",
       description: "Budibase Cloud API",
     },
     {
-      url: "{protocol}://{hostname}:10000/api/public/v1",
+      url: "{protocol}://{hostname}/api/public/v1",
       description: "Budibase self hosted API",
+      variables: {
+        protocol: {
+          default: "http",
+          description:
+            "Whether HTTP or HTTPS should be used to communicate with your Budibase instance.",
+        },
+        hostname: {
+          default: "localhost:10000",
+          description: "The URL of your Budibase instance.",
+        },
+      },
     },
   ],
   components: {
@@ -7,12 +7,22 @@
   },
   "servers": [
     {
-      "url": "http://budibase.app/api/public/v1",
+      "url": "https://budibase.app/api/public/v1",
       "description": "Budibase Cloud API"
     },
     {
-      "url": "{protocol}://{hostname}:10000/api/public/v1",
-      "description": "Budibase self hosted API"
+      "url": "{protocol}://{hostname}/api/public/v1",
+      "description": "Budibase self hosted API",
+      "variables": {
+        "protocol": {
+          "default": "http",
+          "description": "Whether HTTP or HTTPS should be used to communicate with your Budibase instance."
+        },
+        "hostname": {
+          "default": "localhost:10000",
+          "description": "The URL of your Budibase instance."
+        }
+      }
     }
   ],
   "components": {
@@ -4,10 +4,18 @@ info:
   description: The public API for Budibase apps and its services.
   version: 1.0.0
 servers:
-  - url: http://budibase.app/api/public/v1
+  - url: https://budibase.app/api/public/v1
     description: Budibase Cloud API
-  - url: "{protocol}://{hostname}:10000/api/public/v1"
+  - url: "{protocol}://{hostname}/api/public/v1"
     description: Budibase self hosted API
+    variables:
+      protocol:
+        default: http
+        description: Whether HTTP or HTTPS should be used to communicate with your
+          Budibase instance.
+      hostname:
+        default: localhost:10000
+        description: The URL of your Budibase instance.
 components:
   parameters:
     tableId:
@@ -152,6 +152,27 @@ exports.validate = async () => {
   return { valid: true }
 }

+exports.exportRows = async ctx => {
+  const { datasourceId, tableName } = breakExternalTableId(ctx.params.tableId)
+  const db = getAppDB()
+  const datasource = await db.get(datasourceId)
+  if (!datasource || !datasource.entities) {
+    ctx.throw(400, "Datasource has not been configured for plus API.")
+  }
+  const tables = datasource.entities
+  const table = tables[tableName]
+  ctx.request.body = {
+    query: {
+      oneOf: {
+        [table.primaryDisplay]: ctx.request.body.map(
+          id => breakRowIdField(id)[0]
+        ),
+      },
+    },
+  }
+  return exports.search(ctx)
+}
+
 exports.fetchEnrichedRow = async ctx => {
   const id = ctx.params.rowId
   const tableId = ctx.params.tableId
@@ -137,3 +137,12 @@ exports.fetchEnrichedRow = async function (ctx) {
     ctx.throw(400, err)
   }
 }
+
+exports.export = async function (ctx) {
+  const tableId = getTableId(ctx)
+  try {
+    ctx.body = await pickApi(tableId).exportRows(ctx)
+  } catch (err) {
+    ctx.throw(400, err)
+  }
+}
@@ -362,6 +362,22 @@ exports.validate = async ctx => {
   })
 }

+exports.exportRows = async ctx => {
+  const db = getAppDB()
+  const table = await db.get(ctx.params.tableId)
+  const rowIds = ctx.request.body.rows
+  let response = (
+    await db.allDocs({
+      include_docs: true,
+      keys: rowIds,
+    })
+  ).rows.map(row => row.doc)
+
+  let rows = await outputProcessing(table, response)
+
+  return rows
+}
+
 exports.fetchEnrichedRow = async ctx => {
   const db = getAppDB()
   const tableId = ctx.params.tableId
@@ -25,7 +25,8 @@ async function makeTableRequest(
   operation,
   table,
   tables,
-  oldTable = null
+  oldTable = null,
+  renamed = null
 ) {
   const json = {
     endpoint: {

@@ -41,6 +42,9 @@ async function makeTableRequest(
   if (oldTable) {
     json.meta.table = oldTable
   }
+  if (renamed) {
+    json.meta.renamed = renamed
+  }
   return makeExternalQuery(datasource, json)
 }

@@ -160,6 +164,7 @@ function isRelationshipSetup(column) {

 exports.save = async function (ctx) {
   const table = ctx.request.body
+  const { _rename: renamed } = table
   // can't do this right now
   delete table.dataImport
   const datasourceId = getDatasourceId(ctx.request.body)

@@ -241,7 +246,14 @@ exports.save = async function (ctx) {
   const operation = oldTable
     ? DataSourceOperation.UPDATE_TABLE
     : DataSourceOperation.CREATE_TABLE
-  await makeTableRequest(datasource, operation, tableToSave, tables, oldTable)
+  await makeTableRequest(
+    datasource,
+    operation,
+    tableToSave,
+    tables,
+    oldTable,
+    renamed
+  )
   // update any extra tables (like foreign keys in other tables)
   for (let extraTable of extraTablesToUpdate) {
     const oldExtraTable = oldTables[extraTable.name]

@@ -258,6 +270,8 @@ exports.save = async function (ctx) {
     )
   }

+  // remove the rename prop
+  delete tableToSave._rename
   // store it into couch now for budibase reference
   datasource.entities[tableToSave.name] = tableToSave
   await db.put(datasource)
@@ -252,4 +252,25 @@ router
     rowController.destroy
   )

+  /**
+   * @api {post} /api/:tableId/rows/exportRows Export Rows
+   * @apiName Export rows
+   * @apiGroup rows
+   * @apiPermission table write access
+   * @apiDescription This API can export a number of provided rows
+   *
+   * @apiParam {string} tableId The ID of the table the row is to be deleted from.
+   *
+   * @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
+   *
+   * @apiSuccess {object[]|object}
+   */
+  .post(
+    "/api/:tableId/rows/exportRows",
+    paramResource("tableId"),
+    authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
+    usage,
+    rowController.export
+  )
+
 module.exports = router
@@ -138,6 +138,11 @@ export interface PaginationJson {
   page?: string | number
 }

+export interface RenameColumn {
+  old: string
+  updated: string
+}
+
 export interface RelationshipsJson {
   through?: string
   from?: string

@@ -153,6 +158,7 @@ export interface QueryJson {
     datasourceId: string
     entityId: string
     operation: Operation
+    schema?: string
   }
   resource: {
     fields: string[]

@@ -165,6 +171,7 @@ export interface QueryJson {
   meta?: {
     table?: Table
     tables?: Record<string, Table>
+    renamed: RenameColumn
   }
   extra?: {
     idFilter?: SearchFilters
@@ -249,6 +249,9 @@ class InternalBuilder {
   create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
     const { endpoint, body } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     const parsedBody = parseBody(body)
     // make sure no null values in body for creation
     for (let [key, value] of Object.entries(parsedBody)) {

@@ -267,6 +270,9 @@ class InternalBuilder {
   bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
     const { endpoint, body } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     if (!Array.isArray(body)) {
       return query
     }

@@ -275,7 +281,7 @@ class InternalBuilder {
   }

   read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
-    let { endpoint, resource, filters, sort, paginate, relationships } = json
+    let { endpoint, resource, filters, paginate, relationships } = json
     const tableName = endpoint.entityId
     // select all if not specified
     if (!resource) {

@@ -302,6 +308,9 @@ class InternalBuilder {
     }
     // start building the query
     let query: KnexQuery = knex(tableName).limit(foundLimit)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     if (foundOffset) {
       query = query.offset(foundOffset)
     }

@@ -331,6 +340,9 @@ class InternalBuilder {
   update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
     const { endpoint, body, filters } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     const parsedBody = parseBody(body)
     query = this.addFilters(query, filters, { tableName: endpoint.entityId })
     // mysql can't use returning

@@ -344,6 +356,9 @@ class InternalBuilder {
   delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
     const { endpoint, filters } = json
     let query: KnexQuery = knex(endpoint.entityId)
+    if (endpoint.schema) {
+      query = query.withSchema(endpoint.schema)
+    }
     query = this.addFilters(query, filters, { tableName: endpoint.entityId })
     // mysql can't use returning
     if (opts.disableReturning) {
@@ -1,6 +1,10 @@
 import { Knex, knex } from "knex"
 import { Table } from "../../definitions/common"
-import { Operation, QueryJson } from "../../definitions/datasource"
+import {
+  Operation,
+  QueryJson,
+  RenameColumn,
+} from "../../definitions/datasource"
 import { breakExternalTableId } from "../utils"
 import SchemaBuilder = Knex.SchemaBuilder
 import CreateTableBuilder = Knex.CreateTableBuilder

@@ -10,7 +14,8 @@ function generateSchema(
   schema: CreateTableBuilder,
   table: Table,
   tables: Record<string, Table>,
-  oldTable: null | Table = null
+  oldTable: null | Table = null,
+  renamed?: RenameColumn
 ) {
   let primaryKey = table && table.primary ? table.primary[0] : null
   const columns = Object.values(table.schema)

@@ -29,7 +34,11 @@ function generateSchema(
   for (let [key, column] of Object.entries(table.schema)) {
     // skip things that are already correct
     const oldColumn = oldTable ? oldTable.schema[key] : null
-    if ((oldColumn && oldColumn.type) || (primaryKey === key && !isJunction)) {
+    if (
+      (oldColumn && oldColumn.type) ||
+      (primaryKey === key && !isJunction) ||
+      renamed?.updated === key
+    ) {
       continue
     }
     switch (column.type) {

@@ -81,6 +90,10 @@ function generateSchema(
     }
   }

+  if (renamed) {
+    schema.renameColumn(renamed.old, renamed.updated)
+  }
+
   // need to check if any columns have been deleted
   if (oldTable) {
     const deletedColumns = Object.entries(oldTable.schema)

@@ -90,6 +103,9 @@ function generateSchema(
       )
       .map(([key]) => key)
     deletedColumns.forEach(key => {
+      if (renamed?.old === key) {
+        return
+      }
       if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {
         schema.dropForeign(key)
       }

@@ -101,28 +117,29 @@ function generateSchema(
 }

 function buildCreateTable(
-  knex: Knex,
+  knex: SchemaBuilder,
   table: Table,
   tables: Record<string, Table>
 ): SchemaBuilder {
-  return knex.schema.createTable(table.name, schema => {
+  return knex.createTable(table.name, schema => {
     generateSchema(schema, table, tables)
   })
 }

 function buildUpdateTable(
-  knex: Knex,
+  knex: SchemaBuilder,
   table: Table,
   tables: Record<string, Table>,
-  oldTable: Table
+  oldTable: Table,
+  renamed: RenameColumn
 ): SchemaBuilder {
-  return knex.schema.alterTable(table.name, schema => {
-    generateSchema(schema, table, tables, oldTable)
+  return knex.alterTable(table.name, schema => {
+    generateSchema(schema, table, tables, oldTable, renamed)
   })
 }

-function buildDeleteTable(knex: Knex, table: Table): SchemaBuilder {
-  return knex.schema.dropTable(table.name)
+function buildDeleteTable(knex: SchemaBuilder, table: Table): SchemaBuilder {
+  return knex.dropTable(table.name)
 }

 class SqlTableQueryBuilder {

@@ -146,7 +163,11 @@ class SqlTableQueryBuilder {
   }

   _tableQuery(json: QueryJson): any {
-    const client = knex({ client: this.sqlClient })
+    let client = knex({ client: this.sqlClient }).schema
+    if (json?.endpoint?.schema) {
+      client = client.withSchema(json.endpoint.schema)
+    }
+
     let query
     if (!json.table || !json.meta || !json.meta.tables) {
       throw "Cannot execute without table being specified"

@@ -163,7 +184,8 @@ class SqlTableQueryBuilder {
           client,
           json.table,
           json.meta.tables,
-          json.meta.table
+          json.meta.table,
+          json.meta.renamed
         )
         break
       case Operation.DELETE_TABLE:
@@ -19,6 +19,7 @@ import { Table, TableSchema } from "../definitions/common"
 module MSSQLModule {
   const sqlServer = require("mssql")
   const Sql = require("./base/sql")
+  const DEFAULT_SCHEMA = "dbo"

   interface MSSQLConfig {
     user: string

@@ -26,9 +27,17 @@ module MSSQLModule {
     server: string
     port: number
     database: string
+    schema: string
     encrypt?: boolean
   }

+  interface TablesResponse {
+    TABLE_CATALOG: string
+    TABLE_SCHEMA: string
+    TABLE_NAME: string
+    TABLE_TYPE: string
+  }
+
   const SCHEMA: Integration = {
     docs: "https://github.com/tediousjs/node-mssql",
     plus: true,

@@ -58,6 +67,10 @@ module MSSQLModule {
         type: DatasourceFieldTypes.STRING,
         default: "root",
       },
+      schema: {
+        type: DatasourceFieldTypes.STRING,
+        default: DEFAULT_SCHEMA,
+      },
       encrypt: {
         type: DatasourceFieldTypes.BOOLEAN,
         default: true,

@@ -96,6 +109,35 @@ module MSSQLModule {
     TABLES_SQL =
       "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"

+    constructor(config: MSSQLConfig) {
+      super(SqlClients.MS_SQL)
+      this.config = config
+      const clientCfg = {
+        ...this.config,
+        options: {
+          encrypt: this.config.encrypt,
+          enableArithAbort: true,
+        },
+      }
+      delete clientCfg.encrypt
+      if (!this.pool) {
+        this.pool = new sqlServer.ConnectionPool(clientCfg)
+      }
+    }
+
+    getBindingIdentifier(): string {
+      return `(@p${this.index++})`
+    }
+
+    async connect() {
+      try {
+        this.client = await this.pool.connect()
+      } catch (err) {
+        // @ts-ignore
+        throw new Error(err)
+      }
+    }
+
     async internalQuery(
       query: SqlQuery,
       operation: string | undefined = undefined

@@ -151,35 +193,6 @@ module MSSQLModule {
       WHERE TABLE_NAME='${tableName}'`
     }

-    constructor(config: MSSQLConfig) {
-      super(SqlClients.MS_SQL)
-      this.config = config
-      const clientCfg = {
-        ...this.config,
-        options: {
-          encrypt: this.config.encrypt,
-          enableArithAbort: true,
-        },
-      }
-      delete clientCfg.encrypt
-      if (!this.pool) {
-        this.pool = new sqlServer.ConnectionPool(clientCfg)
-      }
-    }
-
-    getBindingIdentifier(): string {
-      return `(@p${this.index++})`
-    }
-
-    async connect() {
-      try {
-        this.client = await this.pool.connect()
-      } catch (err) {
-        // @ts-ignore
-        throw new Error(err)
-      }
-    }
-
     async runSQL(sql: string) {
       return (await this.internalQuery(getSqlQuery(sql))).recordset
     }

@@ -191,11 +204,14 @@ module MSSQLModule {
     */
     async buildSchema(datasourceId: string, entities: Record<string, Table>) {
       await this.connect()
-      let tableNames = await this.runSQL(this.TABLES_SQL)
-      if (tableNames == null || !Array.isArray(tableNames)) {
+      let tableInfo: TablesResponse[] = await this.runSQL(this.TABLES_SQL)
+      if (tableInfo == null || !Array.isArray(tableInfo)) {
         throw "Unable to get list of tables in database"
       }
-      tableNames = tableNames
+
+      const schema = this.config.schema || DEFAULT_SCHEMA
+      const tableNames = tableInfo
+        .filter((record: any) => record.TABLE_SCHEMA === schema)
         .map((record: any) => record.TABLE_NAME)
         .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)


@@ -267,7 +283,11 @@ module MSSQLModule {
     }

     async query(json: QueryJson) {
+      const schema = this.config.schema
       await this.connect()
+      if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
+        json.endpoint.schema = schema
+      }
       const operation = this._operation(json)
       const queryFn = (query: any, op: string) => this.internalQuery(query, op)
       const processFn = (result: any) =>
@@ -6,35 +6,63 @@ import { FieldTypes, BuildSchemaErrors, InvalidColumns } from "../constants"
 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g

-const SQL_TYPE_MAP = {
-  text: FieldTypes.LONGFORM,
-  varchar: FieldTypes.STRING,
+const SQL_NUMBER_TYPE_MAP = {
   integer: FieldTypes.NUMBER,
+  int: FieldTypes.NUMBER,
   bigint: FieldTypes.NUMBER,
   decimal: FieldTypes.NUMBER,
   smallint: FieldTypes.NUMBER,
   real: FieldTypes.NUMBER,
-  "double precision": FieldTypes.NUMBER,
-  timestamp: FieldTypes.DATETIME,
-  time: FieldTypes.DATETIME,
-  boolean: FieldTypes.BOOLEAN,
-  json: FieldTypes.JSON,
-  date: FieldTypes.DATETIME,
-  blob: FieldTypes.LONGFORM,
-  enum: FieldTypes.STRING,
   float: FieldTypes.NUMBER,
-  int: FieldTypes.NUMBER,
   numeric: FieldTypes.NUMBER,
   mediumint: FieldTypes.NUMBER,
   dec: FieldTypes.NUMBER,
   double: FieldTypes.NUMBER,
   fixed: FieldTypes.NUMBER,
-  datetime: FieldTypes.DATETIME,
-  tinyint: FieldTypes.BOOLEAN,
-  long: FieldTypes.LONGFORM,
+  "double precision": FieldTypes.NUMBER,
   number: FieldTypes.NUMBER,
   binary_float: FieldTypes.NUMBER,
   binary_double: FieldTypes.NUMBER,
+  money: FieldTypes.NUMBER,
+  smallmoney: FieldTypes.NUMBER,
+}
+
+const SQL_DATE_TYPE_MAP = {
+  timestamp: FieldTypes.DATETIME,
+  time: FieldTypes.DATETIME,
+  datetime: FieldTypes.DATETIME,
+  smalldatetime: FieldTypes.DATETIME,
+  date: FieldTypes.DATETIME,
+}
+
+const SQL_STRING_TYPE_MAP = {
+  varchar: FieldTypes.STRING,
+  char: FieldTypes.STRING,
+  nchar: FieldTypes.STRING,
+  nvarchar: FieldTypes.STRING,
+  ntext: FieldTypes.STRING,
+  enum: FieldTypes.STRING,
+  blob: FieldTypes.LONGFORM,
+  long: FieldTypes.LONGFORM,
+  text: FieldTypes.LONGFORM,
+}
+
+const SQL_BOOLEAN_TYPE_MAP = {
+  boolean: FieldTypes.BOOLEAN,
+  bit: FieldTypes.BOOLEAN,
+  tinyint: FieldTypes.BOOLEAN,
+}
+
+const SQL_MISC_TYPE_MAP = {
+  json: FieldTypes.JSON,
+}
+
+const SQL_TYPE_MAP = {
+  ...SQL_NUMBER_TYPE_MAP,
+  ...SQL_DATE_TYPE_MAP,
+  ...SQL_STRING_TYPE_MAP,
+  ...SQL_BOOLEAN_TYPE_MAP,
+  ...SQL_MISC_TYPE_MAP,
 }

 export enum SqlClients {
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/string-templates",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "description": "Handlebars wrapper for Budibase templating.",
   "main": "src/index.cjs",
   "module": "dist/bundle.mjs",
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/worker",
   "email": "hi@budibase.com",
-  "version": "1.0.80-alpha.1",
+  "version": "1.0.81-alpha.0",
   "description": "Budibase background service",
   "main": "src/index.ts",
   "repository": {

@@ -34,8 +34,8 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "dependencies": {
-    "@budibase/backend-core": "^1.0.80-alpha.1",
-    "@budibase/string-templates": "^1.0.80-alpha.1",
+    "@budibase/backend-core": "^1.0.81-alpha.0",
+    "@budibase/string-templates": "^1.0.81-alpha.0",
     "@koa/router": "^8.0.0",
     "@sentry/node": "^6.0.0",
     "@techpass/passport-openidconnect": "^0.3.0",