Merge branch 'master' of github.com:budibase/budibase into sqs-auto-tests

Sam Rose 2024-04-25 15:30:56 +01:00
commit 289de5906a
76 changed files with 1320 additions and 452 deletions

View File

@@ -42,6 +42,8 @@
   },
   "rules": {
     "no-unused-vars": "off",
+    "local-rules/no-budibase-imports": "error",
+    "local-rules/no-console-error": "error",
     "@typescript-eslint/no-unused-vars": [
       "error",
       {

View File

@@ -106,6 +106,8 @@ spec:
             value: {{ .Values.services.objectStore.globalBucketName | quote }}
           - name: BACKUPS_BUCKET_NAME
             value: {{ .Values.services.objectStore.backupsBucketName | quote }}
+          - name: TEMP_BUCKET_NAME
+            value: {{ .Values.globals.tempBucketName | quote }}
          - name: PORT
            value: {{ .Values.services.apps.port | quote }}
          {{ if .Values.services.worker.publicApiRateLimitPerSecond }}

View File

@@ -107,6 +107,8 @@ spec:
             value: {{ .Values.services.objectStore.globalBucketName | quote }}
           - name: BACKUPS_BUCKET_NAME
             value: {{ .Values.services.objectStore.backupsBucketName | quote }}
+          - name: TEMP_BUCKET_NAME
+            value: {{ .Values.globals.tempBucketName | quote }}
          - name: PORT
            value: {{ .Values.services.automationWorkers.port | quote }}
          {{ if .Values.services.worker.publicApiRateLimitPerSecond }}

View File

@@ -106,6 +106,8 @@ spec:
             value: {{ .Values.services.objectStore.globalBucketName | quote }}
           - name: BACKUPS_BUCKET_NAME
             value: {{ .Values.services.objectStore.backupsBucketName | quote }}
+          - name: TEMP_BUCKET_NAME
+            value: {{ .Values.globals.tempBucketName | quote }}
          - name: PORT
            value: {{ .Values.services.worker.port | quote }}
          - name: MULTI_TENANCY

View File

@@ -121,6 +121,9 @@ globals:
   # to the old value for the duration of the rotation.
   jwtSecretFallback: ""

+  ## -- If using S3 the bucket name to be used for storing temporary files
+  tempBucketName: ""
+
  smtp:
    # -- Whether to enable SMTP or not.
    enabled: false

View File

@@ -1,4 +1,25 @@
 module.exports = {
+  "no-console-error": {
+    create: function(context) {
+      return {
+        CallExpression(node) {
+          if (
+            node.callee.type === "MemberExpression" &&
+            node.callee.object.name === "console" &&
+            node.callee.property.name === "error" &&
+            node.arguments.length === 1 &&
+            node.arguments[0].name &&
+            node.arguments[0].name.startsWith("err")
+          ) {
+            context.report({
+              node,
+              message: 'Using console.error(err) on its own is not allowed. Either provide context to the error (console.error(msg, err)) or throw it.',
+            })
+          }
+        },
+      }
+    },
+  },
   "no-budibase-imports": {
     create: function (context) {
       return {
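
For illustration, a minimal sketch of what the new no-console-error rule flags versus allows (hypothetical snippet, not part of the diff) — it only fires on a console.error call whose sole argument is an identifier starting with "err":

try {
  await doWork()
} catch (err) {
  console.error(err) // flagged: bare error with no context
  console.error("work failed", err) // allowed: two arguments provide context
  throw err // allowed: rethrown instead of logged
}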

View File

@@ -19,9 +19,6 @@ RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
 RUN ./scripts/removeWorkspaceDependencies.sh packages/server/package.json
 RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json

-# We will never want to sync pro, but the script is still required
-RUN echo '' > scripts/syncProPackage.js
-
 RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
 RUN ./scripts/removeWorkspaceDependencies.sh package.json
 RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production --frozen-lockfile

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.23.8",
+  "version": "2.23.12",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -32,7 +32,6 @@
     "yargs": "^17.7.2"
   },
   "scripts": {
-    "preinstall": "node scripts/syncProPackage.js",
     "get-past-client-version": "node scripts/getPastClientVersion.js",
     "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
     "build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
@@ -107,6 +106,7 @@
     "@budibase/shared-core": "0.0.0",
     "@budibase/string-templates": "0.0.0",
     "@budibase/types": "0.0.0",
+    "@budibase/pro": "npm:@budibase/pro@latest",
     "tough-cookie": "4.1.3",
     "node-fetch": "2.6.7",
     "semver": "7.5.3",

@@ -1 +1 @@
-Subproject commit eb7d5da233885c5cffd9c255d3e954d0cd39185e
+Subproject commit c167c331ff9b8161fc18e2ecbaaf1ea5815ba964

View File

@@ -64,7 +64,6 @@ async function refreshOIDCAccessToken(
     }
     strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)
   } catch (err) {
-    console.error(err)
     throw new Error("Could not refresh OAuth Token")
   }
@@ -99,7 +98,6 @@ async function refreshGoogleAccessToken(
       ssoSaveUserNoOp
     )
   } catch (err: any) {
-    console.error(err)
     throw new Error(
       `Error constructing OIDC refresh strategy: message=${err.message}`
     )

View File

@@ -29,6 +29,7 @@ const DefaultBucketName = {
   TEMPLATES: "templates",
   GLOBAL: "global",
   PLUGINS: "plugins",
+  TEMP: "tmp-file-attachments",
 }

 const selfHosted = !!parseInt(process.env.SELF_HOSTED || "")
@@ -146,6 +147,7 @@ const environment = {
     process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,
   PLUGIN_BUCKET_NAME:
     process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
+  TEMP_BUCKET_NAME: process.env.TEMP_BUCKET_NAME || DefaultBucketName.TEMP,
   USE_COUCH: process.env.USE_COUCH || true,
   MOCK_REDIS: process.env.MOCK_REDIS,
   DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,

View File

@@ -138,7 +138,6 @@ export default function (
     } catch (err: any) {
       authenticated = false
       console.error(`Auth Error: ${err.message}`)
-      console.error(err)
       // remove the cookie as the user does not exist anymore
       clearCookie(ctx, Cookie.Auth)
     }
@@ -187,7 +186,6 @@ export default function (
       }
     } catch (err: any) {
       console.error(`Auth Error: ${err.message}`)
-      console.error(err)
       // invalid token, clear the cookie
       if (err?.name === "JsonWebTokenError") {
         clearCookie(ctx, Cookie.Auth)

View File

@@ -12,7 +12,7 @@ export async function errorHandling(ctx: any, next: any) {
   if (status >= 400 && status < 500) {
     console.warn(err)
   } else {
-    console.error(err)
+    console.error("Got 400 response code", err)
   }

   let error: APIError = {

View File

@@ -68,7 +68,6 @@ export async function strategyFactory(
       verify
     )
   } catch (err: any) {
-    console.error(err)
     throw new Error(`Error constructing google authentication strategy: ${err}`)
   }
 }

View File

@@ -103,7 +103,6 @@ export async function strategyFactory(
     strategy.name = "oidc"
     return strategy
   } catch (err: any) {
-    console.error(err)
     throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
   }
 }
@@ -142,7 +141,6 @@ export async function fetchStrategyConfig(
       callbackURL: callbackUrl,
     }
   } catch (err) {
-    console.error(err)
     throw new Error(
       `Error constructing OIDC authentication configuration - ${err}`
     )

View File

@@ -26,7 +26,6 @@ export const getMigrationsDoc = async (db: any) => {
     if (err.status && err.status === 404) {
       return { _id: DocumentType.MIGRATIONS }
     } else {
-      console.error(err)
       throw err
     }
   }

View File

@@ -7,31 +7,41 @@ import tar from "tar-fs"
 import zlib from "zlib"
 import { promisify } from "util"
 import { join } from "path"
-import fs, { ReadStream } from "fs"
+import fs, { PathLike, ReadStream } from "fs"
 import env from "../environment"
-import { budibaseTempDir } from "./utils"
+import { bucketTTLConfig, budibaseTempDir } from "./utils"
 import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
+import fsp from "fs/promises"

 const streamPipeline = promisify(stream.pipeline)
 // use this as a temporary store of buckets that are being created
 const STATE = {
   bucketCreationPromises: {},
 }
+const signedFilePrefix = "/files/signed"

 type ListParams = {
   ContinuationToken?: string
 }

-type UploadParams = {
+type BaseUploadParams = {
   bucket: string
   filename: string
-  path: string
   type?: string | null
-  // can be undefined, we will remove it
-  metadata?: {
-    [key: string]: string | undefined
-  }
+  metadata?: { [key: string]: string | undefined }
+  body?: ReadableStream | Buffer
+  ttl?: number
+  addTTL?: boolean
+  extra?: any
+}
+
+type UploadParams = BaseUploadParams & {
+  path?: string | PathLike
+}
+
+type StreamUploadParams = BaseUploadParams & {
+  stream: ReadStream
 }
@@ -41,6 +51,8 @@ const CONTENT_TYPE_MAP: any = {
   js: "application/javascript",
   json: "application/json",
   gz: "application/gzip",
+  svg: "image/svg+xml",
+  form: "multipart/form-data",
 }

 const STRING_CONTENT_TYPES = [
@@ -105,7 +117,10 @@
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export async function makeSureBucketExists(client: any, bucketName: string) {
+export async function createBucketIfNotExists(
+  client: any,
+  bucketName: string
+): Promise<{ created: boolean; exists: boolean }> {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -113,15 +128,16 @@ export async function makeSureBucketExists(client: any, bucketName: string) {
         Bucket: bucketName,
       })
       .promise()
+    return { created: false, exists: true }
   } catch (err: any) {
     const promises: any = STATE.bucketCreationPromises
     const doesntExist = err.statusCode === 404,
       noAccess = err.statusCode === 403
     if (promises[bucketName]) {
       await promises[bucketName]
+      return { created: false, exists: true }
     } else if (doesntExist || noAccess) {
       if (doesntExist) {
-        // bucket doesn't exist create it
         promises[bucketName] = client
           .createBucket({
             Bucket: bucketName,
@@ -129,13 +145,15 @@ export async function makeSureBucketExists(client: any, bucketName: string) {
           .promise()
         await promises[bucketName]
         delete promises[bucketName]
+        return { created: true, exists: false }
+      } else {
+        throw new Error("Access denied to object store bucket." + err)
       }
     } else {
       throw new Error("Unable to write to object store bucket.")
     }
   }
 }

 /**
  * Uploads the contents of a file given the required parameters, useful when
  * temp files in use (for example file uploaded as an attachment).
@@ -146,12 +164,22 @@ export async function upload({
   path,
   type,
   metadata,
+  body,
+  ttl,
 }: UploadParams) {
   const extension = filename.split(".").pop()
-  const fileBytes = fs.readFileSync(path)
+
+  const fileBytes = path ? (await fsp.open(path)).createReadStream() : body
+
   const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
+  const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
+
+  if (ttl && (bucketCreated.created || bucketCreated.exists)) {
+    let ttlConfig = bucketTTLConfig(bucketName, ttl)
+    if (objectStore.putBucketLifecycleConfiguration) {
+      await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
+    }
+  }

   let contentType = type
   if (!contentType) {
@@ -174,6 +202,7 @@ export async function upload({
     }
     config.Metadata = metadata
   }
+
   return objectStore.upload(config).promise()
 }
@@ -181,14 +210,24 @@
  * Similar to the upload function but can be used to send a file stream
  * through to the object store.
  */
-export async function streamUpload(
-  bucketName: string,
-  filename: string,
-  stream: ReadStream | ReadableStream,
-  extra = {}
-) {
+export async function streamUpload({
+  bucket: bucketName,
+  stream,
+  filename,
+  type,
+  extra,
+  ttl,
+}: StreamUploadParams) {
+  const extension = filename.split(".").pop()
   const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
+  const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
+
+  if (ttl && (bucketCreated.created || bucketCreated.exists)) {
+    let ttlConfig = bucketTTLConfig(bucketName, ttl)
+    if (objectStore.putBucketLifecycleConfiguration) {
+      await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
+    }
+  }

   // Set content type for certain known extensions
   if (filename?.endsWith(".js")) {
@@ -203,10 +242,18 @@
     }
   }

+  let contentType = type
+  if (!contentType) {
+    contentType = extension
+      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
+      : CONTENT_TYPE_MAP.txt
+  }
+
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filename),
     Body: stream,
+    ContentType: contentType,
     ...extra,
   }
   return objectStore.upload(params).promise()
@@ -286,7 +333,7 @@ export function getPresignedUrl(
     const signedUrl = new URL(url)
     const path = signedUrl.pathname
     const query = signedUrl.search
-    return `/files/signed${path}${query}`
+    return `${signedFilePrefix}${path}${query}`
   }
 }
@@ -341,7 +388,7 @@ export async function retrieveDirectory(bucketName: string, path: string) {
  */
 export async function deleteFile(bucketName: string, filepath: string) {
   const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
+  await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,
     Key: sanitizeKey(filepath),
@@ -351,7 +398,7 @@
 export async function deleteFiles(bucketName: string, filepaths: string[]) {
   const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
+  await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,
     Delete: {
@@ -412,7 +459,13 @@ export async function uploadDirectory(
     if (file.isDirectory()) {
       uploads.push(uploadDirectory(bucketName, local, path))
     } else {
-      uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
+      uploads.push(
+        streamUpload({
+          bucket: bucketName,
+          filename: path,
+          stream: fs.createReadStream(local),
+        })
+      )
     }
   }
   await Promise.all(uploads)
@@ -467,3 +520,23 @@ export async function getReadStream(
   }
   return client.getObject(params).createReadStream()
 }
+
+/*
+  Given a signed url like '/files/signed/tmp-files-attachments/app_123456/myfile.txt' extract
+  the bucket and the path from it
+*/
+export function extractBucketAndPath(
+  url: string
+): { bucket: string; path: string } | null {
+  const baseUrl = url.split("?")[0]
+
+  const regex = new RegExp(`^${signedFilePrefix}/(?<bucket>[^/]+)/(?<path>.+)$`)
+  const match = baseUrl.match(regex)
+
+  if (match && match.groups) {
+    const { bucket, path } = match.groups
+    return { bucket, path }
+  }
+
+  return null
+}
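
A quick illustration of the new signed-URL helper (example values invented; the "/files/signed" prefix comes from signedFilePrefix above):

const parsed = extractBucketAndPath(
  "/files/signed/tmp-file-attachments/app_123456/myfile.txt?Signature=abc"
)
// query string is stripped first, then the prefix pattern is matched:
// parsed => { bucket: "tmp-file-attachments", path: "app_123456/myfile.txt" }
// any URL not matching the prefix pattern returns null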

View File

@@ -2,6 +2,7 @@ import { join } from "path"
 import { tmpdir } from "os"
 import fs from "fs"
 import env from "../environment"
+import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"

 /****************************************************
  * NOTE: When adding a new bucket - name             *
@@ -15,6 +16,7 @@ export const ObjectStoreBuckets = {
   TEMPLATES: env.TEMPLATES_BUCKET_NAME,
   GLOBAL: env.GLOBAL_BUCKET_NAME,
   PLUGINS: env.PLUGIN_BUCKET_NAME,
+  TEMP: env.TEMP_BUCKET_NAME,
 }

 const bbTmp = join(tmpdir(), ".budibase")
@@ -29,3 +31,27 @@ try {
 export function budibaseTempDir() {
   return bbTmp
 }
+
+export const bucketTTLConfig = (
+  bucketName: string,
+  days: number
+): PutBucketLifecycleConfigurationRequest => {
+  const lifecycleRule = {
+    ID: `${bucketName}-ExpireAfter${days}days`,
+    Prefix: "",
+    Status: "Enabled",
+    Expiration: {
+      Days: days,
+    },
+  }
+  const lifecycleConfiguration = {
+    Rules: [lifecycleRule],
+  }
+
+  const params = {
+    Bucket: bucketName,
+    LifecycleConfiguration: lifecycleConfiguration,
+  }
+
+  return params
+}
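
For reference, this is what bucketTTLConfig("tmp-file-attachments", 1) evaluates to, ready to pass to S3's putBucketLifecycleConfiguration (derived directly from the code above):

{
  Bucket: "tmp-file-attachments",
  LifecycleConfiguration: {
    Rules: [
      {
        ID: "tmp-file-attachments-ExpireAfter1days",
        Prefix: "", // empty prefix: the rule applies to every object in the bucket
        Status: "Enabled",
        Expiration: { Days: 1 },
      },
    ],
  },
}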

View File

@@ -4,3 +4,6 @@ export { generator } from "./structures"
 export * as testContainerUtils from "./testContainerUtils"
 export * as utils from "./utils"
 export * from "./jestUtils"
+import * as minio from "./minio"
+
+export const objectStoreTestProviders = { minio }

View File

@@ -0,0 +1,34 @@
+import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
+import env from "../../../src/environment"
+
+let container: StartedTestContainer | undefined
+
+class ObjectStoreWaitStrategy extends AbstractWaitStrategy {
+  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
+    const logs = Wait.forListeningPorts()
+    await logs.waitUntilReady(container, boundPorts, startTime)
+  }
+}
+
+export async function start(): Promise<void> {
+  container = await new GenericContainer("minio/minio")
+    .withExposedPorts(9000)
+    .withCommand(["server", "/data"])
+    .withEnvironment({
+      MINIO_ACCESS_KEY: "budibase",
+      MINIO_SECRET_KEY: "budibase",
+    })
+    .withWaitStrategy(new ObjectStoreWaitStrategy().withStartupTimeout(30000))
+    .start()
+
+  const port = container.getMappedPort(9000)
+  env._set("MINIO_URL", `http://0.0.0.0:${port}`)
+}
+
+export async function stop() {
+  if (container) {
+    await container.stop()
+    container = undefined
+  }
+}
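
A plausible way the new provider gets wired into a Jest suite — a sketch under assumptions, since the actual import path and setup files are not shown in this diff:

import { objectStoreTestProviders } from "@budibase/backend-core/tests"

beforeAll(async () => {
  // boots the MinIO container and points MINIO_URL at the mapped port
  await objectStoreTestProviders.minio.start()
}, 40000)

afterAll(async () => {
  await objectStoreTestProviders.minio.stop()
})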

View File

@@ -32,6 +32,7 @@
   import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
   import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte"
   import BindingSidePanel from "components/common/bindings/BindingSidePanel.svelte"
+  import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
   import { BindingHelpers, BindingType } from "components/common/bindings/utils"
   import {
     bindingsToCompletions,
@@ -356,7 +357,8 @@
       value.customType !== "queryParams" &&
       value.customType !== "cron" &&
       value.customType !== "triggerSchema" &&
-      value.customType !== "automationFields"
+      value.customType !== "automationFields" &&
+      value.type !== "attachment"
     )
   }
@@ -372,6 +374,15 @@
       console.error(error)
     }
   })

+  const handleAttachmentParams = keyValuObj => {
+    let params = {}
+    if (keyValuObj?.length) {
+      for (let param of keyValuObj) {
+        params[param.url] = param.filename
+      }
+    }
+    return params
+  }
 </script>

 <div class="fields">
@@ -437,6 +448,33 @@
             value={inputData[key]}
             options={Object.keys(table?.schema || {})}
           />
+        {:else if value.type === "attachment"}
+          <div class="attachment-field-wrapper">
+            <div class="label-wrapper">
+              <Label>{label}</Label>
+            </div>
+            <div class="attachment-field-width">
+              <KeyValueBuilder
+                on:change={e =>
+                  onChange(
+                    {
+                      detail: e.detail.map(({ name, value }) => ({
+                        url: name,
+                        filename: value,
+                      })),
+                    },
+                    key
+                  )}
+                object={handleAttachmentParams(inputData[key])}
+                allowJS
+                {bindings}
+                keyBindings
+                customButtonText={"Add attachment"}
+                keyPlaceholder={"URL"}
+                valuePlaceholder={"Filename"}
+              />
+            </div>
+          </div>
        {:else if value.customType === "filters"}
          <ActionButton on:click={drawer.show}>Define filters</ActionButton>
          <Drawer bind:this={drawer} title="Filtering">
@@ -651,14 +689,22 @@
   }

   .block-field {
-    display: flex; /* Use Flexbox */
+    display: flex;
     justify-content: space-between;
-    flex-direction: row; /* Arrange label and field side by side */
-    align-items: center; /* Align vertically in the center */
-    gap: 10px; /* Add some space between label and field */
+    flex-direction: row;
+    align-items: center;
+    gap: 10px;
     flex: 1;
   }

+  .attachment-field-width {
+    margin-top: var(--spacing-xs);
+  }
+
+  .label-wrapper {
+    margin-top: var(--spacing-s);
+  }
+
   .test :global(.drawer) {
     width: 10000px !important;
   }
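
To make the round trip concrete, a hedged sketch of the data shapes involved (field names from the code above, example values invented): KeyValueBuilder rows of { name, value } become automation inputs of { url, filename }, and handleAttachmentParams converts them back into the object KeyValueBuilder renders.

// stored on the automation step after on:change fires
const inputValue = [{ url: "{{ trigger.row.file }}", filename: "report.pdf" }]

// handleAttachmentParams(inputValue) rebuilds the builder's object form:
// => { "{{ trigger.row.file }}": "report.pdf" }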

View File

@@ -35,6 +35,8 @@
   export let bindingDrawerLeft
   export let allowHelpers = true
   export let customButtonText = null
+  export let keyBindings = false
+  export let allowJS = false
   export let compare = (option, value) => option === value

   let fields = Object.entries(object || {}).map(([name, value]) => ({
@@ -116,12 +118,23 @@
     class:readOnly-menu={readOnly && showMenu}
   >
     {#each fields as field, idx}
-      <Input
-        placeholder={keyPlaceholder}
-        readonly={readOnly}
-        bind:value={field.name}
-        on:blur={changed}
-      />
+      {#if keyBindings}
+        <DrawerBindableInput
+          {bindings}
+          placeholder={keyPlaceholder}
+          on:blur={e => {
+            field.name = e.detail
+            changed()
+          }}
+          disabled={readOnly}
+          value={field.name}
+          {allowJS}
+          {allowHelpers}
+          drawerLeft={bindingDrawerLeft}
+        />
+      {:else}
+        <Input readonly={readOnly} bind:value={field.name} on:blur={changed} />
+      {/if}
       {#if isJsonArray(field.value)}
         <Select readonly={true} value="Array" options={["Array"]} />
       {:else if options}
@@ -134,14 +147,14 @@
       {:else if bindings && bindings.length}
         <DrawerBindableInput
           {bindings}
-          placeholder="Value"
+          placeholder={valuePlaceholder}
           on:blur={e => {
             field.value = e.detail
             changed()
           }}
           disabled={readOnly}
           value={field.value}
-          allowJS={false}
+          {allowJS}
           {allowHelpers}
           drawerLeft={bindingDrawerLeft}
         />

View File

@@ -22,6 +22,7 @@ import {
   isJSBinding,
   decodeJSBinding,
   encodeJSBinding,
+  getJsHelperList,
 } from "@budibase/string-templates"
 import { TableNames } from "./constants"
 import { JSONUtils, Constants } from "@budibase/frontend-core"
@@ -1210,9 +1211,32 @@ const shouldReplaceBinding = (currentValue, from, convertTo, binding) => {
   if (!currentValue?.includes(from)) {
     return false
   }
-  if (convertTo === "readableBinding") {
-    // Dont replace if the value already matches the readable binding
+  // some cases we have the same binding for readable/runtime, specific logic for this
+  const sameBindings = binding.runtimeBinding.includes(binding.readableBinding)
+  const convertingToReadable = convertTo === "readableBinding"
+  const helperNames = Object.keys(getJsHelperList())
+  const matchedHelperNames = helperNames.filter(
+    name => name.includes(from) && currentValue.includes(name)
+  )
+  // edge case - if the binding is part of a helper it may accidentally replace it
+  if (matchedHelperNames.length > 0) {
+    const indexStart = currentValue.indexOf(from),
+      indexEnd = indexStart + from.length
+    for (let helperName of matchedHelperNames) {
+      const helperIndexStart = currentValue.indexOf(helperName),
+        helperIndexEnd = helperIndexStart + helperName.length
+      if (indexStart >= helperIndexStart && indexEnd <= helperIndexEnd) {
+        return false
+      }
+    }
+  }
+
+  if (convertingToReadable && !sameBindings) {
+    // Don't replace if the value already matches the readable binding
     return currentValue.indexOf(binding.readableBinding) === -1
+  } else if (convertingToReadable) {
+    // if the runtime and readable bindings are very similar we have to assume it should be replaced
+    return true
   }
   // remove all the spaces, if the input is surrounded by spaces e.g. [ Auto ID ] then
   // this makes sure it is detected
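
A worked illustration of the helper-overlap guard (hypothetical helper name, not necessarily in getJsHelperList): if the text being replaced is "count" and the expression calls a JS helper named "countOccurrences", the match falls entirely inside the helper name, so shouldReplaceBinding bails out instead of corrupting it.

const currentValue = "return countOccurrences([1, 2, 3])"
const from = "count"
// indexStart = 7, indexEnd = 12 ("count" inside currentValue)
// helperIndexStart = 7, helperIndexEnd = 23 ("countOccurrences")
// 7 >= 7 && 12 <= 23 → the match is contained in the helper, so return false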

View File

@@ -189,6 +189,7 @@
           <Select
             options={settingOptions}
             bind:value={condition.setting}
+            on:change={() => delete condition.settingValue}
           />
           <div>TO</div>
           {#if definition}

View File

@@ -9,7 +9,7 @@ const {
   ObjectStore,
   retrieve,
   uploadDirectory,
-  makeSureBucketExists,
+  createBucketIfNotExists,
 } = objectStore

 const bucketList = Object.values(ObjectStoreBuckets)
@@ -61,7 +61,7 @@ export async function importObjects() {
   let count = 0
   for (let bucket of buckets) {
     const client = ObjectStore(bucket)
-    await makeSureBucketExists(client, bucket)
+    await createBucketIfNotExists(client, bucket)
     const files = await uploadDirectory(bucket, join(path, bucket), "/")
     count += files.length
     bar.update(count)

View File

@@ -54,11 +54,9 @@ export async function downloadDockerCompose() {

 export async function checkDockerConfigured() {
   const error =
-    "docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
+    "docker has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
   const docker = await lookpath("docker")
-  const compose = await lookpath("docker-compose")
-  const composeV2 = await lookpath("docker compose")
-  if (!docker || (!compose && !composeV2)) {
+  if (!docker) {
     throw error
   }
 }

View File

@@ -6973,6 +6973,12 @@
       "key": "stripeRows",
       "defaultValue": false
     },
+    {
+      "type": "boolean",
+      "label": "Quiet",
+      "key": "quiet",
+      "defaultValue": false
+    },
     {
       "section": true,
       "name": "Columns",

View File

@@ -119,140 +119,142 @@
 {/if}
 </svelte:head>

+{#if dataLoaded}
   <div
     id="spectrum-root"
     lang="en"
     dir="ltr"
     class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
     class:builder={$builderStore.inBuilder}
     class:show={fontsLoaded && dataLoaded}
   >
     {#if $environmentStore.maintenance.length > 0}
       <MaintenanceScreen maintenanceList={$environmentStore.maintenance} />
     {:else}
       <DeviceBindingsProvider>
         <UserBindingsProvider>
           <StateBindingsProvider>
             <RowSelectionProvider>
               <QueryParamsProvider>
                 <SnippetsProvider>
                   <!-- Settings bar can be rendered outside of device preview -->
                   <!-- Key block needs to be outside the if statement or it breaks -->
                   {#key $builderStore.selectedComponentId}
                     {#if $builderStore.inBuilder}
                       <SettingsBar />
                     {/if}
                   {/key}
                   <!-- Clip boundary for selection indicators -->
                   <div
                     id="clip-root"
                     class:preview={$builderStore.inBuilder}
                     class:tablet-preview={$builderStore.previewDevice ===
                       "tablet"}
                     class:mobile-preview={$builderStore.previewDevice ===
                       "mobile"}
                   >
                     <!-- Actual app -->
                     <div id="app-root">
                       {#if showDevTools}
                         <DevToolsHeader />
                       {/if}
                       <div id="app-body">
                         {#if permissionError}
                           <div class="error">
                             <Layout justifyItems="center" gap="S">
                               <!-- eslint-disable-next-line svelte/no-at-html-tags -->
                               {@html ErrorSVG}
                               <Heading size="L">
                                 You don't have permission to use this app
                               </Heading>
                               <Body size="S">
                                 Ask your administrator to grant you access
                               </Body>
                             </Layout>
                           </div>
                         {:else if !$screenStore.activeLayout}
                           <div class="error">
                             <Layout justifyItems="center" gap="S">
                               <!-- eslint-disable-next-line svelte/no-at-html-tags -->
                               {@html ErrorSVG}
                               <Heading size="L">
                                 Something went wrong rendering your app
                               </Heading>
                               <Body size="S">
                                 Get in touch with support if this issue persists
                               </Body>
                             </Layout>
                           </div>
                         {:else if embedNoScreens}
                           <div class="error">
                             <Layout justifyItems="center" gap="S">
                               <!-- eslint-disable-next-line svelte/no-at-html-tags -->
                               {@html ErrorSVG}
                               <Heading size="L">
                                 This Budibase app is not publicly accessible
                               </Heading>
                             </Layout>
                           </div>
                         {:else}
                           <CustomThemeWrapper>
                             {#key $screenStore.activeLayout._id}
                               <Component
                                 isLayout
                                 instance={$screenStore.activeLayout.props}
                               />
                             {/key}
                             <!--
                               Flatpickr needs to be inside the theme wrapper.
                               It also needs its own container because otherwise it hijacks
                               key events on the whole page. It is painful to work with.
                             -->
                             <div id="flatpickr-root" />
                             <!-- Modal container to ensure they sit on top -->
                             <div class="modal-container" />
                             <!-- Layers on top of app -->
                             <NotificationDisplay />
                             <ConfirmationDisplay />
                             <PeekScreenDisplay />
                           </CustomThemeWrapper>
                         {/if}
                         {#if showDevTools}
                           <DevTools />
                         {/if}
                       </div>
                       {#if !$builderStore.inBuilder && $featuresStore.logoEnabled}
                         <FreeFooter />
                       {/if}
                     </div>
                     <!-- Preview and dev tools utilities -->
                     {#if $appStore.isDevApp}
                       <SelectionIndicator />
                     {/if}
                     {#if $builderStore.inBuilder || $devToolsStore.allowSelection}
                       <HoverIndicator />
                     {/if}
                     {#if $builderStore.inBuilder}
                       <DNDHandler />
                       <GridDNDHandler />
                     {/if}
                   </div>
                 </SnippetsProvider>
               </QueryParamsProvider>
             </RowSelectionProvider>
           </StateBindingsProvider>
         </UserBindingsProvider>
       </DeviceBindingsProvider>
     {/if}
   </div>
   <KeyboardManager />
+{/if}

 <style>
   #spectrum-root {

View File

@@ -11,6 +11,7 @@
   export let allowEditRows = true
   export let allowDeleteRows = true
   export let stripeRows = false
+  export let quiet = false
   export let initialFilter = null
   export let initialSortColumn = null
   export let initialSortOrder = null
@@ -117,6 +118,7 @@
     datasource={table}
     {API}
     {stripeRows}
+    {quiet}
     {initialFilter}
     {initialSortColumn}
     {initialSortOrder}

View File

@@ -67,6 +67,11 @@
   const removeFilter = id => {
     filters = filters.filter(field => field.id !== id)
+
+    // Clear all filters when no fields are specified
+    if (filters.length === 1 && filters[0].onEmptyFilter) {
+      filters = []
+    }
   }

   const duplicateFilter = id => {
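
A hedged example of the state this guards against (field shapes assumed from the surrounding code): deleting the last real filter would otherwise strand the onEmptyFilter marker as the sole entry.

let filters = [
  { id: "f1", field: "name", operator: "equal", value: "foo" },
  { id: "f2", onEmptyFilter: "all" },
]
// removeFilter("f1") → [{ id: "f2", onEmptyFilter: "all" }]
// the new check sees length === 1 with onEmptyFilter set and resets to []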

View File

@@ -43,6 +43,9 @@
   on:mouseup
   on:click
   on:contextmenu
+  on:touchstart
+  on:touchend
+  on:touchcancel
   {style}
 >
   {#if error}

View File

@@ -18,7 +18,6 @@
   export let column
   export let idx
-  export let orderable = true

   const {
     reorder,
@@ -66,6 +65,7 @@
   $: resetSearchValue(column.name)
   $: searching = searchValue != null
   $: debouncedUpdateFilter(searchValue)
+  $: orderable = !column.primaryDisplay

   const getSortingLabels = type => {
     switch (type) {
@@ -112,16 +112,17 @@
   }

   const onMouseDown = e => {
-    if (e.button === 0 && orderable) {
+    if ((e.touches?.length || e.button === 0) && orderable) {
       timeout = setTimeout(() => {
         reorder.actions.startReordering(column.name, e)
       }, 200)
     }
   }

-  const onMouseUp = e => {
-    if (e.button === 0 && orderable) {
+  const onMouseUp = () => {
+    if (timeout) {
       clearTimeout(timeout)
+      timeout = null
     }
   }
@@ -258,6 +259,9 @@
 <GridCell
   on:mousedown={onMouseDown}
   on:mouseup={onMouseUp}
+  on:touchstart={onMouseDown}
+  on:touchend={onMouseUp}
+  on:touchcancel={onMouseUp}
   on:contextmenu={onContextMenu}
   width={column.width}
   left={column.left}
@@ -347,7 +351,8 @@
     <MenuItem
       icon="Label"
       on:click={makeDisplayColumn}
-      disabled={idx === "sticky" || !canBeDisplayColumn(column.schema.type)}
+      disabled={column.primaryDisplay ||
+        !canBeDisplayColumn(column.schema.type)}
     >
       Use as display column
     </MenuItem>
@@ -378,7 +383,7 @@
       Move right
     </MenuItem>
     <MenuItem
-      disabled={idx === "sticky" || !$config.showControls}
+      disabled={column.primaryDisplay || !$config.showControls}
       icon="VisibilityOff"
       on:click={hideColumn}
     >

View File

@@ -81,6 +81,7 @@
             size="S"
             value={column.visible}
             on:change={e => toggleVisibility(column, e.detail)}
+            disabled={column.primaryDisplay}
           />
         {/each}
       </div>

View File

@@ -39,6 +39,7 @@
   export let canEditColumns = true
   export let canSaveSchema = true
   export let stripeRows = false
+  export let quiet = false
   export let collaboration = true
   export let showAvatars = true
   export let showControls = true
@@ -91,6 +92,7 @@
     canEditColumns,
     canSaveSchema,
     stripeRows,
+    quiet,
     collaboration,
     showAvatars,
     showControls,
@@ -124,6 +126,7 @@
   class:is-resizing={$isResizing}
   class:is-reordering={$isReordering}
   class:stripe={stripeRows}
+  class:quiet
   on:mouseenter={() => gridFocused.set(true)}
   on:mouseleave={() => gridFocused.set(false)}
   style="--row-height:{$rowHeight}px; --default-row-height:{DefaultRowHeight}px; --gutter-width:{GutterWidth}px; --max-cell-render-height:{MaxCellRenderHeight}px; --max-cell-render-width-overflow:{MaxCellRenderWidthOverflow}px; --content-lines:{$contentLines};"
@@ -331,4 +334,9 @@
   .grid-data-outer :global(.spectrum-Checkbox-partialCheckmark) {
     transition: none;
   }
+
+  /* Overrides */
+  .grid.quiet :global(.grid-data-content .row > .cell:not(:last-child)) {
+    border-right: none;
+  }
 </style>

View File

@@ -30,6 +30,7 @@
     refreshing,
     config,
     filter,
+    inlineFilters,
     columnRenderMap,
   } = getContext("grid")
@@ -157,7 +158,11 @@
 <!-- svelte-ignore a11y-click-events-have-key-events -->
 <TempTooltip
   text="Click here to create your first row"
-  condition={hasNoRows && $loaded && !$filter?.length && !$refreshing}
+  condition={hasNoRows &&
+    $loaded &&
+    !$filter?.length &&
+    !$inlineFilters?.length &&
+    !$refreshing}
   type={TooltipType.Info}
 >
   {#if !visible && !selectedRowCount && $config.canAddRows}

View File

@@ -20,3 +20,10 @@ export const getColumnIcon = column => {
   return result || "Text"
 }
+
+export const parseEventLocation = e => {
+  return {
+    x: e.clientX ?? e.touches?.[0]?.clientX,
+    y: e.clientY ?? e.touches?.[0]?.clientY,
+  }
+}
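
This helper lets the grid handlers treat mouse and touch input uniformly; a small sketch of both paths (hypothetical event objects):

parseEventLocation({ clientX: 10, clientY: 20 }) // mouse event → { x: 10, y: 20 }
parseEventLocation({ touches: [{ clientX: 10, clientY: 20 }] }) // touch event → { x: 10, y: 20 }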

View File

@@ -21,6 +21,7 @@
     class="resize-slider"
     class:visible={activeColumn === $stickyColumn.name}
     on:mousedown={e => resize.actions.startResizing($stickyColumn, e)}
+    on:touchstart={e => resize.actions.startResizing($stickyColumn, e)}
     on:dblclick={() => resize.actions.resetSize($stickyColumn)}
     style="left:{GutterWidth + $stickyColumn.width}px;"
   >
@@ -32,6 +33,7 @@
     class="resize-slider"
     class:visible={activeColumn === column.name}
     on:mousedown={e => resize.actions.startResizing(column, e)}
+    on:touchstart={e => resize.actions.startResizing(column, e)}
     on:dblclick={() => resize.actions.resetSize(column)}
     style={getStyle(column, offset, $scrollLeft)}
   >

View File

@@ -2,6 +2,7 @@
   import { getContext } from "svelte"
   import { domDebounce } from "../../../utils/utils"
   import { DefaultRowHeight, ScrollBarSize } from "../lib/constants"
+  import { parseEventLocation } from "../lib/utils"

   const {
     scroll,
@@ -53,17 +54,10 @@
     }
   }

-  const getLocation = e => {
-    return {
-      y: e.touches?.[0]?.clientY ?? e.clientY,
-      x: e.touches?.[0]?.clientX ?? e.clientX,
-    }
-  }
-
   // V scrollbar drag handlers
   const startVDragging = e => {
     e.preventDefault()
-    initialMouse = getLocation(e).y
+    initialMouse = parseEventLocation(e).y
     initialScroll = $scrollTop
     document.addEventListener("mousemove", moveVDragging)
     document.addEventListener("touchmove", moveVDragging)
@@ -73,7 +67,7 @@
     closeMenu()
   }
   const moveVDragging = domDebounce(e => {
-    const delta = getLocation(e).y - initialMouse
+    const delta = parseEventLocation(e).y - initialMouse
     const weight = delta / availHeight
     const newScrollTop = initialScroll + weight * $maxScrollTop
     scroll.update(state => ({
@@ -92,7 +86,7 @@
   // H scrollbar drag handlers
   const startHDragging = e => {
     e.preventDefault()
-    initialMouse = getLocation(e).x
+    initialMouse = parseEventLocation(e).x
     initialScroll = $scrollLeft
     document.addEventListener("mousemove", moveHDragging)
     document.addEventListener("touchmove", moveHDragging)
@@ -102,7 +96,7 @@
     closeMenu()
   }
   const moveHDragging = domDebounce(e => {
-    const delta = getLocation(e).x - initialMouse
+    const delta = parseEventLocation(e).x - initialMouse
     const weight = delta / availWidth
     const newScrollLeft = initialScroll + weight * $maxScrollLeft
     scroll.update(state => ({

View File

@@ -48,22 +48,28 @@ export const createStores = () => {
 export const deriveStores = context => {
   const { columns, stickyColumn } = context

-  // Derive if we have any normal columns
-  const hasNonAutoColumn = derived(
+  // Quick access to all columns
+  const allColumns = derived(
     [columns, stickyColumn],
     ([$columns, $stickyColumn]) => {
       let allCols = $columns || []
       if ($stickyColumn) {
         allCols = [...allCols, $stickyColumn]
       }
-      const normalCols = allCols.filter(column => {
-        return !column.schema?.autocolumn
-      })
-      return normalCols.length > 0
+      return allCols
     }
   )

+  // Derive if we have any normal columns
+  const hasNonAutoColumn = derived(allColumns, $allColumns => {
+    const normalCols = $allColumns.filter(column => {
+      return !column.schema?.autocolumn
+    })
+    return normalCols.length > 0
+  })
+
   return {
+    allColumns,
     hasNonAutoColumn,
   }
 }
@@ -142,24 +148,26 @@ export const createActions = context => {
 }

 export const initialise = context => {
-  const { definition, columns, stickyColumn, enrichedSchema } = context
+  const {
+    definition,
+    columns,
+    stickyColumn,
+    allColumns,
+    enrichedSchema,
+    compact,
+  } = context

   // Merge new schema fields with existing schema in order to preserve widths
-  enrichedSchema.subscribe($enrichedSchema => {
+  const processColumns = $enrichedSchema => {
     if (!$enrichedSchema) {
       columns.set([])
       stickyColumn.set(null)
       return
     }
     const $definition = get(definition)
-    const $columns = get(columns)
+    const $allColumns = get(allColumns)
     const $stickyColumn = get(stickyColumn)
-
-    // Generate array of all columns to easily find pre-existing columns
-    let allColumns = $columns || []
-    if ($stickyColumn) {
-      allColumns.push($stickyColumn)
-    }
+    const $compact = get(compact)

     // Find primary display
     let primaryDisplay
@@ -171,7 +179,7 @@ export const initialise = context => {
     // Get field list
     let fields = []
     Object.keys($enrichedSchema).forEach(field => {
-      if (field !== primaryDisplay) {
+      if ($compact || field !== primaryDisplay) {
         fields.push(field)
       }
     })
@@ -181,7 +189,7 @@ export const initialise = context => {
     fields
       .map(field => {
         const fieldSchema = $enrichedSchema[field]
-        const oldColumn = allColumns?.find(x => x.name === field)
+        const oldColumn = $allColumns?.find(x => x.name === field)
         return {
           name: field,
           label: fieldSchema.displayName || field,
@@ -189,9 +197,18 @@ export const initialise = context => {
           width: fieldSchema.width || oldColumn?.width || DefaultColumnWidth,
           visible: fieldSchema.visible ?? true,
           order: fieldSchema.order ?? oldColumn?.order,
+          primaryDisplay: field === primaryDisplay,
         }
       })
       .sort((a, b) => {
+        // If we don't have a pinned column then primary display will be in
+        // the normal columns list, and should be first
+        if (a.name === primaryDisplay) {
+          return -1
+        } else if (b.name === primaryDisplay) {
+          return 1
+        }
+
         // Sort by order first
         const orderA = a.order
         const orderB = b.order
@@ -214,12 +231,12 @@ export const initialise = context => {
     )

     // Update sticky column
-    if (!primaryDisplay) {
+    if ($compact || !primaryDisplay) {
       stickyColumn.set(null)
       return
     }
+
     const stickySchema = $enrichedSchema[primaryDisplay]
-    const oldStickyColumn = allColumns?.find(x => x.name === primaryDisplay)
+    const oldStickyColumn = $allColumns?.find(x => x.name === primaryDisplay)
     stickyColumn.set({
       name: primaryDisplay,
       label: stickySchema.displayName || primaryDisplay,
@@ -228,6 +245,13 @@ export const initialise = context => {
       visible: true,
       order: 0,
       left: GutterWidth,
+      primaryDisplay: true,
     })
-  })
+  }
+
+  // Process columns when schema changes
+  enrichedSchema.subscribe(processColumns)
+
+  // Process columns when compact flag changes
+  compact.subscribe(() => processColumns(get(enrichedSchema)))
 }

View File

@ -1,4 +1,5 @@
import { get, writable, derived } from "svelte/store" import { get, writable, derived } from "svelte/store"
import { parseEventLocation } from "../lib/utils"
const reorderInitialState = { const reorderInitialState = {
sourceColumn: null, sourceColumn: null,
@ -33,6 +34,7 @@ export const createActions = context => {
stickyColumn, stickyColumn,
ui, ui,
maxScrollLeft, maxScrollLeft,
width,
} = context } = context
let autoScrollInterval let autoScrollInterval
@ -55,6 +57,11 @@ export const createActions = context => {
x: 0, x: 0,
column: $stickyColumn.name, column: $stickyColumn.name,
}) })
} else if (!$visibleColumns[0].primaryDisplay) {
breakpoints.unshift({
x: 0,
column: null,
})
} }
// Update state // Update state
@ -69,6 +76,9 @@ export const createActions = context => {
// Add listeners to handle mouse movement // Add listeners to handle mouse movement
document.addEventListener("mousemove", onReorderMouseMove) document.addEventListener("mousemove", onReorderMouseMove)
document.addEventListener("mouseup", stopReordering) document.addEventListener("mouseup", stopReordering)
document.addEventListener("touchmove", onReorderMouseMove)
document.addEventListener("touchend", stopReordering)
document.addEventListener("touchcancel", stopReordering)
// Trigger a move event immediately so ensure a candidate column is chosen // Trigger a move event immediately so ensure a candidate column is chosen
onReorderMouseMove(e) onReorderMouseMove(e)
@ -77,7 +87,7 @@ export const createActions = context => {
// Callback when moving the mouse when reordering columns // Callback when moving the mouse when reordering columns
const onReorderMouseMove = e => { const onReorderMouseMove = e => {
// Immediately handle the current position // Immediately handle the current position
const x = e.clientX const { x } = parseEventLocation(e)
reorder.update(state => ({ reorder.update(state => ({
...state, ...state,
latestX: x, latestX: x,
@ -86,7 +96,7 @@ export const createActions = context => {
// Check if we need to start auto-scrolling // Check if we need to start auto-scrolling
const $reorder = get(reorder) const $reorder = get(reorder)
const proximityCutoff = 140 const proximityCutoff = Math.min(140, get(width) / 6)
const speedFactor = 8 const speedFactor = 8
const rightProximity = Math.max(0, $reorder.gridLeft + $reorder.width - x) const rightProximity = Math.max(0, $reorder.gridLeft + $reorder.width - x)
const leftProximity = Math.max(0, x - $reorder.gridLeft) const leftProximity = Math.max(0, x - $reorder.gridLeft)
@ -158,19 +168,22 @@ export const createActions = context => {
// Ensure auto-scrolling is stopped // Ensure auto-scrolling is stopped
stopAutoScroll() stopAutoScroll()
// Swap position of columns
let { sourceColumn, targetColumn } = get(reorder)
moveColumn(sourceColumn, targetColumn)
// Reset state
reorder.set(reorderInitialState)
// Remove event handlers // Remove event handlers
document.removeEventListener("mousemove", onReorderMouseMove) document.removeEventListener("mousemove", onReorderMouseMove)
document.removeEventListener("mouseup", stopReordering) document.removeEventListener("mouseup", stopReordering)
document.removeEventListener("touchmove", onReorderMouseMove)
document.removeEventListener("touchend", stopReordering)
document.removeEventListener("touchcancel", stopReordering)
// Save column changes // Ensure there's actually a change
await columns.actions.saveChanges() let { sourceColumn, targetColumn } = get(reorder)
if (sourceColumn !== targetColumn) {
moveColumn(sourceColumn, targetColumn)
await columns.actions.saveChanges()
}
// Reset state
reorder.set(reorderInitialState)
} }
// Moves a column after another columns. // Moves a column after another columns.
@ -185,8 +198,7 @@ export const createActions = context => {
if (--targetIdx < sourceIdx) { if (--targetIdx < sourceIdx) {
targetIdx++ targetIdx++
} }
state.splice(targetIdx, 0, removed[0]) return state.toSpliced(targetIdx, 0, removed[0])
return state.slice()
}) })
} }
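The column move above now uses Array.prototype.toSpliced, the non-mutating ES2023 counterpart of splice(), which removes the manual state.slice() copy the old code needed to trigger a Svelte store update. A quick standalone illustration (Node 20+):

const cols = ["a", "b", "c", "d"]
// toSpliced(start, deleteCount, ...items) returns a fresh array and leaves
// the original untouched, so store subscribers see a new reference
const moved = cols.toSpliced(2, 0, "x")
console.log(moved) // [ 'a', 'b', 'x', 'c', 'd' ]
console.log(cols) // [ 'a', 'b', 'c', 'd' ]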

View File

@ -1,5 +1,6 @@
import { writable, get, derived } from "svelte/store" import { writable, get, derived } from "svelte/store"
import { MinColumnWidth, DefaultColumnWidth } from "../lib/constants" import { MinColumnWidth, DefaultColumnWidth } from "../lib/constants"
import { parseEventLocation } from "../lib/utils"
const initialState = { const initialState = {
initialMouseX: null, initialMouseX: null,
@ -24,8 +25,11 @@ export const createActions = context => {
// Starts resizing a certain column // Starts resizing a certain column
const startResizing = (column, e) => { const startResizing = (column, e) => {
const { x } = parseEventLocation(e)
// Prevent propagation to stop reordering triggering // Prevent propagation to stop reordering triggering
e.stopPropagation() e.stopPropagation()
e.preventDefault()
ui.actions.blur() ui.actions.blur()
// Find and cache index // Find and cache index
@ -39,7 +43,7 @@ export const createActions = context => {
width: column.width, width: column.width,
left: column.left, left: column.left,
initialWidth: column.width, initialWidth: column.width,
initialMouseX: e.clientX, initialMouseX: x,
column: column.name, column: column.name,
columnIdx, columnIdx,
}) })
@ -47,12 +51,16 @@ export const createActions = context => {
// Add mouse event listeners to handle resizing // Add mouse event listeners to handle resizing
document.addEventListener("mousemove", onResizeMouseMove) document.addEventListener("mousemove", onResizeMouseMove)
document.addEventListener("mouseup", stopResizing) document.addEventListener("mouseup", stopResizing)
document.addEventListener("touchmove", onResizeMouseMove)
document.addEventListener("touchend", stopResizing)
document.addEventListener("touchcancel", stopResizing)
} }
// Handler for moving the mouse to resize columns // Handler for moving the mouse to resize columns
const onResizeMouseMove = e => { const onResizeMouseMove = e => {
const { initialMouseX, initialWidth, width, columnIdx } = get(resize) const { initialMouseX, initialWidth, width, columnIdx } = get(resize)
const dx = e.clientX - initialMouseX const { x } = parseEventLocation(e)
const dx = x - initialMouseX
const newWidth = Math.round(Math.max(MinColumnWidth, initialWidth + dx)) const newWidth = Math.round(Math.max(MinColumnWidth, initialWidth + dx))
// Ignore small changes // Ignore small changes
@ -87,6 +95,9 @@ export const createActions = context => {
resize.set(initialState) resize.set(initialState)
document.removeEventListener("mousemove", onResizeMouseMove) document.removeEventListener("mousemove", onResizeMouseMove)
document.removeEventListener("mouseup", stopResizing) document.removeEventListener("mouseup", stopResizing)
document.removeEventListener("touchmove", onResizeMouseMove)
document.removeEventListener("touchend", stopResizing)
document.removeEventListener("touchcancel", stopResizing)
// Persist width if it changed // Persist width if it changed
if ($resize.width !== $resize.initialWidth) { if ($resize.width !== $resize.initialWidth) {

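Both the reorder and resize stores now read coordinates through parseEventLocation, so the same handlers serve mouse and touch input. The helper itself lives in ../lib/utils and is not part of this diff; a minimal sketch of what it presumably does, inferred from the call sites:

// Hypothetical reconstruction - the real util may differ in detail
export const parseEventLocation = (e: MouseEvent | TouchEvent) => {
  if ("touches" in e) {
    // touchend/touchcancel expose final coordinates via changedTouches,
    // touchstart/touchmove via the live touches list
    const touch = e.touches[0] || e.changedTouches[0]
    return { x: touch.clientX, y: touch.clientY }
  }
  return { x: e.clientX, y: e.clientY }
}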
View File

@ -98,7 +98,7 @@ export const deriveStores = context => {
// Derive whether we should use the compact UI, depending on width // Derive whether we should use the compact UI, depending on width
const compact = derived([stickyColumn, width], ([$stickyColumn, $width]) => { const compact = derived([stickyColumn, width], ([$stickyColumn, $width]) => {
return ($stickyColumn?.width || 0) + $width + GutterWidth < 1100 return ($stickyColumn?.width || 0) + $width + GutterWidth < 800
}) })
return { return {

@ -1 +1 @@
Subproject commit 06b1064f7e2f7cac5d4bef2ee999796a2a1f0f2c Subproject commit dff7b5a9dd1fd770f8a48fb8e6df1740be605f18

View File

@ -61,14 +61,17 @@
"@google-cloud/firestore": "6.8.0", "@google-cloud/firestore": "6.8.0",
"@koa/router": "8.0.8", "@koa/router": "8.0.8",
"@socket.io/redis-adapter": "^8.2.1", "@socket.io/redis-adapter": "^8.2.1",
"@types/xml2js": "^0.4.14",
"airtable": "0.10.1", "airtable": "0.10.1",
"arangojs": "7.2.0", "arangojs": "7.2.0",
"archiver": "7.0.1", "archiver": "7.0.1",
"aws-sdk": "2.1030.0", "aws-sdk": "2.1030.0",
"bcrypt": "5.1.0", "bcrypt": "5.1.0",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
"bl": "^6.0.12",
"bull": "4.10.1", "bull": "4.10.1",
"chokidar": "3.5.3", "chokidar": "3.5.3",
"content-disposition": "^0.5.4",
"cookies": "0.8.0", "cookies": "0.8.0",
"csvtojson": "2.0.10", "csvtojson": "2.0.10",
"curlconverter": "3.21.0", "curlconverter": "3.21.0",
@ -95,7 +98,7 @@
"memorystream": "0.3.1", "memorystream": "0.3.1",
"mongodb": "^6.3.0", "mongodb": "^6.3.0",
"mssql": "10.0.1", "mssql": "10.0.1",
"mysql2": "3.5.2", "mysql2": "3.9.7",
"node-fetch": "2.6.7", "node-fetch": "2.6.7",
"object-sizeof": "2.6.1", "object-sizeof": "2.6.1",
"open": "8.4.0", "open": "8.4.0",

View File

@ -4,6 +4,7 @@ services:
# user: sa # user: sa
# database: master # database: master
mssql: mssql:
# platform: linux/amd64
image: bb/mssql image: bb/mssql
build: build:
context: . context: .

View File

@ -76,7 +76,7 @@ function writeFile(output: any, filename: string) {
console.log(`Wrote spec to ${path}`) console.log(`Wrote spec to ${path}`)
return path return path
} catch (err) { } catch (err) {
console.error(err) console.error("Error writing spec file", err)
} }
} }

View File

@ -6,7 +6,19 @@ import sdk from "../../../sdk"
import tk from "timekeeper" import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests" import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview, SourceName } from "@budibase/types" import {
Datasource,
FieldSchema,
FieldSubtype,
FieldType,
QueryPreview,
RelationshipType,
SourceName,
Table,
TableSchema,
} from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
@ -223,4 +235,152 @@ describe("/datasources", () => {
}) })
}) })
}) })
describe.each([
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("fetch schema (%s)", (_, dsProvider) => {
beforeAll(async () => {
datasource = await config.api.datasource.create(await dsProvider)
})
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
type SupportedSqlTypes =
| FieldType.STRING
| FieldType.BARCODEQR
| FieldType.LONGFORM
| FieldType.OPTIONS
| FieldType.DATETIME
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.FORMULA
| FieldType.BIGINT
| FieldType.BB_REFERENCE
| FieldType.LINK
| FieldType.ARRAY
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: { allowEmpty: false },
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema(datasourceId)
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = {
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = expect.objectContaining({
...field,
})
return acc
},
{}
),
}
return acc
},
{}
),
_rev: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
}) })
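The expected object in this test leans on jest's asymmetric matchers: expect.not.stringMatching, expect.objectContaining and expect.any can be nested inside the structure handed to toEqual, which is how the assertion reads as "primaryDisplay was re-picked, every saved field survived". The same pattern in isolation (runnable under jest):

it("accepts any value except the previous one", () => {
  const table = { primaryDisplay: "fullName", _rev: "2-abc" }
  expect(table).toEqual({
    // any string other than exactly "name" passes here
    primaryDisplay: expect.not.stringMatching(/^name$/),
    _rev: expect.any(String),
  })
})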

View File

@ -34,7 +34,7 @@ describe.each([
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => { ])("/tables (%s)", (_, dsProvider) => {
let isInternal: boolean const isInternal: boolean = !dsProvider
let datasource: Datasource | undefined let datasource: Datasource | undefined
let config = setup.getConfig() let config = setup.getConfig()
@ -44,9 +44,6 @@ describe.each([
await config.init() await config.init()
if (dsProvider) { if (dsProvider) {
datasource = await config.api.datasource.create(await dsProvider) datasource = await config.api.datasource.create(await dsProvider)
isInternal = false
} else {
isInternal = true
} }
}) })

View File

@ -7,6 +7,7 @@ import {
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
AutomationFeature, AutomationFeature,
AutomationCustomIOType,
} from "@budibase/types" } from "@budibase/types"
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
@ -72,10 +73,10 @@ export const definition: AutomationStepSchema = {
title: "Location", title: "Location",
dependsOn: "addInvite", dependsOn: "addInvite",
}, },
url: { attachments: {
type: AutomationIOType.STRING, type: AutomationIOType.ATTACHMENT,
title: "URL", customType: AutomationCustomIOType.MULTI_ATTACHMENTS,
dependsOn: "addInvite", title: "Attachments",
}, },
}, },
required: ["to", "from", "subject", "contents"], required: ["to", "from", "subject", "contents"],
@ -110,11 +111,13 @@ export async function run({ inputs }: AutomationStepInput) {
summary, summary,
location, location,
url, url,
attachments,
} = inputs } = inputs
if (!contents) { if (!contents) {
contents = "<h1>No content</h1>" contents = "<h1>No content</h1>"
} }
to = to || undefined to = to || undefined
try { try {
let response = await sendSmtpEmail({ let response = await sendSmtpEmail({
to, to,
@ -124,6 +127,7 @@ export async function run({ inputs }: AutomationStepInput) {
cc, cc,
bcc, bcc,
automation: true, automation: true,
attachments,
invite: addInvite invite: addInvite
? { ? {
startTime, startTime,

View File

@ -50,6 +50,10 @@ describe("test the outgoing webhook action", () => {
cc: "cc", cc: "cc",
bcc: "bcc", bcc: "bcc",
addInvite: true, addInvite: true,
attachments: [
{ url: "attachment1", filename: "attachment1.txt" },
{ url: "attachment2", filename: "attachment2.txt" },
],
...invite, ...invite,
} }
let resp = generateResponse(inputs.to, inputs.from) let resp = generateResponse(inputs.to, inputs.from)
@ -69,6 +73,10 @@ describe("test the outgoing webhook action", () => {
bcc: "bcc", bcc: "bcc",
invite, invite,
automation: true, automation: true,
attachments: [
{ url: "attachment1", filename: "attachment1.txt" },
{ url: "attachment2", filename: "attachment2.txt" },
],
}) })
}) })
}) })

View File

@ -472,14 +472,13 @@ class InternalBuilder {
): Knex.QueryBuilder { ): Knex.QueryBuilder {
const tableName = endpoint.entityId const tableName = endpoint.entityId
const tableAlias = aliases?.[tableName] const tableAlias = aliases?.[tableName]
let table: string | Record<string, string> = tableName
if (tableAlias) { const query = knex(
table = { [tableAlias]: tableName } this.tableNameWithSchema(tableName, {
} alias: tableAlias,
let query = knex(table) schema: endpoint.schema,
if (endpoint.schema) { })
query = query.withSchema(endpoint.schema) )
}
return query return query
} }
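The alias/schema branching is collapsed into one tableNameWithSchema call. That helper is defined elsewhere in InternalBuilder and not shown in this diff; a hedged sketch of what it plausibly builds, given that knex accepts both "schema.table" and "table as alias" string forms:

// Hypothetical reconstruction based on the branches it replaces
function tableNameWithSchema(
  tableName: string,
  opts: { alias?: string; schema?: string } = {}
): string {
  let name = tableName
  if (opts.schema) {
    name = `${opts.schema}.${name}` // stands in for query.withSchema(schema)
  }
  if (opts.alias) {
    name = `${name} as ${opts.alias}` // stands in for { [alias]: tableName }
  }
  return name
}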

View File

@ -378,7 +378,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
try { try {
await connection.close() await connection.close()
} catch (err) { } catch (err) {
console.error(err) console.error("Error closing Oracle connection", err)
} }
} }
} }

View File

@ -21,6 +21,10 @@ import { performance } from "perf_hooks"
import FormData from "form-data" import FormData from "form-data"
import { URLSearchParams } from "url" import { URLSearchParams } from "url"
import { blacklist } from "@budibase/backend-core" import { blacklist } from "@budibase/backend-core"
import { handleFileResponse, handleXml } from "./utils"
import { parse } from "content-disposition"
import path from "path"
import { Builder as XmlBuilder } from "xml2js"
const BodyTypes = { const BodyTypes = {
NONE: "none", NONE: "none",
@ -57,8 +61,6 @@ const coreFields = {
}, },
} }
const { parseStringPromise: xmlParser, Builder: XmlBuilder } = require("xml2js")
const SCHEMA: Integration = { const SCHEMA: Integration = {
docs: "https://github.com/node-fetch/node-fetch", docs: "https://github.com/node-fetch/node-fetch",
description: description:
@ -129,42 +131,44 @@ class RestIntegration implements IntegrationBase {
} }
async parseResponse(response: any, pagination: PaginationConfig | null) { async parseResponse(response: any, pagination: PaginationConfig | null) {
let data, raw, headers let data, raw, headers, filename
const contentType = response.headers.get("content-type") || "" const contentType = response.headers.get("content-type") || ""
const contentDisposition = response.headers.get("content-disposition") || ""
if (
contentDisposition.includes("attachment") ||
contentDisposition.includes("form-data")
) {
filename =
path.basename(parse(contentDisposition).parameters?.filename) || ""
}
try { try {
if (response.status === 204) { if (filename) {
data = [] return handleFileResponse(response, filename, this.startTimeMs)
raw = []
} else if (contentType.includes("application/json")) {
data = await response.json()
raw = JSON.stringify(data)
} else if (
contentType.includes("text/xml") ||
contentType.includes("application/xml")
) {
const rawXml = await response.text()
data =
(await xmlParser(rawXml, {
explicitArray: false,
trim: true,
explicitRoot: false,
})) || {}
// if there is only one structure and it's an array, return the array so it appears as rows
const keys = Object.keys(data)
if (keys.length === 1 && Array.isArray(data[keys[0]])) {
data = data[keys[0]]
}
raw = rawXml
} else if (contentType.includes("application/pdf")) {
data = await response.arrayBuffer() // Save PDF as ArrayBuffer
raw = Buffer.from(data)
} else { } else {
data = await response.text() if (response.status === 204) {
raw = data data = []
raw = []
} else if (contentType.includes("application/json")) {
data = await response.json()
raw = JSON.stringify(data)
} else if (
contentType.includes("text/xml") ||
contentType.includes("application/xml")
) {
let xmlResponse = await handleXml(response)
data = xmlResponse.data
raw = xmlResponse.rawXml
} else {
data = await response.text()
raw = data
}
} }
} catch (err) { } catch (err) {
throw "Failed to parse response body." throw `Failed to parse response body: ${err}`
} }
const size = formatBytes( const size = formatBytes(
response.headers.get("content-length") || Buffer.byteLength(raw, "utf8") response.headers.get("content-length") || Buffer.byteLength(raw, "utf8")
) )
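The filename sniffing above uses the content-disposition package added to package.json earlier in this diff. Roughly how its parse() behaves on a typical download header:

import { parse } from "content-disposition"
import path from "path"

const header = 'attachment; filename="reports/march.pdf"'
const { type, parameters } = parse(header)
console.log(type) // "attachment"
// path.basename guards against path segments smuggled into the filename
console.log(path.basename(parameters.filename)) // "march.pdf"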

View File

@ -13,9 +13,23 @@ jest.mock("node-fetch", () => {
})) }))
}) })
import fetch from "node-fetch" jest.mock("@budibase/backend-core", () => {
const core = jest.requireActual("@budibase/backend-core")
return {
...core,
context: {
...core.context,
getProdAppId: jest.fn(() => "app-id"),
},
}
})
jest.mock("uuid", () => ({ v4: () => "00000000-0000-0000-0000-000000000000" }))
import { default as RestIntegration } from "../rest" import { default as RestIntegration } from "../rest"
import { RestAuthType } from "@budibase/types" import { RestAuthType } from "@budibase/types"
import fetch from "node-fetch"
import { objectStoreTestProviders } from "@budibase/backend-core/tests"
import { Readable } from "stream"
const FormData = require("form-data") const FormData = require("form-data")
const { URLSearchParams } = require("url") const { URLSearchParams } = require("url")
@ -611,4 +625,104 @@ describe("REST Integration", () => {
expect(calledConfig.headers).toEqual({}) expect(calledConfig.headers).toEqual({})
expect(calledConfig.agent.options.rejectUnauthorized).toBe(false) expect(calledConfig.agent.options.rejectUnauthorized).toBe(false)
}) })
describe("File Handling", () => {
beforeAll(async () => {
jest.unmock("aws-sdk")
await objectStoreTestProviders.minio.start()
})
afterAll(async () => {
await objectStoreTestProviders.minio.stop()
})
it("uploads file to object store and returns signed URL", async () => {
const responseData = Buffer.from("test file content")
const filename = "test.tar.gz"
const contentType = "application/gzip"
const mockReadable = new Readable()
mockReadable.push(responseData)
mockReadable.push(null)
;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
Promise.resolve({
headers: {
raw: () => ({
"content-type": [contentType],
"content-disposition": [`attachment; filename="${filename}"`],
}),
get: (header: any) => {
if (header === "content-type") return contentType
if (header === "content-disposition")
return `attachment; filename="${filename}"`
},
},
body: mockReadable,
})
)
const query = {
path: "api",
}
const response = await config.integration.read(query)
expect(response.data).toEqual({
size: responseData.byteLength,
name: "00000000-0000-0000-0000-000000000000.tar.gz",
url: expect.stringContaining(
"/files/signed/tmp-file-attachments/app-id/00000000-0000-0000-0000-000000000000.tar.gz"
),
extension: "tar.gz",
key: expect.stringContaining(
"app-id/00000000-0000-0000-0000-000000000000.tar.gz"
),
})
})
it("uploads file with non ascii filename to object store and returns signed URL", async () => {
const responseData = Buffer.from("test file content")
const contentType = "text/plain"
const mockReadable = new Readable()
mockReadable.push(responseData)
mockReadable.push(null)
;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
Promise.resolve({
headers: {
raw: () => ({
"content-type": [contentType],
"content-disposition": [
// eslint-disable-next-line no-useless-escape
`attachment; filename="£ and ? rates.pdf"; filename*=UTF-8'\'%C2%A3%20and%20%E2%82%AC%20rates.pdf`,
],
}),
get: (header: any) => {
if (header === "content-type") return contentType
if (header === "content-disposition")
// eslint-disable-next-line no-useless-escape
return `attachment; filename="£ and ? rates.pdf"; filename*=UTF-8'\'%C2%A3%20and%20%E2%82%AC%20rates.pdf`
},
},
body: mockReadable,
})
)
const query = {
path: "api",
}
const response = await config.integration.read(query)
expect(response.data).toEqual({
size: responseData.byteLength,
name: "00000000-0000-0000-0000-000000000000.pdf",
url: expect.stringContaining(
"/files/signed/tmp-file-attachments/app-id/00000000-0000-0000-0000-000000000000.pdf"
),
extension: "pdf",
key: expect.stringContaining(
"app-id/00000000-0000-0000-0000-000000000000.pdf"
),
})
})
})
}) })
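The second test exercises RFC 5987 extended parameters: when both filename and filename* are present, content-disposition's parse() prefers filename* and percent-decodes it, so the non-ASCII characters survive even though the plain filename fallback degraded them to "?". A sketch with a canonical UTF-8'' header:

import { parse } from "content-disposition"

const header =
  "attachment; filename=\"£ and ? rates.pdf\";" +
  " filename*=UTF-8''%C2%A3%20and%20%E2%82%AC%20rates.pdf"
// the decoded filename* wins over the lossy plain fallback
console.log(parse(header).parameters.filename) // "£ and € rates.pdf"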

View File

@ -4,18 +4,45 @@ import {
Datasource, Datasource,
FieldType, FieldType,
TableSourceType, TableSourceType,
FieldSchema,
} from "@budibase/types" } from "@budibase/types"
import { context, objectStore } from "@budibase/backend-core"
import { v4 } from "uuid"
import { parseStringPromise as xmlParser } from "xml2js"
import { formatBytes } from "../../utilities"
import bl from "bl"
import env from "../../environment"
import { DocumentType, SEPARATOR } from "../../db/utils" import { DocumentType, SEPARATOR } from "../../db/utils"
import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants" import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { SWITCHABLE_TYPES, helpers } from "@budibase/shared-core" import { helpers, utils } from "@budibase/shared-core"
import env from "../../environment"
import { Knex } from "knex" import { Knex } from "knex"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ") const ENCODED_SPACE = encodeURIComponent(" ")
const SQL_NUMBER_TYPE_MAP = { type PrimitiveTypes =
| FieldType.STRING
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.DATETIME
| FieldType.JSON
| FieldType.BIGINT
| FieldType.OPTIONS
function isPrimitiveType(type: FieldType): type is PrimitiveTypes {
return [
FieldType.STRING,
FieldType.NUMBER,
FieldType.BOOLEAN,
FieldType.DATETIME,
FieldType.JSON,
FieldType.BIGINT,
FieldType.OPTIONS,
].includes(type)
}
const SQL_NUMBER_TYPE_MAP: Record<string, PrimitiveTypes> = {
integer: FieldType.NUMBER, integer: FieldType.NUMBER,
int: FieldType.NUMBER, int: FieldType.NUMBER,
decimal: FieldType.NUMBER, decimal: FieldType.NUMBER,
@ -35,7 +62,7 @@ const SQL_NUMBER_TYPE_MAP = {
smallmoney: FieldType.NUMBER, smallmoney: FieldType.NUMBER,
} }
const SQL_DATE_TYPE_MAP = { const SQL_DATE_TYPE_MAP: Record<string, PrimitiveTypes> = {
timestamp: FieldType.DATETIME, timestamp: FieldType.DATETIME,
time: FieldType.DATETIME, time: FieldType.DATETIME,
datetime: FieldType.DATETIME, datetime: FieldType.DATETIME,
@ -46,7 +73,7 @@ const SQL_DATE_TYPE_MAP = {
const SQL_DATE_ONLY_TYPES = ["date"] const SQL_DATE_ONLY_TYPES = ["date"]
const SQL_TIME_ONLY_TYPES = ["time"] const SQL_TIME_ONLY_TYPES = ["time"]
const SQL_STRING_TYPE_MAP = { const SQL_STRING_TYPE_MAP: Record<string, PrimitiveTypes> = {
varchar: FieldType.STRING, varchar: FieldType.STRING,
char: FieldType.STRING, char: FieldType.STRING,
nchar: FieldType.STRING, nchar: FieldType.STRING,
@ -58,22 +85,22 @@ const SQL_STRING_TYPE_MAP = {
text: FieldType.STRING, text: FieldType.STRING,
} }
const SQL_BOOLEAN_TYPE_MAP = { const SQL_BOOLEAN_TYPE_MAP: Record<string, PrimitiveTypes> = {
boolean: FieldType.BOOLEAN, boolean: FieldType.BOOLEAN,
bit: FieldType.BOOLEAN, bit: FieldType.BOOLEAN,
tinyint: FieldType.BOOLEAN, tinyint: FieldType.BOOLEAN,
} }
const SQL_OPTIONS_TYPE_MAP = { const SQL_OPTIONS_TYPE_MAP: Record<string, PrimitiveTypes> = {
"user-defined": FieldType.OPTIONS, "user-defined": FieldType.OPTIONS,
} }
const SQL_MISC_TYPE_MAP = { const SQL_MISC_TYPE_MAP: Record<string, PrimitiveTypes> = {
json: FieldType.JSON, json: FieldType.JSON,
bigint: FieldType.BIGINT, bigint: FieldType.BIGINT,
} }
const SQL_TYPE_MAP = { const SQL_TYPE_MAP: Record<string, PrimitiveTypes> = {
...SQL_NUMBER_TYPE_MAP, ...SQL_NUMBER_TYPE_MAP,
...SQL_DATE_TYPE_MAP, ...SQL_DATE_TYPE_MAP,
...SQL_STRING_TYPE_MAP, ...SQL_STRING_TYPE_MAP,
@ -239,14 +266,14 @@ export function generateColumnDefinition(config: {
constraints.inclusion = options constraints.inclusion = options
} }
const schema: any = { const schema: FieldSchema = {
type: foundType, type: foundType,
externalType, externalType,
autocolumn, autocolumn,
name, name,
constraints, constraints,
} }
if (foundType === FieldType.DATETIME) { if (schema.type === FieldType.DATETIME) {
schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lowerCaseType) schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lowerCaseType)
schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lowerCaseType) schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lowerCaseType)
} }
@ -274,49 +301,6 @@ export function isIsoDateString(str: string) {
return d.toISOString() === trimmedValue return d.toISOString() === trimmedValue
} }
/**
* This function will determine whether a column is a relationship and whether it
* is currently valid. The reason for the validity check is that tables can be deleted
* outside of Budibase control and if this is the case it will break Budibase relationships.
* The tableIds parameter is a list passed down from the main finalise tables function, which is
* based on the tables that have just been fetched. This will only really be used on fetches
* subsequent to the first one - i.e. if the user is periodically refreshing Budibase's knowledge of tables.
* @param column The column to check, to see if it is a valid relationship.
* @param tableIds The IDs of the tables which currently exist.
*/
function shouldCopyRelationship(
column: { type: FieldType.LINK; tableId?: string },
tableIds: string[]
) {
return (
column.type === FieldType.LINK &&
column.tableId &&
tableIds.includes(column.tableId)
)
}
/**
* Similar to the shouldCopyRelationship function, but this instead looks for options and boolean
* types. It is possible to switch a string -> options and a number -> boolean (and vice versa), so we need to make
* sure that these get copied over when tables are fetched. Also checks whether they are still valid: if a
* column has changed type in the external database then copying it over may not be possible.
* @param column The column to check for options or boolean type.
* @param fetchedColumn The fetched column to check for the type in the external database.
*/
function shouldCopySpecialColumn(
column: { type: FieldType },
fetchedColumn: { type: FieldType } | undefined
) {
const isFormula = column.type === FieldType.FORMULA
// column has been deleted, remove - formulas will never exist, always copy
if (!isFormula && column && !fetchedColumn) {
return false
}
const fetchedIsNumber =
!fetchedColumn || fetchedColumn.type === FieldType.NUMBER
return fetchedIsNumber && column.type === FieldType.BOOLEAN
}
/** /**
* Looks for columns which need to be copied over into the new table definitions, like relationships, * Looks for columns which need to be copied over into the new table definitions, like relationships,
* options types and views. * options types and views.
@ -338,6 +322,9 @@ function copyExistingPropsOver(
if (entities[tableName]?.created) { if (entities[tableName]?.created) {
table.created = entities[tableName]?.created table.created = entities[tableName]?.created
} }
if (entities[tableName]?.constrained) {
table.constrained = entities[tableName]?.constrained
}
table.views = entities[tableName].views table.views = entities[tableName].views
@ -346,45 +333,73 @@ function copyExistingPropsOver(
if (!Object.prototype.hasOwnProperty.call(existingTableSchema, key)) { if (!Object.prototype.hasOwnProperty.call(existingTableSchema, key)) {
continue continue
} }
const column = existingTableSchema[key] const column = existingTableSchema[key]
const existingColumnType = column?.type const existingColumnType = column?.type
const updatedColumnType = table.schema[key]?.type const updatedColumnType = table.schema[key]?.type
// If the db column type changed to a non-compatible one, we want to re-fetch it const keepIfType = (...validTypes: PrimitiveTypes[]) => {
if ( return (
updatedColumnType !== existingColumnType && isPrimitiveType(updatedColumnType) &&
!SWITCHABLE_TYPES[updatedColumnType]?.includes(existingColumnType) table.schema[key] &&
) { validTypes.includes(updatedColumnType)
continue )
} }
if ( let shouldKeepSchema = false
column.type === FieldType.LINK && switch (existingColumnType) {
!shouldCopyRelationship(column, tableIds) case FieldType.FORMULA:
) { case FieldType.AUTO:
continue case FieldType.INTERNAL:
shouldKeepSchema = true
break
case FieldType.LINK:
shouldKeepSchema =
existingColumnType === FieldType.LINK &&
tableIds.includes(column.tableId)
break
case FieldType.STRING:
case FieldType.OPTIONS:
case FieldType.LONGFORM:
case FieldType.BARCODEQR:
shouldKeepSchema = keepIfType(FieldType.STRING)
break
case FieldType.NUMBER:
case FieldType.BOOLEAN:
shouldKeepSchema = keepIfType(FieldType.BOOLEAN, FieldType.NUMBER)
break
case FieldType.ARRAY:
case FieldType.ATTACHMENTS:
case FieldType.ATTACHMENT_SINGLE:
case FieldType.JSON:
case FieldType.BB_REFERENCE:
shouldKeepSchema = keepIfType(FieldType.JSON, FieldType.STRING)
break
case FieldType.DATETIME:
shouldKeepSchema = keepIfType(FieldType.DATETIME, FieldType.STRING)
break
case FieldType.BIGINT:
shouldKeepSchema = keepIfType(FieldType.BIGINT, FieldType.NUMBER)
break
default:
utils.unreachable(existingColumnType)
} }
const specialTypes = [ if (shouldKeepSchema) {
FieldType.OPTIONS, table.schema[key] = {
FieldType.LONGFORM, ...existingTableSchema[key],
FieldType.ARRAY, externalType:
FieldType.FORMULA, existingTableSchema[key].externalType ||
FieldType.BB_REFERENCE, table.schema[key]?.externalType,
] }
if (
specialTypes.includes(column.type) &&
!shouldCopySpecialColumn(column, table.schema[key])
) {
continue
}
table.schema[key] = {
...existingTableSchema[key],
externalType:
existingTableSchema[key].externalType ||
table.schema[key].externalType,
} }
} }
} }
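The rewritten copyExistingPropsOver folds the two deleted helpers into one exhaustive switch over the stored column type (utils.unreachable makes the compiler flag any unhandled FieldType). The core rule is keepIfType: a schema saved in Budibase only survives a refetch while the external database still reports a compatible primitive type. A stripped-down rendering with simplified types:

// Simplified standalone version of the keepIfType closure above
type Primitive = "string" | "number" | "boolean" | "datetime" | "json"

const keepIfType =
  (fetchedType: Primitive | undefined) =>
  (...validTypes: Primitive[]) =>
    fetchedType !== undefined && validTypes.includes(fetchedType)

// an OPTIONS column is kept while the DB still reports a string type...
console.log(keepIfType("string")("string")) // true
// ...and re-fetched from scratch once the underlying type changes
console.log(keepIfType("datetime")("string")) // false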
@ -457,3 +472,74 @@ export function getPrimaryDisplay(testValue: unknown): string | undefined {
export function isValidFilter(value: any) { export function isValidFilter(value: any) {
return value != null && value !== "" return value != null && value !== ""
} }
export async function handleXml(response: any) {
let data,
rawXml = await response.text()
data =
(await xmlParser(rawXml, {
explicitArray: false,
trim: true,
explicitRoot: false,
})) || {}
// if there is only one structure and it's an array, return the array so it appears as rows
const keys = Object.keys(data)
if (keys.length === 1 && Array.isArray(data[keys[0]])) {
data = data[keys[0]]
}
return { data, rawXml }
}
export async function handleFileResponse(
response: any,
filename: string,
startTime: number
) {
let presignedUrl,
size = 0
const fileExtension = filename.includes(".")
? filename.split(".").slice(1).join(".")
: ""
const processedFileName = `${v4()}.${fileExtension}`
const key = `${context.getProdAppId()}/${processedFileName}`
const bucket = objectStore.ObjectStoreBuckets.TEMP
const stream = response.body.pipe(bl((error, data) => data))
if (response.body) {
const contentLength = response.headers.get("content-length")
if (contentLength) {
size = parseInt(contentLength, 10)
} else {
const chunks: Buffer[] = []
for await (const chunk of response.body) {
chunks.push(chunk)
size += chunk.length
}
}
await objectStore.streamUpload({
bucket,
filename: key,
stream,
ttl: 1,
type: response.headers.get("content-type"),
})
}
presignedUrl = await objectStore.getPresignedUrl(bucket, key)
return {
data: {
size,
name: processedFileName,
url: presignedUrl,
extension: fileExtension,
key: key,
},
info: {
code: response.status,
size: formatBytes(size.toString()),
time: `${Math.round(performance.now() - startTime)}ms`,
},
}
}
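handleXml centralises the xml2js options the REST integration previously inlined. How the three options shape the parsed output, and why the single-array unwrap makes XML responses render as rows:

import { parseStringPromise } from "xml2js"

async function demo() {
  const xml = "<rows><row><id>1</id></row><row><id>2</id></row></rows>"
  // explicitRoot: false drops the <rows> wrapper, explicitArray: false
  // keeps single children as plain values, trim strips whitespace
  const data = await parseStringPromise(xml, {
    explicitArray: false,
    trim: true,
    explicitRoot: false,
  })
  console.log(data) // { row: [ { id: '1' }, { id: '2' } ] }
  // exactly one key holding an array, so handleXml returns the array itself
}

demo()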

View File

@ -125,7 +125,7 @@ describe("validation and update of external table schemas", () => {
} }
it("should correctly set utilised foreign keys to autocolumns", () => { it("should correctly set utilised foreign keys to autocolumns", () => {
const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any) const response = populateExternalTableSchemas(cloneDeep(SCHEMA))
const foreignKey = getForeignKeyColumn(response) const foreignKey = getForeignKeyColumn(response)
expect(foreignKey.autocolumn).toBe(true) expect(foreignKey.autocolumn).toBe(true)
expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY) expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY)
@ -133,7 +133,7 @@ describe("validation and update of external table schemas", () => {
}) })
it("should correctly unset foreign keys when no longer used", () => { it("should correctly unset foreign keys when no longer used", () => {
const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA) as any) const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA))
const beforeFk = getForeignKeyColumn(setResponse) const beforeFk = getForeignKeyColumn(setResponse)
delete setResponse.entities!.client.schema.project delete setResponse.entities!.client.schema.project
delete setResponse.entities!.project.schema.client delete setResponse.entities!.project.schema.client

View File

@ -44,7 +44,10 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
if (shouldBeForeign && !column.autocolumn) { if (shouldBeForeign && !column.autocolumn) {
column.autocolumn = true column.autocolumn = true
column.autoReason = AutoReason.FOREIGN_KEY column.autoReason = AutoReason.FOREIGN_KEY
} else if (column.autoReason === AutoReason.FOREIGN_KEY) { } else if (
!shouldBeForeign &&
column.autoReason === AutoReason.FOREIGN_KEY
) {
delete column.autocolumn delete column.autocolumn
delete column.autoReason delete column.autoReason
} }

View File

@ -5,6 +5,7 @@ import {
UpdateDatasourceResponse, UpdateDatasourceResponse,
UpdateDatasourceRequest, UpdateDatasourceRequest,
QueryJson, QueryJson,
BuildSchemaFromSourceResponse,
} from "@budibase/types" } from "@budibase/types"
import { Expectations, TestAPI } from "./base" import { Expectations, TestAPI } from "./base"
@ -69,4 +70,13 @@ export class DatasourceAPI extends TestAPI {
expectations, expectations,
}) })
} }
fetchSchema = async (id: string, expectations?: Expectations) => {
return await this._post<BuildSchemaFromSourceResponse>(
`/api/datasources/${id}/schema`,
{
expectations,
}
)
}
} }

View File

@ -106,22 +106,22 @@ export async function updateClientLibrary(appId: string) {
} }
// Upload latest manifest and client library // Upload latest manifest and client library
const manifestUpload = objectStore.streamUpload( const manifestUpload = objectStore.streamUpload({
ObjectStoreBuckets.APPS, bucket: ObjectStoreBuckets.APPS,
join(appId, "manifest.json"), filename: join(appId, "manifest.json"),
fs.createReadStream(manifest), stream: fs.createReadStream(manifest),
{ extra: {
ContentType: "application/json", ContentType: "application/json",
} },
) })
const clientUpload = objectStore.streamUpload( const clientUpload = objectStore.streamUpload({
ObjectStoreBuckets.APPS, bucket: ObjectStoreBuckets.APPS,
join(appId, "budibase-client.js"), filename: join(appId, "budibase-client.js"),
fs.createReadStream(client), stream: fs.createReadStream(client),
{ extra: {
ContentType: "application/javascript", ContentType: "application/javascript",
} },
) })
const manifestSrc = fs.promises.readFile(manifest, "utf8") const manifestSrc = fs.promises.readFile(manifest, "utf8")
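streamUpload has switched from positional arguments to a single options object, which is what lets handleFileResponse pass ttl for temporary attachments without disturbing callers like this one. The parameter shape below is inferred from the call sites in this diff; the authoritative definition lives in @budibase/backend-core's objectStore:

import { Readable } from "stream"

// Assumed shape, not the real source
interface StreamUploadParams {
  bucket: string
  filename: string
  stream: Readable
  type?: string | null // content type shortcut
  ttl?: number // expiry for temp files, unit per the backend-core implementation
  extra?: Record<string, string> // raw S3 params such as ContentType
}

async function streamUpload(params: StreamUploadParams): Promise<void> {
  // stub body - the real implementation streams the data to S3/MinIO
  console.log(`uploading ${params.filename} to ${params.bucket}`)
}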

View File

@ -43,7 +43,7 @@ export const checkDevelopmentEnvironment = () => {
error = "Must run via yarn once to generate environment." error = "Must run via yarn once to generate environment."
} }
if (error) { if (error) {
console.error(error) console.error("Error during development environment check", error)
process.exit(-1) process.exit(-1)
} }
} }

View File

@ -8,7 +8,7 @@ import {
logging, logging,
env as coreEnv, env as coreEnv,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { Ctx, User, EmailInvite } from "@budibase/types" import { Ctx, User, EmailInvite, EmailAttachment } from "@budibase/types"
interface Request { interface Request {
ctx?: Ctx ctx?: Ctx
@ -97,6 +97,7 @@ export async function sendSmtpEmail({
bcc, bcc,
automation, automation,
invite, invite,
attachments,
}: { }: {
to: string to: string
from: string from: string
@ -105,6 +106,7 @@ export async function sendSmtpEmail({
cc: string cc: string
bcc: string bcc: string
automation: boolean automation: boolean
attachments?: EmailAttachment[]
invite?: EmailInvite invite?: EmailInvite
}) { }) {
// tenant ID will be set in header // tenant ID will be set in header
@ -122,6 +124,7 @@ export async function sendSmtpEmail({
purpose: "custom", purpose: "custom",
automation, automation,
invite, invite,
attachments,
}, },
}) })
) )

View File

@ -16,7 +16,7 @@ import { setJSRunner, removeJSRunner } from "./helpers/javascript"
import manifest from "./manifest.json" import manifest from "./manifest.json"
import { ProcessOptions } from "./types" import { ProcessOptions } from "./types"
export { helpersToRemoveForJs } from "./helpers/list" export { helpersToRemoveForJs, getJsHelperList } from "./helpers/list"
export { FIND_ANY_HBS_REGEX } from "./utilities" export { FIND_ANY_HBS_REGEX } from "./utilities"
export { setJSRunner, setOnErrorLog } from "./helpers/javascript" export { setJSRunner, setOnErrorLog } from "./helpers/javascript"
export { iifeWrapper } from "./iife" export { iifeWrapper } from "./iife"

View File

@ -10,6 +10,7 @@ export enum AutomationIOType {
ARRAY = "array", ARRAY = "array",
JSON = "json", JSON = "json",
DATE = "date", DATE = "date",
ATTACHMENT = "attachment",
} }
export enum AutomationCustomIOType { export enum AutomationCustomIOType {
@ -30,6 +31,7 @@ export enum AutomationCustomIOType {
WEBHOOK_URL = "webhookUrl", WEBHOOK_URL = "webhookUrl",
AUTOMATION = "automation", AUTOMATION = "automation",
AUTOMATION_FIELDS = "automationFields", AUTOMATION_FIELDS = "automationFields",
MULTI_ATTACHMENTS = "multi_attachments",
} }
export enum AutomationTriggerStepId { export enum AutomationTriggerStepId {
@ -80,6 +82,11 @@ export interface EmailInvite {
url?: string url?: string
} }
export interface EmailAttachment {
url: string
filename: string
}
export interface SendEmailOpts { export interface SendEmailOpts {
// workspaceId If finer grain controls being used then this will lookup config for workspace. // workspaceId If finer grain controls being used then this will lookup config for workspace.
workspaceId?: string workspaceId?: string
@ -97,6 +104,7 @@ export interface SendEmailOpts {
bcc?: boolean bcc?: boolean
automation?: boolean automation?: boolean
invite?: EmailInvite invite?: EmailInvite
attachments?: EmailAttachment[]
} }
export const AutomationStepIdArray = [ export const AutomationStepIdArray = [

View File

@ -13,9 +13,7 @@ export interface Datasource extends Document {
config?: Record<string, any> config?: Record<string, any>
plus?: boolean plus?: boolean
isSQL?: boolean isSQL?: boolean
entities?: { entities?: Record<string, Table>
[key: string]: Table
}
} }
export enum RestAuthType { export enum RestAuthType {

View File

@ -91,6 +91,7 @@ export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.DATETIME type: FieldType.DATETIME
ignoreTimezones?: boolean ignoreTimezones?: boolean
timeOnly?: boolean timeOnly?: boolean
dateOnly?: boolean
subtype?: AutoFieldSubType.CREATED_AT | AutoFieldSubType.UPDATED_AT subtype?: AutoFieldSubType.CREATED_AT | AutoFieldSubType.UPDATED_AT
} }

View File

@ -7,6 +7,7 @@ export enum PlanType {
/** @deprecated */ /** @deprecated */
PREMIUM = "premium", PREMIUM = "premium",
PREMIUM_PLUS = "premium_plus", PREMIUM_PLUS = "premium_plus",
PREMIUM_PLUS_TRIAL = "premium_plus_trial",
/** @deprecated */ /** @deprecated */
BUSINESS = "business", BUSINESS = "business",
ENTERPRISE_BASIC = "enterprise_basic", ENTERPRISE_BASIC = "enterprise_basic",

View File

@ -35,8 +35,7 @@ async function passportCallback(
info: { message: string } | null = null info: { message: string } | null = null
) { ) {
if (err) { if (err) {
console.error("Authentication error") console.error("Authentication error", err)
console.error(err)
console.trace(err) console.trace(err)
return ctx.throw(403, info ? info : "Unauthorized") return ctx.throw(403, info ? info : "Unauthorized")
} }

View File

@ -15,6 +15,7 @@ export async function sendEmail(ctx: BBContext) {
bcc, bcc,
automation, automation,
invite, invite,
attachments,
} = ctx.request.body } = ctx.request.body
let user: any let user: any
if (userId) { if (userId) {
@ -31,6 +32,7 @@ export async function sendEmail(ctx: BBContext) {
bcc, bcc,
automation, automation,
invite, invite,
attachments,
}) })
ctx.body = { ctx.body = {
...response, ...response,

View File

@ -1,9 +1,15 @@
jest.unmock("node-fetch") jest.unmock("node-fetch")
jest.unmock("aws-sdk")
import { TestConfiguration } from "../../../../tests" import { TestConfiguration } from "../../../../tests"
import { EmailTemplatePurpose } from "../../../../constants" import { EmailTemplatePurpose } from "../../../../constants"
import { objectStoreTestProviders } from "@budibase/backend-core/tests"
import { objectStore } from "@budibase/backend-core"
import tk from "timekeeper"
import { EmailAttachment } from "@budibase/types"
const fetch = require("node-fetch")
const nodemailer = require("nodemailer") const nodemailer = require("nodemailer")
const fetch = require("node-fetch")
// for the real email tests give them a long time to either complete or fail // for the real email tests give them a long time to either complete or fail
jest.setTimeout(30000) jest.setTimeout(30000)
@ -12,14 +18,20 @@ describe("/api/global/email", () => {
const config = new TestConfiguration() const config = new TestConfiguration()
beforeAll(async () => { beforeAll(async () => {
tk.reset()
await objectStoreTestProviders.minio.start()
await config.beforeAll() await config.beforeAll()
}) })
afterAll(async () => { afterAll(async () => {
await objectStoreTestProviders.minio.stop()
await config.afterAll() await config.afterAll()
}) })
async function sendRealEmail(purpose: string) { async function sendRealEmail(
purpose: string,
attachments?: EmailAttachment[]
) {
let response, text let response, text
try { try {
const timeout = () => const timeout = () =>
@ -35,8 +47,14 @@ describe("/api/global/email", () => {
) )
await Promise.race([config.saveEtherealSmtpConfig(), timeout()]) await Promise.race([config.saveEtherealSmtpConfig(), timeout()])
await Promise.race([config.saveSettingsConfig(), timeout()]) await Promise.race([config.saveSettingsConfig(), timeout()])
let res
const res = await config.api.emails.sendEmail(purpose).timeout(20000) if (attachments) {
res = await config.api.emails
.sendEmail(purpose, attachments)
.timeout(20000)
} else {
res = await config.api.emails.sendEmail(purpose).timeout(20000)
}
// ethereal hiccup, can't test right now // ethereal hiccup, can't test right now
if (res.status >= 300) { if (res.status >= 300) {
return return
@ -80,4 +98,25 @@ describe("/api/global/email", () => {
it("should be able to send a password recovery email", async () => { it("should be able to send a password recovery email", async () => {
await sendRealEmail(EmailTemplatePurpose.PASSWORD_RECOVERY) await sendRealEmail(EmailTemplatePurpose.PASSWORD_RECOVERY)
}) })
it("should be able to send an email with attachments", async () => {
let bucket = "testbucket"
let filename = "test.txt"
await objectStore.upload({
bucket,
filename,
body: Buffer.from("test data"),
})
let presignedUrl = await objectStore.getPresignedUrl(
bucket,
filename,
60000
)
let attachmentObject = {
url: presignedUrl,
filename,
}
await sendRealEmail(EmailTemplatePurpose.WELCOME, [attachmentObject])
})
}) })
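The attachment test uploads a fixture and hands the email service a presigned URL instead of credentials. With the aws-sdk v2 client this repo already uses, the equivalent low-level calls against a local MinIO look roughly like this (endpoint and keys are assumptions based on the test environment above):

import AWS from "aws-sdk"

const s3 = new AWS.S3({
  endpoint: "http://localhost:9000", // assumed local MinIO address
  s3ForcePathStyle: true, // MinIO needs path-style bucket addressing
  signatureVersion: "v4",
  accessKeyId: "budibase",
  secretAccessKey: "budibase",
})

async function demo() {
  await s3
    .upload({ Bucket: "testbucket", Key: "test.txt", Body: "test data" })
    .promise()
  // a time-limited GET link the receiver can fetch without credentials
  const url = s3.getSignedUrl("getObject", {
    Bucket: "testbucket",
    Key: "test.txt",
    Expires: 60, // seconds
  })
  console.log(url)
}

demo()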

View File

@ -1,3 +1,4 @@
import { EmailAttachment } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base" import { TestAPI } from "./base"
@ -6,11 +7,12 @@ export class EmailAPI extends TestAPI {
super(config) super(config)
} }
sendEmail = (purpose: string) => { sendEmail = (purpose: string, attachments?: EmailAttachment[]) => {
return this.request return this.request
.post(`/api/global/email/send`) .post(`/api/global/email/send`)
.send({ .send({
email: "test@example.com", email: "test@example.com",
attachments,
purpose, purpose,
tenantId: this.config.getTenantId(), tenantId: this.config.getTenantId(),
userId: this.config.user?._id!, userId: this.config.user?._id!,

View File

@ -4,8 +4,8 @@ process.env.JWT_SECRET = "test-jwtsecret"
process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error" process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error"
process.env.MULTI_TENANCY = "1" process.env.MULTI_TENANCY = "1"
process.env.MINIO_URL = "http://localhost" process.env.MINIO_URL = "http://localhost"
process.env.MINIO_ACCESS_KEY = "test" process.env.MINIO_ACCESS_KEY = "budibase"
process.env.MINIO_SECRET_KEY = "test" process.env.MINIO_SECRET_KEY = "budibase"
process.env.PLATFORM_URL = "http://localhost:10000" process.env.PLATFORM_URL = "http://localhost:10000"
process.env.INTERNAL_API_KEY = "tet" process.env.INTERNAL_API_KEY = "tet"
process.env.DISABLE_ACCOUNT_PORTAL = "0" process.env.DISABLE_ACCOUNT_PORTAL = "0"

View File

@ -62,8 +62,8 @@ export function smtpEthereal(): SMTPConfig {
from: "testfrom@example.com", from: "testfrom@example.com",
secure: false, secure: false,
auth: { auth: {
user: "wyatt.zulauf29@ethereal.email", user: "mortimer.leuschke@ethereal.email",
pass: "tEwDtHBWWxusVWAPfa", pass: "5hSjsPbzRv7gEUsfzx",
}, },
connectionTimeout: 1000, // must be less than the jest default of 5000 connectionTimeout: 1000, // must be less than the jest default of 5000
}, },

View File

@ -4,8 +4,10 @@ import { getTemplateByPurpose, EmailTemplates } from "../constants/templates"
import { getSettingsTemplateContext } from "./templates" import { getSettingsTemplateContext } from "./templates"
import { processString } from "@budibase/string-templates" import { processString } from "@budibase/string-templates"
import { User, SendEmailOpts, SMTPInnerConfig } from "@budibase/types" import { User, SendEmailOpts, SMTPInnerConfig } from "@budibase/types"
import { configs, cache } from "@budibase/backend-core" import { configs, cache, objectStore } from "@budibase/backend-core"
import ical from "ical-generator" import ical from "ical-generator"
import fetch from "node-fetch"
import path from "path"
const nodemailer = require("nodemailer") const nodemailer = require("nodemailer")
@ -162,6 +164,42 @@ export async function sendEmail(
contents: opts?.contents, contents: opts?.contents,
}), }),
} }
if (opts?.attachments) {
const attachments = await Promise.all(
opts.attachments?.map(async attachment => {
const isFullyFormedUrl =
attachment.url.startsWith("http://") ||
attachment.url.startsWith("https://")
if (isFullyFormedUrl) {
const response = await fetch(attachment.url)
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
const fallbackFilename = path.basename(
new URL(attachment.url).pathname
)
return {
filename: attachment.filename || fallbackFilename,
content: response?.body,
}
} else {
const url = attachment.url
const result = objectStore.extractBucketAndPath(url)
if (result === null) {
throw new Error("Invalid signed URL")
}
const { bucket, path } = result
const readStream = await objectStore.getReadStream(bucket, path)
const fallbackFilename = path.split("/").pop() || ""
return {
filename: attachment.filename || fallbackFilename,
content: readStream,
}
}
})
)
message = { ...message, attachments }
}
message = { message = {
...message, ...message,

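The attachments array assembled above goes straight into nodemailer's message object, whose content field accepts a Buffer, string or Readable stream - which is why both the fetched HTTP body and the object store read stream pass through unchanged. A minimal example against an Ethereal test account:

import { Readable } from "stream"
const nodemailer = require("nodemailer")

async function demo() {
  const account = await nodemailer.createTestAccount()
  const transport = nodemailer.createTransport({
    host: account.smtp.host,
    port: account.smtp.port,
    secure: account.smtp.secure,
    auth: { user: account.user, pass: account.pass },
  })
  await transport.sendMail({
    from: "testfrom@example.com",
    to: "test@example.com",
    subject: "attachment demo",
    html: "<p>see attached</p>",
    // streams are consumed lazily when the message is actually sent
    attachments: [{ filename: "hello.txt", content: Readable.from("hi") }],
  })
}

demo()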
View File

@ -1,41 +0,0 @@
const fs = require("fs")
const path = require("path")
const { execSync } = require("child_process")
let version = "0.0.0"
const localPro = fs.existsSync("packages/pro/src")
if (!localPro) {
version = "latest"
}
// Get the list of workspaces and their locations
const output = execSync("yarn --silent workspaces info --json", {
encoding: "utf-8",
})
const data = JSON.parse(output)
// Loop through each workspace and update the dependencies
Object.keys(data).forEach(workspace => {
// Loop through each dependency and update its version in package.json
const packageJsonPath = path.join(data[workspace].location, "package.json")
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"))
if (packageJson.version !== "0.0.0") {
// Don't change if we are not using local versions
return
}
let hasChanges = false
if (packageJson.dependencies && packageJson.dependencies["@budibase/pro"]) {
packageJson.dependencies["@budibase/pro"] = version
hasChanges = true
}
// Write changes to package.json if there are any
if (hasChanges) {
fs.writeFileSync(
packageJsonPath,
JSON.stringify(packageJson, null, 2) + "\n"
)
}
})

yarn.lock (111 changed lines)
View File

@ -2046,6 +2046,44 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/backend-core@2.23.12":
version "0.0.0"
dependencies:
"@budibase/nano" "10.1.5"
"@budibase/pouchdb-replication-stream" "1.2.10"
"@budibase/shared-core" "0.0.0"
"@budibase/types" "0.0.0"
"@govtechsg/passport-openidconnect" "^1.0.2"
aws-cloudfront-sign "3.0.2"
aws-sdk "2.1030.0"
bcrypt "5.1.0"
bcryptjs "2.4.3"
bull "4.10.1"
correlation-id "4.0.0"
dd-trace "5.2.0"
dotenv "16.0.1"
ioredis "5.3.2"
joi "17.6.0"
jsonwebtoken "9.0.2"
koa-passport "^6.0.0"
koa-pino-logger "4.0.0"
lodash "4.17.21"
node-fetch "2.6.7"
passport-google-oauth "2.0.0"
passport-local "1.0.0"
passport-oauth2-refresh "^2.1.0"
pino "8.11.0"
pino-http "8.3.3"
posthog-node "1.3.0"
pouchdb "7.3.0"
pouchdb-find "7.2.2"
redlock "4.2.0"
rotating-file-stream "3.1.0"
sanitize-s3-objectkey "0.0.1"
semver "^7.5.4"
tar-fs "2.1.1"
uuid "^8.3.2"
"@budibase/handlebars-helpers@^0.13.1": "@budibase/handlebars-helpers@^0.13.1":
version "0.13.1" version "0.13.1"
resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.13.1.tgz#d02e73c0df8305cd675e70dc37f8427eb0842080" resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.13.1.tgz#d02e73c0df8305cd675e70dc37f8427eb0842080"
@ -2090,6 +2128,44 @@
pouchdb-promise "^6.0.4" pouchdb-promise "^6.0.4"
through2 "^2.0.0" through2 "^2.0.0"
"@budibase/pro@npm:@budibase/pro@latest":
version "2.23.12"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.23.12.tgz#b2e813c547a5ed22b5bd86b1158159fe4b918260"
integrity sha512-DMtfkrJDSIF9V7AL6brpuWWw7Ot5XxO4YQ32ggmr0264uU9KYsTFvlFXFP3MSF2H+247ZYUouSJU76+XeC13qQ==
dependencies:
"@budibase/backend-core" "2.23.12"
"@budibase/shared-core" "2.23.12"
"@budibase/string-templates" "2.23.12"
"@budibase/types" "2.23.12"
"@koa/router" "8.0.8"
bull "4.10.1"
joi "17.6.0"
jsonwebtoken "9.0.2"
lru-cache "^7.14.1"
memorystream "^0.3.1"
node-fetch "2.6.7"
scim-patch "^0.8.1"
scim2-parse-filter "^0.2.8"
"@budibase/shared-core@2.23.12":
version "0.0.0"
dependencies:
"@budibase/types" "0.0.0"
cron-validate "1.4.5"
"@budibase/string-templates@2.23.12":
version "0.0.0"
dependencies:
"@budibase/handlebars-helpers" "^0.13.1"
dayjs "^1.10.8"
handlebars "^4.7.8"
lodash.clonedeep "^4.5.0"
"@budibase/types@2.23.12":
version "0.0.0"
dependencies:
scim-patch "^0.8.1"
"@bull-board/api@5.10.2": "@bull-board/api@5.10.2":
version "5.10.2" version "5.10.2"
resolved "https://registry.yarnpkg.com/@bull-board/api/-/api-5.10.2.tgz#ae8ff6918b23897bf879a6ead3683f964374c4b3" resolved "https://registry.yarnpkg.com/@bull-board/api/-/api-5.10.2.tgz#ae8ff6918b23897bf879a6ead3683f964374c4b3"
@ -5930,6 +6006,14 @@
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==
"@types/readable-stream@^4.0.0":
version "4.0.11"
resolved "https://registry.yarnpkg.com/@types/readable-stream/-/readable-stream-4.0.11.tgz#684f1e947c90cb6a8ad3904523d650bb66cdbb84"
integrity sha512-R3eUMUTTKoIoaz7UpYLxvZCrOmCRPRbAmoDDHKcimTEySltaJhF8hLzj4+EzyDifiX5eK6oDQGSfmNnXjxZzYQ==
dependencies:
"@types/node" "*"
safe-buffer "~5.1.1"
"@types/readdir-glob@*": "@types/readdir-glob@*":
version "1.1.5" version "1.1.5"
resolved "https://registry.yarnpkg.com/@types/readdir-glob/-/readdir-glob-1.1.5.tgz#21a4a98898fc606cb568ad815f2a0eedc24d412a" resolved "https://registry.yarnpkg.com/@types/readdir-glob/-/readdir-glob-1.1.5.tgz#21a4a98898fc606cb568ad815f2a0eedc24d412a"
@ -6147,6 +6231,13 @@
dependencies: dependencies:
"@types/webidl-conversions" "*" "@types/webidl-conversions" "*"
"@types/xml2js@^0.4.14":
version "0.4.14"
resolved "https://registry.yarnpkg.com/@types/xml2js/-/xml2js-0.4.14.tgz#5d462a2a7330345e2309c6b549a183a376de8f9a"
integrity sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==
dependencies:
"@types/node" "*"
"@types/yargs-parser@*": "@types/yargs-parser@*":
version "21.0.0" version "21.0.0"
resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b"
@ -7607,6 +7698,16 @@ bl@^4.0.3, bl@^4.1.0:
inherits "^2.0.4" inherits "^2.0.4"
readable-stream "^3.4.0" readable-stream "^3.4.0"
bl@^6.0.12:
version "6.0.12"
resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.12.tgz#77c35b96e13aeff028496c798b75389ddee9c7f8"
integrity sha512-EnEYHilP93oaOa2MnmNEjAcovPS3JlQZOyzGXi3EyEpPhm9qWvdDp7BmAVEVusGzp8LlwQK56Av+OkDoRjzE0w==
dependencies:
"@types/readable-stream" "^4.0.0"
buffer "^6.0.3"
inherits "^2.0.4"
readable-stream "^4.2.0"
bl@^6.0.3: bl@^6.0.3:
version "6.0.9" version "6.0.9"
resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.9.tgz#df8fcb2ef7be2e5ee8f65afa493502914e0d816f" resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.9.tgz#df8fcb2ef7be2e5ee8f65afa493502914e0d816f"
@ -8781,7 +8882,7 @@ consolidate@^0.16.0:
dependencies: dependencies:
bluebird "^3.7.2" bluebird "^3.7.2"
content-disposition@^0.5.2, content-disposition@^0.5.3, content-disposition@~0.5.2: content-disposition@^0.5.2, content-disposition@^0.5.3, content-disposition@^0.5.4, content-disposition@~0.5.2:
version "0.5.4" version "0.5.4"
resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe"
integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==
@ -16116,10 +16217,10 @@ mute-stream@~1.0.0:
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e"
integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA== integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==
mysql2@3.5.2: mysql2@3.9.7:
version "3.5.2" version "3.9.7"
resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.5.2.tgz#a06050e1514e9ac15711a8b883ffd51cb44b2dc8" resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.7.tgz#843755daf65b5ef08afe545fe14b8fb62824741a"
integrity sha512-cptobmhYkYeTBIFp2c0piw2+gElpioga1rUw5UidHvo8yaHijMZoo8A3zyBVoo/K71f7ZFvrShA9iMIy9dCzCA== integrity sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==
dependencies: dependencies:
denque "^2.1.0" denque "^2.1.0"
generate-function "^2.3.1" generate-function "^2.3.1"