Merge branch 'master' into grid-enhancements
commit 8baea1974d

@@ -42,6 +42,8 @@
},
"rules": {
"no-unused-vars": "off",
"local-rules/no-budibase-imports": "error",
"local-rules/no-console-error": "error",
"@typescript-eslint/no-unused-vars": [
"error",
{

@@ -24,5 +24,8 @@
},
"[svelte]": {
"editor.defaultFormatter": "svelte.svelte-vscode"
},
"[handlebars]": {
"editor.formatOnSave": false
}
}

@@ -106,6 +106,8 @@ spec:
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: TEMP_BUCKET_NAME
value: {{ .Values.globals.tempBucketName | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}

@@ -107,6 +107,8 @@ spec:
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: TEMP_BUCKET_NAME
value: {{ .Values.globals.tempBucketName | quote }}
- name: PORT
value: {{ .Values.services.automationWorkers.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}

@@ -106,6 +106,8 @@ spec:
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: TEMP_BUCKET_NAME
value: {{ .Values.globals.tempBucketName | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY

@@ -121,6 +121,9 @@ globals:
# to the old value for the duration of the rotation.
jwtSecretFallback: ""

## -- If using S3 the bucket name to be used for storing temporary files
tempBucketName: ""

smtp:
# -- Whether to enable SMTP or not.
enabled: false

@@ -1,4 +1,25 @@
module.exports = {
"no-console-error": {
create: function(context) {
return {
CallExpression(node) {
if (
node.callee.type === "MemberExpression" &&
node.callee.object.name === "console" &&
node.callee.property.name === "error" &&
node.arguments.length === 1 &&
node.arguments[0].name &&
node.arguments[0].name.startsWith("err")
) {
context.report({
node,
message: 'Using console.error(err) on its own is not allowed. Either provide context to the error (console.error(msg, err)) or throw it.',
})
}
},
};
},
},
"no-budibase-imports": {
create: function (context) {
return {

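For context, a minimal sketch of what the new no-console-error rule flags, based on the checks above (it only fires on a lone argument whose identifier starts with "err"); the surrounding try/catch is illustrative:

try {
  await doWork()
} catch (err) {
  console.error(err) // flagged by local-rules/no-console-error
  console.error("failed to do work", err) // fine: context provided alongside the error
}
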
@@ -1,5 +1,5 @@
{
"version": "2.23.6",
"version": "2.23.12",
"npmClient": "yarn",
"packages": [
"packages/*",

nx.json
@@ -9,10 +9,7 @@
},
"targetDefaults": {
"build": {
"inputs": [
"{workspaceRoot}/scripts/build.js",
"{workspaceRoot}/lerna.json"
]
"inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"]
}
}
}

@@ -59,7 +59,7 @@
"dev:camunda": "./scripts/deploy-camunda.sh",
"dev:all": "yarn run kill-all && lerna run --stream dev",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"dev:docker": "./scripts/devDocker.sh",
"test": "REUSE_CONTAINERS=1 lerna run --concurrency 1 --stream test --stream",
"lint:eslint": "eslint packages --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",

@@ -1 +1 @@
Subproject commit eb7d5da233885c5cffd9c255d3e954d0cd39185e
Subproject commit c167c331ff9b8161fc18e2ecbaaf1ea5815ba964

@@ -64,7 +64,6 @@ async function refreshOIDCAccessToken(
}
strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)
} catch (err) {
console.error(err)
throw new Error("Could not refresh OAuth Token")
}

@@ -99,7 +98,6 @@ async function refreshGoogleAccessToken(
ssoSaveUserNoOp
)
} catch (err: any) {
console.error(err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)

@@ -29,6 +29,7 @@ const DefaultBucketName = {
TEMPLATES: "templates",
GLOBAL: "global",
PLUGINS: "plugins",
TEMP: "tmp-file-attachments",
}

const selfHosted = !!parseInt(process.env.SELF_HOSTED || "")

@@ -146,6 +147,7 @@ const environment = {
process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,
PLUGIN_BUCKET_NAME:
process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
TEMP_BUCKET_NAME: process.env.TEMP_BUCKET_NAME || DefaultBucketName.TEMP,
USE_COUCH: process.env.USE_COUCH || true,
MOCK_REDIS: process.env.MOCK_REDIS,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,

@@ -138,7 +138,6 @@ export default function (
} catch (err: any) {
authenticated = false
console.error(`Auth Error: ${err.message}`)
console.error(err)
// remove the cookie as the user does not exist anymore
clearCookie(ctx, Cookie.Auth)
}

@@ -187,7 +186,6 @@ export default function (
}
} catch (err: any) {
console.error(`Auth Error: ${err.message}`)
console.error(err)
// invalid token, clear the cookie
if (err?.name === "JsonWebTokenError") {
clearCookie(ctx, Cookie.Auth)

@@ -12,7 +12,7 @@ export async function errorHandling(ctx: any, next: any) {
if (status >= 400 && status < 500) {
console.warn(err)
} else {
console.error(err)
console.error("Got 400 response code", err)
}

let error: APIError = {

@@ -68,7 +68,6 @@ export async function strategyFactory(
verify
)
} catch (err: any) {
console.error(err)
throw new Error(`Error constructing google authentication strategy: ${err}`)
}
}

@@ -103,7 +103,6 @@ export async function strategyFactory(
strategy.name = "oidc"
return strategy
} catch (err: any) {
console.error(err)
throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
}
}

@@ -142,7 +141,6 @@ export async function fetchStrategyConfig(
callbackURL: callbackUrl,
}
} catch (err) {
console.error(err)
throw new Error(
`Error constructing OIDC authentication configuration - ${err}`
)

@@ -26,7 +26,6 @@ export const getMigrationsDoc = async (db: any) => {
if (err.status && err.status === 404) {
return { _id: DocumentType.MIGRATIONS }
} else {
console.error(err)
throw err
}
}

@@ -7,31 +7,41 @@ import tar from "tar-fs"
import zlib from "zlib"
import { promisify } from "util"
import { join } from "path"
import fs, { ReadStream } from "fs"
import fs, { PathLike, ReadStream } from "fs"
import env from "../environment"
import { budibaseTempDir } from "./utils"
import { bucketTTLConfig, budibaseTempDir } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
import fsp from "fs/promises"

const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
const STATE = {
bucketCreationPromises: {},
}
const signedFilePrefix = "/files/signed"

type ListParams = {
ContinuationToken?: string
}

type UploadParams = {
type BaseUploadParams = {
bucket: string
filename: string
path: string
type?: string | null
// can be undefined, we will remove it
metadata?: {
[key: string]: string | undefined
}
metadata?: { [key: string]: string | undefined }
body?: ReadableStream | Buffer
ttl?: number
addTTL?: boolean
extra?: any
}

type UploadParams = BaseUploadParams & {
path?: string | PathLike
}

type StreamUploadParams = BaseUploadParams & {
stream: ReadStream
}

const CONTENT_TYPE_MAP: any = {

@@ -41,6 +51,8 @@ const CONTENT_TYPE_MAP: any = {
js: "application/javascript",
json: "application/json",
gz: "application/gzip",
svg: "image/svg+xml",
form: "multipart/form-data",
}

const STRING_CONTENT_TYPES = [

@@ -105,7 +117,10 @@ export function ObjectStore(
* Given an object store and a bucket name this will make sure the bucket exists,
* if it does not exist then it will create it.
*/
export async function makeSureBucketExists(client: any, bucketName: string) {
export async function createBucketIfNotExists(
client: any,
bucketName: string
): Promise<{ created: boolean; exists: boolean }> {
bucketName = sanitizeBucket(bucketName)
try {
await client

@@ -113,15 +128,16 @@ export async function makeSureBucketExists(client: any, bucketName: string) {
Bucket: bucketName,
})
.promise()
return { created: false, exists: true }
} catch (err: any) {
const promises: any = STATE.bucketCreationPromises
const doesntExist = err.statusCode === 404,
noAccess = err.statusCode === 403
if (promises[bucketName]) {
await promises[bucketName]
return { created: false, exists: true }
} else if (doesntExist || noAccess) {
if (doesntExist) {
// bucket doesn't exist create it
promises[bucketName] = client
.createBucket({
Bucket: bucketName,

@@ -129,13 +145,15 @@ export async function makeSureBucketExists(client: any, bucketName: string) {
.promise()
await promises[bucketName]
delete promises[bucketName]
return { created: true, exists: false }
} else {
throw new Error("Access denied to object store bucket." + err)
}
} else {
throw new Error("Unable to write to object store bucket.")
}
}
}

/**
* Uploads the contents of a file given the required parameters, useful when
* temp files in use (for example file uploaded as an attachment).

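A brief sketch of how the new return value can be consumed by callers; the bucket name is illustrative:

const client = ObjectStore("tmp-file-attachments")
const { created, exists } = await createBucketIfNotExists(client, "tmp-file-attachments")
if (created) {
  // this call created the bucket, so one-off setup (e.g. a TTL policy) is safe here
}
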
|
@ -146,12 +164,22 @@ export async function upload({
|
|||
path,
|
||||
type,
|
||||
metadata,
|
||||
body,
|
||||
ttl,
|
||||
}: UploadParams) {
|
||||
const extension = filename.split(".").pop()
|
||||
const fileBytes = fs.readFileSync(path)
|
||||
|
||||
const fileBytes = path ? (await fsp.open(path)).createReadStream() : body
|
||||
|
||||
const objectStore = ObjectStore(bucketName)
|
||||
await makeSureBucketExists(objectStore, bucketName)
|
||||
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
|
||||
|
||||
if (ttl && (bucketCreated.created || bucketCreated.exists)) {
|
||||
let ttlConfig = bucketTTLConfig(bucketName, ttl)
|
||||
if (objectStore.putBucketLifecycleConfiguration) {
|
||||
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
|
||||
}
|
||||
}
|
||||
|
||||
let contentType = type
|
||||
if (!contentType) {
|
||||
|
@@ -174,6 +202,7 @@ export async function upload({
}
config.Metadata = metadata
}

return objectStore.upload(config).promise()
}

@@ -181,14 +210,24 @@ export async function upload({
* Similar to the upload function but can be used to send a file stream
* through to the object store.
*/
export async function streamUpload(
bucketName: string,
filename: string,
stream: ReadStream | ReadableStream,
extra = {}
) {
export async function streamUpload({
bucket: bucketName,
stream,
filename,
type,
extra,
ttl,
}: StreamUploadParams) {
const extension = filename.split(".").pop()
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)

if (ttl && (bucketCreated.created || bucketCreated.exists)) {
let ttlConfig = bucketTTLConfig(bucketName, ttl)
if (objectStore.putBucketLifecycleConfiguration) {
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
}
}

// Set content type for certain known extensions
if (filename?.endsWith(".js")) {

@@ -203,10 +242,18 @@ export async function streamUpload(
}
}

let contentType = type
if (!contentType) {
contentType = extension
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
}

const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
Body: stream,
ContentType: contentType,
...extra,
}
return objectStore.upload(params).promise()

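Side by side, the old positional call and its object-parameter replacement - a sketch based on the signatures above, with illustrative bucket and file names:

// before
await streamUpload("plugins", "plugin.tar.gz", fs.createReadStream(local))

// after
await streamUpload({
  bucket: "plugins",
  filename: "plugin.tar.gz",
  stream: fs.createReadStream(local),
  ttl: 1, // optional: applies a one-day expiry via bucketTTLConfig
})
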
@@ -286,7 +333,7 @@ export function getPresignedUrl(
const signedUrl = new URL(url)
const path = signedUrl.pathname
const query = signedUrl.search
return `/files/signed${path}${query}`
return `${signedFilePrefix}${path}${query}`
}
}

@@ -341,7 +388,7 @@ export async function retrieveDirectory(bucketName: string, path: string) {
*/
export async function deleteFile(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
Key: sanitizeKey(filepath),

@@ -351,7 +398,7 @@ export async function deleteFile(bucketName: string, filepath: string) {

export async function deleteFiles(bucketName: string, filepaths: string[]) {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
Delete: {

@@ -412,7 +459,13 @@ export async function uploadDirectory(
if (file.isDirectory()) {
uploads.push(uploadDirectory(bucketName, local, path))
} else {
uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
uploads.push(
streamUpload({
bucket: bucketName,
filename: path,
stream: fs.createReadStream(local),
})
)
}
}
await Promise.all(uploads)

@@ -467,3 +520,23 @@ export async function getReadStream(
}
return client.getObject(params).createReadStream()
}

/*
Given a signed url like '/files/signed/tmp-files-attachments/app_123456/myfile.txt' extract
the bucket and the path from it
*/
export function extractBucketAndPath(
url: string
): { bucket: string; path: string } | null {
const baseUrl = url.split("?")[0]

const regex = new RegExp(`^${signedFilePrefix}/(?<bucket>[^/]+)/(?<path>.+)$`)
const match = baseUrl.match(regex)

if (match && match.groups) {
const { bucket, path } = match.groups
return { bucket, path }
}

return null
}

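A usage sketch for the new helper, using the URL shape from its comment:

const parsed = extractBucketAndPath(
  "/files/signed/tmp-files-attachments/app_123456/myfile.txt"
)
// parsed => { bucket: "tmp-files-attachments", path: "app_123456/myfile.txt" }
// anything that does not start with the signed-file prefix returns null
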
@@ -2,6 +2,7 @@ import { join } from "path"
import { tmpdir } from "os"
import fs from "fs"
import env from "../environment"
import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"

/****************************************************
* NOTE: When adding a new bucket - name *

@@ -15,6 +16,7 @@ export const ObjectStoreBuckets = {
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
GLOBAL: env.GLOBAL_BUCKET_NAME,
PLUGINS: env.PLUGIN_BUCKET_NAME,
TEMP: env.TEMP_BUCKET_NAME,
}

const bbTmp = join(tmpdir(), ".budibase")

@@ -29,3 +31,27 @@ try {
export function budibaseTempDir() {
return bbTmp
}

export const bucketTTLConfig = (
bucketName: string,
days: number
): PutBucketLifecycleConfigurationRequest => {
const lifecycleRule = {
ID: `${bucketName}-ExpireAfter${days}days`,
Prefix: "",
Status: "Enabled",
Expiration: {
Days: days,
},
}
const lifecycleConfiguration = {
Rules: [lifecycleRule],
}

const params = {
Bucket: bucketName,
LifecycleConfiguration: lifecycleConfiguration,
}

return params
}

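For reference, the request object this helper produces - a sketch with an illustrative bucket name and a one-day expiry:

bucketTTLConfig("tmp-file-attachments", 1)
// => {
//   Bucket: "tmp-file-attachments",
//   LifecycleConfiguration: {
//     Rules: [
//       {
//         ID: "tmp-file-attachments-ExpireAfter1days",
//         Prefix: "",
//         Status: "Enabled",
//         Expiration: { Days: 1 },
//       },
//     ],
//   },
// }
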
@@ -50,6 +50,8 @@ type CreateAdminUserOpts = {
hashPassword?: boolean
requirePassword?: boolean
skipPasswordValidation?: boolean
firstName?: string
lastName?: string
}
type FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn }

@@ -517,6 +519,8 @@ export class UserDB {
global: true,
},
tenantId,
firstName: opts?.firstName,
lastName: opts?.lastName,
}
if (opts?.ssoId) {
user.ssoId = opts.ssoId

@@ -4,3 +4,6 @@ export { generator } from "./structures"
export * as testContainerUtils from "./testContainerUtils"
export * as utils from "./utils"
export * from "./jestUtils"
import * as minio from "./minio"

export const objectStoreTestProviders = { minio }

@@ -0,0 +1,34 @@
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import env from "../../../src/environment"

let container: StartedTestContainer | undefined

class ObjectStoreWaitStrategy extends AbstractWaitStrategy {
async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
const logs = Wait.forListeningPorts()
await logs.waitUntilReady(container, boundPorts, startTime)
}
}

export async function start(): Promise<void> {
container = await new GenericContainer("minio/minio")
.withExposedPorts(9000)
.withCommand(["server", "/data"])
.withEnvironment({
MINIO_ACCESS_KEY: "budibase",
MINIO_SECRET_KEY: "budibase",
})
.withWaitStrategy(new ObjectStoreWaitStrategy().withStartupTimeout(30000))
.start()

const port = container.getMappedPort(9000)
env._set("MINIO_URL", `http://0.0.0.0:${port}`)
}

export async function stop() {
if (container) {
await container.stop()
container = undefined
}
}

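A plausible way to wire this provider into a suite - the jest hooks and import path shown are assumptions for illustration, not part of the diff:

import { objectStoreTestProviders } from "@budibase/backend-core/tests"

beforeAll(async () => {
  // boots the MinIO container and points MINIO_URL at the mapped port
  await objectStoreTestProviders.minio.start()
}, 40000)

afterAll(async () => {
  await objectStoreTestProviders.minio.stop()
})
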
@@ -32,6 +32,7 @@
import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte"
import BindingSidePanel from "components/common/bindings/BindingSidePanel.svelte"
import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
import { BindingHelpers, BindingType } from "components/common/bindings/utils"
import {
bindingsToCompletions,

@@ -356,7 +357,8 @@
value.customType !== "queryParams" &&
value.customType !== "cron" &&
value.customType !== "triggerSchema" &&
value.customType !== "automationFields"
value.customType !== "automationFields" &&
value.type !== "attachment"
)
}

@@ -372,6 +374,15 @@
console.error(error)
}
})
const handleAttachmentParams = keyValuObj => {
let params = {}
if (keyValuObj?.length) {
for (let param of keyValuObj) {
params[param.url] = param.filename
}
}
return params
}
</script>

<div class="fields">

@@ -437,6 +448,33 @@
value={inputData[key]}
options={Object.keys(table?.schema || {})}
/>
{:else if value.type === "attachment"}
<div class="attachment-field-wrapper">
<div class="label-wrapper">
<Label>{label}</Label>
</div>
<div class="attachment-field-width">
<KeyValueBuilder
on:change={e =>
onChange(
{
detail: e.detail.map(({ name, value }) => ({
url: name,
filename: value,
})),
},
key
)}
object={handleAttachmentParams(inputData[key])}
allowJS
{bindings}
keyBindings
customButtonText={"Add attachment"}
keyPlaceholder={"URL"}
valuePlaceholder={"Filename"}
/>
</div>
</div>
{:else if value.customType === "filters"}
<ActionButton on:click={drawer.show}>Define filters</ActionButton>
<Drawer bind:this={drawer} title="Filtering">

@@ -651,14 +689,22 @@
}

.block-field {
display: flex; /* Use Flexbox */
display: flex;
justify-content: space-between;
flex-direction: row; /* Arrange label and field side by side */
align-items: center; /* Align vertically in the center */
gap: 10px; /* Add some space between label and field */
flex-direction: row;
align-items: center;
gap: 10px;
flex: 1;
}

.attachment-field-width {
margin-top: var(--spacing-xs);
}

.label-wrapper {
margin-top: var(--spacing-s);
}

.test :global(.drawer) {
width: 10000px !important;
}

@@ -13,6 +13,7 @@
Layout,
AbsTooltip,
} from "@budibase/bbui"
import { SWITCHABLE_TYPES, ValidColumnNameRegex } from "@budibase/shared-core"
import { createEventDispatcher, getContext, onMount } from "svelte"
import { cloneDeep } from "lodash/fp"
import { tables, datasources } from "stores/builder"

@@ -20,11 +21,6 @@
import {
FIELDS,
RelationshipType,
ALLOWABLE_STRING_OPTIONS,
ALLOWABLE_NUMBER_OPTIONS,
ALLOWABLE_STRING_TYPES,
ALLOWABLE_NUMBER_TYPES,
SWITCHABLE_TYPES,
PrettyRelationshipDefinitions,
DB_TYPE_EXTERNAL,
} from "constants/backend"

@@ -33,21 +29,20 @@
import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
import { getBindings } from "components/backend/DataTable/formula"
import JSONSchemaModal from "./JSONSchemaModal.svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
import { RowUtils } from "@budibase/frontend-core"
import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"

const AUTO_TYPE = FIELDS.AUTO.type
const FORMULA_TYPE = FIELDS.FORMULA.type
const LINK_TYPE = FIELDS.LINK.type
const STRING_TYPE = FIELDS.STRING.type
const NUMBER_TYPE = FIELDS.NUMBER.type
const JSON_TYPE = FIELDS.JSON.type
const DATE_TYPE = FIELDS.DATETIME.type
const USER_TYPE = FIELDS.USER.subtype
const USERS_TYPE = FIELDS.USERS.subtype
const AUTO_TYPE = FieldType.AUTO
const FORMULA_TYPE = FieldType.FORMULA
const LINK_TYPE = FieldType.LINK
const STRING_TYPE = FieldType.STRING
const NUMBER_TYPE = FieldType.NUMBER
const JSON_TYPE = FieldType.JSON
const DATE_TYPE = FieldType.DATETIME
const USER_TYPE = FieldSubtype.USER
const USERS_TYPE = FieldSubtype.USERS

const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]

@@ -61,8 +56,8 @@
let primaryDisplay
let indexes = [...($tables.selected.indexes || [])]
let isCreating = undefined
let relationshipPart1 = PrettyRelationshipDefinitions.Many
let relationshipPart2 = PrettyRelationshipDefinitions.One
let relationshipPart1 = PrettyRelationshipDefinitions.MANY
let relationshipPart2 = PrettyRelationshipDefinitions.ONE
let relationshipTableIdPrimary = null
let relationshipTableIdSecondary = null
let table = $tables.selected

@@ -175,7 +170,7 @@
$: typeEnabled =
!originalName ||
(originalName &&
SWITCHABLE_TYPES.indexOf(editableColumn.type) !== -1 &&
SWITCHABLE_TYPES[field.type] &&
!editableColumn?.autocolumn)

const fieldDefinitions = Object.values(FIELDS).reduce(

@@ -367,16 +362,15 @@
}

function getAllowedTypes() {
if (
originalName &&
ALLOWABLE_STRING_TYPES.indexOf(editableColumn.type) !== -1
) {
return ALLOWABLE_STRING_OPTIONS
} else if (
originalName &&
ALLOWABLE_NUMBER_TYPES.indexOf(editableColumn.type) !== -1
) {
return ALLOWABLE_NUMBER_OPTIONS
if (originalName) {
const possibleTypes = (
SWITCHABLE_TYPES[field.type] || [editableColumn.type]
).map(t => t.toLowerCase())
return Object.entries(FIELDS)
.filter(([fieldType]) =>
possibleTypes.includes(fieldType.toLowerCase())
)
.map(([_, fieldDefinition]) => fieldDefinition)
}

const isUsers =

@@ -632,7 +626,7 @@
/>
</div>
</div>
{:else if editableColumn.type === FieldType.LINK}
{:else if editableColumn.type === FieldType.LINK && !editableColumn.autocolumn}
<RelationshipSelector
bind:relationshipPart1
bind:relationshipPart2

@@ -35,6 +35,8 @@
export let bindingDrawerLeft
export let allowHelpers = true
export let customButtonText = null
export let keyBindings = false
export let allowJS = false
export let compare = (option, value) => option === value

let fields = Object.entries(object || {}).map(([name, value]) => ({

@@ -116,12 +118,23 @@
class:readOnly-menu={readOnly && showMenu}
>
{#each fields as field, idx}
<Input
placeholder={keyPlaceholder}
readonly={readOnly}
bind:value={field.name}
on:blur={changed}
/>
{#if keyBindings}
<DrawerBindableInput
{bindings}
placeholder={keyPlaceholder}
on:blur={e => {
field.name = e.detail
changed()
}}
disabled={readOnly}
value={field.name}
{allowJS}
{allowHelpers}
drawerLeft={bindingDrawerLeft}
/>
{:else}
<Input readonly={readOnly} bind:value={field.name} on:blur={changed} />
{/if}
{#if isJsonArray(field.value)}
<Select readonly={true} value="Array" options={["Array"]} />
{:else if options}

@@ -134,14 +147,14 @@
{:else if bindings && bindings.length}
<DrawerBindableInput
{bindings}
placeholder="Value"
placeholder={valuePlaceholder}
on:blur={e => {
field.value = e.detail
changed()
}}
disabled={readOnly}
value={field.value}
allowJS={false}
{allowJS}
{allowHelpers}
drawerLeft={bindingDrawerLeft}
/>

@@ -202,26 +202,6 @@ export const PrettyRelationshipDefinitions = {
ONE: "One row",
}

export const ALLOWABLE_STRING_OPTIONS = [
FIELDS.STRING,
FIELDS.OPTIONS,
FIELDS.LONGFORM,
FIELDS.BARCODEQR,
]
export const ALLOWABLE_STRING_TYPES = ALLOWABLE_STRING_OPTIONS.map(
opt => opt.type
)

export const ALLOWABLE_NUMBER_OPTIONS = [FIELDS.NUMBER, FIELDS.BOOLEAN]
export const ALLOWABLE_NUMBER_TYPES = ALLOWABLE_NUMBER_OPTIONS.map(
opt => opt.type
)

export const SWITCHABLE_TYPES = [
...ALLOWABLE_STRING_TYPES,
...ALLOWABLE_NUMBER_TYPES,
]

export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
export const BUDIBASE_DATASOURCE_TYPE = "budibase"

@@ -22,6 +22,7 @@ import {
isJSBinding,
decodeJSBinding,
encodeJSBinding,
getJsHelperList,
} from "@budibase/string-templates"
import { TableNames } from "./constants"
import { JSONUtils, Constants } from "@budibase/frontend-core"

@@ -1210,9 +1211,32 @@ const shouldReplaceBinding = (currentValue, from, convertTo, binding) => {
if (!currentValue?.includes(from)) {
return false
}
if (convertTo === "readableBinding") {
// Dont replace if the value already matches the readable binding
// some cases we have the same binding for readable/runtime, specific logic for this
const sameBindings = binding.runtimeBinding.includes(binding.readableBinding)
const convertingToReadable = convertTo === "readableBinding"
const helperNames = Object.keys(getJsHelperList())
const matchedHelperNames = helperNames.filter(
name => name.includes(from) && currentValue.includes(name)
)
// edge case - if the binding is part of a helper it may accidentally replace it
if (matchedHelperNames.length > 0) {
const indexStart = currentValue.indexOf(from),
indexEnd = indexStart + from.length
for (let helperName of matchedHelperNames) {
const helperIndexStart = currentValue.indexOf(helperName),
helperIndexEnd = helperIndexStart + helperName.length
if (indexStart >= helperIndexStart && indexEnd <= helperIndexEnd) {
return false
}
}
}

if (convertingToReadable && !sameBindings) {
// Don't replace if the value already matches the readable binding
return currentValue.indexOf(binding.readableBinding) === -1
} else if (convertingToReadable) {
// if the runtime and readable bindings are very similar we have to assume it should be replaced
return true
}
// remove all the spaces, if the input is surrounded by spaces e.g. [ Auto ID ] then
// this makes sure it is detected

@@ -189,6 +189,7 @@
<Select
options={settingOptions}
bind:value={condition.setting}
on:change={() => delete condition.settingValue}
/>
<div>TO</div>
{#if definition}

@@ -1,8 +1,8 @@
import { FieldType } from "@budibase/types"
import { SWITCHABLE_TYPES } from "@budibase/shared-core"
import { get, writable, derived } from "svelte/store"
import { cloneDeep } from "lodash/fp"
import { API } from "api"
import { SWITCHABLE_TYPES } from "constants/backend"

export function createTablesStore() {
const store = writable({

@@ -64,7 +64,7 @@ export function createTablesStore() {
if (
oldField != null &&
oldField?.type !== field.type &&
SWITCHABLE_TYPES.indexOf(oldField?.type) === -1
!SWITCHABLE_TYPES[oldField?.type]?.includes(field.type)
) {
updatedTable.schema[key] = oldField
}

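The rewritten guard relies on SWITCHABLE_TYPES (now imported from @budibase/shared-core) being a map from a column's current type to the types it may become, rather than a flat array - an illustrative shape, not the full table:

const SWITCHABLE_TYPES: Partial<Record<FieldType, FieldType[]>> = {
  [FieldType.STRING]: [FieldType.OPTIONS, FieldType.LONGFORM, FieldType.BARCODEQR],
  [FieldType.NUMBER]: [FieldType.BOOLEAN],
}

// old: SWITCHABLE_TYPES.indexOf(oldField?.type) === -1   (is the type switchable at all?)
// new: !SWITCHABLE_TYPES[oldField?.type]?.includes(field.type)   (can it become this type?)
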
@@ -148,12 +148,6 @@ export function createTablesStore() {
if (indexes) {
draft.indexes = indexes
}
// Add object to indicate if column is being added
if (draft.schema[field.name] === undefined) {
draft._add = {
name: field.name,
}
}
draft.schema = {
...draft.schema,
[field.name]: cloneDeep(field),

@@ -9,7 +9,7 @@ const {
ObjectStore,
retrieve,
uploadDirectory,
makeSureBucketExists,
createBucketIfNotExists,
} = objectStore

const bucketList = Object.values(ObjectStoreBuckets)

@@ -61,7 +61,7 @@ export async function importObjects() {
let count = 0
for (let bucket of buckets) {
const client = ObjectStore(bucket)
await makeSureBucketExists(client, bucket)
await createBucketIfNotExists(client, bucket)
const files = await uploadDirectory(bucket, join(path, bucket), "/")
count += files.length
bar.update(count)

@@ -54,11 +54,9 @@ export async function downloadDockerCompose() {

export async function checkDockerConfigured() {
const error =
"docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
"docker has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
const docker = await lookpath("docker")
const compose = await lookpath("docker-compose")
const composeV2 = await lookpath("docker compose")
if (!docker || (!compose && !composeV2)) {
if (!docker) {
throw error
}
}

@@ -6973,6 +6973,12 @@
"key": "stripeRows",
"defaultValue": false
},
{
"type": "boolean",
"label": "Quiet",
"key": "quiet",
"defaultValue": false
},
{
"section": true,
"name": "Columns",

@@ -119,140 +119,142 @@
{/if}
</svelte:head>

<div
id="spectrum-root"
lang="en"
dir="ltr"
class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
class:builder={$builderStore.inBuilder}
class:show={fontsLoaded && dataLoaded}
>
{#if $environmentStore.maintenance.length > 0}
<MaintenanceScreen maintenanceList={$environmentStore.maintenance} />
{:else}
<DeviceBindingsProvider>
<UserBindingsProvider>
<StateBindingsProvider>
<RowSelectionProvider>
<QueryParamsProvider>
<SnippetsProvider>
<!-- Settings bar can be rendered outside of device preview -->
<!-- Key block needs to be outside the if statement or it breaks -->
{#key $builderStore.selectedComponentId}
{#if $builderStore.inBuilder}
<SettingsBar />
{/if}
{/key}

<!-- Clip boundary for selection indicators -->
<div
id="clip-root"
class:preview={$builderStore.inBuilder}
class:tablet-preview={$builderStore.previewDevice ===
"tablet"}
class:mobile-preview={$builderStore.previewDevice ===
"mobile"}
>
<!-- Actual app -->
<div id="app-root">
{#if showDevTools}
<DevToolsHeader />
{#if dataLoaded}
<div
id="spectrum-root"
lang="en"
dir="ltr"
class="spectrum spectrum--medium {$themeStore.baseTheme} {$themeStore.theme}"
class:builder={$builderStore.inBuilder}
class:show={fontsLoaded && dataLoaded}
>
{#if $environmentStore.maintenance.length > 0}
<MaintenanceScreen maintenanceList={$environmentStore.maintenance} />
{:else}
<DeviceBindingsProvider>
<UserBindingsProvider>
<StateBindingsProvider>
<RowSelectionProvider>
<QueryParamsProvider>
<SnippetsProvider>
<!-- Settings bar can be rendered outside of device preview -->
<!-- Key block needs to be outside the if statement or it breaks -->
{#key $builderStore.selectedComponentId}
{#if $builderStore.inBuilder}
<SettingsBar />
{/if}
{/key}

<div id="app-body">
{#if permissionError}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
You don't have permission to use this app
</Heading>
<Body size="S">
Ask your administrator to grant you access
</Body>
</Layout>
</div>
{:else if !$screenStore.activeLayout}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
Something went wrong rendering your app
</Heading>
<Body size="S">
Get in touch with support if this issue persists
</Body>
</Layout>
</div>
{:else if embedNoScreens}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
This Budibase app is not publicly accessible
</Heading>
</Layout>
</div>
{:else}
<CustomThemeWrapper>
{#key $screenStore.activeLayout._id}
<Component
isLayout
instance={$screenStore.activeLayout.props}
/>
{/key}
<!-- Clip boundary for selection indicators -->
<div
id="clip-root"
class:preview={$builderStore.inBuilder}
class:tablet-preview={$builderStore.previewDevice ===
"tablet"}
class:mobile-preview={$builderStore.previewDevice ===
"mobile"}
>
<!-- Actual app -->
<div id="app-root">
{#if showDevTools}
<DevToolsHeader />
{/if}

<!--
<div id="app-body">
{#if permissionError}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
You don't have permission to use this app
</Heading>
<Body size="S">
Ask your administrator to grant you access
</Body>
</Layout>
</div>
{:else if !$screenStore.activeLayout}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
Something went wrong rendering your app
</Heading>
<Body size="S">
Get in touch with support if this issue persists
</Body>
</Layout>
</div>
{:else if embedNoScreens}
<div class="error">
<Layout justifyItems="center" gap="S">
<!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html ErrorSVG}
<Heading size="L">
This Budibase app is not publicly accessible
</Heading>
</Layout>
</div>
{:else}
<CustomThemeWrapper>
{#key $screenStore.activeLayout._id}
<Component
isLayout
instance={$screenStore.activeLayout.props}
/>
{/key}

<!--
Flatpickr needs to be inside the theme wrapper.
It also needs its own container because otherwise it hijacks
key events on the whole page. It is painful to work with.
-->
<div id="flatpickr-root" />
<div id="flatpickr-root" />

<!-- Modal container to ensure they sit on top -->
<div class="modal-container" />
<!-- Modal container to ensure they sit on top -->
<div class="modal-container" />

<!-- Layers on top of app -->
<NotificationDisplay />
<ConfirmationDisplay />
<PeekScreenDisplay />
</CustomThemeWrapper>
{/if}
<!-- Layers on top of app -->
<NotificationDisplay />
<ConfirmationDisplay />
<PeekScreenDisplay />
</CustomThemeWrapper>
{/if}

{#if showDevTools}
<DevTools />
{#if showDevTools}
<DevTools />
{/if}
</div>

{#if !$builderStore.inBuilder && $featuresStore.logoEnabled}
<FreeFooter />
{/if}
</div>

{#if !$builderStore.inBuilder && $featuresStore.logoEnabled}
<FreeFooter />
<!-- Preview and dev tools utilities -->
{#if $appStore.isDevApp}
<SelectionIndicator />
{/if}
{#if $builderStore.inBuilder || $devToolsStore.allowSelection}
<HoverIndicator />
{/if}
{#if $builderStore.inBuilder}
<DNDHandler />
<GridDNDHandler />
{/if}
</div>

<!-- Preview and dev tools utilities -->
{#if $appStore.isDevApp}
<SelectionIndicator />
{/if}
{#if $builderStore.inBuilder || $devToolsStore.allowSelection}
<HoverIndicator />
{/if}
{#if $builderStore.inBuilder}
<DNDHandler />
<GridDNDHandler />
{/if}
</div>
</SnippetsProvider>
</QueryParamsProvider>
</RowSelectionProvider>
</StateBindingsProvider>
</UserBindingsProvider>
</DeviceBindingsProvider>
{/if}
</div>
<KeyboardManager />
</SnippetsProvider>
</QueryParamsProvider>
</RowSelectionProvider>
</StateBindingsProvider>
</UserBindingsProvider>
</DeviceBindingsProvider>
{/if}
</div>
<KeyboardManager />
{/if}

<style>
#spectrum-root {

@@ -11,6 +11,7 @@
export let allowEditRows = true
export let allowDeleteRows = true
export let stripeRows = false
export let quiet = false
export let initialFilter = null
export let initialSortColumn = null
export let initialSortOrder = null

@@ -117,6 +118,7 @@
datasource={table}
{API}
{stripeRows}
{quiet}
{initialFilter}
{initialSortColumn}
{initialSortOrder}

@@ -67,6 +67,11 @@

const removeFilter = id => {
filters = filters.filter(field => field.id !== id)

// Clear all filters when no fields are specified
if (filters.length === 1 && filters[0].onEmptyFilter) {
filters = []
}
}

const duplicateFilter = id => {

@@ -39,6 +39,7 @@
export let canEditColumns = true
export let canSaveSchema = true
export let stripeRows = false
export let quiet = false
export let collaboration = true
export let showAvatars = true
export let showControls = true

@@ -91,6 +92,7 @@
canEditColumns,
canSaveSchema,
stripeRows,
quiet,
collaboration,
showAvatars,
showControls,

@@ -124,6 +126,7 @@
class:is-resizing={$isResizing}
class:is-reordering={$isReordering}
class:stripe={stripeRows}
class:quiet
on:mouseenter={() => gridFocused.set(true)}
on:mouseleave={() => gridFocused.set(false)}
style="--row-height:{$rowHeight}px; --default-row-height:{DefaultRowHeight}px; --gutter-width:{GutterWidth}px; --max-cell-render-height:{MaxCellRenderHeight}px; --max-cell-render-width-overflow:{MaxCellRenderWidthOverflow}px; --content-lines:{$contentLines};"

@@ -331,4 +334,9 @@
.grid-data-outer :global(.spectrum-Checkbox-partialCheckmark) {
transition: none;
}

/* Overrides */
.grid.quiet :global(.grid-data-content .row > .cell:not(:last-child)) {
border-right: none;
}
</style>

@@ -1 +1 @@
Subproject commit 06b1064f7e2f7cac5d4bef2ee999796a2a1f0f2c
Subproject commit dff7b5a9dd1fd770f8a48fb8e6df1740be605f18

@@ -61,14 +61,17 @@
"@google-cloud/firestore": "6.8.0",
"@koa/router": "8.0.8",
"@socket.io/redis-adapter": "^8.2.1",
"@types/xml2js": "^0.4.14",
"airtable": "0.10.1",
"arangojs": "7.2.0",
"archiver": "7.0.1",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bl": "^6.0.12",
"bull": "4.10.1",
"chokidar": "3.5.3",
"content-disposition": "^0.5.4",
"cookies": "0.8.0",
"csvtojson": "2.0.10",
"curlconverter": "3.21.0",

@@ -4,6 +4,7 @@ services:
# user: sa
# database: master
mssql:
# platform: linux/amd64
image: bb/mssql
build:
context: .

@@ -76,7 +76,7 @@ function writeFile(output: any, filename: string) {
console.log(`Wrote spec to ${path}`)
return path
} catch (err) {
console.error(err)
console.error("Error writing spec file", err)
}
}

@@ -374,38 +374,44 @@ export class ExternalRequest<T extends Operation> {
) {
continue
}
let tableId: string | undefined,
let relatedTableId: string | undefined,
lookupField: string | undefined,
fieldName: string | undefined
if (isManyToMany(field)) {
tableId = field.through
relatedTableId = field.through
lookupField = primaryKey
fieldName = field.throughTo || primaryKey
} else if (isManyToOne(field)) {
tableId = field.tableId
relatedTableId = field.tableId
lookupField = field.foreignKey
fieldName = field.fieldName
}
if (!tableId || !lookupField || !fieldName) {
if (!relatedTableId || !lookupField || !fieldName) {
throw new Error(
"Unable to lookup relationships - undefined column properties."
)
}
const { tableName: relatedTableName } = breakExternalTableId(tableId)
const { tableName: relatedTableName } =
breakExternalTableId(relatedTableId)
// @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0]
if (!lookupField || !row[lookupField]) {
continue
}
const endpoint = getEndpoint(relatedTableId, Operation.READ)
const relatedTable = this.tables[endpoint.entityId]
if (!relatedTable) {
throw new Error("unable to find related table")
}
const response = await getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.READ),
endpoint: endpoint,
filters: {
equal: {
[fieldName]: row[lookupField],
},
},
meta: {
table,
table: relatedTable,
},
})
// this is the response from knex if no rows found

@@ -414,7 +420,11 @@ export class ExternalRequest<T extends Operation> {
const storeTo = isManyToMany(field)
? field.throughFrom || linkPrimaryKey
: fieldName
related[storeTo] = { rows, isMany: isManyToMany(field), tableId }
related[storeTo] = {
rows,
isMany: isManyToMany(field),
tableId: relatedTableId,
}
}
return related
}

@@ -437,7 +447,6 @@ export class ExternalRequest<T extends Operation> {
// if we're creating (in a through table) need to wipe the existing ones first
const promises = []
const related = await this.lookupRelations(mainTableId, row)
const table = this.getTable(mainTableId)!
for (let relationship of relationships) {
const { key, tableId, isUpdate, id, ...rest } = relationship
const body: { [key: string]: any } = processObjectSync(rest, row, {})

@@ -484,7 +493,7 @@ export class ExternalRequest<T extends Operation> {
body,
filters: buildFilters(id, {}, linkTable),
meta: {
table,
table: linkTable,
},
})
)

@@ -31,7 +31,6 @@ export async function save(
renaming?: RenameColumn
) {
const inputs = ctx.request.body
const adding = inputs?._add
// can't do this right now
delete inputs.rows
const tableId = ctx.request.body._id

@@ -44,7 +43,7 @@ export async function save(
const { datasource, table } = await sdk.tables.external.save(
datasourceId!,
inputs,
{ tableId, renaming, adding }
{ tableId, renaming }
)
builderSocket?.emitDatasourceUpdate(ctx, datasource)
return table

@@ -77,11 +77,6 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const renaming = ctx.request.body._rename

const api = pickApi({ table })
// do not pass _rename or _add if saving to CouchDB
if (api === internal) {
delete ctx.request.body._add
delete ctx.request.body._rename
}
let savedTable = await api.save(ctx, renaming)
if (!table._id) {
savedTable = sdk.tables.enrichViewSchemas(savedTable)

@@ -16,7 +16,7 @@ export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
const { rows, ...rest } = ctx.request.body
const { _rename, rows, ...rest } = ctx.request.body
let tableToSave: Table = {
_id: generateTableID(),
...rest,

@@ -6,7 +6,19 @@ import sdk from "../../../sdk"

import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview, SourceName } from "@budibase/types"
import {
Datasource,
FieldSchema,
FieldSubtype,
FieldType,
QueryPreview,
RelationshipType,
SourceName,
Table,
TableSchema,
} from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"

tk.freeze(mocks.date.MOCK_DATE)

@@ -223,4 +235,152 @@ describe("/datasources", () => {
})
})
})

describe.each([
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("fetch schema (%s)", (_, dsProvider) => {
beforeAll(async () => {
datasource = await config.api.datasource.create(await dsProvider)
})

it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!

const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)

type SupportedSqlTypes =
| FieldType.STRING
| FieldType.BARCODEQR
| FieldType.LONGFORM
| FieldType.OPTIONS
| FieldType.DATETIME
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.FORMULA
| FieldType.BIGINT
| FieldType.BB_REFERENCE
| FieldType.LINK
| FieldType.ARRAY

const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: { allowEmpty: false },
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
}

await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)

const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema(datasourceId)

const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = {
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = expect.objectContaining({
...field,
})
return acc
},
{}
),
}
return acc
},
{}
),

_rev: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
})

@@ -4,6 +4,7 @@ import {
Query,
QueryPreview,
SourceName,
TableSourceType,
} from "@budibase/types"
import * as setup from "../utilities"
import {

@@ -740,12 +741,25 @@ describe.each(
})

describe("query through datasource", () => {
it("should be able to query a pg datasource", async () => {
it("should be able to query the datasource", async () => {
const entityId = "test_table"
await config.api.datasource.update({
...datasource,
entities: {
[entityId]: {
name: entityId,
schema: {},
type: "table",
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
},
},
})
const res = await config.api.datasource.query({
endpoint: {
datasourceId: datasource._id!,
operation: Operation.READ,
entityId: "test_table",
entityId,
},
resource: {
fields: ["id", "name"],

@@ -26,6 +26,7 @@ describe.each([
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/api/:sourceId/search (%s)", (name, dsProvider) => {
const isSqs = name === "internal-sqs"
const isInternal = name === "internal"
const config = setup.getConfig()

let envCleanup: (() => void) | undefined

@@ -336,6 +337,20 @@ describe.each([
expectQuery({
range: { age: { low: 5, high: 9 } },
}).toFindNothing())

// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a low value", () =>
expectQuery({
range: { age: { low: 5 } },
}).toContainExactly([{ age: 10 }]))

// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a high value", () =>
expectQuery({
range: { age: { high: 5 } },
}).toContainExactly([{ age: 1 }]))
})

describe("sort", () => {

@@ -440,6 +455,20 @@ describe.each([
expectQuery({
range: { dob: { low: JAN_5TH, high: JAN_9TH } },
}).toFindNothing())

// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a low value", () =>
expectQuery({
range: { dob: { low: JAN_5TH } },
}).toContainExactly([{ dob: JAN_10TH }]))

// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a high value", () =>
expectQuery({
range: { dob: { high: JAN_5TH } },
}).toContainExactly([{ dob: JAN_1ST }]))
})

describe("sort", () => {

@ -550,4 +579,100 @@ describe.each([
|
|||
]))
|
||||
})
|
||||
})
|
||||
|
||||
describe("bigints", () => {
|
||||
const SMALL = "1"
|
||||
const MEDIUM = "10000000"
|
||||
|
||||
// Our bigints are int64s in most datasources.
|
||||
const BIG = "9223372036854775807"
|
||||
|
||||
beforeAll(async () => {
|
||||
await createTable({
|
||||
num: { name: "num", type: FieldType.BIGINT },
|
||||
})
|
||||
await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }])
|
||||
})
|
||||
|
||||
describe("equal", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ equal: { num: SMALL } }).toContainExactly([
|
||||
{ num: SMALL },
|
||||
]))
|
||||
|
||||
it("successfully finds a big value", () =>
|
||||
expectQuery({ equal: { num: BIG } }).toContainExactly([{ num: BIG }]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ equal: { num: "2" } }).toFindNothing())
|
||||
})
|
||||
|
||||
describe("notEqual", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ notEqual: { num: SMALL } }).toContainExactly([
|
||||
{ num: MEDIUM },
|
||||
{ num: BIG },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ notEqual: { num: 10 } }).toContainExactly([
|
||||
{ num: SMALL },
|
||||
{ num: MEDIUM },
|
||||
{ num: BIG },
|
||||
]))
|
||||
})
|
||||
|
||||
describe("oneOf", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([
|
||||
{ num: SMALL },
|
||||
]))
|
||||
|
||||
it("successfully finds all rows", () =>
|
||||
expectQuery({ oneOf: { num: [SMALL, MEDIUM, BIG] } }).toContainExactly([
|
||||
{ num: SMALL },
|
||||
{ num: MEDIUM },
|
||||
{ num: BIG },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ oneOf: { num: [2] } }).toFindNothing())
|
||||
})
|
||||
|
||||
// Range searches against bigints don't seem to work at all in Lucene, and I
|
||||
// couldn't figure out why. Given that we're replacing Lucene with SQS,
|
||||
// we've decided not to spend time on it.
|
||||
!isInternal &&
|
||||
describe("range", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({
|
||||
range: { num: { low: SMALL, high: "5" } },
|
||||
}).toContainExactly([{ num: SMALL }]))
|
||||
|
||||
it("successfully finds multiple rows", () =>
|
||||
expectQuery({
|
||||
range: { num: { low: SMALL, high: MEDIUM } },
|
||||
}).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
|
||||
|
||||
it("successfully finds a row with a high bound", () =>
|
||||
expectQuery({
|
||||
range: { num: { low: MEDIUM, high: BIG } },
|
||||
}).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
|
||||
|
||||
it("successfully finds no rows", () =>
|
||||
expectQuery({
|
||||
range: { num: { low: "5", high: "5" } },
|
||||
}).toFindNothing())
|
||||
|
||||
it("can search using just a low value", () =>
|
||||
expectQuery({
|
||||
range: { num: { low: MEDIUM } },
|
||||
}).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
|
||||
|
||||
it("can search using just a high value", () =>
|
||||
expectQuery({
|
||||
range: { num: { high: MEDIUM } },
|
||||
}).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
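The bigint constants in the tests above are strings on purpose: JavaScript's `number` is a float64 and rounds at int64 scale. A quick sketch (values ours, not from the commit):

```ts
const BIG = "9223372036854775807" // max int64
// As a number it rounds up to 2^63; as a string/BigInt it survives intact.
Number(BIG).toString() // "9223372036854775808"
BigInt(BIG).toString() // "9223372036854775807"
```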
@@ -34,7 +34,7 @@ describe.each([
  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => {
  let isInternal: boolean
  const isInternal: boolean = !dsProvider
  let datasource: Datasource | undefined
  let config = setup.getConfig()

@@ -44,9 +44,6 @@
    await config.init()
    if (dsProvider) {
      datasource = await config.api.datasource.create(await dsProvider)
      isInternal = false
    } else {
      isInternal = true
    }
  })

@@ -219,9 +216,6 @@

    it("should add a new column for an internal DB table", async () => {
      const saveTableRequest: SaveTableRequest = {
        _add: {
          name: "NEW_COLUMN",
        },
        ...basicTable(),
      }

@@ -235,7 +229,6 @@
        updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
        views: {},
      }
      delete expectedResponse._add
      expect(response).toEqual(expectedResponse)
    })
  })

@@ -7,6 +7,7 @@ import {
  AutomationStepType,
  AutomationIOType,
  AutomationFeature,
  AutomationCustomIOType,
} from "@budibase/types"

export const definition: AutomationStepSchema = {

@@ -72,10 +73,10 @@ export const definition: AutomationStepSchema = {
        title: "Location",
        dependsOn: "addInvite",
      },
      url: {
        type: AutomationIOType.STRING,
        title: "URL",
        dependsOn: "addInvite",
      attachments: {
        type: AutomationIOType.ATTACHMENT,
        customType: AutomationCustomIOType.MULTI_ATTACHMENTS,
        title: "Attachments",
      },
    },
    required: ["to", "from", "subject", "contents"],

@@ -110,11 +111,13 @@ export async function run({ inputs }: AutomationStepInput) {
    summary,
    location,
    url,
    attachments,
  } = inputs
  if (!contents) {
    contents = "<h1>No content</h1>"
  }
  to = to || undefined

  try {
    let response = await sendSmtpEmail({
      to,

@@ -124,6 +127,7 @@ export async function run({ inputs }: AutomationStepInput) {
      cc,
      bcc,
      automation: true,
      attachments,
      invite: addInvite
        ? {
            startTime,

@@ -50,6 +50,10 @@ describe("test the outgoing webhook action", () => {
      cc: "cc",
      bcc: "bcc",
      addInvite: true,
      attachments: [
        { url: "attachment1", filename: "attachment1.txt" },
        { url: "attachment2", filename: "attachment2.txt" },
      ],
      ...invite,
    }
    let resp = generateResponse(inputs.to, inputs.from)

@@ -69,6 +73,10 @@ describe("test the outgoing webhook action", () => {
      bcc: "bcc",
      invite,
      automation: true,
      attachments: [
        { url: "attachment1", filename: "attachment1.txt" },
        { url: "attachment2", filename: "attachment2.txt" },
      ],
    })
  })
})

@@ -16,7 +16,6 @@ import {
  getDatasource,
  rawQuery,
} from "../integrations/tests/utils"
import { builderSocket } from "../websockets"
import { generator } from "@budibase/backend-core/tests"
// @ts-ignore
fetch.mockSearch()

@@ -233,72 +232,6 @@ describe("mysql integrations", () => {
  })

  describe("POST /api/tables/", () => {
    const emitDatasourceUpdateMock = jest.fn()

    it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
      const addColumnToTable: TableRequest = {
        type: "table",
        sourceType: TableSourceType.EXTERNAL,
        name: uniqueTableName(),
        sourceId: datasource._id!,
        primary: ["id"],
        schema: {
          id: {
            type: FieldType.AUTO,
            name: "id",
            autocolumn: true,
          },
          new_column: {
            type: FieldType.NUMBER,
            name: "new_column",
          },
        },
        _add: {
          name: "new_column",
        },
      }

      jest
        .spyOn(builderSocket!, "emitDatasourceUpdate")
        .mockImplementation(emitDatasourceUpdateMock)

      await makeRequest("post", "/api/tables/", addColumnToTable)

      const expectedTable: TableRequest = {
        ...addColumnToTable,
        schema: {
          id: {
            type: FieldType.NUMBER,
            name: "id",
            autocolumn: true,
            constraints: {
              presence: false,
            },
            externalType: "int unsigned",
          },
          new_column: {
            type: FieldType.NUMBER,
            name: "new_column",
            autocolumn: false,
            constraints: {
              presence: false,
            },
            externalType: "float(8,2)",
          },
        },
        created: true,
        _id: `${datasource._id}__${addColumnToTable.name}`,
      }
      delete expectedTable._add

      expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
      const emittedDatasource: Datasource =
        emitDatasourceUpdateMock.mock.calls[0][1]
      expect(emittedDatasource.entities![expectedTable.name]).toEqual(
        expectedTable
      )
    })

    it("will rename a column", async () => {
      await makeRequest("post", "/api/tables/", primaryMySqlTable)

@@ -2,6 +2,7 @@ import {
  QueryJson,
  Datasource,
  DatasourcePlusQueryResponse,
  RowOperations,
} from "@budibase/types"
import { getIntegration } from "../index"
import sdk from "../../sdk"

@@ -10,6 +11,17 @@ export async function makeExternalQuery(
  datasource: Datasource,
  json: QueryJson
): Promise<DatasourcePlusQueryResponse> {
  const entityId = json.endpoint.entityId,
    tableName = json.meta.table.name,
    tableId = json.meta.table._id
  // case found during testing - make sure this doesn't happen again
  if (
    RowOperations.includes(json.endpoint.operation) &&
    entityId !== tableId &&
    entityId !== tableName
  ) {
    throw new Error("Entity ID and table metadata do not align")
  }
  datasource = await sdk.datasources.enrich(datasource)
  const Integration = await getIntegration(datasource.source)
  // query is the opinionated function

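A hedged sketch (names and shape ours, cast for brevity) of the mismatch the new guard rejects:

```ts
// endpoint targets "orders" but the metadata table is "customers" - before
// this check, such a payload could run against the wrong table
const badJson = {
  endpoint: { datasourceId: "ds_id", entityId: "orders", operation: Operation.READ },
  meta: { table: { name: "customers", _id: "ta_customers" } },
} as unknown as QueryJson
// await makeExternalQuery(datasource, badJson)
// -> Error: "Entity ID and table metadata do not align"
```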
@@ -6,6 +6,7 @@ import {
  SqlClient,
  isValidFilter,
  getNativeSql,
  SqlStatements,
} from "../utils"
import SqlTableQueryBuilder from "./sqlTable"
import {

@@ -160,9 +161,19 @@
  addFilters(
    query: Knex.QueryBuilder,
    filters: SearchFilters | undefined,
    tableName: string,
    table: Table,
    opts: { aliases?: Record<string, string>; relationship?: boolean }
  ): Knex.QueryBuilder {
    if (!filters) {
      return query
    }
    filters = parseFilters(filters)
    // if all or specified in filters, then everything is an or
    const allOr = filters.allOr
    const sqlStatements = new SqlStatements(this.client, table, { allOr })
    const tableName =
      this.client === SqlClient.SQL_LITE ? table._id! : table.name

    function getTableAlias(name: string) {
      const alias = opts.aliases?.[name]
      return alias || name

@@ -258,12 +269,6 @@
      }
    }

    if (!filters) {
      return query
    }
    filters = parseFilters(filters)
    // if all or specified in filters, then everything is an or
    const allOr = filters.allOr
    if (filters.oneOf) {
      iterate(filters.oneOf, (key, array) => {
        const fnc = allOr ? "orWhereIn" : "whereIn"

@@ -306,17 +311,11 @@
        const lowValid = isValidFilter(value.low),
          highValid = isValidFilter(value.high)
        if (lowValid && highValid) {
          // Use a between operator if we have 2 valid range values
          const fnc = allOr ? "orWhereBetween" : "whereBetween"
          query = query[fnc](key, [value.low, value.high])
          query = sqlStatements.between(query, key, value.low, value.high)
        } else if (lowValid) {
          // Use just a single greater than operator if we only have a low
          const fnc = allOr ? "orWhere" : "where"
          query = query[fnc](key, ">", value.low)
          query = sqlStatements.lte(query, key, value.low)
        } else if (highValid) {
          // Use just a single less than operator if we only have a high
          const fnc = allOr ? "orWhere" : "where"
          query = query[fnc](key, "<", value.high)
          query = sqlStatements.gte(query, key, value.high)
        }
      })
    }

@@ -359,7 +358,7 @@

  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
    let { sort, paginate } = json
    const table = json.meta?.table
    const table = json.meta.table
    const tableName = getTableName(table)
    const aliases = json.tableAliases
    const aliased =

@@ -473,14 +472,13 @@
  ): Knex.QueryBuilder {
    const tableName = endpoint.entityId
    const tableAlias = aliases?.[tableName]
    let table: string | Record<string, string> = tableName
    if (tableAlias) {
      table = { [tableAlias]: tableName }
    }
    let query = knex(table)
    if (endpoint.schema) {
      query = query.withSchema(endpoint.schema)
    }

    const query = knex(
      this.tableNameWithSchema(tableName, {
        alias: tableAlias,
        schema: endpoint.schema,
      })
    )
    return query
  }

@@ -547,7 +545,7 @@
    if (foundOffset) {
      query = query.offset(foundOffset)
    }
    query = this.addFilters(query, filters, tableName, {
    query = this.addFilters(query, filters, json.meta.table, {
      aliases: tableAliases,
    })
    // add sorting to pre-query

@@ -568,7 +566,7 @@
      endpoint.schema,
      tableAliases
    )
    return this.addFilters(query, filters, tableName, {
    return this.addFilters(query, filters, json.meta.table, {
      relationship: true,
      aliases: tableAliases,
    })

@@ -578,7 +576,7 @@
    const { endpoint, body, filters, tableAliases } = json
    let query = this.knexWithAlias(knex, endpoint, tableAliases)
    const parsedBody = parseBody(body)
    query = this.addFilters(query, filters, endpoint.entityId, {
    query = this.addFilters(query, filters, json.meta.table, {
      aliases: tableAliases,
    })
    // mysql can't use returning

@@ -592,7 +590,7 @@
  delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
    const { endpoint, filters, tableAliases } = json
    let query = this.knexWithAlias(knex, endpoint, tableAliases)
    query = this.addFilters(query, filters, endpoint.entityId, {
    query = this.addFilters(query, filters, json.meta.table, {
      aliases: tableAliases,
    })
    // mysql can't use returning

@@ -684,7 +682,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
  // when creating if an ID has been inserted need to make sure
  // the id filter is enriched with it before trying to retrieve the row
  checkLookupKeys(id: any, json: QueryJson) {
    if (!id || !json.meta?.table || !json.meta.table.primary) {
    if (!id || !json.meta.table || !json.meta.table.primary) {
      return json
    }
    const primaryKey = json.meta.table.primary?.[0]

@@ -378,7 +378,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
    try {
      await connection.close()
    } catch (err) {
      console.error(err)
      console.error("Error connecting to Oracle", err)
    }
  }
}

@@ -21,6 +21,10 @@ import { performance } from "perf_hooks"
import FormData from "form-data"
import { URLSearchParams } from "url"
import { blacklist } from "@budibase/backend-core"
import { handleFileResponse, handleXml } from "./utils"
import { parse } from "content-disposition"
import path from "path"
import { Builder as XmlBuilder } from "xml2js"

const BodyTypes = {
  NONE: "none",

@@ -57,8 +61,6 @@ const coreFields = {
  },
}

const { parseStringPromise: xmlParser, Builder: XmlBuilder } = require("xml2js")

const SCHEMA: Integration = {
  docs: "https://github.com/node-fetch/node-fetch",
  description:

@@ -129,42 +131,44 @@ class RestIntegration implements IntegrationBase {
  }

  async parseResponse(response: any, pagination: PaginationConfig | null) {
    let data, raw, headers
    let data, raw, headers, filename

    const contentType = response.headers.get("content-type") || ""
    const contentDisposition = response.headers.get("content-disposition") || ""
    if (
      contentDisposition.includes("attachment") ||
      contentDisposition.includes("form-data")
    ) {
      filename =
        path.basename(parse(contentDisposition).parameters?.filename) || ""
    }

    try {
      if (response.status === 204) {
        data = []
        raw = []
      } else if (contentType.includes("application/json")) {
        data = await response.json()
        raw = JSON.stringify(data)
      } else if (
        contentType.includes("text/xml") ||
        contentType.includes("application/xml")
      ) {
        const rawXml = await response.text()
        data =
          (await xmlParser(rawXml, {
            explicitArray: false,
            trim: true,
            explicitRoot: false,
          })) || {}
        // there is only one structure, its an array, return the array so it appears as rows
        const keys = Object.keys(data)
        if (keys.length === 1 && Array.isArray(data[keys[0]])) {
          data = data[keys[0]]
        }
        raw = rawXml
      } else if (contentType.includes("application/pdf")) {
        data = await response.arrayBuffer() // Save PDF as ArrayBuffer
        raw = Buffer.from(data)
      if (filename) {
        return handleFileResponse(response, filename, this.startTimeMs)
      } else {
        data = await response.text()
        raw = data
        if (response.status === 204) {
          data = []
          raw = []
        } else if (contentType.includes("application/json")) {
          data = await response.json()
          raw = JSON.stringify(data)
        } else if (
          contentType.includes("text/xml") ||
          contentType.includes("application/xml")
        ) {
          let xmlResponse = await handleXml(response)
          data = xmlResponse.data
          raw = xmlResponse.rawXml
        } else {
          data = await response.text()
          raw = data
        }
      }
    } catch (err) {
      throw "Failed to parse response body."
      throw `Failed to parse response body: ${err}`
    }

    const size = formatBytes(
      response.headers.get("content-length") || Buffer.byteLength(raw, "utf8")
    )

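The filename detection above uses the `content-disposition` package's `parse`; a minimal standalone sketch (header value invented):

```ts
import { parse } from "content-disposition"
import path from "path"

const header = `attachment; filename="reports/march.tar.gz"`
// parse() returns { type, parameters }; basename strips any path segments
const filename = path.basename(parse(header).parameters?.filename) || ""
// filename === "march.tar.gz"
```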
@@ -13,9 +13,23 @@ jest.mock("node-fetch", () => {
  }))
})

import fetch from "node-fetch"
jest.mock("@budibase/backend-core", () => {
  const core = jest.requireActual("@budibase/backend-core")
  return {
    ...core,
    context: {
      ...core.context,
      getProdAppId: jest.fn(() => "app-id"),
    },
  }
})
jest.mock("uuid", () => ({ v4: () => "00000000-0000-0000-0000-000000000000" }))

import { default as RestIntegration } from "../rest"
import { RestAuthType } from "@budibase/types"
import fetch from "node-fetch"
import { objectStoreTestProviders } from "@budibase/backend-core/tests"
import { Readable } from "stream"

const FormData = require("form-data")
const { URLSearchParams } = require("url")

@@ -611,4 +625,104 @@ describe("REST Integration", () => {
    expect(calledConfig.headers).toEqual({})
    expect(calledConfig.agent.options.rejectUnauthorized).toBe(false)
  })

  describe("File Handling", () => {
    beforeAll(async () => {
      jest.unmock("aws-sdk")
      await objectStoreTestProviders.minio.start()
    })

    afterAll(async () => {
      await objectStoreTestProviders.minio.stop()
    })

    it("uploads file to object store and returns signed URL", async () => {
      const responseData = Buffer.from("test file content")
      const filename = "test.tar.gz"
      const contentType = "application/gzip"
      const mockReadable = new Readable()
      mockReadable.push(responseData)
      mockReadable.push(null)
      ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
        Promise.resolve({
          headers: {
            raw: () => ({
              "content-type": [contentType],
              "content-disposition": [`attachment; filename="${filename}"`],
            }),
            get: (header: any) => {
              if (header === "content-type") return contentType
              if (header === "content-disposition")
                return `attachment; filename="${filename}"`
            },
          },
          body: mockReadable,
        })
      )

      const query = {
        path: "api",
      }

      const response = await config.integration.read(query)

      expect(response.data).toEqual({
        size: responseData.byteLength,
        name: "00000000-0000-0000-0000-000000000000.tar.gz",
        url: expect.stringContaining(
          "/files/signed/tmp-file-attachments/app-id/00000000-0000-0000-0000-000000000000.tar.gz"
        ),
        extension: "tar.gz",
        key: expect.stringContaining(
          "app-id/00000000-0000-0000-0000-000000000000.tar.gz"
        ),
      })
    })

    it("uploads file with non ascii filename to object store and returns signed URL", async () => {
      const responseData = Buffer.from("test file content")
      const contentType = "text/plain"
      const mockReadable = new Readable()
      mockReadable.push(responseData)
      mockReadable.push(null)
      ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
        Promise.resolve({
          headers: {
            raw: () => ({
              "content-type": [contentType],
              "content-disposition": [
                // eslint-disable-next-line no-useless-escape
                `attachment; filename="£ and ? rates.pdf"; filename*=UTF-8'\'%C2%A3%20and%20%E2%82%AC%20rates.pdf`,
              ],
            }),
            get: (header: any) => {
              if (header === "content-type") return contentType
              if (header === "content-disposition")
                // eslint-disable-next-line no-useless-escape
                return `attachment; filename="£ and ? rates.pdf"; filename*=UTF-8'\'%C2%A3%20and%20%E2%82%AC%20rates.pdf`
            },
          },
          body: mockReadable,
        })
      )

      const query = {
        path: "api",
      }

      const response = await config.integration.read(query)

      expect(response.data).toEqual({
        size: responseData.byteLength,
        name: "00000000-0000-0000-0000-000000000000.pdf",
        url: expect.stringContaining(
          "/files/signed/tmp-file-attachments/app-id/00000000-0000-0000-0000-000000000000.pdf"
        ),
        extension: "pdf",
        key: expect.stringContaining(
          "app-id/00000000-0000-0000-0000-000000000000.pdf"
        ),
      })
    })
  })
})

@@ -1,11 +1,11 @@
import { SqlClient } from "../utils"
import Sql from "../base/sql"
import {
  FieldType,
  Operation,
  QueryJson,
  TableSourceType,
  Table,
  FieldType,
  TableSourceType,
} from "@budibase/types"

const TABLE_NAME = "test"

@@ -13,7 +13,12 @@ const TABLE: Table = {
  type: "table",
  sourceType: TableSourceType.EXTERNAL,
  sourceId: "SOURCE_ID",
  schema: {},
  schema: {
    id: {
      name: "id",
      type: FieldType.NUMBER,
    },
  },
  name: TABLE_NAME,
  primary: ["id"],
}

@@ -73,7 +78,7 @@ function generateUpdateJson({
  meta?: any
}): QueryJson {
  if (!meta.table) {
    meta.table = table
    meta.table = TABLE
  }
  return {
    endpoint: endpoint(table, "UPDATE"),

@@ -158,6 +163,9 @@ function generateManyRelationshipJson(config: { schema?: string } = {}) {
      },
    ],
    extra: { idFilter: {} },
    meta: {
      table: TABLE,
    },
  }
}

@@ -341,7 +349,7 @@ describe("SQL query builder", () => {
    )
    expect(query).toEqual({
      bindings: [date, limit],
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" > $1 limit $2) as "${TABLE_NAME}"`,
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" >= $1 limit $2) as "${TABLE_NAME}"`,
    })
  })

@@ -360,7 +368,7 @@ describe("SQL query builder", () => {
    )
    expect(query).toEqual({
      bindings: [date, limit],
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" < $1 limit $2) as "${TABLE_NAME}"`,
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" <= $1 limit $2) as "${TABLE_NAME}"`,
    })
  })

@@ -594,7 +602,7 @@ describe("SQL query builder", () => {
    )
    expect(query).toEqual({
      bindings: ["2000-01-01 00:00:00", 500],
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" > $1 limit $2) as "${TABLE_NAME}"`,
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" >= $1 limit $2) as "${TABLE_NAME}"`,
    })
  })

@@ -613,7 +621,7 @@ describe("SQL query builder", () => {
    )
    expect(query).toEqual({
      bindings: ["2010-01-01 00:00:00", 500],
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" < $1 limit $2) as "${TABLE_NAME}"`,
      sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" <= $1 limit $2) as "${TABLE_NAME}"`,
    })
  })

@@ -117,7 +117,8 @@ describe("Captures of real examples", () => {
    let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
    const filters = queryJson.filters
    const notEqualsValue = Object.values(filters?.notEqual!)[0]
    const rangeValue = Object.values(filters?.range!)[0]
    const rangeValue: { high?: string | number; low?: string | number } =
      Object.values(filters?.range!)[0]
    const equalValue = Object.values(filters?.equal!)[0]

    expect(query).toEqual({

@@ -0,0 +1,2 @@
export * from "./utils"
export { SqlStatements } from "./sqlStatements"

@@ -0,0 +1,80 @@
import { FieldType, Table, FieldSchema } from "@budibase/types"
import { SqlClient } from "./utils"
import { Knex } from "knex"

export class SqlStatements {
  client: string
  table: Table
  allOr: boolean | undefined
  constructor(
    client: string,
    table: Table,
    { allOr }: { allOr?: boolean } = {}
  ) {
    this.client = client
    this.table = table
    this.allOr = allOr
  }

  getField(key: string): FieldSchema | undefined {
    const fieldName = key.split(".")[1]
    return this.table.schema[fieldName]
  }

  between(
    query: Knex.QueryBuilder,
    key: string,
    low: number | string,
    high: number | string
  ) {
    // Use a between operator if we have 2 valid range values
    const field = this.getField(key)
    if (
      field?.type === FieldType.BIGINT &&
      this.client === SqlClient.SQL_LITE
    ) {
      query = query.whereRaw(
        `CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
        [low, high]
      )
    } else {
      const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
      query = query[fnc](key, [low, high])
    }
    return query
  }

  lte(query: Knex.QueryBuilder, key: string, low: number | string) {
    // Use just a single greater than operator if we only have a low
    const field = this.getField(key)
    if (
      field?.type === FieldType.BIGINT &&
      this.client === SqlClient.SQL_LITE
    ) {
      query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
        low,
      ])
    } else {
      const fnc = this.allOr ? "orWhere" : "where"
      query = query[fnc](key, ">=", low)
    }
    return query
  }

  gte(query: Knex.QueryBuilder, key: string, high: number | string) {
    const field = this.getField(key)
    // Use just a single less than operator if we only have a high
    if (
      field?.type === FieldType.BIGINT &&
      this.client === SqlClient.SQL_LITE
    ) {
      query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
        high,
      ])
    } else {
      const fnc = this.allOr ? "orWhere" : "where"
      query = query[fnc](key, "<=", high)
    }
    return query
  }
}

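To see what the SQLite bigint branch emits, a small knex sketch (table and column names ours; assumes the sqlite3 driver is installed):

```ts
import knex from "knex"

const db = knex({ client: "sqlite3", useNullAsDefault: true })
const { sql, bindings } = db("rows")
  .whereRaw(
    `CAST(num AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
    ["1", "9223372036854775807"]
  )
  .toSQL()
// sql      -> select * from `rows` where CAST(num AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)
// bindings -> ["1", "9223372036854775807"]
```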
@@ -4,18 +4,45 @@ import {
  Datasource,
  FieldType,
  TableSourceType,
  FieldSchema,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../constants"
import { helpers } from "@budibase/shared-core"
import env from "../environment"
import { context, objectStore } from "@budibase/backend-core"
import { v4 } from "uuid"
import { parseStringPromise as xmlParser } from "xml2js"
import { formatBytes } from "../../utilities"
import bl from "bl"
import env from "../../environment"
import { DocumentType, SEPARATOR } from "../../db/utils"
import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { helpers, utils } from "@budibase/shared-core"
import { Knex } from "knex"

const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ")

const SQL_NUMBER_TYPE_MAP = {
type PrimitiveTypes =
  | FieldType.STRING
  | FieldType.NUMBER
  | FieldType.BOOLEAN
  | FieldType.DATETIME
  | FieldType.JSON
  | FieldType.BIGINT
  | FieldType.OPTIONS

function isPrimitiveType(type: FieldType): type is PrimitiveTypes {
  return [
    FieldType.STRING,
    FieldType.NUMBER,
    FieldType.BOOLEAN,
    FieldType.DATETIME,
    FieldType.JSON,
    FieldType.BIGINT,
    FieldType.OPTIONS,
  ].includes(type)
}

const SQL_NUMBER_TYPE_MAP: Record<string, PrimitiveTypes> = {
  integer: FieldType.NUMBER,
  int: FieldType.NUMBER,
  decimal: FieldType.NUMBER,

@@ -35,7 +62,7 @@ const SQL_NUMBER_TYPE_MAP = {
  smallmoney: FieldType.NUMBER,
}

const SQL_DATE_TYPE_MAP = {
const SQL_DATE_TYPE_MAP: Record<string, PrimitiveTypes> = {
  timestamp: FieldType.DATETIME,
  time: FieldType.DATETIME,
  datetime: FieldType.DATETIME,

@@ -46,7 +73,7 @@ const SQL_DATE_TYPE_MAP = {
const SQL_DATE_ONLY_TYPES = ["date"]
const SQL_TIME_ONLY_TYPES = ["time"]

const SQL_STRING_TYPE_MAP = {
const SQL_STRING_TYPE_MAP: Record<string, PrimitiveTypes> = {
  varchar: FieldType.STRING,
  char: FieldType.STRING,
  nchar: FieldType.STRING,

@@ -58,22 +85,22 @@ const SQL_STRING_TYPE_MAP = {
  text: FieldType.STRING,
}

const SQL_BOOLEAN_TYPE_MAP = {
const SQL_BOOLEAN_TYPE_MAP: Record<string, PrimitiveTypes> = {
  boolean: FieldType.BOOLEAN,
  bit: FieldType.BOOLEAN,
  tinyint: FieldType.BOOLEAN,
}

const SQL_OPTIONS_TYPE_MAP = {
const SQL_OPTIONS_TYPE_MAP: Record<string, PrimitiveTypes> = {
  "user-defined": FieldType.OPTIONS,
}

const SQL_MISC_TYPE_MAP = {
const SQL_MISC_TYPE_MAP: Record<string, PrimitiveTypes> = {
  json: FieldType.JSON,
  bigint: FieldType.BIGINT,
}

const SQL_TYPE_MAP = {
const SQL_TYPE_MAP: Record<string, PrimitiveTypes> = {
  ...SQL_NUMBER_TYPE_MAP,
  ...SQL_DATE_TYPE_MAP,
  ...SQL_STRING_TYPE_MAP,

@@ -239,14 +266,14 @@ export function generateColumnDefinition(config: {
    constraints.inclusion = options
  }

  const schema: any = {
  const schema: FieldSchema = {
    type: foundType,
    externalType,
    autocolumn,
    name,
    constraints,
  }
  if (foundType === FieldType.DATETIME) {
  if (schema.type === FieldType.DATETIME) {
    schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lowerCaseType)
    schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lowerCaseType)
  }

@@ -274,59 +301,6 @@ export function isIsoDateString(str: string) {
  return d.toISOString() === trimmedValue
}

/**
 * This function will determine whether a column is a relationship and whether it
 * is currently valid. The reason for the validity check is that tables can be deleted
 * outside of Budibase control and if this is the case it will break Budibase relationships.
 * The tableIds is a list passed down from the main finalise tables function, which is
 * based on the tables that have just been fetched. This will only really be used on subsequent
 * fetches to the first one - if the user is periodically refreshing Budibase knowledge of tables.
 * @param column The column to check, to see if it is a valid relationship.
 * @param tableIds The IDs of the tables which currently exist.
 */
export function shouldCopyRelationship(
  column: { type: string; tableId?: string },
  tableIds: string[]
) {
  return (
    column.type === FieldType.LINK &&
    column.tableId &&
    tableIds.includes(column.tableId)
  )
}

/**
 * Similar function to the shouldCopyRelationship function, but instead this looks for options and boolean
 * types. It is possible to switch a string -> options and a number -> boolean (and vice versa); need to make
 * sure that these get copied over when tables are fetched. Also checks whether they are still valid, if a
 * column has changed type in the external database then copying it over may not be possible.
 * @param column The column to check for options or boolean type.
 * @param fetchedColumn The fetched column to check for the type in the external database.
 */
export function shouldCopySpecialColumn(
  column: { type: string },
  fetchedColumn: { type: string } | undefined
) {
  const isFormula = column.type === FieldType.FORMULA
  const specialTypes = [
    FieldType.OPTIONS,
    FieldType.LONGFORM,
    FieldType.ARRAY,
    FieldType.FORMULA,
    FieldType.BB_REFERENCE,
  ]
  // column has been deleted, remove - formulas will never exist, always copy
  if (!isFormula && column && !fetchedColumn) {
    return false
  }
  const fetchedIsNumber =
    !fetchedColumn || fetchedColumn.type === FieldType.NUMBER
  return (
    specialTypes.indexOf(column.type as FieldType) !== -1 ||
    (fetchedIsNumber && column.type === FieldType.BOOLEAN)
  )
}

/**
 * Looks for columns which need to be copied over into the new table definitions, like relationships,
 * options types and views.

@@ -348,6 +322,9 @@ function copyExistingPropsOver(
  if (entities[tableName]?.created) {
    table.created = entities[tableName]?.created
  }
  if (entities[tableName]?.constrained) {
    table.constrained = entities[tableName]?.constrained
  }

  table.views = entities[tableName].views

@@ -356,12 +333,73 @@ function copyExistingPropsOver(
      if (!Object.prototype.hasOwnProperty.call(existingTableSchema, key)) {
        continue
      }

      const column = existingTableSchema[key]
      if (
        shouldCopyRelationship(column, tableIds) ||
        shouldCopySpecialColumn(column, table.schema[key])
      ) {
        table.schema[key] = existingTableSchema[key]

      const existingColumnType = column?.type
      const updatedColumnType = table.schema[key]?.type

      const keepIfType = (...validTypes: PrimitiveTypes[]) => {
        return (
          isPrimitiveType(updatedColumnType) &&
          table.schema[key] &&
          validTypes.includes(updatedColumnType)
        )
      }

      let shouldKeepSchema = false
      switch (existingColumnType) {
        case FieldType.FORMULA:
        case FieldType.AUTO:
        case FieldType.INTERNAL:
          shouldKeepSchema = true
          break

        case FieldType.LINK:
          shouldKeepSchema =
            existingColumnType === FieldType.LINK &&
            tableIds.includes(column.tableId)
          break

        case FieldType.STRING:
        case FieldType.OPTIONS:
        case FieldType.LONGFORM:
        case FieldType.BARCODEQR:
          shouldKeepSchema = keepIfType(FieldType.STRING)
          break

        case FieldType.NUMBER:
        case FieldType.BOOLEAN:
          shouldKeepSchema = keepIfType(FieldType.BOOLEAN, FieldType.NUMBER)
          break

        case FieldType.ARRAY:
        case FieldType.ATTACHMENTS:
        case FieldType.ATTACHMENT_SINGLE:
        case FieldType.JSON:
        case FieldType.BB_REFERENCE:
          shouldKeepSchema = keepIfType(FieldType.JSON, FieldType.STRING)
          break

        case FieldType.DATETIME:
          shouldKeepSchema = keepIfType(FieldType.DATETIME, FieldType.STRING)
          break

        case FieldType.BIGINT:
          shouldKeepSchema = keepIfType(FieldType.BIGINT, FieldType.NUMBER)
          break

        default:
          utils.unreachable(existingColumnType)
      }

      if (shouldKeepSchema) {
        table.schema[key] = {
          ...existingTableSchema[key],
          externalType:
            existingTableSchema[key].externalType ||
            table.schema[key]?.externalType,
        }
      }
    }
  }

@@ -434,3 +472,74 @@ export function getPrimaryDisplay(testValue: unknown): string | undefined {
export function isValidFilter(value: any) {
  return value != null && value !== ""
}

export async function handleXml(response: any) {
  let data,
    rawXml = await response.text()
  data =
    (await xmlParser(rawXml, {
      explicitArray: false,
      trim: true,
      explicitRoot: false,
    })) || {}
  // there is only one structure, its an array, return the array so it appears as rows
  const keys = Object.keys(data)
  if (keys.length === 1 && Array.isArray(data[keys[0]])) {
    data = data[keys[0]]
  }
  return { data, rawXml }
}

export async function handleFileResponse(
  response: any,
  filename: string,
  startTime: number
) {
  let presignedUrl,
    size = 0
  const fileExtension = filename.includes(".")
    ? filename.split(".").slice(1).join(".")
    : ""

  const processedFileName = `${v4()}.${fileExtension}`
  const key = `${context.getProdAppId()}/${processedFileName}`
  const bucket = objectStore.ObjectStoreBuckets.TEMP

  const stream = response.body.pipe(bl((error, data) => data))

  if (response.body) {
    const contentLength = response.headers.get("content-length")
    if (contentLength) {
      size = parseInt(contentLength, 10)
    } else {
      const chunks: Buffer[] = []
      for await (const chunk of response.body) {
        chunks.push(chunk)
        size += chunk.length
      }
    }

    await objectStore.streamUpload({
      bucket,
      filename: key,
      stream,
      ttl: 1,
      type: response.headers["content-type"],
    })
  }
  presignedUrl = await objectStore.getPresignedUrl(bucket, key)
  return {
    data: {
      size,
      name: processedFileName,
      url: presignedUrl,
      extension: fileExtension,
      key: key,
    },
    info: {
      code: response.status,
      size: formatBytes(size.toString()),
      time: `${Math.round(performance.now() - startTime)}ms`,
    },
  }
}

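A quick sketch of `handleXml`'s row unwrapping, fed a fake Response-like object (ours, not from the commit):

```ts
const fakeResponse = {
  text: async () => "<rows><row><id>1</id></row><row><id>2</id></row></rows>",
}

async function demo() {
  const { data } = await handleXml(fakeResponse)
  // explicitRoot: false strips <rows>, leaving { row: [...] }; the single-key
  // array check then unwraps it, so data === [{ id: "1" }, { id: "2" }]
}
```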
@@ -348,8 +348,7 @@ const preSaveAction: Partial<Record<SourceName, any>> = {
 * Make sure all datasource entities have a display name selected
 */
export function setDefaultDisplayColumns(datasource: Datasource) {
  //
  for (let entity of Object.values(datasource.entities || {})) {
  for (const entity of Object.values(datasource.entities || {})) {
    if (entity.primaryDisplay) {
      continue
    }

@@ -200,6 +200,6 @@ export async function search(
    }
  } catch (err: any) {
    const msg = typeof err === "string" ? err : err.message
    throw new Error(`Unable to search by SQL - ${msg}`)
    throw new Error(`Unable to search by SQL - ${msg}`, { cause: err })
  }
}

@@ -52,6 +52,12 @@ export async function getDatasourceAndQuery(
): Promise<DatasourcePlusQueryResponse> {
  const datasourceId = json.endpoint.datasourceId
  const datasource = await sdk.datasources.get(datasourceId)
  const table = datasource.entities?.[json.endpoint.entityId]
  if (!json.meta && table) {
    json.meta = {
      table,
    }
  }
  return makeExternalQuery(datasource, json)
}

@@ -3,7 +3,6 @@ import {
  Operation,
  RelationshipType,
  RenameColumn,
  AddColumn,
  Table,
  TableRequest,
  ViewV2,

@@ -33,7 +32,7 @@ import * as viewSdk from "../../views"
export async function save(
  datasourceId: string,
  update: Table,
  opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
  opts?: { tableId?: string; renaming?: RenameColumn }
) {
  let tableToSave: TableRequest = {
    ...update,

@@ -52,6 +51,12 @@ export async function save(
    !oldTable &&
    (tableToSave.primary == null || tableToSave.primary.length === 0)
  ) {
    if (tableToSave.schema.id) {
      throw new Error(
        "External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
      )
    }

    tableToSave.primary = ["id"]
    tableToSave.schema.id = {
      type: FieldType.NUMBER,

@@ -179,14 +184,7 @@ export async function save(
  // remove the rename prop
  delete tableToSave._rename

  // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
  if (opts?.adding) {
    datasource.entities[tableToSave.name] = (
      await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
    ).tables[tableToSave.name]
  } else {
    datasource.entities[tableToSave.name] = tableToSave
  }
  datasource.entities[tableToSave.name] = tableToSave

  // store it into couch now for budibase reference
  await db.put(populateExternalTableSchemas(datasource))

@@ -42,7 +42,7 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
  [FieldType.ATTACHMENT_SINGLE]: SQLiteType.BLOB,
  [FieldType.ARRAY]: SQLiteType.BLOB,
  [FieldType.LINK]: SQLiteType.BLOB,
  [FieldType.BIGINT]: SQLiteType.REAL,
  [FieldType.BIGINT]: SQLiteType.TEXT,
  // TODO: consider the difference between multi-user and single user types (subtyping)
  [FieldType.BB_REFERENCE]: SQLiteType.TEXT,
}

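Why the switch from REAL to TEXT matters: REAL is a float64, so distinct int64 values can collapse, while TEXT alone would compare lexicographically. A sketch:

```ts
// Two different int64 values become the same REAL:
Number("9223372036854775807") === Number("9223372036854775806") // true
// TEXT keeps every digit, but "9" > "10" as strings - hence the CAST in the
// SqlStatements helpers earlier in this commit:
const clause = `CAST(num AS INTEGER) >= CAST(? AS INTEGER)`
```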
@@ -125,7 +125,7 @@ describe("validation and update of external table schemas", () => {
  }

  it("should correctly set utilised foreign keys to autocolumns", () => {
    const response = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
    const response = populateExternalTableSchemas(cloneDeep(SCHEMA))
    const foreignKey = getForeignKeyColumn(response)
    expect(foreignKey.autocolumn).toBe(true)
    expect(foreignKey.autoReason).toBe(AutoReason.FOREIGN_KEY)

@@ -133,7 +133,7 @@ describe("validation and update of external table schemas", () => {
  })

  it("should correctly unset foreign keys when no longer used", () => {
    const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA) as any)
    const setResponse = populateExternalTableSchemas(cloneDeep(SCHEMA))
    const beforeFk = getForeignKeyColumn(setResponse)
    delete setResponse.entities!.client.schema.project
    delete setResponse.entities!.project.schema.client

@@ -44,7 +44,10 @@ function checkForeignKeysAreAutoColumns(datasource: Datasource) {
    if (shouldBeForeign && !column.autocolumn) {
      column.autocolumn = true
      column.autoReason = AutoReason.FOREIGN_KEY
    } else if (column.autoReason === AutoReason.FOREIGN_KEY) {
    } else if (
      !shouldBeForeign &&
      column.autoReason === AutoReason.FOREIGN_KEY
    ) {
      delete column.autocolumn
      delete column.autoReason
    }

@@ -5,6 +5,7 @@ import {
  UpdateDatasourceResponse,
  UpdateDatasourceRequest,
  QueryJson,
  BuildSchemaFromSourceResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -61,7 +62,7 @@ export class DatasourceAPI extends TestAPI {
  }

  query = async (
    query: Omit<QueryJson, "meta">,
    query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
    expectations?: Expectations
  ) => {
    return await this._post<any>(`/api/datasources/query`, {

@@ -69,4 +70,13 @@ export class DatasourceAPI extends TestAPI {
      expectations,
    })
  }

  fetchSchema = async (id: string, expectations?: Expectations) => {
    return await this._post<BuildSchemaFromSourceResponse>(
      `/api/datasources/${id}/schema`,
      {
        expectations,
      }
    )
  }
}

@@ -106,22 +106,22 @@ export async function updateClientLibrary(appId: string) {
  }

  // Upload latest manifest and client library
  const manifestUpload = objectStore.streamUpload(
    ObjectStoreBuckets.APPS,
    join(appId, "manifest.json"),
    fs.createReadStream(manifest),
    {
  const manifestUpload = objectStore.streamUpload({
    bucket: ObjectStoreBuckets.APPS,
    filename: join(appId, "manifest.json"),
    stream: fs.createReadStream(manifest),
    extra: {
      ContentType: "application/json",
    }
  )
  const clientUpload = objectStore.streamUpload(
    ObjectStoreBuckets.APPS,
    join(appId, "budibase-client.js"),
    fs.createReadStream(client),
    {
    },
  })
  const clientUpload = objectStore.streamUpload({
    bucket: ObjectStoreBuckets.APPS,
    filename: join(appId, "budibase-client.js"),
    stream: fs.createReadStream(client),
    extra: {
      ContentType: "application/javascript",
    }
  )
    },
  })

  const manifestSrc = fs.promises.readFile(manifest, "utf8")

@@ -43,7 +43,7 @@ export const checkDevelopmentEnvironment = () => {
    error = "Must run via yarn once to generate environment."
  }
  if (error) {
    console.error(error)
    console.error("Error during development environment check", error)
    process.exit(-1)
  }
}

@@ -8,7 +8,7 @@ import {
  logging,
  env as coreEnv,
} from "@budibase/backend-core"
import { Ctx, User, EmailInvite } from "@budibase/types"
import { Ctx, User, EmailInvite, EmailAttachment } from "@budibase/types"

interface Request {
  ctx?: Ctx

@@ -97,6 +97,7 @@ export async function sendSmtpEmail({
  bcc,
  automation,
  invite,
  attachments,
}: {
  to: string
  from: string

@@ -105,6 +106,7 @@ export async function sendSmtpEmail({
  cc: string
  bcc: string
  automation: boolean
  attachments?: EmailAttachment[]
  invite?: EmailInvite
}) {
  // tenant ID will be set in header

@@ -122,6 +124,7 @@ export async function sendSmtpEmail({
        purpose: "custom",
        automation,
        invite,
        attachments,
      },
    })
  )

@@ -0,0 +1,33 @@
import { FieldType } from "@budibase/types"

type SwitchableTypes = Partial<{
  [K in FieldType]: [K, ...FieldType[]]
}>

export const SWITCHABLE_TYPES: SwitchableTypes = {
  [FieldType.STRING]: [
    FieldType.STRING,
    FieldType.OPTIONS,
    FieldType.LONGFORM,
    FieldType.BARCODEQR,
  ],
  [FieldType.OPTIONS]: [
    FieldType.OPTIONS,
    FieldType.STRING,
    FieldType.LONGFORM,
    FieldType.BARCODEQR,
  ],
  [FieldType.LONGFORM]: [
    FieldType.LONGFORM,
    FieldType.STRING,
    FieldType.OPTIONS,
    FieldType.BARCODEQR,
  ],
  [FieldType.BARCODEQR]: [
    FieldType.BARCODEQR,
    FieldType.STRING,
    FieldType.OPTIONS,
    FieldType.LONGFORM,
  ],
  [FieldType.NUMBER]: [FieldType.NUMBER, FieldType.BOOLEAN],
}

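Each entry lists the types a column may legally become (itself first). A small helper sketch (the helper name is ours, not from the commit):

```ts
import { FieldType } from "@budibase/types"

function canSwitchType(from: FieldType, to: FieldType): boolean {
  return SWITCHABLE_TYPES[from]?.includes(to) ?? false
}

canSwitchType(FieldType.STRING, FieldType.OPTIONS) // true
canSwitchType(FieldType.NUMBER, FieldType.STRING) // false - not listed above
```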
@@ -1,4 +1,5 @@
export * from "./api"
export * from "./fields"

export const OperatorOptions = {
  Equals: {

@@ -218,14 +218,16 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
          high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z",
        }
      }
      if ((operator as any) === "rangeLow" && value != null && value !== "") {
        query.range[field].low = value
      } else if (
        (operator as any) === "rangeHigh" &&
        value != null &&
        value !== ""
      ) {
        query.range[field].high = value
      if (operator === "rangeLow" && value != null && value !== "") {
        query.range[field] = {
          ...query.range[field],
          low: value,
        }
      } else if (operator === "rangeHigh" && value != null && value !== "") {
        query.range[field] = {
          ...query.range[field],
          high: value,
        }
      }
    } else if (query[queryOperator] && operator !== "onEmptyFilter") {
      if (type === "boolean") {

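The practical effect of the spread assignment: a lone bound now builds a fresh half-open range object instead of writing into one that may not exist. A sketch:

```ts
const query = { range: {} as Record<string, { low?: number; high?: number }> }

// a rangeLow-only filter:
query.range["age"] = { ...query.range["age"], low: 5 }
// query.range.age -> { low: 5 } - no placeholder high, matching the widened
// SearchFilters range type later in this commit
```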
@@ -16,7 +16,7 @@ import { setJSRunner, removeJSRunner } from "./helpers/javascript"
import manifest from "./manifest.json"
import { ProcessOptions } from "./types"

export { helpersToRemoveForJs } from "./helpers/list"
export { helpersToRemoveForJs, getJsHelperList } from "./helpers/list"
export { FIND_ANY_HBS_REGEX } from "./utilities"
export { setJSRunner, setOnErrorLog } from "./helpers/javascript"
export { iifeWrapper } from "./iife"

@@ -66,6 +66,8 @@ export interface CreateAdminUserRequest {
  password?: string
  tenantId: string
  ssoId?: string
  familyName?: string
  givenName?: string
}

export interface AddSSoUserRequest {

@@ -10,6 +10,7 @@ export enum AutomationIOType {
  ARRAY = "array",
  JSON = "json",
  DATE = "date",
  ATTACHMENT = "attachment",
}

export enum AutomationCustomIOType {

@@ -30,6 +31,7 @@ export enum AutomationCustomIOType {
  WEBHOOK_URL = "webhookUrl",
  AUTOMATION = "automation",
  AUTOMATION_FIELDS = "automationFields",
  MULTI_ATTACHMENTS = "multi_attachments",
}

export enum AutomationTriggerStepId {

@@ -80,6 +82,11 @@ export interface EmailInvite {
  url?: string
}

export interface EmailAttachment {
  url: string
  filename: string
}

export interface SendEmailOpts {
  // workspaceId If finer grain controls being used then this will lookup config for workspace.
  workspaceId?: string

@@ -97,6 +104,7 @@ export interface SendEmailOpts {
  bcc?: boolean
  automation?: boolean
  invite?: EmailInvite
  attachments?: EmailAttachment[]
}

export const AutomationStepIdArray = [

@@ -13,9 +13,7 @@ export interface Datasource extends Document {
  config?: Record<string, any>
  plus?: boolean
  isSQL?: boolean
  entities?: {
    [key: string]: Table
  }
  entities?: Record<string, Table>
}

export enum RestAuthType {

@@ -91,6 +91,7 @@ export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
  type: FieldType.DATETIME
  ignoreTimezones?: boolean
  timeOnly?: boolean
  dateOnly?: boolean
  subtype?: AutoFieldSubType.CREATED_AT | AutoFieldSubType.UPDATED_AT
}

@@ -1,6 +1,6 @@
import { Document } from "../../document"
import { View, ViewV2 } from "../view"
import { AddColumn, RenameColumn } from "../../../sdk"
import { RenameColumn } from "../../../sdk"
import { TableSchema } from "./schema"

export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"

@@ -30,6 +30,5 @@ export interface Table extends Document {

export interface TableRequest extends Table {
  _rename?: RenameColumn
  _add?: AddColumn
  created?: boolean
}

@@ -22,6 +22,13 @@ export interface UserSSO {
  providerType: SSOProviderType
  oauth2?: OAuth2
  thirdPartyProfile?: SSOProfileJson
  profile?: {
    displayName?: string
    name?: {
      givenName?: string
      familyName?: string
    }
  }
}

export type SSOUser = User & UserSSO

@@ -14,6 +14,14 @@ export enum Operation {
  DELETE_TABLE = "DELETE_TABLE",
}

export const RowOperations = [
  Operation.CREATE,
  Operation.READ,
  Operation.UPDATE,
  Operation.DELETE,
  Operation.BULK_CREATE,
]

export enum SortDirection {
  ASCENDING = "ASCENDING",
  DESCENDING = "DESCENDING",

@@ -7,6 +7,7 @@ export enum PlanType {
  /** @deprecated */
  PREMIUM = "premium",
  PREMIUM_PLUS = "premium_plus",
  PREMIUM_PLUS_TRIAL = "premium_plus_trial",
  /** @deprecated */
  BUSINESS = "business",
  ENTERPRISE_BASIC = "enterprise_basic",

@@ -27,10 +27,13 @@ export interface SearchFilters {
    [key: string]: string
  }
  [SearchFilterOperator.RANGE]?: {
    [key: string]: {
      high: number | string
      low: number | string
    }
    [key: string]:
      | {
          high: number | string
          low: number | string
        }
      | { high: number | string }
      | { low: number | string }
  }
  [SearchFilterOperator.EQUAL]?: {
    [key: string]: any

@@ -77,10 +80,6 @@ export interface RenameColumn {
  updated: string
}

export interface AddColumn {
  name: string
}

export interface RelationshipsJson {
  through?: string
  from?: string

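With the widened union, half-open ranges now typecheck (assuming `SearchFilterOperator.RANGE` is the literal `"range"`, as the search tests earlier in this commit use; the field name is ours):

```ts
const lowOnly: SearchFilters = {
  range: { age: { low: 5 } }, // previously invalid - high was mandatory
}
const closed: SearchFilters = {
  range: { age: { low: 5, high: 9 } },
}
```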
@@ -35,8 +35,7 @@ async function passportCallback(
  info: { message: string } | null = null
) {
  if (err) {
    console.error("Authentication error")
    console.error(err)
    console.error("Authentication error", err)
    console.trace(err)
    return ctx.throw(403, info ? info : "Unauthorized")
  }

@@ -15,6 +15,7 @@ export async function sendEmail(ctx: BBContext) {
    bcc,
    automation,
    invite,
    attachments,
  } = ctx.request.body
  let user: any
  if (userId) {

@@ -31,6 +32,7 @@ export async function sendEmail(ctx: BBContext) {
    bcc,
    automation,
    invite,
    attachments,
  })
  ctx.body = {
    ...response,

@@ -116,7 +116,8 @@ const parseBooleanParam = (param: any) => {
export const adminUser = async (
  ctx: Ctx<CreateAdminUserRequest, CreateAdminUserResponse>
) => {
  const { email, password, tenantId, ssoId } = ctx.request.body
  const { email, password, tenantId, ssoId, givenName, familyName } =
    ctx.request.body

  if (await platform.tenants.exists(tenantId)) {
    ctx.throw(403, "Organisation already exists.")

@@ -151,6 +152,8 @@ export const adminUser = async (
    ssoId,
    hashPassword,
    requirePassword,
    firstName: givenName,
    lastName: familyName,
  })

  // events

@@ -1,9 +1,15 @@
 jest.unmock("node-fetch")
+jest.unmock("aws-sdk")
 import { TestConfiguration } from "../../../../tests"
 import { EmailTemplatePurpose } from "../../../../constants"
+import { objectStoreTestProviders } from "@budibase/backend-core/tests"
+import { objectStore } from "@budibase/backend-core"
+import tk from "timekeeper"
+import { EmailAttachment } from "@budibase/types"
+
+const fetch = require("node-fetch")

 const nodemailer = require("nodemailer")
-const fetch = require("node-fetch")

 // for the real email tests give them a long time to try complete/fail
 jest.setTimeout(30000)

@@ -12,14 +18,20 @@ describe("/api/global/email", () => {
   const config = new TestConfiguration()

   beforeAll(async () => {
+    tk.reset()
+    await objectStoreTestProviders.minio.start()
     await config.beforeAll()
   })

   afterAll(async () => {
+    await objectStoreTestProviders.minio.stop()
     await config.afterAll()
   })

-  async function sendRealEmail(purpose: string) {
+  async function sendRealEmail(
+    purpose: string,
+    attachments?: EmailAttachment[]
+  ) {
     let response, text
     try {
       const timeout = () =>

@@ -35,8 +47,14 @@ describe("/api/global/email", () => {
       )
       await Promise.race([config.saveEtherealSmtpConfig(), timeout()])
       await Promise.race([config.saveSettingsConfig(), timeout()])

-      const res = await config.api.emails.sendEmail(purpose).timeout(20000)
+      let res
+      if (attachments) {
+        res = await config.api.emails
+          .sendEmail(purpose, attachments)
+          .timeout(20000)
+      } else {
+        res = await config.api.emails.sendEmail(purpose).timeout(20000)
+      }
       // ethereal hiccup, can't test right now
       if (res.status >= 300) {
         return

@@ -80,4 +98,25 @@ describe("/api/global/email", () => {
   it("should be able to send a password recovery email", async () => {
     await sendRealEmail(EmailTemplatePurpose.PASSWORD_RECOVERY)
   })
+
+  it("should be able to send an email with attachments", async () => {
+    let bucket = "testbucket"
+    let filename = "test.txt"
+    await objectStore.upload({
+      bucket,
+      filename,
+      body: Buffer.from("test data"),
+    })
+    let presignedUrl = await objectStore.getPresignedUrl(
+      bucket,
+      filename,
+      60000
+    )
+
+    let attachmentObject = {
+      url: presignedUrl,
+      filename,
+    }
+    await sendRealEmail(EmailTemplatePurpose.WELCOME, [attachmentObject])
+  })
 })

@@ -16,6 +16,8 @@ function buildAdminInitValidation() {
     password: OPTIONAL_STRING,
     tenantId: Joi.string().required(),
     ssoId: Joi.string(),
+    familyName: OPTIONAL_STRING,
+    givenName: OPTIONAL_STRING,
   })
     .required()
     .unknown(false)

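Together with the controller change above, the first-admin endpoint can now seed a user's name, e.g. from an SSO profile. A sketch of a request body that should pass buildAdminInitValidation (every value is a placeholder):

    const body = {
      email: "admin@example.com",
      password: "a-strong-password",
      tenantId: "default",
      ssoId: "sso-subject-123", // optional
      givenName: "Ada", // optional, stored as firstName
      familyName: "Lovelace", // optional, stored as lastName
    }
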
@@ -1,3 +1,4 @@
+import { EmailAttachment } from "@budibase/types"
 import TestConfiguration from "../TestConfiguration"
 import { TestAPI } from "./base"

@@ -6,11 +7,12 @@ export class EmailAPI extends TestAPI {
     super(config)
   }

-  sendEmail = (purpose: string) => {
+  sendEmail = (purpose: string, attachments?: EmailAttachment[]) => {
     return this.request
       .post(`/api/global/email/send`)
       .send({
         email: "test@example.com",
+        attachments,
         purpose,
         tenantId: this.config.getTenantId(),
         userId: this.config.user?._id!,

@@ -4,8 +4,8 @@ process.env.JWT_SECRET = "test-jwtsecret"
 process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error"
 process.env.MULTI_TENANCY = "1"
 process.env.MINIO_URL = "http://localhost"
-process.env.MINIO_ACCESS_KEY = "test"
-process.env.MINIO_SECRET_KEY = "test"
+process.env.MINIO_ACCESS_KEY = "budibase"
+process.env.MINIO_SECRET_KEY = "budibase"
 process.env.PLATFORM_URL = "http://localhost:10000"
 process.env.INTERNAL_API_KEY = "tet"
 process.env.DISABLE_ACCOUNT_PORTAL = "0"

@@ -62,8 +62,8 @@ export function smtpEthereal(): SMTPConfig {
       from: "testfrom@example.com",
       secure: false,
       auth: {
-        user: "wyatt.zulauf29@ethereal.email",
-        pass: "tEwDtHBWWxusVWAPfa",
+        user: "mortimer.leuschke@ethereal.email",
+        pass: "5hSjsPbzRv7gEUsfzx",
       },
       connectionTimeout: 1000, // must be less than the jest default of 5000
     },

@@ -4,8 +4,10 @@ import { getTemplateByPurpose, EmailTemplates } from "../constants/templates"
 import { getSettingsTemplateContext } from "./templates"
 import { processString } from "@budibase/string-templates"
 import { User, SendEmailOpts, SMTPInnerConfig } from "@budibase/types"
-import { configs, cache } from "@budibase/backend-core"
+import { configs, cache, objectStore } from "@budibase/backend-core"
 import ical from "ical-generator"
+import fetch from "node-fetch"
+import path from "path"

 const nodemailer = require("nodemailer")

@@ -162,6 +164,42 @@ export async function sendEmail(
       contents: opts?.contents,
     }),
   }
+  if (opts?.attachments) {
+    const attachments = await Promise.all(
+      opts.attachments?.map(async attachment => {
+        const isFullyFormedUrl =
+          attachment.url.startsWith("http://") ||
+          attachment.url.startsWith("https://")
+        if (isFullyFormedUrl) {
+          const response = await fetch(attachment.url)
+          if (!response.ok) {
+            throw new Error(`unexpected response ${response.statusText}`)
+          }
+          const fallbackFilename = path.basename(
+            new URL(attachment.url).pathname
+          )
+          return {
+            filename: attachment.filename || fallbackFilename,
+            content: response?.body,
+          }
+        } else {
+          const url = attachment.url
+          const result = objectStore.extractBucketAndPath(url)
+          if (result === null) {
+            throw new Error("Invalid signed URL")
+          }
+          const { bucket, path } = result
+          const readStream = await objectStore.getReadStream(bucket, path)
+          const fallbackFilename = path.split("/").pop() || ""
+          return {
+            filename: attachment.filename || fallbackFilename,
+            content: readStream,
+          }
+        }
+      })
+    )
+    message = { ...message, attachments }
+  }

   message = {
     ...message,

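The attachment resolver above handles two URL shapes: a fully formed http(s) URL is fetched and its response body attached, while anything else is treated as an object-store reference, split into bucket and path, and streamed. A caller-side sketch, assuming EmailAttachment is the { url, filename } shape exercised by the tests:

    import { EmailAttachment } from "@budibase/types"

    // Placeholder values: one public URL (fetched over HTTP) and one
    // object-store style URL (streamed via objectStore.getReadStream).
    const attachments: EmailAttachment[] = [
      { url: "https://example.com/report.pdf", filename: "report.pdf" },
      { url: "/testbucket/test.txt", filename: "test.txt" },
    ]

When filename is omitted, the branch above falls back to the last segment of the URL path.
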
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Check if the pro submodule is loaded
+if [ ! -d "./packages/pro/src" ]; then
+  echo "[ERROR] Submodule is not loaded. This is only allowed with loaded submodules."
+  exit 1
+fi
+
+yarn build --scope @budibase/server --scope @budibase/worker
+docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0

yarn.lock

@@ -6147,6 +6147,13 @@
   dependencies:
     "@types/webidl-conversions" "*"

+"@types/xml2js@^0.4.14":
+  version "0.4.14"
+  resolved "https://registry.yarnpkg.com/@types/xml2js/-/xml2js-0.4.14.tgz#5d462a2a7330345e2309c6b549a183a376de8f9a"
+  integrity sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==
+  dependencies:
+    "@types/node" "*"
+
 "@types/yargs-parser@*":
   version "21.0.0"
   resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b"

@@ -7607,6 +7614,16 @@ bl@^4.0.3, bl@^4.1.0:
     inherits "^2.0.4"
     readable-stream "^3.4.0"

+bl@^6.0.12:
+  version "6.0.12"
+  resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.12.tgz#77c35b96e13aeff028496c798b75389ddee9c7f8"
+  integrity sha512-EnEYHilP93oaOa2MnmNEjAcovPS3JlQZOyzGXi3EyEpPhm9qWvdDp7BmAVEVusGzp8LlwQK56Av+OkDoRjzE0w==
+  dependencies:
+    "@types/readable-stream" "^4.0.0"
+    buffer "^6.0.3"
+    inherits "^2.0.4"
+    readable-stream "^4.2.0"
+
 bl@^6.0.3:
   version "6.0.9"
   resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.9.tgz#df8fcb2ef7be2e5ee8f65afa493502914e0d816f"

@@ -8781,7 +8798,7 @@ consolidate@^0.16.0:
   dependencies:
     bluebird "^3.7.2"

-content-disposition@^0.5.2, content-disposition@^0.5.3, content-disposition@~0.5.2:
+content-disposition@^0.5.2, content-disposition@^0.5.3, content-disposition@^0.5.4, content-disposition@~0.5.2:
   version "0.5.4"
   resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe"
   integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==