Merge branch 'master' into builder-store-conversions-pc

Peter Clement, 2025-01-09 10:23:49 +00:00 (committed by GitHub)
commit db27d1e8af
80 changed files with 1297 additions and 2156 deletions


@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.35",
+  "version": "3.2.37",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {


@@ -1,28 +0,0 @@
-export class S3 {
-  headBucket() {
-    return jest.fn().mockReturnThis()
-  }
-  deleteObject() {
-    return jest.fn().mockReturnThis()
-  }
-  deleteObjects() {
-    return jest.fn().mockReturnThis()
-  }
-  createBucket() {
-    return jest.fn().mockReturnThis()
-  }
-  getObject() {
-    return jest.fn().mockReturnThis()
-  }
-  listObject() {
-    return jest.fn().mockReturnThis()
-  }
-  promise() {
-    return jest.fn().mockReturnThis()
-  }
-  catch() {
-    return jest.fn()
-  }
-}
-
-export const GetObjectCommand = jest.fn(inputs => ({ inputs }))


@@ -1,4 +0,0 @@
-export const getSignedUrl = jest.fn((_, cmd) => {
-  const { inputs } = cmd
-  return `http://s3.example.com/${inputs?.Bucket}/${inputs?.Key}`
-})


@@ -0,0 +1,19 @@
+const mockS3 = {
+  headBucket: jest.fn().mockReturnThis(),
+  deleteObject: jest.fn().mockReturnThis(),
+  deleteObjects: jest.fn().mockReturnThis(),
+  createBucket: jest.fn().mockReturnThis(),
+  getObject: jest.fn().mockReturnThis(),
+  listObject: jest.fn().mockReturnThis(),
+  getSignedUrl: jest.fn((operation: string, params: any) => {
+    return `http://s3.example.com/${params.Bucket}/${params.Key}`
+  }),
+  promise: jest.fn().mockReturnThis(),
+  catch: jest.fn(),
+}
+
+const AWS = {
+  S3: jest.fn(() => mockS3),
+}
+
+export default AWS
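A sketch of how a test might drive this consolidated mock (the test is illustrative, not part of the commit; it assumes the file above lives at `__mocks__/aws-sdk.ts` so Jest resolves it for the `aws-sdk` module). Because every method is stubbed with `mockReturnThis()`, the v2 fluent chain `client.getObject(...).promise()` stays on the same object, and a test only needs to override the terminal `promise()` call:

```typescript
import AWS from "aws-sdk"

jest.mock("aws-sdk")

test("reads an object through the mocked v2 chain", async () => {
  // The mocked AWS.S3 constructor hands back the shared mockS3 object.
  const client = new AWS.S3() as any
  // Override the chained terminal call for this test only.
  client.promise.mockResolvedValueOnce({ Body: Buffer.from("data") })

  const result = await client.getObject({ Bucket: "b", Key: "k" }).promise()
  expect(result.Body.toString()).toBe("data")
})
```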


@@ -30,9 +30,6 @@
     "test:watch": "jest --watchAll"
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "3.709.0",
-    "@aws-sdk/lib-storage": "3.709.0",
-    "@aws-sdk/s3-request-presigner": "3.709.0",
     "@budibase/nano": "10.1.5",
     "@budibase/pouchdb-replication-stream": "1.2.11",
     "@budibase/shared-core": "*",
@@ -74,13 +71,11 @@
   "devDependencies": {
     "@jest/types": "^29.6.3",
     "@shopify/jest-koa-mocks": "5.1.1",
-    "@smithy/types": "4.0.0",
     "@swc/core": "1.3.71",
     "@swc/jest": "0.2.27",
     "@types/chance": "1.1.3",
     "@types/cookies": "0.7.8",
     "@types/jest": "29.5.5",
-    "@types/koa": "2.13.4",
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "6.4.2",
@@ -88,6 +83,7 @@
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
+    "@types/koa": "2.13.4",
     "chance": "1.1.8",
     "ioredis-mock": "8.9.0",
     "jest": "29.7.0",


@@ -154,7 +154,7 @@ const environment = {
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
   AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
-  AWS_REGION: process.env.AWS_REGION || "eu-west-1",
+  AWS_REGION: process.env.AWS_REGION,
   MINIO_URL: process.env.MINIO_URL,
   MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
   INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,


@@ -13,7 +13,7 @@ export function clientLibraryPath(appId: string) {
  * due to issues with the domain we were unable to continue doing this - keeping
  * incase we are able to switch back to CDN path again in future.
  */
-export async function clientLibraryCDNUrl(appId: string, version: string) {
+export function clientLibraryCDNUrl(appId: string, version: string) {
   let file = clientLibraryPath(appId)
   if (env.CLOUDFRONT_CDN) {
     // append app version to bust the cache
@@ -24,7 +24,7 @@ export async function clientLibraryCDNUrl(appId: string, version: string) {
     // file is public
     return cloudfront.getUrl(file)
   } else {
-    return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
   }
 }
@@ -44,10 +44,10 @@ export function clientLibraryUrl(appId: string, version: string) {
   return `/api/assets/client?${qs.encode(qsParams)}`
 }

-export async function getAppFileUrl(s3Key: string) {
+export function getAppFileUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
-    return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
   }
 }
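The dropped `async` keywords in this file follow directly from the SDK change: aws-sdk v2 computes presigned URLs locally and its `getSignedUrl` returns a plain string, whereas the removed v3 presigner (`@aws-sdk/s3-request-presigner`) returns a `Promise<string>`. A minimal sketch of the v2 call these helpers now sit on top of (bucket and key values are illustrative):

```typescript
import AWS from "aws-sdk"

const s3 = new AWS.S3({ signatureVersion: "v4" })

// Synchronous in v2: the signature is computed client-side, no network call.
const url: string = s3.getSignedUrl("getObject", {
  Bucket: "prod-budi-app-assets",
  Key: "app_123/attachments/image.jpeg",
  Expires: 3600, // seconds
})
```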


@@ -5,11 +5,7 @@ import * as cloudfront from "../cloudfront"

 // URLs

-export const getGlobalFileUrl = async (
-  type: string,
-  name: string,
-  etag?: string
-) => {
+export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
   let file = getGlobalFileS3Key(type, name)
   if (env.CLOUDFRONT_CDN) {
     if (etag) {
@@ -17,7 +13,7 @@ export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
     }
     return cloudfront.getPresignedUrl(file)
   } else {
-    return await objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
+    return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
   }
 }


@@ -6,25 +6,23 @@ import { Plugin } from "@budibase/types"

 // URLS

-export async function enrichPluginURLs(plugins?: Plugin[]): Promise<Plugin[]> {
+export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
   if (!plugins || !plugins.length) {
     return []
   }
-  return await Promise.all(
-    plugins.map(async plugin => {
-      const jsUrl = await getPluginJSUrl(plugin)
-      const iconUrl = await getPluginIconUrl(plugin)
-      return { ...plugin, jsUrl, iconUrl }
-    })
-  )
+  return plugins.map(plugin => {
+    const jsUrl = getPluginJSUrl(plugin)
+    const iconUrl = getPluginIconUrl(plugin)
+    return { ...plugin, jsUrl, iconUrl }
+  })
 }

-async function getPluginJSUrl(plugin: Plugin) {
+function getPluginJSUrl(plugin: Plugin) {
   const s3Key = getPluginJSKey(plugin)
   return getPluginUrl(s3Key)
 }

-async function getPluginIconUrl(plugin: Plugin) {
+function getPluginIconUrl(plugin: Plugin): string | undefined {
   const s3Key = getPluginIconKey(plugin)
   if (!s3Key) {
     return
@@ -32,11 +30,11 @@ async function getPluginIconUrl(plugin: Plugin) {
   return getPluginUrl(s3Key)
 }

-async function getPluginUrl(s3Key: string) {
+function getPluginUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
-    return await objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
+    return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
   }
 }


@@ -93,25 +93,25 @@ describe("app", () => {
       testEnv.multiTenant()
     })

-    it("gets url with embedded minio", async () => {
+    it("gets url with embedded minio", () => {
       testEnv.withMinio()
-      const url = await getAppFileUrl()
+      const url = getAppFileUrl()
       expect(url).toBe(
         "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
       )
     })

-    it("gets url with custom S3", async () => {
+    it("gets url with custom S3", () => {
       testEnv.withS3()
-      const url = await getAppFileUrl()
+      const url = getAppFileUrl()
       expect(url).toBe(
         "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
       )
     })

-    it("gets url with cloudfront + s3", async () => {
+    it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
-      const url = await getAppFileUrl()
+      const url = getAppFileUrl()
       // omit rest of signed params
       expect(
         url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
@@ -126,8 +126,8 @@ describe("app", () => {
     it("gets url with embedded minio", async () => {
       testEnv.withMinio()
-      await testEnv.withTenant(async () => {
-        const url = await getAppFileUrl()
+      await testEnv.withTenant(() => {
+        const url = getAppFileUrl()
         expect(url).toBe(
           "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
         )
@@ -136,8 +136,8 @@ describe("app", () => {
     it("gets url with custom S3", async () => {
       testEnv.withS3()
-      await testEnv.withTenant(async () => {
-        const url = await getAppFileUrl()
+      await testEnv.withTenant(() => {
+        const url = getAppFileUrl()
         expect(url).toBe(
           "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
         )
@@ -146,8 +146,8 @@ describe("app", () => {
     it("gets url with cloudfront + s3", async () => {
       testEnv.withCloudfront()
-      await testEnv.withTenant(async () => {
-        const url = await getAppFileUrl()
+      await testEnv.withTenant(() => {
+        const url = getAppFileUrl()
         // omit rest of signed params
         expect(
           url.includes(


@@ -3,7 +3,7 @@ import { testEnv } from "../../../../tests/extra"

 describe("global", () => {
   describe("getGlobalFileUrl", () => {
-    async function getGlobalFileUrl() {
+    function getGlobalFileUrl() {
       return global.getGlobalFileUrl("settings", "logoUrl", "etag")
     }

@@ -12,21 +12,21 @@ describe("global", () => {
       testEnv.singleTenant()
     })

-    it("gets url with embedded minio", async () => {
+    it("gets url with embedded minio", () => {
       testEnv.withMinio()
-      const url = await getGlobalFileUrl()
+      const url = getGlobalFileUrl()
       expect(url).toBe("/files/signed/global/settings/logoUrl")
     })

-    it("gets url with custom S3", async () => {
+    it("gets url with custom S3", () => {
       testEnv.withS3()
-      const url = await getGlobalFileUrl()
+      const url = getGlobalFileUrl()
       expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
     })

-    it("gets url with cloudfront + s3", async () => {
+    it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
-      const url = await getGlobalFileUrl()
+      const url = getGlobalFileUrl()
       // omit rest of signed params
       expect(
         url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
@@ -41,16 +41,16 @@ describe("global", () => {
     it("gets url with embedded minio", async () => {
       testEnv.withMinio()
-      await testEnv.withTenant(async tenantId => {
-        const url = await getGlobalFileUrl()
+      await testEnv.withTenant(tenantId => {
+        const url = getGlobalFileUrl()
         expect(url).toBe(`/files/signed/global/${tenantId}/settings/logoUrl`)
       })
     })

     it("gets url with custom S3", async () => {
       testEnv.withS3()
-      await testEnv.withTenant(async tenantId => {
-        const url = await getGlobalFileUrl()
+      await testEnv.withTenant(tenantId => {
+        const url = getGlobalFileUrl()
         expect(url).toBe(
           `http://s3.example.com/global/${tenantId}/settings/logoUrl`
         )
@@ -59,8 +59,8 @@ describe("global", () => {
     it("gets url with cloudfront + s3", async () => {
       testEnv.withCloudfront()
-      await testEnv.withTenant(async tenantId => {
-        const url = await getGlobalFileUrl()
+      await testEnv.withTenant(tenantId => {
+        const url = getGlobalFileUrl()
         // omit rest of signed params
         expect(
           url.includes(


@@ -6,8 +6,8 @@ describe("plugins", () => {
   describe("enrichPluginURLs", () => {
     const plugin = structures.plugins.plugin()

-    async function getEnrichedPluginUrls() {
-      const enriched = (await plugins.enrichPluginURLs([plugin]))[0]
+    function getEnrichedPluginUrls() {
+      const enriched = plugins.enrichPluginURLs([plugin])[0]
       return {
         jsUrl: enriched.jsUrl!,
         iconUrl: enriched.iconUrl!,
@@ -19,9 +19,9 @@ describe("plugins", () => {
       testEnv.singleTenant()
     })

-    it("gets url with embedded minio", async () => {
+    it("gets url with embedded minio", () => {
       testEnv.withMinio()
-      const urls = await getEnrichedPluginUrls()
+      const urls = getEnrichedPluginUrls()
       expect(urls.jsUrl).toBe(
         `/files/signed/plugins/${plugin.name}/plugin.min.js`
       )
@@ -30,9 +30,9 @@ describe("plugins", () => {
       )
     })

-    it("gets url with custom S3", async () => {
+    it("gets url with custom S3", () => {
       testEnv.withS3()
-      const urls = await getEnrichedPluginUrls()
+      const urls = getEnrichedPluginUrls()
       expect(urls.jsUrl).toBe(
         `http://s3.example.com/plugins/${plugin.name}/plugin.min.js`
       )
@@ -41,9 +41,9 @@ describe("plugins", () => {
       )
     })

-    it("gets url with cloudfront + s3", async () => {
+    it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
-      const urls = await getEnrichedPluginUrls()
+      const urls = getEnrichedPluginUrls()
       // omit rest of signed params
       expect(
         urls.jsUrl.includes(
@@ -65,8 +65,8 @@ describe("plugins", () => {
     it("gets url with embedded minio", async () => {
       testEnv.withMinio()
-      await testEnv.withTenant(async tenantId => {
-        const urls = await getEnrichedPluginUrls()
+      await testEnv.withTenant(tenantId => {
+        const urls = getEnrichedPluginUrls()
         expect(urls.jsUrl).toBe(
           `/files/signed/plugins/${tenantId}/${plugin.name}/plugin.min.js`
         )
@@ -78,8 +78,8 @@ describe("plugins", () => {
     it("gets url with custom S3", async () => {
       testEnv.withS3()
-      await testEnv.withTenant(async tenantId => {
-        const urls = await getEnrichedPluginUrls()
+      await testEnv.withTenant(tenantId => {
+        const urls = getEnrichedPluginUrls()
         expect(urls.jsUrl).toBe(
           `http://s3.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js`
         )
@@ -91,8 +91,8 @@ describe("plugins", () => {
     it("gets url with cloudfront + s3", async () => {
       testEnv.withCloudfront()
-      await testEnv.withTenant(async tenantId => {
-        const urls = await getEnrichedPluginUrls()
+      await testEnv.withTenant(tenantId => {
+        const urls = getEnrichedPluginUrls()
         // omit rest of signed params
         expect(
           urls.jsUrl.includes(


@@ -1,15 +1,6 @@
 const sanitize = require("sanitize-s3-objectkey")
-import {
-  HeadObjectCommandOutput,
-  PutObjectCommandInput,
-  S3,
-  S3ClientConfig,
-  GetObjectCommand,
-  _Object as S3Object,
-} from "@aws-sdk/client-s3"
-import { Upload } from "@aws-sdk/lib-storage"
-import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
+import AWS from "aws-sdk"
 import stream, { Readable } from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"
@@ -22,8 +13,8 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
 import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
 import fsp from "fs/promises"
+import { HeadObjectOutput } from "aws-sdk/clients/s3"
 import { ReadableStream } from "stream/web"
-import { NodeJsRuntimeStreamingBlobPayloadOutputTypes } from "@smithy/types"

 const streamPipeline = promisify(stream.pipeline)
 // use this as a temporary store of buckets that are being created
@@ -93,24 +84,26 @@
  * @constructor
  */
 export function ObjectStore(
+  bucket: string,
   opts: { presigning: boolean } = { presigning: false }
 ) {
-  const config: S3ClientConfig = {
-    forcePathStyle: true,
-    credentials: {
-      accessKeyId: env.MINIO_ACCESS_KEY!,
-      secretAccessKey: env.MINIO_SECRET_KEY!,
-    },
+  const config: AWS.S3.ClientConfiguration = {
+    s3ForcePathStyle: true,
+    signatureVersion: "v4",
+    apiVersion: "2006-03-01",
+    accessKeyId: env.MINIO_ACCESS_KEY,
+    secretAccessKey: env.MINIO_SECRET_KEY,
     region: env.AWS_REGION,
   }
+  if (bucket) {
+    config.params = {
+      Bucket: sanitizeBucket(bucket),
+    }
+  }
   // for AWS Credentials using temporary session token
   if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
-    config.credentials = {
-      accessKeyId: env.MINIO_ACCESS_KEY!,
-      secretAccessKey: env.MINIO_SECRET_KEY!,
-      sessionToken: env.AWS_SESSION_TOKEN,
-    }
+    config.sessionToken = env.AWS_SESSION_TOKEN
   }
   // custom S3 is in use i.e. minio
@@ -120,13 +113,13 @@
       // Normally a signed url will need to be generated with a specified host in mind.
       // To support dynamic hosts, e.g. some unknown self-hosted installation url,
       // use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx
-      config.endpoint = "http://minio-service"
+      config.endpoint = "minio-service"
     } else {
       config.endpoint = env.MINIO_URL
     }
   }

-  return new S3(config)
+  return new AWS.S3(config)
 }

 /**
@@ -139,25 +132,26 @@
 ): Promise<{ created: boolean; exists: boolean }> {
   bucketName = sanitizeBucket(bucketName)
   try {
-    await client.headBucket({
-      Bucket: bucketName,
-    })
+    await client
+      .headBucket({
+        Bucket: bucketName,
+      })
+      .promise()
     return { created: false, exists: true }
   } catch (err: any) {
-    const statusCode = err.statusCode || err.$response?.statusCode
-    const promises: Record<string, Promise<any> | undefined> =
-      STATE.bucketCreationPromises
-    const doesntExist = statusCode === 404,
-      noAccess = statusCode === 403
+    const promises: any = STATE.bucketCreationPromises
+    const doesntExist = err.statusCode === 404,
+      noAccess = err.statusCode === 403
     if (promises[bucketName]) {
       await promises[bucketName]
       return { created: false, exists: true }
     } else if (doesntExist || noAccess) {
       if (doesntExist) {
-        promises[bucketName] = client.createBucket({
-          Bucket: bucketName,
-        })
+        promises[bucketName] = client
+          .createBucket({
+            Bucket: bucketName,
+          })
+          .promise()
         await promises[bucketName]
         delete promises[bucketName]
         return { created: true, exists: false }
@@ -186,26 +180,25 @@
   const fileBytes = path ? (await fsp.open(path)).createReadStream() : body

-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)

   if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    await objectStore.putBucketLifecycleConfiguration(ttlConfig)
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }

   let contentType = type
-  const finalContentType = contentType
-    ? contentType
-    : extension
-      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
-      : CONTENT_TYPE_MAP.txt
-  const config: PutObjectCommandInput = {
+  if (!contentType) {
+    contentType = extension
+      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
+      : CONTENT_TYPE_MAP.txt
+  }
+  const config: any = {
     // windows file paths need to be converted to forward slashes for s3
-    Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filename),
-    Body: fileBytes as stream.Readable | Buffer,
-    ContentType: finalContentType,
+    Body: fileBytes,
+    ContentType: contentType,
   }
   if (metadata && typeof metadata === "object") {
     // remove any nullish keys from the metadata object, as these may be considered invalid
@@ -214,15 +207,10 @@
         delete metadata[key]
       }
     }
-    config.Metadata = metadata as Record<string, string>
+    config.Metadata = metadata
   }

-  const upload = new Upload({
-    client: objectStore,
-    params: config,
-  })
-  return upload.done()
+  return objectStore.upload(config).promise()
 }

 /**
@@ -241,12 +229,12 @@
     throw new Error("Stream to upload is invalid/undefined")
   }
   const extension = filename.split(".").pop()
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)

   if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    await objectStore.putBucketLifecycleConfiguration(ttlConfig)
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }

   // Set content type for certain known extensions
@@ -279,15 +267,13 @@
     ...extra,
   }

-  const upload = new Upload({
-    client: objectStore,
-    params,
-  })
-  const details = await upload.done()
-  const headDetails = await objectStore.headObject({
-    Bucket: bucket,
-    Key: objKey,
-  })
+  const details = await objectStore.upload(params).promise()
+  const headDetails = await objectStore
+    .headObject({
+      Bucket: bucket,
+      Key: objKey,
+    })
+    .promise()
   return {
     ...details,
     ContentLength: headDetails.ContentLength,
@@ -298,44 +284,35 @@
 /**
  * retrieves the contents of a file from the object store, if it is a known content type it
  * will be converted, otherwise it will be returned as a buffer stream.
  */
-export async function retrieve(
-  bucketName: string,
-  filepath: string
-): Promise<string | stream.Readable> {
-  const objectStore = ObjectStore()
+export async function retrieve(bucketName: string, filepath: string) {
+  const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filepath),
   }
-  const response = await objectStore.getObject(params)
-  if (!response.Body) {
-    throw new Error("Unable to retrieve object")
-  }
-  const nodeResponse =
-    response.Body as NodeJsRuntimeStreamingBlobPayloadOutputTypes
+  const response: any = await objectStore.getObject(params).promise()
   // currently these are all strings
   if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
-    return nodeResponse.toString()
+    return response.Body.toString("utf8")
   } else {
-    return nodeResponse
+    return response.Body
   }
 }

-export async function listAllObjects(
-  bucketName: string,
-  path: string
-): Promise<S3Object[]> {
-  const objectStore = ObjectStore()
+export async function listAllObjects(bucketName: string, path: string) {
+  const objectStore = ObjectStore(bucketName)
   const list = (params: ListParams = {}) => {
-    return objectStore.listObjectsV2({
-      ...params,
-      Bucket: sanitizeBucket(bucketName),
-      Prefix: sanitizeKey(path),
-    })
+    return objectStore
+      .listObjectsV2({
+        ...params,
+        Bucket: sanitizeBucket(bucketName),
+        Prefix: sanitizeKey(path),
+      })
+      .promise()
   }
   let isTruncated = false,
     token,
-    objects: Object[] = []
+    objects: AWS.S3.Types.Object[] = []
   do {
     let params: ListParams = {}
     if (token) {
@@ -354,19 +331,18 @@
 /**
  * Generate a presigned url with a default TTL of 1 hour
  */
-export async function getPresignedUrl(
+export function getPresignedUrl(
   bucketName: string,
   key: string,
   durationSeconds = 3600
 ) {
-  const objectStore = ObjectStore({ presigning: true })
+  const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(key),
+    Expires: durationSeconds,
   }
-  const url = await getSignedUrl(objectStore, new GetObjectCommand(params), {
-    expiresIn: durationSeconds,
-  })
+  const url = objectStore.getSignedUrl("getObject", params)

   if (!env.MINIO_ENABLED) {
     // return the full URL to the client
@@ -390,11 +366,7 @@ export async function retrieveToTmp(bucketName: string, filepath: string) {
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
   const outputPath = join(budibaseTempDir(), v4())
-  if (data instanceof stream.Readable) {
-    data.pipe(fs.createWriteStream(outputPath))
-  } else {
-    fs.writeFileSync(outputPath, data)
-  }
+  fs.writeFileSync(outputPath, data)
   return outputPath
 }
@@ -436,17 +408,17 @@ export async function retrieveDirectory(bucketName: string, path: string) {
 /**
  * Delete a single file.
  */
 export async function deleteFile(bucketName: string, filepath: string) {
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,
     Key: sanitizeKey(filepath),
   }
-  return objectStore.deleteObject(params)
+  return objectStore.deleteObject(params).promise()
 }

 export async function deleteFiles(bucketName: string, filepaths: string[]) {
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,
@@ -454,7 +426,7 @@ export async function deleteFiles(bucketName: string, filepaths: string[]) {
       Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),
     },
   }
-  return objectStore.deleteObjects(params)
+  return objectStore.deleteObjects(params).promise()
 }

 /**
@@ -466,13 +438,13 @@ export async function deleteFolder(
 ): Promise<any> {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
-  const client = ObjectStore()
+  const client = ObjectStore(bucketName)
   const listParams = {
     Bucket: bucketName,
     Prefix: folder,
   }

-  const existingObjectsResponse = await client.listObjects(listParams)
+  const existingObjectsResponse = await client.listObjects(listParams).promise()
   if (existingObjectsResponse.Contents?.length === 0) {
     return
   }
@@ -487,7 +459,7 @@ export async function deleteFolder(
     deleteParams.Delete.Objects.push({ Key: content.Key })
   })

-  const deleteResponse = await client.deleteObjects(deleteParams)
+  const deleteResponse = await client.deleteObjects(deleteParams).promise()
   // can only empty 1000 items at once
   if (deleteResponse.Deleted?.length === 1000) {
     return deleteFolder(bucketName, folder)
@@ -562,33 +534,29 @@
 ): Promise<Readable> {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
-  const client = ObjectStore()
+  const client = ObjectStore(bucketName)
   const params = {
     Bucket: bucketName,
     Key: path,
   }
-  const response = await client.getObject(params)
-  if (!response.Body || !(response.Body instanceof stream.Readable)) {
-    throw new Error("Unable to retrieve stream - invalid response")
-  }
-  return response.Body
+  return client.getObject(params).createReadStream()
 }

 export async function getObjectMetadata(
   bucket: string,
   path: string
-): Promise<HeadObjectCommandOutput> {
+): Promise<HeadObjectOutput> {
   bucket = sanitizeBucket(bucket)
   path = sanitizeKey(path)
-  const client = ObjectStore()
+  const client = ObjectStore(bucket)
   const params = {
     Bucket: bucket,
     Key: path,
   }

   try {
-    return await client.headObject(params)
+    return await client.headObject(params).promise()
   } catch (err: any) {
     throw new Error("Unable to retrieve metadata from object")
   }
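The mechanical pattern repeated through this file: v3's direct `await client.operation(params)` becomes v2's request-object style, where the SDK returns an `AWS.Request` and `.promise()` (or `.createReadStream()` for streaming reads) actually dispatches it. A condensed sketch of the two call shapes this diff converts between (bucket and key names are illustrative):

```typescript
import AWS from "aws-sdk"

async function v2Patterns() {
  const client = new AWS.S3({ s3ForcePathStyle: true, signatureVersion: "v4" })

  // Promise-style: .promise() sends the request and resolves with the result.
  const head = await client.headObject({ Bucket: "b", Key: "k" }).promise()
  console.log(head.ContentLength)

  // Stream-style: skip .promise() and ask the request for a readable stream,
  // which is why getReadStream above collapses to a single expression.
  const body = client.getObject({ Bucket: "b", Key: "k" }).createReadStream()
  body.pipe(process.stdout)
}
```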


@@ -2,10 +2,7 @@ import path, { join } from "path"
 import { tmpdir } from "os"
 import fs from "fs"
 import env from "../environment"
-import {
-  LifecycleRule,
-  PutBucketLifecycleConfigurationCommandInput,
-} from "@aws-sdk/client-s3"
+import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
 import * as objectStore from "./objectStore"
 import {
   AutomationAttachment,
@@ -46,8 +43,8 @@ export function budibaseTempDir() {
 export const bucketTTLConfig = (
   bucketName: string,
   days: number
-): PutBucketLifecycleConfigurationCommandInput => {
-  const lifecycleRule: LifecycleRule = {
+): PutBucketLifecycleConfigurationRequest => {
+  const lifecycleRule = {
     ID: `${bucketName}-ExpireAfter${days}days`,
     Prefix: "",
     Status: "Enabled",

packages/bbui/src/helpers.d.ts (vendored, new file)

@@ -0,0 +1,3 @@
+declare module "./helpers" {
+  export const cloneDeep: <T>(obj: T) => T
+}


@@ -43,7 +43,6 @@
   export let showDataProviders = true

   const dispatch = createEventDispatcher()
-  const arrayTypes = ["attachment", "array"]

   let anchorRight, dropdownRight
   let drawer
@@ -116,8 +115,11 @@
     }
   })
   $: fields = bindings
-    .filter(x => arrayTypes.includes(x.fieldSchema?.type))
-    .filter(x => x.fieldSchema?.tableId != null)
+    .filter(
+      x =>
+        x.fieldSchema?.type === "attachment" ||
+        (x.fieldSchema?.type === "array" && x.tableId)
+    )
     .map(binding => {
       const { providerId, readableBinding, runtimeBinding } = binding
       const { name, type, tableId } = binding.fieldSchema


@@ -1,138 +0,0 @@
-import { derived } from "svelte/store"
-import { admin } from "./admin"
-import { auth } from "./auth"
-import { isEnabled } from "@/helpers/featureFlags"
-import { sdk } from "@budibase/shared-core"
-import { FeatureFlag } from "@budibase/types"
-
-export const menu = derived([admin, auth], ([$admin, $auth]) => {
-  const user = $auth?.user
-  const isAdmin = sdk.users.isAdmin(user)
-  const cloud = $admin?.cloud
-
-  // Determine user sub pages
-  let userSubPages = [
-    {
-      title: "Users",
-      href: "/builder/portal/users/users",
-    },
-  ]
-  userSubPages.push({
-    title: "Groups",
-    href: "/builder/portal/users/groups",
-  })
-
-  // Pages that all devs and admins can access
-  let menu = [
-    {
-      title: "Apps",
-      href: "/builder/portal/apps",
-    },
-  ]
-  if (sdk.users.isGlobalBuilder(user)) {
-    menu.push({
-      title: "Users",
-      href: "/builder/portal/users",
-      subPages: userSubPages,
-    })
-    menu.push({
-      title: "Plugins",
-      href: "/builder/portal/plugins",
-    })
-  }
-
-  // Add settings page for admins
-  if (isAdmin) {
-    let settingsSubPages = [
-      {
-        title: "Auth",
-        href: "/builder/portal/settings/auth",
-      },
-      {
-        title: "Email",
-        href: "/builder/portal/settings/email",
-      },
-      {
-        title: "Organisation",
-        href: "/builder/portal/settings/organisation",
-      },
-      {
-        title: "Branding",
-        href: "/builder/portal/settings/branding",
-      },
-      {
-        title: "Environment",
-        href: "/builder/portal/settings/environment",
-      },
-    ]
-    if (isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) {
-      settingsSubPages.push({
-        title: "AI",
-        href: "/builder/portal/settings/ai",
-      })
-    }
-    if (!cloud) {
-      settingsSubPages.push({
-        title: "Version",
-        href: "/builder/portal/settings/version",
-      })
-      settingsSubPages.push({
-        title: "Diagnostics",
-        href: "/builder/portal/settings/diagnostics",
-      })
-    }
-    menu.push({
-      title: "Settings",
-      href: "/builder/portal/settings",
-      subPages: [...settingsSubPages].sort((a, b) =>
-        a.title.localeCompare(b.title)
-      ),
-    })
-  }
-
-  // Add account page
-  let accountSubPages = [
-    {
-      title: "Usage",
-      href: "/builder/portal/account/usage",
-    },
-  ]
-  if (isAdmin) {
-    accountSubPages.push({
-      title: "Audit Logs",
-      href: "/builder/portal/account/auditLogs",
-    })
-    if (!cloud) {
-      accountSubPages.push({
-        title: "System Logs",
-        href: "/builder/portal/account/systemLogs",
-      })
-    }
-  }
-  if (cloud && user?.accountPortalAccess) {
-    accountSubPages.push({
-      title: "Upgrade",
-      href: $admin?.accountPortalUrl + "/portal/upgrade",
-    })
-  } else if (!cloud && isAdmin) {
-    accountSubPages.push({
-      title: "Upgrade",
-      href: "/builder/portal/account/upgrade",
-    })
-  }
-  // add license check here
-  if (user?.accountPortalAccess && user.account.stripeCustomerId) {
-    accountSubPages.push({
-      title: "Billing",
-      href: $admin?.accountPortalUrl + "/portal/billing",
-    })
-  }
-  menu.push({
-    title: "Account",
-    href: "/builder/portal/account",
-    subPages: accountSubPages,
-  })
-
-  return menu
-})


@@ -0,0 +1,149 @@
+import { derived, Readable } from "svelte/store"
+import { admin } from "./admin"
+import { auth } from "./auth"
+import { isEnabled } from "@/helpers/featureFlags"
+import { sdk } from "@budibase/shared-core"
+import { FeatureFlag } from "@budibase/types"
+
+interface MenuItem {
+  title: string
+  href: string
+  subPages?: MenuItem[]
+}
+
+export const menu: Readable<MenuItem[]> = derived(
+  [admin, auth],
+  ([$admin, $auth]) => {
+    const user = $auth?.user
+    const isAdmin = user != null && sdk.users.isAdmin(user)
+    const isGlobalBuilder = user != null && sdk.users.isGlobalBuilder(user)
+    const cloud = $admin?.cloud
+
+    // Determine user sub pages
+    let userSubPages: MenuItem[] = [
+      {
+        title: "Users",
+        href: "/builder/portal/users/users",
+      },
+    ]
+    userSubPages.push({
+      title: "Groups",
+      href: "/builder/portal/users/groups",
+    })
+
+    // Pages that all devs and admins can access
+    let menu: MenuItem[] = [
+      {
+        title: "Apps",
+        href: "/builder/portal/apps",
+      },
+    ]
+    if (isGlobalBuilder) {
+      menu.push({
+        title: "Users",
+        href: "/builder/portal/users",
+        subPages: userSubPages,
+      })
+      menu.push({
+        title: "Plugins",
+        href: "/builder/portal/plugins",
+      })
+    }
+
+    // Add settings page for admins
+    if (isAdmin) {
+      let settingsSubPages: MenuItem[] = [
+        {
+          title: "Auth",
+          href: "/builder/portal/settings/auth",
+        },
+        {
+          title: "Email",
+          href: "/builder/portal/settings/email",
+        },
+        {
+          title: "Organisation",
+          href: "/builder/portal/settings/organisation",
+        },
+        {
+          title: "Branding",
+          href: "/builder/portal/settings/branding",
+        },
+        {
+          title: "Environment",
+          href: "/builder/portal/settings/environment",
+        },
+      ]
+      if (isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) {
+        settingsSubPages.push({
+          title: "AI",
+          href: "/builder/portal/settings/ai",
+        })
+      }
+      if (!cloud) {
+        settingsSubPages.push({
+          title: "Version",
+          href: "/builder/portal/settings/version",
+        })
+        settingsSubPages.push({
+          title: "Diagnostics",
+          href: "/builder/portal/settings/diagnostics",
+        })
+      }
+      menu.push({
+        title: "Settings",
+        href: "/builder/portal/settings",
+        subPages: [...settingsSubPages].sort((a, b) =>
+          a.title.localeCompare(b.title)
+        ),
+      })
+    }
+
+    // Add account page
+    let accountSubPages: MenuItem[] = [
+      {
+        title: "Usage",
+        href: "/builder/portal/account/usage",
+      },
+    ]
+    if (isAdmin) {
+      accountSubPages.push({
+        title: "Audit Logs",
+        href: "/builder/portal/account/auditLogs",
+      })
+      if (!cloud) {
+        accountSubPages.push({
+          title: "System Logs",
+          href: "/builder/portal/account/systemLogs",
+        })
+      }
+    }
+    if (cloud && user?.accountPortalAccess) {
+      accountSubPages.push({
+        title: "Upgrade",
+        href: $admin?.accountPortalUrl + "/portal/upgrade",
+      })
+    } else if (!cloud && isAdmin) {
+      accountSubPages.push({
+        title: "Upgrade",
+        href: "/builder/portal/account/upgrade",
+      })
+    }
+    // add license check here
+    if (user?.accountPortalAccess && user?.account?.stripeCustomerId) {
+      accountSubPages.push({
+        title: "Billing",
+        href: $admin?.accountPortalUrl + "/portal/billing",
+      })
+    }
+    menu.push({
+      title: "Account",
+      href: "/builder/portal/account",
+      subPages: accountSubPages,
+    })
+
+    return menu
+  }
+)


@@ -1,31 +0,0 @@
-import { writable, get } from "svelte/store"
-import { API } from "@/api"
-import { auth } from "@/stores/portal"
-
-const OIDC_CONFIG = {
-  logo: undefined,
-  name: undefined,
-  uuid: undefined,
-}
-
-export function createOidcStore() {
-  const store = writable(OIDC_CONFIG)
-  const { set, subscribe } = store
-  return {
-    subscribe,
-    set,
-    init: async () => {
-      const tenantId = get(auth).tenantId
-      const config = await API.getOIDCConfig(tenantId)
-      if (Object.keys(config || {}).length) {
-        // Just use the first config for now.
-        // We will be support multiple logins buttons later on.
-        set(...config)
-      } else {
-        set(OIDC_CONFIG)
-      }
-    },
-  }
-}
-
-export const oidc = createOidcStore()


@@ -0,0 +1,21 @@
+import { get } from "svelte/store"
+import { API } from "@/api"
+import { auth } from "@/stores/portal"
+import { BudiStore } from "../BudiStore"
+import { PublicOIDCConfig } from "@budibase/types"
+
+class OIDCStore extends BudiStore<PublicOIDCConfig> {
+  constructor() {
+    super({})
+  }
+
+  async init() {
+    const tenantId = get(auth).tenantId
+    const configs = await API.getOIDCConfigs(tenantId)
+    // Just use the first config for now.
+    // We will be support multiple logins buttons later on.
+    this.set(configs[0] || {})
+  }
+}
+
+export const oidc = new OIDCStore()


@@ -1,66 +0,0 @@
-import { writable, get } from "svelte/store"
-import { API } from "@/api"
-import { auth } from "@/stores/portal"
-import _ from "lodash"
-
-const DEFAULT_CONFIG = {
-  platformUrl: "",
-  logoUrl: undefined,
-  faviconUrl: undefined,
-  emailBrandingEnabled: true,
-  testimonialsEnabled: true,
-  platformTitle: "Budibase",
-  loginHeading: undefined,
-  loginButton: undefined,
-  metaDescription: undefined,
-  metaImageUrl: undefined,
-  metaTitle: undefined,
-  docsUrl: undefined,
-  company: "Budibase",
-  oidc: undefined,
-  google: undefined,
-  googleDatasourceConfigured: undefined,
-  oidcCallbackUrl: "",
-  googleCallbackUrl: "",
-  isSSOEnforced: false,
-  loaded: false,
-}
-
-export function createOrganisationStore() {
-  const store = writable(DEFAULT_CONFIG)
-  const { subscribe, set } = store
-
-  async function init() {
-    const tenantId = get(auth).tenantId
-    const settingsConfigDoc = await API.getTenantConfig(tenantId)
-    set({ ...DEFAULT_CONFIG, ...settingsConfigDoc.config, loaded: true })
-  }
-
-  async function save(config) {
-    // Delete non-persisted fields
-    const storeConfig = _.cloneDeep(get(store))
-    delete storeConfig.oidc
-    delete storeConfig.google
-    delete storeConfig.googleDatasourceConfigured
-    delete storeConfig.oidcCallbackUrl
-    delete storeConfig.googleCallbackUrl
-    // delete internal store field
-    delete storeConfig.loaded
-    await API.saveConfig({
-      type: "settings",
-      config: { ...storeConfig, ...config },
-    })
-    await init()
-  }
-
-  return {
-    subscribe,
-    set,
-    save,
-    init,
-  }
-}
-
-export const organisation = createOrganisationStore()


@@ -0,0 +1,71 @@
+import { get } from "svelte/store"
+import { API } from "@/api"
+import { auth } from "@/stores/portal"
+import {
+  ConfigType,
+  PublicSettingsInnerConfig,
+  SettingsBrandingConfig,
+  SettingsInnerConfig,
+} from "@budibase/types"
+import { BudiStore } from "../BudiStore"
+
+interface LocalOrganisationState {
+  loaded: boolean
+}
+
+type SavedOrganisationState = SettingsInnerConfig & SettingsBrandingConfig
+type OrganisationState = SavedOrganisationState &
+  PublicSettingsInnerConfig &
+  LocalOrganisationState
+
+const DEFAULT_STATE: OrganisationState = {
+  platformUrl: "",
+  emailBrandingEnabled: true,
+  testimonialsEnabled: true,
+  platformTitle: "Budibase",
+  company: "Budibase",
+  google: false,
+  googleDatasourceConfigured: false,
+  oidc: false,
+  oidcCallbackUrl: "",
+  googleCallbackUrl: "",
+  loaded: false,
+}
+
+class OrganisationStore extends BudiStore<OrganisationState> {
+  constructor() {
+    super(DEFAULT_STATE)
+  }
+
+  async init() {
+    const tenantId = get(auth).tenantId
+    const settingsConfigDoc = await API.getTenantConfig(tenantId)
+    this.set({ ...DEFAULT_STATE, ...settingsConfigDoc.config, loaded: true })
+  }
+
+  async save(changes: Partial<SavedOrganisationState>) {
+    // Strip non persisted fields
+    const {
+      oidc,
+      google,
+      googleDatasourceConfigured,
+      oidcCallbackUrl,
+      googleCallbackUrl,
+      loaded,
+      ...config
+    } = get(this.store)
+    // Save new config
+    const newConfig: SavedOrganisationState = {
+      ...config,
+      ...changes,
+    }
+    await API.saveConfig({
+      type: ConfigType.SETTINGS,
+      config: newConfig,
+    })
+    await this.init()
+  }
+}
+
+export const organisation = new OrganisationStore()
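This is the commit's titular pattern: factory functions returning ad-hoc store objects become classes extending `BudiStore`, which pins down the state type. The real base class lives in `../BudiStore` and is not shown in this diff; the sketch below is a hypothetical minimal version, inferred only from how `set`, `subscribe`, and `this.store` are used above:

```typescript
import { writable, Writable } from "svelte/store"

// Hypothetical minimal BudiStore shape, for illustration only —
// the actual implementation may differ.
class BudiStoreSketch<T> {
  protected store: Writable<T>
  subscribe: Writable<T>["subscribe"]
  set: Writable<T>["set"]

  constructor(initial: T) {
    this.store = writable(initial)
    // Expose the writable's methods so subclasses behave as Svelte stores.
    this.subscribe = this.store.subscribe
    this.set = this.store.set
  }
}
```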


@@ -3,7 +3,6 @@ import fs from "fs"
 import { join } from "path"
 import { TEMP_DIR, MINIO_DIR } from "./utils"
 import { progressBar } from "../utils"
-import * as stream from "node:stream"

 const {
   ObjectStoreBuckets,
@@ -21,21 +20,15 @@ export async function exportObjects() {
   let fullList: any[] = []
   let errorCount = 0
   for (let bucket of bucketList) {
-    const client = ObjectStore()
+    const client = ObjectStore(bucket)
     try {
-      await client.headBucket({
-        Bucket: bucket,
-      })
+      await client.headBucket().promise()
     } catch (err) {
       errorCount++
       continue
     }
-    const list = await client.listObjectsV2({
-      Bucket: bucket,
-    })
-    fullList = fullList.concat(
-      list.Contents?.map(el => ({ ...el, bucket })) || []
-    )
+    const list = (await client.listObjectsV2().promise()) as { Contents: any[] }
+    fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
   }
   if (errorCount === bucketList.length) {
     throw new Error("Unable to access MinIO/S3 - check environment config.")
@@ -50,13 +43,7 @@ export async function exportObjects() {
       const dirs = possiblePath.slice(0, possiblePath.length - 1)
       fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
     }
-    if (data instanceof stream.Readable) {
-      data.pipe(
-        fs.createWriteStream(join(path, object.bucket, ...possiblePath))
-      )
-    } else {
-      fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
-    }
+    fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
     bar.update(++count)
   }
   bar.stop()
@@ -73,7 +60,7 @@ export async function importObjects() {
   const bar = progressBar(total)
   let count = 0
   for (let bucket of buckets) {
-    const client = ObjectStore()
+    const client = ObjectStore(bucket)
     await createBucketIfNotExists(client, bucket)
     const files = await uploadDirectory(bucket, join(path, bucket), "/")
     count += files.length


@@ -1,12 +1,12 @@
 import { API } from "api"
-import TableFetch from "@budibase/frontend-core/src/fetch/TableFetch.js"
-import ViewFetch from "@budibase/frontend-core/src/fetch/ViewFetch.js"
-import QueryFetch from "@budibase/frontend-core/src/fetch/QueryFetch.js"
-import RelationshipFetch from "@budibase/frontend-core/src/fetch/RelationshipFetch.js"
-import NestedProviderFetch from "@budibase/frontend-core/src/fetch/NestedProviderFetch.js"
-import FieldFetch from "@budibase/frontend-core/src/fetch/FieldFetch.js"
-import JSONArrayFetch from "@budibase/frontend-core/src/fetch/JSONArrayFetch.js"
-import ViewV2Fetch from "@budibase/frontend-core/src/fetch/ViewV2Fetch.js"
+import TableFetch from "@budibase/frontend-core/src/fetch/TableFetch"
+import ViewFetch from "@budibase/frontend-core/src/fetch/ViewFetch"
+import QueryFetch from "@budibase/frontend-core/src/fetch/QueryFetch"
+import RelationshipFetch from "@budibase/frontend-core/src/fetch/RelationshipFetch"
+import NestedProviderFetch from "@budibase/frontend-core/src/fetch/NestedProviderFetch"
+import FieldFetch from "@budibase/frontend-core/src/fetch/FieldFetch"
+import JSONArrayFetch from "@budibase/frontend-core/src/fetch/JSONArrayFetch"
+import ViewV2Fetch from "@budibase/frontend-core/src/fetch/ViewV2Fetch"
 import QueryArrayFetch from "@budibase/frontend-core/src/fetch/QueryArrayFetch"

 /**


@@ -16,7 +16,7 @@ import { BaseAPIClient } from "./types"
 export interface ConfigEndpoints {
   getConfig: (type: ConfigType) => Promise<FindConfigResponse>
   getTenantConfig: (tentantId: string) => Promise<GetPublicSettingsResponse>
-  getOIDCConfig: (tenantId: string) => Promise<GetPublicOIDCConfigResponse>
+  getOIDCConfigs: (tenantId: string) => Promise<GetPublicOIDCConfigResponse>
   getOIDCLogos: () => Promise<Config<OIDCLogosConfig>>
   saveConfig: (config: SaveConfigRequest) => Promise<SaveConfigResponse>
   deleteConfig: (id: string, rev: string) => Promise<DeleteConfigResponse>
@@ -73,7 +73,7 @@ export const buildConfigEndpoints = (API: BaseAPIClient): ConfigEndpoints => ({
   * Gets the OIDC config for a certain tenant.
   * @param tenantId the tenant ID to get the config for
   */
-  getOIDCConfig: async tenantId => {
+  getOIDCConfigs: async tenantId => {
    return await API.get({
      url: `/api/global/configs/public/oidc?tenantId=${tenantId}`,
    })


@@ -3,7 +3,15 @@ import { BaseAPIClient } from "./types"

 export interface ViewEndpoints {
   // Missing request or response types
-  fetchViewData: (name: string, opts: any) => Promise<Row[]>
+  fetchViewData: (
+    name: string,
+    opts: {
+      calculation?: string
+      field?: string
+      groupBy?: string
+      tableId: string
+    }
+  ) => Promise<Row[]>
   exportView: (name: string, format: string) => Promise<any>
   saveView: (view: any) => Promise<any>
   deleteView: (name: string) => Promise<any>
@@ -20,7 +28,9 @@ export const buildViewEndpoints = (API: BaseAPIClient): ViewEndpoints => ({
   fetchViewData: async (name, { field, groupBy, calculation }) => {
     const params = new URLSearchParams()
     if (calculation) {
-      params.set("field", field)
+      if (field) {
+        params.set("field", field)
+      }
       params.set("calculation", calculation)
     }
     if (groupBy) {


@@ -1,6 +1,7 @@
 import {
   CreateViewRequest,
   CreateViewResponse,
+  PaginatedSearchRowResponse,
   SearchRowResponse,
   SearchViewRowRequest,
   UpdateViewRequest,
@@ -13,10 +14,14 @@ export interface ViewV2Endpoints {
   fetchDefinition: (viewId: string) => Promise<ViewResponseEnriched>
   create: (view: CreateViewRequest) => Promise<CreateViewResponse>
   update: (view: UpdateViewRequest) => Promise<UpdateViewResponse>
-  fetch: (
+  fetch: <T extends SearchViewRowRequest>(
     viewId: string,
-    opts: SearchViewRowRequest
-  ) => Promise<SearchRowResponse>
+    opts: T
+  ) => Promise<
+    T extends { paginate: true }
+      ? PaginatedSearchRowResponse
+      : SearchRowResponse
+  >
   delete: (viewId: string) => Promise<void>
 }
@@ -59,7 +64,7 @@ export const buildViewV2Endpoints = (API: BaseAPIClient): ViewV2Endpoints => ({
   * @param viewId the id of the view
   * @param opts the search options
   */
-  fetch: async (viewId, opts) => {
+  fetch: async (viewId, opts: SearchViewRowRequest) => {
    return await API.post({
      url: `/api/v2/views/${encodeURIComponent(viewId)}/search`,
      body: opts,
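The reworked `fetch` signature uses a conditional return type: the generic parameter captures the literal type of the options a caller passes, so `paginate: true` selects the paginated response type at compile time. A self-contained sketch of the mechanism (type and function names here are illustrative, not Budibase's):

```typescript
interface SearchOpts {
  query?: string
  paginate?: boolean
}
interface PlainResponse {
  rows: unknown[]
}
interface PaginatedResponse extends PlainResponse {
  hasNextPage: boolean
  bookmark?: string
}

async function fetchRows<T extends SearchOpts>(
  opts: T
): Promise<T extends { paginate: true } ? PaginatedResponse : PlainResponse> {
  const base: PlainResponse = { rows: [] }
  // The conditional type can't be proven inside the body, hence the cast.
  return (opts.paginate ? { ...base, hasNextPage: false } : base) as any
}

async function demo() {
  const paged = await fetchRows({ paginate: true })
  paged.hasNextPage // OK: narrowed to PaginatedResponse
  const plain = await fetchRows({})
  // plain.hasNextPage  // would be a compile-time error
}
```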


@@ -69,7 +69,7 @@ export const deriveStores = (context: StoreContext): ConfigDerivedStore => {
   }

   // Disable features for non DS+
-  if (!["table", "viewV2"].includes(type)) {
+  if (type && !["table", "viewV2"].includes(type)) {
     config.canAddRows = false
     config.canEditRows = false
     config.canDeleteRows = false


@@ -1,3 +1,5 @@
+// TODO: datasource and defitions are unions of the different implementations. At this point, the datasource does not know what type is being used, and the assignations will cause TS exceptions. Casting it "as any" for now. This should be fixed improving the type usages.
+
 import { derived, get, Readable, Writable } from "svelte/store"
 import { getDatasourceDefinition, getDatasourceSchema } from "../../../fetch"
 import { enrichSchemaWithRelColumns, memo } from "../../../utils"
@@ -71,10 +73,10 @@ export const deriveStores = (context: StoreContext): DerivedDatasourceStore => {
   } = context

   const schema = derived(definition, $definition => {
-    let schema: Record<string, UIFieldSchema> = getDatasourceSchema({
+    const schema: Record<string, any> | undefined = getDatasourceSchema({
       API,
-      datasource: get(datasource),
-      definition: $definition,
+      datasource: get(datasource) as any, // TODO: see line 1
+      definition: $definition ?? undefined,
     })
     if (!schema) {
       return null
@@ -82,7 +84,7 @@ export const deriveStores = (context: StoreContext): DerivedDatasourceStore => {

     // Ensure schema is configured as objects.
     // Certain datasources like queries use primitives.
-    Object.keys(schema || {}).forEach(key => {
+    Object.keys(schema).forEach(key => {
       if (typeof schema[key] !== "object") {
         schema[key] = { name: key, type: schema[key] }
       }
@@ -130,13 +132,13 @@ export const deriveStores = (context: StoreContext): DerivedDatasourceStore => {
     ([$datasource, $definition]) => {
       let type = $datasource?.type
       if (type === "provider") {
-        type = ($datasource as any).value?.datasource?.type
+        type = ($datasource as any).value?.datasource?.type // TODO: see line 1
       }
       // Handle calculation views
       if (type === "viewV2" && $definition?.type === ViewV2Type.CALCULATION) {
         return false
       }
-      return ["table", "viewV2", "link"].includes(type)
+      return !!type && ["table", "viewV2", "link"].includes(type)
     }
   )
@@ -184,9 +186,9 @@ export const createActions = (context: StoreContext): ActionDatasourceStore => {
   const refreshDefinition = async () => {
     const def = await getDatasourceDefinition({
       API,
-      datasource: get(datasource),
+      datasource: get(datasource) as any, // TODO: see line 1
     })
-    definition.set(def)
+    definition.set(def as any) // TODO: see line 1
   }

   // Saves the datasource definition
@@ -231,7 +233,7 @@ export const createActions = (context: StoreContext): ActionDatasourceStore => {
       if ("default" in newDefinition.schema[column]) {
        delete newDefinition.schema[column].default
      }
-    return await saveDefinition(newDefinition as any)
+    return await saveDefinition(newDefinition as any) // TODO: see line 1
   }

   // Adds a schema mutation for a single field
@@ -307,7 +309,7 @@ export const createActions = (context: StoreContext): ActionDatasourceStore => {
     await saveDefinition({
       ...$definition,
       schema: newSchema,
-    } as any)
+    } as any) // TODO: see line 1
     resetSchemaMutations()
   }

View File

@ -10,9 +10,10 @@ import {
import { tick } from "svelte" import { tick } from "svelte"
import { Helpers } from "@budibase/bbui" import { Helpers } from "@budibase/bbui"
import { sleep } from "../../../utils/utils" import { sleep } from "../../../utils/utils"
import { FieldType, Row, UIFetchAPI, UIRow } from "@budibase/types" import { FieldType, Row, UIRow } from "@budibase/types"
import { getRelatedTableValues } from "../../../utils" import { getRelatedTableValues } from "../../../utils"
import { Store as StoreContext } from "." import { Store as StoreContext } from "."
import DataFetch from "../../../fetch/DataFetch"
interface IndexedUIRow extends UIRow { interface IndexedUIRow extends UIRow {
__idx: number __idx: number
@ -20,7 +21,7 @@ interface IndexedUIRow extends UIRow {
interface RowStore { interface RowStore {
rows: Writable<UIRow[]> rows: Writable<UIRow[]>
fetch: Writable<UIFetchAPI | null> fetch: Writable<DataFetch<any, any, any> | null> // TODO: type this properly, having a union of all the possible options
loaded: Writable<boolean> loaded: Writable<boolean>
refreshing: Writable<boolean> refreshing: Writable<boolean>
loading: Writable<boolean> loading: Writable<boolean>
@ -225,7 +226,7 @@ export const createActions = (context: StoreContext): RowActionStore => {
}) })
// Subscribe to changes of this fetch model // Subscribe to changes of this fetch model
unsubscribe = newFetch.subscribe(async ($fetch: UIFetchAPI) => { unsubscribe = newFetch.subscribe(async $fetch => {
if ($fetch.error) { if ($fetch.error) {
// Present a helpful error to the user // Present a helpful error to the user
let message = "An unknown error occurred" let message = "An unknown error occurred"
@ -253,7 +254,7 @@ export const createActions = (context: StoreContext): RowActionStore => {
// Reset state properties when dataset changes // Reset state properties when dataset changes
if (!$instanceLoaded || resetRows) { if (!$instanceLoaded || resetRows) {
definition.set($fetch.definition) definition.set($fetch.definition as any) // TODO: datasource and definitions are unions of the different implementations. At this point, the datasource does not know what type is being used, and the assignments will cause TS exceptions. Casting it "as any" for now. This should be fixed by improving the type usages.
} }
// Reset scroll state when data changes // Reset scroll state when data changes

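The DataFetch<any, any, any> fetch store above carries a TODO about typing it properly. A sketch of what the suggested union might look like — an assumption, since neither the union nor these names are part of the commit (import paths mirror the relative paths used in this file):

import { Writable } from "svelte/store"
import TableFetch from "../../../fetch/TableFetch"
import ViewV2Fetch from "../../../fetch/ViewV2Fetch"
import QueryFetch from "../../../fetch/QueryFetch"

// One member per implementation in DataFetchMap would complete the union
type AnyFetch = TableFetch | ViewV2Fetch | QueryFetch

interface RowStoreSketch {
  fetch: Writable<AnyFetch | null>
}
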
View File

@ -32,8 +32,8 @@ export const Cookies = {
} }
// Table names // Table names
export const TableNames = { export const enum TableNames {
USERS: "ta_users", USERS = "ta_users",
} }
export const BudibaseRoles = { export const BudibaseRoles = {

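Switching TableNames from a plain object to a const enum changes how consumers compile: member accesses are inlined as literals and no runtime object is emitted. A minimal sketch of the effect (file names are illustrative):

// constants.ts
export const enum TableNames {
  USERS = "ta_users",
}

// consumer.ts - TableNames.USERS compiles to the literal "ta_users",
// so no runtime import of a TableNames object is generated
import { TableNames } from "./constants"
const usersTable: string = TableNames.USERS

The trade-off is that const enum members can only be accessed as literal member expressions, so dynamic lookups or iteration over TableNames stop working, and builds using isolatedModules may need extra care.
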
View File

@ -1,8 +1,17 @@
import DataFetch from "./DataFetch.js" import DataFetch from "./DataFetch"
export default class CustomFetch extends DataFetch { interface CustomDatasource {
data: any
}
type CustomDefinition = Record<string, any>
export default class CustomFetch extends DataFetch<
CustomDatasource,
CustomDefinition
> {
// Gets the correct Budibase type for a JS value // Gets the correct Budibase type for a JS value
getType(value) { getType(value: any) {
if (value == null) { if (value == null) {
return "string" return "string"
} }
@ -22,7 +31,7 @@ export default class CustomFetch extends DataFetch {
} }
// Parses the custom data into an array format // Parses the custom data into an array format
parseCustomData(data) { parseCustomData(data: any) {
if (!data) { if (!data) {
return [] return []
} }
@ -55,7 +64,7 @@ export default class CustomFetch extends DataFetch {
} }
// Enriches the custom data to ensure the structure and format are usable // Enriches the custom data to ensure the structure and format are usable
enrichCustomData(data) { enrichCustomData(data: (string | any)[]) {
if (!data?.length) { if (!data?.length) {
return [] return []
} }
@ -72,7 +81,7 @@ export default class CustomFetch extends DataFetch {
// Try parsing strings // Try parsing strings
if (typeof value === "string") { if (typeof value === "string") {
const split = value.split(",").map(x => x.trim()) const split = value.split(",").map(x => x.trim())
let obj = {} const obj: Record<string, string> = {}
for (let i = 0; i < split.length; i++) { for (let i = 0; i < split.length; i++) {
const suffix = i === 0 ? "" : ` ${i + 1}` const suffix = i === 0 ? "" : ` ${i + 1}`
const key = `Value${suffix}` const key = `Value${suffix}`
@ -87,27 +96,29 @@ export default class CustomFetch extends DataFetch {
} }
// Extracts and parses the custom data from the datasource definition // Extracts and parses the custom data from the datasource definition
getCustomData(datasource) { getCustomData(datasource: CustomDatasource) {
return this.enrichCustomData(this.parseCustomData(datasource?.data)) return this.enrichCustomData(this.parseCustomData(datasource?.data))
} }
async getDefinition(datasource) { async getDefinition() {
const { datasource } = this.options
// Try and work out the schema from the array provided // Try and work out the schema from the array provided
let schema = {} const schema: CustomDefinition = {}
const data = this.getCustomData(datasource) const data = this.getCustomData(datasource)
if (!data?.length) { if (!data?.length) {
return { schema } return { schema }
} }
// Go through every object and extract all valid keys // Go through every object and extract all valid keys
for (let datum of data) { for (const datum of data) {
for (let key of Object.keys(datum)) { for (const key of Object.keys(datum)) {
if (key === "_id") { if (key === "_id") {
continue continue
} }
if (!schema[key]) { if (!schema[key]) {
let type = this.getType(datum[key]) let type = this.getType(datum[key])
let constraints = {} const constraints: any = {}
// Determine whether we should render text columns as options instead // Determine whether we should render text columns as options instead
if (type === "string") { if (type === "string") {

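To make the string-splitting branch above concrete, here is what it produces for a simple comma-separated value (the sample input is illustrative):

const value = "red,green"
const split = value.split(",").map(x => x.trim())
const obj: Record<string, string> = {}
for (let i = 0; i < split.length; i++) {
  // First part gets "Value", later parts get "Value 2", "Value 3", ...
  const suffix = i === 0 ? "" : ` ${i + 1}`
  obj[`Value${suffix}`] = split[i]
}
// obj is now { "Value": "red", "Value 2": "green" }
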
View File

@ -1,25 +1,102 @@
import { writable, derived, get } from "svelte/store" import { writable, derived, get, Writable, Readable } from "svelte/store"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { QueryUtils } from "../utils" import { QueryUtils } from "../utils"
import { convertJSONSchemaToTableSchema } from "../utils/json" import { convertJSONSchemaToTableSchema } from "../utils/json"
import { FieldType, SortOrder, SortType } from "@budibase/types" import {
FieldType,
LegacyFilter,
Row,
SearchFilters,
SortOrder,
SortType,
TableSchema,
UISearchFilter,
} from "@budibase/types"
import { APIClient } from "../api/types"
const { buildQuery, limit: queryLimit, runQuery, sort } = QueryUtils const { buildQuery, limit: queryLimit, runQuery, sort } = QueryUtils
interface DataFetchStore<TDefinition, TQuery> {
rows: Row[]
info: any
schema: TableSchema | null
loading: boolean
loaded: boolean
query: TQuery
pageNumber: number
cursor: string | null
cursors: string[]
resetKey: string
error: {
message: string
status: number
} | null
definition?: TDefinition | null
}
interface DataFetchDerivedStore<TDefinition, TQuery>
extends DataFetchStore<TDefinition, TQuery> {
hasNextPage: boolean
hasPrevPage: boolean
supportsSearch: boolean
supportsSort: boolean
supportsPagination: boolean
}
export interface DataFetchParams<
TDatasource,
TQuery = SearchFilters | undefined
> {
API: APIClient
datasource: TDatasource
query: TQuery
options?: {}
}
/** /**
* Parent class which handles the implementation of fetching data from an * Parent class which handles the implementation of fetching data from an
* internal table or datasource plus. * internal table or datasource plus.
* For other types of datasource, this class is overridden and extended. * For other types of datasource, this class is overridden and extended.
*/ */
export default class DataFetch { export default abstract class DataFetch<
TDatasource extends {},
TDefinition extends {
schema?: Record<string, any> | null
primaryDisplay?: string
},
TQuery extends {} = SearchFilters
> {
API: APIClient
features: {
supportsSearch: boolean
supportsSort: boolean
supportsPagination: boolean
}
options: {
datasource: TDatasource
limit: number
// Search config
filter: UISearchFilter | LegacyFilter[] | null
query: TQuery
// Sorting config
sortColumn: string | null
sortOrder: SortOrder
sortType: SortType | null
// Pagination config
paginate: boolean
// Client side feature customisation
clientSideSearching: boolean
clientSideSorting: boolean
clientSideLimiting: boolean
}
store: Writable<DataFetchStore<TDefinition, TQuery>>
derivedStore: Readable<DataFetchDerivedStore<TDefinition, TQuery>>
/** /**
* Constructs a new DataFetch instance. * Constructs a new DataFetch instance.
* @param opts the fetch options * @param opts the fetch options
*/ */
constructor(opts) { constructor(opts: DataFetchParams<TDatasource, TQuery>) {
// API client
this.API = null
// Feature flags // Feature flags
this.features = { this.features = {
supportsSearch: false, supportsSearch: false,
@ -29,12 +106,12 @@ export default class DataFetch {
// Config // Config
this.options = { this.options = {
datasource: null, datasource: opts.datasource,
limit: 10, limit: 10,
// Search config // Search config
filter: null, filter: null,
query: null, query: opts.query,
// Sorting config // Sorting config
sortColumn: null, sortColumn: null,
@ -57,11 +134,11 @@ export default class DataFetch {
schema: null, schema: null,
loading: false, loading: false,
loaded: false, loaded: false,
query: null, query: opts.query,
pageNumber: 0, pageNumber: 0,
cursor: null, cursor: null,
cursors: [], cursors: [],
resetKey: Math.random(), resetKey: Math.random().toString(),
error: null, error: null,
}) })
@ -118,7 +195,10 @@ export default class DataFetch {
/** /**
* Gets the default sort column for this datasource * Gets the default sort column for this datasource
*/ */
getDefaultSortColumn(definition, schema) { getDefaultSortColumn(
definition: { primaryDisplay?: string } | null,
schema: Record<string, any>
): string | null {
if (definition?.primaryDisplay && schema[definition.primaryDisplay]) { if (definition?.primaryDisplay && schema[definition.primaryDisplay]) {
return definition.primaryDisplay return definition.primaryDisplay
} else { } else {
@ -130,13 +210,13 @@ export default class DataFetch {
* Fetches a fresh set of data from the server, resetting pagination * Fetches a fresh set of data from the server, resetting pagination
*/ */
async getInitialData() { async getInitialData() {
const { datasource, filter, paginate } = this.options const { filter, paginate } = this.options
// Fetch datasource definition and extract sort properties if configured // Fetch datasource definition and extract sort properties if configured
const definition = await this.getDefinition(datasource) const definition = await this.getDefinition()
// Determine feature flags // Determine feature flags
const features = this.determineFeatureFlags(definition) const features = await this.determineFeatureFlags()
this.features = { this.features = {
supportsSearch: !!features?.supportsSearch, supportsSearch: !!features?.supportsSearch,
supportsSort: !!features?.supportsSort, supportsSort: !!features?.supportsSort,
@ -144,11 +224,11 @@ export default class DataFetch {
} }
// Fetch and enrich schema // Fetch and enrich schema
let schema = this.getSchema(datasource, definition) let schema = this.getSchema(definition)
schema = this.enrichSchema(schema)
if (!schema) { if (!schema) {
return return
} }
schema = this.enrichSchema(schema)
// If an invalid sort column is specified, delete it // If an invalid sort column is specified, delete it
if (this.options.sortColumn && !schema[this.options.sortColumn]) { if (this.options.sortColumn && !schema[this.options.sortColumn]) {
@ -172,20 +252,25 @@ export default class DataFetch {
if ( if (
fieldSchema?.type === FieldType.NUMBER || fieldSchema?.type === FieldType.NUMBER ||
fieldSchema?.type === FieldType.BIGINT || fieldSchema?.type === FieldType.BIGINT ||
fieldSchema?.calculationType ("calculationType" in fieldSchema && fieldSchema?.calculationType)
) { ) {
this.options.sortType = SortType.NUMBER this.options.sortType = SortType.NUMBER
} }
// If no sort order, default to ascending // If no sort order, default to ascending
if (!this.options.sortOrder) { if (!this.options.sortOrder) {
this.options.sortOrder = SortOrder.ASCENDING this.options.sortOrder = SortOrder.ASCENDING
} else {
// Ensure sortOrder matches the enum
this.options.sortOrder =
this.options.sortOrder.toLowerCase() as SortOrder
} }
} }
// Build the query // Build the query
let query = this.options.query let query = this.options.query
if (!query) { if (!query) {
query = buildQuery(filter) query = buildQuery(filter ?? undefined) as TQuery
} }
// Update store // Update store
@ -210,7 +295,7 @@ export default class DataFetch {
info: page.info, info: page.info,
cursors: paginate && page.hasNextPage ? [null, page.cursor] : [null], cursors: paginate && page.hasNextPage ? [null, page.cursor] : [null],
error: page.error, error: page.error,
resetKey: Math.random(), resetKey: Math.random().toString(),
})) }))
} }
@ -238,8 +323,8 @@ export default class DataFetch {
} }
// If we don't support sorting, do a client-side sort // If we don't support sorting, do a client-side sort
if (!this.features.supportsSort && clientSideSorting) { if (!this.features.supportsSort && clientSideSorting && sortType) {
rows = sort(rows, sortColumn, sortOrder, sortType) rows = sort(rows, sortColumn as any, sortOrder, sortType)
} }
// If we don't support pagination, do a client-side limit // If we don't support pagination, do a client-side limit
@ -256,49 +341,28 @@ export default class DataFetch {
} }
} }
/** abstract getData(): Promise<{
* Fetches a single page of data from the remote resource. rows: Row[]
* Must be overridden by a datasource specific child class. info?: any
*/ hasNextPage?: boolean
async getData() { cursor?: any
return { error?: any
rows: [], }>
info: null,
hasNextPage: false,
cursor: null,
}
}
/** /**
* Gets the definition for this datasource. * Gets the definition for this datasource.
* Defaults to fetching a table definition.
* @param datasource
* @return {object} the definition * @return {object} the definition
*/ */
async getDefinition(datasource) { abstract getDefinition(): Promise<TDefinition | null>
if (!datasource?.tableId) {
return null
}
try {
return await this.API.fetchTableDefinition(datasource.tableId)
} catch (error) {
this.store.update(state => ({
...state,
error,
}))
return null
}
}
/** /**
* Gets the schema definition for a datasource. * Gets the schema definition for a datasource.
* Defaults to getting the "schema" property of the definition.
* @param datasource the datasource
* @param definition the datasource definition * @param definition the datasource definition
* @return {object} the schema * @return {object} the schema
*/ */
getSchema(datasource, definition) { getSchema(definition: TDefinition | null): Record<string, any> | undefined {
return definition?.schema return definition?.schema ?? undefined
} }
/** /**
@ -307,53 +371,56 @@ export default class DataFetch {
* @param schema the datasource schema * @param schema the datasource schema
* @return {object} the enriched datasource schema * @return {object} the enriched datasource schema
*/ */
enrichSchema(schema) { private enrichSchema(schema: TableSchema): TableSchema {
if (schema == null) {
return null
}
// Check for any JSON fields so we can add any top level properties // Check for any JSON fields so we can add any top level properties
let jsonAdditions = {} let jsonAdditions: Record<string, { type: string; nestedJSON: true }> = {}
Object.keys(schema).forEach(fieldKey => { for (const fieldKey of Object.keys(schema)) {
const fieldSchema = schema[fieldKey] const fieldSchema = schema[fieldKey]
if (fieldSchema?.type === FieldType.JSON) { if (fieldSchema.type === FieldType.JSON) {
const jsonSchema = convertJSONSchemaToTableSchema(fieldSchema, { const jsonSchema = convertJSONSchemaToTableSchema(fieldSchema, {
squashObjects: true, squashObjects: true,
}) }) as Record<string, { type: string }> | null // TODO: remove when convertJSONSchemaToTableSchema is typed
Object.keys(jsonSchema).forEach(jsonKey => { if (jsonSchema) {
jsonAdditions[`${fieldKey}.${jsonKey}`] = { for (const jsonKey of Object.keys(jsonSchema)) {
type: jsonSchema[jsonKey].type, jsonAdditions[`${fieldKey}.${jsonKey}`] = {
nestedJSON: true, type: jsonSchema[jsonKey].type,
nestedJSON: true,
}
} }
}) }
} }
}) }
schema = { ...schema, ...jsonAdditions }
// Ensure schema is in the correct structure // Ensure schema is in the correct structure
let enrichedSchema = {} let enrichedSchema: TableSchema = {}
Object.entries(schema).forEach(([fieldName, fieldSchema]) => { Object.entries({ ...schema, ...jsonAdditions }).forEach(
if (typeof fieldSchema === "string") { ([fieldName, fieldSchema]) => {
enrichedSchema[fieldName] = { if (typeof fieldSchema === "string") {
type: fieldSchema, enrichedSchema[fieldName] = {
name: fieldName, type: fieldSchema,
} name: fieldName,
} else { }
enrichedSchema[fieldName] = { } else {
...fieldSchema, enrichedSchema[fieldName] = {
name: fieldName, ...fieldSchema,
type: fieldSchema.type as any, // TODO: check type union definition conflicts
name: fieldName,
}
} }
} }
}) )
return enrichedSchema return enrichedSchema
} }
/** /**
* Determine the feature flags for this datasource definition * Determine the feature flags for this datasource
* @param definition
*/ */
determineFeatureFlags(_definition) { async determineFeatureFlags(): Promise<{
supportsPagination: boolean
supportsSearch?: boolean
supportsSort?: boolean
}> {
return { return {
supportsSearch: false, supportsSearch: false,
supportsSort: false, supportsSort: false,
@ -365,12 +432,11 @@ export default class DataFetch {
* Resets the data set and updates options * Resets the data set and updates options
* @param newOptions any new options * @param newOptions any new options
*/ */
async update(newOptions) { async update(newOptions: any) {
// Check if any settings have actually changed // Check if any settings have actually changed
let refresh = false let refresh = false
const entries = Object.entries(newOptions || {}) for (const [key, value] of Object.entries(newOptions || {})) {
for (let [key, value] of entries) { const oldVal = this.options[key as keyof typeof this.options] ?? null
const oldVal = this.options[key] == null ? null : this.options[key]
const newVal = value == null ? null : value const newVal = value == null ? null : value
if (JSON.stringify(newVal) !== JSON.stringify(oldVal)) { if (JSON.stringify(newVal) !== JSON.stringify(oldVal)) {
refresh = true refresh = true
@ -437,7 +503,7 @@ export default class DataFetch {
* @param state the current store state * @param state the current store state
* @return {boolean} whether there is a next page of data or not * @return {boolean} whether there is a next page of data or not
*/ */
hasNextPage(state) { private hasNextPage(state: DataFetchStore<TDefinition, TQuery>): boolean {
return state.cursors[state.pageNumber + 1] != null return state.cursors[state.pageNumber + 1] != null
} }
@ -447,7 +513,7 @@ export default class DataFetch {
* @param state the current store state * @param state the current store state
* @return {boolean} whether there is a previous page of data or not * @return {boolean} whether there is a previous page of data or not
*/ */
hasPrevPage(state) { private hasPrevPage(state: { pageNumber: number }): boolean {
return state.pageNumber > 0 return state.pageNumber > 0
} }

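With the base class now abstract and generic, each datasource type supplies its own getDefinition and getData. A minimal sketch of a conforming subclass, modelled on the signatures above (the class and its types are illustrative, not part of the commit):

import { Row } from "@budibase/types"
import DataFetch from "./DataFetch"

interface StaticDatasource {
  rows: Row[]
}

interface StaticDefinition {
  schema?: Record<string, any> | null
}

// A hypothetical fetch over an in-memory array, implementing the two
// abstract members the base class requires
class StaticFetch extends DataFetch<StaticDatasource, StaticDefinition> {
  async getDefinition() {
    // No remote call needed; an empty schema is valid here
    return { schema: {} }
  }

  async getData() {
    const { datasource } = this.options
    return {
      rows: datasource?.rows || [],
      hasNextPage: false,
      cursor: null,
    }
  }
}
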
View File

@ -1,7 +1,27 @@
import DataFetch from "./DataFetch.js" import { Row } from "@budibase/types"
import DataFetch from "./DataFetch"
export interface FieldDatasource {
tableId: string
fieldType: "attachment" | "array"
value: string[] | Row[]
}
export interface FieldDefinition {
schema?: Record<string, { type: string }> | null
}
function isArrayOfStrings(value: string[] | Row[]): value is string[] {
return Array.isArray(value) && !!value[0] && typeof value[0] !== "object"
}
export default class FieldFetch extends DataFetch<
FieldDatasource,
FieldDefinition
> {
async getDefinition(): Promise<FieldDefinition | null> {
const { datasource } = this.options
export default class FieldFetch extends DataFetch {
async getDefinition(datasource) {
// Field sources have their schema statically defined // Field sources have their schema statically defined
let schema let schema
if (datasource.fieldType === "attachment") { if (datasource.fieldType === "attachment") {
@ -28,8 +48,8 @@ export default class FieldFetch extends DataFetch {
// These sources will be available directly from context // These sources will be available directly from context
const data = datasource?.value || [] const data = datasource?.value || []
let rows let rows: Row[]
if (Array.isArray(data) && data[0] && typeof data[0] !== "object") { if (isArrayOfStrings(data)) {
rows = data.map(value => ({ value })) rows = data.map(value => ({ value }))
} else { } else {
rows = data rows = data

View File

@ -1,9 +1,22 @@
import { get } from "svelte/store" import { get } from "svelte/store"
import DataFetch from "./DataFetch.js" import DataFetch, { DataFetchParams } from "./DataFetch"
import { TableNames } from "../constants" import { TableNames } from "../constants"
export default class GroupUserFetch extends DataFetch { interface GroupUserQuery {
constructor(opts) { groupId: string
emailSearch: string
}
interface GroupUserDatasource {
tableId: TableNames.USERS
}
export default class GroupUserFetch extends DataFetch<
GroupUserDatasource,
{},
GroupUserQuery
> {
constructor(opts: DataFetchParams<GroupUserDatasource, GroupUserQuery>) {
super({ super({
...opts, ...opts,
datasource: { datasource: {
@ -12,7 +25,7 @@ export default class GroupUserFetch extends DataFetch {
}) })
} }
determineFeatureFlags() { async determineFeatureFlags() {
return { return {
supportsSearch: true, supportsSearch: true,
supportsSort: false, supportsSort: false,
@ -28,11 +41,12 @@ export default class GroupUserFetch extends DataFetch {
async getData() { async getData() {
const { query, cursor } = get(this.store) const { query, cursor } = get(this.store)
try { try {
const res = await this.API.getGroupUsers({ const res = await this.API.getGroupUsers({
id: query.groupId, id: query.groupId,
emailSearch: query.emailSearch, emailSearch: query.emailSearch,
bookmark: cursor, bookmark: cursor ?? undefined,
}) })
return { return {

View File

@ -1,8 +1,10 @@
import FieldFetch from "./FieldFetch.js" import FieldFetch from "./FieldFetch"
import { getJSONArrayDatasourceSchema } from "../utils/json" import { getJSONArrayDatasourceSchema } from "../utils/json"
export default class JSONArrayFetch extends FieldFetch { export default class JSONArrayFetch extends FieldFetch {
async getDefinition(datasource) { async getDefinition() {
const { datasource } = this.options
// JSON arrays need their table definitions fetched. // JSON arrays need their table definitions fetched.
// We can then extract their schema as a subset of the table schema. // We can then extract their schema as a subset of the table schema.
try { try {

View File

@ -1,21 +0,0 @@
import DataFetch from "./DataFetch.js"
export default class NestedProviderFetch extends DataFetch {
async getDefinition(datasource) {
// Nested providers should already have exposed their own schema
return {
schema: datasource?.value?.schema,
primaryDisplay: datasource?.value?.primaryDisplay,
}
}
async getData() {
const { datasource } = this.options
// Pull the rows from the existing data provider
return {
rows: datasource?.value?.rows || [],
hasNextPage: false,
cursor: null,
}
}
}

View File

@ -0,0 +1,39 @@
import { Row, TableSchema } from "@budibase/types"
import DataFetch from "./DataFetch"
interface NestedProviderDatasource {
value?: {
schema: TableSchema
primaryDisplay: string
rows: Row[]
}
}
interface NestedProviderDefinition {
schema?: TableSchema
primaryDisplay?: string
}
export default class NestedProviderFetch extends DataFetch<
NestedProviderDatasource,
NestedProviderDefinition
> {
async getDefinition() {
const { datasource } = this.options
// Nested providers should already have exposed their own schema
return {
schema: datasource?.value?.schema,
primaryDisplay: datasource?.value?.primaryDisplay,
}
}
async getData() {
const { datasource } = this.options
// Pull the rows from the existing data provider
return {
rows: datasource?.value?.rows || [],
hasNextPage: false,
cursor: null,
}
}
}

View File

@ -1,11 +1,13 @@
import FieldFetch from "./FieldFetch.js" import FieldFetch from "./FieldFetch"
import { import {
getJSONArrayDatasourceSchema, getJSONArrayDatasourceSchema,
generateQueryArraySchemas, generateQueryArraySchemas,
} from "../utils/json" } from "../utils/json"
export default class QueryArrayFetch extends FieldFetch { export default class QueryArrayFetch extends FieldFetch {
async getDefinition(datasource) { async getDefinition() {
const { datasource } = this.options
if (!datasource?.tableId) { if (!datasource?.tableId) {
return null return null
} }
@ -14,10 +16,14 @@ export default class QueryArrayFetch extends FieldFetch {
try { try {
const table = await this.API.fetchQueryDefinition(datasource.tableId) const table = await this.API.fetchQueryDefinition(datasource.tableId)
const schema = generateQueryArraySchemas( const schema = generateQueryArraySchemas(
table?.schema, table.schema,
table?.nestedSchemaFields table.nestedSchemaFields
) )
return { schema: getJSONArrayDatasourceSchema(schema, datasource) } const result = {
schema: getJSONArrayDatasourceSchema(schema, datasource),
}
return result
} catch (error) { } catch (error) {
return null return null
} }

View File

@ -1,9 +1,24 @@
import DataFetch from "./DataFetch.js" import DataFetch from "./DataFetch"
import { Helpers } from "@budibase/bbui" import { Helpers } from "@budibase/bbui"
import { ExecuteQueryRequest, Query } from "@budibase/types"
import { get } from "svelte/store" import { get } from "svelte/store"
export default class QueryFetch extends DataFetch { interface QueryDatasource {
determineFeatureFlags(definition) { _id: string
fields: Record<string, any> & {
pagination?: {
type: string
location: string
pageParam: string
}
}
queryParams?: Record<string, string>
parameters: { name: string; default: string }[]
}
export default class QueryFetch extends DataFetch<QueryDatasource, Query> {
async determineFeatureFlags() {
const definition = await this.getDefinition()
const supportsPagination = const supportsPagination =
!!definition?.fields?.pagination?.type && !!definition?.fields?.pagination?.type &&
!!definition?.fields?.pagination?.location && !!definition?.fields?.pagination?.location &&
@ -11,7 +26,9 @@ export default class QueryFetch extends DataFetch {
return { supportsPagination } return { supportsPagination }
} }
async getDefinition(datasource) { async getDefinition() {
const { datasource } = this.options
if (!datasource?._id) { if (!datasource?._id) {
return null return null
} }
@ -40,17 +57,17 @@ export default class QueryFetch extends DataFetch {
const type = definition?.fields?.pagination?.type const type = definition?.fields?.pagination?.type
// Set the default query params // Set the default query params
let parameters = Helpers.cloneDeep(datasource?.queryParams || {}) const parameters = Helpers.cloneDeep(datasource.queryParams || {})
for (let param of datasource?.parameters || {}) { for (const param of datasource?.parameters || []) {
if (!parameters[param.name]) { if (!parameters[param.name]) {
parameters[param.name] = param.default parameters[param.name] = param.default
} }
} }
// Add pagination to query if supported // Add pagination to query if supported
let queryPayload = { parameters } const queryPayload: ExecuteQueryRequest = { parameters }
if (paginate && supportsPagination) { if (paginate && supportsPagination) {
const requestCursor = type === "page" ? parseInt(cursor || 1) : cursor const requestCursor = type === "page" ? parseInt(cursor || "1") : cursor
queryPayload.pagination = { page: requestCursor, limit } queryPayload.pagination = { page: requestCursor, limit }
} }
@ -65,7 +82,7 @@ export default class QueryFetch extends DataFetch {
if (paginate && supportsPagination) { if (paginate && supportsPagination) {
if (type === "page") { if (type === "page") {
// For "page number" pagination, increment the existing page number // For "page number" pagination, increment the existing page number
nextCursor = queryPayload.pagination.page + 1 nextCursor = queryPayload.pagination!.page! + 1
hasNextPage = data?.length === limit && limit > 0 hasNextPage = data?.length === limit && limit > 0
} else { } else {
// For "cursor" pagination, the cursor should be in the response // For "cursor" pagination, the cursor should be in the response

View File

@ -1,20 +0,0 @@
import DataFetch from "./DataFetch.js"
export default class RelationshipFetch extends DataFetch {
async getData() {
const { datasource } = this.options
if (!datasource?.rowId || !datasource?.rowTableId) {
return { rows: [] }
}
try {
const res = await this.API.fetchRelationshipData(
datasource.rowTableId,
datasource.rowId,
datasource.fieldName
)
return { rows: res }
} catch (error) {
return { rows: [] }
}
}
}

View File

@ -0,0 +1,48 @@
import { Table } from "@budibase/types"
import DataFetch from "./DataFetch"
interface RelationshipDatasource {
tableId: string
rowId: string
rowTableId: string
fieldName: string
}
export default class RelationshipFetch extends DataFetch<
RelationshipDatasource,
Table
> {
async getDefinition() {
const { datasource } = this.options
if (!datasource?.tableId) {
return null
}
try {
return await this.API.fetchTableDefinition(datasource.tableId)
} catch (error: any) {
this.store.update(state => ({
...state,
error,
}))
return null
}
}
async getData() {
const { datasource } = this.options
if (!datasource?.rowId || !datasource?.rowTableId) {
return { rows: [] }
}
try {
const res = await this.API.fetchRelationshipData(
datasource.rowTableId,
datasource.rowId,
datasource.fieldName
)
return { rows: res }
} catch (error) {
return { rows: [] }
}
}
}

View File

@ -1,9 +1,9 @@
import { get } from "svelte/store" import { get } from "svelte/store"
import DataFetch from "./DataFetch.js" import DataFetch from "./DataFetch"
import { SortOrder } from "@budibase/types" import { SortOrder, Table, UITable } from "@budibase/types"
export default class TableFetch extends DataFetch { export default class TableFetch extends DataFetch<UITable, Table> {
determineFeatureFlags() { async determineFeatureFlags() {
return { return {
supportsSearch: true, supportsSearch: true,
supportsSort: true, supportsSort: true,
@ -11,6 +11,23 @@ export default class TableFetch extends DataFetch {
} }
} }
async getDefinition() {
const { datasource } = this.options
if (!datasource?.tableId) {
return null
}
try {
return await this.API.fetchTableDefinition(datasource.tableId)
} catch (error: any) {
this.store.update(state => ({
...state,
error,
}))
return null
}
}
async getData() { async getData() {
const { datasource, limit, sortColumn, sortOrder, sortType, paginate } = const { datasource, limit, sortColumn, sortOrder, sortType, paginate } =
this.options this.options
@ -23,7 +40,7 @@ export default class TableFetch extends DataFetch {
query, query,
limit, limit,
sort: sortColumn, sort: sortColumn,
sortOrder: sortOrder?.toLowerCase() ?? SortOrder.ASCENDING, sortOrder: sortOrder ?? SortOrder.ASCENDING,
sortType, sortType,
paginate, paginate,
bookmark: cursor, bookmark: cursor,

View File

@ -1,10 +1,28 @@
import { get } from "svelte/store" import { get } from "svelte/store"
import DataFetch from "./DataFetch.js" import DataFetch, { DataFetchParams } from "./DataFetch"
import { TableNames } from "../constants" import { TableNames } from "../constants"
import { utils } from "@budibase/shared-core" import { utils } from "@budibase/shared-core"
import {
BasicOperator,
SearchFilters,
SearchUsersRequest,
} from "@budibase/types"
export default class UserFetch extends DataFetch { interface UserFetchQuery {
constructor(opts) { appId: string
paginated: boolean
}
interface UserDatasource {
tableId: string
}
export default class UserFetch extends DataFetch<
UserDatasource,
{},
UserFetchQuery
> {
constructor(opts: DataFetchParams<UserDatasource, UserFetchQuery>) {
super({ super({
...opts, ...opts,
datasource: { datasource: {
@ -13,7 +31,7 @@ export default class UserFetch extends DataFetch {
}) })
} }
determineFeatureFlags() { async determineFeatureFlags() {
return { return {
supportsSearch: true, supportsSearch: true,
supportsSort: false, supportsSort: false,
@ -22,9 +40,7 @@ export default class UserFetch extends DataFetch {
} }
async getDefinition() { async getDefinition() {
return { return { schema: {} }
schema: {},
}
} }
async getData() { async getData() {
@ -32,15 +48,16 @@ export default class UserFetch extends DataFetch {
const { cursor, query } = get(this.store) const { cursor, query } = get(this.store)
// Convert old format to new one - we now allow use of the lucene format // Convert old format to new one - we now allow use of the lucene format
const { appId, paginated, ...rest } = query || {} const { appId, paginated, ...rest } = query
const finalQuery = utils.isSupportedUserSearch(rest)
? query const finalQuery: SearchFilters = utils.isSupportedUserSearch(rest)
: { string: { email: null } } ? rest
: { [BasicOperator.EMPTY]: { email: null } }
try { try {
const opts = { const opts: SearchUsersRequest = {
bookmark: cursor, bookmark: cursor ?? undefined,
query: finalQuery, query: finalQuery ?? undefined,
appId: appId, appId: appId,
paginate: paginated || paginate, paginate: paginated || paginate,
limit, limit,

View File

@ -1,23 +0,0 @@
import DataFetch from "./DataFetch.js"
export default class ViewFetch extends DataFetch {
getSchema(datasource, definition) {
return definition?.views?.[datasource.name]?.schema
}
async getData() {
const { datasource } = this.options
try {
const res = await this.API.fetchViewData(datasource.name, {
calculation: datasource.calculation,
field: datasource.field,
groupBy: datasource.groupBy,
tableId: datasource.tableId,
})
return { rows: res || [] }
} catch (error) {
console.error(error)
return { rows: [] }
}
}
}

View File

@ -0,0 +1,44 @@
import { Table, View } from "@budibase/types"
import DataFetch from "./DataFetch"
type ViewV1 = View & { name: string }
export default class ViewFetch extends DataFetch<ViewV1, Table> {
async getDefinition() {
const { datasource } = this.options
if (!datasource?.tableId) {
return null
}
try {
return await this.API.fetchTableDefinition(datasource.tableId)
} catch (error: any) {
this.store.update(state => ({
...state,
error,
}))
return null
}
}
getSchema(definition: Table) {
const { datasource } = this.options
return definition?.views?.[datasource.name]?.schema
}
async getData() {
const { datasource } = this.options
try {
const res = await this.API.fetchViewData(datasource.name, {
calculation: datasource.calculation,
field: datasource.field,
groupBy: datasource.groupBy,
tableId: datasource.tableId,
})
return { rows: res || [] }
} catch (error) {
console.error(error, { datasource })
return { rows: [] }
}
}
}

View File

@ -1,9 +1,10 @@
import { ViewV2Type } from "@budibase/types" import { SortOrder, UIView, ViewV2, ViewV2Type } from "@budibase/types"
import DataFetch from "./DataFetch.js" import DataFetch from "./DataFetch"
import { get } from "svelte/store" import { get } from "svelte/store"
import { helpers } from "@budibase/shared-core"
export default class ViewV2Fetch extends DataFetch { export default class ViewV2Fetch extends DataFetch<UIView, ViewV2> {
determineFeatureFlags() { async determineFeatureFlags() {
return { return {
supportsSearch: true, supportsSearch: true,
supportsSort: true, supportsSort: true,
@ -11,18 +12,13 @@ export default class ViewV2Fetch extends DataFetch {
} }
} }
getSchema(datasource, definition) { async getDefinition() {
return definition?.schema const { datasource } = this.options
}
async getDefinition(datasource) {
if (!datasource?.id) {
return null
}
try { try {
const res = await this.API.viewV2.fetchDefinition(datasource.id) const res = await this.API.viewV2.fetchDefinition(datasource.id)
return res?.data return res?.data
} catch (error) { } catch (error: any) {
this.store.update(state => ({ this.store.update(state => ({
...state, ...state,
error, error,
@ -42,8 +38,10 @@ export default class ViewV2Fetch extends DataFetch {
// If this is a calculation view and we have no calculations, return nothing // If this is a calculation view and we have no calculations, return nothing
if ( if (
definition.type === ViewV2Type.CALCULATION && definition?.type === ViewV2Type.CALCULATION &&
!Object.values(definition.schema || {}).some(x => x.calculationType) !Object.values(definition.schema || {}).some(
helpers.views.isCalculationField
)
) { ) {
return { return {
rows: [], rows: [],
@ -56,25 +54,41 @@ export default class ViewV2Fetch extends DataFetch {
// If sort/filter params are not defined, update options to store the // If sort/filter params are not defined, update options to store the
// params built in to this view. This ensures that we can accurately // params built in to this view. This ensures that we can accurately
// compare old and new params and skip a redundant API call. // compare old and new params and skip a redundant API call.
if (!sortColumn && definition.sort?.field) { if (!sortColumn && definition?.sort?.field) {
this.options.sortColumn = definition.sort.field this.options.sortColumn = definition.sort.field
this.options.sortOrder = definition.sort.order this.options.sortOrder = definition.sort.order || SortOrder.ASCENDING
} }
try { try {
const res = await this.API.viewV2.fetch(datasource.id, { const request = {
...(query ? { query } : {}), query,
paginate, paginate,
limit, limit,
bookmark: cursor, bookmark: cursor,
sort: sortColumn, sort: sortColumn,
sortOrder: sortOrder?.toLowerCase(), sortOrder: sortOrder,
sortType, sortType,
}) }
return { if (paginate) {
rows: res?.rows || [], const res = await this.API.viewV2.fetch(datasource.id, {
hasNextPage: res?.hasNextPage || false, ...request,
cursor: res?.bookmark || null, paginate,
})
return {
rows: res?.rows || [],
hasNextPage: res?.hasNextPage || false,
cursor: res?.bookmark || null,
}
} else {
const res = await this.API.viewV2.fetch(datasource.id, {
...request,
paginate,
})
return {
rows: res?.rows || [],
hasNextPage: false,
cursor: null,
}
} }
} catch (error) { } catch (error) {
return { return {

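Splitting the fetch call on a literal paginate value reads like a workaround for response-type narrowing: if API.viewV2.fetch is overloaded on paginate — an assumption, since the client types are not shown in this diff — a plain boolean variable would not select either overload. A self-contained sketch of that pattern:

interface PagedResponse { rows: string[]; bookmark: string | null; hasNextPage: boolean }
interface PlainResponse { rows: string[] }

// Hypothetical overloads keyed on the paginate literal
function fetchRows(opts: { paginate: true }): Promise<PagedResponse>
function fetchRows(opts: { paginate: false }): Promise<PlainResponse>
async function fetchRows(opts: { paginate: boolean }): Promise<PagedResponse | PlainResponse> {
  return opts.paginate ? { rows: [], bookmark: null, hasNextPage: false } : { rows: [] }
}

async function demo(paginate: boolean) {
  if (paginate) {
    const res = await fetchRows({ paginate: true }) // typed as PagedResponse
    console.log(res.hasNextPage)
  } else {
    const res = await fetchRows({ paginate: false }) // typed as PlainResponse
    console.log(res.rows.length)
  }
}
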
View File

@ -1,57 +0,0 @@
import TableFetch from "./TableFetch.js"
import ViewFetch from "./ViewFetch.js"
import ViewV2Fetch from "./ViewV2Fetch.js"
import QueryFetch from "./QueryFetch.js"
import RelationshipFetch from "./RelationshipFetch.js"
import NestedProviderFetch from "./NestedProviderFetch.js"
import FieldFetch from "./FieldFetch.js"
import JSONArrayFetch from "./JSONArrayFetch.js"
import UserFetch from "./UserFetch.js"
import GroupUserFetch from "./GroupUserFetch.js"
import CustomFetch from "./CustomFetch.js"
import QueryArrayFetch from "./QueryArrayFetch.js"
const DataFetchMap = {
table: TableFetch,
view: ViewFetch,
viewV2: ViewV2Fetch,
query: QueryFetch,
link: RelationshipFetch,
user: UserFetch,
groupUser: GroupUserFetch,
custom: CustomFetch,
// Client specific datasource types
provider: NestedProviderFetch,
field: FieldFetch,
jsonarray: JSONArrayFetch,
queryarray: QueryArrayFetch,
}
// Constructs a new fetch model for a certain datasource
export const fetchData = ({ API, datasource, options }) => {
const Fetch = DataFetchMap[datasource?.type] || TableFetch
return new Fetch({ API, datasource, ...options })
}
// Creates an empty fetch instance with no datasource configured, so no data
// will initially be loaded
const createEmptyFetchInstance = ({ API, datasource }) => {
const handler = DataFetchMap[datasource?.type]
if (!handler) {
return null
}
return new handler({ API })
}
// Fetches the definition of any type of datasource
export const getDatasourceDefinition = async ({ API, datasource }) => {
const instance = createEmptyFetchInstance({ API, datasource })
return await instance?.getDefinition(datasource)
}
// Fetches the schema of any type of datasource
export const getDatasourceSchema = ({ API, datasource, definition }) => {
const instance = createEmptyFetchInstance({ API, datasource })
return instance?.getSchema(datasource, definition)
}

View File

@ -0,0 +1,91 @@
import TableFetch from "./TableFetch.js"
import ViewFetch from "./ViewFetch.js"
import ViewV2Fetch from "./ViewV2Fetch.js"
import QueryFetch from "./QueryFetch"
import RelationshipFetch from "./RelationshipFetch"
import NestedProviderFetch from "./NestedProviderFetch"
import FieldFetch from "./FieldFetch"
import JSONArrayFetch from "./JSONArrayFetch"
import UserFetch from "./UserFetch.js"
import GroupUserFetch from "./GroupUserFetch"
import CustomFetch from "./CustomFetch"
import QueryArrayFetch from "./QueryArrayFetch.js"
import { APIClient } from "../api/types.js"
const DataFetchMap = {
table: TableFetch,
view: ViewFetch,
viewV2: ViewV2Fetch,
query: QueryFetch,
link: RelationshipFetch,
user: UserFetch,
groupUser: GroupUserFetch,
custom: CustomFetch,
// Client specific datasource types
provider: NestedProviderFetch,
field: FieldFetch,
jsonarray: JSONArrayFetch,
queryarray: QueryArrayFetch,
}
// Constructs a new fetch model for a certain datasource
export const fetchData = ({ API, datasource, options }: any) => {
const Fetch =
DataFetchMap[datasource?.type as keyof typeof DataFetchMap] || TableFetch
return new Fetch({ API, datasource, ...options })
}
// Creates an empty fetch instance with no datasource configured, so no data
// will initially be loaded
const createEmptyFetchInstance = <
TDatasource extends {
type: keyof typeof DataFetchMap
}
>({
API,
datasource,
}: {
API: APIClient
datasource: TDatasource
}) => {
const handler = DataFetchMap[datasource?.type as keyof typeof DataFetchMap]
if (!handler) {
return null
}
return new handler({ API, datasource: null as any, query: null as any })
}
// Fetches the definition of any type of datasource
export const getDatasourceDefinition = async <
TDatasource extends {
type: keyof typeof DataFetchMap
}
>({
API,
datasource,
}: {
API: APIClient
datasource: TDatasource
}) => {
const instance = createEmptyFetchInstance({ API, datasource })
return await instance?.getDefinition()
}
// Fetches the schema of any type of datasource
export const getDatasourceSchema = <
TDatasource extends {
type: keyof typeof DataFetchMap
}
>({
API,
datasource,
definition,
}: {
API: APIClient
datasource: TDatasource
definition?: any
}) => {
const instance = createEmptyFetchInstance({ API, datasource })
return instance?.getSchema(definition)
}

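For reference, a usage sketch of the fetchData factory above (the datasource values, option names and API stand-in are illustrative):

import { fetchData } from "./index"

declare const API: any // stand-in for an APIClient instance

const fetch = fetchData({
  API,
  datasource: { type: "table", tableId: "ta_example" }, // placeholder ID
  options: { limit: 25, paginate: true },
})

// Each instance exposes getInitialData() plus Svelte stores carrying
// rows, loading state and pagination cursors
fetch.getInitialData()
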
View File

@ -0,0 +1,23 @@
import { JsonFieldMetadata, QuerySchema } from "@budibase/types"
type Schema = Record<string, QuerySchema | string>
declare module "./json" {
export const getJSONArrayDatasourceSchema: (
tableSchema: Schema,
datasource: any
) => Record<string, { type: string; name: string; prefixKeys: string }>
export const generateQueryArraySchemas: (
schema: Schema,
nestedSchemaFields?: Record<string, Schema>
) => Schema
export const convertJSONSchemaToTableSchema: (
jsonSchema: JsonFieldMetadata,
options: {
squashObjects?: boolean
prefixKeys?: string
}
) => Record<string, { type: string; name: string; prefixKeys: string }>
}

View File

@ -50,10 +50,6 @@
"license": "GPL-3.0", "license": "GPL-3.0",
"dependencies": { "dependencies": {
"@apidevtools/swagger-parser": "10.0.3", "@apidevtools/swagger-parser": "10.0.3",
"@aws-sdk/client-dynamodb": "3.709.0",
"@aws-sdk/client-s3": "3.709.0",
"@aws-sdk/lib-dynamodb": "3.709.0",
"@aws-sdk/s3-request-presigner": "3.709.0",
"@azure/msal-node": "^2.5.1", "@azure/msal-node": "^2.5.1",
"@budibase/backend-core": "*", "@budibase/backend-core": "*",
"@budibase/client": "*", "@budibase/client": "*",
@ -74,6 +70,7 @@
"airtable": "0.12.2", "airtable": "0.12.2",
"arangojs": "7.2.0", "arangojs": "7.2.0",
"archiver": "7.0.1", "archiver": "7.0.1",
"aws-sdk": "2.1692.0",
"bcrypt": "5.1.0", "bcrypt": "5.1.0",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
"bson": "^6.9.0", "bson": "^6.9.0",

View File

@ -230,7 +230,7 @@ export async function fetchAppPackage(
const license = await licensing.cache.getCachedLicense() const license = await licensing.cache.getCachedLicense()
// Enrich plugin URLs // Enrich plugin URLs
application.usedPlugins = await objectStore.enrichPluginURLs( application.usedPlugins = objectStore.enrichPluginURLs(
application.usedPlugins application.usedPlugins
) )

View File

@ -355,7 +355,7 @@ async function execute(
ExecuteQueryRequest, ExecuteQueryRequest,
ExecuteV2QueryResponse | ExecuteV1QueryResponse ExecuteV2QueryResponse | ExecuteV1QueryResponse
>, >,
opts: any = { rowsOnly: false, isAutomation: false } opts = { rowsOnly: false, isAutomation: false }
) { ) {
const db = context.getAppDB() const db = context.getAppDB()
@ -416,7 +416,7 @@ export async function executeV1(
export async function executeV2( export async function executeV2(
ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse> ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
) { ) {
return execute(ctx, { rowsOnly: false }) return execute(ctx, { rowsOnly: false, isAutomation: false })
} }
export async function executeV2AsAutomation( export async function executeV2AsAutomation(

View File

@ -1,16 +1,16 @@
import { import {
UserCtx, UserCtx,
ViewV2, ViewV2,
SearchRowResponse,
SearchViewRowRequest, SearchViewRowRequest,
RequiredKeys, RequiredKeys,
RowSearchParams, RowSearchParams,
PaginatedSearchRowResponse,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
export async function searchView( export async function searchView(
ctx: UserCtx<SearchViewRowRequest, SearchRowResponse> ctx: UserCtx<SearchViewRowRequest, PaginatedSearchRowResponse>
) { ) {
const { viewId } = ctx.params const { viewId } = ctx.params
@ -49,7 +49,13 @@ export async function searchView(
user: sdk.users.getUserContextBindings(ctx.user), user: sdk.users.getUserContextBindings(ctx.user),
}) })
result.rows.forEach(r => (r._viewId = view.id)) result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result
ctx.body = {
rows: result.rows,
bookmark: result.bookmark,
hasNextPage: result.hasNextPage,
totalRows: result.totalRows,
}
} }
function getSortOptions(request: SearchViewRowRequest, view: ViewV2) { function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {

View File

@ -18,8 +18,7 @@ import {
objectStore, objectStore,
utils, utils,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner" import AWS from "aws-sdk"
import { PutObjectCommand, S3 } from "@aws-sdk/client-s3"
import fs from "fs" import fs from "fs"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
@ -129,9 +128,9 @@ export const uploadFile = async function (
return { return {
size: file.size, size: file.size,
name: file.name, name: file.name,
url: await objectStore.getAppFileUrl(s3Key), url: objectStore.getAppFileUrl(s3Key),
extension, extension,
key: response.Key!, key: response.Key,
} }
}) })
) )
@ -211,11 +210,11 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
usedPlugins: plugins, usedPlugins: plugins,
favicon: favicon:
branding.faviconUrl !== "" branding.faviconUrl !== ""
? await objectStore.getGlobalFileUrl("settings", "faviconUrl") ? objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "", : "",
logo: logo:
config?.logoUrl !== "" config?.logoUrl !== ""
? await objectStore.getGlobalFileUrl("settings", "logoUrl") ? objectStore.getGlobalFileUrl("settings", "logoUrl")
: "", : "",
appMigrating: needMigrations, appMigrating: needMigrations,
nonce: ctx.state.nonce, nonce: ctx.state.nonce,
@ -244,7 +243,7 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
metaDescription: branding?.metaDescription || "", metaDescription: branding?.metaDescription || "",
favicon: favicon:
branding.faviconUrl !== "" branding.faviconUrl !== ""
? await objectStore.getGlobalFileUrl("settings", "faviconUrl") ? objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "", : "",
}) })
@ -335,17 +334,16 @@ export const getSignedUploadURL = async function (
ctx.throw(400, "bucket and key values are required") ctx.throw(400, "bucket and key values are required")
} }
try { try {
const s3 = new S3({ const s3 = new AWS.S3({
region: awsRegion, region: awsRegion,
endpoint: datasource?.config?.endpoint || undefined, endpoint: datasource?.config?.endpoint || undefined,
accessKeyId: datasource?.config?.accessKeyId as string,
credentials: { secretAccessKey: datasource?.config?.secretAccessKey as string,
accessKeyId: datasource?.config?.accessKeyId as string, apiVersion: "2006-03-01",
secretAccessKey: datasource?.config?.secretAccessKey as string, signatureVersion: "v4",
},
}) })
const params = { Bucket: bucket, Key: key } const params = { Bucket: bucket, Key: key }
signedUrl = await getSignedUrl(s3, new PutObjectCommand(params)) signedUrl = s3.getSignedUrl("putObject", params)
if (datasource?.config?.endpoint) { if (datasource?.config?.endpoint) {
publicUrl = `${datasource.config.endpoint}/${bucket}/${key}` publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
} else { } else {

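The presigning above reverts to the v2 SDK call shape. As a standalone sketch (region, bucket and key are placeholders):

import AWS from "aws-sdk"

const s3 = new AWS.S3({
  region: "eu-west-1", // placeholder
  apiVersion: "2006-03-01",
  signatureVersion: "v4",
})

// In the v2 SDK, getSignedUrl with resolved credentials returns the
// URL string synchronously; there is also a callback form
const signedUrl = s3.getSignedUrl("putObject", {
  Bucket: "my-bucket", // placeholder
  Key: "path/to/file", // placeholder
  Expires: 900, // link lifetime in seconds (optional)
})
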
View File

@ -1,10 +1,12 @@
// Directly mock the AWS SDK // Directly mock the AWS SDK
jest.mock("@aws-sdk/s3-request-presigner", () => ({ jest.mock("aws-sdk", () => ({
getSignedUrl: jest.fn(() => { S3: jest.fn(() => ({
return `http://example.com` getSignedUrl: jest.fn(
}), (operation, params) => `http://example.com/${params.Bucket}/${params.Key}`
),
upload: jest.fn(() => ({ Contents: {} })),
})),
})) }))
jest.mock("@aws-sdk/client-s3")
import { Datasource, SourceName } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import { setEnv } from "../../../environment" import { setEnv } from "../../../environment"
@ -75,10 +77,7 @@ describe("/static", () => {
type: "datasource", type: "datasource",
name: "Test", name: "Test",
source: SourceName.S3, source: SourceName.S3,
config: { config: {},
accessKeyId: "bb",
secretAccessKey: "bb",
},
}, },
}) })
}) })
@ -92,7 +91,7 @@ describe("/static", () => {
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.signedUrl).toEqual("http://example.com") expect(res.body.signedUrl).toEqual("http://example.com/foo/bar")
expect(res.body.publicUrl).toEqual( expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}` `https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
) )

View File

@ -154,12 +154,11 @@ describe("test the create row action", () => {
expect(result.steps[1].outputs.row.file_attachment[0]).toHaveProperty("key") expect(result.steps[1].outputs.row.file_attachment[0]).toHaveProperty("key")
let s3Key = result.steps[1].outputs.row.file_attachment[0].key let s3Key = result.steps[1].outputs.row.file_attachment[0].key
const client = objectStore.ObjectStore() const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const objectData = await client.headObject({ const objectData = await client
Bucket: objectStore.ObjectStoreBuckets.APPS, .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
Key: s3Key, .promise()
})
expect(objectData).toBeDefined() expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0) expect(objectData.ContentLength).toBeGreaterThan(0)
@ -230,12 +229,11 @@ describe("test the create row action", () => {
) )
let s3Key = result.steps[1].outputs.row.single_file_attachment.key let s3Key = result.steps[1].outputs.row.single_file_attachment.key
const client = objectStore.ObjectStore() const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const objectData = await client.headObject({ const objectData = await client
Bucket: objectStore.ObjectStoreBuckets.APPS, .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
Key: s3Key, .promise()
})
expect(objectData).toBeDefined() expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0) expect(objectData.ContentLength).toBeGreaterThan(0)

View File

@ -7,15 +7,9 @@ import {
ConnectionInfo, ConnectionInfo,
 } from "@budibase/types"
-import {
-  DynamoDBDocument,
-  PutCommandInput,
-  GetCommandInput,
-  UpdateCommandInput,
-  DeleteCommandInput,
-} from "@aws-sdk/lib-dynamodb"
-import { DynamoDB } from "@aws-sdk/client-dynamodb"
+import AWS from "aws-sdk"
 import { AWS_REGION } from "../constants"
+import { DocumentClient } from "aws-sdk/clients/dynamodb"

 interface DynamoDBConfig {
   region: string
@@ -157,7 +151,7 @@ class DynamoDBIntegration implements IntegrationBase {
       region: config.region || AWS_REGION,
       endpoint: config.endpoint || undefined,
     }
-    this.client = DynamoDBDocument.from(new DynamoDB(this.config))
+    this.client = new AWS.DynamoDB.DocumentClient(this.config)
   }

   async testConnection() {
@@ -165,8 +159,8 @@ class DynamoDBIntegration implements IntegrationBase {
       connected: false,
     }
     try {
-      const scanRes = await new DynamoDB(this.config).listTables()
-      response.connected = !!scanRes.$metadata
+      const scanRes = await new AWS.DynamoDB(this.config).listTables().promise()
+      response.connected = !!scanRes.$response
     } catch (e: any) {
       response.error = e.message as string
     }
@@ -175,13 +169,13 @@ class DynamoDBIntegration implements IntegrationBase {
   async create(query: {
     table: string
-    json: Omit<PutCommandInput, "TableName">
+    json: Omit<DocumentClient.PutItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.put(params)
+    return this.client.put(params).promise()
   }

   async read(query: { table: string; json: object; index: null | string }) {
@@ -190,7 +184,7 @@ class DynamoDBIntegration implements IntegrationBase {
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
-    const response = await this.client.query(params)
+    const response = await this.client.query(params).promise()
     if (response.Items) {
       return response.Items
     }
@@ -203,7 +197,7 @@ class DynamoDBIntegration implements IntegrationBase {
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
-    const response = await this.client.scan(params)
+    const response = await this.client.scan(params).promise()
     if (response.Items) {
       return response.Items
     }
@@ -214,40 +208,40 @@ class DynamoDBIntegration implements IntegrationBase {
     const params = {
       TableName: query.table,
     }
-    return new DynamoDB(this.config).describeTable(params)
+    return new AWS.DynamoDB(this.config).describeTable(params).promise()
   }

   async get(query: {
     table: string
-    json: Omit<GetCommandInput, "TableName">
+    json: Omit<DocumentClient.GetItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.get(params)
+    return this.client.get(params).promise()
   }

   async update(query: {
     table: string
-    json: Omit<UpdateCommandInput, "TableName">
+    json: Omit<DocumentClient.UpdateItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.update(params)
+    return this.client.update(params).promise()
   }

   async delete(query: {
     table: string
-    json: Omit<DeleteCommandInput, "TableName">
+    json: Omit<DocumentClient.DeleteItemInput, "TableName">
  }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.delete(params)
+    return this.client.delete(params).promise()
   }
 }
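
Note: the change above swaps the modular v3 client (`DynamoDBDocument` and the `*CommandInput` types) for v2's `AWS.DynamoDB.DocumentClient`, where every call returns a request object that must be resolved with `.promise()`. A minimal sketch of the v2 calling convention, assuming aws-sdk v2 is installed; the table name and key are illustrative, not taken from the codebase:

import AWS from "aws-sdk"

async function getItemExample() {
  const client = new AWS.DynamoDB.DocumentClient({ region: "us-east-1" })
  // v2 returns an AWS.Request; .promise() converts it into a Promise
  const res = await client
    .get({ TableName: "example-table", Key: { id: "123" } })
    .promise()
  return res.Item
}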

View File

@@ -7,9 +7,8 @@ import {
   ConnectionInfo,
 } from "@budibase/types"
-import { S3 } from "@aws-sdk/client-s3"
+import AWS from "aws-sdk"
 import csv from "csvtojson"
-import stream from "stream"

 interface S3Config {
   region: string
@@ -168,7 +167,7 @@ class S3Integration implements IntegrationBase {
       delete this.config.endpoint
     }

-    this.client = new S3(this.config)
+    this.client = new AWS.S3(this.config)
   }

   async testConnection() {
@@ -176,7 +175,7 @@ class S3Integration implements IntegrationBase {
       connected: false,
     }
     try {
-      await this.client.listBuckets()
+      await this.client.listBuckets().promise()
       response.connected = true
     } catch (e: any) {
       response.error = e.message as string
@@ -210,7 +209,7 @@ class S3Integration implements IntegrationBase {
         LocationConstraint: query.location,
       }
     }
-    return await this.client.createBucket(params)
+    return await this.client.createBucket(params).promise()
   }

   async read(query: {
@@ -221,39 +220,37 @@ class S3Integration implements IntegrationBase {
     maxKeys: number
     prefix: string
   }) {
-    const response = await this.client.listObjects({
-      Bucket: query.bucket,
-      Delimiter: query.delimiter,
-      Marker: query.marker,
-      MaxKeys: query.maxKeys,
-      Prefix: query.prefix,
-    })
+    const response = await this.client
+      .listObjects({
+        Bucket: query.bucket,
+        Delimiter: query.delimiter,
+        Marker: query.marker,
+        MaxKeys: query.maxKeys,
+        Prefix: query.prefix,
+      })
+      .promise()
     return response.Contents
   }

   async readCsv(query: { bucket: string; key: string }) {
-    const response = await this.client.getObject({
-      Bucket: query.bucket,
-      Key: query.key,
-    })
-
-    const fileStream = response.Body?.transformToWebStream()
-    if (!fileStream || !(fileStream instanceof stream.Readable)) {
-      throw new Error("Unable to retrieve CSV - invalid stream")
-    }
+    const stream = this.client
+      .getObject({
+        Bucket: query.bucket,
+        Key: query.key,
+      })
+      .createReadStream()

     let csvError = false
     return new Promise((resolve, reject) => {
-      fileStream.on("error", (err: Error) => {
+      stream.on("error", (err: Error) => {
         reject(err)
       })
       const response = csv()
-        .fromStream(fileStream)
+        .fromStream(stream)
         .on("error", () => {
           csvError = true
         })
-      fileStream.on("finish", () => {
+      stream.on("finish", () => {
         resolve(response)
       })
     }).catch(err => {
@@ -266,10 +263,12 @@ class S3Integration implements IntegrationBase {
   }

   async delete(query: { bucket: string; delete: string }) {
-    return await this.client.deleteObjects({
-      Bucket: query.bucket,
-      Delete: JSON.parse(query.delete),
-    })
+    return await this.client
+      .deleteObjects({
+        Bucket: query.bucket,
+        Delete: JSON.parse(query.delete),
+      })
+      .promise()
   }
 }
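
Note: in v2, `getObject(...).createReadStream()` hands back a Node `Readable` synchronously, which is why the v3 `Body?.transformToWebStream()` unwrapping above could be dropped. A sketch of the pattern, assuming aws-sdk v2; bucket, key and paths are placeholders:

import AWS from "aws-sdk"
import fs from "fs"

const s3 = new AWS.S3({ region: "us-east-1" })
s3.getObject({ Bucket: "example-bucket", Key: "data.csv" })
  .createReadStream() // Node Readable, no await needed
  .pipe(fs.createWriteStream("/tmp/data.csv"))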

View File

@@ -0,0 +1,76 @@
+const response = (body: any, extra?: any) => () => ({
+  promise: () => body,
+  ...extra,
+})
+
+class DocumentClient {
+  put = jest.fn(response({}))
+
+  query = jest.fn(
+    response({
+      Items: [],
+    })
+  )
+
+  scan = jest.fn(
+    response({
+      Items: [
+        {
+          Name: "test",
+        },
+      ],
+    })
+  )
+
+  get = jest.fn(response({}))
+
+  update = jest.fn(response({}))
+
+  delete = jest.fn(response({}))
+}
+
+class S3 {
+  listObjects = jest.fn(
+    response({
+      Contents: [],
+    })
+  )
+
+  createBucket = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+
+  deleteObjects = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+
+  getSignedUrl = jest.fn((operation, params) => {
+    return `http://example.com/${params.Bucket}/${params.Key}`
+  })
+
+  headBucket = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+
+  upload = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+
+  getObject = jest.fn(
+    response(
+      {
+        Body: "",
+      },
+      {
+        createReadStream: jest.fn().mockReturnValue("stream"),
+      }
+    )
+  )
+}
+
+module.exports = {
+  DynamoDB: {
+    DocumentClient,
+  },
+  S3,
+  config: {
+    update: jest.fn(),
+  },
+}
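
Note: the `response` helper above returns a thunk whose result carries a `promise` key, which is what lets the mocked methods satisfy v2's `client.method(params).promise()` chain. `promise: () => body` hands back the body directly rather than a real Promise, but awaiting a non-thenable resolves to the value itself, so integration code still receives the body. An illustrative trace:

// Illustrative trace of the mock's call chain:
const scan = response({ Items: [{ Name: "test" }] })
scan().promise() // → { Items: [{ Name: "test" }] }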

View File

@@ -1,20 +1,4 @@
-jest.mock("@aws-sdk/lib-dynamodb", () => ({
-  DynamoDBDocument: {
-    from: jest.fn(() => ({
-      update: jest.fn(),
-      put: jest.fn(),
-      query: jest.fn(() => ({
-        Items: [],
-      })),
-      scan: jest.fn(() => ({
-        Items: [],
-      })),
-      delete: jest.fn(),
-      get: jest.fn(),
-    })),
-  },
-}))
-jest.mock("@aws-sdk/client-dynamodb")
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))

 import { default as DynamoDBIntegration } from "../dynamodb"

 class TestConfiguration {
@@ -73,7 +57,11 @@ describe("DynamoDB Integration", () => {
       TableName: tableName,
       IndexName: indexName,
     })
-    expect(response).toEqual([])
+    expect(response).toEqual([
+      {
+        Name: "test",
+      },
+    ])
   })

   it("calls the get method with the correct params", async () => {

View File

@@ -1,52 +1,5 @@
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
 import { default as S3Integration } from "../s3"

-jest.mock("@aws-sdk/client-s3", () => {
-  class S3Mock {
-    response(body: any, extra?: any) {
-      return () => ({
-        promise: () => body,
-        ...extra,
-      })
-    }
-
-    listObjects = jest.fn(
-      this.response({
-        Contents: [],
-      })
-    )
-
-    createBucket = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-
-    deleteObjects = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-
-    headBucket = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-
-    upload = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-
-    getObject = jest.fn(
-      this.response(
-        {
-          Body: "",
-        },
-        {
-          createReadStream: jest.fn().mockReturnValue("stream"),
-        }
-      )
-    )
-  }
-
-  return { S3: S3Mock }
-})
-
 class TestConfiguration {
   integration: any

View File

@@ -430,7 +430,7 @@ export async function handleFileResponse(
       size = details.ContentLength
     }
   }
-  presignedUrl = await objectStore.getPresignedUrl(bucket, key)
+  presignedUrl = objectStore.getPresignedUrl(bucket, key)
   return {
     data: {
       size,
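
Note: the `await` is dropped here because v2's `S3.getSignedUrl(operation, params)` returns the URL string synchronously (v3 moved presigning to the async `@aws-sdk/s3-request-presigner`). A sketch, assuming aws-sdk v2; bucket, key and expiry are illustrative:

import AWS from "aws-sdk"

const s3 = new AWS.S3()
// Synchronous in v2 when called without a callback
const url: string = s3.getSignedUrl("getObject", {
  Bucket: "example-bucket",
  Key: "file.txt",
  Expires: 3600, // seconds
})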

View File

@@ -18,7 +18,7 @@ export async function fetch(type?: PluginType): Promise<Plugin[]> {
     })
   )
   let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
-  plugins = await objectStore.enrichPluginURLs(plugins)
+  plugins = objectStore.enrichPluginURLs(plugins)
   if (type) {
     return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
   } else {

View File

@@ -3,7 +3,10 @@ import { Datasource, Row, Query } from "@budibase/types"
 export type WorkerCallback = (error: any, response?: any) => void

 export interface QueryEvent
-  extends Omit<Query, "datasourceId" | "name" | "parameters" | "readable"> {
+  extends Omit<
+    Query,
+    "datasourceId" | "name" | "parameters" | "readable" | "nestedSchemaFields"
+  > {
   appId?: string
   datasource: Datasource
   pagination?: any
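
Note: `Omit` with a union of literal keys removes all of them at once, so adding `"nestedSchemaFields"` to the union keeps the new `Query` field out of worker payloads. Illustrative only:

interface Example {
  a: string
  b: number
  c: boolean
}
type WithoutAB = Omit<Example, "a" | "b"> // equivalent to { c: boolean }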

View File

@@ -78,7 +78,7 @@ export const getComponentLibraryManifest = async (library: string) => {
     resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
   }
   if (typeof resp !== "string") {
-    resp = resp.toString()
+    resp = resp.toString("utf8")
   }
   return JSON.parse(resp)
 }

View File

@@ -3,7 +3,6 @@ import { budibaseTempDir } from "../budibaseDir"
 import fs from "fs"
 import { join } from "path"
 import { objectStore } from "@budibase/backend-core"
-import stream from "stream"

 const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
 const AUTOMATION_PATH = join(budibaseTempDir(), "automation")
@@ -59,11 +58,7 @@ async function getPluginImpl(path: string, plugin: Plugin) {
     pluginKey
   )

-  if (pluginJs instanceof stream.Readable) {
-    pluginJs.pipe(fs.createWriteStream(filename))
-  } else {
-    fs.writeFileSync(filename, pluginJs)
-  }
+  fs.writeFileSync(filename, pluginJs)
   fs.writeFileSync(metadataName, hash)

   return require(filename)
View File

@@ -359,9 +359,9 @@ export async function coreOutputProcessing(
         if (row[property] == null) {
           continue
         }
-        const process = async (attachment: RowAttachment) => {
+        const process = (attachment: RowAttachment) => {
           if (!attachment.url && attachment.key) {
-            attachment.url = await objectStore.getAppFileUrl(attachment.key)
+            attachment.url = objectStore.getAppFileUrl(attachment.key)
           }
           return attachment
         }
@@ -369,13 +369,11 @@ export async function coreOutputProcessing(
           row[property] = JSON.parse(row[property])
         }
         if (Array.isArray(row[property])) {
-          await Promise.all(
-            row[property].map((attachment: RowAttachment) =>
-              process(attachment)
-            )
-          )
+          row[property].forEach((attachment: RowAttachment) => {
+            process(attachment)
+          })
         } else {
-          await process(row[property])
+          process(row[property])
         }
       }
     } else if (

View File

@@ -911,8 +911,8 @@ export function sort<T extends Record<string, any>>(
  * @param docs the data
  * @param limit the number of docs to limit to
  */
-export function limit<T>(docs: T[], limit: string): T[] {
-  const numLimit = parseFloat(limit)
+export function limit<T>(docs: T[], limit: string | number): T[] {
+  const numLimit = typeof limit === "number" ? limit : parseFloat(limit)
   if (isNaN(numLimit)) {
     return docs
   }
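
Note: widening `limit` to `string | number` keeps the old `parseFloat` path for strings while letting callers pass numbers straight through. A standalone sketch mirroring the visible lines; the final `slice` is an assumption about the elided tail of the function:

function limitDocs<T>(docs: T[], limit: string | number): T[] {
  const numLimit = typeof limit === "number" ? limit : parseFloat(limit)
  if (isNaN(numLimit)) {
    return docs // non-numeric input leaves the data untouched
  }
  return docs.slice(0, numLimit) // assumed: cap to the first numLimit docs
}

limitDocs([1, 2, 3], "2") // → [1, 2]
limitDocs([1, 2, 3], 2) // → [1, 2]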

View File

@@ -109,7 +109,9 @@ export function trimOtherProps(object: any, allowedProps: string[]) {
   return result
 }

-export function isSupportedUserSearch(query: SearchFilters) {
+export function isSupportedUserSearch(
+  query: SearchFilters
+): query is SearchFilters {
   const allowed = [
     { op: BasicOperator.STRING, key: "email" },
     { op: BasicOperator.EQUAL, key: "_id" },
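
Note: the `query is SearchFilters` annotation turns the boolean result into a user-defined type guard. Since the parameter is already typed `SearchFilters` it narrows nothing new here, but it lets call sites with looser input use the function as a guard. An illustrative sketch with a local stand-in type:

type Filters = { equal?: Record<string, unknown> } // stand-in, not the real SearchFilters

function isSupported(query: unknown): query is Filters {
  return typeof query === "object" && query !== null
}

const input: unknown = { equal: { email: "a@b.com" } }
if (isSupported(input)) {
  console.log(input.equal) // input is narrowed to Filters here
}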

View File

@@ -40,6 +40,10 @@ export interface ExecuteQueryRequest {
 export type ExecuteV1QueryResponse = Record<string, any>[]

 export interface ExecuteV2QueryResponse {
   data: Record<string, any>[]
+  pagination?: {
+    page: number
+    cursor: string
+  }
 }

 export interface DeleteQueryResponse {

View File

@@ -24,4 +24,5 @@ export interface PaginationRequest extends BasicPaginationRequest {
 export interface PaginationResponse {
   bookmark: string | number | undefined
   hasNextPage?: boolean
+  totalRows?: number
 }

View File

@@ -1,4 +1,5 @@
 import { Document } from "../document"
+import { Row } from "./row"

 export interface QuerySchema {
   name?: string
@@ -13,6 +14,7 @@ export interface Query extends Document {
   fields: RestQueryFields | any
   transformer: string | null
   schema: Record<string, QuerySchema | string>
+  nestedSchemaFields?: Record<string, Record<string, QuerySchema | string>>
   readable: boolean
   queryVerb: string
   // flag to state whether the default bindings are empty strings (old behaviour) or null
@@ -29,7 +31,7 @@ export interface QueryParameter {
 }

 export interface QueryResponse {
-  rows: any[]
+  rows: Row[]
   keys: string[]
   info: any
   extra: any

View File

@@ -227,6 +227,7 @@ interface OtherFieldMetadata extends BaseFieldSchema {
     | FieldType.OPTIONS
     | FieldType.BOOLEAN
     | FieldType.BIGINT
+    | FieldType.JSON
   >
 }

View File

@@ -26,13 +26,11 @@ export interface SMTPConfig extends Config<SMTPInnerConfig> {}

 export interface SettingsBrandingConfig {
   faviconUrl?: string
   faviconUrlEtag?: string
   emailBrandingEnabled?: boolean
   testimonialsEnabled?: boolean
   platformTitle?: string
   loginHeading?: string
   loginButton?: string
   metaDescription?: string
   metaImageUrl?: string
   metaTitle?: string
@@ -42,6 +40,7 @@ export interface SettingsInnerConfig {
   platformUrl?: string
   company?: string
   logoUrl?: string // Populated on read
+  docsUrl?: string
   logoUrlEtag?: string
   uniqueTenantId?: string
   analyticsEnabled?: boolean

View File

@@ -1,8 +1,6 @@
 import { UITable, UIView } from "@budibase/types"

-export type UIDatasource = (UITable | UIView) & {
-  type: string
-}
+export type UIDatasource = UITable | UIView

 export interface UIFieldMutation {
   visible?: boolean

View File

@ -1,38 +0,0 @@
import {
Row,
SortOrder,
UIDatasource,
UILegacyFilter,
UISearchFilter,
} from "@budibase/types"
export interface UIFetchAPI {
definition: UIDatasource
getInitialData: () => Promise<void>
loading: any
loaded: boolean
resetKey: string | null
error: any
hasNextPage: boolean
nextPage: () => Promise<void>
rows: Row[]
options?: {
datasource?: {
tableId: string
id: string
}
}
update: ({
sortOrder,
sortColumn,
}: {
sortOrder?: SortOrder
sortColumn?: string
filter?: UILegacyFilter[] | UISearchFilter
}) => any
}

View File

@@ -6,4 +6,3 @@ export * from "./view"
 export * from "./user"
 export * from "./filters"
 export * from "./rows"
-export * from "./fetch"

View File

@@ -322,27 +322,27 @@ export async function save(
   }
 }

-async function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
+function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
   if (!oidcLogos) {
     return
   }
-  const newConfig: Record<string, string> = {}
-  const keys = Object.keys(oidcLogos.config || {})
-
-  for (const key of keys) {
-    if (!key.endsWith("Etag")) {
-      const etag = oidcLogos.config[`${key}Etag`]
-      const objectStoreUrl = await objectStore.getGlobalFileUrl(
-        oidcLogos.type,
-        key,
-        etag
-      )
-      newConfig[key] = objectStoreUrl
-    } else {
-      newConfig[key] = oidcLogos.config[key]
-    }
-  }
-  oidcLogos.config = newConfig
+  oidcLogos.config = Object.keys(oidcLogos.config || {}).reduce(
+    (acc: any, key: string) => {
+      if (!key.endsWith("Etag")) {
+        const etag = oidcLogos.config[`${key}Etag`]
+        const objectStoreUrl = objectStore.getGlobalFileUrl(
+          oidcLogos.type,
+          key,
+          etag
+        )
+        acc[key] = objectStoreUrl
+      } else {
+        acc[key] = oidcLogos.config[key]
+      }
+      return acc
+    },
+    {}
+  )
 }

 export async function find(ctx: UserCtx<void, FindConfigResponse>) {
@@ -370,7 +370,7 @@ export async function find(ctx: UserCtx<void, FindConfigResponse>) {
 async function handleConfigType(type: ConfigType, config: Config) {
   if (type === ConfigType.OIDC_LOGOS) {
-    await enrichOIDCLogos(config)
+    enrichOIDCLogos(config)
   } else if (type === ConfigType.AI) {
     await handleAIConfig(config)
   }
@@ -396,7 +396,7 @@ export async function publicOidc(ctx: Ctx<void, GetPublicOIDCConfigResponse>) {
   const oidcCustomLogos = await configs.getOIDCLogosDoc()
   if (oidcCustomLogos) {
-    await enrichOIDCLogos(oidcCustomLogos)
+    enrichOIDCLogos(oidcCustomLogos)
   }

   if (!oidcConfig) {
@@ -427,7 +427,7 @@ export async function publicSettings(
     // enrich the logo url - empty url means deleted
     if (config.logoUrl && config.logoUrl !== "") {
-      config.logoUrl = await objectStore.getGlobalFileUrl(
+      config.logoUrl = objectStore.getGlobalFileUrl(
         "settings",
         "logoUrl",
         config.logoUrlEtag
@@ -437,7 +437,7 @@ export async function publicSettings(
     // enrich the favicon url - empty url means deleted
     const faviconUrl =
       branding.faviconUrl && branding.faviconUrl !== ""
-        ? await objectStore.getGlobalFileUrl(
+        ? objectStore.getGlobalFileUrl(
             "settings",
             "faviconUrl",
             branding.faviconUrlEtag
@@ -522,7 +522,7 @@ export async function upload(ctx: UserCtx<void, UploadConfigFileResponse>) {
     ctx.body = {
       message: "File has been uploaded and url stored to config.",
-      url: await objectStore.getGlobalFileUrl(type, name, etag),
+      url: objectStore.getGlobalFileUrl(type, name, etag),
     }
   }
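
Note: with `getGlobalFileUrl` now synchronous, the async for-loop collapses into a `reduce` that rewrites each non-`Etag` key to its object-store URL and copies `Etag` keys through unchanged. A standalone sketch of the shape; `toUrl` is a stand-in for `objectStore.getGlobalFileUrl` and the config values are illustrative:

// Stand-in for objectStore.getGlobalFileUrl; illustrative only.
const toUrl = (type: string, key: string, etag?: string) =>
  `https://cdn.example.com/${type}/${key}${etag ? `?etag=${etag}` : ""}`

const config: Record<string, string> = { logo: "logo.png", logoEtag: "abc123" }

const enriched = Object.keys(config).reduce((acc: any, key: string) => {
  acc[key] = key.endsWith("Etag")
    ? config[key] // keep etag entries as-is
    : toUrl("logos_oidc", key, config[`${key}Etag`])
  return acc
}, {})
// enriched.logo → "https://cdn.example.com/logos_oidc/logo?etag=abc123"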

yarn.lock (1107 changes)
File diff suppressed because it is too large