Merge branch 'master' into builder-store-conversions-pc
commit db27d1e8af
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.35",
+  "version": "3.2.37",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

@@ -1,28 +0,0 @@
-export class S3 {
-  headBucket() {
-    return jest.fn().mockReturnThis()
-  }
-  deleteObject() {
-    return jest.fn().mockReturnThis()
-  }
-  deleteObjects() {
-    return jest.fn().mockReturnThis()
-  }
-  createBucket() {
-    return jest.fn().mockReturnThis()
-  }
-  getObject() {
-    return jest.fn().mockReturnThis()
-  }
-  listObject() {
-    return jest.fn().mockReturnThis()
-  }
-  promise() {
-    return jest.fn().mockReturnThis()
-  }
-  catch() {
-    return jest.fn()
-  }
-}
-
-export const GetObjectCommand = jest.fn(inputs => ({ inputs }))

@@ -1,4 +0,0 @@
-export const getSignedUrl = jest.fn((_, cmd) => {
-  const { inputs } = cmd
-  return `http://s3.example.com/${inputs?.Bucket}/${inputs?.Key}`
-})

@@ -0,0 +1,19 @@
+const mockS3 = {
+  headBucket: jest.fn().mockReturnThis(),
+  deleteObject: jest.fn().mockReturnThis(),
+  deleteObjects: jest.fn().mockReturnThis(),
+  createBucket: jest.fn().mockReturnThis(),
+  getObject: jest.fn().mockReturnThis(),
+  listObject: jest.fn().mockReturnThis(),
+  getSignedUrl: jest.fn((operation: string, params: any) => {
+    return `http://s3.example.com/${params.Bucket}/${params.Key}`
+  }),
+  promise: jest.fn().mockReturnThis(),
+  catch: jest.fn(),
+}
+
+const AWS = {
+  S3: jest.fn(() => mockS3),
+}
+
+export default AWS

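A quick sketch of how a manual mock like this is typically consumed — Jest resolves `aws-sdk` to the `__mocks__` module once `jest.mock("aws-sdk")` is in effect. The test name, bucket, and key below are illustrative, not taken from the repo:

```ts
import AWS from "aws-sdk"

jest.mock("aws-sdk")

test("returns the stubbed presigned URL", () => {
  // `new AWS.S3()` returns the shared mockS3 object defined above, so
  // v2-style chains like `.getObject(...).promise()` keep working.
  const s3 = new AWS.S3()
  const url = s3.getSignedUrl("getObject", {
    Bucket: "my-bucket", // illustrative bucket
    Key: "path/to/file.txt", // illustrative key
  })
  expect(url).toBe("http://s3.example.com/my-bucket/path/to/file.txt")
})
```
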
@@ -30,9 +30,6 @@
     "test:watch": "jest --watchAll"
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "3.709.0",
-    "@aws-sdk/lib-storage": "3.709.0",
-    "@aws-sdk/s3-request-presigner": "3.709.0",
     "@budibase/nano": "10.1.5",
     "@budibase/pouchdb-replication-stream": "1.2.11",
     "@budibase/shared-core": "*",

@@ -74,13 +71,11 @@
   "devDependencies": {
     "@jest/types": "^29.6.3",
     "@shopify/jest-koa-mocks": "5.1.1",
-    "@smithy/types": "4.0.0",
     "@swc/core": "1.3.71",
     "@swc/jest": "0.2.27",
     "@types/chance": "1.1.3",
     "@types/cookies": "0.7.8",
     "@types/jest": "29.5.5",
-    "@types/koa": "2.13.4",
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "6.4.2",

@@ -88,6 +83,7 @@
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
+    "@types/koa": "2.13.4",
     "chance": "1.1.8",
     "ioredis-mock": "8.9.0",
     "jest": "29.7.0",

@@ -154,7 +154,7 @@ const environment = {
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
   AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
-  AWS_REGION: process.env.AWS_REGION || "eu-west-1",
+  AWS_REGION: process.env.AWS_REGION,
   MINIO_URL: process.env.MINIO_URL,
   MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
   INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,

@@ -13,7 +13,7 @@ export function clientLibraryPath(appId: string) {
  * due to issues with the domain we were unable to continue doing this - keeping
  * incase we are able to switch back to CDN path again in future.
  */
-export async function clientLibraryCDNUrl(appId: string, version: string) {
+export function clientLibraryCDNUrl(appId: string, version: string) {
   let file = clientLibraryPath(appId)
   if (env.CLOUDFRONT_CDN) {
     // append app version to bust the cache

@@ -24,7 +24,7 @@ export async function clientLibraryCDNUrl(appId: string, version: string) {
     // file is public
     return cloudfront.getUrl(file)
   } else {
-    return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
   }
 }

@@ -44,10 +44,10 @@ export function clientLibraryUrl(appId: string, version: string) {
   return `/api/assets/client?${qs.encode(qsParams)}`
 }
 
-export async function getAppFileUrl(s3Key: string) {
+export function getAppFileUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
-    return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
   }
 }

@ -5,11 +5,7 @@ import * as cloudfront from "../cloudfront"
|
|||
|
||||
// URLs
|
||||
|
||||
export const getGlobalFileUrl = async (
|
||||
type: string,
|
||||
name: string,
|
||||
etag?: string
|
||||
) => {
|
||||
export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
|
||||
let file = getGlobalFileS3Key(type, name)
|
||||
if (env.CLOUDFRONT_CDN) {
|
||||
if (etag) {
|
||||
|
@ -17,7 +13,7 @@ export const getGlobalFileUrl = async (
|
|||
}
|
||||
return cloudfront.getPresignedUrl(file)
|
||||
} else {
|
||||
return await objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
|
||||
return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -6,25 +6,23 @@ import { Plugin } from "@budibase/types"
|
|||
|
||||
// URLS
|
||||
|
||||
export async function enrichPluginURLs(plugins?: Plugin[]): Promise<Plugin[]> {
|
||||
export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
|
||||
if (!plugins || !plugins.length) {
|
||||
return []
|
||||
}
|
||||
return await Promise.all(
|
||||
plugins.map(async plugin => {
|
||||
const jsUrl = await getPluginJSUrl(plugin)
|
||||
const iconUrl = await getPluginIconUrl(plugin)
|
||||
return plugins.map(plugin => {
|
||||
const jsUrl = getPluginJSUrl(plugin)
|
||||
const iconUrl = getPluginIconUrl(plugin)
|
||||
return { ...plugin, jsUrl, iconUrl }
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async function getPluginJSUrl(plugin: Plugin) {
|
||||
function getPluginJSUrl(plugin: Plugin) {
|
||||
const s3Key = getPluginJSKey(plugin)
|
||||
return getPluginUrl(s3Key)
|
||||
}
|
||||
|
||||
async function getPluginIconUrl(plugin: Plugin) {
|
||||
function getPluginIconUrl(plugin: Plugin): string | undefined {
|
||||
const s3Key = getPluginIconKey(plugin)
|
||||
if (!s3Key) {
|
||||
return
|
||||
|
@ -32,11 +30,11 @@ async function getPluginIconUrl(plugin: Plugin) {
|
|||
return getPluginUrl(s3Key)
|
||||
}
|
||||
|
||||
async function getPluginUrl(s3Key: string) {
|
||||
function getPluginUrl(s3Key: string) {
|
||||
if (env.CLOUDFRONT_CDN) {
|
||||
return cloudfront.getPresignedUrl(s3Key)
|
||||
} else {
|
||||
return await objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
|
||||
return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -93,25 +93,25 @@ describe("app", () => {
|
|||
testEnv.multiTenant()
|
||||
})
|
||||
|
||||
it("gets url with embedded minio", async () => {
|
||||
it("gets url with embedded minio", () => {
|
||||
testEnv.withMinio()
|
||||
const url = await getAppFileUrl()
|
||||
const url = getAppFileUrl()
|
||||
expect(url).toBe(
|
||||
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
|
||||
)
|
||||
})
|
||||
|
||||
it("gets url with custom S3", async () => {
|
||||
it("gets url with custom S3", () => {
|
||||
testEnv.withS3()
|
||||
const url = await getAppFileUrl()
|
||||
const url = getAppFileUrl()
|
||||
expect(url).toBe(
|
||||
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
|
||||
)
|
||||
})
|
||||
|
||||
it("gets url with cloudfront + s3", async () => {
|
||||
it("gets url with cloudfront + s3", () => {
|
||||
testEnv.withCloudfront()
|
||||
const url = await getAppFileUrl()
|
||||
const url = getAppFileUrl()
|
||||
// omit rest of signed params
|
||||
expect(
|
||||
url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
|
||||
|
@ -126,8 +126,8 @@ describe("app", () => {
|
|||
|
||||
it("gets url with embedded minio", async () => {
|
||||
testEnv.withMinio()
|
||||
await testEnv.withTenant(async () => {
|
||||
const url = await getAppFileUrl()
|
||||
await testEnv.withTenant(() => {
|
||||
const url = getAppFileUrl()
|
||||
expect(url).toBe(
|
||||
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
|
||||
)
|
||||
|
@ -136,8 +136,8 @@ describe("app", () => {
|
|||
|
||||
it("gets url with custom S3", async () => {
|
||||
testEnv.withS3()
|
||||
await testEnv.withTenant(async () => {
|
||||
const url = await getAppFileUrl()
|
||||
await testEnv.withTenant(() => {
|
||||
const url = getAppFileUrl()
|
||||
expect(url).toBe(
|
||||
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
|
||||
)
|
||||
|
@ -146,8 +146,8 @@ describe("app", () => {
|
|||
|
||||
it("gets url with cloudfront + s3", async () => {
|
||||
testEnv.withCloudfront()
|
||||
await testEnv.withTenant(async () => {
|
||||
const url = await getAppFileUrl()
|
||||
await testEnv.withTenant(() => {
|
||||
const url = getAppFileUrl()
|
||||
// omit rest of signed params
|
||||
expect(
|
||||
url.includes(
|
||||
|
|
|
@ -3,7 +3,7 @@ import { testEnv } from "../../../../tests/extra"
|
|||
|
||||
describe("global", () => {
|
||||
describe("getGlobalFileUrl", () => {
|
||||
async function getGlobalFileUrl() {
|
||||
function getGlobalFileUrl() {
|
||||
return global.getGlobalFileUrl("settings", "logoUrl", "etag")
|
||||
}
|
||||
|
||||
|
@ -12,21 +12,21 @@ describe("global", () => {
|
|||
testEnv.singleTenant()
|
||||
})
|
||||
|
||||
it("gets url with embedded minio", async () => {
|
||||
it("gets url with embedded minio", () => {
|
||||
testEnv.withMinio()
|
||||
const url = await getGlobalFileUrl()
|
||||
const url = getGlobalFileUrl()
|
||||
expect(url).toBe("/files/signed/global/settings/logoUrl")
|
||||
})
|
||||
|
||||
it("gets url with custom S3", async () => {
|
||||
it("gets url with custom S3", () => {
|
||||
testEnv.withS3()
|
||||
const url = await getGlobalFileUrl()
|
||||
const url = getGlobalFileUrl()
|
||||
expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
|
||||
})
|
||||
|
||||
it("gets url with cloudfront + s3", async () => {
|
||||
it("gets url with cloudfront + s3", () => {
|
||||
testEnv.withCloudfront()
|
||||
const url = await getGlobalFileUrl()
|
||||
const url = getGlobalFileUrl()
|
||||
// omit rest of signed params
|
||||
expect(
|
||||
url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
|
||||
|
@ -41,16 +41,16 @@ describe("global", () => {
|
|||
|
||||
it("gets url with embedded minio", async () => {
|
||||
testEnv.withMinio()
|
||||
await testEnv.withTenant(async tenantId => {
|
||||
const url = await getGlobalFileUrl()
|
||||
await testEnv.withTenant(tenantId => {
|
||||
const url = getGlobalFileUrl()
|
||||
expect(url).toBe(`/files/signed/global/${tenantId}/settings/logoUrl`)
|
||||
})
|
||||
})
|
||||
|
||||
it("gets url with custom S3", async () => {
|
||||
testEnv.withS3()
|
||||
await testEnv.withTenant(async tenantId => {
|
||||
const url = await getGlobalFileUrl()
|
||||
await testEnv.withTenant(tenantId => {
|
||||
const url = getGlobalFileUrl()
|
||||
expect(url).toBe(
|
||||
`http://s3.example.com/global/${tenantId}/settings/logoUrl`
|
||||
)
|
||||
|
@ -59,8 +59,8 @@ describe("global", () => {
|
|||
|
||||
it("gets url with cloudfront + s3", async () => {
|
||||
testEnv.withCloudfront()
|
||||
await testEnv.withTenant(async tenantId => {
|
||||
const url = await getGlobalFileUrl()
|
||||
await testEnv.withTenant(tenantId => {
|
||||
const url = getGlobalFileUrl()
|
||||
// omit rest of signed params
|
||||
expect(
|
||||
url.includes(
|
||||
|
|
|
@ -6,8 +6,8 @@ describe("plugins", () => {
|
|||
describe("enrichPluginURLs", () => {
|
||||
const plugin = structures.plugins.plugin()
|
||||
|
||||
async function getEnrichedPluginUrls() {
|
||||
const enriched = (await plugins.enrichPluginURLs([plugin]))[0]
|
||||
function getEnrichedPluginUrls() {
|
||||
const enriched = plugins.enrichPluginURLs([plugin])[0]
|
||||
return {
|
||||
jsUrl: enriched.jsUrl!,
|
||||
iconUrl: enriched.iconUrl!,
|
||||
|
@ -19,9 +19,9 @@ describe("plugins", () => {
|
|||
testEnv.singleTenant()
|
||||
})
|
||||
|
||||
it("gets url with embedded minio", async () => {
|
||||
it("gets url with embedded minio", () => {
|
||||
testEnv.withMinio()
|
||||
const urls = await getEnrichedPluginUrls()
|
||||
const urls = getEnrichedPluginUrls()
|
||||
expect(urls.jsUrl).toBe(
|
||||
`/files/signed/plugins/${plugin.name}/plugin.min.js`
|
||||
)
|
||||
|
@ -30,9 +30,9 @@ describe("plugins", () => {
|
|||
)
|
||||
})
|
||||
|
||||
it("gets url with custom S3", async () => {
|
||||
it("gets url with custom S3", () => {
|
||||
testEnv.withS3()
|
||||
const urls = await getEnrichedPluginUrls()
|
||||
const urls = getEnrichedPluginUrls()
|
||||
expect(urls.jsUrl).toBe(
|
||||
`http://s3.example.com/plugins/${plugin.name}/plugin.min.js`
|
||||
)
|
||||
|
@ -41,9 +41,9 @@ describe("plugins", () => {
|
|||
)
|
||||
})
|
||||
|
||||
it("gets url with cloudfront + s3", async () => {
|
||||
it("gets url with cloudfront + s3", () => {
|
||||
testEnv.withCloudfront()
|
||||
const urls = await getEnrichedPluginUrls()
|
||||
const urls = getEnrichedPluginUrls()
|
||||
// omit rest of signed params
|
||||
expect(
|
||||
urls.jsUrl.includes(
|
||||
|
@ -65,8 +65,8 @@ describe("plugins", () => {
|
|||
|
||||
it("gets url with embedded minio", async () => {
|
||||
testEnv.withMinio()
|
||||
await testEnv.withTenant(async tenantId => {
|
||||
const urls = await getEnrichedPluginUrls()
|
||||
await testEnv.withTenant(tenantId => {
|
||||
const urls = getEnrichedPluginUrls()
|
||||
expect(urls.jsUrl).toBe(
|
||||
`/files/signed/plugins/${tenantId}/${plugin.name}/plugin.min.js`
|
||||
)
|
||||
|
@ -78,8 +78,8 @@ describe("plugins", () => {
|
|||
|
||||
it("gets url with custom S3", async () => {
|
||||
testEnv.withS3()
|
||||
await testEnv.withTenant(async tenantId => {
|
||||
const urls = await getEnrichedPluginUrls()
|
||||
await testEnv.withTenant(tenantId => {
|
||||
const urls = getEnrichedPluginUrls()
|
||||
expect(urls.jsUrl).toBe(
|
||||
`http://s3.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js`
|
||||
)
|
||||
|
@ -91,8 +91,8 @@ describe("plugins", () => {
|
|||
|
||||
it("gets url with cloudfront + s3", async () => {
|
||||
testEnv.withCloudfront()
|
||||
await testEnv.withTenant(async tenantId => {
|
||||
const urls = await getEnrichedPluginUrls()
|
||||
await testEnv.withTenant(tenantId => {
|
||||
const urls = getEnrichedPluginUrls()
|
||||
// omit rest of signed params
|
||||
expect(
|
||||
urls.jsUrl.includes(
|
||||
|
|
|
@@ -1,15 +1,6 @@
 const sanitize = require("sanitize-s3-objectkey")
 
-import {
-  HeadObjectCommandOutput,
-  PutObjectCommandInput,
-  S3,
-  S3ClientConfig,
-  GetObjectCommand,
-  _Object as S3Object,
-} from "@aws-sdk/client-s3"
-import { Upload } from "@aws-sdk/lib-storage"
-import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
+import AWS from "aws-sdk"
 import stream, { Readable } from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"

@ -22,8 +13,8 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
|
|||
import { v4 } from "uuid"
|
||||
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
|
||||
import fsp from "fs/promises"
|
||||
import { HeadObjectOutput } from "aws-sdk/clients/s3"
|
||||
import { ReadableStream } from "stream/web"
|
||||
import { NodeJsRuntimeStreamingBlobPayloadOutputTypes } from "@smithy/types"
|
||||
|
||||
const streamPipeline = promisify(stream.pipeline)
|
||||
// use this as a temporary store of buckets that are being created
|
||||
|
@@ -93,24 +84,26 @@ export function sanitizeBucket(input: string) {
  * @constructor
  */
 export function ObjectStore(
   bucket: string,
   opts: { presigning: boolean } = { presigning: false }
 ) {
-  const config: S3ClientConfig = {
-    forcePathStyle: true,
-    credentials: {
-      accessKeyId: env.MINIO_ACCESS_KEY!,
-      secretAccessKey: env.MINIO_SECRET_KEY!,
-    },
+  const config: AWS.S3.ClientConfiguration = {
+    s3ForcePathStyle: true,
+    signatureVersion: "v4",
+    apiVersion: "2006-03-01",
+    accessKeyId: env.MINIO_ACCESS_KEY,
+    secretAccessKey: env.MINIO_SECRET_KEY,
+    region: env.AWS_REGION,
   }
+  if (bucket) {
+    config.params = {
+      Bucket: sanitizeBucket(bucket),
+    }
+  }
 
   // for AWS Credentials using temporary session token
   if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
-    config.credentials = {
-      accessKeyId: env.MINIO_ACCESS_KEY!,
-      secretAccessKey: env.MINIO_SECRET_KEY!,
-      sessionToken: env.AWS_SESSION_TOKEN,
-    }
+    config.sessionToken = env.AWS_SESSION_TOKEN
   }
 
   // custom S3 is in use i.e. minio

@@ -120,13 +113,13 @@ export function ObjectStore(
       // Normally a signed url will need to be generated with a specified host in mind.
       // To support dynamic hosts, e.g. some unknown self-hosted installation url,
       // use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx
-      config.endpoint = "http://minio-service"
+      config.endpoint = "minio-service"
     } else {
       config.endpoint = env.MINIO_URL
     }
   }
 
-  return new S3(config)
+  return new AWS.S3(config)
 }
 
 /**

@@ -139,25 +132,26 @@ export async function createBucketIfNotExists(
 ): Promise<{ created: boolean; exists: boolean }> {
   bucketName = sanitizeBucket(bucketName)
   try {
-    await client.headBucket({
-      Bucket: bucketName,
-    })
+    await client
+      .headBucket({
+        Bucket: bucketName,
+      })
+      .promise()
     return { created: false, exists: true }
   } catch (err: any) {
-    const statusCode = err.statusCode || err.$response?.statusCode
-    const promises: Record<string, Promise<any> | undefined> =
-      STATE.bucketCreationPromises
-    const doesntExist = statusCode === 404,
-      noAccess = statusCode === 403
+    const promises: any = STATE.bucketCreationPromises
+    const doesntExist = err.statusCode === 404,
+      noAccess = err.statusCode === 403
     if (promises[bucketName]) {
       await promises[bucketName]
       return { created: false, exists: true }
     } else if (doesntExist || noAccess) {
       if (doesntExist) {
-        promises[bucketName] = client.createBucket({
-          Bucket: bucketName,
-        })
+        promises[bucketName] = client
+          .createBucket({
+            Bucket: bucketName,
+          })
+          .promise()
         await promises[bucketName]
         delete promises[bucketName]
         return { created: true, exists: false }

@@ -186,26 +180,25 @@ export async function upload({
 
   const fileBytes = path ? (await fsp.open(path)).createReadStream() : body
 
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
 
   if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    await objectStore.putBucketLifecycleConfiguration(ttlConfig)
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }
 
   let contentType = type
-  const finalContentType = contentType
-    ? contentType
-    : extension
+  if (!contentType) {
+    contentType = extension
       ? CONTENT_TYPE_MAP[extension.toLowerCase()]
       : CONTENT_TYPE_MAP.txt
-  const config: PutObjectCommandInput = {
+  }
+  const config: any = {
     // windows file paths need to be converted to forward slashes for s3
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filename),
-    Body: fileBytes as stream.Readable | Buffer,
-    ContentType: finalContentType,
+    Body: fileBytes,
+    ContentType: contentType,
   }
   if (metadata && typeof metadata === "object") {
     // remove any nullish keys from the metadata object, as these may be considered invalid

@@ -214,15 +207,10 @@ export async function upload({
       delete metadata[key]
     }
   }
-    config.Metadata = metadata as Record<string, string>
+    config.Metadata = metadata
   }
 
-  const upload = new Upload({
-    client: objectStore,
-    params: config,
-  })
-
-  return upload.done()
+  return objectStore.upload(config).promise()
 }
 
 /**

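The hunk above swaps the v3 `Upload` helper from `@aws-sdk/lib-storage` for the v2 managed upload. A minimal sketch of the v2 call shape this merge lands on (bucket, key, and body values are illustrative):

```ts
import AWS from "aws-sdk"

// v2: upload() returns a managed upload whose .promise() resolves
// once the object (including any multipart parts) has been sent.
async function uploadV2(s3: AWS.S3) {
  return s3
    .upload({
      Bucket: "my-bucket", // illustrative
      Key: "path/to/file.txt", // illustrative
      Body: Buffer.from("hello"),
      ContentType: "text/plain",
    })
    .promise()
}
// The v3 equivalent (removed here) was:
//   const upload = new Upload({ client, params }); await upload.done()
```
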
@@ -241,12 +229,12 @@ export async function streamUpload({
     throw new Error("Stream to upload is invalid/undefined")
   }
   const extension = filename.split(".").pop()
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
 
   if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    await objectStore.putBucketLifecycleConfiguration(ttlConfig)
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }
 
   // Set content type for certain known extensions

@@ -279,15 +267,13 @@ export async function streamUpload({
     ...extra,
   }
 
-  const upload = new Upload({
-    client: objectStore,
-    params,
-  })
-  const details = await upload.done()
-  const headDetails = await objectStore.headObject({
+  const details = await objectStore.upload(params).promise()
+  const headDetails = await objectStore
+    .headObject({
       Bucket: bucket,
       Key: objKey,
     })
+    .promise()
   return {
     ...details,
     ContentLength: headDetails.ContentLength,

@@ -298,44 +284,35 @@ export async function streamUpload({
  * retrieves the contents of a file from the object store, if it is a known content type it
  * will be converted, otherwise it will be returned as a buffer stream.
  */
-export async function retrieve(
-  bucketName: string,
-  filepath: string
-): Promise<string | stream.Readable> {
-  const objectStore = ObjectStore()
+export async function retrieve(bucketName: string, filepath: string) {
+  const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filepath),
   }
-  const response = await objectStore.getObject(params)
-  if (!response.Body) {
-    throw new Error("Unable to retrieve object")
-  }
-  const nodeResponse =
-    response.Body as NodeJsRuntimeStreamingBlobPayloadOutputTypes
+  const response: any = await objectStore.getObject(params).promise()
   // currently these are all strings
   if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
-    return nodeResponse.toString()
+    return response.Body.toString("utf8")
   } else {
-    return nodeResponse
+    return response.Body
   }
 }
 
-export async function listAllObjects(
-  bucketName: string,
-  path: string
-): Promise<S3Object[]> {
-  const objectStore = ObjectStore()
+export async function listAllObjects(bucketName: string, path: string) {
+  const objectStore = ObjectStore(bucketName)
   const list = (params: ListParams = {}) => {
-    return objectStore.listObjectsV2({
+    return objectStore
+      .listObjectsV2({
         ...params,
         Bucket: sanitizeBucket(bucketName),
         Prefix: sanitizeKey(path),
       })
+      .promise()
   }
   let isTruncated = false,
     token,
-    objects: Object[] = []
+    objects: AWS.S3.Types.Object[] = []
   do {
     let params: ListParams = {}
     if (token) {

@@ -354,19 +331,18 @@ export async function listAllObjects(
 /**
  * Generate a presigned url with a default TTL of 1 hour
  */
-export async function getPresignedUrl(
+export function getPresignedUrl(
   bucketName: string,
   key: string,
   durationSeconds = 3600
 ) {
-  const objectStore = ObjectStore({ presigning: true })
+  const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(key),
+    Expires: durationSeconds,
   }
-  const url = await getSignedUrl(objectStore, new GetObjectCommand(params), {
-    expiresIn: durationSeconds,
-  })
+  const url = objectStore.getSignedUrl("getObject", params)
 
   if (!env.MINIO_ENABLED) {
     // return the full URL to the client

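For contrast, the two presigning APIs touched by this hunk: v2 signs synchronously and reads the TTL from the `Expires` request parameter, while v3 (removed here) is asynchronous and takes `expiresIn` as an option. Bucket and key values are illustrative:

```ts
import AWS from "aws-sdk"

// v2: synchronous, TTL via the `Expires` request parameter (seconds).
function presignV2(s3: AWS.S3): string {
  return s3.getSignedUrl("getObject", {
    Bucket: "my-bucket", // illustrative
    Key: "path/to/file.txt", // illustrative
    Expires: 3600,
  })
}
// v3 (removed by this merge) was asynchronous instead:
//   await getSignedUrl(client, new GetObjectCommand(params), { expiresIn: 3600 })
```
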
@@ -390,11 +366,7 @@ export async function retrieveToTmp(bucketName: string, filepath: string) {
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
   const outputPath = join(budibaseTempDir(), v4())
-  if (data instanceof stream.Readable) {
-    data.pipe(fs.createWriteStream(outputPath))
-  } else {
-    fs.writeFileSync(outputPath, data)
-  }
+  fs.writeFileSync(outputPath, data)
   return outputPath
 }
 

@@ -436,17 +408,17 @@ export async function retrieveDirectory(bucketName: string, path: string) {
  * Delete a single file.
  */
 export async function deleteFile(bucketName: string, filepath: string) {
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,
     Key: sanitizeKey(filepath),
   }
-  return objectStore.deleteObject(params)
+  return objectStore.deleteObject(params).promise()
 }
 
 export async function deleteFiles(bucketName: string, filepaths: string[]) {
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,

@@ -454,7 +426,7 @@ export async function deleteFiles(bucketName: string, filepaths: string[]) {
       Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),
     },
   }
-  return objectStore.deleteObjects(params)
+  return objectStore.deleteObjects(params).promise()
 }
 
 /**

@@ -466,13 +438,13 @@ export async function deleteFolder(
 ): Promise<any> {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
-  const client = ObjectStore()
+  const client = ObjectStore(bucketName)
   const listParams = {
     Bucket: bucketName,
     Prefix: folder,
   }
 
-  const existingObjectsResponse = await client.listObjects(listParams)
+  const existingObjectsResponse = await client.listObjects(listParams).promise()
   if (existingObjectsResponse.Contents?.length === 0) {
     return
   }

@@ -487,7 +459,7 @@ export async function deleteFolder(
     deleteParams.Delete.Objects.push({ Key: content.Key })
   })
 
-  const deleteResponse = await client.deleteObjects(deleteParams)
+  const deleteResponse = await client.deleteObjects(deleteParams).promise()
   // can only empty 1000 items at once
   if (deleteResponse.Deleted?.length === 1000) {
     return deleteFolder(bucketName, folder)

@@ -562,33 +534,29 @@ export async function getReadStream(
 ): Promise<Readable> {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
-  const client = ObjectStore()
+  const client = ObjectStore(bucketName)
   const params = {
     Bucket: bucketName,
     Key: path,
   }
-  const response = await client.getObject(params)
-  if (!response.Body || !(response.Body instanceof stream.Readable)) {
-    throw new Error("Unable to retrieve stream - invalid response")
-  }
-  return response.Body
+  return client.getObject(params).createReadStream()
 }
 
 export async function getObjectMetadata(
   bucket: string,
   path: string
-): Promise<HeadObjectCommandOutput> {
+): Promise<HeadObjectOutput> {
   bucket = sanitizeBucket(bucket)
   path = sanitizeKey(path)
 
-  const client = ObjectStore()
+  const client = ObjectStore(bucket)
   const params = {
     Bucket: bucket,
     Key: path,
   }
 
   try {
-    return await client.headObject(params)
+    return await client.headObject(params).promise()
   } catch (err: any) {
     throw new Error("Unable to retrieve metadata from object")
   }

@ -2,10 +2,7 @@ import path, { join } from "path"
|
|||
import { tmpdir } from "os"
|
||||
import fs from "fs"
|
||||
import env from "../environment"
|
||||
import {
|
||||
LifecycleRule,
|
||||
PutBucketLifecycleConfigurationCommandInput,
|
||||
} from "@aws-sdk/client-s3"
|
||||
import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
|
||||
import * as objectStore from "./objectStore"
|
||||
import {
|
||||
AutomationAttachment,
|
||||
|
@ -46,8 +43,8 @@ export function budibaseTempDir() {
|
|||
export const bucketTTLConfig = (
|
||||
bucketName: string,
|
||||
days: number
|
||||
): PutBucketLifecycleConfigurationCommandInput => {
|
||||
const lifecycleRule: LifecycleRule = {
|
||||
): PutBucketLifecycleConfigurationRequest => {
|
||||
const lifecycleRule = {
|
||||
ID: `${bucketName}-ExpireAfter${days}days`,
|
||||
Prefix: "",
|
||||
Status: "Enabled",
|
||||
|
|
|
@@ -0,0 +1,3 @@
+declare module "./helpers" {
+  export const cloneDeep: <T>(obj: T) => T
+}

@@ -43,7 +43,6 @@
   export let showDataProviders = true
 
   const dispatch = createEventDispatcher()
-  const arrayTypes = ["attachment", "array"]
 
   let anchorRight, dropdownRight
   let drawer

@@ -116,8 +115,11 @@
     }
   })
   $: fields = bindings
-    .filter(x => arrayTypes.includes(x.fieldSchema?.type))
-    .filter(x => x.fieldSchema?.tableId != null)
+    .filter(
+      x =>
+        x.fieldSchema?.type === "attachment" ||
+        (x.fieldSchema?.type === "array" && x.tableId)
+    )
     .map(binding => {
       const { providerId, readableBinding, runtimeBinding } = binding
       const { name, type, tableId } = binding.fieldSchema

@@ -1,138 +0,0 @@
-import { derived } from "svelte/store"
-import { admin } from "./admin"
-import { auth } from "./auth"
-import { isEnabled } from "@/helpers/featureFlags"
-import { sdk } from "@budibase/shared-core"
-import { FeatureFlag } from "@budibase/types"
-
-export const menu = derived([admin, auth], ([$admin, $auth]) => {
-  const user = $auth?.user
-  const isAdmin = sdk.users.isAdmin(user)
-  const cloud = $admin?.cloud
-  // Determine user sub pages
-  let userSubPages = [
-    {
-      title: "Users",
-      href: "/builder/portal/users/users",
-    },
-  ]
-  userSubPages.push({
-    title: "Groups",
-    href: "/builder/portal/users/groups",
-  })
-
-  // Pages that all devs and admins can access
-  let menu = [
-    {
-      title: "Apps",
-      href: "/builder/portal/apps",
-    },
-  ]
-  if (sdk.users.isGlobalBuilder(user)) {
-    menu.push({
-      title: "Users",
-      href: "/builder/portal/users",
-      subPages: userSubPages,
-    })
-    menu.push({
-      title: "Plugins",
-      href: "/builder/portal/plugins",
-    })
-  }
-
-  // Add settings page for admins
-  if (isAdmin) {
-    let settingsSubPages = [
-      {
-        title: "Auth",
-        href: "/builder/portal/settings/auth",
-      },
-      {
-        title: "Email",
-        href: "/builder/portal/settings/email",
-      },
-      {
-        title: "Organisation",
-        href: "/builder/portal/settings/organisation",
-      },
-      {
-        title: "Branding",
-        href: "/builder/portal/settings/branding",
-      },
-      {
-        title: "Environment",
-        href: "/builder/portal/settings/environment",
-      },
-    ]
-    if (isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) {
-      settingsSubPages.push({
-        title: "AI",
-        href: "/builder/portal/settings/ai",
-      })
-    }
-
-    if (!cloud) {
-      settingsSubPages.push({
-        title: "Version",
-        href: "/builder/portal/settings/version",
-      })
-      settingsSubPages.push({
-        title: "Diagnostics",
-        href: "/builder/portal/settings/diagnostics",
-      })
-    }
-    menu.push({
-      title: "Settings",
-      href: "/builder/portal/settings",
-      subPages: [...settingsSubPages].sort((a, b) =>
-        a.title.localeCompare(b.title)
-      ),
-    })
-  }
-
-  // Add account page
-  let accountSubPages = [
-    {
-      title: "Usage",
-      href: "/builder/portal/account/usage",
-    },
-  ]
-  if (isAdmin) {
-    accountSubPages.push({
-      title: "Audit Logs",
-      href: "/builder/portal/account/auditLogs",
-    })
-
-    if (!cloud) {
-      accountSubPages.push({
-        title: "System Logs",
-        href: "/builder/portal/account/systemLogs",
-      })
-    }
-  }
-  if (cloud && user?.accountPortalAccess) {
-    accountSubPages.push({
-      title: "Upgrade",
-      href: $admin?.accountPortalUrl + "/portal/upgrade",
-    })
-  } else if (!cloud && isAdmin) {
-    accountSubPages.push({
-      title: "Upgrade",
-      href: "/builder/portal/account/upgrade",
-    })
-  }
-  // add license check here
-  if (user?.accountPortalAccess && user.account.stripeCustomerId) {
-    accountSubPages.push({
-      title: "Billing",
-      href: $admin?.accountPortalUrl + "/portal/billing",
-    })
-  }
-  menu.push({
-    title: "Account",
-    href: "/builder/portal/account",
-    subPages: accountSubPages,
-  })
-
-  return menu
-})

@@ -0,0 +1,149 @@
+import { derived, Readable } from "svelte/store"
+import { admin } from "./admin"
+import { auth } from "./auth"
+import { isEnabled } from "@/helpers/featureFlags"
+import { sdk } from "@budibase/shared-core"
+import { FeatureFlag } from "@budibase/types"
+
+interface MenuItem {
+  title: string
+  href: string
+  subPages?: MenuItem[]
+}
+
+export const menu: Readable<MenuItem[]> = derived(
+  [admin, auth],
+  ([$admin, $auth]) => {
+    const user = $auth?.user
+    const isAdmin = user != null && sdk.users.isAdmin(user)
+    const isGlobalBuilder = user != null && sdk.users.isGlobalBuilder(user)
+    const cloud = $admin?.cloud
+
+    // Determine user sub pages
+    let userSubPages: MenuItem[] = [
+      {
+        title: "Users",
+        href: "/builder/portal/users/users",
+      },
+    ]
+    userSubPages.push({
+      title: "Groups",
+      href: "/builder/portal/users/groups",
+    })
+
+    // Pages that all devs and admins can access
+    let menu: MenuItem[] = [
+      {
+        title: "Apps",
+        href: "/builder/portal/apps",
+      },
+    ]
+    if (isGlobalBuilder) {
+      menu.push({
+        title: "Users",
+        href: "/builder/portal/users",
+        subPages: userSubPages,
+      })
+      menu.push({
+        title: "Plugins",
+        href: "/builder/portal/plugins",
+      })
+    }
+
+    // Add settings page for admins
+    if (isAdmin) {
+      let settingsSubPages: MenuItem[] = [
+        {
+          title: "Auth",
+          href: "/builder/portal/settings/auth",
+        },
+        {
+          title: "Email",
+          href: "/builder/portal/settings/email",
+        },
+        {
+          title: "Organisation",
+          href: "/builder/portal/settings/organisation",
+        },
+        {
+          title: "Branding",
+          href: "/builder/portal/settings/branding",
+        },
+        {
+          title: "Environment",
+          href: "/builder/portal/settings/environment",
+        },
+      ]
+      if (isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) {
+        settingsSubPages.push({
+          title: "AI",
+          href: "/builder/portal/settings/ai",
+        })
+      }
+
+      if (!cloud) {
+        settingsSubPages.push({
+          title: "Version",
+          href: "/builder/portal/settings/version",
+        })
+        settingsSubPages.push({
+          title: "Diagnostics",
+          href: "/builder/portal/settings/diagnostics",
+        })
+      }
+      menu.push({
+        title: "Settings",
+        href: "/builder/portal/settings",
+        subPages: [...settingsSubPages].sort((a, b) =>
+          a.title.localeCompare(b.title)
+        ),
+      })
+    }
+
+    // Add account page
+    let accountSubPages: MenuItem[] = [
+      {
+        title: "Usage",
+        href: "/builder/portal/account/usage",
+      },
+    ]
+    if (isAdmin) {
+      accountSubPages.push({
+        title: "Audit Logs",
+        href: "/builder/portal/account/auditLogs",
+      })
+
+      if (!cloud) {
+        accountSubPages.push({
+          title: "System Logs",
+          href: "/builder/portal/account/systemLogs",
+        })
+      }
+    }
+    if (cloud && user?.accountPortalAccess) {
+      accountSubPages.push({
+        title: "Upgrade",
+        href: $admin?.accountPortalUrl + "/portal/upgrade",
+      })
+    } else if (!cloud && isAdmin) {
+      accountSubPages.push({
+        title: "Upgrade",
+        href: "/builder/portal/account/upgrade",
+      })
+    }
+    // add license check here
+    if (user?.accountPortalAccess && user?.account?.stripeCustomerId) {
+      accountSubPages.push({
+        title: "Billing",
+        href: $admin?.accountPortalUrl + "/portal/billing",
+      })
+    }
+    menu.push({
+      title: "Account",
+      href: "/builder/portal/account",
+      subPages: accountSubPages,
+    })
+
+    return menu
+  }
+)

@@ -1,31 +0,0 @@
-import { writable, get } from "svelte/store"
-import { API } from "@/api"
-import { auth } from "@/stores/portal"
-
-const OIDC_CONFIG = {
-  logo: undefined,
-  name: undefined,
-  uuid: undefined,
-}
-
-export function createOidcStore() {
-  const store = writable(OIDC_CONFIG)
-  const { set, subscribe } = store
-  return {
-    subscribe,
-    set,
-    init: async () => {
-      const tenantId = get(auth).tenantId
-      const config = await API.getOIDCConfig(tenantId)
-      if (Object.keys(config || {}).length) {
-        // Just use the first config for now.
-        // We will be support multiple logins buttons later on.
-        set(...config)
-      } else {
-        set(OIDC_CONFIG)
-      }
-    },
-  }
-}
-
-export const oidc = createOidcStore()

@@ -0,0 +1,21 @@
+import { get } from "svelte/store"
+import { API } from "@/api"
+import { auth } from "@/stores/portal"
+import { BudiStore } from "../BudiStore"
+import { PublicOIDCConfig } from "@budibase/types"
+
+class OIDCStore extends BudiStore<PublicOIDCConfig> {
+  constructor() {
+    super({})
+  }
+
+  async init() {
+    const tenantId = get(auth).tenantId
+    const configs = await API.getOIDCConfigs(tenantId)
+    // Just use the first config for now.
+    // We will be support multiple logins buttons later on.
+    this.set(configs[0] || {})
+  }
+}
+
+export const oidc = new OIDCStore()

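Assuming `BudiStore` wraps a Svelte writable and exposes the standard store contract plus `set` (which the class bodies above rely on), consumers keep the same shape as the old factory store. A hedged usage sketch — the import path is illustrative:

```ts
import { oidc } from "@/stores/portal" // illustrative path

async function example() {
  const unsubscribe = oidc.subscribe(config => {
    // `config` is the first PublicOIDCConfig for the tenant, or {}.
    console.log("OIDC config", config)
  })
  await oidc.init() // fetches configs for the current tenant
  unsubscribe()
}
```
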
@@ -1,66 +0,0 @@
-import { writable, get } from "svelte/store"
-import { API } from "@/api"
-import { auth } from "@/stores/portal"
-import _ from "lodash"
-
-const DEFAULT_CONFIG = {
-  platformUrl: "",
-  logoUrl: undefined,
-  faviconUrl: undefined,
-  emailBrandingEnabled: true,
-  testimonialsEnabled: true,
-  platformTitle: "Budibase",
-  loginHeading: undefined,
-  loginButton: undefined,
-  metaDescription: undefined,
-  metaImageUrl: undefined,
-  metaTitle: undefined,
-  docsUrl: undefined,
-  company: "Budibase",
-  oidc: undefined,
-  google: undefined,
-  googleDatasourceConfigured: undefined,
-  oidcCallbackUrl: "",
-  googleCallbackUrl: "",
-  isSSOEnforced: false,
-  loaded: false,
-}
-
-export function createOrganisationStore() {
-  const store = writable(DEFAULT_CONFIG)
-  const { subscribe, set } = store
-
-  async function init() {
-    const tenantId = get(auth).tenantId
-    const settingsConfigDoc = await API.getTenantConfig(tenantId)
-    set({ ...DEFAULT_CONFIG, ...settingsConfigDoc.config, loaded: true })
-  }
-
-  async function save(config) {
-    // Delete non-persisted fields
-    const storeConfig = _.cloneDeep(get(store))
-    delete storeConfig.oidc
-    delete storeConfig.google
-    delete storeConfig.googleDatasourceConfigured
-    delete storeConfig.oidcCallbackUrl
-    delete storeConfig.googleCallbackUrl
-
-    // delete internal store field
-    delete storeConfig.loaded
-
-    await API.saveConfig({
-      type: "settings",
-      config: { ...storeConfig, ...config },
-    })
-    await init()
-  }
-
-  return {
-    subscribe,
-    set,
-    save,
-    init,
-  }
-}
-
-export const organisation = createOrganisationStore()

@@ -0,0 +1,71 @@
+import { get } from "svelte/store"
+import { API } from "@/api"
+import { auth } from "@/stores/portal"
+import {
+  ConfigType,
+  PublicSettingsInnerConfig,
+  SettingsBrandingConfig,
+  SettingsInnerConfig,
+} from "@budibase/types"
+import { BudiStore } from "../BudiStore"
+
+interface LocalOrganisationState {
+  loaded: boolean
+}
+
+type SavedOrganisationState = SettingsInnerConfig & SettingsBrandingConfig
+type OrganisationState = SavedOrganisationState &
+  PublicSettingsInnerConfig &
+  LocalOrganisationState
+
+const DEFAULT_STATE: OrganisationState = {
+  platformUrl: "",
+  emailBrandingEnabled: true,
+  testimonialsEnabled: true,
+  platformTitle: "Budibase",
+  company: "Budibase",
+  google: false,
+  googleDatasourceConfigured: false,
+  oidc: false,
+  oidcCallbackUrl: "",
+  googleCallbackUrl: "",
+  loaded: false,
+}
+
+class OrganisationStore extends BudiStore<OrganisationState> {
+  constructor() {
+    super(DEFAULT_STATE)
+  }
+
+  async init() {
+    const tenantId = get(auth).tenantId
+    const settingsConfigDoc = await API.getTenantConfig(tenantId)
+    this.set({ ...DEFAULT_STATE, ...settingsConfigDoc.config, loaded: true })
+  }
+
+  async save(changes: Partial<SavedOrganisationState>) {
+    // Strip non persisted fields
+    const {
+      oidc,
+      google,
+      googleDatasourceConfigured,
+      oidcCallbackUrl,
+      googleCallbackUrl,
+      loaded,
+      ...config
+    } = get(this.store)
+
+    // Save new config
+    const newConfig: SavedOrganisationState = {
+      ...config,
+      ...changes,
+    }
+    await API.saveConfig({
+      type: ConfigType.SETTINGS,
+      config: newConfig,
+    })
+    await this.init()
+  }
+}
+
+export const organisation = new OrganisationStore()

@ -3,7 +3,6 @@ import fs from "fs"
|
|||
import { join } from "path"
|
||||
import { TEMP_DIR, MINIO_DIR } from "./utils"
|
||||
import { progressBar } from "../utils"
|
||||
import * as stream from "node:stream"
|
||||
|
||||
const {
|
||||
ObjectStoreBuckets,
|
||||
|
@ -21,21 +20,15 @@ export async function exportObjects() {
|
|||
let fullList: any[] = []
|
||||
let errorCount = 0
|
||||
for (let bucket of bucketList) {
|
||||
const client = ObjectStore()
|
||||
const client = ObjectStore(bucket)
|
||||
try {
|
||||
await client.headBucket({
|
||||
Bucket: bucket,
|
||||
})
|
||||
await client.headBucket().promise()
|
||||
} catch (err) {
|
||||
errorCount++
|
||||
continue
|
||||
}
|
||||
const list = await client.listObjectsV2({
|
||||
Bucket: bucket,
|
||||
})
|
||||
fullList = fullList.concat(
|
||||
list.Contents?.map(el => ({ ...el, bucket })) || []
|
||||
)
|
||||
const list = (await client.listObjectsV2().promise()) as { Contents: any[] }
|
||||
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
|
||||
}
|
||||
if (errorCount === bucketList.length) {
|
||||
throw new Error("Unable to access MinIO/S3 - check environment config.")
|
||||
|
@ -50,13 +43,7 @@ export async function exportObjects() {
|
|||
const dirs = possiblePath.slice(0, possiblePath.length - 1)
|
||||
fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
|
||||
}
|
||||
if (data instanceof stream.Readable) {
|
||||
data.pipe(
|
||||
fs.createWriteStream(join(path, object.bucket, ...possiblePath))
|
||||
)
|
||||
} else {
|
||||
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
|
||||
}
|
||||
bar.update(++count)
|
||||
}
|
||||
bar.stop()
|
||||
|
@ -73,7 +60,7 @@ export async function importObjects() {
|
|||
const bar = progressBar(total)
|
||||
let count = 0
|
||||
for (let bucket of buckets) {
|
||||
const client = ObjectStore()
|
||||
const client = ObjectStore(bucket)
|
||||
await createBucketIfNotExists(client, bucket)
|
||||
const files = await uploadDirectory(bucket, join(path, bucket), "/")
|
||||
count += files.length
|
||||
|
|
|
@@ -1,12 +1,12 @@
 import { API } from "api"
-import TableFetch from "@budibase/frontend-core/src/fetch/TableFetch.js"
-import ViewFetch from "@budibase/frontend-core/src/fetch/ViewFetch.js"
-import QueryFetch from "@budibase/frontend-core/src/fetch/QueryFetch.js"
-import RelationshipFetch from "@budibase/frontend-core/src/fetch/RelationshipFetch.js"
-import NestedProviderFetch from "@budibase/frontend-core/src/fetch/NestedProviderFetch.js"
-import FieldFetch from "@budibase/frontend-core/src/fetch/FieldFetch.js"
-import JSONArrayFetch from "@budibase/frontend-core/src/fetch/JSONArrayFetch.js"
-import ViewV2Fetch from "@budibase/frontend-core/src/fetch/ViewV2Fetch.js"
+import TableFetch from "@budibase/frontend-core/src/fetch/TableFetch"
+import ViewFetch from "@budibase/frontend-core/src/fetch/ViewFetch"
+import QueryFetch from "@budibase/frontend-core/src/fetch/QueryFetch"
+import RelationshipFetch from "@budibase/frontend-core/src/fetch/RelationshipFetch"
+import NestedProviderFetch from "@budibase/frontend-core/src/fetch/NestedProviderFetch"
+import FieldFetch from "@budibase/frontend-core/src/fetch/FieldFetch"
+import JSONArrayFetch from "@budibase/frontend-core/src/fetch/JSONArrayFetch"
+import ViewV2Fetch from "@budibase/frontend-core/src/fetch/ViewV2Fetch"
 import QueryArrayFetch from "@budibase/frontend-core/src/fetch/QueryArrayFetch"
 
 /**

@ -16,7 +16,7 @@ import { BaseAPIClient } from "./types"
|
|||
export interface ConfigEndpoints {
|
||||
getConfig: (type: ConfigType) => Promise<FindConfigResponse>
|
||||
getTenantConfig: (tentantId: string) => Promise<GetPublicSettingsResponse>
|
||||
getOIDCConfig: (tenantId: string) => Promise<GetPublicOIDCConfigResponse>
|
||||
getOIDCConfigs: (tenantId: string) => Promise<GetPublicOIDCConfigResponse>
|
||||
getOIDCLogos: () => Promise<Config<OIDCLogosConfig>>
|
||||
saveConfig: (config: SaveConfigRequest) => Promise<SaveConfigResponse>
|
||||
deleteConfig: (id: string, rev: string) => Promise<DeleteConfigResponse>
|
||||
|
@ -73,7 +73,7 @@ export const buildConfigEndpoints = (API: BaseAPIClient): ConfigEndpoints => ({
|
|||
* Gets the OIDC config for a certain tenant.
|
||||
* @param tenantId the tenant ID to get the config for
|
||||
*/
|
||||
getOIDCConfig: async tenantId => {
|
||||
getOIDCConfigs: async tenantId => {
|
||||
return await API.get({
|
||||
url: `/api/global/configs/public/oidc?tenantId=${tenantId}`,
|
||||
})
|
||||
|
|
|
@ -3,7 +3,15 @@ import { BaseAPIClient } from "./types"
|
|||
|
||||
export interface ViewEndpoints {
|
||||
// Missing request or response types
|
||||
fetchViewData: (name: string, opts: any) => Promise<Row[]>
|
||||
fetchViewData: (
|
||||
name: string,
|
||||
opts: {
|
||||
calculation?: string
|
||||
field?: string
|
||||
groupBy?: string
|
||||
tableId: string
|
||||
}
|
||||
) => Promise<Row[]>
|
||||
exportView: (name: string, format: string) => Promise<any>
|
||||
saveView: (view: any) => Promise<any>
|
||||
deleteView: (name: string) => Promise<any>
|
||||
|
@ -20,7 +28,9 @@ export const buildViewEndpoints = (API: BaseAPIClient): ViewEndpoints => ({
|
|||
fetchViewData: async (name, { field, groupBy, calculation }) => {
|
||||
const params = new URLSearchParams()
|
||||
if (calculation) {
|
||||
if (field) {
|
||||
params.set("field", field)
|
||||
}
|
||||
params.set("calculation", calculation)
|
||||
}
|
||||
if (groupBy) {
|
||||
|
|
|
@@ -1,6 +1,7 @@
 import {
   CreateViewRequest,
   CreateViewResponse,
+  PaginatedSearchRowResponse,
   SearchRowResponse,
   SearchViewRowRequest,
   UpdateViewRequest,

@@ -13,10 +14,14 @@ export interface ViewV2Endpoints {
   fetchDefinition: (viewId: string) => Promise<ViewResponseEnriched>
   create: (view: CreateViewRequest) => Promise<CreateViewResponse>
   update: (view: UpdateViewRequest) => Promise<UpdateViewResponse>
-  fetch: (
+  fetch: <T extends SearchViewRowRequest>(
     viewId: string,
-    opts: SearchViewRowRequest
-  ) => Promise<SearchRowResponse>
+    opts: T
+  ) => Promise<
+    T extends { paginate: true }
+      ? PaginatedSearchRowResponse
+      : SearchRowResponse
+  >
   delete: (viewId: string) => Promise<void>
 }
 

@@ -59,7 +64,7 @@ export const buildViewV2Endpoints = (API: BaseAPIClient): ViewV2Endpoints => ({
    * @param viewId the id of the view
    * @param opts the search options
    */
-  fetch: async (viewId, opts) => {
+  fetch: async (viewId, opts: SearchViewRowRequest) => {
     return await API.post({
       url: `/api/v2/views/${encodeURIComponent(viewId)}/search`,
       body: opts,

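The generic `fetch` signature above makes the response type depend on the literal `paginate` flag at the call site. A sketch, assuming the other `SearchViewRowRequest` fields are optional (that type is defined elsewhere) and with a hypothetical view ID:

```ts
declare const views: ViewV2Endpoints // however the built client exposes it

async function example() {
  // T is inferred with `paginate: true` preserved as a literal, so the
  // conditional type resolves to PaginatedSearchRowResponse.
  const paged = await views.fetch("view_123", { paginate: true })

  // Without the flag, T falls through to SearchRowResponse.
  const plain = await views.fetch("view_123", {})
}
```
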
@ -69,7 +69,7 @@ export const deriveStores = (context: StoreContext): ConfigDerivedStore => {
|
|||
}
|
||||
|
||||
// Disable features for non DS+
|
||||
if (!["table", "viewV2"].includes(type)) {
|
||||
if (type && !["table", "viewV2"].includes(type)) {
|
||||
config.canAddRows = false
|
||||
config.canEditRows = false
|
||||
config.canDeleteRows = false
|
||||
|
|
|
@@ -1,3 +1,5 @@
+// TODO: datasource and defitions are unions of the different implementations. At this point, the datasource does not know what type is being used, and the assignations will cause TS exceptions. Casting it "as any" for now. This should be fixed improving the type usages.
+
 import { derived, get, Readable, Writable } from "svelte/store"
 import { getDatasourceDefinition, getDatasourceSchema } from "../../../fetch"
 import { enrichSchemaWithRelColumns, memo } from "../../../utils"

@@ -71,10 +73,10 @@ export const deriveStores = (context: StoreContext): DerivedDatasourceStore => {
   } = context
 
   const schema = derived(definition, $definition => {
-    let schema: Record<string, UIFieldSchema> = getDatasourceSchema({
+    const schema: Record<string, any> | undefined = getDatasourceSchema({
       API,
-      datasource: get(datasource),
-      definition: $definition,
+      datasource: get(datasource) as any, // TODO: see line 1
+      definition: $definition ?? undefined,
     })
     if (!schema) {
       return null

@@ -82,7 +84,7 @@ export const deriveStores = (context: StoreContext): DerivedDatasourceStore => {
 
     // Ensure schema is configured as objects.
     // Certain datasources like queries use primitives.
-    Object.keys(schema || {}).forEach(key => {
+    Object.keys(schema).forEach(key => {
       if (typeof schema[key] !== "object") {
         schema[key] = { name: key, type: schema[key] }
       }

@@ -130,13 +132,13 @@ export const deriveStores = (context: StoreContext): DerivedDatasourceStore => {
     ([$datasource, $definition]) => {
       let type = $datasource?.type
       if (type === "provider") {
-        type = ($datasource as any).value?.datasource?.type
+        type = ($datasource as any).value?.datasource?.type // TODO: see line 1
       }
       // Handle calculation views
       if (type === "viewV2" && $definition?.type === ViewV2Type.CALCULATION) {
         return false
       }
-      return ["table", "viewV2", "link"].includes(type)
+      return !!type && ["table", "viewV2", "link"].includes(type)
     }
   )
 

@@ -184,9 +186,9 @@ export const createActions = (context: StoreContext): ActionDatasourceStore => {
   const refreshDefinition = async () => {
     const def = await getDatasourceDefinition({
       API,
-      datasource: get(datasource),
+      datasource: get(datasource) as any, // TODO: see line 1
     })
-    definition.set(def)
+    definition.set(def as any) // TODO: see line 1
   }
 
   // Saves the datasource definition

@@ -231,7 +233,7 @@ export const createActions = (context: StoreContext): ActionDatasourceStore => {
     if ("default" in newDefinition.schema[column]) {
       delete newDefinition.schema[column].default
     }
-    return await saveDefinition(newDefinition as any)
+    return await saveDefinition(newDefinition as any) // TODO: see line 1
   }
 
   // Adds a schema mutation for a single field

@@ -307,7 +309,7 @@ export const createActions = (context: StoreContext): ActionDatasourceStore => {
     await saveDefinition({
       ...$definition,
       schema: newSchema,
-    } as any)
+    } as any) // TODO: see line 1
     resetSchemaMutations()
   }
 

@@ -10,9 +10,10 @@ import {
 import { tick } from "svelte"
 import { Helpers } from "@budibase/bbui"
 import { sleep } from "../../../utils/utils"
-import { FieldType, Row, UIFetchAPI, UIRow } from "@budibase/types"
+import { FieldType, Row, UIRow } from "@budibase/types"
 import { getRelatedTableValues } from "../../../utils"
 import { Store as StoreContext } from "."
+import DataFetch from "../../../fetch/DataFetch"
 
 interface IndexedUIRow extends UIRow {
   __idx: number

@@ -20,7 +21,7 @@ interface IndexedUIRow extends UIRow {
 
 interface RowStore {
   rows: Writable<UIRow[]>
-  fetch: Writable<UIFetchAPI | null>
+  fetch: Writable<DataFetch<any, any, any> | null> // TODO: type this properly, having a union of all the possible options
   loaded: Writable<boolean>
   refreshing: Writable<boolean>
   loading: Writable<boolean>

@@ -225,7 +226,7 @@ export const createActions = (context: StoreContext): RowActionStore => {
   })
 
   // Subscribe to changes of this fetch model
-  unsubscribe = newFetch.subscribe(async ($fetch: UIFetchAPI) => {
+  unsubscribe = newFetch.subscribe(async $fetch => {
     if ($fetch.error) {
       // Present a helpful error to the user
       let message = "An unknown error occurred"

@@ -253,7 +254,7 @@ export const createActions = (context: StoreContext): RowActionStore => {
 
     // Reset state properties when dataset changes
     if (!$instanceLoaded || resetRows) {
-      definition.set($fetch.definition)
+      definition.set($fetch.definition as any) // TODO: datasource and defitions are unions of the different implementations. At this point, the datasource does not know what type is being used, and the assignations will cause TS exceptions. Casting it "as any" for now. This should be fixed improving the type usages.
     }
 
     // Reset scroll state when data changes

@@ -32,8 +32,8 @@ export const Cookies = {
 }

 // Table names
-export const TableNames = {
-  USERS: "ta_users",
+export const enum TableNames {
+  USERS = "ta_users",
 }

 export const BudibaseRoles = {
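Reviewer note on the `TableNames` change above: moving from a plain object to a `const enum` is not purely cosmetic, since const enum members are inlined at compile time and the enum itself is erased, leaving no runtime object to iterate or spread. A minimal sketch of the behavioural difference (the consumer code here is hypothetical):

```ts
// Hypothetical consumer of the constant above
const enum TableNames {
  USERS = "ta_users",
}

// Member access compiles down to the literal "ta_users" - no runtime lookup
const tableId: string = TableNames.USERS

// Caveat: unlike the old object form, there is no runtime value to enumerate,
// so patterns like Object.values(TableNames) no longer work
```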
@@ -1,8 +1,17 @@
-import DataFetch from "./DataFetch.js"
+import DataFetch from "./DataFetch"

-export default class CustomFetch extends DataFetch {
+interface CustomDatasource {
+  data: any
+}
+
+type CustomDefinition = Record<string, any>
+
+export default class CustomFetch extends DataFetch<
+  CustomDatasource,
+  CustomDefinition
+> {
   // Gets the correct Budibase type for a JS value
-  getType(value) {
+  getType(value: any) {
     if (value == null) {
       return "string"
     }
@@ -22,7 +31,7 @@ export default class CustomFetch extends DataFetch {
   }

   // Parses the custom data into an array format
-  parseCustomData(data) {
+  parseCustomData(data: any) {
     if (!data) {
       return []
     }
@@ -55,7 +64,7 @@ export default class CustomFetch extends DataFetch {
   }

   // Enriches the custom data to ensure the structure and format is usable
-  enrichCustomData(data) {
+  enrichCustomData(data: (string | any)[]) {
     if (!data?.length) {
       return []
     }
@@ -72,7 +81,7 @@ export default class CustomFetch extends DataFetch {
       // Try parsing strings
       if (typeof value === "string") {
         const split = value.split(",").map(x => x.trim())
-        let obj = {}
+        const obj: Record<string, string> = {}
         for (let i = 0; i < split.length; i++) {
           const suffix = i === 0 ? "" : ` ${i + 1}`
           const key = `Value${suffix}`
@@ -87,27 +96,29 @@ export default class CustomFetch extends DataFetch {
   }

   // Extracts and parses the custom data from the datasource definition
-  getCustomData(datasource) {
+  getCustomData(datasource: CustomDatasource) {
     return this.enrichCustomData(this.parseCustomData(datasource?.data))
   }

-  async getDefinition(datasource) {
+  async getDefinition() {
+    const { datasource } = this.options
+
     // Try and work out the schema from the array provided
-    let schema = {}
+    const schema: CustomDefinition = {}
     const data = this.getCustomData(datasource)
     if (!data?.length) {
       return { schema }
     }

     // Go through every object and extract all valid keys
-    for (let datum of data) {
-      for (let key of Object.keys(datum)) {
+    for (const datum of data) {
+      for (const key of Object.keys(datum)) {
         if (key === "_id") {
           continue
         }
         if (!schema[key]) {
           let type = this.getType(datum[key])
-          let constraints = {}
+          const constraints: any = {}

           // Determine whether we should render text columns as options instead
           if (type === "string") {
@@ -1,25 +1,102 @@
-import { writable, derived, get } from "svelte/store"
+import { writable, derived, get, Writable, Readable } from "svelte/store"
 import { cloneDeep } from "lodash/fp"
 import { QueryUtils } from "../utils"
 import { convertJSONSchemaToTableSchema } from "../utils/json"
-import { FieldType, SortOrder, SortType } from "@budibase/types"
+import {
+  FieldType,
+  LegacyFilter,
+  Row,
+  SearchFilters,
+  SortOrder,
+  SortType,
+  TableSchema,
+  UISearchFilter,
+} from "@budibase/types"
+import { APIClient } from "../api/types"

 const { buildQuery, limit: queryLimit, runQuery, sort } = QueryUtils

+interface DataFetchStore<TDefinition, TQuery> {
+  rows: Row[]
+  info: any
+  schema: TableSchema | null
+  loading: boolean
+  loaded: boolean
+  query: TQuery
+  pageNumber: number
+  cursor: string | null
+  cursors: string[]
+  resetKey: string
+  error: {
+    message: string
+    status: number
+  } | null
+  definition?: TDefinition | null
+}
+
+interface DataFetchDerivedStore<TDefinition, TQuery>
+  extends DataFetchStore<TDefinition, TQuery> {
+  hasNextPage: boolean
+  hasPrevPage: boolean
+  supportsSearch: boolean
+  supportsSort: boolean
+  supportsPagination: boolean
+}
+
+export interface DataFetchParams<
+  TDatasource,
+  TQuery = SearchFilters | undefined
+> {
+  API: APIClient
+  datasource: TDatasource
+  query: TQuery
+  options?: {}
+}
+
 /**
  * Parent class which handles the implementation of fetching data from an
  * internal table or datasource plus.
  * For other types of datasource, this class is overridden and extended.
  */
-export default class DataFetch {
+export default abstract class DataFetch<
+  TDatasource extends {},
+  TDefinition extends {
+    schema?: Record<string, any> | null
+    primaryDisplay?: string
+  },
+  TQuery extends {} = SearchFilters
+> {
+  API: APIClient
+  features: {
+    supportsSearch: boolean
+    supportsSort: boolean
+    supportsPagination: boolean
+  }
+  options: {
+    datasource: TDatasource
+    limit: number
+    // Search config
+    filter: UISearchFilter | LegacyFilter[] | null
+    query: TQuery
+    // Sorting config
+    sortColumn: string | null
+    sortOrder: SortOrder
+    sortType: SortType | null
+    // Pagination config
+    paginate: boolean
+    // Client side feature customisation
+    clientSideSearching: boolean
+    clientSideSorting: boolean
+    clientSideLimiting: boolean
+  }
+  store: Writable<DataFetchStore<TDefinition, TQuery>>
+  derivedStore: Readable<DataFetchDerivedStore<TDefinition, TQuery>>
+
   /**
    * Constructs a new DataFetch instance.
    * @param opts the fetch options
    */
-  constructor(opts) {
-    // API client
-    this.API = null
-
+  constructor(opts: DataFetchParams<TDatasource, TQuery>) {
     // Feature flags
     this.features = {
       supportsSearch: false,
@@ -29,12 +106,12 @@ export default class DataFetch {

     // Config
     this.options = {
-      datasource: null,
+      datasource: opts.datasource,
       limit: 10,

       // Search config
       filter: null,
-      query: null,
+      query: opts.query,

       // Sorting config
       sortColumn: null,
@@ -57,11 +134,11 @@ export default class DataFetch {
       schema: null,
       loading: false,
       loaded: false,
-      query: null,
+      query: opts.query,
       pageNumber: 0,
       cursor: null,
       cursors: [],
-      resetKey: Math.random(),
+      resetKey: Math.random().toString(),
       error: null,
     })
@@ -118,7 +195,10 @@ export default class DataFetch {
   /**
    * Gets the default sort column for this datasource
    */
-  getDefaultSortColumn(definition, schema) {
+  getDefaultSortColumn(
+    definition: { primaryDisplay?: string } | null,
+    schema: Record<string, any>
+  ): string | null {
     if (definition?.primaryDisplay && schema[definition.primaryDisplay]) {
       return definition.primaryDisplay
     } else {
@@ -130,13 +210,13 @@ export default class DataFetch {
    * Fetches a fresh set of data from the server, resetting pagination
    */
   async getInitialData() {
-    const { datasource, filter, paginate } = this.options
+    const { filter, paginate } = this.options

     // Fetch datasource definition and extract sort properties if configured
-    const definition = await this.getDefinition(datasource)
+    const definition = await this.getDefinition()

     // Determine feature flags
-    const features = this.determineFeatureFlags(definition)
+    const features = await this.determineFeatureFlags()
     this.features = {
       supportsSearch: !!features?.supportsSearch,
       supportsSort: !!features?.supportsSort,
@@ -144,11 +224,11 @@ export default class DataFetch {
     }

     // Fetch and enrich schema
-    let schema = this.getSchema(datasource, definition)
-    schema = this.enrichSchema(schema)
+    let schema = this.getSchema(definition)
+    if (!schema) {
+      return
+    }
+    schema = this.enrichSchema(schema)

     // If an invalid sort column is specified, delete it
     if (this.options.sortColumn && !schema[this.options.sortColumn]) {
@@ -172,20 +252,25 @@ export default class DataFetch {
       if (
         fieldSchema?.type === FieldType.NUMBER ||
         fieldSchema?.type === FieldType.BIGINT ||
-        fieldSchema?.calculationType
+        ("calculationType" in fieldSchema && fieldSchema?.calculationType)
       ) {
         this.options.sortType = SortType.NUMBER
       }

       // If no sort order, default to ascending
       if (!this.options.sortOrder) {
         this.options.sortOrder = SortOrder.ASCENDING
+      } else {
+        // Ensure sortOrder matches the enum
+        this.options.sortOrder =
+          this.options.sortOrder.toLowerCase() as SortOrder
       }
     }

     // Build the query
     let query = this.options.query
     if (!query) {
-      query = buildQuery(filter)
+      query = buildQuery(filter ?? undefined) as TQuery
     }

     // Update store
@@ -210,7 +295,7 @@ export default class DataFetch {
       info: page.info,
       cursors: paginate && page.hasNextPage ? [null, page.cursor] : [null],
       error: page.error,
-      resetKey: Math.random(),
+      resetKey: Math.random().toString(),
     }))
   }
@@ -238,8 +323,8 @@ export default class DataFetch {
     }

     // If we don't support sorting, do a client-side sort
-    if (!this.features.supportsSort && clientSideSorting) {
-      rows = sort(rows, sortColumn, sortOrder, sortType)
+    if (!this.features.supportsSort && clientSideSorting && sortType) {
+      rows = sort(rows, sortColumn as any, sortOrder, sortType)
     }

     // If we don't support pagination, do a client-side limit
@@ -256,49 +341,28 @@ export default class DataFetch {
     }
   }

   /**
    * Fetches a single page of data from the remote resource.
    * Must be overridden by a datasource specific child class.
    */
-  async getData() {
-    return {
-      rows: [],
-      info: null,
-      hasNextPage: false,
-      cursor: null,
-    }
-  }
+  abstract getData(): Promise<{
+    rows: Row[]
+    info?: any
+    hasNextPage?: boolean
+    cursor?: any
+    error?: any
+  }>

   /**
    * Gets the definition for this datasource.
-   * Defaults to fetching a table definition.
-   * @param datasource
    * @return {object} the definition
    */
-  async getDefinition(datasource) {
-    if (!datasource?.tableId) {
-      return null
-    }
-    try {
-      return await this.API.fetchTableDefinition(datasource.tableId)
-    } catch (error) {
-      this.store.update(state => ({
-        ...state,
-        error,
-      }))
-      return null
-    }
-  }
+  abstract getDefinition(): Promise<TDefinition | null>

   /**
    * Gets the schema definition for a datasource.
    * Defaults to getting the "schema" property of the definition.
-   * @param datasource the datasource
    * @param definition the datasource definition
    * @return {object} the schema
    */
-  getSchema(datasource, definition) {
-    return definition?.schema
+  getSchema(definition: TDefinition | null): Record<string, any> | undefined {
+    return definition?.schema ?? undefined
   }

   /**
@@ -307,32 +371,30 @@ export default class DataFetch {
    * @param schema the datasource schema
    * @return {object} the enriched datasource schema
    */
-  enrichSchema(schema) {
-    if (schema == null) {
-      return null
-    }
-
+  private enrichSchema(schema: TableSchema): TableSchema {
     // Check for any JSON fields so we can add any top level properties
-    let jsonAdditions = {}
-    Object.keys(schema).forEach(fieldKey => {
+    let jsonAdditions: Record<string, { type: string; nestedJSON: true }> = {}
+    for (const fieldKey of Object.keys(schema)) {
       const fieldSchema = schema[fieldKey]
-      if (fieldSchema?.type === FieldType.JSON) {
+      if (fieldSchema.type === FieldType.JSON) {
         const jsonSchema = convertJSONSchemaToTableSchema(fieldSchema, {
           squashObjects: true,
-        })
-        Object.keys(jsonSchema).forEach(jsonKey => {
-          jsonAdditions[`${fieldKey}.${jsonKey}`] = {
-            type: jsonSchema[jsonKey].type,
-            nestedJSON: true,
+        }) as Record<string, { type: string }> | null // TODO: remove when convertJSONSchemaToTableSchema is typed
+        if (jsonSchema) {
+          for (const jsonKey of Object.keys(jsonSchema)) {
+            jsonAdditions[`${fieldKey}.${jsonKey}`] = {
+              type: jsonSchema[jsonKey].type,
+              nestedJSON: true,
+            }
           }
-        })
-        schema = { ...schema, ...jsonAdditions }
+        }
       }
     }

     // Ensure schema is in the correct structure
-    let enrichedSchema = {}
-    Object.entries(schema).forEach(([fieldName, fieldSchema]) => {
-      if (typeof fieldSchema === "string") {
-        enrichedSchema[fieldName] = {
-          type: fieldSchema,
+    let enrichedSchema: TableSchema = {}
+    Object.entries({ ...schema, ...jsonAdditions }).forEach(
+      ([fieldName, fieldSchema]) => {
+        if (typeof fieldSchema === "string") {
+          enrichedSchema[fieldName] = {
+            type: fieldSchema,
@@ -341,19 +403,24 @@ export default class DataFetch {
         } else {
           enrichedSchema[fieldName] = {
             ...fieldSchema,
+            type: fieldSchema.type as any, // TODO: check type union definition conflicts
             name: fieldName,
           }
         }
-    })
+      }
+    )

     return enrichedSchema
   }

   /**
-   * Determine the feature flag for this datasource definition
-   * @param definition
+   * Determine the feature flag for this datasource
    */
-  determineFeatureFlags(_definition) {
+  async determineFeatureFlags(): Promise<{
+    supportsPagination: boolean
+    supportsSearch?: boolean
+    supportsSort?: boolean
+  }> {
     return {
       supportsSearch: false,
       supportsSort: false,
@@ -365,12 +432,11 @@ export default class DataFetch {
    * Resets the data set and updates options
    * @param newOptions any new options
    */
-  async update(newOptions) {
+  async update(newOptions: any) {
     // Check if any settings have actually changed
     let refresh = false
-    const entries = Object.entries(newOptions || {})
-    for (let [key, value] of entries) {
-      const oldVal = this.options[key] == null ? null : this.options[key]
+    for (const [key, value] of Object.entries(newOptions || {})) {
+      const oldVal = this.options[key as keyof typeof this.options] ?? null
       const newVal = value == null ? null : value
       if (JSON.stringify(newVal) !== JSON.stringify(oldVal)) {
         refresh = true
@@ -437,7 +503,7 @@ export default class DataFetch {
    * @param state the current store state
    * @return {boolean} whether there is a next page of data or not
    */
-  hasNextPage(state) {
+  private hasNextPage(state: DataFetchStore<TDefinition, TQuery>): boolean {
     return state.cursors[state.pageNumber + 1] != null
   }
@@ -447,7 +513,7 @@ export default class DataFetch {
    * @param state the current store state
    * @return {boolean} whether there is a previous page of data or not
    */
-  hasPrevPage(state) {
+  private hasPrevPage(state: { pageNumber: number }): boolean {
     return state.pageNumber > 0
   }
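To make the new abstract contract concrete, here is a minimal sketch of what a subclass must now provide: `getDefinition` and `getData` are abstract, and the three generic parameters pin down the datasource, definition, and query shapes. The `MemoryFetch` class and its in-memory datasource are hypothetical, for illustration only:

```ts
import { Row, SearchFilters } from "@budibase/types"
import DataFetch from "./DataFetch"

// Hypothetical shapes, not part of this commit
interface MemoryDatasource {
  rows: Row[]
}
interface MemoryDefinition {
  schema?: Record<string, any> | null
}

export default class MemoryFetch extends DataFetch<
  MemoryDatasource,
  MemoryDefinition,
  SearchFilters
> {
  // Both abstract members must be implemented - the compiler now enforces
  // what the old base class handled with silent empty defaults
  async getDefinition(): Promise<MemoryDefinition | null> {
    return { schema: {} }
  }

  async getData() {
    const { datasource } = this.options
    return {
      rows: datasource?.rows || [],
      hasNextPage: false,
      cursor: null,
    }
  }
}
```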
@@ -1,7 +1,27 @@
-import DataFetch from "./DataFetch.js"
+import { Row } from "@budibase/types"
+import DataFetch from "./DataFetch"

-export default class FieldFetch extends DataFetch {
-  async getDefinition(datasource) {
+export interface FieldDatasource {
+  tableId: string
+  fieldType: "attachment" | "array"
+  value: string[] | Row[]
+}
+
+export interface FieldDefinition {
+  schema?: Record<string, { type: string }> | null
+}
+
+function isArrayOfStrings(value: string[] | Row[]): value is string[] {
+  return Array.isArray(value) && !!value[0] && typeof value[0] !== "object"
+}
+
+export default class FieldFetch extends DataFetch<
+  FieldDatasource,
+  FieldDefinition
+> {
+  async getDefinition(): Promise<FieldDefinition | null> {
+    const { datasource } = this.options
+
     // Field sources have their schema statically defined
     let schema
     if (datasource.fieldType === "attachment") {
@@ -28,8 +48,8 @@ export default class FieldFetch extends DataFetch {

     // These sources will be available directly from context
     const data = datasource?.value || []
-    let rows
-    if (Array.isArray(data) && data[0] && typeof data[0] !== "object") {
+    let rows: Row[]
+    if (isArrayOfStrings(data)) {
       rows = data.map(value => ({ value }))
     } else {
       rows = data
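`isArrayOfStrings` is a user-defined type guard: its `value is string[]` return type lets the compiler narrow the `string[] | Row[]` union inside the branch, which is what makes `data.map(value => ({ value }))` type-check. The same pattern in isolation, with a simplified `Row` type:

```ts
type Row = Record<string, any>

function isArrayOfStrings(value: string[] | Row[]): value is string[] {
  return Array.isArray(value) && !!value[0] && typeof value[0] !== "object"
}

const data: string[] | Row[] = ["a", "b"]
if (isArrayOfStrings(data)) {
  // data is narrowed to string[] here, so wrapping entries is safe
  const rows: Row[] = data.map(value => ({ value }))
}
// Note: an empty array fails the guard and falls through to the Row[]
// branch, which is harmless since there is nothing to wrap
```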
@@ -1,9 +1,22 @@
 import { get } from "svelte/store"
-import DataFetch from "./DataFetch.js"
+import DataFetch, { DataFetchParams } from "./DataFetch"
 import { TableNames } from "../constants"

-export default class GroupUserFetch extends DataFetch {
-  constructor(opts) {
+interface GroupUserQuery {
+  groupId: string
+  emailSearch: string
+}
+
+interface GroupUserDatasource {
+  tableId: TableNames.USERS
+}
+
+export default class GroupUserFetch extends DataFetch<
+  GroupUserDatasource,
+  {},
+  GroupUserQuery
+> {
+  constructor(opts: DataFetchParams<GroupUserDatasource, GroupUserQuery>) {
     super({
       ...opts,
       datasource: {
@@ -12,7 +25,7 @@ export default class GroupUserFetch extends DataFetch {
     })
   }

-  determineFeatureFlags() {
+  async determineFeatureFlags() {
     return {
       supportsSearch: true,
       supportsSort: false,
@@ -28,11 +41,12 @@ export default class GroupUserFetch extends DataFetch {

   async getData() {
     const { query, cursor } = get(this.store)

     try {
       const res = await this.API.getGroupUsers({
         id: query.groupId,
         emailSearch: query.emailSearch,
-        bookmark: cursor,
+        bookmark: cursor ?? undefined,
       })

       return {
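With the constructor typed via `DataFetchParams`, call sites are checked end to end. A hedged sketch of constructing the fetch (the `API` instance and the group id are assumed to come from the calling context):

```ts
import GroupUserFetch from "./GroupUserFetch"
import { TableNames } from "../constants"
import { APIClient } from "../api/types"

declare const API: APIClient // assumed to be provided by the client runtime

const groupUsers = new GroupUserFetch({
  API,
  // the constructor's super() call pins the datasource to the users table
  datasource: { tableId: TableNames.USERS },
  // the query shape is now checked against GroupUserQuery
  query: { groupId: "group_123", emailSearch: "" }, // placeholder group id
})
```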
@@ -1,8 +1,10 @@
-import FieldFetch from "./FieldFetch.js"
+import FieldFetch from "./FieldFetch"
 import { getJSONArrayDatasourceSchema } from "../utils/json"

 export default class JSONArrayFetch extends FieldFetch {
-  async getDefinition(datasource) {
+  async getDefinition() {
+    const { datasource } = this.options
+
     // JSON arrays need their table definitions fetched.
     // We can then extract their schema as a subset of the table schema.
     try {
@@ -1,21 +0,0 @@
-import DataFetch from "./DataFetch.js"
-
-export default class NestedProviderFetch extends DataFetch {
-  async getDefinition(datasource) {
-    // Nested providers should already have exposed their own schema
-    return {
-      schema: datasource?.value?.schema,
-      primaryDisplay: datasource?.value?.primaryDisplay,
-    }
-  }
-
-  async getData() {
-    const { datasource } = this.options
-    // Pull the rows from the existing data provider
-    return {
-      rows: datasource?.value?.rows || [],
-      hasNextPage: false,
-      cursor: null,
-    }
-  }
-}
@@ -0,0 +1,39 @@
+import { Row, TableSchema } from "@budibase/types"
+import DataFetch from "./DataFetch"
+
+interface NestedProviderDatasource {
+  value?: {
+    schema: TableSchema
+    primaryDisplay: string
+    rows: Row[]
+  }
+}
+
+interface NestedProviderDefinition {
+  schema?: TableSchema
+  primaryDisplay?: string
+}
+export default class NestedProviderFetch extends DataFetch<
+  NestedProviderDatasource,
+  NestedProviderDefinition
+> {
+  async getDefinition() {
+    const { datasource } = this.options
+
+    // Nested providers should already have exposed their own schema
+    return {
+      schema: datasource?.value?.schema,
+      primaryDisplay: datasource?.value?.primaryDisplay,
+    }
+  }
+
+  async getData() {
+    const { datasource } = this.options
+    // Pull the rows from the existing data provider
+    return {
+      rows: datasource?.value?.rows || [],
+      hasNextPage: false,
+      cursor: null,
+    }
+  }
+}
@@ -1,11 +1,13 @@
-import FieldFetch from "./FieldFetch.js"
+import FieldFetch from "./FieldFetch"
 import {
   getJSONArrayDatasourceSchema,
   generateQueryArraySchemas,
 } from "../utils/json"

 export default class QueryArrayFetch extends FieldFetch {
-  async getDefinition(datasource) {
+  async getDefinition() {
+    const { datasource } = this.options
+
     if (!datasource?.tableId) {
       return null
     }
@@ -14,10 +16,14 @@ export default class QueryArrayFetch extends FieldFetch {
     try {
       const table = await this.API.fetchQueryDefinition(datasource.tableId)
       const schema = generateQueryArraySchemas(
-        table?.schema,
-        table?.nestedSchemaFields
+        table.schema,
+        table.nestedSchemaFields
       )
-      return { schema: getJSONArrayDatasourceSchema(schema, datasource) }
+      const result = {
+        schema: getJSONArrayDatasourceSchema(schema, datasource),
+      }
+
+      return result
     } catch (error) {
       return null
     }
@@ -1,9 +1,24 @@
-import DataFetch from "./DataFetch.js"
+import DataFetch from "./DataFetch"
 import { Helpers } from "@budibase/bbui"
+import { ExecuteQueryRequest, Query } from "@budibase/types"
 import { get } from "svelte/store"

-export default class QueryFetch extends DataFetch {
-  determineFeatureFlags(definition) {
+interface QueryDatasource {
+  _id: string
+  fields: Record<string, any> & {
+    pagination?: {
+      type: string
+      location: string
+      pageParam: string
+    }
+  }
+  queryParams?: Record<string, string>
+  parameters: { name: string; default: string }[]
+}
+
+export default class QueryFetch extends DataFetch<QueryDatasource, Query> {
+  async determineFeatureFlags() {
+    const definition = await this.getDefinition()
     const supportsPagination =
       !!definition?.fields?.pagination?.type &&
       !!definition?.fields?.pagination?.location &&
@@ -11,7 +26,9 @@ export default class QueryFetch extends DataFetch {
     return { supportsPagination }
   }

-  async getDefinition(datasource) {
+  async getDefinition() {
+    const { datasource } = this.options
+
     if (!datasource?._id) {
       return null
     }
@@ -40,17 +57,17 @@ export default class QueryFetch extends DataFetch {
     const type = definition?.fields?.pagination?.type

     // Set the default query params
-    let parameters = Helpers.cloneDeep(datasource?.queryParams || {})
-    for (let param of datasource?.parameters || {}) {
+    const parameters = Helpers.cloneDeep(datasource.queryParams || {})
+    for (const param of datasource?.parameters || []) {
       if (!parameters[param.name]) {
         parameters[param.name] = param.default
       }
     }

     // Add pagination to query if supported
-    let queryPayload = { parameters }
+    const queryPayload: ExecuteQueryRequest = { parameters }
     if (paginate && supportsPagination) {
-      const requestCursor = type === "page" ? parseInt(cursor || 1) : cursor
+      const requestCursor = type === "page" ? parseInt(cursor || "1") : cursor
       queryPayload.pagination = { page: requestCursor, limit }
     }

@@ -65,7 +82,7 @@ export default class QueryFetch extends DataFetch {
     if (paginate && supportsPagination) {
       if (type === "page") {
         // For "page number" pagination, increment the existing page number
-        nextCursor = queryPayload.pagination.page + 1
+        nextCursor = queryPayload.pagination!.page! + 1
         hasNextPage = data?.length === limit && limit > 0
       } else {
         // For "cursor" pagination, the cursor should be in the response
@@ -1,20 +0,0 @@
-import DataFetch from "./DataFetch.js"
-
-export default class RelationshipFetch extends DataFetch {
-  async getData() {
-    const { datasource } = this.options
-    if (!datasource?.rowId || !datasource?.rowTableId) {
-      return { rows: [] }
-    }
-    try {
-      const res = await this.API.fetchRelationshipData(
-        datasource.rowTableId,
-        datasource.rowId,
-        datasource.fieldName
-      )
-      return { rows: res }
-    } catch (error) {
-      return { rows: [] }
-    }
-  }
-}
@@ -0,0 +1,48 @@
+import { Table } from "@budibase/types"
+import DataFetch from "./DataFetch"
+
+interface RelationshipDatasource {
+  tableId: string
+  rowId: string
+  rowTableId: string
+  fieldName: string
+}
+
+export default class RelationshipFetch extends DataFetch<
+  RelationshipDatasource,
+  Table
+> {
+  async getDefinition() {
+    const { datasource } = this.options
+
+    if (!datasource?.tableId) {
+      return null
+    }
+    try {
+      return await this.API.fetchTableDefinition(datasource.tableId)
+    } catch (error: any) {
+      this.store.update(state => ({
+        ...state,
+        error,
+      }))
+      return null
+    }
+  }
+
+  async getData() {
+    const { datasource } = this.options
+    if (!datasource?.rowId || !datasource?.rowTableId) {
+      return { rows: [] }
+    }
+    try {
+      const res = await this.API.fetchRelationshipData(
+        datasource.rowTableId,
+        datasource.rowId,
+        datasource.fieldName
+      )
+      return { rows: res }
+    } catch (error) {
+      return { rows: [] }
+    }
+  }
+}
@@ -1,9 +1,9 @@
 import { get } from "svelte/store"
-import DataFetch from "./DataFetch.js"
-import { SortOrder } from "@budibase/types"
+import DataFetch from "./DataFetch"
+import { SortOrder, Table, UITable } from "@budibase/types"

-export default class TableFetch extends DataFetch {
-  determineFeatureFlags() {
+export default class TableFetch extends DataFetch<UITable, Table> {
+  async determineFeatureFlags() {
     return {
       supportsSearch: true,
       supportsSort: true,
@@ -11,6 +11,23 @@ export default class TableFetch extends DataFetch {
     }
   }

+  async getDefinition() {
+    const { datasource } = this.options
+
+    if (!datasource?.tableId) {
+      return null
+    }
+    try {
+      return await this.API.fetchTableDefinition(datasource.tableId)
+    } catch (error: any) {
+      this.store.update(state => ({
+        ...state,
+        error,
+      }))
+      return null
+    }
+  }
+
   async getData() {
     const { datasource, limit, sortColumn, sortOrder, sortType, paginate } =
       this.options
@@ -23,7 +40,7 @@ export default class TableFetch extends DataFetch {
       query,
       limit,
       sort: sortColumn,
-      sortOrder: sortOrder?.toLowerCase() ?? SortOrder.ASCENDING,
+      sortOrder: sortOrder ?? SortOrder.ASCENDING,
       sortType,
       paginate,
       bookmark: cursor,
@@ -1,10 +1,28 @@
 import { get } from "svelte/store"
-import DataFetch from "./DataFetch.js"
+import DataFetch, { DataFetchParams } from "./DataFetch"
 import { TableNames } from "../constants"
 import { utils } from "@budibase/shared-core"
+import {
+  BasicOperator,
+  SearchFilters,
+  SearchUsersRequest,
+} from "@budibase/types"

-export default class UserFetch extends DataFetch {
-  constructor(opts) {
+interface UserFetchQuery {
+  appId: string
+  paginated: boolean
+}
+
+interface UserDatasource {
+  tableId: string
+}
+
+export default class UserFetch extends DataFetch<
+  UserDatasource,
+  {},
+  UserFetchQuery
+> {
+  constructor(opts: DataFetchParams<UserDatasource, UserFetchQuery>) {
     super({
       ...opts,
       datasource: {
@@ -13,7 +31,7 @@ export default class UserFetch extends DataFetch {
     })
   }

-  determineFeatureFlags() {
+  async determineFeatureFlags() {
     return {
       supportsSearch: true,
       supportsSort: false,
@@ -22,9 +40,7 @@ export default class UserFetch extends DataFetch {
   }

   async getDefinition() {
-    return {
-      schema: {},
-    }
+    return { schema: {} }
   }

   async getData() {
@@ -32,15 +48,16 @@ export default class UserFetch extends DataFetch {
     const { cursor, query } = get(this.store)

     // Convert old format to new one - we now allow use of the lucene format
-    const { appId, paginated, ...rest } = query || {}
-    const finalQuery = utils.isSupportedUserSearch(rest)
-      ? query
-      : { string: { email: null } }
+    const { appId, paginated, ...rest } = query
+
+    const finalQuery: SearchFilters = utils.isSupportedUserSearch(rest)
+      ? rest
+      : { [BasicOperator.EMPTY]: { email: null } }

     try {
-      const opts = {
-        bookmark: cursor,
-        query: finalQuery,
+      const opts: SearchUsersRequest = {
+        bookmark: cursor ?? undefined,
+        query: finalQuery ?? undefined,
         appId: appId,
         paginate: paginated || paginate,
         limit,
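One behavioural detail worth calling out in the hunk above: the fallback for unsupported searches changes from a `string` (prefix) filter to an explicit `empty` filter, now spelled via the `BasicOperator` enum so the key stays in sync with `SearchFilters`. Side by side, as a sketch:

```ts
import { BasicOperator, SearchFilters } from "@budibase/types"

// Old fallback: a prefix-match filter keyed by the raw string "string"
const oldFallback = { string: { email: null } }

// New fallback: an explicit "empty" filter, with the operator name taken
// from the enum rather than hand-written
const newFallback: SearchFilters = { [BasicOperator.EMPTY]: { email: null } }
```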
@@ -1,23 +0,0 @@
-import DataFetch from "./DataFetch.js"
-
-export default class ViewFetch extends DataFetch {
-  getSchema(datasource, definition) {
-    return definition?.views?.[datasource.name]?.schema
-  }
-
-  async getData() {
-    const { datasource } = this.options
-    try {
-      const res = await this.API.fetchViewData(datasource.name, {
-        calculation: datasource.calculation,
-        field: datasource.field,
-        groupBy: datasource.groupBy,
-        tableId: datasource.tableId,
-      })
-      return { rows: res || [] }
-    } catch (error) {
-      console.error(error)
-      return { rows: [] }
-    }
-  }
-}
@@ -0,0 +1,44 @@
+import { Table, View } from "@budibase/types"
+import DataFetch from "./DataFetch"
+
+type ViewV1 = View & { name: string }
+
+export default class ViewFetch extends DataFetch<ViewV1, Table> {
+  async getDefinition() {
+    const { datasource } = this.options
+
+    if (!datasource?.tableId) {
+      return null
+    }
+    try {
+      return await this.API.fetchTableDefinition(datasource.tableId)
+    } catch (error: any) {
+      this.store.update(state => ({
+        ...state,
+        error,
+      }))
+      return null
+    }
+  }
+
+  getSchema(definition: Table) {
+    const { datasource } = this.options
+    return definition?.views?.[datasource.name]?.schema
+  }
+
+  async getData() {
+    const { datasource } = this.options
+    try {
+      const res = await this.API.fetchViewData(datasource.name, {
+        calculation: datasource.calculation,
+        field: datasource.field,
+        groupBy: datasource.groupBy,
+        tableId: datasource.tableId,
+      })
+      return { rows: res || [] }
+    } catch (error) {
+      console.error(error, { datasource })
+      return { rows: [] }
+    }
+  }
+}
@@ -1,9 +1,10 @@
-import { ViewV2Type } from "@budibase/types"
-import DataFetch from "./DataFetch.js"
+import { SortOrder, UIView, ViewV2, ViewV2Type } from "@budibase/types"
+import DataFetch from "./DataFetch"
 import { get } from "svelte/store"
+import { helpers } from "@budibase/shared-core"

-export default class ViewV2Fetch extends DataFetch {
-  determineFeatureFlags() {
+export default class ViewV2Fetch extends DataFetch<UIView, ViewV2> {
+  async determineFeatureFlags() {
     return {
       supportsSearch: true,
       supportsSort: true,
@@ -11,18 +12,13 @@ export default class ViewV2Fetch extends DataFetch {
     }
   }

-  getSchema(datasource, definition) {
-    return definition?.schema
-  }
-
-  async getDefinition(datasource) {
+  async getDefinition() {
+    const { datasource } = this.options
+
     if (!datasource?.id) {
       return null
     }
     try {
       const res = await this.API.viewV2.fetchDefinition(datasource.id)
       return res?.data
-    } catch (error) {
+    } catch (error: any) {
       this.store.update(state => ({
         ...state,
         error,
@@ -42,8 +38,10 @@ export default class ViewV2Fetch extends DataFetch {

     // If this is a calculation view and we have no calculations, return nothing
     if (
-      definition.type === ViewV2Type.CALCULATION &&
-      !Object.values(definition.schema || {}).some(x => x.calculationType)
+      definition?.type === ViewV2Type.CALCULATION &&
+      !Object.values(definition.schema || {}).some(
+        helpers.views.isCalculationField
+      )
     ) {
       return {
         rows: [],
@@ -56,26 +54,42 @@ export default class ViewV2Fetch extends DataFetch {
     // If sort/filter params are not defined, update options to store the
     // params built in to this view. This ensures that we can accurately
     // compare old and new params and skip a redundant API call.
-    if (!sortColumn && definition.sort?.field) {
+    if (!sortColumn && definition?.sort?.field) {
       this.options.sortColumn = definition.sort.field
-      this.options.sortOrder = definition.sort.order
+      this.options.sortOrder = definition.sort.order || SortOrder.ASCENDING
     }

     try {
-      const res = await this.API.viewV2.fetch(datasource.id, {
-        ...(query ? { query } : {}),
+      const request = {
+        query,
         paginate,
         limit,
         bookmark: cursor,
         sort: sortColumn,
-        sortOrder: sortOrder?.toLowerCase(),
+        sortOrder: sortOrder,
         sortType,
       }
-      return {
-        rows: res?.rows || [],
-        hasNextPage: res?.hasNextPage || false,
-        cursor: res?.bookmark || null,
+      if (paginate) {
+        const res = await this.API.viewV2.fetch(datasource.id, {
+          ...request,
+          paginate,
+        })
+        return {
+          rows: res?.rows || [],
+          hasNextPage: res?.hasNextPage || false,
+          cursor: res?.bookmark || null,
+        }
+      } else {
+        const res = await this.API.viewV2.fetch(datasource.id, {
+          ...request,
+          paginate,
+        })
+        return {
+          rows: res?.rows || [],
+          hasNextPage: false,
+          cursor: null,
+        }
       }
     } catch (error) {
       return {
         rows: [],
@@ -1,57 +0,0 @@
-import TableFetch from "./TableFetch.js"
-import ViewFetch from "./ViewFetch.js"
-import ViewV2Fetch from "./ViewV2Fetch.js"
-import QueryFetch from "./QueryFetch.js"
-import RelationshipFetch from "./RelationshipFetch.js"
-import NestedProviderFetch from "./NestedProviderFetch.js"
-import FieldFetch from "./FieldFetch.js"
-import JSONArrayFetch from "./JSONArrayFetch.js"
-import UserFetch from "./UserFetch.js"
-import GroupUserFetch from "./GroupUserFetch.js"
-import CustomFetch from "./CustomFetch.js"
-import QueryArrayFetch from "./QueryArrayFetch.js"
-
-const DataFetchMap = {
-  table: TableFetch,
-  view: ViewFetch,
-  viewV2: ViewV2Fetch,
-  query: QueryFetch,
-  link: RelationshipFetch,
-  user: UserFetch,
-  groupUser: GroupUserFetch,
-  custom: CustomFetch,
-
-  // Client specific datasource types
-  provider: NestedProviderFetch,
-  field: FieldFetch,
-  jsonarray: JSONArrayFetch,
-  queryarray: QueryArrayFetch,
-}
-
-// Constructs a new fetch model for a certain datasource
-export const fetchData = ({ API, datasource, options }) => {
-  const Fetch = DataFetchMap[datasource?.type] || TableFetch
-  return new Fetch({ API, datasource, ...options })
-}
-
-// Creates an empty fetch instance with no datasource configured, so no data
-// will initially be loaded
-const createEmptyFetchInstance = ({ API, datasource }) => {
-  const handler = DataFetchMap[datasource?.type]
-  if (!handler) {
-    return null
-  }
-  return new handler({ API })
-}
-
-// Fetches the definition of any type of datasource
-export const getDatasourceDefinition = async ({ API, datasource }) => {
-  const instance = createEmptyFetchInstance({ API, datasource })
-  return await instance?.getDefinition(datasource)
-}
-
-// Fetches the schema of any type of datasource
-export const getDatasourceSchema = ({ API, datasource, definition }) => {
-  const instance = createEmptyFetchInstance({ API, datasource })
-  return instance?.getSchema(datasource, definition)
-}
@@ -0,0 +1,91 @@
+import TableFetch from "./TableFetch.js"
+import ViewFetch from "./ViewFetch.js"
+import ViewV2Fetch from "./ViewV2Fetch.js"
+import QueryFetch from "./QueryFetch"
+import RelationshipFetch from "./RelationshipFetch"
+import NestedProviderFetch from "./NestedProviderFetch"
+import FieldFetch from "./FieldFetch"
+import JSONArrayFetch from "./JSONArrayFetch"
+import UserFetch from "./UserFetch.js"
+import GroupUserFetch from "./GroupUserFetch"
+import CustomFetch from "./CustomFetch"
+import QueryArrayFetch from "./QueryArrayFetch.js"
+import { APIClient } from "../api/types.js"
+
+const DataFetchMap = {
+  table: TableFetch,
+  view: ViewFetch,
+  viewV2: ViewV2Fetch,
+  query: QueryFetch,
+  link: RelationshipFetch,
+  user: UserFetch,
+  groupUser: GroupUserFetch,
+  custom: CustomFetch,
+
+  // Client specific datasource types
+  provider: NestedProviderFetch,
+  field: FieldFetch,
+  jsonarray: JSONArrayFetch,
+  queryarray: QueryArrayFetch,
+}
+
+// Constructs a new fetch model for a certain datasource
+export const fetchData = ({ API, datasource, options }: any) => {
+  const Fetch =
+    DataFetchMap[datasource?.type as keyof typeof DataFetchMap] || TableFetch
+  return new Fetch({ API, datasource, ...options })
+}
+
+// Creates an empty fetch instance with no datasource configured, so no data
+// will initially be loaded
+const createEmptyFetchInstance = <
+  TDatasource extends {
+    type: keyof typeof DataFetchMap
+  }
+>({
+  API,
+  datasource,
+}: {
+  API: APIClient
+  datasource: TDatasource
+}) => {
+  const handler = DataFetchMap[datasource?.type as keyof typeof DataFetchMap]
+  if (!handler) {
+    return null
+  }
+  return new handler({ API, datasource: null as any, query: null as any })
+}
+
+// Fetches the definition of any type of datasource
+export const getDatasourceDefinition = async <
+  TDatasource extends {
+    type: keyof typeof DataFetchMap
+  }
+>({
+  API,
+  datasource,
+}: {
+  API: APIClient
+  datasource: TDatasource
+}) => {
+  const instance = createEmptyFetchInstance({ API, datasource })
+  return await instance?.getDefinition()
+}
+
+// Fetches the schema of any type of datasource
+export const getDatasourceSchema = <
+  TDatasource extends {
+    type: keyof typeof DataFetchMap
+  }
+>({
+  API,
+  datasource,
+  definition,
+}: {
+  API: APIClient
+  datasource: TDatasource
+  definition?: any
+}) => {
+  const instance = createEmptyFetchInstance({ API, datasource })
+  return instance?.getSchema(definition)
+}
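A brief usage sketch of the rewritten factory module; the table id is a placeholder and the `API` instance is assumed to come from the client runtime:

```ts
import { fetchData, getDatasourceDefinition } from "./index"
import { APIClient } from "../api/types"

declare const API: APIClient // assumed to exist in the calling context

async function example() {
  // fetchData resolves the fetch class from DataFetchMap by datasource type,
  // falling back to TableFetch for unknown types
  const fetch = fetchData({
    API,
    datasource: { type: "table", tableId: "ta_example" }, // placeholder id
    options: { limit: 10 },
  })

  // Definition lookup goes through an empty fetch instance, so no rows load
  const definition = await getDatasourceDefinition({
    API,
    datasource: { type: "table" as const },
  })
  return { fetch, definition }
}
```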
@@ -0,0 +1,23 @@
+import { JsonFieldMetadata, QuerySchema } from "@budibase/types"
+
+type Schema = Record<string, QuerySchema | string>
+
+declare module "./json" {
+  export const getJSONArrayDatasourceSchema: (
+    tableSchema: Schema,
+    datasource: any
+  ) => Record<string, { type: string; name: string; prefixKeys: string }>
+
+  export const generateQueryArraySchemas: (
+    schema: Schema,
+    nestedSchemaFields?: Record<string, Schema>
+  ) => Schema
+
+  export const convertJSONSchemaToTableSchema: (
+    jsonSchema: JsonFieldMetadata,
+    options: {
+      squashObjects?: boolean
+      prefixKeys?: string
+    }
+  ) => Record<string, { type: string; name: string; prefixKeys: string }>
+}
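This file is an ambient module declaration: it types the existing untyped `json.js` utilities without porting them to TypeScript, so the DataFetch changes above can type-check against these signatures. The effect at a call site, sketched:

```ts
import { JsonFieldMetadata } from "@budibase/types"
import { convertJSONSchemaToTableSchema } from "../utils/json"

declare const jsonField: JsonFieldMetadata // assumed JSON column metadata

// With the .d.ts in place, the result is typed as
// Record<string, { type: string; name: string; prefixKeys: string }>
// rather than `any`
const tableSchema = convertJSONSchemaToTableSchema(jsonField, {
  squashObjects: true,
})
```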
@@ -50,10 +50,6 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "10.0.3",
-    "@aws-sdk/client-dynamodb": "3.709.0",
-    "@aws-sdk/client-s3": "3.709.0",
-    "@aws-sdk/lib-dynamodb": "3.709.0",
-    "@aws-sdk/s3-request-presigner": "3.709.0",
     "@azure/msal-node": "^2.5.1",
     "@budibase/backend-core": "*",
     "@budibase/client": "*",
@@ -74,6 +70,7 @@
     "airtable": "0.12.2",
     "arangojs": "7.2.0",
     "archiver": "7.0.1",
+    "aws-sdk": "2.1692.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bson": "^6.9.0",
@@ -230,7 +230,7 @@ export async function fetchAppPackage(
   const license = await licensing.cache.getCachedLicense()

   // Enrich plugin URLs
-  application.usedPlugins = await objectStore.enrichPluginURLs(
+  application.usedPlugins = objectStore.enrichPluginURLs(
     application.usedPlugins
   )
@@ -355,7 +355,7 @@ async function execute(
     ExecuteQueryRequest,
     ExecuteV2QueryResponse | ExecuteV1QueryResponse
   >,
-  opts: any = { rowsOnly: false, isAutomation: false }
+  opts = { rowsOnly: false, isAutomation: false }
 ) {
   const db = context.getAppDB()

@@ -416,7 +416,7 @@ export async function executeV1(
 export async function executeV2(
   ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
 ) {
-  return execute(ctx, { rowsOnly: false })
+  return execute(ctx, { rowsOnly: false, isAutomation: false })
 }

 export async function executeV2AsAutomation(
@@ -1,16 +1,16 @@
 import {
   UserCtx,
   ViewV2,
-  SearchRowResponse,
   SearchViewRowRequest,
   RequiredKeys,
   RowSearchParams,
+  PaginatedSearchRowResponse,
 } from "@budibase/types"
 import sdk from "../../../sdk"
 import { context } from "@budibase/backend-core"

 export async function searchView(
-  ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
+  ctx: UserCtx<SearchViewRowRequest, PaginatedSearchRowResponse>
 ) {
   const { viewId } = ctx.params

@@ -49,7 +49,13 @@ export async function searchView(
     user: sdk.users.getUserContextBindings(ctx.user),
   })
   result.rows.forEach(r => (r._viewId = view.id))
-  ctx.body = result
+
+  ctx.body = {
+    rows: result.rows,
+    bookmark: result.bookmark,
+    hasNextPage: result.hasNextPage,
+    totalRows: result.totalRows,
+  }
 }

 function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
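Rebuilding `ctx.body` field by field, rather than assigning `result` wholesale, is what lets the handler satisfy the stricter `PaginatedSearchRowResponse` type without leaking internal search state into the payload. The same pattern in isolation, with types simplified (the real ones live in `@budibase/types`):

```ts
interface SearchResult {
  rows: unknown[]
  bookmark?: string
  hasNextPage?: boolean
  totalRows?: number
  internalState?: unknown // hypothetical field that must not be serialised
}

interface PaginatedResponse {
  rows: unknown[]
  bookmark?: string
  hasNextPage?: boolean
  totalRows?: number
}

function toResponse(result: SearchResult): PaginatedResponse {
  // Picking fields explicitly keeps anything extra out of the response
  return {
    rows: result.rows,
    bookmark: result.bookmark,
    hasNextPage: result.hasNextPage,
    totalRows: result.totalRows,
  }
}
```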
@@ -18,8 +18,7 @@ import {
   objectStore,
   utils,
 } from "@budibase/backend-core"
-import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
-import { PutObjectCommand, S3 } from "@aws-sdk/client-s3"
+import AWS from "aws-sdk"
 import fs from "fs"
 import sdk from "../../../sdk"
 import * as pro from "@budibase/pro"
@@ -129,9 +128,9 @@ export const uploadFile = async function (
       return {
         size: file.size,
         name: file.name,
-        url: await objectStore.getAppFileUrl(s3Key),
+        url: objectStore.getAppFileUrl(s3Key),
         extension,
-        key: response.Key!,
+        key: response.Key,
       }
     })
   )
@@ -211,11 +210,11 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
       usedPlugins: plugins,
       favicon:
         branding.faviconUrl !== ""
-          ? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
+          ? objectStore.getGlobalFileUrl("settings", "faviconUrl")
           : "",
       logo:
         config?.logoUrl !== ""
-          ? await objectStore.getGlobalFileUrl("settings", "logoUrl")
+          ? objectStore.getGlobalFileUrl("settings", "logoUrl")
          : "",
       appMigrating: needMigrations,
       nonce: ctx.state.nonce,
@@ -244,7 +243,7 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
       metaDescription: branding?.metaDescription || "",
       favicon:
         branding.faviconUrl !== ""
-          ? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
+          ? objectStore.getGlobalFileUrl("settings", "faviconUrl")
          : "",
     })
@@ -335,17 +334,16 @@ export const getSignedUploadURL = async function (
     ctx.throw(400, "bucket and key values are required")
   }
   try {
-    const s3 = new S3({
+    const s3 = new AWS.S3({
       region: awsRegion,
       endpoint: datasource?.config?.endpoint || undefined,
-      credentials: {
-        accessKeyId: datasource?.config?.accessKeyId as string,
-        secretAccessKey: datasource?.config?.secretAccessKey as string,
-      },
+      accessKeyId: datasource?.config?.accessKeyId as string,
+      secretAccessKey: datasource?.config?.secretAccessKey as string,
+      apiVersion: "2006-03-01",
+      signatureVersion: "v4",
     })
     const params = { Bucket: bucket, Key: key }
-    signedUrl = await getSignedUrl(s3, new PutObjectCommand(params))
+    signedUrl = s3.getSignedUrl("putObject", params)
     if (datasource?.config?.endpoint) {
       publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
     } else {
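Worth flagging for reviewers of this revert: v2's `s3.getSignedUrl` signs locally and returns synchronously, which is why the `await` disappears above, whereas the v3 presigner returned a promise. If async error handling is ever preferred, v2 also ships a promise variant; a sketch with placeholder values:

```ts
import AWS from "aws-sdk"

const s3 = new AWS.S3({
  region: "eu-west-1", // placeholder region
  signatureVersion: "v4",
})

const params = { Bucket: "my-bucket", Key: "my-key" } // placeholder values

// Synchronous form, as used in the controller above
const signedUrl = s3.getSignedUrl("putObject", params)

// Promise form, if a call site wants async error propagation instead
s3.getSignedUrlPromise("putObject", params).then(url => console.log(url))
```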
@@ -1,10 +1,12 @@
 // Directly mock the AWS SDK
-jest.mock("@aws-sdk/s3-request-presigner", () => ({
-  getSignedUrl: jest.fn(() => {
-    return `http://example.com`
-  }),
+jest.mock("aws-sdk", () => ({
+  S3: jest.fn(() => ({
+    getSignedUrl: jest.fn(
+      (operation, params) => `http://example.com/${params.Bucket}/${params.Key}`
+    ),
+    upload: jest.fn(() => ({ Contents: {} })),
+  })),
 }))
-jest.mock("@aws-sdk/client-s3")

 import { Datasource, SourceName } from "@budibase/types"
 import { setEnv } from "../../../environment"
@@ -75,10 +77,7 @@ describe("/static", () => {
         type: "datasource",
         name: "Test",
         source: SourceName.S3,
-        config: {
-          accessKeyId: "bb",
-          secretAccessKey: "bb",
-        },
+        config: {},
       },
     })
   })
@@ -92,7 +91,7 @@ describe("/static", () => {
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
-    expect(res.body.signedUrl).toEqual("http://example.com")
+    expect(res.body.signedUrl).toEqual("http://example.com/foo/bar")
     expect(res.body.publicUrl).toEqual(
       `https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
     )
@@ -154,12 +154,11 @@ describe("test the create row action", () => {
     expect(result.steps[1].outputs.row.file_attachment[0]).toHaveProperty("key")
     let s3Key = result.steps[1].outputs.row.file_attachment[0].key

-    const client = objectStore.ObjectStore()
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)

-    const objectData = await client.headObject({
-      Bucket: objectStore.ObjectStoreBuckets.APPS,
-      Key: s3Key,
-    })
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()

     expect(objectData).toBeDefined()
     expect(objectData.ContentLength).toBeGreaterThan(0)
@@ -230,12 +229,11 @@ describe("test the create row action", () => {
     )
     let s3Key = result.steps[1].outputs.row.single_file_attachment.key

-    const client = objectStore.ObjectStore()
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)

-    const objectData = await client.headObject({
-      Bucket: objectStore.ObjectStoreBuckets.APPS,
-      Key: s3Key,
-    })
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()

     expect(objectData).toBeDefined()
     expect(objectData.ContentLength).toBeGreaterThan(0)
@@ -7,15 +7,9 @@ import {
   ConnectionInfo,
 } from "@budibase/types"

-import {
-  DynamoDBDocument,
-  PutCommandInput,
-  GetCommandInput,
-  UpdateCommandInput,
-  DeleteCommandInput,
-} from "@aws-sdk/lib-dynamodb"
-import { DynamoDB } from "@aws-sdk/client-dynamodb"
+import AWS from "aws-sdk"
 import { AWS_REGION } from "../constants"
+import { DocumentClient } from "aws-sdk/clients/dynamodb"

 interface DynamoDBConfig {
   region: string
@@ -157,7 +151,7 @@ class DynamoDBIntegration implements IntegrationBase {
       region: config.region || AWS_REGION,
       endpoint: config.endpoint || undefined,
     }
-    this.client = DynamoDBDocument.from(new DynamoDB(this.config))
+    this.client = new AWS.DynamoDB.DocumentClient(this.config)
   }

   async testConnection() {
@@ -165,8 +159,8 @@ class DynamoDBIntegration implements IntegrationBase {
       connected: false,
     }
     try {
-      const scanRes = await new DynamoDB(this.config).listTables()
-      response.connected = !!scanRes.$metadata
+      const scanRes = await new AWS.DynamoDB(this.config).listTables().promise()
+      response.connected = !!scanRes.$response
     } catch (e: any) {
       response.error = e.message as string
     }
@@ -175,13 +169,13 @@ class DynamoDBIntegration implements IntegrationBase {

   async create(query: {
     table: string
-    json: Omit<PutCommandInput, "TableName">
+    json: Omit<DocumentClient.PutItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.put(params)
+    return this.client.put(params).promise()
   }

   async read(query: { table: string; json: object; index: null | string }) {
@@ -190,7 +184,7 @@ class DynamoDBIntegration implements IntegrationBase {
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
-    const response = await this.client.query(params)
+    const response = await this.client.query(params).promise()
     if (response.Items) {
       return response.Items
     }
@@ -203,7 +197,7 @@ class DynamoDBIntegration implements IntegrationBase {
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
-    const response = await this.client.scan(params)
+    const response = await this.client.scan(params).promise()
     if (response.Items) {
       return response.Items
     }
@@ -214,40 +208,40 @@ class DynamoDBIntegration implements IntegrationBase {
     const params = {
       TableName: query.table,
     }
-    return new DynamoDB(this.config).describeTable(params)
+    return new AWS.DynamoDB(this.config).describeTable(params).promise()
   }

   async get(query: {
     table: string
-    json: Omit<GetCommandInput, "TableName">
+    json: Omit<DocumentClient.GetItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.get(params)
+    return this.client.get(params).promise()
   }

   async update(query: {
     table: string
-    json: Omit<UpdateCommandInput, "TableName">
+    json: Omit<DocumentClient.UpdateItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.update(params)
+    return this.client.update(params).promise()
   }

   async delete(query: {
     table: string
-    json: Omit<DeleteCommandInput, "TableName">
+    json: Omit<DocumentClient.DeleteItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.delete(params)
+    return this.client.delete(params).promise()
   }
 }
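The mechanical pattern repeated throughout this revert: v2 client methods return an `AWS.Request`, so every call gains a trailing `.promise()` to stay awaitable, where the v3 clients returned promises directly. Condensed, with placeholder table and key names:

```ts
import AWS from "aws-sdk"

const client = new AWS.DynamoDB.DocumentClient({ region: "eu-west-1" })

async function getItem() {
  // v2: the call returns an AWS.Request; .promise() converts it to a Promise
  const result = await client
    .get({ TableName: "my-table", Key: { id: "1" } })
    .promise()
  return result.Item
  // v3 equivalent for comparison (removed by this revert):
  //   const result = await docClient.get({ TableName: "my-table", Key: { id: "1" } })
}
```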
@@ -7,9 +7,8 @@ import {
   ConnectionInfo,
 } from "@budibase/types"

-import { S3 } from "@aws-sdk/client-s3"
+import AWS from "aws-sdk"
 import csv from "csvtojson"
-import stream from "stream"

 interface S3Config {
   region: string
@@ -168,7 +167,7 @@ class S3Integration implements IntegrationBase {
       delete this.config.endpoint
     }

-    this.client = new S3(this.config)
+    this.client = new AWS.S3(this.config)
   }

   async testConnection() {
@@ -176,7 +175,7 @@ class S3Integration implements IntegrationBase {
       connected: false,
     }
     try {
-      await this.client.listBuckets()
+      await this.client.listBuckets().promise()
       response.connected = true
     } catch (e: any) {
       response.error = e.message as string
@@ -210,7 +209,7 @@ class S3Integration implements IntegrationBase {
         LocationConstraint: query.location,
       }
     }
-    return await this.client.createBucket(params)
+    return await this.client.createBucket(params).promise()
   }

   async read(query: {
@@ -221,39 +220,37 @@ class S3Integration implements IntegrationBase {
     maxKeys: number
     prefix: string
   }) {
-    const response = await this.client.listObjects({
-      Bucket: query.bucket,
-      Delimiter: query.delimiter,
-      Marker: query.marker,
-      MaxKeys: query.maxKeys,
-      Prefix: query.prefix,
-    })
+    const response = await this.client
+      .listObjects({
+        Bucket: query.bucket,
+        Delimiter: query.delimiter,
+        Marker: query.marker,
+        MaxKeys: query.maxKeys,
+        Prefix: query.prefix,
+      })
+      .promise()
     return response.Contents
   }

   async readCsv(query: { bucket: string; key: string }) {
-    const response = await this.client.getObject({
-      Bucket: query.bucket,
-      Key: query.key,
-    })
-
-    const fileStream = response.Body?.transformToWebStream()
-
-    if (!fileStream || !(fileStream instanceof stream.Readable)) {
-      throw new Error("Unable to retrieve CSV - invalid stream")
-    }
+    const stream = this.client
+      .getObject({
+        Bucket: query.bucket,
+        Key: query.key,
+      })
+      .createReadStream()

     let csvError = false
     return new Promise((resolve, reject) => {
-      fileStream.on("error", (err: Error) => {
+      stream.on("error", (err: Error) => {
        reject(err)
      })
      const response = csv()
-        .fromStream(fileStream)
+        .fromStream(stream)
        .on("error", () => {
          csvError = true
        })
-      fileStream.on("finish", () => {
+      stream.on("finish", () => {
        resolve(response)
      })
    }).catch(err => {
@@ -266,10 +263,12 @@ class S3Integration implements IntegrationBase {
   }

   async delete(query: { bucket: string; delete: string }) {
-    return await this.client.deleteObjects({
-      Bucket: query.bucket,
-      Delete: JSON.parse(query.delete),
-    })
+    return await this.client
+      .deleteObjects({
+        Bucket: query.bucket,
+        Delete: JSON.parse(query.delete),
+      })
+      .promise()
   }
 }
@@ -0,0 +1,76 @@
+const response = (body: any, extra?: any) => () => ({
+  promise: () => body,
+  ...extra,
+})
+
+class DocumentClient {
+  put = jest.fn(response({}))
+  query = jest.fn(
+    response({
+      Items: [],
+    })
+  )
+  scan = jest.fn(
+    response({
+      Items: [
+        {
+          Name: "test",
+        },
+      ],
+    })
+  )
+  get = jest.fn(response({}))
+  update = jest.fn(response({}))
+  delete = jest.fn(response({}))
+}
+
+class S3 {
+  listObjects = jest.fn(
+    response({
+      Contents: [],
+    })
+  )
+  createBucket = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+  deleteObjects = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+  getSignedUrl = jest.fn((operation, params) => {
+    return `http://example.com/${params.Bucket}/${params.Key}`
+  })
+  headBucket = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+  upload = jest.fn(
+    response({
+      Contents: {},
+    })
+  )
+  getObject = jest.fn(
+    response(
+      {
+        Body: "",
+      },
+      {
+        createReadStream: jest.fn().mockReturnValue("stream"),
+      }
+    )
+  )
+}
+
+module.exports = {
+  DynamoDB: {
+    DocumentClient,
+  },
+  S3,
+  config: {
+    update: jest.fn(),
+  },
+}
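
This new shared mock mirrors the v2 request shape: each method returns an object carrying a `promise()` that yields the canned body, and `getObject` additionally exposes `createReadStream`. Any suite can redirect the real SDK to it with one `jest.mock` call, which is also why the scan assertion below now expects the canned `{ Name: "test" }` item. A sketch of the wiring:

    // In a test file next to the mock module:
    jest.mock("aws-sdk", () => require("./aws-sdk.mock"))

    import AWS from "aws-sdk"

    test("scan resolves the canned items", async () => {
      const client = new AWS.DynamoDB.DocumentClient()
      const result = await client.scan({ TableName: "test" }).promise()
      expect(result).toEqual({ Items: [{ Name: "test" }] })
    })
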
@@ -1,20 +1,4 @@
-jest.mock("@aws-sdk/lib-dynamodb", () => ({
-  DynamoDBDocument: {
-    from: jest.fn(() => ({
-      update: jest.fn(),
-      put: jest.fn(),
-      query: jest.fn(() => ({
-        Items: [],
-      })),
-      scan: jest.fn(() => ({
-        Items: [],
-      })),
-      delete: jest.fn(),
-      get: jest.fn(),
-    })),
-  },
-}))
-jest.mock("@aws-sdk/client-dynamodb")
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
 import { default as DynamoDBIntegration } from "../dynamodb"
 
 class TestConfiguration {
@@ -73,7 +57,11 @@ describe("DynamoDB Integration", () => {
       TableName: tableName,
       IndexName: indexName,
     })
-    expect(response).toEqual([])
+    expect(response).toEqual([
+      {
+        Name: "test",
+      },
+    ])
   })
 
   it("calls the get method with the correct params", async () => {
@@ -1,52 +1,5 @@
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
 import { default as S3Integration } from "../s3"
-jest.mock("@aws-sdk/client-s3", () => {
-  class S3Mock {
-    response(body: any, extra?: any) {
-      return () => ({
-        promise: () => body,
-        ...extra,
-      })
-    }
-
-    listObjects = jest.fn(
-      this.response({
-        Contents: [],
-      })
-    )
-    createBucket = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    deleteObjects = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    headBucket = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    upload = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    getObject = jest.fn(
-      this.response(
-        {
-          Body: "",
-        },
-        {
-          createReadStream: jest.fn().mockReturnValue("stream"),
-        }
-      )
-    )
-  }
-
-  return { S3: S3Mock }
-})
 
 class TestConfiguration {
   integration: any
@@ -430,7 +430,7 @@ export async function handleFileResponse(
       size = details.ContentLength
     }
   }
-  presignedUrl = await objectStore.getPresignedUrl(bucket, key)
+  presignedUrl = objectStore.getPresignedUrl(bucket, key)
   return {
     data: {
       size,
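
The dropped `await` here (and in the plugin and settings hunks below) follows from the object-store helpers becoming synchronous: v2's `getSignedUrl` has a synchronous form when credentials are already resolved. A sketch of what the backend-core helper plausibly wraps — the helper name and expiry default are assumptions, not the library's actual code:

    import AWS from "aws-sdk"

    const s3 = new AWS.S3()

    // v2: the two-argument getSignedUrl returns the URL string directly,
    // so callers no longer need to await it.
    function getPresignedUrl(bucket: string, key: string, expires = 3600): string {
      return s3.getSignedUrl("getObject", { Bucket: bucket, Key: key, Expires: expires })
    }
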
@@ -18,7 +18,7 @@ export async function fetch(type?: PluginType): Promise<Plugin[]> {
     })
   )
   let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
-  plugins = await objectStore.enrichPluginURLs(plugins)
+  plugins = objectStore.enrichPluginURLs(plugins)
   if (type) {
     return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
   } else {
@@ -3,7 +3,10 @@ import { Datasource, Row, Query } from "@budibase/types"
 export type WorkerCallback = (error: any, response?: any) => void
 
 export interface QueryEvent
-  extends Omit<Query, "datasourceId" | "name" | "parameters" | "readable"> {
+  extends Omit<
+    Query,
+    "datasourceId" | "name" | "parameters" | "readable" | "nestedSchemaFields"
+  > {
   appId?: string
   datasource: Datasource
   pagination?: any
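
`QueryEvent` widens its `Omit` so the new optional `nestedSchemaFields` (added to `Query` further down) never leaks into worker payloads. A toy illustration of `Omit` over a key union — the type names here are illustrative only:

    interface Query {
      name: string
      readable: boolean
      nestedSchemaFields?: Record<string, unknown>
      fields: unknown
    }

    // Every key in the union is stripped; only `fields` survives.
    type WorkerQuery = Omit<Query, "name" | "readable" | "nestedSchemaFields">
    const payload: WorkerQuery = { fields: {} } // compiles: omitted keys are gone
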
@@ -78,7 +78,7 @@ export const getComponentLibraryManifest = async (library: string) => {
     resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
   }
   if (typeof resp !== "string") {
-    resp = resp.toString()
+    resp = resp.toString("utf8")
   }
   return JSON.parse(resp)
 }
@@ -3,7 +3,6 @@ import { budibaseTempDir } from "../budibaseDir"
 import fs from "fs"
 import { join } from "path"
 import { objectStore } from "@budibase/backend-core"
-import stream from "stream"
 
 const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
 const AUTOMATION_PATH = join(budibaseTempDir(), "automation")
@@ -59,11 +58,7 @@ async function getPluginImpl(path: string, plugin: Plugin) {
     pluginKey
   )
 
-  if (pluginJs instanceof stream.Readable) {
-    pluginJs.pipe(fs.createWriteStream(filename))
-  } else {
   fs.writeFileSync(filename, pluginJs)
-  }
   fs.writeFileSync(metadataName, hash)
 
   return require(filename)
@@ -359,9 +359,9 @@ export async function coreOutputProcessing(
         if (row[property] == null) {
           continue
         }
-        const process = async (attachment: RowAttachment) => {
+        const process = (attachment: RowAttachment) => {
           if (!attachment.url && attachment.key) {
-            attachment.url = await objectStore.getAppFileUrl(attachment.key)
+            attachment.url = objectStore.getAppFileUrl(attachment.key)
           }
           return attachment
         }
@@ -369,13 +369,11 @@ export async function coreOutputProcessing(
           row[property] = JSON.parse(row[property])
         }
         if (Array.isArray(row[property])) {
-          await Promise.all(
-            row[property].map((attachment: RowAttachment) =>
+          row[property].forEach((attachment: RowAttachment) => {
             process(attachment)
-            )
-          )
+          })
         } else {
-          await process(row[property])
+          process(row[property])
         }
       }
     } else if (
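
With `getAppFileUrl` now synchronous, the attachment enrichment no longer needs `Promise.all` over mapped promises; a plain `forEach` mutates each attachment in place. The same shape in isolation, assuming a synchronous `enrich` callback:

    interface RowAttachment { key?: string; url?: string }

    // No promises to collect: mutations are visible as soon as the loop ends.
    function enrichAll(attachments: RowAttachment[], enrich: (a: RowAttachment) => void) {
      attachments.forEach(attachment => enrich(attachment))
    }
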
@@ -911,8 +911,8 @@ export function sort<T extends Record<string, any>>(
  * @param docs the data
  * @param limit the number of docs to limit to
  */
-export function limit<T>(docs: T[], limit: string): T[] {
-  const numLimit = parseFloat(limit)
+export function limit<T>(docs: T[], limit: string | number): T[] {
+  const numLimit = typeof limit === "number" ? limit : parseFloat(limit)
   if (isNaN(numLimit)) {
     return docs
   }
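
`limit` now accepts a number as well as a string, sparing callers a stringify round-trip. A self-contained sketch of the behaviour — the `slice` tail is an assumption, since the hunk cuts off before the function's end:

    function limit<T>(docs: T[], limitVal: string | number): T[] {
      const numLimit = typeof limitVal === "number" ? limitVal : parseFloat(limitVal)
      if (isNaN(numLimit)) {
        return docs
      }
      return docs.slice(0, numLimit) // assumed tail, not shown in the hunk
    }

    limit([1, 2, 3, 4], 2)   // [1, 2] — number used as-is
    limit([1, 2, 3, 4], "2") // [1, 2] — string parsed with parseFloat
    limit([1, 2, 3, 4], "x") // [1, 2, 3, 4] — NaN guard returns docs unchanged
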
@@ -109,7 +109,9 @@ export function trimOtherProps(object: any, allowedProps: string[]) {
   return result
 }
 
-export function isSupportedUserSearch(query: SearchFilters) {
+export function isSupportedUserSearch(
+  query: SearchFilters
+): query is SearchFilters {
   const allowed = [
     { op: BasicOperator.STRING, key: "email" },
     { op: BasicOperator.EQUAL, key: "_id" },
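
The `query is SearchFilters` return type makes the helper a user-defined type guard, so a true result narrows the argument at the call site. The pattern in miniature, with simplified stand-in types rather than the real allow-list logic:

    type SearchFilters = { equal?: Record<string, unknown> }

    // A type predicate: `x is SearchFilters` tells the compiler what a
    // true return value implies about the argument.
    function isSearchFilters(x: unknown): x is SearchFilters {
      return typeof x === "object" && x !== null
    }

    const payload: unknown = JSON.parse('{"equal":{"email":"a@b.com"}}')
    if (isSearchFilters(payload)) {
      console.log(payload.equal) // narrowed: payload is SearchFilters here
    }
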
@@ -40,6 +40,10 @@ export interface ExecuteQueryRequest {
 export type ExecuteV1QueryResponse = Record<string, any>[]
 export interface ExecuteV2QueryResponse {
   data: Record<string, any>[]
+  pagination?: {
+    page: number
+    cursor: string
+  }
 }
 
 export interface DeleteQueryResponse {
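
The optional `pagination` block lets the v2 execute-query response carry cursor state beside the rows. A hypothetical payload under this interface — the values are invented for illustration:

    interface ExecuteV2QueryResponse {
      data: Record<string, any>[]
      pagination?: { page: number; cursor: string }
    }

    const example: ExecuteV2QueryResponse = {
      data: [{ id: 1, name: "row one" }],
      pagination: { page: 2, cursor: "bookmark-abc" }, // only when paginated
    }
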
@@ -24,4 +24,5 @@ export interface PaginationRequest extends BasicPaginationRequest {
 export interface PaginationResponse {
   bookmark: string | number | undefined
   hasNextPage?: boolean
+  totalRows?: number
 }
@@ -1,4 +1,5 @@
 import { Document } from "../document"
+import { Row } from "./row"
 
 export interface QuerySchema {
   name?: string
@@ -13,6 +14,7 @@ export interface Query extends Document {
   fields: RestQueryFields | any
   transformer: string | null
   schema: Record<string, QuerySchema | string>
+  nestedSchemaFields?: Record<string, Record<string, QuerySchema | string>>
   readable: boolean
   queryVerb: string
   // flag to state whether the default bindings are empty strings (old behaviour) or null
@@ -29,7 +31,7 @@ export interface QueryParameter {
 }
 
 export interface QueryResponse {
-  rows: any[]
+  rows: Row[]
   keys: string[]
   info: any
   extra: any
@@ -227,6 +227,7 @@ interface OtherFieldMetadata extends BaseFieldSchema {
     | FieldType.OPTIONS
     | FieldType.BOOLEAN
+    | FieldType.BIGINT
     | FieldType.JSON
   >
 }
@@ -26,13 +26,11 @@ export interface SMTPConfig extends Config<SMTPInnerConfig> {}
 export interface SettingsBrandingConfig {
   faviconUrl?: string
   faviconUrlEtag?: string
-
   emailBrandingEnabled?: boolean
   testimonialsEnabled?: boolean
   platformTitle?: string
   loginHeading?: string
   loginButton?: string
-
   metaDescription?: string
   metaImageUrl?: string
   metaTitle?: string
@@ -42,6 +40,7 @@ export interface SettingsInnerConfig {
   platformUrl?: string
   company?: string
   logoUrl?: string // Populated on read
+  docsUrl?: string
   logoUrlEtag?: string
   uniqueTenantId?: string
   analyticsEnabled?: boolean
@@ -1,8 +1,6 @@
 import { UITable, UIView } from "@budibase/types"
 
-export type UIDatasource = (UITable | UIView) & {
-  type: string
-}
+export type UIDatasource = UITable | UIView
 
 export interface UIFieldMutation {
   visible?: boolean
@@ -1,38 +0,0 @@
-import {
-  Row,
-  SortOrder,
-  UIDatasource,
-  UILegacyFilter,
-  UISearchFilter,
-} from "@budibase/types"
-
-export interface UIFetchAPI {
-  definition: UIDatasource
-
-  getInitialData: () => Promise<void>
-  loading: any
-  loaded: boolean
-
-  resetKey: string | null
-  error: any
-
-  hasNextPage: boolean
-  nextPage: () => Promise<void>
-
-  rows: Row[]
-
-  options?: {
-    datasource?: {
-      tableId: string
-      id: string
-    }
-  }
-  update: ({
-    sortOrder,
-    sortColumn,
-  }: {
-    sortOrder?: SortOrder
-    sortColumn?: string
-    filter?: UILegacyFilter[] | UISearchFilter
-  }) => any
-}
@@ -6,4 +6,3 @@ export * from "./view"
 export * from "./user"
 export * from "./filters"
 export * from "./rows"
-export * from "./fetch"
@@ -322,27 +322,27 @@ export async function save(
     }
   }
 
-async function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
+function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
   if (!oidcLogos) {
     return
   }
-  const newConfig: Record<string, string> = {}
-  const keys = Object.keys(oidcLogos.config || {})
-
-  for (const key of keys) {
+  oidcLogos.config = Object.keys(oidcLogos.config || {}).reduce(
+    (acc: any, key: string) => {
       if (!key.endsWith("Etag")) {
         const etag = oidcLogos.config[`${key}Etag`]
-        const objectStoreUrl = await objectStore.getGlobalFileUrl(
+        const objectStoreUrl = objectStore.getGlobalFileUrl(
           oidcLogos.type,
           key,
           etag
         )
-        newConfig[key] = objectStoreUrl
+        acc[key] = objectStoreUrl
       } else {
-        newConfig[key] = oidcLogos.config[key]
+        acc[key] = oidcLogos.config[key]
       }
-  }
-  oidcLogos.config = newConfig
+      return acc
+    },
+    {}
+  )
 }
 
 export async function find(ctx: UserCtx<void, FindConfigResponse>) {
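
Because `getGlobalFileUrl` is now synchronous, `enrichOIDCLogos` drops its `async` modifier and the temporary `newConfig` object collapses into a single `reduce`. The accumulate-into-object pattern in isolation, with placeholder keys and URL prefix:

    const config: Record<string, string> = { logo: "logo-key", logoEtag: "abc123" }

    // Copy *Etag entries through unchanged; rewrite everything else to a URL.
    const enriched = Object.keys(config).reduce((acc: Record<string, string>, key) => {
      acc[key] = key.endsWith("Etag")
        ? config[key]
        : `https://files.example.com/${config[key]}`
      return acc
    }, {})
    // enriched: { logo: "https://files.example.com/logo-key", logoEtag: "abc123" }
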
@@ -370,7 +370,7 @@ export async function find(ctx: UserCtx<void, FindConfigResponse>) {
 
 async function handleConfigType(type: ConfigType, config: Config) {
   if (type === ConfigType.OIDC_LOGOS) {
-    await enrichOIDCLogos(config)
+    enrichOIDCLogos(config)
   } else if (type === ConfigType.AI) {
     await handleAIConfig(config)
   }
@@ -396,7 +396,7 @@ export async function publicOidc(ctx: Ctx<void, GetPublicOIDCConfigResponse>) {
   const oidcCustomLogos = await configs.getOIDCLogosDoc()
 
   if (oidcCustomLogos) {
-    await enrichOIDCLogos(oidcCustomLogos)
+    enrichOIDCLogos(oidcCustomLogos)
   }
 
   if (!oidcConfig) {
@@ -427,7 +427,7 @@ export async function publicSettings(
 
   // enrich the logo url - empty url means deleted
   if (config.logoUrl && config.logoUrl !== "") {
-    config.logoUrl = await objectStore.getGlobalFileUrl(
+    config.logoUrl = objectStore.getGlobalFileUrl(
       "settings",
       "logoUrl",
       config.logoUrlEtag
@@ -437,7 +437,7 @@ export async function publicSettings(
   // enrich the favicon url - empty url means deleted
   const faviconUrl =
     branding.faviconUrl && branding.faviconUrl !== ""
-      ? await objectStore.getGlobalFileUrl(
+      ? objectStore.getGlobalFileUrl(
           "settings",
           "faviconUrl",
           branding.faviconUrlEtag
@@ -522,7 +522,7 @@ export async function upload(ctx: UserCtx<void, UploadConfigFileResponse>) {
 
   ctx.body = {
     message: "File has been uploaded and url stored to config.",
-    url: await objectStore.getGlobalFileUrl(type, name, etag),
+    url: objectStore.getGlobalFileUrl(type, name, etag),
   }
 }