Merge branch 'master' of github.com:budibase/budibase into helm-sqs
commit 3d5babe0d7

@@ -9,7 +9,7 @@ on:
 jobs:
   ensure-is-master-tag:
     name: Ensure is a master tag
-    runs-on: qa-arc-runner-set
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout monorepo
         uses: actions/checkout@v4

@@ -17,6 +17,6 @@ version: 0.0.0
 appVersion: 0.0.0
 dependencies:
   - name: couchdb
-    version: 4.3.0
+    version: 4.5.3
     repository: https://apache.github.io/couchdb-helm
     condition: services.couchdb.enabled

@@ -1,5 +1,5 @@
 {
-  "version": "2.27.5",
+  "version": "2.27.6",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -1 +1 @@
-Subproject commit 39acfff42a063e5a8a7d58d36721ec3103e16348
+Subproject commit a03225549e3ce61f43d0da878da162e08941b939

@@ -14,6 +14,7 @@ import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
 import fsp from "fs/promises"
+import { HeadObjectOutput } from "aws-sdk/clients/s3"
 import { ReadableStream } from "stream/web"

 const streamPipeline = promisify(stream.pipeline)
 // use this as a temporary store of buckets that are being created
@@ -41,10 +42,7 @@ type UploadParams = BaseUploadParams & {
   path?: string | PathLike
 }

-export type StreamTypes =
-  | ReadStream
-  | NodeJS.ReadableStream
-  | ReadableStream<Uint8Array>
+export type StreamTypes = ReadStream | NodeJS.ReadableStream

 export type StreamUploadParams = BaseUploadParams & {
   stream?: StreamTypes

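Note: with StreamTypes narrowed to Node streams, a caller that only has a web
ReadableStream (for example from fetch()) has to adapt it before handing it to
streamUpload. A minimal sketch of that adaptation, assuming Node 18+ where
Readable.fromWeb is available; the helper name is illustrative, not part of
this change:

import { Readable } from "stream"
import type { ReadableStream as WebReadableStream } from "stream/web"

// Adapt a web stream to the narrowed StreamTypes
// (ReadStream | NodeJS.ReadableStream) accepted by streamUpload.
function toNodeStream(body: WebReadableStream<Uint8Array>): NodeJS.ReadableStream {
  return Readable.fromWeb(body)
}
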
@@ -222,6 +220,9 @@ export async function streamUpload({
   extra,
   ttl,
 }: StreamUploadParams) {
+  if (!stream) {
+    throw new Error("Stream to upload is invalid/undefined")
+  }
   const extension = filename.split(".").pop()
   const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)

@@ -251,14 +252,27 @@ export async function streamUpload({
       : CONTENT_TYPE_MAP.txt
   }

+  const bucket = sanitizeBucket(bucketName),
+    objKey = sanitizeKey(filename)
   const params = {
-    Bucket: sanitizeBucket(bucketName),
-    Key: sanitizeKey(filename),
+    Bucket: bucket,
+    Key: objKey,
     Body: stream,
     ContentType: contentType,
     ...extra,
   }
-  return objectStore.upload(params).promise()
+
+  const details = await objectStore.upload(params).promise()
+  const headDetails = await objectStore
+    .headObject({
+      Bucket: bucket,
+      Key: objKey,
+    })
+    .promise()
+  return {
+    ...details,
+    ContentLength: headDetails.ContentLength,
+  }
 }

 /**

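The updated streamUpload resolves with the S3 upload details plus a
ContentLength taken from the follow-up headObject call, so callers no longer
need to head the object themselves. A hedged usage sketch; the bucket,
filename and content type are examples, not values from this diff:

import fs from "fs"
import { objectStore } from "@budibase/backend-core"

async function uploadAndReportSize(path: string) {
  const details = await objectStore.streamUpload({
    bucket: objectStore.ObjectStoreBuckets.TEMP,
    filename: "example/report.csv",
    stream: fs.createReadStream(path),
    type: "text/csv",
  })
  // ContentLength is now part of the resolved value
  return details.ContentLength
}
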
@@ -60,6 +60,7 @@
     userLimitReachedModal
   let searchEmail = undefined
   let selectedRows = []
+  let selectedInvites = []
   let bulkSaveResponse
   let customRenderers = [
     { column: "email", component: EmailTableRenderer },
@@ -123,7 +124,7 @@
       return {}
     }
     let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
-    pendingSchema.email.displayName = "Pending Invites"
+    pendingSchema.email.displayName = "Pending Users"
     return pendingSchema
   }

@@ -132,6 +133,7 @@
     const { admin, builder, userGroups, apps } = invite.info

     return {
+      _id: invite.code,
       email: invite.email,
       builder,
       admin,
@@ -260,9 +262,26 @@
         return
       }

-      await users.bulkDelete(ids)
-      notifications.success(`Successfully deleted ${selectedRows.length} rows`)
+      if (ids.length > 0) {
+        await users.bulkDelete(ids)
+      }
+
+      if (selectedInvites.length > 0) {
+        await users.removeInvites(
+          selectedInvites.map(invite => ({
+            code: invite._id,
+          }))
+        )
+        pendingInvites = await users.getInvites()
+      }
+
+      notifications.success(
+        `Successfully deleted ${
+          selectedRows.length + selectedInvites.length
+        } users`
+      )
       selectedRows = []
+      selectedInvites = []
       await fetch.refresh()
     } catch (error) {
       notifications.error("Error deleting users")
@@ -328,15 +347,15 @@
       </div>
     {/if}
     <div class="controls-right">
-      <Search bind:value={searchEmail} placeholder="Search" />
-      {#if selectedRows.length > 0}
+      {#if selectedRows.length > 0 || selectedInvites.length > 0}
        <DeleteRowsButton
          item="user"
          on:updaterows
-         {selectedRows}
+         selectedRows={[...selectedRows, ...selectedInvites]}
          deleteRows={deleteUsers}
        />
       {/if}
+      <Search bind:value={searchEmail} placeholder="Search" />
     </div>
   </div>
   <Table
@@ -362,10 +381,12 @@
   </div>

   <Table
+    bind:selectedRows={selectedInvites}
     schema={pendingSchema}
     data={parsedInvites}
     allowEditColumns={false}
     allowEditRows={false}
+    allowSelectRows={!readonly}
     {customRenderers}
     loading={!invitesLoaded}
     allowClickRows={false}

@@ -38,6 +38,10 @@ export function createUsersStore() {
     return API.inviteUsers(payload)
   }

+  async function removeInvites(payload) {
+    return API.removeUserInvites(payload)
+  }
+
   async function acceptInvite(inviteCode, password, firstName, lastName) {
     return API.acceptInvite({
       inviteCode,
@@ -154,6 +158,7 @@ export function createUsersStore() {
     onboard,
     fetchInvite,
     getInvites,
+    removeInvites,
     updateInvite,
     getUserCountByApp,
     addAppBuilder,

@@ -234,6 +234,16 @@ export const buildUserEndpoints = API => ({
     })
   },

+  /**
+   * Removes multiple user invites from Redis cache
+   */
+  removeUserInvites: async inviteCodes => {
+    return await API.post({
+      url: "/api/global/users/multi/invite/delete",
+      body: inviteCodes,
+    })
+  },
+
   /**
    * Accepts an invite to join the platform and creates a user.
    * @param inviteCode the invite code sent in the email

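A hedged sketch of how the builder can use the new endpoint; "api" stands in
for the object returned by buildUserEndpoints, and the codes are placeholders.
The payload shape matches the DeleteInviteUsersRequest type added later in
this diff ({ code: string }[]):

async function discardPendingInvites(api: any, codes: string[]) {
  // POSTs to /api/global/users/multi/invite/delete
  return api.removeUserInvites(codes.map(code => ({ code })))
}
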
@@ -68,7 +68,6 @@
     "aws-sdk": "2.1030.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
-    "bl": "^6.0.12",
     "bull": "4.10.1",
     "chokidar": "3.5.3",
     "content-disposition": "^0.5.4",
@@ -97,7 +96,7 @@
     "memorystream": "0.3.1",
     "mongodb": "^6.3.0",
     "mssql": "10.0.1",
-    "mysql2": "3.9.7",
+    "mysql2": "3.9.8",
     "node-fetch": "2.6.7",
     "object-sizeof": "2.6.1",
     "openai": "^3.2.1",
@@ -116,7 +115,8 @@
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
     "worker-farm": "1.7.0",
-    "xml2js": "0.5.0"
+    "xml2js": "0.5.0",
+    "tmp": "0.2.3"
   },
   "devDependencies": {
     "@babel/preset-env": "7.16.11",
@@ -137,6 +137,7 @@
     "@types/supertest": "2.0.14",
     "@types/tar": "6.1.5",
     "@types/uuid": "8.3.4",
+    "@types/tmp": "0.2.6",
     "copyfiles": "2.4.1",
     "docker-compose": "0.23.17",
     "jest": "29.7.0",

@@ -149,13 +149,12 @@ class RestIntegration implements IntegrationBase {
       { downloadImages: this.config.downloadImages }
     )
     let contentLength = response.headers.get("content-length")
-    if (!contentLength && raw) {
-      contentLength = Buffer.byteLength(raw, "utf8").toString()
-    }
+    let isSuccess = response.status >= 200 && response.status < 300
     if (
-      contentDisposition.includes("filename") ||
-      contentDisposition.includes("attachment") ||
-      contentDisposition.includes("form-data")
+      (contentDisposition.includes("filename") ||
+        contentDisposition.includes("attachment") ||
+        contentDisposition.includes("form-data")) &&
+      isSuccess
     ) {
       filename =
         path.basename(parse(contentDisposition).parameters?.filename) || ""
@@ -168,6 +167,9 @@ class RestIntegration implements IntegrationBase {
       return handleFileResponse(response, filename, this.startTimeMs)
     } else {
       responseTxt = response.text ? await response.text() : ""
+      if (!contentLength && responseTxt) {
+        contentLength = Buffer.byteLength(responseTxt, "utf8").toString()
+      }
       const hasContent =
         (contentLength && parseInt(contentLength) > 0) ||
         responseTxt.length > 0

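A hedged sketch of the decision the updated response handling makes: a body is
only routed to handleFileResponse when content-disposition looks like a file
AND the request actually succeeded, so error payloads fall through to the text
path. The names mirror the diff; the standalone function is illustrative:

function shouldHandleAsFile(status: number, contentDisposition: string) {
  const isSuccess = status >= 200 && status < 300
  const looksLikeFile =
    contentDisposition.includes("filename") ||
    contentDisposition.includes("attachment") ||
    contentDisposition.includes("form-data")
  return looksLikeFile && isSuccess
}
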
@@ -657,6 +657,7 @@ describe("REST Integration", () => {
     mockReadable.push(null)
     ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
       Promise.resolve({
+        status: 200,
         headers: {
           raw: () => ({
             "content-type": [contentType],
@@ -700,6 +701,7 @@ describe("REST Integration", () => {
     mockReadable.push(null)
     ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
       Promise.resolve({
+        status: 200,
         headers: {
           raw: () => ({
             "content-type": [contentType],

@@ -18,7 +18,7 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
     await logs.waitUntilReady(container, boundPorts, startTime)

     const command = Wait.forSuccessfulCommand(
-      `mysqladmin ping -h localhost -P 3306 -u root -ppassword`
+      `/usr/local/bin/healthcheck.sh --innodb_initialized`
     )
     await command.waitUntilReady(container)
   }

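A hedged sketch of the same idea in a standalone testcontainers setup: wait on
MariaDB's bundled healthcheck script rather than mysqladmin ping. The image
tag and root password are examples, not values from this repository:

import { GenericContainer, Wait } from "testcontainers"

async function startMariaDb() {
  return new GenericContainer("mariadb:lts")
    .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "/usr/local/bin/healthcheck.sh --innodb_initialized"
      )
    )
    .start()
}
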
@@ -9,10 +9,12 @@ import { context, objectStore, sql } from "@budibase/backend-core"
 import { v4 } from "uuid"
 import { parseStringPromise as xmlParser } from "xml2js"
 import { formatBytes } from "../../utilities"
-import bl from "bl"
 import env from "../../environment"
 import { InvalidColumns } from "../../constants"
 import { helpers, utils } from "@budibase/shared-core"
+import { pipeline } from "stream/promises"
+import tmp from "tmp"
+import fs from "fs"

 type PrimitiveTypes =
   | FieldType.STRING
@@ -360,35 +362,44 @@ export async function handleFileResponse(
   const key = `${context.getProdAppId()}/${processedFileName}`
   const bucket = objectStore.ObjectStoreBuckets.TEMP

-  const stream = response.body.pipe(bl((error, data) => data))
-
-  if (response.body) {
-    const contentLength = response.headers.get("content-length")
-    if (contentLength) {
-      size = parseInt(contentLength, 10)
-    }
-
-    await objectStore.streamUpload({
-      bucket,
-      filename: key,
-      stream,
-      ttl: 1,
-      type: response.headers["content-type"],
-    })
-  }
-  presignedUrl = objectStore.getPresignedUrl(bucket, key)
-  return {
-    data: {
-      size,
-      name: processedFileName,
-      url: presignedUrl,
-      extension: fileExtension,
-      key: key,
-    },
-    info: {
-      code: response.status,
-      size: formatBytes(size.toString()),
-      time: `${Math.round(performance.now() - startTime)}ms`,
-    },
-  }
+  // put the response stream to disk temporarily as a buffer
+  const tmpObj = tmp.fileSync()
+  try {
+    await pipeline(response.body, fs.createWriteStream(tmpObj.name))
+    if (response.body) {
+      const contentLength = response.headers.get("content-length")
+      if (contentLength) {
+        size = parseInt(contentLength, 10)
+      }
+
+      const details = await objectStore.streamUpload({
+        bucket,
+        filename: key,
+        stream: fs.createReadStream(tmpObj.name),
+        ttl: 1,
+        type: response.headers["content-type"],
+      })
+      if (!size && details.ContentLength) {
+        size = details.ContentLength
+      }
+    }
+    presignedUrl = objectStore.getPresignedUrl(bucket, key)
+    return {
+      data: {
+        size,
+        name: processedFileName,
+        url: presignedUrl,
+        extension: fileExtension,
+        key: key,
+      },
+      info: {
+        code: response.status,
+        size: formatBytes(size.toString()),
+        time: `${Math.round(performance.now() - startTime)}ms`,
+      },
+    }
+  } finally {
+    // cleanup tmp
+    tmpObj.removeCallback()
+  }
 }

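The rewritten handleFileResponse no longer buffers the whole body in memory
via bl; it spools the response to a temporary file, re-reads that file as a
stream for the upload, and removes it afterwards. A hedged sketch of the same
pattern in isolation; "upload" stands in for objectStore.streamUpload:

import fs from "fs"
import tmp from "tmp"
import { pipeline } from "stream/promises"

async function spoolToTmp(
  body: NodeJS.ReadableStream,
  upload: (stream: fs.ReadStream) => Promise<unknown>
) {
  // write the body to disk first so it can be re-read and sized cheaply
  const tmpObj = tmp.fileSync()
  try {
    await pipeline(body, fs.createWriteStream(tmpObj.name))
    return await upload(fs.createReadStream(tmpObj.name))
  } finally {
    tmpObj.removeCallback()
  }
}
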
@@ -196,12 +196,22 @@ class QueryRunner {
     return { rows, keys, info, extra, pagination }
   }

-  async runAnotherQuery(queryId: string, parameters: any) {
+  async runAnotherQuery(
+    queryId: string,
+    currentParameters: Record<string, any>
+  ) {
     const db = context.getAppDB()
     const query = await db.get<Query>(queryId)
     const datasource = await sdk.datasources.get(query.datasourceId, {
       enriched: true,
     })
+    // enrich parameters with dynamic queries defaults
+    const defaultParams = query.parameters || []
+    for (let param of defaultParams) {
+      if (!currentParameters[param.name]) {
+        currentParameters[param.name] = param.default
+      }
+    }
     return new QueryRunner(
       {
         schema: query.schema,
@@ -210,7 +220,7 @@ class QueryRunner {
         transformer: query.transformer,
         nullDefaultSupport: query.nullDefaultSupport,
         ctx: this.ctx,
-        parameters,
+        parameters: currentParameters,
         datasource,
         queryId,
       },

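A hedged sketch of the defaulting behaviour runAnotherQuery now applies before
re-running a query: any parameter the caller did not supply falls back to the
default declared on the stored query. Types are simplified for illustration:

type QueryParameter = { name: string; default?: any }

function withDefaults(
  declared: QueryParameter[],
  current: Record<string, any>
) {
  for (const param of declared) {
    if (!current[param.name]) {
      current[param.name] = param.default
    }
  }
  return current
}

// e.g. withDefaults([{ name: "limit", default: 10 }], {}) -> { limit: 10 }
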
@@ -45,7 +45,12 @@ export interface InviteUserRequest {
   userInfo: any
 }

+export interface DeleteInviteUserRequest {
+  code: string
+}
+
 export type InviteUsersRequest = InviteUserRequest[]
+export type DeleteInviteUsersRequest = DeleteInviteUserRequest[]

 export interface InviteUsersResponse {
   successful: { email: string }[]

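A small hedged sketch of building the payload these types describe; the codes
are placeholders, and the import path assumes the types are exported from
@budibase/types alongside InviteUsersRequest:

import type {
  DeleteInviteUserRequest,
  DeleteInviteUsersRequest,
} from "@budibase/types"

const toRemove: DeleteInviteUsersRequest = ["code-1", "code-2"].map(
  (code): DeleteInviteUserRequest => ({ code })
)
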
@@ -245,7 +245,7 @@ export type AutomationAttachment = {

 export type AutomationAttachmentContent = {
   filename: string
-  content: ReadStream | NodeJS.ReadableStream | ReadableStream<Uint8Array>
+  content: ReadStream | NodeJS.ReadableStream
 }

 export type BucketedContent = AutomationAttachmentContent & {

@@ -10,6 +10,8 @@ import {
   CreateAdminUserRequest,
   CreateAdminUserResponse,
   Ctx,
+  DeleteInviteUserRequest,
+  DeleteInviteUsersRequest,
   InviteUserRequest,
   InviteUsersRequest,
   InviteUsersResponse,
@@ -335,6 +337,20 @@ export const inviteMultiple = async (ctx: Ctx<InviteUsersRequest>) => {
   ctx.body = await userSdk.invite(ctx.request.body)
 }

+export const removeMultipleInvites = async (
+  ctx: Ctx<DeleteInviteUsersRequest>
+) => {
+  const inviteCodesToRemove = ctx.request.body.map(
+    (invite: DeleteInviteUserRequest) => invite.code
+  )
+  for (const code of inviteCodesToRemove) {
+    await cache.invite.deleteCode(code)
+  }
+  ctx.body = {
+    message: "User invites successfully removed.",
+  }
+}
+
 export const checkInvite = async (ctx: any) => {
   const { code } = ctx.params
   let invite

@@ -108,6 +108,11 @@ router
     buildInviteMultipleValidation(),
     controller.inviteMultiple
   )
+  .post(
+    "/api/global/users/multi/invite/delete",
+    auth.builderOrAdmin,
+    controller.removeMultipleInvites
+  )

   // non-global endpoints
   .get("/api/global/users/invite/:code", controller.checkInvite)

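A hedged sketch of exercising the new route directly; the base URL and codes
are placeholders, the caller must be authenticated as a builder or admin
(auth.builderOrAdmin guards the route), and the response message mirrors the
controller above:

async function removeInvites(baseUrl: string, codes: string[]) {
  const response = await fetch(
    `${baseUrl}/api/global/users/multi/invite/delete`,
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(codes.map(code => ({ code }))),
    }
  )
  return response.json() // { message: "User invites successfully removed." }
}
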
yarn.lock
@@ -6348,6 +6348,11 @@
   dependencies:
     "@types/estree" "*"

+"@types/tmp@0.2.6":
+  version "0.2.6"
+  resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.2.6.tgz#d785ee90c52d7cc020e249c948c36f7b32d1e217"
+  integrity sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==
+
 "@types/tough-cookie@*", "@types/tough-cookie@^4.0.2":
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397"
@@ -7700,7 +7705,7 @@ bl@^4.0.3, bl@^4.1.0:
     inherits "^2.0.4"
     readable-stream "^3.4.0"

-bl@^6.0.12, bl@^6.0.3:
+bl@^6.0.3:
   version "6.0.12"
   resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.12.tgz#77c35b96e13aeff028496c798b75389ddee9c7f8"
   integrity sha512-EnEYHilP93oaOa2MnmNEjAcovPS3JlQZOyzGXi3EyEpPhm9qWvdDp7BmAVEVusGzp8LlwQK56Av+OkDoRjzE0w==
@@ -11904,6 +11909,17 @@ glob@^10.0.0, glob@^10.2.2:
     minipass "^7.0.4"
     path-scurry "^1.10.2"

+glob@^10.3.7:
+  version "10.4.1"
+  resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.1.tgz#0cfb01ab6a6b438177bfe6a58e2576f6efe909c2"
+  integrity sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw==
+  dependencies:
+    foreground-child "^3.1.0"
+    jackspeak "^3.1.2"
+    minimatch "^9.0.4"
+    minipass "^7.1.2"
+    path-scurry "^1.11.1"
+
 glob@^5.0.15:
   version "5.0.15"
   resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1"
@@ -13472,6 +13488,15 @@ jackspeak@^2.3.6:
   optionalDependencies:
     "@pkgjs/parseargs" "^0.11.0"

+jackspeak@^3.1.2:
+  version "3.1.2"
+  resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.1.2.tgz#eada67ea949c6b71de50f1b09c92a961897b90ab"
+  integrity sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==
+  dependencies:
+    "@isaacs/cliui" "^8.0.2"
+  optionalDependencies:
+    "@pkgjs/parseargs" "^0.11.0"
+
 jake@^10.8.5:
   version "10.8.5"
   resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46"
@@ -15751,6 +15776,13 @@ minimatch@^8.0.2:
   dependencies:
     brace-expansion "^2.0.1"

+minimatch@^9.0.4:
+  version "9.0.4"
+  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51"
+  integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==
+  dependencies:
+    brace-expansion "^2.0.1"
+
 minimist-options@4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619"
@@ -15845,6 +15877,11 @@ minipass@^5.0.0:
   resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c"
   integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==

+minipass@^7.1.2:
+  version "7.1.2"
+  resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707"
+  integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==
+
 minizlib@^2.1.1, minizlib@^2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
@@ -16033,10 +16070,10 @@ mute-stream@~1.0.0:
   resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e"
   integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==

-mysql2@3.9.7:
-  version "3.9.7"
-  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.7.tgz#843755daf65b5ef08afe545fe14b8fb62824741a"
-  integrity sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==
+mysql2@3.9.8:
+  version "3.9.8"
+  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.8.tgz#fe8a0f975f2c495ed76ca988ddc5505801dc49ce"
+  integrity sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==
   dependencies:
     denque "^2.1.0"
     generate-function "^2.3.1"
@@ -17378,6 +17415,14 @@ path-scurry@^1.10.2, path-scurry@^1.6.1:
     lru-cache "^10.2.0"
     minipass "^5.0.0 || ^6.0.2 || ^7.0.0"

+path-scurry@^1.11.1:
+  version "1.11.1"
+  resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2"
+  integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==
+  dependencies:
+    lru-cache "^10.2.0"
+    minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
+
 path-to-regexp@1.x:
   version "1.8.0"
   resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a"
@@ -19318,6 +19363,13 @@ rimraf@^4.4.1:
   dependencies:
     glob "^9.2.0"

+rimraf@^5.0.7:
+  version "5.0.7"
+  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.7.tgz#27bddf202e7d89cb2e0381656380d1734a854a74"
+  integrity sha512-nV6YcJo5wbLW77m+8KjH8aB/7/rxQy9SZ0HY5shnwULfS+9nmTtVXAJET5NdZmCzA4fPI/Hm1wo/Po/4mopOdg==
+  dependencies:
+    glob "^10.3.7"
+
 ripemd160@^2.0.0, ripemd160@^2.0.1:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c"
@@ -21236,6 +21288,11 @@ tlhunter-sorted-set@^0.1.0:
   resolved "https://registry.yarnpkg.com/tlhunter-sorted-set/-/tlhunter-sorted-set-0.1.0.tgz#1c3eae28c0fa4dff97e9501d2e3c204b86406f4b"
   integrity sha512-eGYW4bjf1DtrHzUYxYfAcSytpOkA44zsr7G2n3PV7yOUR23vmkGe3LL4R+1jL9OsXtbsFOwe8XtbCrabeaEFnw==

+tmp@0.2.3:
+  version "0.2.3"
+  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.3.tgz#eb783cc22bc1e8bebd0671476d46ea4eb32a79ae"
+  integrity sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==
+
 tmp@^0.0.33:
   version "0.0.33"
   resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"