Merge branch 'master' into feature/count-creators-in-groups
Commit: aa1153abde
@@ -12,6 +12,7 @@
   export let getOptionIcon = () => null
   export let getOptionColour = () => null
   export let getOptionSubtitle = () => null
+  export let compare = null
   export let useOptionIconImage = false
   export let isOptionEnabled
   export let readonly = false
@@ -34,13 +35,19 @@
   $: fieldIcon = getFieldAttribute(getOptionIcon, value, options)
   $: fieldColour = getFieldAttribute(getOptionColour, value, options)

+  function compareOptionAndValue(option, value) {
+    return typeof compare === "function"
+      ? compare(option, value)
+      : option === value
+  }
+
   const getFieldAttribute = (getAttribute, value, options) => {
     // Wait for options to load if there is a value but no options
     if (!options?.length) {
       return ""
     }
-    const index = options.findIndex(
-      (option, idx) => getOptionValue(option, idx) === value
+    const index = options.findIndex((option, idx) =>
+      compareOptionAndValue(getOptionValue(option, idx), value)
     )
     return index !== -1 ? getAttribute(options[index], index) : null
   }
@@ -94,7 +101,7 @@
   {customPopoverMaxHeight}
   isPlaceholder={value == null || value === ""}
   placeholderOption={placeholder === false ? null : placeholder}
-  isOptionSelected={option => option === value}
+  isOptionSelected={option => compareOptionAndValue(option, value)}
   onSelectOption={selectOption}
   {loading}
 />
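The compare prop added above lets callers of the core Select decide how an option matches the bound value, which matters once option values are objects (strict equality never matches a freshly created object). A minimal standalone sketch of that behaviour; the option data and the compare function here are illustrative, not part of this commit:

// Option values are objects, so `option === value` would always be false
const options = [
  { label: "Text", value: { type: "string" } },
  { label: "Number", value: { type: "number" } },
]
const value = { type: "string" }

// A compare function a caller might supply via the new prop
const compare = (option, val) => option?.type === val?.type

// Mirrors compareOptionAndValue: use compare when given, else strict equality
function compareOptionAndValue(option, val) {
  return typeof compare === "function" ? compare(option, val) : option === val
}

const index = options.findIndex(option =>
  compareOptionAndValue(option.value, value)
)
console.log(index) // 0 — the "Text" option is treated as selected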
@@ -28,6 +28,7 @@
   export let footer = null
   export let tag = null
   export let helpText = null
+  export let compare
   const dispatch = createEventDispatcher()
   const onChange = e => {
     value = e.detail
@@ -65,6 +66,7 @@
     {autocomplete}
     {customPopoverHeight}
     {tag}
+    {compare}
     on:change={onChange}
     on:click
   />
@@ -35,6 +35,7 @@
   export let bindingDrawerLeft
   export let allowHelpers = true
   export let customButtonText = null
+  export let compare = (option, value) => option === value

   let fields = Object.entries(object || {}).map(([name, value]) => ({
     name,
@@ -112,7 +113,12 @@
           on:blur={changed}
         />
         {#if options}
-          <Select bind:value={field.value} on:change={changed} {options} />
+          <Select
+            bind:value={field.value}
+            {compare}
+            on:change={changed}
+            {options}
+          />
         {:else if bindings && bindings.length}
           <DrawerBindableInput
             {bindings}
@@ -1,6 +1,6 @@
 <script>
   import KeyValueBuilder from "../KeyValueBuilder.svelte"
-  import { SchemaTypeOptions } from "constants/backend"
+  import { SchemaTypeOptionsExpanded } from "constants/backend"

   export let schema
   export let onSchemaChange = () => {}
@@ -24,6 +24,7 @@
     object={schema}
     name="field"
     headings
-    options={SchemaTypeOptions}
+    options={SchemaTypeOptionsExpanded}
+    compare={(option, value) => option.type === value.type}
   />
 {/key}
@@ -33,7 +33,7 @@
     PaginationTypes,
     RawRestBodyTypes,
     RestBodyTypes as bodyTypes,
-    SchemaTypeOptions,
+    SchemaTypeOptionsExpanded,
   } from "constants/backend"
   import JSONPreview from "components/integration/JSONPreview.svelte"
   import AccessLevelSelect from "components/integration/AccessLevelSelect.svelte"
@@ -97,9 +97,7 @@
   $: schemaReadOnly = !responseSuccess
   $: variablesReadOnly = !responseSuccess
   $: showVariablesTab = shouldShowVariables(dynamicVariables, variablesReadOnly)
-  $: hasSchema =
-    Object.keys(schema || {}).length !== 0 ||
-    Object.keys(query?.schema || {}).length !== 0
+  $: hasSchema = Object.keys(schema || {}).length !== 0

   $: runtimeUrlQueries = readableToRuntimeMap(mergedBindings, breakQs)

@@ -161,7 +159,7 @@
     newQuery.fields.queryString = queryString
     newQuery.fields.authConfigId = authConfigId
     newQuery.fields.disabledHeaders = restUtils.flipHeaderState(enabledHeaders)
-    newQuery.schema = restUtils.fieldsToSchema(schema)
+    newQuery.schema = schema

     return newQuery
   }
@@ -231,6 +229,14 @@
       notifications.info("Request did not return any data")
     } else {
       response.info = response.info || { code: 200 }
+      // if existing schema, copy over what it is
+      if (schema) {
+        for (let [name, field] of Object.entries(schema)) {
+          if (response.schema[name]) {
+            response.schema[name] = field
+          }
+        }
+      }
       schema = response.schema
       notifications.success("Request sent successfully")
     }
@@ -386,6 +392,7 @@

   onMount(async () => {
     query = getSelectedQuery()
+    schema = query.schema

     try {
       // Clear any unsaved changes to the datasource
@@ -416,7 +423,6 @@
       query.fields.path = `${datasource.config.url}/${path ? path : ""}`
     }
     url = buildUrl(query.fields.path, breakQs)
-    schema = restUtils.schemaToFields(query.schema)
     requestBindings = restUtils.queryParametersToKeyValue(query.parameters)
     authConfigId = getAuthConfigId()
     if (!query.fields.disabledHeaders) {
@@ -682,10 +688,11 @@
           bind:object={schema}
           name="schema"
           headings
-          options={SchemaTypeOptions}
+          options={SchemaTypeOptionsExpanded}
           menuItems={schemaMenuItems}
           showMenu={!schemaReadOnly}
           readOnly={schemaReadOnly}
+          compare={(option, value) => option.type === value.type}
         />
       </Tab>
     {/if}
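With the schema kept in its stored shape, saving no longer round-trips through restUtils.fieldsToSchema, and a preview response's schema is merged with whatever the user has already customised before being adopted. A small sketch of that merge, with illustrative field data rather than anything taken from this commit:

// Existing schema, where the user has overridden the type of "age"
let schema = {
  name: { type: "string", name: "name" },
  age: { type: "string", name: "age" },
}

// Schema inferred from the latest preview response
const response = {
  schema: {
    name: { type: "string", name: "name" },
    age: { type: "number", name: "age" },
  },
}

// Same merge as the else branch above: existing definitions win
if (schema) {
  for (let [name, field] of Object.entries(schema)) {
    if (response.schema[name]) {
      response.schema[name] = field
    }
  }
}
schema = response.schema
console.log(schema.age) // { type: "string", name: "age" }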
@@ -271,6 +271,11 @@ export const SchemaTypeOptions = [
   { label: "Datetime", value: "datetime" },
 ]

+export const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({
+  ...el,
+  value: { type: el.value },
+}))
+
 export const RawRestBodyTypes = {
   NONE: "none",
   FORM: "form",
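SchemaTypeOptionsExpanded wraps each option's primitive value in an object carrying a type key, which is what the compare={(option, value) => option.type === value.type} props elsewhere in this commit rely on. Evaluated, it looks roughly like this; only the "Datetime" entry is visible in the hunk above, the other entries are assumed for illustration:

// Assumed shape of SchemaTypeOptions (only "Datetime" appears in the hunk above)
const SchemaTypeOptions = [
  { label: "Text", value: "string" },
  { label: "Number", value: "number" },
  { label: "Datetime", value: "datetime" },
]

const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({
  ...el,
  value: { type: el.value },
}))

console.log(SchemaTypeOptionsExpanded[0])
// { label: "Text", value: { type: "string" } }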
@@ -1,26 +1,6 @@
 import { IntegrationTypes } from "constants/backend"
 import { findHBSBlocks } from "@budibase/string-templates"

-export function schemaToFields(schema) {
-  const response = {}
-  if (schema && typeof schema === "object") {
-    for (let [field, value] of Object.entries(schema)) {
-      response[field] = value?.type || "string"
-    }
-  }
-  return response
-}
-
-export function fieldsToSchema(fields) {
-  const response = {}
-  if (fields && typeof fields === "object") {
-    for (let [name, type] of Object.entries(fields)) {
-      response[name] = { name, type }
-    }
-  }
-  return response
-}
-
 export function breakQueryString(qs) {
   if (!qs) {
     return {}
@@ -184,10 +164,8 @@ export const parseToCsv = (headers, rows) => {
 export default {
   breakQueryString,
   buildQueryString,
-  fieldsToSchema,
   flipHeaderState,
   keyValueToQueryParameters,
   parseToCsv,
   queryParametersToKeyValue,
-  schemaToFields,
 }
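schemaToFields and fieldsToSchema only existed to translate between the builder's flat { field: "type" } map and the stored { field: { name, type } } shape; since the builder now works with the stored shape directly, both helpers and their default-export entries are removed. For reference, the conversion they used to perform, with illustrative field names:

// Old in-memory shape the REST builder used to edit
const fields = { name: "string", age: "number" }

// What fieldsToSchema produced on save (and schemaToFields reversed on load)
const schema = Object.fromEntries(
  Object.entries(fields).map(([name, type]) => [name, { name, type }])
)
console.log(schema)
// { name: { name: "name", type: "string" }, age: { name: "age", type: "number" } }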
@@ -89,8 +89,8 @@ export function createQueriesStore() {
     // Assume all the fields are strings and create a basic schema from the
     // unique fields returned by the server
     const schema = {}
-    for (let [field, type] of Object.entries(result.schemaFields)) {
-      schema[field] = type || "string"
+    for (let [field, metadata] of Object.entries(result.schema)) {
+      schema[field] = metadata || { type: "string" }
     }
     return { ...result, schema, rows: result.rows || [] }
   }
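The store now reads field metadata objects from result.schema instead of bare type strings from result.schemaFields, falling back to { type: "string" } when the server sends nothing for a field. A quick sketch with an illustrative preview result:

// Illustrative preview result; "b" has no metadata from the server
const result = {
  schema: { a: { type: "string", name: "a" }, b: null },
  rows: [{ a: "x", b: 1 }],
}

const schema = {}
for (let [field, metadata] of Object.entries(result.schema)) {
  schema[field] = metadata || { type: "string" }
}
console.log(schema.b) // { type: "string" } — the fallback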
@@ -1,15 +1,21 @@
 import { generateQueryID } from "../../../db/utils"
-import { BaseQueryVerbs, FieldTypes } from "../../../constants"
+import { BaseQueryVerbs } from "../../../constants"
 import { Thread, ThreadType } from "../../../threads"
 import { save as saveDatasource } from "../datasource"
 import { RestImporter } from "./import"
 import { invalidateDynamicVariables } from "../../../threads/utils"
 import env from "../../../environment"
 import { quotas } from "@budibase/pro"
 import { events, context, utils, constants } from "@budibase/backend-core"
 import sdk from "../../../sdk"
-import { QueryEvent } from "../../../threads/definitions"
-import { ConfigType, Query, UserCtx, SessionCookie } from "@budibase/types"
+import { QueryEvent, QueryResponse } from "../../../threads/definitions"
+import {
+  ConfigType,
+  Query,
+  UserCtx,
+  SessionCookie,
+  QuerySchema,
+  FieldType,
+} from "@budibase/types"
 import { ValidQueryNameRegex } from "@budibase/shared-core"

 const Runner = new Thread(ThreadType.QUERY, {
@@ -162,39 +168,43 @@ export async function preview(ctx: UserCtx) {
     },
   }

-  const { rows, keys, info, extra } = (await Runner.run(inputs)) as any
-  const schemaFields: any = {}
+  const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
+  const previewSchema: Record<string, QuerySchema> = {}
+  const makeQuerySchema = (type: FieldType, name: string): QuerySchema => ({
+    type,
+    name,
+  })
   if (rows?.length > 0) {
     for (let key of [...new Set(keys)] as string[]) {
       const field = rows[0][key]
       let type = typeof field,
-        fieldType = FieldTypes.STRING
+        fieldMetadata = makeQuerySchema(FieldType.STRING, key)
       if (field)
         switch (type) {
           case "boolean":
-            schemaFields[key] = FieldTypes.BOOLEAN
+            fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
             break
           case "object":
             if (field instanceof Date) {
-              fieldType = FieldTypes.DATETIME
+              fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
             } else if (Array.isArray(field)) {
-              fieldType = FieldTypes.ARRAY
+              fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
             } else {
-              fieldType = FieldTypes.JSON
+              fieldMetadata = makeQuerySchema(FieldType.JSON, key)
             }
             break
           case "number":
-            fieldType = FieldTypes.NUMBER
+            fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
             break
         }
-      schemaFields[key] = fieldType
+      previewSchema[key] = fieldMetadata
     }
   }
   // if existing schema, update to include any previous schema keys
   if (existingSchema) {
-    for (let key of Object.keys(schemaFields)) {
-      if (existingSchema[key]?.type) {
-        schemaFields[key] = existingSchema[key].type
+    for (let key of Object.keys(previewSchema)) {
+      if (existingSchema[key]) {
+        previewSchema[key] = existingSchema[key]
       }
     }
   }
@@ -203,7 +213,7 @@ export async function preview(ctx: UserCtx) {
   await events.query.previewed(datasource, query)
   ctx.body = {
     rows,
-    schemaFields,
+    schema: previewSchema,
     info,
     extra,
   }
@@ -257,7 +267,9 @@ async function execute(
     schema: query.schema,
   }

-  const { rows, pagination, extra, info } = (await Runner.run(inputs)) as any
+  const { rows, pagination, extra, info } = await Runner.run<QueryResponse>(
+    inputs
+  )
   // remove the raw from execution incase transformer being used to hide data
   if (extra?.raw) {
     delete extra.raw
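preview() now returns field metadata under schema instead of bare type strings under schemaFields, inferring each field's type from the first returned row. A trimmed-down sketch of that inference; plain strings stand in for the FieldType enum, and the sample row is illustrative:

type QuerySchema = { name?: string; type: string }

function inferSchema(firstRow: Record<string, any>): Record<string, QuerySchema> {
  const out: Record<string, QuerySchema> = {}
  for (const [name, field] of Object.entries(firstRow)) {
    // Same ordering as the switch above: string by default, then
    // boolean / number / datetime / array / json based on the sample value
    let type = "string"
    if (typeof field === "boolean") type = "boolean"
    else if (typeof field === "number") type = "number"
    else if (field instanceof Date) type = "datetime"
    else if (Array.isArray(field)) type = "array"
    else if (field && typeof field === "object") type = "json"
    out[name] = { type, name }
  }
  return out
}

console.log(inferSchema({ title: "hi", count: 3, created: new Date(), tags: [] }))
// { title: { type: "string", name: "title" }, count: { type: "number", name: "count" },
//   created: { type: "datetime", name: "created" }, tags: { type: "array", name: "tags" } }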
@ -235,9 +235,9 @@ describe("/queries", () => {
|
|||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
// these responses come from the mock
|
||||
expect(res.body.schemaFields).toEqual({
|
||||
a: "string",
|
||||
b: "number",
|
||||
expect(res.body.schema).toEqual({
|
||||
a: { type: "string", name: "a" },
|
||||
b: { type: "number", name: "b" },
|
||||
})
|
||||
expect(res.body.rows.length).toEqual(1)
|
||||
expect(events.query.previewed).toBeCalledTimes(1)
|
||||
|
@ -300,10 +300,10 @@ describe("/queries", () => {
|
|||
queryString: "test={{ variable2 }}",
|
||||
})
|
||||
// these responses come from the mock
|
||||
expect(res.body.schemaFields).toEqual({
|
||||
opts: "json",
|
||||
url: "string",
|
||||
value: "string",
|
||||
expect(res.body.schema).toEqual({
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
value: { type: "string", name: "value" },
|
||||
})
|
||||
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=1")
|
||||
})
|
||||
|
@ -314,10 +314,10 @@ describe("/queries", () => {
|
|||
path: "www.google.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
expect(res.body.schemaFields).toEqual({
|
||||
opts: "json",
|
||||
url: "string",
|
||||
value: "string",
|
||||
expect(res.body.schema).toEqual({
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
value: { type: "string", name: "value" },
|
||||
})
|
||||
expect(res.body.rows[0].url).toContain("doctype%20html")
|
||||
})
|
||||
|
@ -337,10 +337,10 @@ describe("/queries", () => {
|
|||
path: "www.failonce.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
expect(res.body.schemaFields).toEqual({
|
||||
fails: "number",
|
||||
opts: "json",
|
||||
url: "string",
|
||||
expect(res.body.schema).toEqual({
|
||||
fails: { type: "number", name: "fails" },
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
})
|
||||
expect(res.body.rows[0].fails).toEqual(1)
|
||||
})
|
||||
|
|
|
@@ -376,8 +376,8 @@ export function checkExternalTables(
      errors[name] = "Table must have a primary key."
    }

-    const schemaFields = Object.keys(table.schema)
-    if (schemaFields.find(f => invalidColumns.includes(f))) {
+    const columnNames = Object.keys(table.schema)
+    if (columnNames.find(f => invalidColumns.includes(f))) {
      errors[name] = "Table contains invalid columns."
    }
  }
@ -3,6 +3,27 @@ import { processStringSync } from "@budibase/string-templates"
|
|||
import { context } from "@budibase/backend-core"
|
||||
import { getQueryParams, isProdAppID } from "../../../db/utils"
|
||||
import { BaseQueryVerbs } from "../../../constants"
|
||||
import { Query, QuerySchema } from "@budibase/types"
|
||||
|
||||
function updateSchema(query: Query): Query {
|
||||
if (!query.schema) {
|
||||
return query
|
||||
}
|
||||
const schema: Record<string, QuerySchema> = {}
|
||||
for (let key of Object.keys(query.schema)) {
|
||||
if (typeof query.schema[key] === "string") {
|
||||
schema[key] = { type: query.schema[key] as string, name: key }
|
||||
} else {
|
||||
schema[key] = query.schema[key] as QuerySchema
|
||||
}
|
||||
}
|
||||
query.schema = schema
|
||||
return query
|
||||
}
|
||||
|
||||
function updateSchemas(queries: Query[]): Query[] {
|
||||
return queries.map(query => updateSchema(query))
|
||||
}
|
||||
|
||||
// simple function to append "readable" to all read queries
|
||||
function enrichQueries(input: any) {
|
||||
|
@ -25,7 +46,7 @@ export async function find(queryId: string) {
|
|||
delete query.fields
|
||||
delete query.parameters
|
||||
}
|
||||
return query
|
||||
return updateSchema(query)
|
||||
}
|
||||
|
||||
export async function fetch(opts: { enrich: boolean } = { enrich: true }) {
|
||||
|
@ -37,12 +58,11 @@ export async function fetch(opts: { enrich: boolean } = { enrich: true }) {
|
|||
})
|
||||
)
|
||||
|
||||
const queries = body.rows.map((row: any) => row.doc)
|
||||
let queries = body.rows.map((row: any) => row.doc)
|
||||
if (opts.enrich) {
|
||||
return enrichQueries(queries)
|
||||
} else {
|
||||
return queries
|
||||
queries = await enrichQueries(queries)
|
||||
}
|
||||
return updateSchemas(queries)
|
||||
}
|
||||
|
||||
export async function enrichContext(
|
||||
|
|
|
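updateSchema is the read-path migration: queries saved before this change still carry string-valued schema entries, and the SDK wraps them into QuerySchema objects on the way out (updateSchemas does the same for lists). A self-contained sketch of the normalisation, with a local type alias standing in for the @budibase/types import:

type QuerySchema = { name?: string; type: string }

// A schema as stored by a pre-migration query document
const legacySchema: Record<string, QuerySchema | string> = {
  name: "string",
  age: "number",
}

// Same normalisation updateSchema performs on read
const schema: Record<string, QuerySchema> = {}
for (const key of Object.keys(legacySchema)) {
  const value = legacySchema[key]
  schema[key] = typeof value === "string" ? { type: value, name: key } : value
}
console.log(schema)
// { name: { type: "string", name: "name" }, age: { type: "number", name: "age" } }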
@@ -278,6 +278,9 @@ class TestConfiguration {
    if (params) {
      request.params = params
    }
+    request.throw = (status: number, message: string) => {
+      throw new Error(`Error ${status} - ${message}`)
+    }
    return this.doInContext(appId, async () => {
      await controlFunc(request)
      return request.body
@@ -1,3 +1,5 @@
+import { QuerySchema, Row } from "@budibase/types"
+
 export type WorkerCallback = (error: any, response?: any) => void

 export interface QueryEvent {
@@ -11,7 +13,15 @@ export interface QueryEvent {
   queryId: string
   environmentVariables?: Record<string, string>
   ctx?: any
-  schema?: Record<string, { name?: string; type: string }>
+  schema?: Record<string, QuerySchema | string>
 }

+export interface QueryResponse {
+  rows: Row[]
+  keys: string[]
+  info: any
+  extra: any
+  pagination: any
+}
+
 export interface QueryVariable {
@@ -74,7 +74,7 @@ export class Thread {
    )
  }

-  run(job: AutomationJob | QueryEvent) {
+  run<T>(job: AutomationJob | QueryEvent): Promise<T> {
    const timeout = this.timeoutMs
    return new Promise((resolve, reject) => {
      function fire(worker: any) {
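Making run generic lets callers state the response shape instead of casting with `as any`; the query controller above calls it as Runner.run<QueryResponse>(inputs). A type-level sketch, with the Thread reduced to a declaration and QueryResponse simplified (Row[] replaced by any[]):

interface QueryResponse {
  rows: any[]
  keys: string[]
  info: any
  extra: any
  pagination: any
}

// Stand-in for the real Thread instance; only the generic signature matters here
declare const Runner: { run<T>(job: unknown): Promise<T> }

async function preview(inputs: unknown) {
  // The destructured fields are typed via the generic, no `as any` needed
  const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
  return { rows, keys, info, extra }
}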
@@ -1,7 +1,12 @@
 import { default as threadUtils } from "./utils"

 threadUtils.threadSetup()
-import { WorkerCallback, QueryEvent, QueryVariable } from "./definitions"
+import {
+  WorkerCallback,
+  QueryEvent,
+  QueryVariable,
+  QueryResponse,
+} from "./definitions"
 import ScriptRunner from "../utilities/scriptRunner"
 import { getIntegration } from "../integrations"
 import { processStringSync } from "@budibase/string-templates"
@@ -9,7 +14,7 @@ import { context, cache, auth } from "@budibase/backend-core"
 import { getGlobalIDFromUserMetadataID } from "../db/utils"
 import sdk from "../sdk"
 import { cloneDeep } from "lodash/fp"
-import { SourceName, Query } from "@budibase/types"
+import { Query } from "@budibase/types"

 import { isSQL } from "../integrations/utils"
 import { interpolateSQL } from "../integrations/queries/sql"
@@ -53,7 +58,7 @@ class QueryRunner {
    this.hasDynamicVariables = false
  }

-  async execute(): Promise<any> {
+  async execute(): Promise<QueryResponse> {
    let { datasource, fields, queryVerb, transformer, schema } = this
    let datasourceClone = cloneDeep(datasource)
    let fieldsClone = cloneDeep(fields)
@@ -1,12 +1,17 @@
 import { Document } from "../document"

+export interface QuerySchema {
+  name?: string
+  type: string
+}
+
 export interface Query extends Document {
   datasourceId: string
   name: string
   parameters: QueryParameter[]
   fields: RestQueryFields | any
   transformer: string | null
-  schema: Record<string, { name?: string; type: string }>
+  schema: Record<string, QuerySchema | string>
   readable: boolean
   queryVerb: string
 }
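Widening schema to Record<string, QuerySchema | string> lets documents written before this change (bare type strings) and after it (QuerySchema objects) both satisfy the type; the SDK's updateSchema normalises the former on read. Illustrative values of each vintage, mirroring the shapes asserted in the tests above:

type QuerySchema = { name?: string; type: string }
type QuerySchemaMap = Record<string, QuerySchema | string>

// Pre-migration document: bare type strings
const oldSchema: QuerySchemaMap = { url: "string", fails: "number" }

// Post-migration document: full QuerySchema entries
const newSchema: QuerySchemaMap = {
  url: { type: "string", name: "url" },
  fails: { type: "number", name: "fails" },
}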
yarn.lock — 24 changed lines
@@ -5557,9 +5557,9 @@
   integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg==

 "@types/node@>=8.1.0":
-  version "20.11.2"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.2.tgz#39cea3fe02fbbc2f80ed283e94e1d24f2d3856fb"
-  integrity sha512-cZShBaVa+UO1LjWWBPmWRR4+/eY/JR/UIEcDlVsw3okjWEu+rB7/mH6X3B/L+qJVHDLjk9QW/y2upp9wp1yDXA==
+  version "20.11.6"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.6.tgz#6adf4241460e28be53836529c033a41985f85b6e"
+  integrity sha512-+EOokTnksGVgip2PbYbr3xnR7kZigh4LbybAfBAw5BpnQ+FqBYUsvCEjYd70IXKlbohQ64mzEYmMtlWUY8q//Q==
   dependencies:
     undici-types "~5.26.4"

@@ -9497,9 +9497,9 @@ dotenv@8.6.0, dotenv@^8.2.0:
   integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==

 dotenv@^16.3.1:
-  version "16.3.1"
-  resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e"
-  integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==
+  version "16.4.0"
+  resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.0.tgz#ac21c3fcaad2e7832a1cd0c0e4e8e52225ecda0e"
+  integrity sha512-WvImr5kpN5NGNn7KaDjJnLTh5rDVLZiDf/YLA8T1ZEZEBZNEDOE+mnkS0PVjPax8ZxBP5zC5SLMB3/9VV5de9g==

 dotenv@~10.0.0:
   version "10.0.0"
@@ -17426,11 +17426,12 @@ postgres-interval@^1.1.0:
     xtend "^4.0.0"

 posthog-js@^1.13.4:
-  version "1.100.0"
-  resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.100.0.tgz#687b9a6e4ed226aa6572f4040b418ea0c8b3d353"
-  integrity sha512-r2XZEiHQ9mBK7D1G9k57I8uYZ2kZTAJ0OCX6K/OOdCWN8jKPhw3h5F9No5weilP6eVAn+hrsy7NvPV7SCX7gMg==
+  version "1.101.0"
+  resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.101.0.tgz#00e0fc6e164addd52b1738f087996bb0d6685943"
+  integrity sha512-mzwYSSWr9FdEMDeVpc+diLfc85+10r/LgELGtsW/HaYk+0du/GEql6szpqG8YXMMgb2dE4dnj0JICZFIJd7K3w==
   dependencies:
     fflate "^0.4.1"
+    preact "^10.19.3"

 posthog-js@^1.36.0:
   version "1.96.1"
@@ -17676,6 +17677,11 @@ pprof-format@^2.0.7:
   resolved "https://registry.yarnpkg.com/pprof-format/-/pprof-format-2.0.7.tgz#526e4361f8b37d16b2ec4bb0696b5292de5046a4"
   integrity sha512-1qWaGAzwMpaXJP9opRa23nPnt2Egi7RMNoNBptEE/XwHbcn4fC2b/4U4bKc5arkGkIh2ZabpF2bEb+c5GNHEKA==

+preact@^10.19.3:
+  version "10.19.3"
+  resolved "https://registry.yarnpkg.com/preact/-/preact-10.19.3.tgz#7a7107ed2598a60676c943709ea3efb8aaafa899"
+  integrity sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ==
+
 precinct@^8.1.0:
   version "8.3.1"
   resolved "https://registry.yarnpkg.com/precinct/-/precinct-8.3.1.tgz#94b99b623df144eed1ce40e0801c86078466f0dc"