Merge branch 'master' into security-updates
commit e5e0563546

@@ -114,9 +114,9 @@ jobs:
- name: Test
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
else
yarn test --ignore=@budibase/worker --ignore=@budibase/server
yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix -- --verbose --reporters=default --reporters=github-actions
fi

test-worker:
@@ -138,10 +138,15 @@ jobs:
- name: Test worker
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/worker
AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/worker)
if [ -z "$AFFECTED" ]; then
echo "No affected tests to run"
exit 0
fi
fi

cd packages/worker
yarn test --verbose --reporters=default --reporters=github-actions

test-server:
runs-on: ubuntu-latest
@@ -211,7 +216,7 @@ jobs:
fi

cd packages/server
yarn test --filter $FILTER --passWithNoTests
yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions

check-pro-submodule:
runs-on: ubuntu-latest

@@ -57,7 +57,7 @@
"dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "./scripts/devDocker.sh",
"test": "lerna run --concurrency 1 --stream test --stream",
"test": "lerna run --concurrency 1 --stream test",
"test:containers:kill": "./scripts/killTestcontainers.sh",
"lint:eslint": "eslint packages --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",

@@ -164,9 +164,12 @@ describe("/datasources", () => {
})
})

datasourceDescribe(
{ name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
({ config, dsProvider }) => {
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
})

if (descriptions.length) {
describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
@@ -492,5 +495,5 @@ datasourceDescribe(
)
})
})
}
)
})
}

@@ -14,8 +14,13 @@ import { events } from "@budibase/backend-core"
import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"

datasourceDescribe(
{ name: "queries (%s)", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
})

if (descriptions.length) {
describe.each(descriptions)(
"queries ($dbName)",
({ config, dsProvider, isOracle, isMSSQL, isPostgres }) => {
let rawDatasource: Datasource
let datasource: Datasource
@@ -945,4 +950,5 @@ datasourceDescribe(
})
})
}
)
)
}

@@ -9,8 +9,11 @@ import { generator } from "@budibase/backend-core/tests"
const expectValidId = expect.stringMatching(/^\w{24}$/)
const expectValidBsonObjectId = expect.any(BSON.ObjectId)

datasourceDescribe(
{ name: "/queries", only: [DatabaseName.MONGODB] },
const descriptions = datasourceDescribe({ only: [DatabaseName.MONGODB] })

if (descriptions.length) {
describe.each(descriptions)(
"/queries ($dbName)",
({ config, dsProvider }) => {
let collection: string
let datasource: Datasource
@@ -714,4 +717,5 @@ datasourceDescribe(
})
})
}
)
)
}

@@ -85,8 +85,11 @@ function encodeJS(binding: string) {
return `{{ js "${Buffer.from(binding).toString("base64")}"}}`
}

datasourceDescribe(
{ name: "/rows (%s)", exclude: [DatabaseName.MONGODB] },
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
describe.each(descriptions)(
"/rows ($dbName)",
({ config, dsProvider, isInternal, isMSSQL, isOracle }) => {
let table: Table
let datasource: Datasource | undefined
@@ -338,7 +341,9 @@ datasourceDescribe(
await new Promise(r => setTimeout(r, Math.random() * 50))
}
}
throw new Error(`Failed to create row after ${attempts} attempts`)
throw new Error(
`Failed to create row after ${attempts} attempts`
)
})
)

@@ -1495,7 +1500,9 @@ datasourceDescribe(
it("should return no errors on valid row", async () => {
const rowUsage = await getRowUsage()

const res = await config.api.row.validate(table._id!, { name: "ivan" })
const res = await config.api.row.validate(table._id!, {
name: "ivan",
})

expect(res.valid).toBe(true)
expect(Object.keys(res.errors)).toEqual([])
@@ -2244,7 +2251,10 @@ datasourceDescribe(
const table = await config.api.table.save(tableRequest)
const toCreate = generator
.unique(() => generator.integer({ min: 0, max: 10000 }), 10)
.map(number => ({ number, string: generator.word({ length: 30 }) }))
.map(number => ({
number,
string: generator.word({ length: 30 }),
}))

const rows = await Promise.all(
toCreate.map(d => config.api.row.save(table._id!, d))
@@ -3019,7 +3029,10 @@ datasourceDescribe(
},
],
["from original saved row", (row: Row) => row],
["from updated row", (row: Row) => config.api.row.save(viewId, row)],
[
"from updated row",
(row: Row) => config.api.row.save(viewId, row),
],
]

it.each(testScenarios)(
@@ -3243,7 +3256,10 @@ datasourceDescribe(

async function updateFormulaColumn(
formula: string,
opts?: { responseType?: FormulaResponseType; formulaType?: FormulaType }
opts?: {
responseType?: FormulaResponseType
formulaType?: FormulaType
}
) {
table = await config.api.table.save({
...table,
@@ -3480,6 +3496,5 @@ datasourceDescribe(
})
})
}
)

// todo: remove me
)
}

@@ -977,8 +977,13 @@ describe("/rowsActions", () => {
})
})

datasourceDescribe(
{ name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] },
const descriptions = datasourceDescribe({
only: [DatabaseName.SQS, DatabaseName.POSTGRES],
})

if (descriptions.length) {
describe.each(descriptions)(
"row actions ($dbName)",
({ config, dsProvider, isInternal }) => {
let datasource: Datasource | undefined

@@ -1036,4 +1041,5 @@ datasourceDescribe(
expect(await getRowActionsFromDb(tableId)).toBeUndefined()
})
}
)
)
}

@@ -59,11 +59,11 @@ jest.mock("@budibase/pro", () => ({
},
}))

datasourceDescribe(
{
name: "search (%s)",
exclude: [DatabaseName.MONGODB],
},
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
describe.each(descriptions)(
"search ($dbName)",
({ config, dsProvider, isInternal, isOracle, isSql }) => {
let datasource: Datasource | undefined
let client: Knex | undefined
@@ -198,7 +198,9 @@ datasourceDescribe(
])
}

describe.each(tableOrView)("from %s", (sourceType, createTableOrView) => {
describe.each(tableOrView)(
"from %s",
(sourceType, createTableOrView) => {
const isView = sourceType === "view"

class SearchAssertion {
@@ -263,12 +265,16 @@ datasourceDescribe(
expectedRow: T,
foundRows: T[]
): NonNullable<T> {
const row = foundRows.find(row => this.isMatch(expectedRow, row))
const row = foundRows.find(row =>
this.isMatch(expectedRow, row)
)
if (!row) {
const fields = Object.keys(expectedRow)
// To make the error message more readable, we only include the fields
// that are present in the expected row.
const searchedObjects = foundRows.map(row => _.pick(row, fields))
const searchedObjects = foundRows.map(row =>
_.pick(row, fields)
)
throw new Error(
`Failed to find row:\n\n${JSON.stringify(
expectedRow,
@@ -316,7 +322,9 @@ datasourceDescribe(
expect([...foundRows]).toEqual(
expect.arrayContaining(
expectedRows.map((expectedRow: any) =>
expect.objectContaining(this.popRow(expectedRow, foundRows))
expect.objectContaining(
this.popRow(expectedRow, foundRows)
)
)
)
)
@@ -344,7 +352,9 @@ datasourceDescribe(
}

// Asserts that the query doesn't return a property, e.g. pagination parameters.
async toNotHaveProperty(properties: (keyof SearchResponse<Row>)[]) {
async toNotHaveProperty(
properties: (keyof SearchResponse<Row>)[]
) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
for (let property of properties) {
@@ -366,7 +376,9 @@ datasourceDescribe(
expect([...foundRows]).toEqual(
expect.arrayContaining(
expectedRows.map((expectedRow: any) =>
expect.objectContaining(this.popRow(expectedRow, foundRows))
expect.objectContaining(
this.popRow(expectedRow, foundRows)
)
)
)
)
@@ -403,15 +415,15 @@ datasourceDescribe(

describe("equal", () => {
it("successfully finds true row", async () => {
await expectQuery({ equal: { isTrue: true } }).toMatchExactly([
{ isTrue: true },
])
await expectQuery({ equal: { isTrue: true } }).toMatchExactly(
[{ isTrue: true }]
)
})

it("successfully finds false row", async () => {
await expectQuery({ equal: { isTrue: false } }).toMatchExactly([
{ isTrue: false },
])
await expectQuery({
equal: { isTrue: false },
}).toMatchExactly([{ isTrue: false }])
})
})

@@ -431,9 +443,9 @@ datasourceDescribe(

describe("oneOf", () => {
it("successfully finds true row", async () => {
await expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly(
[{ isTrue: true }]
)
await expectQuery({
oneOf: { isTrue: [true] },
}).toContainExactly([{ isTrue: true }])
})

it("successfully finds false row", async () => {
@@ -485,7 +497,10 @@ datasourceDescribe(
name: currentUser.firstName,
appointment: future.toISOString(),
},
{ name: "serverDate", appointment: serverTime.toISOString() },
{
name: "serverDate",
appointment: serverTime.toISOString(),
},
{
name: "single user, session user",
single_user: currentUser,
@@ -540,7 +555,10 @@ datasourceDescribe(

tableOrViewId = await createTableOrView({
name: { name: "name", type: FieldType.STRING },
appointment: { name: "appointment", type: FieldType.DATETIME },
appointment: {
name: "appointment",
type: FieldType.DATETIME,
},
single_user: {
name: "single_user",
type: FieldType.BB_REFERENCE_SINGLE,
@@ -586,7 +604,9 @@ datasourceDescribe(
it("should return all rows matching the session user firstname when logical operator used", async () => {
await expectQuery({
$and: {
conditions: [{ equal: { name: "{{ [user].firstName }}" } }],
conditions: [
{ equal: { name: "{{ [user].firstName }}" } },
],
},
}).toContainExactly([
{
@@ -610,7 +630,10 @@ datasourceDescribe(
name: config.getUser().firstName,
appointment: future.toISOString(),
},
{ name: "serverDate", appointment: serverTime.toISOString() },
{
name: "serverDate",
appointment: serverTime.toISOString(),
},
])
})
})
@@ -626,7 +649,10 @@ datasourceDescribe(
}).toContainExactly([
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
{ name: "serverDate", appointment: serverTime.toISOString() },
{
name: "serverDate",
appointment: serverTime.toISOString(),
},
])
})

@@ -736,7 +762,9 @@ datasourceDescribe(

it("should not match the session user id in a deprecated multi user field", async () => {
await expectQuery({
notContains: { deprecated_multi_user: ["{{ [user]._id }}"] },
notContains: {
deprecated_multi_user: ["{{ [user]._id }}"],
},
notEmpty: { deprecated_multi_user: true },
}).toContainExactly([
{
@@ -870,9 +898,9 @@ datasourceDescribe(

describe("equal", () => {
it("successfully finds a row", async () => {
await expectQuery({ equal: { name: "foo" } }).toContainExactly([
{ name: "foo" },
])
await expectQuery({
equal: { name: "foo" },
}).toContainExactly([{ name: "foo" }])
})

it("fails to find nonexistent row", async () => {
@@ -897,27 +925,29 @@ datasourceDescribe(

describe("notEqual", () => {
it("successfully finds a row", async () => {
await expectQuery({ notEqual: { name: "foo" } }).toContainExactly(
[{ name: "bar" }]
)
await expectQuery({
notEqual: { name: "foo" },
}).toContainExactly([{ name: "bar" }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({ notEqual: { name: "bar" } }).toContainExactly(
[{ name: "foo" }]
)
await expectQuery({
notEqual: { name: "bar" },
}).toContainExactly([{ name: "foo" }])
})
})

describe("oneOf", () => {
it("successfully finds a row", async () => {
await expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([
{ name: "foo" },
])
await expectQuery({
oneOf: { name: ["foo"] },
}).toContainExactly([{ name: "foo" }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing()
await expectQuery({
oneOf: { name: ["none"] },
}).toFindNothing()
})

it("can have multiple values for same column", async () => {
@@ -965,9 +995,9 @@ datasourceDescribe(

describe("fuzzy", () => {
it("successfully finds a row", async () => {
await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([
{ name: "foo" },
])
await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly(
[{ name: "foo" }]
)
})

it("fails to find nonexistent row", async () => {
@@ -977,19 +1007,21 @@ datasourceDescribe(

describe("string", () => {
it("successfully finds a row", async () => {
await expectQuery({ string: { name: "fo" } }).toContainExactly([
{ name: "foo" },
])
await expectQuery({
string: { name: "fo" },
}).toContainExactly([{ name: "foo" }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({ string: { name: "none" } }).toFindNothing()
await expectQuery({
string: { name: "none" },
}).toFindNothing()
})

it("is case-insensitive", async () => {
await expectQuery({ string: { name: "FO" } }).toContainExactly([
{ name: "foo" },
])
await expectQuery({
string: { name: "FO" },
}).toContainExactly([{ name: "foo" }])
})
})

@@ -1048,10 +1080,9 @@ datasourceDescribe(

describe("notEmpty", () => {
it("finds all non-empty rows", async () => {
await expectQuery({ notEmpty: { name: null } }).toContainExactly([
{ name: "foo" },
{ name: "bar" },
])
await expectQuery({
notEmpty: { name: null },
}).toContainExactly([{ name: "foo" }, { name: "bar" }])
})

it("should not be affected by when filter empty behaviour", async () => {
@@ -1167,9 +1198,9 @@ datasourceDescribe(
})

it("fails to find nonexistent row", async () => {
await expectQuery({ notEqual: { age: 10 } }).toContainExactly([
{ age: 1 },
])
await expectQuery({ notEqual: { age: 10 } }).toContainExactly(
[{ age: 1 }]
)
})
})

@@ -1317,9 +1348,9 @@ datasourceDescribe(

describe("equal", () => {
it("successfully finds a row", async () => {
await expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([
{ dob: JAN_1ST },
])
await expectQuery({
equal: { dob: JAN_1ST },
}).toContainExactly([{ dob: JAN_1ST }])
})

it("fails to find nonexistent row", async () => {
@@ -1343,13 +1374,15 @@ datasourceDescribe(

describe("oneOf", () => {
it("successfully finds a row", async () => {
await expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly(
[{ dob: JAN_1ST }]
)
await expectQuery({
oneOf: { dob: [JAN_1ST] },
}).toContainExactly([{ dob: JAN_1ST }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing()
await expectQuery({
oneOf: { dob: [JAN_2ND] },
}).toFindNothing()
})
})

@@ -1381,7 +1414,10 @@ datasourceDescribe(
it("greater than equal to", async () => {
await expectQuery({
range: {
dob: { low: JAN_10TH, high: MAX_VALID_DATE.toISOString() },
dob: {
low: JAN_10TH,
high: MAX_VALID_DATE.toISOString(),
},
},
}).toContainExactly([{ dob: JAN_10TH }])
})
@@ -1484,9 +1520,9 @@ datasourceDescribe(

describe("equal", () => {
it("successfully finds a row", async () => {
await expectQuery({ equal: { time: T_1000 } }).toContainExactly(
[{ time: "10:00:00" }]
)
await expectQuery({
equal: { time: T_1000 },
}).toContainExactly([{ time: "10:00:00" }])
})

it("fails to find nonexistent row", async () => {
@@ -1692,7 +1728,9 @@ datasourceDescribe(
describe("oneOf", () => {
it("successfully finds a row", async () => {
await expectQuery({
oneOf: { ai: ["Mock LLM Response", "Other LLM Response"] },
oneOf: {
ai: ["Mock LLM Response", "Other LLM Response"],
},
}).toContainExactly([
{ product: "Big Mac" },
{ product: "McCrispy" },
@@ -1745,9 +1783,12 @@ datasourceDescribe(
})

it("finds all with empty list", async () => {
await expectQuery({ contains: { numbers: [] } }).toContainExactly(
[{ numbers: ["one", "two"] }, { numbers: ["three"] }]
)
await expectQuery({
contains: { numbers: [] },
}).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
])
})
})

@@ -1817,14 +1858,18 @@ datasourceDescribe(
tableOrViewId = await createTableOrView({
num: { name: "num", type: FieldType.BIGINT },
})
await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }])
await createRows([
{ num: SMALL },
{ num: MEDIUM },
{ num: BIG },
])
})

describe("equal", () => {
it("successfully finds a row", async () => {
await expectQuery({ equal: { num: SMALL } }).toContainExactly([
{ num: SMALL },
])
await expectQuery({ equal: { num: SMALL } }).toContainExactly(
[{ num: SMALL }]
)
})

it("successfully finds a big value", async () => {
@@ -1840,26 +1885,23 @@ datasourceDescribe(

describe("notEqual", () => {
it("successfully finds a row", async () => {
await expectQuery({ notEqual: { num: SMALL } }).toContainExactly([
{ num: MEDIUM },
{ num: BIG },
])
await expectQuery({
notEqual: { num: SMALL },
}).toContainExactly([{ num: MEDIUM }, { num: BIG }])
})

it("fails to find nonexistent row", async () => {
await expectQuery({ notEqual: { num: 10 } }).toContainExactly([
{ num: SMALL },
{ num: MEDIUM },
{ num: BIG },
])
await expectQuery({ notEqual: { num: 10 } }).toContainExactly(
[{ num: SMALL }, { num: MEDIUM }, { num: BIG }]
)
})
})

describe("oneOf", () => {
it("successfully finds a row", async () => {
await expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([
{ num: SMALL },
])
await expectQuery({
oneOf: { num: [SMALL] },
}).toContainExactly([{ num: SMALL }])
})

it("successfully finds all rows", async () => {
@@ -1944,7 +1986,9 @@ datasourceDescribe(

describe("not equal", () => {
it("successfully finds a row", async () => {
await expectQuery({ notEqual: { auto: 1 } }).toContainExactly([
await expectQuery({
notEqual: { auto: 1 },
}).toContainExactly([
{ auto: 2 },
{ auto: 3 },
{ auto: 4 },
@@ -1958,7 +2002,9 @@ datasourceDescribe(
})

it("fails to find nonexistent row", async () => {
await expectQuery({ notEqual: { auto: 0 } }).toContainExactly([
await expectQuery({
notEqual: { auto: 0 },
}).toContainExactly([
{ auto: 1 },
{ auto: 2 },
{ auto: 3 },
@@ -1975,9 +2021,9 @@ datasourceDescribe(

describe("oneOf", () => {
it("successfully finds a row", async () => {
await expectQuery({ oneOf: { auto: [1] } }).toContainExactly([
{ auto: 1 },
])
await expectQuery({
oneOf: { auto: [1] },
}).toContainExactly([{ auto: 1 }])
})

it("fails to find nonexistent row", async () => {
@@ -2081,13 +2127,16 @@ datasourceDescribe(
hasNextPage: boolean | undefined = true,
rowCount: number = 0
do {
const response = await config.api.row.search(tableOrViewId, {
const response = await config.api.row.search(
tableOrViewId,
{
tableId: tableOrViewId,
limit: 1,
paginate: true,
query: {},
bookmark,
})
}
)
bookmark = response.bookmark
hasNextPage = response.hasNextPage
expect(response.rows.length).toEqual(1)
@@ -2105,13 +2154,16 @@ datasourceDescribe(

// eslint-disable-next-line no-constant-condition
while (true) {
const response = await config.api.row.search(tableOrViewId, {
const response = await config.api.row.search(
tableOrViewId,
{
tableId: tableOrViewId,
limit: 3,
query: {},
bookmark,
paginate: true,
})
}
)

rows.push(...response.rows)

@@ -2144,7 +2196,9 @@ datasourceDescribe(
})

it("fails to find nonexistent row", async () => {
await expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing()
await expectQuery({
equal: { "1:1:name": "none" },
}).toFindNothing()
})
})

@@ -2221,7 +2275,11 @@ datasourceDescribe(
},
})

await createRows([{ user: user1 }, { user: user2 }, { user: null }])
await createRows([
{ user: user1 },
{ user: user2 },
{ user: null },
])
})

describe("equal", () => {
@@ -2232,7 +2290,9 @@ datasourceDescribe(
})

it("fails to find nonexistent row", async () => {
await expectQuery({ equal: { user: "us_none" } }).toFindNothing()
await expectQuery({
equal: { user: "us_none" },
}).toFindNothing()
})
})

@@ -2270,15 +2330,17 @@ datasourceDescribe(

describe("empty", () => {
it("finds empty rows", async () => {
await expectQuery({ empty: { user: null } }).toContainExactly([
{},
])
await expectQuery({ empty: { user: null } }).toContainExactly(
[{}]
)
})
})

describe("notEmpty", () => {
it("finds non-empty rows", async () => {
await expectQuery({ notEmpty: { user: null } }).toContainExactly([
await expectQuery({
notEmpty: { user: null },
}).toContainExactly([
{ user: { _id: user1._id } },
{ user: { _id: user2._id } },
])
@@ -2385,7 +2447,9 @@ datasourceDescribe(
await expectQuery({
equal: { number: 1 },
contains: { users: [user1._id] },
}).toContainExactly([{ users: [{ _id: user1._id }], number: 1 }])
}).toContainExactly([
{ users: [{ _id: user1._id }], number: 1 },
])
})

it("fails to find nonexistent row", async () => {
@@ -2408,15 +2472,18 @@ datasourceDescribe(
let productCategoryTable: Table, productCatRows: Row[]

beforeAll(async () => {
const { relatedTable, tableId } = await basicRelationshipTables(
relationshipType
)
const { relatedTable, tableId } =
await basicRelationshipTables(relationshipType)
tableOrViewId = tableId
productCategoryTable = relatedTable

productCatRows = await Promise.all([
config.api.row.save(productCategoryTable._id!, { name: "foo" }),
config.api.row.save(productCategoryTable._id!, { name: "bar" }),
config.api.row.save(productCategoryTable._id!, {
name: "foo",
}),
config.api.row.save(productCategoryTable._id!, {
name: "bar",
}),
])

await Promise.all([
@@ -2439,7 +2506,10 @@ datasourceDescribe(
await expectQuery({
equal: { ["productCat.name"]: "foo" },
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
{
name: "foo",
productCat: [{ _id: productCatRows[0]._id }],
},
])
})

@@ -2447,7 +2517,10 @@ datasourceDescribe(
await expectQuery({
equal: { [`${productCategoryTable.name}.name`]: "foo" },
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
{
name: "foo",
productCat: [{ _id: productCatRows[0]._id }],
},
])
})

@@ -2458,7 +2531,10 @@ datasourceDescribe(
})

describe("logical filters", () => {
const logicalOperators = [LogicalOperator.AND, LogicalOperator.OR]
const logicalOperators = [
LogicalOperator.AND,
LogicalOperator.OR,
]

describe("$and", () => {
it("should allow single conditions", async () => {
@@ -2699,9 +2775,8 @@ datasourceDescribe(
RelationshipType.MANY_TO_MANY,
])("big relations (%s)", relationshipType => {
beforeAll(async () => {
const { relatedTable, tableId } = await basicRelationshipTables(
relationshipType
)
const { relatedTable, tableId } =
await basicRelationshipTables(relationshipType)
tableOrViewId = tableId
const mainRow = await config.api.row.save(tableOrViewId, {
name: "foo",
@@ -2715,12 +2790,15 @@ datasourceDescribe(
})

it("can only pull 10 related rows", async () => {
await withCoreEnv({ SQL_MAX_RELATED_ROWS: "10" }, async () => {
await withCoreEnv(
{ SQL_MAX_RELATED_ROWS: "10" },
async () => {
const response = await expectQuery({}).toContain([
{ name: "foo" },
])
expect(response.rows[0].productCat).toBeArrayOfSize(10)
})
}
)
})

it("can pull max rows when env not set (defaults to 500)", async () => {
@@ -2935,9 +3013,11 @@ datasourceDescribe(
})
})

describe.each(["data_name_test", "name_data_test", "name_test_data_"])(
"special (%s) case",
column => {
describe.each([
"data_name_test",
"name_data_test",
"name_test_data_",
])("special (%s) case", column => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
[column]: {
@@ -2957,8 +3037,7 @@ datasourceDescribe(
},
}).toContainExactly([{ [column]: "a" }])
})
}
)
})

isInternal &&
describe("sample data", () => {
@@ -2980,10 +3059,22 @@ datasourceDescribe(
})

describe.each([
{ low: "2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" },
{ low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" },
{ low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
{ low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
{
low: "2024-07-03T00:00:00.000Z",
high: "9999-00-00T00:00:00.000Z",
},
{
low: "2024-07-03T00:00:00.000Z",
high: "9998-00-00T00:00:00.000Z",
},
{
low: "0000-00-00T00:00:00.000Z",
high: "2024-07-04T00:00:00.000Z",
},
{
low: "0001-00-00T00:00:00.000Z",
high: "2024-07-04T00:00:00.000Z",
},
])("date special cases", ({ low, high }) => {
const earlyDate = "2024-07-03T10:00:00.000Z",
laterDate = "2024-07-03T11:00:00.000Z"
@@ -3260,13 +3351,17 @@ datasourceDescribe(
},
})

const toRelateTable = await config.api.table.get(toRelateTableId)
const toRelateTable = await config.api.table.get(
toRelateTableId
)
await config.api.table.save({
...toRelateTable,
primaryDisplay: "link",
})
const relatedRows = await Promise.all([
config.api.row.save(toRelateTable._id!, { name: "related" }),
config.api.row.save(toRelateTable._id!, {
name: "related",
}),
])
await config.api.row.save(tableOrViewId, {
name: "test",
@@ -3645,7 +3740,9 @@ datasourceDescribe(
"'; SHUTDOWN --",
]

describe.each(badStrings)("bad string: %s", badStringTemplate => {
describe.each(badStrings)(
"bad string: %s",
badStringTemplate => {
// The SQL that knex generates when you try to use a double quote in a
// field name is always invalid and never works, so we skip it for these
// tests.
@@ -3665,12 +3762,17 @@ datasourceDescribe(
...table,
schema: {
...table.schema,
[badString]: { name: badString, type: FieldType.STRING },
[badString]: {
name: badString,
type: FieldType.STRING,
},
},
})

if (docIds.isViewId(tableOrViewId)) {
const view = await config.api.viewV2.get(tableOrViewId)
const view = await config.api.viewV2.get(
tableOrViewId
)
await config.api.viewV2.update({
...view,
schema: {
@@ -3726,9 +3828,12 @@ datasourceDescribe(
await assertTableExists(table)
await assertTableNumRows(table, 1)
})
})
})
})
}
)
})
}
)
)
})
}
)
}

@@ -38,8 +38,11 @@ import timekeeper from "timekeeper"
const { basicTable } = setup.structures
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/

datasourceDescribe(
{ name: "/tables (%s)", exclude: [DatabaseName.MONGODB] },
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
describe.each(descriptions)(
"/tables ($dbName)",
({ config, dsProvider, isInternal, isOracle }) => {
let datasource: Datasource | undefined

@@ -332,7 +335,9 @@ datasourceDescribe(

expect(updatedTable).toEqual(expect.objectContaining(expected))

const persistedTable = await config.api.table.get(updatedTable._id!)
const persistedTable = await config.api.table.get(
updatedTable._id!
)
expected = {
...table,
name: newName,
@@ -561,8 +566,14 @@ datasourceDescribe(
await config.api.table.save(saveTableRequest, {
status: 200,
})
saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" }
saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" }
saveTableRequest.schema.foo = {
type: FieldType.STRING,
name: "foo",
}
saveTableRequest.schema.FOO = {
type: FieldType.STRING,
name: "FOO",
}

await config.api.table.save(saveTableRequest, {
status: 400,
@@ -1180,10 +1191,12 @@ datasourceDescribe(
schema,
})
)
const result = await config.api.table.validateExistingTableImport({
const result = await config.api.table.validateExistingTableImport(
{
tableId: table._id,
rows,
})
}
)
return result
},
],
@@ -1267,7 +1280,9 @@ datasourceDescribe(

isInternal &&
it.each(
isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
isInternal
? PROTECTED_INTERNAL_COLUMNS
: PROTECTED_EXTERNAL_COLUMNS
)(
"don't allow protected names in the rows (%s)",
async columnName => {
@@ -1487,7 +1502,8 @@ datasourceDescribe(
schema: basicSchema,
})
)
const result = await config.api.table.validateExistingTableImport({
const result = await config.api.table.validateExistingTableImport(
{
tableId: table._id,
rows: [
{
@@ -1496,7 +1512,8 @@ datasourceDescribe(
name: generator.first(),
},
],
})
}
)

expect(result).toEqual({
allValid: true,
@@ -1512,4 +1529,5 @@ datasourceDescribe(
})
})
}
)
)
}

@@ -44,8 +44,11 @@ import merge from "lodash/merge"
import { quotas } from "@budibase/pro"
import { db, roles, context } from "@budibase/backend-core"

datasourceDescribe(
{ name: "/v2/views (%s)", exclude: [DatabaseName.MONGODB] },
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
describe.each(descriptions)(
"/v2/views ($dbName)",
({ config, isInternal, dsProvider }) => {
let table: Table
let rawDatasource: Datasource | undefined
@@ -129,7 +132,8 @@ datasourceDescribe(
})

it("can persist views with all fields", async () => {
const newView: Required<Omit<CreateViewRequest, "query" | "type">> = {
const newView: Required<Omit<CreateViewRequest, "query" | "type">> =
{
name: generator.name(),
tableId: table._id!,
primaryDisplay: "id",
@@ -194,8 +198,9 @@ datasourceDescribe(
})

it("can create a view with just a query field, no queryUI, for backwards compatibility", async () => {
const newView: Required<Omit<CreateViewRequest, "queryUI" | "type">> =
{
const newView: Required<
Omit<CreateViewRequest, "queryUI" | "type">
> = {
name: generator.name(),
tableId: table._id!,
primaryDisplay: "id",
@@ -1162,7 +1167,8 @@ datasourceDescribe(
.expect(400)

expect(result.body).toEqual({
message: "View id does not match between the body and the uri path",
message:
"View id does not match between the body and the uri path",
status: 400,
})
})
@@ -2016,7 +2022,10 @@ datasourceDescribe(
schema,
})

const renameColumn = async (table: Table, renaming: RenameColumn) => {
const renameColumn = async (
table: Table,
renaming: RenameColumn
) => {
const newSchema = { ...table.schema }
newSchema[renaming.updated] = {
...table.schema[renaming.old],
@@ -2617,7 +2626,9 @@ datasourceDescribe(
])
const rowUsage = await getRowUsage()

await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })
await config.api.row.bulkDelete(view.id, {
rows: [rows[0], rows[2]],
})

await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)

@@ -3470,7 +3481,10 @@ datasourceDescribe(
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({
"Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
"Quantity Sum": rows.reduce(
(acc, r) => acc + r.quantity,
0
),
}),
])
)
@@ -3511,7 +3525,9 @@ datasourceDescribe(
}

for (const row of response.rows) {
expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
expect(row["Total Price"]).toEqual(
priceByQuantity[row.quantity]
)
}
})

@@ -3701,9 +3717,12 @@ datasourceDescribe(
},
})

const apertureScience = await config.api.row.save(companies._id!, {
const apertureScience = await config.api.row.save(
companies._id!,
{
name: "Aperture Science Laboratories",
})
}
)

const blackMesa = await config.api.row.save(companies._id!, {
name: "Black Mesa",
@@ -4402,7 +4421,9 @@ datasourceDescribe(
}),
expected: () => [
{
user: expect.objectContaining({ _id: config.getUser()._id }),
user: expect.objectContaining({
_id: config.getUser()._id,
}),
},
],
},
@@ -4631,4 +4652,5 @@ datasourceDescribe(
})
})
}
)
)
}

@@ -7,11 +7,13 @@ import {
import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"

datasourceDescribe(
{
name: "execute query action",
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
},
})

if (descriptions.length) {
describe.each(descriptions)(
"execute query action ($dbName)",
({ config, dsProvider }) => {
let tableName: string
let client: Knex
@@ -74,4 +76,5 @@ datasourceDescribe(
expect(res.success).toEqual(false)
})
}
)
)
}

@@ -433,9 +433,10 @@ describe("Automation Scenarios", () => {
})
})

datasourceDescribe(
{ name: "", only: [DatabaseName.MYSQL] },
({ config, dsProvider }) => {
const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })

if (descriptions.length) {
describe.each(descriptions)("/rows ($dbName)", ({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex

@@ -531,5 +532,5 @@ datasourceDescribe(
)
})
})
}
)
})
}

@@ -10,11 +10,11 @@ function uniqueTableName(length?: number): string {
.substring(0, length || 10)
}

datasourceDescribe(
{
name: "Integration compatibility with mysql search_path",
only: [DatabaseName.MYSQL],
},
const mainDescriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })

if (mainDescriptions.length) {
describe.each(mainDescriptions)(
"/Integration compatibility with mysql search_path ($dbName)",
({ config, dsProvider }) => {
let rawDatasource: Datasource
let datasource: Datasource
@@ -71,18 +71,20 @@ datasourceDescribe(
datasourceId: datasource._id!,
tablesFilter: [repeated_table_name],
})
expect(res.datasource.entities![repeated_table_name].schema).toBeDefined()
expect(
res.datasource.entities![repeated_table_name].schema
).toBeDefined()
const schema = res.datasource.entities![repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
})
}
)
)

datasourceDescribe(
{
name: "POST /api/datasources/:datasourceId/schema",
only: [DatabaseName.MYSQL],
},
const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })

if (descriptions.length) {
describe.each(descriptions)(
"POST /api/datasources/:datasourceId/schema ($dbName)",
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex
@@ -125,4 +127,6 @@ datasourceDescribe(
expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
})
}
)
)
}
}

@@ -8,8 +8,11 @@ import {
} from "../integrations/tests/utils"
import { Knex } from "knex"

datasourceDescribe(
{ name: "postgres integrations", only: [DatabaseName.POSTGRES] },
const mainDescriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })

if (mainDescriptions.length) {
describe.each(mainDescriptions)(
"/postgres integrations",
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex
@@ -199,18 +202,21 @@ datasourceDescribe(
row = await config.api.row.save(table._id!, { ...row, price: 300 })
expect(row.price).toBe("300.00")

row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
row = await config.api.row.save(table._id!, {
...row,
price: "400.00",
})
expect(row.price).toBe("400.00")
})
})
}
)
)

datasourceDescribe(
{
name: "Integration compatibility with postgres search_path",
only: [DatabaseName.POSTGRES],
},
const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })

if (descriptions.length) {
describe.each(descriptions)(
"Integration compatibility with postgres search_path",
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex
@@ -283,8 +289,11 @@ datasourceDescribe(
expect(
response.datasource.entities?.[repeated_table_name].schema
).toBeDefined()
const schema = response.datasource.entities?.[repeated_table_name].schema
const schema =
response.datasource.entities?.[repeated_table_name].schema
expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
})
}
)
)
}
}

@@ -35,7 +35,6 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
}

export interface DatasourceDescribeOpts {
name: string
only?: DatabaseName[]
exclude?: DatabaseName[]
}
@@ -102,16 +101,12 @@ function createDummyTest() {
})
}

export function datasourceDescribe(
opts: DatasourceDescribeOpts,
cb: (args: DatasourceDescribeReturn) => void
) {
export function datasourceDescribe(opts: DatasourceDescribeOpts) {
if (process.env.DATASOURCE === "none") {
createDummyTest()
return
}

const { name, only, exclude } = opts
const { only, exclude } = opts

if (only && exclude) {
throw new Error("you can only supply one of 'only' or 'exclude'")
@@ -130,36 +125,28 @@ export function datasourceDescribe(

if (databases.length === 0) {
createDummyTest()
return
}

describe.each(databases)(name, name => {
const config = new TestConfiguration()

afterAll(() => {
config.end()
})

cb({
name,
return databases.map(dbName => ({
dbName,
config,
dsProvider: () => createDatasources(config, name),
isInternal: name === DatabaseName.SQS,
isExternal: name !== DatabaseName.SQS,
dsProvider: () => createDatasources(config, dbName),
isInternal: dbName === DatabaseName.SQS,
isExternal: dbName !== DatabaseName.SQS,
isSql: [
DatabaseName.MARIADB,
DatabaseName.MYSQL,
DatabaseName.POSTGRES,
DatabaseName.SQL_SERVER,
DatabaseName.ORACLE,
].includes(name),
isMySQL: name === DatabaseName.MYSQL,
isPostgres: name === DatabaseName.POSTGRES,
isMongodb: name === DatabaseName.MONGODB,
isMSSQL: name === DatabaseName.SQL_SERVER,
isOracle: name === DatabaseName.ORACLE,
})
})
].includes(dbName),
isMySQL: dbName === DatabaseName.MYSQL,
isPostgres: dbName === DatabaseName.POSTGRES,
isMongodb: dbName === DatabaseName.MONGODB,
isMSSQL: dbName === DatabaseName.SQL_SERVER,
isOracle: dbName === DatabaseName.ORACLE,
}))
}

function getDatasource(

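The two hunks above are the heart of the refactor: `DatasourceDescribeOpts` loses its `name` field, and `datasourceDescribe` drops its callback parameter, instead returning one descriptor per matched database (an empty array when `DATASOURCE=none` or nothing survives the `only`/`exclude` filter). Every test file in this diff then follows the same consumption pattern. A minimal sketch of that pattern, assuming the import path used by the server tests above; the suite body is illustrative only:

import {
  DatabaseName,
  datasourceDescribe,
} from "../integrations/tests/utils"

// Empty when DATASOURCE=none or when no database survives the filter, so the
// guard below keeps Jest from being handed an empty describe.each table.
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
  // Jest substitutes $dbName in the title from each descriptor object.
  describe.each(descriptions)(
    "example suite ($dbName)",
    ({ config, dsProvider, isInternal, isSql }) => {
      it("runs once per matched database", async () => {
        // dsProvider lazily creates the datasource for this database; the
        // boolean flags replace branching on the old positional name value.
        expect(typeof isInternal).toBe("boolean")
        expect(typeof isSql).toBe("boolean")
      })
    }
  )
}

Moving the database loop out of a callback and into a plain return value is what lets each file decide, at module load time, whether any suites should be registered at all.
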
@@ -19,8 +19,11 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures"
// These test cases are only for things that cannot be tested through the API
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
datasourceDescribe(
{ name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
describe.each(descriptions)(
"search sdk ($dbName)",
({ config, dsProvider, isInternal }) => {
let datasource: Datasource | undefined
let table: Table
@@ -217,4 +220,5 @@ datasourceDescribe(
}
)
}
)
)
}

@@ -1,34 +0,0 @@
/***
 * Running lerna with since and scope is not working as expected.
 * For example, running the command `yarn test --scope=@budibase/worker --since=master`, with changes only on `@budibase/backend-core` will not work as expected, as it does not analyse the dependencies properly. The actual `@budibase/worker` task will not be triggered.
 *
 * This script is using `lerna ls` to detect all the affected projects from a given commit, and if the scoped package is affected, the actual command will be executed.
 *
 * The current version of the script only supports a single project in the scope.
 */

const { execSync } = require("child_process")

const argv = require("yargs").demandOption(["task", "since", "scope"]).argv

const { task, since, scope } = argv

const affectedPackages = execSync(
`yarn --silent nx show projects --affected -t ${task} --base=${since} --json`,
{
encoding: "utf-8",
}
)

const packages = JSON.parse(affectedPackages)

const isAffected = packages.includes(scope)

if (isAffected) {
console.log(`${scope} is affected. Running task "${task}"`)
execSync(`yarn ${task} --scope=${scope}`, {
stdio: "inherit",
})
} else {
console.log(`${scope} is not affected. Skipping task "${task}"`)
}
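
This deleted helper is superseded by the inline check in the test-worker workflow hunk near the top (`AFFECTED=$(yarn --silent nx show projects --affected ...)`). For comparison, a TypeScript sketch of that inline logic under the same single-scope assumption the script made; the scope and base values here are illustrative:

import { execSync } from "child_process"

// Assumptions mirroring the deleted script's flags: one scoped package, and a
// base ref taken from the NX_BASE_BRANCH environment variable that CI sets.
const scope = "@budibase/worker"
const base = process.env.NX_BASE_BRANCH ?? "master"

// Same nx invocation as the deleted script, with --json for easy parsing.
const affected: string[] = JSON.parse(
  execSync(
    `yarn --silent nx show projects --affected -t test --base=${base} --json`,
    { encoding: "utf-8" }
  )
)

if (!affected.includes(scope)) {
  console.log("No affected tests to run")
  process.exit(0)
}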