Merge branch 'master' into BUDI-8064/doc-writethrough

commit 74a9aa4a72
Adria Navarro, 2024-03-05 13:55:33 +01:00, committed by GitHub
35 changed files with 718 additions and 162 deletions

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.21.0",
+  "version": "2.21.2",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -1 +1 @@
-Subproject commit 19f7a5829f4d23cbc694136e45d94482a59a475a
+Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac

View File

@@ -147,6 +147,12 @@ export function createTablesStore() {
     if (indexes) {
       draft.indexes = indexes
     }
+    // Add object to indicate if column is being added
+    if (draft.schema[field.name] === undefined) {
+      draft._add = {
+        name: field.name,
+      }
+    }
     draft.schema = {
       ...draft.schema,
       [field.name]: cloneDeep(field),
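
The `_add` marker set by the builder here travels with the table-save request: the server strips it for internal (CouchDB) tables and uses it for external ones to rebuild the new column's schema. A minimal sketch of the resulting payload shape, using the `AddColumn` type introduced at the end of this commit (the column name is illustrative):

const saveTableRequest: SaveTableRequest = {
  ...table,
  // present only when the column does not already exist in the schema
  _add: { name: "NEW_COLUMN" },
}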

View File

@@ -1,7 +1,7 @@
 <script>
   import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
   import { fetchData, Utils } from "@budibase/frontend-core"
-  import { getContext } from "svelte"
+  import { getContext, onMount } from "svelte"
   import Field from "./Field.svelte"
   import { FieldTypes } from "../../../constants"
@@ -28,6 +28,7 @@
   let tableDefinition
   let searchTerm
   let open
+  let initialValue
 
   $: type =
     datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE
@@ -109,7 +110,11 @@
   }
   $: forceFetchRows(filter)
-  $: debouncedFetchRows(searchTerm, primaryDisplay, defaultValue)
+  $: debouncedFetchRows(
+    searchTerm,
+    primaryDisplay,
+    initialValue || defaultValue
+  )
 
   const forceFetchRows = async () => {
     // if the filter has changed, then we need to reset the options, clear the selection, and re-fetch
@@ -127,9 +132,13 @@
     if (allRowsFetched || !primaryDisplay) {
       return
     }
-    if (defaultVal && !optionsObj[defaultVal]) {
+    // must be an array
+    if (defaultVal && !Array.isArray(defaultVal)) {
+      defaultVal = defaultVal.split(",")
+    }
+    if (defaultVal && defaultVal.some(val => !optionsObj[val])) {
       await fetch.update({
-        query: { equal: { _id: defaultVal } },
+        query: { oneOf: { _id: defaultVal } },
       })
     }
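
Relationship default values can now be multi-valued, so a comma-separated string is normalised to an array and any rows not yet loaded are fetched with a `oneOf` filter instead of `equal`. A small sketch of the normalisation, with illustrative row IDs and assuming `optionsObj` is keyed by row `_id`:

let defaultVal: string | string[] = "ro_1,ro_2"
if (!Array.isArray(defaultVal)) {
  defaultVal = defaultVal.split(",") // ["ro_1", "ro_2"]
}
const needsFetch = defaultVal.some(val => !optionsObj[val]) // true if any id is missing
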
@@ -202,6 +211,16 @@
       fetch.nextPage()
     }
   }
+
+  onMount(() => {
+    // if the form is in 'Update' mode, then we need to fetch the matching row so that the value is correctly set
+    if (fieldState?.value) {
+      initialValue =
+        fieldSchema?.relationshipType !== "one-to-many"
+          ? flatten(fieldState?.value) ?? []
+          : flatten(fieldState?.value)?.[0]
+    }
+  })
 </script>
 
 <Field

@@ -1 +1 @@
-Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec
+Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781

View File

@@ -10,6 +10,11 @@ CREATE TABLE Persons (
   City varchar(255),
   PRIMARY KEY (PersonID)
 );
+CREATE TABLE Person (
+  PersonID int NOT NULL AUTO_INCREMENT,
+  Name varchar(255),
+  PRIMARY KEY (PersonID)
+);
 CREATE TABLE Tasks (
   TaskID int NOT NULL AUTO_INCREMENT,
   PersonID INT,
@@ -27,6 +32,7 @@ CREATE TABLE Products (
 );
 INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
 INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
+INSERT INTO Person (Name) VALUES ('Elf');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
 INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
 INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');

View File

@@ -62,7 +62,11 @@ export default class AliasTables {
       if (idx === -1 || idx > 1) {
         return
       }
-      return Math.abs(tableName.length - name.length) <= 2
+      // this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
+      // then the idx of the table name will be 1, and we should allow for it ending in a closing
+      // character - otherwise it should be the full length if the index is zero
+      const allowedCharacterDiff = idx * 2
+      return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
     })
     if (foundTableName) {
       const aliasedTableName = tableName.replace(
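
A worked example of the length check above (names illustrative): when the field is wrapped, the table name starts at index 1 and the wrapper contributes exactly the two characters that `idx * 2` allows.

const tableName = "tasks"
const wrapped = '"tasks"'
const idx = wrapped.indexOf(tableName) // 1: the name starts after the opening quote
const allowedCharacterDiff = idx * 2 // 2: room for the opening and closing quote
// |'"tasks"'| - |"tasks"| === 7 - 5 === 2 <= 2, so the wrapped form still matches
// unwrapped: indexOf === 0 -> allowed diff 0, lengths must match exactly
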
@@ -107,57 +111,55 @@
   }
 
   async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse {
-    json = cloneDeep(json)
-    const aliasTable = (table: Table) => ({
-      ...table,
-      name: this.getAlias(table.name),
-    })
-    // run through the query json to update anywhere a table may be used
-    if (json.resource?.fields) {
-      json.resource.fields = json.resource.fields.map(field =>
-        this.aliasField(field)
-      )
-    }
-    if (json.filters) {
-      for (let [filterKey, filter] of Object.entries(json.filters)) {
-        if (typeof filter !== "object") {
-          continue
-        }
-        const aliasedFilters: typeof filter = {}
-        for (let key of Object.keys(filter)) {
-          aliasedFilters[this.aliasField(key)] = filter[key]
-        }
-        json.filters[filterKey as keyof SearchFilters] = aliasedFilters
-      }
-    }
-    if (json.relationships) {
-      json.relationships = json.relationships.map(relationship => ({
-        ...relationship,
-        aliases: this.aliasMap([
-          relationship.through,
-          relationship.tableName,
-          json.endpoint.entityId,
-        ]),
-      }))
-    }
-    if (json.meta?.table) {
-      json.meta.table = aliasTable(json.meta.table)
-    }
-    if (json.meta?.tables) {
-      const aliasedTables: Record<string, Table> = {}
-      for (let [tableName, table] of Object.entries(json.meta.tables)) {
-        aliasedTables[this.getAlias(tableName)] = aliasTable(table)
-      }
-      json.meta.tables = aliasedTables
-    }
-    // invert and return
-    const invertedTableAliases: Record<string, string> = {}
-    for (let [key, value] of Object.entries(this.tableAliases)) {
-      invertedTableAliases[value] = key
-    }
-    json.tableAliases = invertedTableAliases
+    const fieldLength = json.resource?.fields?.length
+    const aliasingEnabled = fieldLength && fieldLength > 0
+    if (aliasingEnabled) {
+      json = cloneDeep(json)
+      // run through the query json to update anywhere a table may be used
+      if (json.resource?.fields) {
+        json.resource.fields = json.resource.fields.map(field =>
+          this.aliasField(field)
+        )
+      }
+      if (json.filters) {
+        for (let [filterKey, filter] of Object.entries(json.filters)) {
+          if (typeof filter !== "object") {
+            continue
+          }
+          const aliasedFilters: typeof filter = {}
+          for (let key of Object.keys(filter)) {
+            aliasedFilters[this.aliasField(key)] = filter[key]
+          }
+          json.filters[filterKey as keyof SearchFilters] = aliasedFilters
+        }
+      }
+      if (json.meta?.table) {
+        this.getAlias(json.meta.table.name)
+      }
+      if (json.meta?.tables) {
+        Object.keys(json.meta.tables).forEach(tableName =>
+          this.getAlias(tableName)
+        )
+      }
+      if (json.relationships) {
+        json.relationships = json.relationships.map(relationship => ({
+          ...relationship,
+          aliases: this.aliasMap([
+            relationship.through,
+            relationship.tableName,
+            json.endpoint.entityId,
+          ]),
+        }))
+      }
+      // invert and return
+      const invertedTableAliases: Record<string, string> = {}
+      for (let [key, value] of Object.entries(this.tableAliases)) {
+        invertedTableAliases[value] = key
+      }
+      json.tableAliases = invertedTableAliases
+    }
     const response = await getDatasourceAndQuery(json)
-    if (Array.isArray(response)) {
+    if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
       return response
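
With this change, the aliasing work (cloning the JSON, rewriting fields, filters and relationships) only happens when the query requests specific fields; queries with no field list run unmodified and their responses skip the alias reversal. A sketch of how the gate behaves for two illustrative inputs:

// fields requested -> aliasing runs and the response is reversed
await aliasing.queryWithAliasing({ ...json, resource: { fields: ["a.name"] } })
// no fields (e.g. a delete or count operation) -> json passes through untouched
await aliasing.queryWithAliasing({ ...json, resource: { fields: [] } })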

View File

@@ -6,6 +6,7 @@ import {
   BulkImportRequest,
   BulkImportResponse,
   Operation,
+  RenameColumn,
   SaveTableRequest,
   SaveTableResponse,
   Table,
@@ -25,9 +26,12 @@ function getDatasourceId(table: Table) {
   return breakExternalTableId(table._id).datasourceId
 }
 
-export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
+export async function save(
+  ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
+  renaming?: RenameColumn
+) {
   const inputs = ctx.request.body
-  const renaming = inputs?._rename
+  const adding = inputs?._add
   // can't do this right now
   delete inputs.rows
   const tableId = ctx.request.body._id
@@ -40,7 +44,7 @@ export async function save(
   const { datasource, table } = await sdk.tables.external.save(
     datasourceId!,
     inputs,
-    { tableId, renaming }
+    { tableId, renaming, adding }
   )
   builderSocket?.emitDatasourceUpdate(ctx, datasource)
   return table

View File

@@ -74,8 +74,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
   const appId = ctx.appId
   const table = ctx.request.body
   const isImport = table.rows
+  const renaming = ctx.request.body._rename
 
-  let savedTable = await pickApi({ table }).save(ctx)
+  const api = pickApi({ table })
+  // do not pass _rename or _add if saving to CouchDB
+  if (api === internal) {
+    delete ctx.request.body._add
+    delete ctx.request.body._rename
+  }
+  let savedTable = await api.save(ctx, renaming)
   if (!table._id) {
     await events.table.created(savedTable)
     savedTable = sdk.tables.enrichViewSchemas(savedTable)

View File

@@ -12,11 +12,12 @@ import {
 } from "@budibase/types"
 import sdk from "../../../sdk"
 
-export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
+export async function save(
+  ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
+  renaming?: RenameColumn
+) {
   const { rows, ...rest } = ctx.request.body
-  let tableToSave: Table & {
-    _rename?: RenameColumn
-  } = {
+  let tableToSave: Table = {
     _id: generateTableID(),
     ...rest,
     // Ensure these fields are populated, even if not sent in the request
@@ -28,15 +29,12 @@ export async function save(
     tableToSave.views = {}
   }
 
-  const renaming = tableToSave._rename
-  delete tableToSave._rename
-
   try {
     const { table } = await sdk.tables.internal.save(tableToSave, {
       user: ctx.user,
       rowsToImport: rows,
      tableId: ctx.request.body._id,
-      renaming: renaming,
+      renaming,
     })
 
     return table

View File

@@ -13,7 +13,7 @@ describe("/api/keys", () => {
   describe("fetch", () => {
     it("should allow fetching", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
         const res = await request
           .get(`/api/keys`)
           .set(config.defaultHeaders())
@@ -34,7 +34,7 @@ describe("/api/keys", () => {
   describe("update", () => {
     it("should allow updating a value", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
         const res = await request
           .put(`/api/keys/TEST`)
           .send({

View File

@@ -248,4 +248,13 @@ describe("/applications", () => {
       expect(devLogs.data.length).toBe(0)
     })
   })
+
+  describe("permissions", () => {
+    it("should only return apps a user has access to", async () => {
+      const user = await config.createUser()
+      const apps = await config.api.application.fetch()
+      expect(apps.length).toBeGreaterThan(0)
+    })
+  })
 })

View File

@@ -157,7 +157,7 @@ describe("/queries", () => {
   })
 
   it("should find a query in cloud", async () => {
-    await setup.switchToSelfHosted(async () => {
+    await config.withEnv({ SELF_HOSTED: "true" }, async () => {
       const query = await config.createQuery()
       const res = await request
         .get(`/api/queries/${query._id}`)

View File

@@ -882,8 +882,7 @@ describe.each([
         ],
         tableId: table._id,
       })
-      // the environment needs configured for this
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
         return context.doInAppContext(config.getAppId(), async () => {
           const enriched = await outputProcessing(table, [row])
           expect((enriched as Row[])[0].attachment[0].url).toBe(

View File

@@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
 
 tk.freeze(mocks.date.MOCK_DATE)
 const { basicTable } = setup.structures
+const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
 
 describe("/tables", () => {
   let request = setup.getRequest()
@@ -285,6 +286,35 @@ describe("/tables", () => {
       expect(res.body.schema.roleId).toBeDefined()
     })
   })
+
+  it("should add a new column for an internal DB table", async () => {
+    const saveTableRequest: SaveTableRequest = {
+      _add: {
+        name: "NEW_COLUMN",
+      },
+      ...basicTable(),
+    }
+
+    const response = await request
+      .post(`/api/tables`)
+      .send(saveTableRequest)
+      .set(config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+
+    const expectedResponse = {
+      ...saveTableRequest,
+      _rev: expect.stringMatching(/^\d-.+/),
+      _id: expect.stringMatching(/^ta_.+/),
+      createdAt: expect.stringMatching(ISO_REGEX_PATTERN),
+      updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
+      views: {},
+    }
+    delete expectedResponse._add
+
+    expect(response.status).toBe(200)
+    expect(response.body).toEqual(expectedResponse)
+  })
 })
 
 describe("import", () => {

View File

@@ -77,21 +77,3 @@ export function getConfig() {
   }
   return config!
 }
-
-export async function switchToSelfHosted(func: any) {
-  // self hosted stops any attempts to Dynamo
-  env._set("NODE_ENV", "production")
-  env._set("SELF_HOSTED", true)
-  let error
-  try {
-    await func()
-  } catch (err) {
-    error = err
-  }
-  env._set("NODE_ENV", "jest")
-  env._set("SELF_HOSTED", false)
-  // don't throw error until after reset
-  if (error) {
-    throw error
-  }
-}
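
Throughout these tests the removed helper is replaced by `config.withEnv`, which scopes the override to the callback; a minimal usage sketch (assuming, as with the old helper, that the previous values are restored even if the callback throws):

await config.withEnv({ SELF_HOSTED: "true" }, async () => {
  // requests made here observe a self-hosted environment
})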

View File

@@ -0,0 +1,363 @@
import fetch from "node-fetch"
import {
  generateMakeRequest,
  MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import { v4 as uuidv4 } from "uuid"
import * as setup from "../api/routes/tests/utilities"
import {
  Datasource,
  FieldType,
  Table,
  TableRequest,
  TableSourceType,
} from "@budibase/types"
import _ from "lodash"
import { databaseTestProviders } from "../integrations/tests/utils"
import mysql from "mysql2/promise"
import { builderSocket } from "../websockets"

// @ts-ignore
fetch.mockSearch()

const config = setup.getConfig()!

jest.unmock("mysql2/promise")
jest.mock("../websockets", () => ({
  clientAppSocket: jest.fn(),
  gridAppSocket: jest.fn(),
  initialise: jest.fn(),
  builderSocket: {
    emitTableUpdate: jest.fn(),
    emitTableDeletion: jest.fn(),
    emitDatasourceUpdate: jest.fn(),
    emitDatasourceDeletion: jest.fn(),
    emitScreenUpdate: jest.fn(),
    emitAppMetadataUpdate: jest.fn(),
    emitAppPublish: jest.fn(),
  },
}))

describe("mysql integrations", () => {
  let makeRequest: MakeRequestResponse,
    mysqlDatasource: Datasource,
    primaryMySqlTable: Table

  beforeAll(async () => {
    await config.init()
    const apiKey = await config.generateApiKey()

    makeRequest = generateMakeRequest(apiKey, true)

    mysqlDatasource = await config.api.datasource.create(
      await databaseTestProviders.mysql.datasource()
    )
  })

  afterAll(async () => {
    await databaseTestProviders.mysql.stop()
  })

  beforeEach(async () => {
    primaryMySqlTable = await config.createTable({
      name: uuidv4(),
      type: "table",
      primary: ["id"],
      schema: {
        id: {
          name: "id",
          type: FieldType.AUTO,
          autocolumn: true,
        },
        name: {
          name: "name",
          type: FieldType.STRING,
        },
        description: {
          name: "description",
          type: FieldType.STRING,
        },
        value: {
          name: "value",
          type: FieldType.NUMBER,
        },
      },
      sourceId: mysqlDatasource._id,
      sourceType: TableSourceType.EXTERNAL,
    })
  })

  afterAll(config.end)

  it("validate table schema", async () => {
    const res = await makeRequest(
      "get",
      `/api/datasources/${mysqlDatasource._id}`
    )
    expect(res.status).toBe(200)
    expect(res.body).toEqual({
      config: {
        database: "mysql",
        host: mysqlDatasource.config!.host,
        password: "--secret-value--",
        port: mysqlDatasource.config!.port,
        user: "root",
      },
      plus: true,
      source: "MYSQL",
      type: "datasource_plus",
      _id: expect.any(String),
      _rev: expect.any(String),
      createdAt: expect.any(String),
      updatedAt: expect.any(String),
      entities: expect.any(Object),
    })
  })

  describe("POST /api/datasources/verify", () => {
    it("should be able to verify the connection", async () => {
      await config.api.datasource.verify(
        {
          datasource: await databaseTestProviders.mysql.datasource(),
        },
        {
          body: {
            connected: true,
          },
        }
      )
    })

    it("should state an invalid datasource cannot connect", async () => {
      const dbConfig = await databaseTestProviders.mysql.datasource()
      await config.api.datasource.verify(
        {
          datasource: {
            ...dbConfig,
            config: {
              ...dbConfig.config,
              password: "wrongpassword",
            },
          },
        },
        {
          body: {
            connected: false,
            error:
              "Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
          },
        }
      )
    })
  })

  describe("POST /api/datasources/info", () => {
    it("should fetch information about mysql datasource", async () => {
      const primaryName = primaryMySqlTable.name
      const response = await makeRequest("post", "/api/datasources/info", {
        datasource: mysqlDatasource,
      })
      expect(response.status).toBe(200)
      expect(response.body.tableNames).toBeDefined()
      expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
    })
  })

  describe("Integration compatibility with mysql search_path", () => {
    let client: mysql.Connection, pathDatasource: Datasource
    const database = "test1"
    const database2 = "test-2"

    beforeAll(async () => {
      const dsConfig = await databaseTestProviders.mysql.datasource()
      const dbConfig = dsConfig.config!

      client = await mysql.createConnection(dbConfig)
      await client.query(`CREATE DATABASE \`${database}\`;`)
      await client.query(`CREATE DATABASE \`${database2}\`;`)

      const pathConfig: any = {
        ...dsConfig,
        config: {
          ...dbConfig,
          database,
        },
      }
      pathDatasource = await config.api.datasource.create(pathConfig)
    })

    afterAll(async () => {
      await client.query(`DROP DATABASE \`${database}\`;`)
      await client.query(`DROP DATABASE \`${database2}\`;`)
      await client.end()
    })

    it("discovers tables from any schema in search path", async () => {
      await client.query(
        `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
      )
      const response = await makeRequest("post", "/api/datasources/info", {
        datasource: pathDatasource,
      })
      expect(response.status).toBe(200)
      expect(response.body.tableNames).toBeDefined()
      expect(response.body.tableNames).toEqual(
        expect.arrayContaining(["table1"])
      )
    })

    it("does not mix columns from different tables", async () => {
      const repeated_table_name = "table_same_name"
      await client.query(
        `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
      )
      await client.query(
        `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
      )
      const response = await makeRequest(
        "post",
        `/api/datasources/${pathDatasource._id}/schema`,
        {
          tablesFilter: [repeated_table_name],
        }
      )
      expect(response.status).toBe(200)
      expect(
        response.body.datasource.entities[repeated_table_name].schema
      ).toBeDefined()
      const schema =
        response.body.datasource.entities[repeated_table_name].schema
      expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
    })
  })

  describe("POST /api/tables/", () => {
    let client: mysql.Connection
    const emitDatasourceUpdateMock = jest.fn()

    beforeEach(async () => {
      client = await mysql.createConnection(
        (
          await databaseTestProviders.mysql.datasource()
        ).config!
      )
      mysqlDatasource = await config.api.datasource.create(
        await databaseTestProviders.mysql.datasource()
      )
    })

    afterEach(async () => {
      await client.end()
    })

    it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
      const addColumnToTable: TableRequest = {
        type: "table",
        sourceType: TableSourceType.EXTERNAL,
        name: "table",
        sourceId: mysqlDatasource._id!,
        primary: ["id"],
        schema: {
          id: {
            type: FieldType.AUTO,
            name: "id",
            autocolumn: true,
          },
          new_column: {
            type: FieldType.NUMBER,
            name: "new_column",
          },
        },
        _add: {
          name: "new_column",
        },
      }

      jest
        .spyOn(builderSocket!, "emitDatasourceUpdate")
        .mockImplementation(emitDatasourceUpdateMock)

      await makeRequest("post", "/api/tables/", addColumnToTable)

      const expectedTable: TableRequest = {
        ...addColumnToTable,
        schema: {
          id: {
            type: FieldType.NUMBER,
            name: "id",
            autocolumn: true,
            constraints: {
              presence: false,
            },
            externalType: "int unsigned",
          },
          new_column: {
            type: FieldType.NUMBER,
            name: "new_column",
            autocolumn: false,
            constraints: {
              presence: false,
            },
            externalType: "float(8,2)",
          },
        },
        created: true,
        _id: `${mysqlDatasource._id}__table`,
      }
      delete expectedTable._add

      expect(emitDatasourceUpdateMock).toBeCalledTimes(1)
      const emittedDatasource: Datasource =
        emitDatasourceUpdateMock.mock.calls[0][1]
      expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
    })

    it("will rename a column", async () => {
      await makeRequest("post", "/api/tables/", primaryMySqlTable)
      let renameColumnOnTable: TableRequest = {
        ...primaryMySqlTable,
        schema: {
          id: {
            name: "id",
            type: FieldType.AUTO,
            autocolumn: true,
            externalType: "unsigned integer",
          },
          name: {
            name: "name",
            type: FieldType.STRING,
            externalType: "text",
          },
          description: {
            name: "description",
            type: FieldType.STRING,
            externalType: "text",
          },
          age: {
            name: "age",
            type: FieldType.NUMBER,
            externalType: "float(8,2)",
          },
        },
      }

      const response = await makeRequest(
        "post",
        "/api/tables/",
        renameColumnOnTable
      )
      mysqlDatasource = (
        await makeRequest(
          "post",
          `/api/datasources/${mysqlDatasource._id}/schema`
        )
      ).body.datasource

      expect(response.status).toEqual(200)
      expect(
        Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
      ).toEqual(["id", "name", "description", "age"])
    })
  })
})

View File

@@ -12,6 +12,8 @@ import {
 } from "@budibase/types"
 import environment from "../../environment"
 
+type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any
+
 const envLimit = environment.SQL_MAX_ROWS
   ? parseInt(environment.SQL_MAX_ROWS)
   : null
@@ -322,15 +324,18 @@ class InternalBuilder {
   addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
     let { sort, paginate } = json
     const table = json.meta?.table
+    const aliases = json.tableAliases
+    const aliased =
+      table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
           value.direction === SortDirection.ASCENDING ? "asc" : "desc"
-        query = query.orderBy(`${table?.name}.${key}`, direction)
+        query = query.orderBy(`${aliased}.${key}`, direction)
       }
     } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
       // @ts-ignore
-      query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
+      query = query.orderBy(`${aliased}.${table?.primary[0]}`)
     }
     return query
   }
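
Sorting now resolves the table name through the alias map, so an aliased query sorts on the alias rather than the original table name. A small sketch of the lookup, using the alias map from the fixtures in this commit:

const aliases = { people: "a" } // json.tableAliases
const table = { name: "people" }
const aliased = aliases[table.name] ?? table.name // "a"
// orderBy now targets "a.name" instead of "people.name"
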
@@ -605,7 +610,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     return query.toSQL().toNative()
   }
 
-  async getReturningRow(queryFn: Function, json: QueryJson) {
+  async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
     if (!json.extra || !json.extra.idFilter) {
       return {}
     }
@@ -617,7 +622,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       resource: {
         fields: [],
       },
-      filters: json.extra.idFilter,
+      filters: json.extra?.idFilter,
       paginate: {
         limit: 1,
       },
@@ -646,7 +651,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
   // this function recreates the returning functionality of postgres
   async queryWithReturning(
     json: QueryJson,
-    queryFn: Function,
+    queryFn: QueryFunction,
     processFn: Function = (result: any) => result
   ) {
     const sqlClient = this.getSqlClient()

View File

@@ -4,6 +4,7 @@ import Sql from "../base/sql"
 import { SqlClient } from "../utils"
 import AliasTables from "../../api/controllers/row/alias"
 import { generator } from "@budibase/backend-core/tests"
+import { Knex } from "knex"
 
 function multiline(sql: string) {
   return sql.replace(/\n/g, "").replace(/ +/g, " ")
@@ -160,6 +161,28 @@ describe("Captures of real examples", () => {
     })
   })
 
+  describe("returning (everything bar Postgres)", () => {
+    it("should be able to handle row returning", () => {
+      const queryJson = getJson("createSimple.json")
+      const SQL = new Sql(SqlClient.MS_SQL, limit)
+      let query = SQL._query(queryJson, { disableReturning: true })
+      expect(query).toEqual({
+        sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
+        bindings: [22, "Test"],
+      })
+
+      // now check returning
+      let returningQuery: Knex.SqlNative = { sql: "", bindings: [] }
+      SQL.getReturningRow((input: Knex.SqlNative) => {
+        returningQuery = input
+      }, queryJson)
+      expect(returningQuery).toEqual({
+        sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]",
+        bindings: [1, "Test", 22],
+      })
+    })
+  })
+
 describe("check max character aliasing", () => {
   it("should handle over 'z' max character alias", () => {
     const tableNames = []

View File

@@ -68,7 +68,7 @@
       "primary": [
         "personid"
       ],
-      "name": "a",
+      "name": "persons",
       "schema": {
         "year": {
           "type": "number",

View File

@@ -0,0 +1,64 @@
{
  "endpoint": {
    "datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
    "entityId": "people",
    "operation": "CREATE"
  },
  "resource": {
    "fields": [
      "a.name",
      "a.age"
    ]
  },
  "filters": {},
  "relationships": [],
  "body": {
    "name": "Test",
    "age": 22
  },
  "extra": {
    "idFilter": {
      "equal": {
        "name": "Test",
        "age": 22
      }
    }
  },
  "meta": {
    "table": {
      "_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
      "type": "table",
      "sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
      "sourceType": "external",
      "primary": [
        "name",
        "age"
      ],
      "name": "people",
      "schema": {
        "name": {
          "type": "string",
          "externalType": "varchar",
          "autocolumn": false,
          "name": "name",
          "constraints": {
            "presence": true
          }
        },
        "age": {
          "type": "number",
          "externalType": "int",
          "autocolumn": false,
          "name": "age",
          "constraints": {
            "presence": false
          }
        }
      },
      "primaryDisplay": "name"
    }
  },
  "tableAliases": {
    "people": "a"
  }
}

View File

@@ -58,7 +58,7 @@
       "primary": [
         "personid"
       ],
-      "name": "a",
+      "name": "persons",
       "schema": {
         "year": {
           "type": "number",

View File

@@ -34,7 +34,7 @@
         "keypartone",
         "keyparttwo"
       ],
-      "name": "a",
+      "name": "compositetable",
       "schema": {
         "keyparttwo": {
           "type": "string",

View File

@@ -49,7 +49,7 @@
       "primary": [
         "taskid"
       ],
-      "name": "a",
+      "name": "tasks",
       "schema": {
         "executorid": {
           "type": "number",

View File

@@ -63,7 +63,7 @@
       "primary": [
         "productid"
       ],
-      "name": "a",
+      "name": "products",
       "schema": {
         "productname": {
           "type": "string",

View File

@@ -53,7 +53,7 @@
       "primary": [
         "productid"
       ],
-      "name": "a",
+      "name": "products",
       "schema": {
         "productname": {
           "type": "string",

View File

@@ -109,7 +109,7 @@
       "primary": [
         "taskid"
       ],
-      "name": "a",
+      "name": "tasks",
       "schema": {
         "executorid": {
           "type": "number",

View File

@@ -66,7 +66,7 @@
       "primary": [
         "personid"
       ],
-      "name": "a",
+      "name": "persons",
       "schema": {
         "year": {
           "type": "number",

View File

@@ -66,7 +66,7 @@
       "primary": [
         "personid"
       ],
-      "name": "a",
+      "name": "persons",
       "schema": {
         "year": {
           "type": "number",

View File

@@ -11,7 +11,10 @@ import {
 import * as exporters from "../../../../api/controllers/view/exporters"
 import sdk from "../../../../sdk"
 import { handleRequest } from "../../../../api/controllers/row/external"
-import { breakExternalTableId } from "../../../../integrations/utils"
+import {
+  breakExternalTableId,
+  breakRowIdField,
+} from "../../../../integrations/utils"
 import { cleanExportRows } from "../utils"
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "../search"
@@ -52,6 +55,15 @@ export async function search(options: SearchParams) {
     }
   }
 
+  // Make sure oneOf _id queries decode the Row IDs
+  if (query?.oneOf?._id) {
+    const rowIds = query.oneOf._id
+    query.oneOf._id = rowIds.map((row: string) => {
+      const ids = breakRowIdField(row)
+      return ids[0]
+    })
+  }
+
   try {
     const table = await sdk.tables.getTable(tableId)
     options = searchInputMapping(table, options)
@@ -119,9 +131,7 @@ export async function exportRows(
     requestQuery = {
       oneOf: {
         _id: rowIds.map((row: string) => {
-          const ids = JSON.parse(
-            decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
-          )
+          const ids = breakRowIdField(row)
           if (ids.length > 1) {
             throw new HTTPError(
               "Export data does not support composite keys.",

View File

@@ -21,10 +21,11 @@ jest.unmock("mysql2/promise")
 jest.setTimeout(30000)
 
-describe.skip("external", () => {
+describe("external search", () => {
   const config = new TestConfiguration()
 
   let externalDatasource: Datasource, tableData: Table
+  const rows: Row[] = []
 
   beforeAll(async () => {
     const container = await new GenericContainer("mysql")
@@ -89,67 +90,81 @@ describe("external search", () => {
         },
       },
     }
+    const table = await config.createExternalTable({
+      ...tableData,
+      sourceId: externalDatasource._id,
+    })
+    for (let i = 0; i < 10; i++) {
+      rows.push(
+        await config.createRow({
+          tableId: table._id,
+          name: generator.first(),
+          surname: generator.last(),
+          age: generator.age(),
+          address: generator.address(),
+        })
+      )
+    }
   })
 
-  describe("search", () => {
-    const rows: Row[] = []
-    beforeAll(async () => {
-      const table = await config.createExternalTable({
-        ...tableData,
-        sourceId: externalDatasource._id,
-      })
-      for (let i = 0; i < 10; i++) {
-        rows.push(
-          await config.createRow({
-            tableId: table._id,
-            name: generator.first(),
-            surname: generator.last(),
-            age: generator.age(),
-            address: generator.address(),
-          })
-        )
-      }
-    })
-
-    it("default search returns all the data", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-        const searchParams: SearchParams = {
-          tableId,
-          query: {},
-        }
-        const result = await search(searchParams)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
-        )
-      })
-    })
-
-    it("querying by fields will always return data attribute columns", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-        const searchParams: SearchParams = {
-          tableId,
-          query: {},
-          fields: ["name", "age"],
-        }
-        const result = await search(searchParams)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(
-            rows.map(r => ({
-              ...expectAnyExternalColsAttributes,
-              name: r.name,
-              age: r.age,
-            }))
-          )
-        )
-      })
-    })
-  })
+  it("default search returns all the data", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {},
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(10)
+      expect(result.rows).toEqual(
+        expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
+      )
+    })
+  })
+
+  it("querying by fields will always return data attribute columns", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {},
+        fields: ["name", "age"],
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(10)
+      expect(result.rows).toEqual(
+        expect.arrayContaining(
+          rows.map(r => ({
+            ...expectAnyExternalColsAttributes,
+            name: r.name,
+            age: r.age,
+          }))
+        )
+      )
+    })
+  })
+
+  it("will decode _id in oneOf query", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {
+          oneOf: {
+            _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
+          },
+        },
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(3)
+      expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])
+    })
+  })
 })

View File

@@ -1,6 +1,5 @@
 import {
   FieldType,
-  FieldTypeSubtypes,
   SearchParams,
   Table,
   DocumentType,

View File

@@ -3,6 +3,7 @@ import {
   Operation,
   RelationshipType,
   RenameColumn,
+  AddColumn,
   Table,
   TableRequest,
   ViewV2,
@@ -32,7 +33,7 @@ import * as viewSdk from "../../views"
 export async function save(
   datasourceId: string,
   update: Table,
-  opts?: { tableId?: string; renaming?: RenameColumn }
+  opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
 ) {
   let tableToSave: TableRequest = {
     ...update,
@@ -165,8 +166,17 @@ export async function save(
   // remove the rename prop
   delete tableToSave._rename
+
+  // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
+  if (opts?.adding) {
+    datasource.entities[tableToSave.name] = (
+      await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
+    ).tables[tableToSave.name]
+  } else {
+    datasource.entities[tableToSave.name] = tableToSave
+  }
+
   // store it into couch now for budibase reference
-  datasource.entities[tableToSave.name] = tableToSave
   await db.put(populateExternalTableSchemas(datasource))
 
   // Since tables are stored inside datasources, we need to notify clients
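
Rebuilding the schema when `opts?.adding` is set is what fills in the database-native `externalType` for the new column (for example `int unsigned` or `float(8,2)` in the MySQL tests above), which the client cannot supply itself. A sketch of the resulting entity shape, assuming the filtered-schema call shown in the diff:

// after buildFilteredSchema, the stored entity carries externalType per column
datasource.entities["table"].schema.new_column
// -> { type: "number", name: "new_column", externalType: "float(8,2)", ... }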

View File

@@ -1,6 +1,6 @@
 import { Document } from "../../document"
 import { View, ViewV2 } from "../view"
-import { RenameColumn } from "../../../sdk"
+import { AddColumn, RenameColumn } from "../../../sdk"
 import { TableSchema } from "./schema"
 
 export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"
@@ -29,5 +29,6 @@ export interface Table extends Document {
 export interface TableRequest extends Table {
   _rename?: RenameColumn
+  _add?: AddColumn
   created?: boolean
 }

View File

@@ -60,6 +60,10 @@ export interface RenameColumn {
   updated: string
 }
 
+export interface AddColumn {
+  name: string
+}
+
 export interface RelationshipsJson {
   through?: string
   from?: string