Merge branch 'master' into BUDI-8064/doc-writethrough
commit 74a9aa4a72
@@ -1,5 +1,5 @@
{
-  "version": "2.21.0",
+  "version": "2.21.2",
  "npmClient": "yarn",
  "packages": [
    "packages/*",
@@ -1 +1 @@
-Subproject commit 19f7a5829f4d23cbc694136e45d94482a59a475a
+Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac
@@ -147,6 +147,12 @@ export function createTablesStore() {
      if (indexes) {
        draft.indexes = indexes
      }
+     // Add object to indicate if column is being added
+     if (draft.schema[field.name] === undefined) {
+       draft._add = {
+         name: field.name,
+       }
+     }
      draft.schema = {
        ...draft.schema,
        [field.name]: cloneDeep(field),
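The `_add` marker introduced here is what the server-side hunks further down consume. A minimal TypeScript sketch of the idea, using simplified stand-in types rather than the real Budibase interfaces:

```ts
interface FieldSchema {
  name: string
  type: string
}

interface TablePayload {
  schema: Record<string, FieldSchema>
  _add?: { name: string }
}

// Tag the payload with the name of a column that did not previously exist,
// so the server knows it must rebuild that column's external schema.
function addColumn(table: TablePayload, field: FieldSchema): TablePayload {
  const isNewColumn = table.schema[field.name] === undefined
  return {
    ...table,
    _add: isNewColumn ? { name: field.name } : table._add,
    schema: { ...table.schema, [field.name]: field },
  }
}
```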
@@ -1,7 +1,7 @@
<script>
  import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
  import { fetchData, Utils } from "@budibase/frontend-core"
-  import { getContext } from "svelte"
+  import { getContext, onMount } from "svelte"
  import Field from "./Field.svelte"
  import { FieldTypes } from "../../../constants"

@@ -28,6 +28,7 @@
  let tableDefinition
  let searchTerm
  let open
+ let initialValue

  $: type =
    datasourceType === "table" ? FieldTypes.LINK : FieldTypes.BB_REFERENCE
@@ -109,7 +110,11 @@
  }

  $: forceFetchRows(filter)
-  $: debouncedFetchRows(searchTerm, primaryDisplay, defaultValue)
+  $: debouncedFetchRows(
+    searchTerm,
+    primaryDisplay,
+    initialValue || defaultValue
+  )

  const forceFetchRows = async () => {
    // if the filter has changed, then we need to reset the options, clear the selection, and re-fetch
@@ -127,9 +132,13 @@
    if (allRowsFetched || !primaryDisplay) {
      return
    }
-    if (defaultVal && !optionsObj[defaultVal]) {
+    // must be an array
+    if (defaultVal && !Array.isArray(defaultVal)) {
+      defaultVal = defaultVal.split(",")
+    }
+    if (defaultVal && defaultVal.some(val => !optionsObj[val])) {
      await fetch.update({
-        query: { equal: { _id: defaultVal } },
+        query: { oneOf: { _id: defaultVal } },
      })
    }

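The filter swap from `equal` to `oneOf` matters because an update form's default value can reference several related rows, possibly arriving as a comma-separated string. A small illustrative sketch, not the component's actual API:

```ts
type DefaultValueFilter = { oneOf: { _id: string[] } }

// Normalise the default value and build a filter that matches any of the referenced rows.
function buildDefaultValueFilter(defaultVal: string | string[]): DefaultValueFilter {
  const ids = Array.isArray(defaultVal) ? defaultVal : defaultVal.split(",")
  return { oneOf: { _id: ids } }
}

// buildDefaultValueFilter("row1,row2") -> { oneOf: { _id: ["row1", "row2"] } }
```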
@@ -202,6 +211,16 @@
      fetch.nextPage()
    }
  }
+
+  onMount(() => {
+    // if the form is in 'Update' mode, then we need to fetch the matching row so that the value is correctly set
+    if (fieldState?.value) {
+      initialValue =
+        fieldSchema?.relationshipType !== "one-to-many"
+          ? flatten(fieldState?.value) ?? []
+          : flatten(fieldState?.value)?.[0]
+    }
+  })
</script>

<Field
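A simplified sketch of the initial-value handling added in this hunk; the `flatten` helper and row shape are assumptions for illustration only:

```ts
interface RelatedRow {
  _id: string
  primaryDisplay?: string
}

// Reduce a stored relationship value to plain row IDs.
function flatten(value?: RelatedRow[]): string[] {
  return (value ?? []).map(row => row._id)
}

// Multi-row relationships keep an array of IDs; a one-to-many link keeps a single ID.
function toInitialValue(
  value: RelatedRow[] | undefined,
  relationshipType?: string
): string[] | string | undefined {
  return relationshipType !== "one-to-many" ? flatten(value) : flatten(value)[0]
}
```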
@@ -1 +1 @@
-Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec
+Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781
@@ -10,6 +10,11 @@ CREATE TABLE Persons (
  City varchar(255),
  PRIMARY KEY (PersonID)
);
+CREATE TABLE Person (
+  PersonID int NOT NULL AUTO_INCREMENT,
+  Name varchar(255),
+  PRIMARY KEY (PersonID)
+);
CREATE TABLE Tasks (
  TaskID int NOT NULL AUTO_INCREMENT,
  PersonID INT,
@@ -27,6 +32,7 @@ CREATE TABLE Products (
);
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
+INSERT INTO Person (Name) VALUES ('Elf');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');
@@ -62,7 +62,11 @@ export default class AliasTables {
      if (idx === -1 || idx > 1) {
        return
      }
-      return Math.abs(tableName.length - name.length) <= 2
+      // this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
+      // then the idx of the table name will be 1, and we should allow for it ending in a closing
+      // character - otherwise it should be the full length if the index is zero
+      const allowedCharacterDiff = idx * 2
+      return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
    })
    if (foundTableName) {
      const aliasedTableName = tableName.replace(
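A standalone sketch of the length check above, assuming a field reference whose table part may be wrapped in quotes, backticks or brackets:

```ts
// Decide whether `name` (the table part of a field reference) refers to `tableName`,
// tolerating one wrapping character on each side when the match starts at index 1.
function matchesTableName(name: string, tableName: string): boolean {
  const idx = name.indexOf(tableName)
  if (idx === -1 || idx > 1) {
    return false
  }
  // idx === 0 -> must be the full length; idx === 1 -> allow two wrapper characters
  const allowedCharacterDiff = idx * 2
  return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
}

// e.g. matchesTableName(`"persons"`, "persons") === true
```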
@@ -107,57 +111,55 @@ export default class AliasTables {
  }

  async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse {
-    json = cloneDeep(json)
-    const aliasTable = (table: Table) => ({
-      ...table,
-      name: this.getAlias(table.name),
-    })
-    // run through the query json to update anywhere a table may be used
-    if (json.resource?.fields) {
-      json.resource.fields = json.resource.fields.map(field =>
-        this.aliasField(field)
-      )
-    }
-    if (json.filters) {
-      for (let [filterKey, filter] of Object.entries(json.filters)) {
-        if (typeof filter !== "object") {
-          continue
-        }
-        const aliasedFilters: typeof filter = {}
-        for (let key of Object.keys(filter)) {
-          aliasedFilters[this.aliasField(key)] = filter[key]
-        }
-        json.filters[filterKey as keyof SearchFilters] = aliasedFilters
-      }
-    }
-    if (json.relationships) {
-      json.relationships = json.relationships.map(relationship => ({
-        ...relationship,
-        aliases: this.aliasMap([
-          relationship.through,
-          relationship.tableName,
-          json.endpoint.entityId,
-        ]),
-      }))
-    }
-    if (json.meta?.table) {
-      json.meta.table = aliasTable(json.meta.table)
-    }
-    if (json.meta?.tables) {
-      const aliasedTables: Record<string, Table> = {}
-      for (let [tableName, table] of Object.entries(json.meta.tables)) {
-        aliasedTables[this.getAlias(tableName)] = aliasTable(table)
-      }
-      json.meta.tables = aliasedTables
-    }
-    // invert and return
-    const invertedTableAliases: Record<string, string> = {}
-    for (let [key, value] of Object.entries(this.tableAliases)) {
-      invertedTableAliases[value] = key
-    }
-    json.tableAliases = invertedTableAliases
+    const fieldLength = json.resource?.fields?.length
+    const aliasingEnabled = fieldLength && fieldLength > 0
+    if (aliasingEnabled) {
+      json = cloneDeep(json)
+      // run through the query json to update anywhere a table may be used
+      if (json.resource?.fields) {
+        json.resource.fields = json.resource.fields.map(field =>
+          this.aliasField(field)
+        )
+      }
+      if (json.filters) {
+        for (let [filterKey, filter] of Object.entries(json.filters)) {
+          if (typeof filter !== "object") {
+            continue
+          }
+          const aliasedFilters: typeof filter = {}
+          for (let key of Object.keys(filter)) {
+            aliasedFilters[this.aliasField(key)] = filter[key]
+          }
+          json.filters[filterKey as keyof SearchFilters] = aliasedFilters
+        }
+      }
+      if (json.meta?.table) {
+        this.getAlias(json.meta.table.name)
+      }
+      if (json.meta?.tables) {
+        Object.keys(json.meta.tables).forEach(tableName =>
+          this.getAlias(tableName)
+        )
+      }
+      if (json.relationships) {
+        json.relationships = json.relationships.map(relationship => ({
+          ...relationship,
+          aliases: this.aliasMap([
+            relationship.through,
+            relationship.tableName,
+            json.endpoint.entityId,
+          ]),
+        }))
+      }
+      // invert and return
+      const invertedTableAliases: Record<string, string> = {}
+      for (let [key, value] of Object.entries(this.tableAliases)) {
+        invertedTableAliases[value] = key
+      }
+      json.tableAliases = invertedTableAliases
+    }
    const response = await getDatasourceAndQuery(json)
-    if (Array.isArray(response)) {
+    if (Array.isArray(response) && aliasingEnabled) {
      return this.reverse(response)
    } else {
      return response
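For orientation, a compact sketch of the alias bookkeeping this class performs (simplified, not the actual implementation): each table gets a short generated alias for the outgoing SQL, and the map is inverted before the query is dispatched so results can be translated back to real table names:

```ts
class TableAliaser {
  private aliases: Record<string, string> = {}
  private charSeq = "a".charCodeAt(0)

  // Return a stable short alias for a table name, generating one on first use.
  getAlias(tableName: string): string {
    if (!this.aliases[tableName]) {
      this.aliases[tableName] = String.fromCharCode(this.charSeq++)
    }
    return this.aliases[tableName]
  }

  // Invert { tableName: alias } into { alias: tableName } for the query layer.
  invert(): Record<string, string> {
    const inverted: Record<string, string> = {}
    for (const [tableName, alias] of Object.entries(this.aliases)) {
      inverted[alias] = tableName
    }
    return inverted
  }
}
```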
@@ -6,6 +6,7 @@ import {
  BulkImportRequest,
  BulkImportResponse,
  Operation,
+  RenameColumn,
  SaveTableRequest,
  SaveTableResponse,
  Table,
@@ -25,9 +26,12 @@ function getDatasourceId(table: Table) {
  return breakExternalTableId(table._id).datasourceId
}

-export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
+export async function save(
+  ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
+  renaming?: RenameColumn
+) {
  const inputs = ctx.request.body
-  const renaming = inputs?._rename
+  const adding = inputs?._add
  // can't do this right now
  delete inputs.rows
  const tableId = ctx.request.body._id
@@ -40,7 +44,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
  const { datasource, table } = await sdk.tables.external.save(
    datasourceId!,
    inputs,
-    { tableId, renaming }
+    { tableId, renaming, adding }
  )
  builderSocket?.emitDatasourceUpdate(ctx, datasource)
  return table
@@ -74,8 +74,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
  const appId = ctx.appId
  const table = ctx.request.body
  const isImport = table.rows
+  const renaming = ctx.request.body._rename

-  let savedTable = await pickApi({ table }).save(ctx)
+  const api = pickApi({ table })
+  // do not pass _rename or _add if saving to CouchDB
+  if (api === internal) {
+    delete ctx.request.body._add
+    delete ctx.request.body._rename
+  }
+  let savedTable = await api.save(ctx, renaming)
  if (!table._id) {
    await events.table.created(savedTable)
    savedTable = sdk.tables.enrichViewSchemas(savedTable)
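A rough sketch of the dispatch pattern above, with stand-in types and module names: the controller picks the internal or external implementation and keeps the `_rename`/`_add` bookkeeping keys out of what the internal (CouchDB) path stores:

```ts
interface TableBody {
  _id?: string
  _rename?: { old: string; updated: string }
  _add?: { name: string }
}

interface TableApi {
  save(body: TableBody, renaming?: TableBody["_rename"]): Promise<TableBody>
}

// Route the save to the right implementation and strip the bookkeeping keys
// before the internal store sees the document.
async function saveTable(
  body: TableBody,
  internal: TableApi,
  external: TableApi,
  isExternal: boolean
): Promise<TableBody> {
  const api = isExternal ? external : internal
  const renaming = body._rename
  if (api === internal) {
    delete body._add
    delete body._rename
  }
  return api.save(body, renaming)
}
```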
@@ -12,11 +12,12 @@ import {
} from "@budibase/types"
import sdk from "../../../sdk"

-export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
+export async function save(
+  ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
+  renaming?: RenameColumn
+) {
  const { rows, ...rest } = ctx.request.body
-  let tableToSave: Table & {
-    _rename?: RenameColumn
-  } = {
+  let tableToSave: Table = {
    _id: generateTableID(),
    ...rest,
    // Ensure these fields are populated, even if not sent in the request
@@ -28,15 +29,12 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
    tableToSave.views = {}
  }

-  const renaming = tableToSave._rename
-  delete tableToSave._rename
-
  try {
    const { table } = await sdk.tables.internal.save(tableToSave, {
      user: ctx.user,
      rowsToImport: rows,
      tableId: ctx.request.body._id,
-      renaming: renaming,
+      renaming,
    })

    return table
@@ -13,7 +13,7 @@ describe("/api/keys", () => {

  describe("fetch", () => {
    it("should allow fetching", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
        const res = await request
          .get(`/api/keys`)
          .set(config.defaultHeaders())
@@ -34,7 +34,7 @@ describe("/api/keys", () => {

  describe("update", () => {
    it("should allow updating a value", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
        const res = await request
          .put(`/api/keys/TEST`)
          .send({
@@ -248,4 +248,13 @@ describe("/applications", () => {
      expect(devLogs.data.length).toBe(0)
    })
  })
+
+  describe("permissions", () => {
+    it("should only return apps a user has access to", async () => {
+      const user = await config.createUser()
+
+      const apps = await config.api.application.fetch()
+      expect(apps.length).toBeGreaterThan(0)
+    })
+  })
})
@@ -157,7 +157,7 @@ describe("/queries", () => {
    })

    it("should find a query in cloud", async () => {
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
        const query = await config.createQuery()
        const res = await request
          .get(`/api/queries/${query._id}`)
@@ -882,8 +882,7 @@ describe.each([
        ],
        tableId: table._id,
      })
-      // the environment needs configured for this
-      await setup.switchToSelfHosted(async () => {
+      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
        return context.doInAppContext(config.getAppId(), async () => {
          const enriched = await outputProcessing(table, [row])
          expect((enriched as Row[])[0].attachment[0].url).toBe(
@@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
tk.freeze(mocks.date.MOCK_DATE)

const { basicTable } = setup.structures
+const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/

describe("/tables", () => {
  let request = setup.getRequest()
@@ -285,6 +286,35 @@ describe("/tables", () => {
      expect(res.body.schema.roleId).toBeDefined()
    })
  })
+
+  it("should add a new column for an internal DB table", async () => {
+    const saveTableRequest: SaveTableRequest = {
+      _add: {
+        name: "NEW_COLUMN",
+      },
+      ...basicTable(),
+    }
+
+    const response = await request
+      .post(`/api/tables`)
+      .send(saveTableRequest)
+      .set(config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+
+    const expectedResponse = {
+      ...saveTableRequest,
+      _rev: expect.stringMatching(/^\d-.+/),
+      _id: expect.stringMatching(/^ta_.+/),
+      createdAt: expect.stringMatching(ISO_REGEX_PATTERN),
+      updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
+      views: {},
+    }
+    delete expectedResponse._add
+
+    expect(response.status).toBe(200)
+    expect(response.body).toEqual(expectedResponse)
+  })
})

describe("import", () => {
@@ -77,21 +77,3 @@ export function getConfig() {
  }
  return config!
}
-
-export async function switchToSelfHosted(func: any) {
-  // self hosted stops any attempts to Dynamo
-  env._set("NODE_ENV", "production")
-  env._set("SELF_HOSTED", true)
-  let error
-  try {
-    await func()
-  } catch (err) {
-    error = err
-  }
-  env._set("NODE_ENV", "jest")
-  env._set("SELF_HOSTED", false)
-  // don't throw error until after reset
-  if (error) {
-    throw error
-  }
-}
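The tests above now use `config.withEnv(...)` instead of this helper. A minimal sketch of such an env-override utility, assuming a plain mutable env object (the real TestConfiguration API may differ):

```ts
type Env = Record<string, string | boolean | undefined>

// Run `fn` with temporary environment overrides, restoring the originals
// afterwards even if the callback throws.
async function withEnv<T>(env: Env, overrides: Env, fn: () => Promise<T>): Promise<T> {
  const previous: Env = {}
  for (const key of Object.keys(overrides)) {
    previous[key] = env[key]
    env[key] = overrides[key]
  }
  try {
    return await fn()
  } finally {
    for (const key of Object.keys(overrides)) {
      env[key] = previous[key]
    }
  }
}

// usage: await withEnv(env, { SELF_HOSTED: "true" }, async () => { /* test body */ })
```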
@@ -0,0 +1,363 @@
+import fetch from "node-fetch"
+import {
+  generateMakeRequest,
+  MakeRequestResponse,
+} from "../api/routes/public/tests/utils"
+import { v4 as uuidv4 } from "uuid"
+import * as setup from "../api/routes/tests/utilities"
+import {
+  Datasource,
+  FieldType,
+  Table,
+  TableRequest,
+  TableSourceType,
+} from "@budibase/types"
+import _ from "lodash"
+import { databaseTestProviders } from "../integrations/tests/utils"
+import mysql from "mysql2/promise"
+import { builderSocket } from "../websockets"
+// @ts-ignore
+fetch.mockSearch()
+
+const config = setup.getConfig()!
+
+jest.unmock("mysql2/promise")
+jest.mock("../websockets", () => ({
+  clientAppSocket: jest.fn(),
+  gridAppSocket: jest.fn(),
+  initialise: jest.fn(),
+  builderSocket: {
+    emitTableUpdate: jest.fn(),
+    emitTableDeletion: jest.fn(),
+    emitDatasourceUpdate: jest.fn(),
+    emitDatasourceDeletion: jest.fn(),
+    emitScreenUpdate: jest.fn(),
+    emitAppMetadataUpdate: jest.fn(),
+    emitAppPublish: jest.fn(),
+  },
+}))
+
+describe("mysql integrations", () => {
+  let makeRequest: MakeRequestResponse,
+    mysqlDatasource: Datasource,
+    primaryMySqlTable: Table
+
+  beforeAll(async () => {
+    await config.init()
+    const apiKey = await config.generateApiKey()
+
+    makeRequest = generateMakeRequest(apiKey, true)
+
+    mysqlDatasource = await config.api.datasource.create(
+      await databaseTestProviders.mysql.datasource()
+    )
+  })
+
+  afterAll(async () => {
+    await databaseTestProviders.mysql.stop()
+  })
+
+  beforeEach(async () => {
+    primaryMySqlTable = await config.createTable({
+      name: uuidv4(),
+      type: "table",
+      primary: ["id"],
+      schema: {
+        id: {
+          name: "id",
+          type: FieldType.AUTO,
+          autocolumn: true,
+        },
+        name: {
+          name: "name",
+          type: FieldType.STRING,
+        },
+        description: {
+          name: "description",
+          type: FieldType.STRING,
+        },
+        value: {
+          name: "value",
+          type: FieldType.NUMBER,
+        },
+      },
+      sourceId: mysqlDatasource._id,
+      sourceType: TableSourceType.EXTERNAL,
+    })
+  })
+
+  afterAll(config.end)
+
+  it("validate table schema", async () => {
+    const res = await makeRequest(
+      "get",
+      `/api/datasources/${mysqlDatasource._id}`
+    )
+
+    expect(res.status).toBe(200)
+    expect(res.body).toEqual({
+      config: {
+        database: "mysql",
+        host: mysqlDatasource.config!.host,
+        password: "--secret-value--",
+        port: mysqlDatasource.config!.port,
+        user: "root",
+      },
+      plus: true,
+      source: "MYSQL",
+      type: "datasource_plus",
+      _id: expect.any(String),
+      _rev: expect.any(String),
+      createdAt: expect.any(String),
+      updatedAt: expect.any(String),
+      entities: expect.any(Object),
+    })
+  })
+
+  describe("POST /api/datasources/verify", () => {
+    it("should be able to verify the connection", async () => {
+      await config.api.datasource.verify(
+        {
+          datasource: await databaseTestProviders.mysql.datasource(),
+        },
+        {
+          body: {
+            connected: true,
+          },
+        }
+      )
+    })
+
+    it("should state an invalid datasource cannot connect", async () => {
+      const dbConfig = await databaseTestProviders.mysql.datasource()
+      await config.api.datasource.verify(
+        {
+          datasource: {
+            ...dbConfig,
+            config: {
+              ...dbConfig.config,
+              password: "wrongpassword",
+            },
+          },
+        },
+        {
+          body: {
+            connected: false,
+            error:
+              "Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
+          },
+        }
+      )
+    })
+  })
+
+  describe("POST /api/datasources/info", () => {
+    it("should fetch information about mysql datasource", async () => {
+      const primaryName = primaryMySqlTable.name
+      const response = await makeRequest("post", "/api/datasources/info", {
+        datasource: mysqlDatasource,
+      })
+      expect(response.status).toBe(200)
+      expect(response.body.tableNames).toBeDefined()
+      expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
+    })
+  })
+
+  describe("Integration compatibility with mysql search_path", () => {
+    let client: mysql.Connection, pathDatasource: Datasource
+    const database = "test1"
+    const database2 = "test-2"
+
+    beforeAll(async () => {
+      const dsConfig = await databaseTestProviders.mysql.datasource()
+      const dbConfig = dsConfig.config!
+
+      client = await mysql.createConnection(dbConfig)
+      await client.query(`CREATE DATABASE \`${database}\`;`)
+      await client.query(`CREATE DATABASE \`${database2}\`;`)
+
+      const pathConfig: any = {
+        ...dsConfig,
+        config: {
+          ...dbConfig,
+          database,
+        },
+      }
+      pathDatasource = await config.api.datasource.create(pathConfig)
+    })
+
+    afterAll(async () => {
+      await client.query(`DROP DATABASE \`${database}\`;`)
+      await client.query(`DROP DATABASE \`${database2}\`;`)
+      await client.end()
+    })
+
+    it("discovers tables from any schema in search path", async () => {
+      await client.query(
+        `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
+      )
+      const response = await makeRequest("post", "/api/datasources/info", {
+        datasource: pathDatasource,
+      })
+      expect(response.status).toBe(200)
+      expect(response.body.tableNames).toBeDefined()
+      expect(response.body.tableNames).toEqual(
+        expect.arrayContaining(["table1"])
+      )
+    })
+
+    it("does not mix columns from different tables", async () => {
+      const repeated_table_name = "table_same_name"
+      await client.query(
+        `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
+      )
+      await client.query(
+        `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
+      )
+      const response = await makeRequest(
+        "post",
+        `/api/datasources/${pathDatasource._id}/schema`,
+        {
+          tablesFilter: [repeated_table_name],
+        }
+      )
+      expect(response.status).toBe(200)
+      expect(
+        response.body.datasource.entities[repeated_table_name].schema
+      ).toBeDefined()
+      const schema =
+        response.body.datasource.entities[repeated_table_name].schema
+      expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
+    })
+  })
+
+  describe("POST /api/tables/", () => {
+    let client: mysql.Connection
+    const emitDatasourceUpdateMock = jest.fn()
+
+    beforeEach(async () => {
+      client = await mysql.createConnection(
+        (
+          await databaseTestProviders.mysql.datasource()
+        ).config!
+      )
+      mysqlDatasource = await config.api.datasource.create(
+        await databaseTestProviders.mysql.datasource()
+      )
+    })
+
+    afterEach(async () => {
+      await client.end()
+    })
+
+    it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
+      const addColumnToTable: TableRequest = {
+        type: "table",
+        sourceType: TableSourceType.EXTERNAL,
+        name: "table",
+        sourceId: mysqlDatasource._id!,
+        primary: ["id"],
+        schema: {
+          id: {
+            type: FieldType.AUTO,
+            name: "id",
+            autocolumn: true,
+          },
+          new_column: {
+            type: FieldType.NUMBER,
+            name: "new_column",
+          },
+        },
+        _add: {
+          name: "new_column",
+        },
+      }
+
+      jest
+        .spyOn(builderSocket!, "emitDatasourceUpdate")
+        .mockImplementation(emitDatasourceUpdateMock)
+
+      await makeRequest("post", "/api/tables/", addColumnToTable)
+
+      const expectedTable: TableRequest = {
+        ...addColumnToTable,
+        schema: {
+          id: {
+            type: FieldType.NUMBER,
+            name: "id",
+            autocolumn: true,
+            constraints: {
+              presence: false,
+            },
+            externalType: "int unsigned",
+          },
+          new_column: {
+            type: FieldType.NUMBER,
+            name: "new_column",
+            autocolumn: false,
+            constraints: {
+              presence: false,
+            },
+            externalType: "float(8,2)",
+          },
+        },
+        created: true,
+        _id: `${mysqlDatasource._id}__table`,
+      }
+      delete expectedTable._add
+
+      expect(emitDatasourceUpdateMock).toBeCalledTimes(1)
+      const emittedDatasource: Datasource =
+        emitDatasourceUpdateMock.mock.calls[0][1]
+      expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
+    })
+
+    it("will rename a column", async () => {
+      await makeRequest("post", "/api/tables/", primaryMySqlTable)
+
+      let renameColumnOnTable: TableRequest = {
+        ...primaryMySqlTable,
+        schema: {
+          id: {
+            name: "id",
+            type: FieldType.AUTO,
+            autocolumn: true,
+            externalType: "unsigned integer",
+          },
+          name: {
+            name: "name",
+            type: FieldType.STRING,
+            externalType: "text",
+          },
+          description: {
+            name: "description",
+            type: FieldType.STRING,
+            externalType: "text",
+          },
+          age: {
+            name: "age",
+            type: FieldType.NUMBER,
+            externalType: "float(8,2)",
+          },
+        },
+      }
+
+      const response = await makeRequest(
+        "post",
+        "/api/tables/",
+        renameColumnOnTable
+      )
+      mysqlDatasource = (
+        await makeRequest(
+          "post",
+          `/api/datasources/${mysqlDatasource._id}/schema`
+        )
+      ).body.datasource
+
+      expect(response.status).toEqual(200)
+      expect(
+        Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
+      ).toEqual(["id", "name", "description", "age"])
+    })
+  })
+})
@@ -12,6 +12,8 @@ import {
} from "@budibase/types"
import environment from "../../environment"

+type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any
+
const envLimit = environment.SQL_MAX_ROWS
  ? parseInt(environment.SQL_MAX_ROWS)
  : null
@@ -322,15 +324,18 @@ class InternalBuilder {
  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
    let { sort, paginate } = json
    const table = json.meta?.table
+    const aliases = json.tableAliases
+    const aliased =
+      table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
    if (sort && Object.keys(sort || {}).length > 0) {
      for (let [key, value] of Object.entries(sort)) {
        const direction =
          value.direction === SortDirection.ASCENDING ? "asc" : "desc"
-        query = query.orderBy(`${table?.name}.${key}`, direction)
+        query = query.orderBy(`${aliased}.${key}`, direction)
      }
    } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
      // @ts-ignore
-      query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
+      query = query.orderBy(`${aliased}.${table?.primary[0]}`)
    }
    return query
  }
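A tiny sketch of the alias resolution used for the ORDER BY above, assuming a `tableAliases` map shaped like `{ realName: alias }`:

```ts
// Qualify a sort column with the table alias when one is present,
// falling back to the real table name otherwise.
function qualifySortColumn(
  tableName: string | undefined,
  column: string,
  tableAliases?: Record<string, string>
): string {
  const aliased =
    tableName && tableAliases?.[tableName] ? tableAliases[tableName] : tableName
  return `${aliased}.${column}`
}

// e.g. qualifySortColumn("persons", "year", { persons: "a" }) === "a.year"
```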
@@ -605,7 +610,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
    return query.toSQL().toNative()
  }

-  async getReturningRow(queryFn: Function, json: QueryJson) {
+  async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
    if (!json.extra || !json.extra.idFilter) {
      return {}
    }
@@ -617,7 +622,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
      resource: {
        fields: [],
      },
-      filters: json.extra.idFilter,
+      filters: json.extra?.idFilter,
      paginate: {
        limit: 1,
      },
@@ -646,7 +651,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
  // this function recreates the returning functionality of postgres
  async queryWithReturning(
    json: QueryJson,
-    queryFn: Function,
+    queryFn: QueryFunction,
    processFn: Function = (result: any) => result
  ) {
    const sqlClient = this.getSqlClient()
@@ -4,6 +4,7 @@ import Sql from "../base/sql"
import { SqlClient } from "../utils"
import AliasTables from "../../api/controllers/row/alias"
import { generator } from "@budibase/backend-core/tests"
+import { Knex } from "knex"

function multiline(sql: string) {
  return sql.replace(/\n/g, "").replace(/ +/g, " ")
@@ -160,6 +161,28 @@ describe("Captures of real examples", () => {
    })
  })

+  describe("returning (everything bar Postgres)", () => {
+    it("should be able to handle row returning", () => {
+      const queryJson = getJson("createSimple.json")
+      const SQL = new Sql(SqlClient.MS_SQL, limit)
+      let query = SQL._query(queryJson, { disableReturning: true })
+      expect(query).toEqual({
+        sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
+        bindings: [22, "Test"],
+      })
+
+      // now check returning
+      let returningQuery: Knex.SqlNative = { sql: "", bindings: [] }
+      SQL.getReturningRow((input: Knex.SqlNative) => {
+        returningQuery = input
+      }, queryJson)
+      expect(returningQuery).toEqual({
+        sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]",
+        bindings: [1, "Test", 22],
+      })
+    })
+  })
+
  describe("check max character aliasing", () => {
    it("should handle over 'z' max character alias", () => {
      const tableNames = []
@@ -68,7 +68,7 @@
      "primary": [
        "personid"
      ],
-      "name": "a",
+      "name": "persons",
      "schema": {
        "year": {
          "type": "number",
@@ -0,0 +1,64 @@
+{
+  "endpoint": {
+    "datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
+    "entityId": "people",
+    "operation": "CREATE"
+  },
+  "resource": {
+    "fields": [
+      "a.name",
+      "a.age"
+    ]
+  },
+  "filters": {},
+  "relationships": [],
+  "body": {
+    "name": "Test",
+    "age": 22
+  },
+  "extra": {
+    "idFilter": {
+      "equal": {
+        "name": "Test",
+        "age": 22
+      }
+    }
+  },
+  "meta": {
+    "table": {
+      "_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people",
+      "type": "table",
+      "sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae",
+      "sourceType": "external",
+      "primary": [
+        "name",
+        "age"
+      ],
+      "name": "people",
+      "schema": {
+        "name": {
+          "type": "string",
+          "externalType": "varchar",
+          "autocolumn": false,
+          "name": "name",
+          "constraints": {
+            "presence": true
+          }
+        },
+        "age": {
+          "type": "number",
+          "externalType": "int",
+          "autocolumn": false,
+          "name": "age",
+          "constraints": {
+            "presence": false
+          }
+        }
+      },
+      "primaryDisplay": "name"
+    }
+  },
+  "tableAliases": {
+    "people": "a"
+  }
+}
@@ -58,7 +58,7 @@
      "primary": [
        "personid"
      ],
-      "name": "a",
+      "name": "persons",
      "schema": {
        "year": {
          "type": "number",
@@ -34,7 +34,7 @@
        "keypartone",
        "keyparttwo"
      ],
-      "name": "a",
+      "name": "compositetable",
      "schema": {
        "keyparttwo": {
          "type": "string",
@@ -49,7 +49,7 @@
      "primary": [
        "taskid"
      ],
-      "name": "a",
+      "name": "tasks",
      "schema": {
        "executorid": {
          "type": "number",
@@ -63,7 +63,7 @@
      "primary": [
        "productid"
      ],
-      "name": "a",
+      "name": "products",
      "schema": {
        "productname": {
          "type": "string",
@@ -53,7 +53,7 @@
      "primary": [
        "productid"
      ],
-      "name": "a",
+      "name": "products",
      "schema": {
        "productname": {
          "type": "string",
@@ -109,7 +109,7 @@
      "primary": [
        "taskid"
      ],
-      "name": "a",
+      "name": "tasks",
      "schema": {
        "executorid": {
          "type": "number",
@@ -66,7 +66,7 @@
      "primary": [
        "personid"
      ],
-      "name": "a",
+      "name": "persons",
      "schema": {
        "year": {
          "type": "number",
@@ -66,7 +66,7 @@
      "primary": [
        "personid"
      ],
-      "name": "a",
+      "name": "persons",
      "schema": {
        "year": {
          "type": "number",
@@ -11,7 +11,10 @@ import {
import * as exporters from "../../../../api/controllers/view/exporters"
import sdk from "../../../../sdk"
import { handleRequest } from "../../../../api/controllers/row/external"
-import { breakExternalTableId } from "../../../../integrations/utils"
+import {
+  breakExternalTableId,
+  breakRowIdField,
+} from "../../../../integrations/utils"
import { cleanExportRows } from "../utils"
import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "../search"
@@ -52,6 +55,15 @@ export async function search(options: SearchParams) {
    }
  }

+  // Make sure oneOf _id queries decode the Row IDs
+  if (query?.oneOf?._id) {
+    const rowIds = query.oneOf._id
+    query.oneOf._id = rowIds.map((row: string) => {
+      const ids = breakRowIdField(row)
+      return ids[0]
+    })
+  }
+
  try {
    const table = await sdk.tables.getTable(tableId)
    options = searchInputMapping(table, options)
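For illustration, a hedged sketch of what a `breakRowIdField`-style decoder does with encoded external row IDs such as `%5B1%5D` (the URI-encoded form of `[1]`); the real helper lives in the integrations utils and may cover more cases:

```ts
// Decode an external row ID such as "%5B1%5D" or "['a','b']" into its key parts.
function breakRowIdFieldSketch(rowId: string): (string | number)[] {
  const decoded = decodeURI(rowId).replace(/'/g, `"`).replace(/%2C/g, ",")
  const parsed = JSON.parse(decoded)
  return Array.isArray(parsed) ? parsed : [parsed]
}

// breakRowIdFieldSketch("%5B1%5D") -> [1]
// breakRowIdFieldSketch("%5B4%5D")[0] -> 4
```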
@@ -119,9 +131,7 @@ export async function exportRows(
    requestQuery = {
      oneOf: {
        _id: rowIds.map((row: string) => {
-          const ids = JSON.parse(
-            decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",")
-          )
+          const ids = breakRowIdField(row)
          if (ids.length > 1) {
            throw new HTTPError(
              "Export data does not support composite keys.",
@@ -21,10 +21,11 @@ jest.unmock("mysql2/promise")

jest.setTimeout(30000)

-describe.skip("external", () => {
+describe("external search", () => {
  const config = new TestConfiguration()

  let externalDatasource: Datasource, tableData: Table
+  const rows: Row[] = []

  beforeAll(async () => {
    const container = await new GenericContainer("mysql")
@@ -89,67 +90,81 @@ describe.skip("external", () => {
        },
      },
    }
-  })
-
-  describe("search", () => {
-    const rows: Row[] = []
-    beforeAll(async () => {
-      const table = await config.createExternalTable({
-        ...tableData,
-        sourceId: externalDatasource._id,
-      })
-      for (let i = 0; i < 10; i++) {
-        rows.push(
-          await config.createRow({
-            tableId: table._id,
-            name: generator.first(),
-            surname: generator.last(),
-            age: generator.age(),
-            address: generator.address(),
-          })
-        )
-      }
-    })
-
-    it("default search returns all the data", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-
-        const searchParams: SearchParams = {
-          tableId,
-          query: {},
-        }
-        const result = await search(searchParams)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
-        )
-      })
-    })
-
-    it("querying by fields will always return data attribute columns", async () => {
-      await config.doInContext(config.appId, async () => {
-        const tableId = config.table!._id!
-
-        const searchParams: SearchParams = {
-          tableId,
-          query: {},
-          fields: ["name", "age"],
-        }
-        const result = await search(searchParams)
-
-        expect(result.rows).toHaveLength(10)
-        expect(result.rows).toEqual(
-          expect.arrayContaining(
-            rows.map(r => ({
-              ...expectAnyExternalColsAttributes,
-              name: r.name,
-              age: r.age,
-            }))
-          )
-        )
-      })
-    })
-  })
-})
+
+    const table = await config.createExternalTable({
+      ...tableData,
+      sourceId: externalDatasource._id,
+    })
+    for (let i = 0; i < 10; i++) {
+      rows.push(
+        await config.createRow({
+          tableId: table._id,
+          name: generator.first(),
+          surname: generator.last(),
+          age: generator.age(),
+          address: generator.address(),
+        })
+      )
+    }
+  })
+
+  it("default search returns all the data", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {},
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(10)
+      expect(result.rows).toEqual(
+        expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
+      )
+    })
+  })
+
+  it("querying by fields will always return data attribute columns", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {},
+        fields: ["name", "age"],
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(10)
+      expect(result.rows).toEqual(
+        expect.arrayContaining(
+          rows.map(r => ({
+            ...expectAnyExternalColsAttributes,
+            name: r.name,
+            age: r.age,
+          }))
+        )
+      )
+    })
+  })
+
+  it("will decode _id in oneOf query", async () => {
+    await config.doInContext(config.appId, async () => {
+      const tableId = config.table!._id!
+
+      const searchParams: SearchParams = {
+        tableId,
+        query: {
+          oneOf: {
+            _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
+          },
+        },
+      }
+      const result = await search(searchParams)
+
+      expect(result.rows).toHaveLength(3)
+      expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])
+    })
+  })
+})
@@ -1,6 +1,5 @@
import {
  FieldType,
  FieldTypeSubtypes,
  SearchParams,
  Table,
  DocumentType,
@@ -3,6 +3,7 @@ import {
  Operation,
  RelationshipType,
  RenameColumn,
+  AddColumn,
  Table,
  TableRequest,
  ViewV2,
@@ -32,7 +33,7 @@ import * as viewSdk from "../../views"
export async function save(
  datasourceId: string,
  update: Table,
-  opts?: { tableId?: string; renaming?: RenameColumn }
+  opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
) {
  let tableToSave: TableRequest = {
    ...update,
@@ -165,8 +166,17 @@ export async function save(

  // remove the rename prop
  delete tableToSave._rename
+
+  // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
+  if (opts?.adding) {
+    datasource.entities[tableToSave.name] = (
+      await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
+    ).tables[tableToSave.name]
+  } else {
+    datasource.entities[tableToSave.name] = tableToSave
+  }
+
  // store it into couch now for budibase reference
-  datasource.entities[tableToSave.name] = tableToSave
  await db.put(populateExternalTableSchemas(datasource))

  // Since tables are stored inside datasources, we need to notify clients
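A hedged sketch of the branch above, with stand-in helper names: when a column has just been added, the incoming table payload is not trusted as-is and the schema is re-read from the external database so the new column carries its `externalType`:

```ts
interface ColumnSchema {
  name: string
  externalType?: string
}
interface TableSchema {
  name: string
  schema: Record<string, ColumnSchema>
}

// Decide which schema to store: the payload we were sent, or a fresh one
// introspected from the external database after a column was added.
async function resolveSchemaToStore(
  tableToSave: TableSchema,
  adding: boolean,
  fetchSchemaFromDb: (tableName: string) => Promise<TableSchema>
): Promise<TableSchema> {
  if (adding) {
    // re-introspect so the new column carries its database-native type
    return fetchSchemaFromDb(tableToSave.name)
  }
  return tableToSave
}
```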
@@ -1,6 +1,6 @@
import { Document } from "../../document"
import { View, ViewV2 } from "../view"
-import { RenameColumn } from "../../../sdk"
+import { AddColumn, RenameColumn } from "../../../sdk"
import { TableSchema } from "./schema"

export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"
@@ -29,5 +29,6 @@ export interface Table extends Document {

export interface TableRequest extends Table {
  _rename?: RenameColumn
+  _add?: AddColumn
  created?: boolean
}
@@ -60,6 +60,10 @@ export interface RenameColumn {
  updated: string
}

+export interface AddColumn {
+  name: string
+}
+
export interface RelationshipsJson {
  through?: string
  from?: string