Type table endpoints
parent 66d7fb23b8
commit b329187591
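This commit replaces the untyped, object-argument table endpoints with a typed tables.ts module and updates every call site to the new positional signatures. The shape of the change, taken from the import component in the first hunk below:

    // before: a single options object, with no compile-time checking
    await API.importTableData({ tableId, rows, identifierFields })

    // after: positional arguments, checked against the TableEndpoints interface
    await API.importTableData(tableId, rows, identifierFields)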
@@ -30,11 +30,7 @@
   const importData = async () => {
     try {
       loading = true
-      await API.importTableData({
-        tableId,
-        rows,
-        identifierFields,
-      })
+      await API.importTableData(tableId, rows, identifierFields)
       notifications.success("Rows successfully imported")
       popover.hide()
     } catch (error) {
@@ -128,11 +128,7 @@
     allValid = false

     if (rows.length > 0) {
-      const response = await API.validateExistingTableImport({
-        rows,
-        tableId,
-      })
-
+      const response = await API.validateExistingTableImport(rows, tableId)
       validation = response.schemaValidation
       invalidColumns = response.invalidColumns
       allValid = response.allValid
@@ -147,7 +147,7 @@
     loading = true
     try {
       if (rows.length > 0) {
-        const response = await API.validateNewTableImport({ rows, schema })
+        const response = await API.validateNewTableImport(rows, schema)
         validation = response.schemaValidation
         allValid = response.allValid
         errors = response.errors
@@ -110,10 +110,7 @@ export function createTablesStore() {
    if (!table?._id) {
      return
    }
-    await API.deleteTable({
-      tableId: table._id,
-      tableRev: table._rev || "rev",
-    })
+    await API.deleteTable(table._id, table._rev || "rev")
    replaceTable(table._id, null)
  }

@@ -77,12 +77,11 @@ export const patchAPI = API => {
    return await enrichRows(rows, tableId)
  }
  const searchTable = API.searchTable
-  API.searchTable = async params => {
-    const tableId = params?.tableId
-    const output = await searchTable(params)
+  API.searchTable = async (sourceId, opts) => {
+    const output = await searchTable(sourceId, opts)
    return {
      ...output,
-      rows: await enrichRows(output?.rows, tableId),
+      rows: await enrichRows(output.rows, sourceId),
    }
  }
  const fetchViewData = API.fetchViewData
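Note on the hunk above: the patched searchTable wrapper previously dug the table ID out of its params object; it now receives it directly as sourceId and threads it into enrichRows. A sketch of a call under the new signature (the table ID and query values are hypothetical placeholders):

    // "ta_example" is a placeholder table ID; the query shape matches the
    // call sites updated later in this commit
    const res = await API.searchTable("ta_example", {
      limit: 1,
      query: { equal: { name: "Alice" } },
    })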
@@ -1,152 +0,0 @@
-export const buildTableEndpoints = API => ({
-  /**
-   * Fetches a table definition.
-   * Since definitions cannot change at runtime, the result is cached.
-   * @param tableId the ID of the table to fetch
-   */
-  fetchTableDefinition: async tableId => {
-    return await API.get({
-      url: `/api/tables/${tableId}`,
-      cache: true,
-    })
-  },
-
-  /**
-   * Fetches all rows from a table.
-   * @param tableId the ID of the table for fetch
-   */
-  fetchTableData: async tableId => {
-    return await API.get({ url: `/api/${tableId}/rows` })
-  },
-
-  /**
-   * Searches a table using Lucene.
-   * @param tableId the ID of the table to search
-   * @param query the lucene search query
-   * @param bookmark the current pagination bookmark
-   * @param limit the number of rows to retrieve
-   * @param sort the field to sort by
-   * @param sortOrder the order to sort by
-   * @param sortType the type to sort by, either numerically or alphabetically
-   * @param paginate whether to paginate the data
-   */
-  searchTable: async ({
-    tableId,
-    query,
-    bookmark,
-    limit,
-    sort,
-    sortOrder,
-    sortType,
-    paginate,
-  }) => {
-    if (!tableId) {
-      return {
-        rows: [],
-      }
-    }
-    return await API.post({
-      url: `/api/${tableId}/search`,
-      body: {
-        ...(query ? { query } : {}),
-        bookmark,
-        limit,
-        sort,
-        sortOrder,
-        sortType,
-        paginate,
-      },
-    })
-  },
-
-  /**
-   * Imports data into an existing table
-   * @param tableId the table ID to import to
-   * @param rows the data import object
-   * @param identifierFields column names to be used as keys for overwriting existing rows
-   */
-  importTableData: async ({ tableId, rows, identifierFields }) => {
-    return await API.post({
-      url: `/api/tables/${tableId}/import`,
-      body: {
-        rows,
-        identifierFields,
-      },
-    })
-  },
-  csvToJson: async csvString => {
-    return await API.post({
-      url: "/api/convert/csvToJson",
-      body: {
-        csvString,
-      },
-    })
-  },
-
-  /**
-   * Gets a list of tables.
-   */
-  getTables: async () => {
-    return await API.get({
-      url: "/api/tables",
-    })
-  },
-
-  /**
-   * Get a single table based on table ID.
-   */
-  getTable: async tableId => {
-    return await API.get({
-      url: `/api/tables/${tableId}`,
-    })
-  },
-
-  /**
-   * Saves a table.
-   * @param table the table to save
-   */
-  saveTable: async table => {
-    return await API.post({
-      url: "/api/tables",
-      body: table,
-    })
-  },
-
-  /**
-   * Deletes a table.
-   * @param tableId the ID of the table to delete
-   * @param tableRev the rev of the table to delete
-   */
-  deleteTable: async ({ tableId, tableRev }) => {
-    return await API.delete({
-      url: `/api/tables/${tableId}/${tableRev}`,
-    })
-  },
-  validateNewTableImport: async ({ rows, schema }) => {
-    return await API.post({
-      url: "/api/tables/validateNewTableImport",
-      body: {
-        rows,
-        schema,
-      },
-    })
-  },
-  validateExistingTableImport: async ({ rows, tableId }) => {
-    return await API.post({
-      url: "/api/tables/validateExistingTableImport",
-      body: {
-        rows,
-        tableId,
-      },
-    })
-  },
-  migrateColumn: async ({ tableId, oldColumn, newColumn }) => {
-    return await API.post({
-      url: `/api/tables/${tableId}/migrate`,
-      body: {
-        oldColumn,
-        newColumn,
-      },
-    })
-  },
-})
@@ -0,0 +1,190 @@
+import {
+  BulkImportRequest,
+  BulkImportResponse,
+  CsvToJsonRequest,
+  CsvToJsonResponse,
+  FetchTablesResponse,
+  MigrateRequest,
+  MigrateResponse,
+  Row,
+  SaveTableRequest,
+  SaveTableResponse,
+  SearchRowRequest,
+  PaginatedSearchRowResponse,
+  TableResponse,
+  TableSchema,
+  ValidateNewTableImportRequest,
+  ValidateTableImportRequest,
+  ValidateTableImportResponse,
+} from "@budibase/types"
+import { BaseAPIClient } from "./types"
+
+export interface TableEndpoints {
+  fetchTableDefinition: (tableId: string) => Promise<TableResponse>
+  fetchTableData: (tableId: string) => Promise<Row[]>
+  searchTable: (
+    sourceId: string,
+    opts: SearchRowRequest
+  ) => Promise<PaginatedSearchRowResponse>
+  importTableData: (
+    tableId: string,
+    rows: Row[],
+    identifierFields?: string[]
+  ) => Promise<BulkImportResponse>
+  csvToJson: (csvString: string) => Promise<CsvToJsonResponse>
+  getTables: () => Promise<FetchTablesResponse>
+  getTable: (tableId: string) => Promise<TableResponse>
+  saveTable: (table: SaveTableRequest) => Promise<SaveTableResponse>
+  deleteTable: (id: string, rev: string) => Promise<{ message: string }>
+  validateNewTableImport: (
+    rows: Row[],
+    schema: TableSchema
+  ) => Promise<ValidateTableImportResponse>
+  validateExistingTableImport: (
+    rows: Row[],
+    tableId?: string
+  ) => Promise<ValidateTableImportResponse>
+  migrateColumn: (
+    tableId: string,
+    oldColumn: string,
+    newColumn: string
+  ) => Promise<MigrateResponse>
+}
+
+export const buildTableEndpoints = (API: BaseAPIClient): TableEndpoints => ({
+  /**
+   * Fetches a table definition.
+   * Since definitions cannot change at runtime, the result is cached.
+   * @param tableId the ID of the table to fetch
+   */
+  fetchTableDefinition: async tableId => {
+    return await API.get({
+      url: `/api/tables/${tableId}`,
+      cache: true,
+    })
+  },
+
+  /**
+   * Fetches all rows from a table.
+   * @param sourceId the ID of the table to fetch
+   */
+  fetchTableData: async sourceId => {
+    return await API.get({ url: `/api/${sourceId}/rows` })
+  },
+
+  /**
+   * Searches a table using Lucene.
+   * @param sourceId the ID of the table to search
+   * @param opts the search opts
+   */
+  searchTable: async (sourceId, opts) => {
+    return await API.post({
+      url: `/api/${sourceId}/search`,
+      body: opts,
+    })
+  },
+
+  /**
+   * Imports data into an existing table
+   * @param tableId the table ID to import to
+   * @param rows the data import object
+   * @param identifierFields column names to be used as keys for overwriting existing rows
+   */
+  importTableData: async (tableId, rows, identifierFields) => {
+    return await API.post<BulkImportRequest, BulkImportResponse>({
+      url: `/api/tables/${tableId}/import`,
+      body: {
+        rows,
+        identifierFields,
+      },
+    })
+  },
+
+  /**
+   * Converts a CSV string to JSON
+   * @param csvString the CSV string
+   */
+  csvToJson: async csvString => {
+    return await API.post<CsvToJsonRequest, CsvToJsonResponse>({
+      url: "/api/convert/csvToJson",
+      body: {
+        csvString,
+      },
+    })
+  },
+
+  /**
+   * Gets a list of tables.
+   */
+  getTables: async () => {
+    return await API.get({
+      url: "/api/tables",
+    })
+  },
+
+  /**
+   * Get a single table based on table ID.
+   * Dupe of fetchTableDefinition but not cached?
+   */
+  getTable: async tableId => {
+    return await API.get({
+      url: `/api/tables/${tableId}`,
+    })
+  },
+
+  /**
+   * Saves a table.
+   * @param table the table to save
+   */
+  saveTable: async table => {
+    return await API.post({
+      url: "/api/tables",
+      body: table,
+    })
+  },
+
+  /**
+   * Deletes a table.
+   * @param id the ID of the table to delete
+   * @param rev the rev of the table to delete
+   */
+  deleteTable: async (id, rev) => {
+    return await API.delete({
+      url: `/api/tables/${id}/${rev}`,
+    })
+  },
+
+  validateNewTableImport: async (rows, schema) => {
+    return await API.post<
+      ValidateNewTableImportRequest,
+      ValidateTableImportResponse
+    >({
+      url: "/api/tables/validateNewTableImport",
+      body: {
+        rows,
+        schema,
+      },
+    })
+  },
+  validateExistingTableImport: async (rows, tableId) => {
+    return await API.post<
+      ValidateTableImportRequest,
+      ValidateTableImportResponse
+    >({
+      url: "/api/tables/validateExistingTableImport",
+      body: {
+        rows,
+        tableId,
+      },
+    })
+  },
+  migrateColumn: async (tableId, oldColumn, newColumn) => {
+    return await API.post<MigrateRequest, MigrateResponse>({
+      url: `/api/tables/${tableId}/migrate`,
+      body: {
+        oldColumn,
+        newColumn,
+      },
+    })
+  },
+})
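A minimal sketch of how the typed endpoints compose at a call site, mirroring the import flow updated earlier in this commit (tableId, rows, and the "email" identifier field are hypothetical placeholders):

    // validate first, then import, overwriting rows matched on "email"
    const response = await API.validateExistingTableImport(rows, tableId)
    if (response.allValid) {
      await API.importTableData(tableId, rows, ["email"])
    }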
@ -27,6 +27,7 @@ import { RowActionEndpoints } from "./rowActions"
|
|||
import { RowEndpoints } from "./rows"
|
||||
import { ScreenEndpoints } from "./screens"
|
||||
import { SelfEndpoints } from "./self"
|
||||
import { TableEndpoints } from "./tables"
|
||||
|
||||
export enum HTTPMethod {
|
||||
POST = "POST",
|
||||
|
@ -124,4 +125,5 @@ export type APIClient = BaseAPIClient &
|
|||
RouteEndpoints &
|
||||
RowEndpoints &
|
||||
ScreenEndpoints &
|
||||
SelfEndpoints & { rowActions: RowActionEndpoints; [key: string]: any }
|
||||
SelfEndpoints &
|
||||
TableEndpoints & { rowActions: RowActionEndpoints; [key: string]: any }
|
||||
|
|
|
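With TableEndpoints folded into the APIClient intersection, table calls now type-check end to end. A minimal sketch (the helper below is hypothetical, not part of this commit):

    // table is inferred as TableResponse, so .schema is typed
    const fetchSchema = async (API: APIClient, tableId: string) => {
      const table = await API.fetchTableDefinition(tableId)
      return table.schema
    }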
@@ -29,11 +29,11 @@

   const migrateUserColumn = async () => {
     try {
-      await API.migrateColumn({
-        tableId: $definition._id,
-        oldColumn: column.schema.name,
-        newColumn: newColumnName,
-      })
+      await API.migrateColumn(
+        $definition._id,
+        column.schema.name,
+        newColumnName
+      )
       notifications.success("Column migrated")
     } catch (e) {
       notifications.error(`Failed to migrate: ${e.message}`)
@ -27,8 +27,7 @@ export const createActions = context => {
|
|||
}
|
||||
|
||||
const getRow = async id => {
|
||||
const res = await API.searchTable({
|
||||
tableId: get(datasource).tableId,
|
||||
const res = await API.searchTable(get(datasource).tableId, {
|
||||
limit: 1,
|
||||
query: {
|
||||
equal: {
|
||||
|
|
|
@@ -19,8 +19,7 @@ export default class TableFetch extends DataFetch {

     // Search table
     try {
-      const res = await this.API.searchTable({
-        tableId,
+      const res = await this.API.searchTable(tableId, {
         query,
         limit,
         sort: sortColumn,