diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
index 352f094507..b4293a2a0a 100644
--- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
@@ -308,7 +308,7 @@
{ name: "Auto Column", type: AUTO_TYPE },
]
} else {
- return [
+ let fields = [
FIELDS.STRING,
FIELDS.BARCODEQR,
FIELDS.LONGFORM,
@@ -316,10 +316,13 @@
FIELDS.DATETIME,
FIELDS.NUMBER,
FIELDS.BOOLEAN,
- FIELDS.ARRAY,
FIELDS.FORMULA,
- FIELDS.LINK,
]
+ // relationship (link) and multi-select (array) columns are only offered for
+ // internal tables and SQL datasources - not for NoSQL or spreadsheet
+ // (e.g. Google Sheets) datasources
+ if (!external || table.sql) {
+ fields = [...fields, FIELDS.LINK, FIELDS.ARRAY]
+ }
+ return fields
}
}
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/CreateExternalTableModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/CreateExternalTableModal.svelte
index 45269a365c..664b5629d4 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/CreateExternalTableModal.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/CreateExternalTableModal.svelte
@@ -35,7 +35,9 @@
await datasources.fetch()
$goto(`../../table/${table._id}`)
} catch (error) {
- notifications.error("Error saving table")
+ notifications.error(
+ `Error saving table - ${error?.message || "unknown error"}`
+ )
}
}
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte
index 7d03dafeb9..c12ddab78d 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte
@@ -1,15 +1,22 @@
-
- Authenticate with your google account to use the {IntegrationNames[
- datasource.type
- ]} integration.
+ {#if isGoogleConfigured === true}
+
+ Authenticate with your Google account to use the {IntegrationNames[
+ datasource.type
+ ]} integration.
+
+ save(datasource, true)} />
+ {:else if isGoogleConfigured === false}
+ Google authentication is not enabled. Please complete the Google SSO
+ configuration.
-
- save(datasource, true)} />
+ Configure Google SSO
+ {/if}
diff --git a/packages/builder/src/pages/builder/portal/settings/auth/index.svelte b/packages/builder/src/pages/builder/portal/settings/auth/index.svelte
index 0e82dd31e7..2d4dc7ee46 100644
--- a/packages/builder/src/pages/builder/portal/settings/auth/index.svelte
+++ b/packages/builder/src/pages/builder/portal/settings/auth/index.svelte
@@ -47,8 +47,9 @@
$: googleCallbackTooltip = $admin.cloud
? null
: googleCallbackReadonly
- ? "Vist the organisation page to update the platform URL"
+ ? "Visit the organisation page to update the platform URL"
: "Leave blank to use the default callback URL"
+ $: googleSheetsCallbackUrl = `${$organisation.platformUrl}/api/global/auth/datasource/google/callback`
$: GoogleConfigFields = {
Google: [
@@ -62,6 +63,14 @@
placeholder: $organisation.googleCallbackUrl,
copyButton: true,
},
+ {
+ name: "sheetsURL",
+ label: "Sheets URL",
+ readonly: googleCallbackReadonly,
+ tooltip: googleCallbackTooltip,
+ placeholder: googleSheetsCallbackUrl,
+ copyButton: true,
+ },
],
}
@@ -396,7 +405,11 @@
To allow users to authenticate using their Google accounts, fill out the
- fields below.
+ fields below. Read the documentation for more information.
diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts
index d212f7f361..3d41cd89af 100644
--- a/packages/server/src/api/controllers/datasource.ts
+++ b/packages/server/src/api/controllers/datasource.ts
@@ -84,8 +84,9 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
setDefaultDisplayColumns(datasource)
const dbResp = await db.put(datasource)
datasource._rev = dbResp.rev
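+ // strip any secret config values from the datasource before it is returned
+ // in the response body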
+ const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)
- const response: any = { datasource }
+ const response: any = { datasource: cleanedDatasource }
if (error) {
response.error = error
}
diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts
index 8a7a9a6c69..ee796e32d4 100644
--- a/packages/server/src/api/controllers/row/external.ts
+++ b/packages/server/src/api/controllers/row/external.ts
@@ -12,7 +12,7 @@ import * as exporters from "../view/exporters"
import { apiFileReturn } from "../../../utilities/fileSystem"
import {
Operation,
- BBContext,
+ UserCtx,
Row,
PaginationJson,
Table,
@@ -21,6 +21,7 @@ import {
SortJson,
} from "@budibase/types"
import sdk from "../../../sdk"
+import * as utils from "./utils"
const { cleanExportRows } = require("./utils")
@@ -49,12 +50,19 @@ export async function handleRequest(
)
}
-export async function patch(ctx: BBContext) {
+export async function patch(ctx: UserCtx) {
const inputs = ctx.request.body
const tableId = ctx.params.tableId
const id = inputs._id
// don't save the ID to db
delete inputs._id
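+ // validate external rows against the table schema before the update is sent
+ // to the datasource, throwing any field errors back to the caller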
+ const validateResult = await utils.validate({
+ row: inputs,
+ tableId,
+ })
+ if (!validateResult.valid) {
+ throw { validation: validateResult.errors }
+ }
return handleRequest(Operation.UPDATE, tableId, {
id: breakRowIdField(id),
row: inputs,
@@ -62,16 +70,23 @@ export async function patch(ctx: BBContext) {
})
}
-export async function save(ctx: BBContext) {
+export async function save(ctx: UserCtx) {
const inputs = ctx.request.body
const tableId = ctx.params.tableId
+ const validateResult = await utils.validate({
+ row: inputs,
+ tableId,
+ })
+ if (!validateResult.valid) {
+ throw { validation: validateResult.errors }
+ }
return handleRequest(Operation.CREATE, tableId, {
row: inputs,
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
}
-export async function fetchView(ctx: BBContext) {
+export async function fetchView(ctx: UserCtx) {
// there are no views in external datasources, shouldn't ever be called
// for now just fetch
const split = ctx.params.viewName.split("all_")
@@ -79,14 +94,14 @@ export async function fetchView(ctx: BBContext) {
return fetch(ctx)
}
-export async function fetch(ctx: BBContext) {
+export async function fetch(ctx: UserCtx) {
const tableId = ctx.params.tableId
return handleRequest(Operation.READ, tableId, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
}
-export async function find(ctx: BBContext) {
+export async function find(ctx: UserCtx) {
const id = ctx.params.rowId
const tableId = ctx.params.tableId
const response = (await handleRequest(Operation.READ, tableId, {
@@ -96,7 +111,7 @@ export async function find(ctx: BBContext) {
return response ? response[0] : response
}
-export async function destroy(ctx: BBContext) {
+export async function destroy(ctx: UserCtx) {
const tableId = ctx.params.tableId
const id = ctx.request.body._id
const { row } = (await handleRequest(Operation.DELETE, tableId, {
@@ -106,7 +121,7 @@ export async function destroy(ctx: BBContext) {
return { response: { ok: true }, row }
}
-export async function bulkDestroy(ctx: BBContext) {
+export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body
const tableId = ctx.params.tableId
let promises: Promise[] = []
@@ -122,7 +137,7 @@ export async function bulkDestroy(ctx: BBContext) {
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
}
-export async function search(ctx: BBContext) {
+export async function search(ctx: UserCtx) {
const tableId = ctx.params.tableId
const { paginate, query, ...params } = ctx.request.body
let { bookmark, limit } = params
@@ -185,12 +200,7 @@ export async function search(ctx: BBContext) {
}
}
-export async function validate(ctx: BBContext) {
- // can't validate external right now - maybe in future
- return { valid: true }
-}
-
-export async function exportRows(ctx: BBContext) {
+export async function exportRows(ctx: UserCtx) {
const { datasourceId, tableName } = breakExternalTableId(ctx.params.tableId)
const format = ctx.query.format
const { columns } = ctx.request.body
@@ -244,7 +254,7 @@ export async function exportRows(ctx: BBContext) {
return apiFileReturn(exporter(headers, exportRows))
}
-export async function fetchEnrichedRow(ctx: BBContext) {
+export async function fetchEnrichedRow(ctx: UserCtx) {
const id = ctx.params.rowId
const tableId = ctx.params.tableId
const { datasourceId, tableName } = breakExternalTableId(tableId)
diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts
index b59f245098..348d441c78 100644
--- a/packages/server/src/api/controllers/row/index.ts
+++ b/packages/server/src/api/controllers/row/index.ts
@@ -2,6 +2,8 @@ import { quotas } from "@budibase/pro"
import * as internal from "./internal"
import * as external from "./external"
import { isExternalTable } from "../../../integrations/utils"
+import { Ctx } from "@budibase/types"
+import * as utils from "./utils"
function pickApi(tableId: any) {
if (isExternalTable(tableId)) {
@@ -129,9 +131,12 @@ export async function search(ctx: any) {
})
}
-export async function validate(ctx: any) {
+export async function validate(ctx: Ctx) {
const tableId = getTableId(ctx)
- ctx.body = await pickApi(tableId).validate(ctx)
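+ // validation logic is now shared between internal and external tables, so
+ // the common utils.validate is used instead of a per-datasource implementation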
+ ctx.body = await utils.validate({
+ row: ctx.request.body,
+ tableId,
+ })
}
export async function fetchEnrichedRow(ctx: any) {
diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts
index d36f9bf2f1..039f03c015 100644
--- a/packages/server/src/api/controllers/row/internal.ts
+++ b/packages/server/src/api/controllers/row/internal.ts
@@ -387,13 +387,6 @@ export async function search(ctx: Ctx) {
return response
}
-export async function validate(ctx: Ctx) {
- return utils.validate({
- tableId: ctx.params.tableId,
- row: ctx.request.body,
- })
-}
-
export async function exportRows(ctx: Ctx) {
const db = context.getAppDB()
const table = await db.get(ctx.params.tableId)
diff --git a/packages/server/src/api/controllers/row/utils.ts b/packages/server/src/api/controllers/row/utils.ts
index 82232b7f98..2e8f2f4536 100644
--- a/packages/server/src/api/controllers/row/utils.ts
+++ b/packages/server/src/api/controllers/row/utils.ts
@@ -4,11 +4,11 @@ import { FieldTypes } from "../../../constants"
import { context } from "@budibase/backend-core"
import { makeExternalQuery } from "../../../integrations/base/query"
import { Row, Table } from "@budibase/types"
-const validateJs = require("validate.js")
-const { cloneDeep } = require("lodash/fp")
import { Format } from "../view/exporters"
import { Ctx } from "@budibase/types"
import sdk from "../../../sdk"
+const validateJs = require("validate.js")
+const { cloneDeep } = require("lodash/fp")
validateJs.extend(validateJs.validators.datetime, {
parse: function (value: string) {
@@ -56,8 +56,7 @@ export async function validate({
}) {
let fetchedTable: Table
if (!table) {
- const db = context.getAppDB()
- fetchedTable = await db.get(tableId)
+ fetchedTable = await sdk.tables.getTable(tableId)
} else {
fetchedTable = table
}
diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts
index a6ad5bba99..a00e65687f 100644
--- a/packages/server/src/api/controllers/table/external.ts
+++ b/packages/server/src/api/controllers/table/external.ts
@@ -7,6 +7,7 @@ import {
generateJunctionTableName,
foreignKeyStructure,
hasTypeChanged,
+ setStaticSchemas,
} from "./utils"
import { FieldTypes } from "../../../constants"
import { makeExternalQuery } from "../../../integrations/base/query"
@@ -20,7 +21,7 @@ import {
Operation,
RenameColumn,
FieldSchema,
- BBContext,
+ UserCtx,
TableRequest,
RelationshipTypes,
} from "@budibase/types"
@@ -194,20 +195,20 @@ function isRelationshipSetup(column: FieldSchema) {
return column.foreignKey || column.through
}
-export async function save(ctx: BBContext) {
- const table: TableRequest = ctx.request.body
- const renamed = table?._rename
+export async function save(ctx: UserCtx) {
+ const inputs: TableRequest = ctx.request.body
+ const renamed = inputs?._rename
// can't do this right now
- delete table.rows
+ delete inputs.rows
const datasourceId = getDatasourceId(ctx.request.body)!
// table doesn't exist already, note that it is created
- if (!table._id) {
- table.created = true
+ if (!inputs._id) {
+ inputs.created = true
}
let tableToSave: TableRequest = {
type: "table",
- _id: buildExternalTableId(datasourceId, table.name),
- ...table,
+ _id: buildExternalTableId(datasourceId, inputs.name),
+ ...inputs,
}
let oldTable
@@ -224,6 +225,10 @@ export async function save(ctx: BBContext) {
if (!datasource.entities) {
datasource.entities = {}
}
+
+ // GSheets is a specific case - only ever has a static primary key
+ tableToSave = setStaticSchemas(datasource, tableToSave)
+
const oldTables = cloneDeep(datasource.entities)
const tables: Record = datasource.entities
@@ -246,7 +251,7 @@ export async function save(ctx: BBContext) {
const junctionTable = generateManyLinkSchema(
datasource,
schema,
- table,
+ tableToSave,
relatedTable
)
if (tables[junctionTable.name]) {
@@ -256,10 +261,12 @@ export async function save(ctx: BBContext) {
extraTablesToUpdate.push(junctionTable)
} else {
const fkTable =
- relationType === RelationshipTypes.ONE_TO_MANY ? table : relatedTable
+ relationType === RelationshipTypes.ONE_TO_MANY
+ ? tableToSave
+ : relatedTable
const foreignKey = generateLinkSchema(
schema,
- table,
+ tableToSave,
relatedTable,
relationType
)
@@ -271,11 +278,11 @@ export async function save(ctx: BBContext) {
fkTable.constrained.push(foreignKey)
}
// foreign key is in other table, need to save it to external
- if (fkTable._id !== table._id) {
+ if (fkTable._id !== tableToSave._id) {
extraTablesToUpdate.push(fkTable)
}
}
- generateRelatedSchema(schema, relatedTable, table, relatedColumnName)
+ generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
schema.main = true
}
@@ -313,7 +320,7 @@ export async function save(ctx: BBContext) {
return tableToSave
}
-export async function destroy(ctx: BBContext) {
+export async function destroy(ctx: UserCtx) {
const tableToDelete: TableRequest = await sdk.tables.getTable(
ctx.params.tableId
)
@@ -339,7 +346,7 @@ export async function destroy(ctx: BBContext) {
return tableToDelete
}
-export async function bulkImport(ctx: BBContext) {
+export async function bulkImport(ctx: UserCtx) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows }: { rows: unknown } = ctx.request.body
const schema: unknown = table.schema
@@ -348,7 +355,7 @@ export async function bulkImport(ctx: BBContext) {
ctx.throw(400, "Provided data import information is invalid.")
}
- const parsedRows = await parse(rows, schema)
+ const parsedRows = parse(rows, schema)
await handleRequest(Operation.BULK_CREATE, table._id!, {
rows: parsedRows,
})
diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts
index aa6dfde536..2ab7ad7b38 100644
--- a/packages/server/src/api/controllers/table/index.ts
+++ b/packages/server/src/api/controllers/table/index.ts
@@ -8,7 +8,7 @@ import {
import { isExternalTable, isSQL } from "../../../integrations/utils"
import { getDatasourceParams } from "../../../db/utils"
import { context, events } from "@budibase/backend-core"
-import { Table, BBContext } from "@budibase/types"
+import { Table, UserCtx } from "@budibase/types"
import sdk from "../../../sdk"
import csv from "csvtojson"
@@ -25,7 +25,7 @@ function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
}
// covers both internal and external
-export async function fetch(ctx: BBContext) {
+export async function fetch(ctx: UserCtx) {
const db = context.getAppDB()
const internal = await sdk.tables.getAllInternalTables()
@@ -53,12 +53,12 @@ export async function fetch(ctx: BBContext) {
ctx.body = [...internal, ...external]
}
-export async function find(ctx: BBContext) {
+export async function find(ctx: UserCtx) {
const tableId = ctx.params.tableId
ctx.body = await sdk.tables.getTable(tableId)
}
-export async function save(ctx: BBContext) {
+export async function save(ctx: UserCtx) {
const appId = ctx.appId
const table = ctx.request.body
const isImport = table.rows
@@ -79,7 +79,7 @@ export async function save(ctx: BBContext) {
ctx.body = savedTable
}
-export async function destroy(ctx: BBContext) {
+export async function destroy(ctx: UserCtx) {
const appId = ctx.appId
const tableId = ctx.params.tableId
const deletedTable = await pickApi({ tableId }).destroy(ctx)
@@ -91,7 +91,7 @@ export async function destroy(ctx: BBContext) {
ctx.body = { message: `Table ${tableId} deleted.` }
}
-export async function bulkImport(ctx: BBContext) {
+export async function bulkImport(ctx: UserCtx) {
const tableId = ctx.params.tableId
await pickApi({ tableId }).bulkImport(ctx)
// right now we don't trigger anything for bulk import because it
@@ -101,7 +101,7 @@ export async function bulkImport(ctx: BBContext) {
ctx.body = { message: `Bulk rows created.` }
}
-export async function csvToJson(ctx: BBContext) {
+export async function csvToJson(ctx: UserCtx) {
const { csvString } = ctx.request.body
const result = await csv().fromString(csvString)
@@ -110,7 +110,7 @@ export async function csvToJson(ctx: BBContext) {
ctx.body = result
}
-export async function validateNewTableImport(ctx: BBContext) {
+export async function validateNewTableImport(ctx: UserCtx) {
const { rows, schema }: { rows: unknown; schema: unknown } = ctx.request.body
if (isRows(rows) && isSchema(schema)) {
@@ -121,7 +121,7 @@ export async function validateNewTableImport(ctx: BBContext) {
}
}
-export async function validateExistingTableImport(ctx: BBContext) {
+export async function validateExistingTableImport(ctx: UserCtx) {
const { rows, tableId }: { rows: unknown; tableId: unknown } =
ctx.request.body
diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts
index bbccde467b..7c5c81939a 100644
--- a/packages/server/src/api/controllers/table/utils.ts
+++ b/packages/server/src/api/controllers/table/utils.ts
@@ -1,7 +1,11 @@
import { parse, isSchema, isRows } from "../../../utilities/schema"
import { getRowParams, generateRowID, InternalTables } from "../../../db/utils"
import { isEqual } from "lodash"
-import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
+import {
+ AutoFieldSubTypes,
+ FieldTypes,
+ GOOGLE_SHEETS_PRIMARY_KEY,
+} from "../../../constants"
import {
inputProcessing,
cleanupAttachments,
@@ -16,7 +20,7 @@ import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { events, context } from "@budibase/backend-core"
-import { Database } from "@budibase/types"
+import { Database, Datasource, SourceName, Table } from "@budibase/types"
export async function clearColumns(table: any, columnNames: any) {
const db: Database = context.getAppDB()
@@ -392,5 +396,17 @@ export function hasTypeChanged(table: any, oldTable: any) {
return false
}
+// used for external tables, some of them will have static schemas that need
+// to be hard set
+export function setStaticSchemas(datasource: Datasource, table: Table) {
+ // GSheets is a specific case - only ever has a static primary key
+ if (table && datasource.source === SourceName.GOOGLE_SHEETS) {
+ table.primary = [GOOGLE_SHEETS_PRIMARY_KEY]
+ // if there is an id column, remove it - it should never exist in GSheets
+ delete table.schema?.id
+ }
+ return table
+}
+
const _TableSaveFunctions = TableSaveFunctions
export { _TableSaveFunctions as TableSaveFunctions }
diff --git a/packages/server/src/constants/index.ts b/packages/server/src/constants/index.ts
index e55ad09add..9d6a1c247a 100644
--- a/packages/server/src/constants/index.ts
+++ b/packages/server/src/constants/index.ts
@@ -180,3 +180,4 @@ export enum AutomationErrors {
// pass through the list from the auth/core lib
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
export const MAX_AUTOMATION_RECURRING_ERRORS = 5
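+// Google Sheets rows are addressed by their sheet row number, which acts as
+// the table's only primary key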
+export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts
index 0c658df0f5..f8bc84adea 100644
--- a/packages/server/src/integrations/googlesheets.ts
+++ b/packages/server/src/integrations/googlesheets.ts
@@ -1,22 +1,25 @@
import {
DatasourceFieldType,
DatasourcePlus,
+ FieldType,
Integration,
+ Operation,
PaginationJson,
QueryJson,
QueryType,
+ Row,
SearchFilters,
SortJson,
Table,
- TableSchema,
+ TableRequest,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
-import { buildExternalTableId } from "./utils"
-import { DataSourceOperation, FieldTypes } from "../constants"
+import { buildExternalTableId, finaliseExternalTables } from "./utils"
import { GoogleSpreadsheet } from "google-spreadsheet"
import fetch from "node-fetch"
import { configs, HTTPError } from "@budibase/backend-core"
import { dataFilters } from "@budibase/shared-core"
+import { GOOGLE_SHEETS_PRIMARY_KEY } from "../constants"
interface GoogleSheetsConfig {
spreadsheetId: string
@@ -39,6 +42,17 @@ interface AuthTokenResponse {
access_token: string
}
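+// column types the GSheets integration will accept - richer types such as
+// links, attachments and arrays are rejected when a table schema is updated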
+const ALLOWED_TYPES = [
+ FieldType.STRING,
+ FieldType.FORMULA,
+ FieldType.NUMBER,
+ FieldType.LONGFORM,
+ FieldType.DATETIME,
+ FieldType.OPTIONS,
+ FieldType.BOOLEAN,
+ FieldType.BARCODEQR,
+]
+
const SCHEMA: Integration = {
plus: true,
auth: {
@@ -199,73 +213,90 @@ class GoogleSheetsIntegration implements DatasourcePlus {
this.client.useOAuth2Client(oauthClient)
await this.client.loadInfo()
- } catch (err) {
+ } catch (err: any) {
+ // this happens when the spreadsheet is an uploaded XLSX file that has not
+ // been converted to a native Google Sheet
+ if (err.message?.includes("operation is not supported")) {
+ err.message =
+ "This operation is not supported - XLSX sheets must be converted."
+ }
console.error("Error connecting to google sheets", err)
throw err
}
}
- async buildSchema(datasourceId: string) {
+ getTableSchema(title: string, headerValues: string[], id?: string) {
+ // base table
+ const table: Table = {
+ name: title,
+ primary: [GOOGLE_SHEETS_PRIMARY_KEY],
+ schema: {},
+ }
+ if (id) {
+ table._id = id
+ }
+ // build schema from headers
+ for (let header of headerValues) {
+ table.schema[header] = {
+ name: header,
+ type: FieldType.STRING,
+ }
+ }
+ return table
+ }
+
+ async buildSchema(datasourceId: string, entities: Record<string, Table>) {
await this.connect()
const sheets = this.client.sheetsByIndex
const tables: Record<string, Table> = {}
for (let sheet of sheets) {
// must fetch rows to determine schema
await sheet.getRows()
- // build schema
- const schema: TableSchema = {}
- // build schema from headers
- for (let header of sheet.headerValues) {
- schema[header] = {
- name: header,
- type: FieldTypes.STRING,
- }
- }
-
- // create tables
- tables[sheet.title] = {
- _id: buildExternalTableId(datasourceId, sheet.title),
- name: sheet.title,
- primary: ["rowNumber"],
- schema,
- }
+ const id = buildExternalTableId(datasourceId, sheet.title)
+ tables[sheet.title] = this.getTableSchema(
+ sheet.title,
+ sheet.headerValues,
+ id
+ )
}
-
- this.tables = tables
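+ // merge the freshly fetched sheet schema with any existing table definitions
+ // so user-configured column settings are kept, and record any schema errors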
+ const final = finaliseExternalTables(tables, entities)
+ this.tables = final.tables
+ this.schemaErrors = final.errors
}
async query(json: QueryJson) {
const sheet = json.endpoint.entityId
-
- const handlers = {
- [DataSourceOperation.CREATE]: () =>
- this.create({ sheet, row: json.body }),
- [DataSourceOperation.READ]: () => this.read({ ...json, sheet }),
- [DataSourceOperation.UPDATE]: () =>
- this.update({
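+ // map the generic datasource operation onto the google-spreadsheet client -
+ // rowNumber is the 1-based sheet row including the header row, so data rows
+ // are addressed as rowNumber - 2 below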
+ switch (json.endpoint.operation) {
+ case Operation.CREATE:
+ return this.create({ sheet, row: json.body as Row })
+ case Operation.BULK_CREATE:
+ return this.createBulk({ sheet, rows: json.body as Row[] })
+ case Operation.READ:
+ return this.read({ ...json, sheet })
+ case Operation.UPDATE:
+ return this.update({
// exclude the header row and zero index
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
sheet,
row: json.body,
- }),
- [DataSourceOperation.DELETE]: () =>
- this.delete({
+ })
+ case Operation.DELETE:
+ return this.delete({
// exclude the header row and zero index
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
sheet,
- }),
- [DataSourceOperation.CREATE_TABLE]: () =>
- this.createTable(json?.table?.name),
- [DataSourceOperation.UPDATE_TABLE]: () => this.updateTable(json.table),
- [DataSourceOperation.DELETE_TABLE]: () =>
- this.deleteTable(json?.table?.name),
+ })
+ case Operation.CREATE_TABLE:
+ return this.createTable(json?.table?.name)
+ case Operation.UPDATE_TABLE:
+ return this.updateTable(json.table!)
+ case Operation.DELETE_TABLE:
+ return this.deleteTable(json?.table?.name)
+ default:
+ throw new Error(
+ `GSheets integration does not support "${json.endpoint.operation}".`
+ )
}
-
- // @ts-ignore
- const internalQueryMethod = handlers[json.endpoint.operation]
-
- return await internalQueryMethod()
}
buildRowObject(headers: string[], values: string[], rowNumber: number) {
@@ -278,47 +309,70 @@ class GoogleSheetsIntegration implements DatasourcePlus {
}
async createTable(name?: string) {
+ if (!name) {
+ throw new Error("Must provide name for new sheet.")
+ }
try {
await this.connect()
- return await this.client.addSheet({ title: name, headerValues: ["test"] })
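+ // seed the new sheet's header row with the sheet name instead of a
+ // placeholder value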
+ return await this.client.addSheet({ title: name, headerValues: [name] })
} catch (err) {
console.error("Error creating new table in google sheets", err)
throw err
}
}
- async updateTable(table?: any) {
- try {
- await this.connect()
- const sheet = this.client.sheetsByTitle[table.name]
- await sheet.loadHeaderRow()
+ async updateTable(table: TableRequest) {
+ await this.connect()
+ const sheet = this.client.sheetsByTitle[table.name]
+ await sheet.loadHeaderRow()
- if (table._rename) {
- const headers = []
- for (let header of sheet.headerValues) {
- if (header === table._rename.old) {
- headers.push(table._rename.updated)
- } else {
- headers.push(header)
- }
+ if (table._rename) {
+ const headers = []
+ for (let header of sheet.headerValues) {
+ if (header === table._rename.old) {
+ headers.push(table._rename.updated)
+ } else {
+ headers.push(header)
}
- await sheet.setHeaderRow(headers)
- } else {
- const updatedHeaderValues = [...sheet.headerValues]
-
- const newField = Object.keys(table.schema).find(
- key => !sheet.headerValues.includes(key)
- )
-
- if (newField) {
- updatedHeaderValues.push(newField)
- }
-
- await sheet.setHeaderRow(updatedHeaderValues)
}
- } catch (err) {
- console.error("Error updating table in google sheets", err)
- throw err
+ try {
+ await sheet.setHeaderRow(headers)
+ } catch (err) {
+ console.error("Error updating column name in google sheets", err)
+ throw err
+ }
+ } else {
+ const updatedHeaderValues = [...sheet.headerValues]
+
+ // add any new columns that don't currently exist in the sheet's header row
+ for (let [key, column] of Object.entries(table.schema)) {
+ if (!ALLOWED_TYPES.includes(column.type)) {
+ throw new Error(
+ `Column type: ${column.type} not allowed for GSheets integration.`
+ )
+ }
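+ // formula columns are calculated by Budibase rather than stored in the
+ // sheet, so no header column is added for them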
+ if (
+ !sheet.headerValues.includes(key) &&
+ column.type !== FieldType.FORMULA
+ ) {
+ updatedHeaderValues.push(key)
+ }
+ }
+
+ // clear out deleted columns
+ for (let key of sheet.headerValues) {
+ if (!Object.keys(table.schema).includes(key)) {
+ const idx = updatedHeaderValues.indexOf(key)
+ updatedHeaderValues.splice(idx, 1)
+ }
+ }
+
+ try {
+ await sheet.setHeaderRow(updatedHeaderValues)
+ } catch (err) {
+ console.error("Error updating table in google sheets", err)
+ throw err
+ }
}
}
@@ -349,6 +403,24 @@ class GoogleSheetsIntegration implements DatasourcePlus {
}
}
+ async createBulk(query: { sheet: string; rows: any[] }) {
+ try {
+ await this.connect()
+ const sheet = this.client.sheetsByTitle[query.sheet]
+ let rowsToInsert = []
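+ // rows may arrive as JSON strings rather than objects, so parse any string
+ // entries before handing them to the sheet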
+ for (let row of query.rows) {
+ rowsToInsert.push(typeof row === "string" ? JSON.parse(row) : row)
+ }
+ const rows = await sheet.addRows(rowsToInsert)
+ return rows.map(row =>
+ this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber)
+ )
+ } catch (err) {
+ console.error("Error bulk writing to google sheets", err)
+ throw err
+ }
+ }
+
async read(query: {
sheet: string
filters?: SearchFilters
diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts
index 356a08f4a0..de8b318bb1 100644
--- a/packages/server/src/integrations/utils.ts
+++ b/packages/server/src/integrations/utils.ts
@@ -4,6 +4,7 @@ import { FieldTypes, BuildSchemaErrors, InvalidColumns } from "../constants"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
+const ENCODED_SPACE = encodeURIComponent(" ")
const SQL_NUMBER_TYPE_MAP = {
integer: FieldTypes.NUMBER,
@@ -79,6 +80,10 @@ export function isExternalTable(tableId: string) {
}
export function buildExternalTableId(datasourceId: string, tableName: string) {
+ // encode spaces - external table names (e.g. Google Sheet tab titles) can contain them
+ if (tableName.includes(" ")) {
+ tableName = encodeURIComponent(tableName)
+ }
return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
}
@@ -90,6 +95,10 @@ export function breakExternalTableId(tableId: string | undefined) {
let datasourceId = parts.shift()
// if they need joined
let tableName = parts.join(DOUBLE_SEPARATOR)
+ // if it contains encoded spaces, decode them
+ if (tableName.includes(ENCODED_SPACE)) {
+ tableName = decodeURIComponent(tableName)
+ }
return { datasourceId, tableName }
}
@@ -200,9 +209,9 @@ export function isIsoDateString(str: string) {
* @param column The column to check, to see if it is a valid relationship.
* @param tableIds The IDs of the tables which currently exist.
*/
-function shouldCopyRelationship(
+export function shouldCopyRelationship(
column: { type: string; tableId?: string },
- tableIds: [string]
+ tableIds: string[]
) {
return (
column.type === FieldTypes.LINK &&
@@ -219,7 +228,7 @@ function shouldCopyRelationship(
* @param column The column to check for options or boolean type.
* @param fetchedColumn The fetched column to check for the type in the external database.
*/
-function shouldCopySpecialColumn(
+export function shouldCopySpecialColumn(
column: { type: string },
fetchedColumn: { type: string } | undefined
) {
@@ -257,9 +266,12 @@ function copyExistingPropsOver(
tableIds: [string]
) {
if (entities && entities[tableName]) {
- if (entities[tableName].primaryDisplay) {
+ if (entities[tableName]?.primaryDisplay) {
table.primaryDisplay = entities[tableName].primaryDisplay
}
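+ // keep the created flag when existing table properties are copied over a
+ // freshly fetched schema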
+ if (entities[tableName]?.created) {
+ table.created = entities[tableName]?.created
+ }
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
if (!existingTableSchema.hasOwnProperty(key)) {
diff --git a/packages/types/src/documents/app/table.ts b/packages/types/src/documents/app/table.ts
index 01d2486dcb..929409d0e9 100644
--- a/packages/types/src/documents/app/table.ts
+++ b/packages/types/src/documents/app/table.ts
@@ -76,6 +76,7 @@ export interface Table extends Document {
sql?: boolean
indexes?: { [key: string]: any }
rows?: { [key: string]: any }
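+ // true when the table was created through Budibase rather than already
+ // existing in the datasource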
+ created?: boolean
}
export interface TableRequest extends Table {