Initial attempt to get SQS up and running within BB.
This commit is contained in:
parent 9cab2fbb12
commit adef1ed5ce
@@ -0,0 +1 @@
+sqlite
@@ -3,6 +3,7 @@ FROM couchdb:3.2.1
 ENV COUCHDB_USER admin
 ENV COUCHDB_PASSWORD admin
 EXPOSE 5984
+EXPOSE 4984

 RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
     wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | apt-key add - && \
@@ -28,8 +29,11 @@ ADD clouseau/log4j.properties clouseau/clouseau.ini ./
 WORKDIR /opt/couchdb
 ADD couch/vm.args couch/local.ini ./etc/

+WORKDIR /opt/sqs
+ADD sqlite/sqs sqlite/better_sqlite3.node ./
+
 WORKDIR /
 ADD build-target-paths.sh .
 ADD runner.sh ./bbcouch-runner.sh
-RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau ./build-target-paths.sh
+RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau ./build-target-paths.sh /opt/sqs/sqs
 CMD ["./bbcouch-runner.sh"]
@@ -8,6 +8,7 @@ chown -R couchdb:couchdb ${DATA_DIR}/couch
 /build-target-paths.sh
 /opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
 /docker-entrypoint.sh /opt/couchdb/bin/couchdb &
+/opt/sqs/sqs --server "http://localhost:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 &
 sleep 10
 curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_users
 curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_replicator
@@ -40,15 +40,15 @@ services:
       - PROXY_ADDRESS=host.docker.internal

   couchdb-service:
-    # platform: linux/amd64
     container_name: budi-couchdb3-dev
     restart: on-failure
-    image: budibase/couchdb
+    image: couch-sqs
     environment:
       - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
       - COUCHDB_USER=${COUCH_DB_USER}
     ports:
       - "${COUCH_DB_PORT}:5984"
+      - "4984:4984"
     volumes:
       - couchdb_data:/data

@@ -99,3 +99,4 @@ export const APP_PREFIX = DocumentType.APP + SEPARATOR
 export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
 export const APP_DEV_PREFIX = APP_DEV
 export const BUDIBASE_DATASOURCE_TYPE = "budibase"
+export const SQLITE_DESIGN_DOC_ID = "_design/sqlite"
@@ -16,6 +16,7 @@ import { directCouchUrlCall } from "./utils"
 import { getPouchDB } from "./pouchDB"
 import { WriteStream, ReadStream } from "fs"
 import { newid } from "../../docIds/newid"
+import { SQLITE_DESIGN_DOC_ID } from "../../constants"

 function buildNano(couchInfo: { url: string; cookie: string }) {
   return Nano({
@@ -180,6 +181,21 @@ export class DatabaseImpl implements Database {
     return this.updateOutput(() => db.list(params))
   }

+  async sql<T>(sql: string): Promise<T> {
+    const dbName = this.name
+    const url = `/${dbName}/${SQLITE_DESIGN_DOC_ID}`
+    const response = await directCouchUrlCall({
+      url: `${this.couchInfo.sqlUrl}/${url}`,
+      method: "POST",
+      cookie: this.couchInfo.cookie,
+      body: sql,
+    })
+    if (response.status > 300) {
+      throw new Error(await response.text())
+    }
+    return (await response.json()) as T
+  }
+
   async query<T>(
     viewName: string,
     params: DatabaseQueryOpts
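For illustration only: a minimal sketch of how this new sql<T>() method could be called from server code, assuming an app context has already been set; the table ID and query below are invented, not taken from the commit.

    import { context } from "@budibase/backend-core"
    import { Row } from "@budibase/types"

    // Sketch: posts a raw SQL string to the SQS endpoint configured via
    // COUCH_DB_SQL_URL. The table ID is a made-up example.
    async function exampleSqlQuery(): Promise<Row[]> {
      const db = context.getAppDB()
      return db.sql<Row[]>("SELECT * FROM `ta_example_table_id` LIMIT 10")
    }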
@@ -25,6 +25,7 @@ export const getCouchInfo = (connection?: string) => {
   const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
   return {
     url: urlInfo.url!,
+    sqlUrl: env.COUCH_DB_SQL_URL,
     auth: {
       username: username,
       password: password,
@@ -30,8 +30,13 @@ export async function directCouchUrlCall({
     },
   }
   if (body && method !== "GET") {
-    params.body = JSON.stringify(body)
-    params.headers["Content-Type"] = "application/json"
+    if (typeof body === "string") {
+      params.body = body
+      params.headers["Content-Type"] = "text/plain"
+    } else {
+      params.body = JSON.stringify(body)
+      params.headers["Content-Type"] = "application/json"
+    }
   }
   return await fetch(checkSlashesInUrl(encodeURI(url)), params)
 }
@@ -89,6 +89,7 @@ const environment = {
   ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
   API_ENCRYPTION_KEY: getAPIEncryptionKey(),
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
+  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4984",
   COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
   COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
   GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,
@@ -4,11 +4,11 @@ import { get } from "svelte/store"

 export function getTableFields(linkField) {
   const table = get(tables).list.find(table => table._id === linkField.tableId)
-  if (!table || !table.sql) {
+  if (!table) {
     return []
   }
   const linkFields = getFields(Object.values(table.schema), {
-    allowLinks: false,
+    tableFields: true,
   })
   return linkFields.map(field => ({
     ...field,
@@ -16,11 +16,11 @@ export function getTableFields(linkField) {
   }))
 }

-export function getFields(fields, { allowLinks } = { allowLinks: true }) {
+export function getFields(fields, { tableFields }) {
   let filteredFields = fields.filter(
     field => !BannedSearchTypes.includes(field.type)
   )
-  if (allowLinks) {
+  if (!tableFields) {
     const linkFields = fields.filter(field => field.type === "link")
     for (let linkField of linkFields) {
       // only allow one depth of SQL relationship filtering
@@ -6,7 +6,6 @@ import {
   IncludeRelationship,
   Operation,
   PaginationJson,
-  RelationshipsJson,
   RelationshipTypes,
   Row,
   SearchFilters,
@@ -18,19 +17,21 @@ import {
   breakExternalTableId,
   breakRowIdField,
   convertRowId,
-  generateRowIdField,
-  getPrimaryDisplay,
   isRowId,
   isSQL,
 } from "../../../integrations/utils"
-import { getDatasourceAndQuery } from "./utils"
+import {
+  getDatasourceAndQuery,
+  generateIdForRow,
+  buildExternalRelationships,
+  buildSqlFieldList,
+  sqlOutputProcessing,
+} from "./utils"
 import { FieldTypes } from "../../../constants"
 import { processObjectSync } from "@budibase/string-templates"
 import { cloneDeep } from "lodash/fp"
-import { processDates, processFormulas } from "../../../utilities/rowProcessor"
 import { db as dbCore } from "@budibase/backend-core"
 import sdk from "../../../sdk"
-import { isEditableColumn } from "../../../sdk/app/tables/validation"

 export interface ManyRelationship {
   tableId?: string
@@ -146,34 +147,6 @@ function cleanupConfig(config: RunConfig, table: Table): RunConfig {
   return config
 }

-function generateIdForRow(
-  row: Row | undefined,
-  table: Table,
-  isLinked: boolean = false
-): string {
-  const primary = table.primary
-  if (!row || !primary) {
-    return ""
-  }
-  // build id array
-  let idParts = []
-  for (let field of primary) {
-    let fieldValue = extractFieldValue({
-      row,
-      tableName: table.name,
-      fieldName: field,
-      isLinked,
-    })
-    if (fieldValue) {
-      idParts.push(fieldValue)
-    }
-  }
-  if (idParts.length === 0) {
-    return ""
-  }
-  return generateRowIdField(idParts)
-}
-
 function getEndpoint(tableId: string | undefined, operation: string) {
   if (!tableId) {
     return {}
@@ -186,74 +159,6 @@ function getEndpoint(tableId: string | undefined, operation: string) {
   }
 }

-// need to handle table name + field or just field, depending on if relationships used
-function extractFieldValue({
-  row,
-  tableName,
-  fieldName,
-  isLinked,
-}: {
-  row: Row
-  tableName: string
-  fieldName: string
-  isLinked: boolean
-}) {
-  let value = row[`${tableName}.${fieldName}`]
-  if (value == null && !isLinked) {
-    value = row[fieldName]
-  }
-  return value
-}
-
-function basicProcessing({
-  row,
-  table,
-  isLinked,
-}: {
-  row: Row
-  table: Table
-  isLinked: boolean
-}): Row {
-  const thisRow: Row = {}
-  // filter the row down to what is actually the row (not joined)
-  for (let field of Object.values(table.schema)) {
-    const fieldName = field.name
-
-    const value = extractFieldValue({
-      row,
-      tableName: table.name,
-      fieldName,
-      isLinked,
-    })
-
-    // all responses include "select col as table.col" so that overlaps are handled
-    if (value != null) {
-      thisRow[fieldName] = value
-    }
-  }
-  thisRow._id = generateIdForRow(row, table, isLinked)
-  thisRow.tableId = table._id
-  thisRow._rev = "rev"
-  return processFormulas(table, thisRow)
-}
-
-function fixArrayTypes(row: Row, table: Table) {
-  for (let [fieldName, schema] of Object.entries(table.schema)) {
-    if (
-      schema.type === FieldTypes.ARRAY &&
-      typeof row[fieldName] === "string"
-    ) {
-      try {
-        row[fieldName] = JSON.parse(row[fieldName])
-      } catch (err) {
-        // couldn't convert back to array, ignore
-        delete row[fieldName]
-      }
-    }
-  }
-  return row
-}
-
 function isOneSide(field: FieldSchema) {
   return (
     field.relationshipType && field.relationshipType.split("-")[0] === "one"
@@ -372,189 +277,6 @@ export class ExternalRequest {
     return { row: newRow, manyRelationships }
   }

-  squashRelationshipColumns(
-    table: Table,
-    row: Row,
-    relationships: RelationshipsJson[]
-  ): Row {
-    for (let relationship of relationships) {
-      const linkedTable = this.tables[relationship.tableName]
-      if (!linkedTable || !row[relationship.column]) {
-        continue
-      }
-      const display = linkedTable.primaryDisplay
-      for (let key of Object.keys(row[relationship.column])) {
-        let relatedRow: Row = row[relationship.column][key]
-        // add this row as context for the relationship
-        for (let col of Object.values(linkedTable.schema)) {
-          if (col.type === FieldType.LINK && col.tableId === table._id) {
-            relatedRow[col.name] = [row]
-          }
-        }
-        relatedRow = processFormulas(linkedTable, relatedRow)
-        let relatedDisplay
-        if (display) {
-          relatedDisplay = getPrimaryDisplay(relatedRow[display])
-        }
-        row[relationship.column][key] = {
-          primaryDisplay: relatedDisplay || "Invalid display column",
-          _id: relatedRow._id,
-        }
-      }
-    }
-    return row
-  }
-
-  /**
-   * This iterates through the returned rows and works out what elements of the rows
-   * actually match up to another row (based on primary keys) - this is pretty specific
-   * to SQL and the way that SQL relationships are returned based on joins.
-   * This is complicated, but the idea is that when a SQL query returns all the relations
-   * will be separate rows, with all of the data in each row. We have to decipher what comes
-   * from where (which tables) and how to convert that into budibase columns.
-   */
-  updateRelationshipColumns(
-    table: Table,
-    row: Row,
-    rows: { [key: string]: Row },
-    relationships: RelationshipsJson[]
-  ) {
-    const columns: { [key: string]: any } = {}
-    for (let relationship of relationships) {
-      const linkedTable = this.tables[relationship.tableName]
-      if (!linkedTable) {
-        continue
-      }
-      const fromColumn = `${table.name}.${relationship.from}`
-      const toColumn = `${linkedTable.name}.${relationship.to}`
-      // this is important when working with multiple relationships
-      // between the same tables, don't want to overlap/multiply the relations
-      if (
-        !relationship.through &&
-        row[fromColumn]?.toString() !== row[toColumn]?.toString()
-      ) {
-        continue
-      }
-
-      let linked = basicProcessing({ row, table: linkedTable, isLinked: true })
-      if (!linked._id) {
-        continue
-      }
-      columns[relationship.column] = linked
-    }
-    for (let [column, related] of Object.entries(columns)) {
-      if (!row._id) {
-        continue
-      }
-      const rowId: string = row._id
-      if (!Array.isArray(rows[rowId][column])) {
-        rows[rowId][column] = []
-      }
-      // make sure relationship hasn't been found already
-      if (
-        !rows[rowId][column].find(
-          (relation: Row) => relation._id === related._id
-        )
-      ) {
-        rows[rowId][column].push(related)
-      }
-    }
-    return rows
-  }
-
-  outputProcessing(
-    rows: Row[] = [],
-    table: Table,
-    relationships: RelationshipsJson[]
-  ) {
-    if (!rows || rows.length === 0 || rows[0].read === true) {
-      return []
-    }
-    let finalRows: { [key: string]: Row } = {}
-    for (let row of rows) {
-      const rowId = generateIdForRow(row, table)
-      row._id = rowId
-      // this is a relationship of some sort
-      if (finalRows[rowId]) {
-        finalRows = this.updateRelationshipColumns(
-          table,
-          row,
-          finalRows,
-          relationships
-        )
-        continue
-      }
-      const thisRow = fixArrayTypes(
-        basicProcessing({ row, table, isLinked: false }),
-        table
-      )
-      if (thisRow._id == null) {
-        throw "Unable to generate row ID for SQL rows"
-      }
-      finalRows[thisRow._id] = thisRow
-      // do this at end once its been added to the final rows
-      finalRows = this.updateRelationshipColumns(
-        table,
-        row,
-        finalRows,
-        relationships
-      )
-    }
-
-    // Process some additional data types
-    let finalRowArray = Object.values(finalRows)
-    finalRowArray = processDates(table, finalRowArray)
-    finalRowArray = processFormulas(table, finalRowArray) as Row[]
-
-    return finalRowArray.map((row: Row) =>
-      this.squashRelationshipColumns(table, row, relationships)
-    )
-  }
-
-  /**
-   * Gets the list of relationship JSON structures based on the columns in the table,
-   * this will be used by the underlying library to build whatever relationship mechanism
-   * it has (e.g. SQL joins).
-   */
-  buildRelationships(table: Table): RelationshipsJson[] {
-    const relationships = []
-    for (let [fieldName, field] of Object.entries(table.schema)) {
-      if (field.type !== FieldTypes.LINK) {
-        continue
-      }
-      const { tableName: linkTableName } = breakExternalTableId(field.tableId)
-      // no table to link to, this is not a valid relationships
-      if (!linkTableName || !this.tables[linkTableName]) {
-        continue
-      }
-      const linkTable = this.tables[linkTableName]
-      if (!table.primary || !linkTable.primary) {
-        continue
-      }
-      const definition: any = {
-        // if no foreign key specified then use the name of the field in other table
-        from: field.foreignKey || table.primary[0],
-        to: field.fieldName,
-        tableName: linkTableName,
-        // need to specify where to put this back into
-        column: fieldName,
-      }
-      if (field.through) {
-        const { tableName: throughTableName } = breakExternalTableId(
-          field.through
-        )
-        definition.through = throughTableName
-        // don't support composite keys for relationships
-        definition.from = field.throughTo || table.primary[0]
-        definition.to = field.throughFrom || linkTable.primary[0]
-        definition.fromPrimary = table.primary[0]
-        definition.toPrimary = linkTable.primary[0]
-      }
-      relationships.push(definition)
-    }
-    return relationships
-  }
-
   /**
    * This is a cached lookup, of relationship records, this is mainly for creating/deleting junction
    * information.
@@ -704,41 +426,6 @@ export class ExternalRequest {
     await Promise.all(promises)
   }

-  /**
-   * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
-   * you have column overlap in relationships, e.g. we join a few different tables and they all have the
-   * concept of an ID, but for some of them it will be null (if they say don't have a relationship).
-   * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
-   * is more performant and has the added benefit of protecting against this scenario.
-   */
-  buildFields(table: Table, includeRelations: boolean) {
-    function extractRealFields(table: Table, existing: string[] = []) {
-      return Object.entries(table.schema)
-        .filter(
-          column =>
-            column[1].type !== FieldTypes.LINK &&
-            column[1].type !== FieldTypes.FORMULA &&
-            !existing.find((field: string) => field === column[0])
-        )
-        .map(column => `${table.name}.${column[0]}`)
-    }
-    let fields = extractRealFields(table)
-    for (let field of Object.values(table.schema)) {
-      if (field.type !== FieldTypes.LINK || !includeRelations) {
-        continue
-      }
-      const { tableName: linkTableName } = breakExternalTableId(field.tableId)
-      if (linkTableName) {
-        const linkTable = this.tables[linkTableName]
-        if (linkTable) {
-          const linkedFields = extractRealFields(linkTable, fields)
-          fields = fields.concat(linkedFields)
-        }
-      }
-    }
-    return fields
-  }
-
   async run(config: RunConfig) {
     const { operation, tableId } = this
     let { datasourceId, tableName } = breakExternalTableId(tableId)
@@ -777,9 +464,9 @@ export class ExternalRequest {
       }
     }
     filters = buildFilters(id, filters || {}, table)
-    const relationships = this.buildRelationships(table)
+    const relationships = buildExternalRelationships(table, this.tables)

-    const includeSqlRelationships =
+    const incRelationships =
       config.includeSqlRelationships === IncludeRelationship.INCLUDE

     // clean up row on ingress using schema
|
||||||
},
|
},
|
||||||
resource: {
|
resource: {
|
||||||
// have to specify the fields to avoid column overlap (for SQL)
|
// have to specify the fields to avoid column overlap (for SQL)
|
||||||
fields: isSql ? this.buildFields(table, includeSqlRelationships) : [],
|
fields: isSql
|
||||||
|
? buildSqlFieldList(table, this.tables, {
|
||||||
|
relationships: incRelationships,
|
||||||
|
})
|
||||||
|
: [],
|
||||||
},
|
},
|
||||||
filters,
|
filters,
|
||||||
sort,
|
sort,
|
||||||
|
@@ -825,7 +516,12 @@ export class ExternalRequest {
         processed.manyRelationships
       )
     }
-    const output = this.outputProcessing(response, table, relationships)
+    const output = sqlOutputProcessing(
+      response,
+      table,
+      this.tables,
+      relationships
+    )
     // if reading it'll just be an array of rows, return whole thing
     return operation === Operation.READ && Array.isArray(response)
       ? output
@@ -14,8 +14,8 @@ import {
 } from "../../../utilities/rowProcessor"
 import { FieldTypes } from "../../../constants"
 import * as utils from "./utils"
-import { fullSearch, paginatedSearch } from "./internalSearch"
-import { getGlobalUsersFromMetadata } from "../../../utilities/global"
+// import { fullSearch, paginatedSearch } from "./internalSearch"
+// import { getGlobalUsersFromMetadata } from "../../../utilities/global"
 import * as inMemoryViews from "../../../db/inMemoryView"
 import env from "../../../environment"
 import {
|
||||||
Row,
|
Row,
|
||||||
Table,
|
Table,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
|
import { sqlSearch } from "./internalSql"
|
||||||
|
|
||||||
import { cleanExportRows } from "./utils"
|
import { cleanExportRows } from "./utils"
|
||||||
|
|
||||||
|
@@ -355,43 +356,44 @@ export async function bulkDestroy(ctx: UserCtx) {
 }

 export async function search(ctx: UserCtx) {
-  // Fetch the whole table when running in cypress, as search doesn't work
-  if (!env.COUCH_DB_URL && env.isCypress()) {
-    return { rows: await fetch(ctx) }
-  }
-
-  const { tableId } = ctx.params
-  const db = context.getAppDB()
-  const { paginate, query, ...params } = ctx.request.body
-  params.version = ctx.version
-  params.tableId = tableId
-
-  let table
-  if (params.sort && !params.sortType) {
-    table = await db.get(tableId)
-    const schema = table.schema
-    const sortField = schema[params.sort]
-    params.sortType = sortField.type == "number" ? "number" : "string"
-  }
-
-  let response
-  if (paginate) {
-    response = await paginatedSearch(query, params)
-  } else {
-    response = await fullSearch(query, params)
-  }
-
-  // Enrich search results with relationships
-  if (response.rows && response.rows.length) {
-    // enrich with global users if from users table
-    if (tableId === InternalTables.USER_METADATA) {
-      response.rows = await getGlobalUsersFromMetadata(response.rows)
-    }
-    table = table || (await db.get(tableId))
-    response.rows = await outputProcessing(table, response.rows)
-  }
-
-  return response
+  return sqlSearch(ctx)
+  // // Fetch the whole table when running in cypress, as search doesn't work
+  // if (!env.COUCH_DB_URL && env.isCypress()) {
+  //   return { rows: await fetch(ctx) }
+  // }
+  //
+  // const { tableId } = ctx.params
+  // const db = context.getAppDB()
+  // const { paginate, query, ...params } = ctx.request.body
+  // params.version = ctx.version
+  // params.tableId = tableId
+  //
+  // let table
+  // if (params.sort && !params.sortType) {
+  //   table = await db.get(tableId)
+  //   const schema = table.schema
+  //   const sortField = schema[params.sort]
+  //   params.sortType = sortField.type == "number" ? "number" : "string"
+  // }
+  //
+  // let response
+  // if (paginate) {
+  //   response = await paginatedSearch(query, params)
+  // } else {
+  //   response = await fullSearch(query, params)
+  // }
+  //
+  // // Enrich search results with relationships
+  // if (response.rows && response.rows.length) {
+  //   // enrich with global users if from users table
+  //   if (tableId === InternalTables.USER_METADATA) {
+  //     response.rows = await getGlobalUsersFromMetadata(response.rows)
+  //   }
+  //   table = table || (await db.get(tableId))
+  //   response.rows = await outputProcessing(table, response.rows)
+  // }
+  //
+  // return response
 }

 export async function exportRows(ctx: UserCtx) {
|
||||||
}
|
}
|
||||||
const { columns, query } = ctx.request.body
|
const { columns, query } = ctx.request.body
|
||||||
|
|
||||||
let result
|
let result: Row[] = []
|
||||||
if (rowIds) {
|
if (rowIds) {
|
||||||
let response = (
|
let response = (
|
||||||
await db.allDocs({
|
await db.allDocs({
|
||||||
|
@ -413,7 +415,7 @@ export async function exportRows(ctx: UserCtx) {
|
||||||
})
|
})
|
||||||
).rows.map(row => row.doc)
|
).rows.map(row => row.doc)
|
||||||
|
|
||||||
result = await outputProcessing(table, response)
|
result = (await outputProcessing(table, response)) as Row[]
|
||||||
} else if (query) {
|
} else if (query) {
|
||||||
let searchResponse = await search(ctx)
|
let searchResponse = await search(ctx)
|
||||||
result = searchResponse.rows
|
result = searchResponse.rows
|
||||||
|
|
|
@@ -0,0 +1,158 @@
+import {
+  FieldType,
+  Operation,
+  QueryJson,
+  Row,
+  SearchFilters,
+  SortType,
+  Table,
+  UserCtx,
+} from "@budibase/types"
+import SqlQueryBuilder from "../../../integrations/base/sql"
+import { SqlClient } from "../../../integrations/utils"
+import { buildInternalRelationships, sqlOutputProcessing } from "./utils"
+import sdk from "../../../sdk"
+import { context } from "@budibase/backend-core"
+import { CONSTANT_INTERNAL_ROW_COLS } from "../../../db/utils"
+
+function buildInternalFieldList(
+  table: Table,
+  tables: Table[],
+  opts: { relationships: boolean } = { relationships: true }
+) {
+  let fieldList: string[] = []
+  fieldList = fieldList.concat(
+    CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
+  )
+  for (let col of Object.values(table.schema)) {
+    const isLink = col.type === FieldType.LINK
+    if (isLink && !opts.relationships) {
+      continue
+    }
+    if (isLink) {
+      const relatedTable = tables.find(table => table._id === col.tableId)!
+      fieldList = fieldList.concat(
+        buildInternalFieldList(relatedTable, tables, { relationships: false })
+      )
+    } else {
+      fieldList.push(`${table._id}.${col.name}`)
+    }
+  }
+  return fieldList
+}
+
+function tableInFilter(name: string) {
+  return `:${name}.`
+}
+
+function cleanupFilters(filters: SearchFilters, tables: Table[]) {
+  for (let filter of Object.values(filters)) {
+    if (typeof filter !== "object") {
+      continue
+    }
+    for (let [key, keyFilter] of Object.entries(filter)) {
+      if (keyFilter === "") {
+        delete filter[key]
+      }
+
+      // relationship, switch to table ID
+      const tableRelated = tables.find(table =>
+        key.includes(tableInFilter(table.originalName!))
+      )
+      if (tableRelated) {
+        filter[
+          key.replace(
+            tableInFilter(tableRelated.originalName!),
+            tableInFilter(tableRelated._id!)
+          )
+        ] = filter[key]
+        delete filter[key]
+      }
+    }
+  }
+  return filters
+}
+
+function buildTableMap(tables: Table[]) {
+  const tableMap: Record<string, Table> = {}
+  for (let table of tables) {
+    // update the table name, should never query by name for SQLite
+    table.originalName = table.name
+    table.name = table._id!
+    tableMap[table._id!] = table
+  }
+  return tableMap
+}
+
+export async function sqlSearch(ctx: UserCtx) {
+  const { tableId } = ctx.params
+  const { paginate, query, ...params } = ctx.request.body
+
+  const builder = new SqlQueryBuilder(SqlClient.SQL_LITE)
+  const allTables = await sdk.tables.getAllInternalTables()
+  const allTablesMap = buildTableMap(allTables)
+  const table = allTables.find(table => table._id === tableId)
+  if (!table) {
+    ctx.throw(400, "Unable to find table")
+  }
+
+  const relationships = buildInternalRelationships(table)
+
+  const request: QueryJson = {
+    endpoint: {
+      // not important, we query ourselves
+      datasourceId: "internal",
+      entityId: table._id!,
+      operation: Operation.READ,
+    },
+    filters: cleanupFilters(query, allTables),
+    table,
+    meta: {
+      table,
+      tables: allTablesMap,
+    },
+    resource: {
+      fields: buildInternalFieldList(table, allTables),
+    },
+    relationships,
+  }
+  // make sure only rows returned
+  request.filters!.equal = {
+    ...request.filters?.equal,
+    type: "row",
+  }
+
+  if (params.sort && !params.sortType) {
+    const sortField = table.schema[params.sort]
+    const sortType = sortField.type == "number" ? "number" : "string"
+    request.sort = {
+      [sortField.name]: {
+        direction: params.sortOrder,
+        type: sortType as SortType,
+      },
+    }
+  }
+  if (paginate) {
+    request.paginate = {
+      limit: params.limit,
+      page: params.bookmark,
+    }
+  }
+  let sql = builder._query(request, {
+    disableReturning: true,
+    disablePreparedStatements: true,
+  })
+
+  // quick hack for docIds
+  sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
+  sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
+
+  const db = context.getAppDB()
+  const rows = await db.sql<Row[]>(sql)
+
+  return {
+    rows: sqlOutputProcessing(rows, table, allTablesMap, relationships, {
+      internal: true,
+    }),
+  }
+}
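For illustration only: a rough before/after of the filter re-keying that cleanupFilters performs above. The table name, _id and filter values are invented; the point is that keys referencing a related table by its original name are re-keyed by the table's _id (buildTableMap renames internal tables to their IDs), and empty-string filters are dropped.

    // Hypothetical input/output for cleanupFilters, assuming a related table
    // originally named "clients" whose _id is "ta_abc123".
    const before = {
      string: {
        "1:clients.name": "Acme", // relationship column, keyed by table name
        description: "",          // empty filters are deleted
      },
    }
    const after = {
      string: {
        "1:ta_abc123.name": "Acme", // re-keyed by the table _id
      },
    }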
@@ -0,0 +1,107 @@
+// need to handle table name + field or just field, depending on if relationships used
+import { Row, Table } from "@budibase/types"
+import { generateRowIdField } from "../../../../integrations/utils"
+import { processFormulas } from "../../../../utilities/rowProcessor"
+import { FieldTypes } from "../../../../constants"
+import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+
+function extractFieldValue({
+  row,
+  tableName,
+  fieldName,
+  isLinked,
+}: {
+  row: Row
+  tableName: string
+  fieldName: string
+  isLinked: boolean
+}) {
+  let value = row[`${tableName}.${fieldName}`]
+  if (value == null && !isLinked) {
+    value = row[fieldName]
+  }
+  return value
+}
+
+export function generateIdForRow(
+  row: Row | undefined,
+  table: Table,
+  isLinked: boolean = false
+): string {
+  const primary = table.primary
+  if (!row || !primary) {
+    return ""
+  }
+  // build id array
+  let idParts = []
+  for (let field of primary) {
+    let fieldValue = extractFieldValue({
+      row,
+      tableName: table.name,
+      fieldName: field,
+      isLinked,
+    })
+    if (fieldValue) {
+      idParts.push(fieldValue)
+    }
+  }
+  if (idParts.length === 0) {
+    return ""
+  }
+  return generateRowIdField(idParts)
+}
+
+export function basicProcessing({
+  row,
+  table,
+  isLinked,
+  internal,
+}: {
+  row: Row
+  table: Table
+  isLinked: boolean
+  internal?: boolean
+}): Row {
+  let thisRow: Row = {}
+  // filter the row down to what is actually the row (not joined)
+  let toIterate = Object.keys(table.schema)
+  if (internal) {
+    toIterate = toIterate.concat(CONSTANT_INTERNAL_ROW_COLS)
+  }
+  for (let fieldName of toIterate) {
+    const value = extractFieldValue({
+      row,
+      tableName: internal ? table._id! : table.name,
+      fieldName,
+      isLinked,
+    })
+
+    // all responses include "select col as table.col" so that overlaps are handled
+    if (value != null) {
+      thisRow[fieldName] = value
+    }
+  }
+  if (!internal) {
+    thisRow._id = generateIdForRow(row, table, isLinked)
+    thisRow.tableId = table._id
+    thisRow._rev = "rev"
+  }
+  return processFormulas(table, thisRow)
+}
+
+export function fixArrayTypes(row: Row, table: Table) {
+  for (let [fieldName, schema] of Object.entries(table.schema)) {
+    if (
+      schema.type === FieldTypes.ARRAY &&
+      typeof row[fieldName] === "string"
+    ) {
+      try {
+        row[fieldName] = JSON.parse(row[fieldName])
+      } catch (err) {
+        // couldn't convert back to array, ignore
+        delete row[fieldName]
+      }
+    }
+  }
+  return row
+}
@@ -0,0 +1,3 @@
+export * from "./basic"
+export * from "./sqlUtils"
+export * from "./utils"
@@ -0,0 +1,224 @@
+import { FieldType, RelationshipsJson, Row, Table } from "@budibase/types"
+import { processFormulas } from "../../../../utilities/rowProcessor"
+import {
+  breakExternalTableId,
+  getPrimaryDisplay,
+} from "../../../../integrations/utils"
+import { basicProcessing } from "./basic"
+import { generateJunctionTableID } from "../../../../db/utils"
+
+type TableMap = Record<string, Table>
+
+export function squashRelationshipColumns(
+  table: Table,
+  tables: TableMap,
+  row: Row,
+  relationships: RelationshipsJson[]
+): Row {
+  for (let relationship of relationships) {
+    const linkedTable = tables[relationship.tableName]
+    if (!linkedTable || !row[relationship.column]) {
+      continue
+    }
+    const display = linkedTable.primaryDisplay
+    for (let key of Object.keys(row[relationship.column])) {
+      let relatedRow: Row = row[relationship.column][key]
+      // add this row as context for the relationship
+      for (let col of Object.values(linkedTable.schema)) {
+        if (col.type === FieldType.LINK && col.tableId === table._id) {
+          relatedRow[col.name] = [row]
+        }
+      }
+      relatedRow = processFormulas(linkedTable, relatedRow)
+      let relatedDisplay
+      if (display) {
+        relatedDisplay = getPrimaryDisplay(relatedRow[display])
+      }
+      row[relationship.column][key] = {
+        primaryDisplay: relatedDisplay || "Invalid display column",
+        _id: relatedRow._id,
+      }
+    }
+  }
+  return row
+}
+
+/**
+ * This iterates through the returned rows and works out what elements of the rows
+ * actually match up to another row (based on primary keys) - this is pretty specific
+ * to SQL and the way that SQL relationships are returned based on joins.
+ * This is complicated, but the idea is that when a SQL query returns all the relations
+ * will be separate rows, with all of the data in each row. We have to decipher what comes
+ * from where (which tables) and how to convert that into budibase columns.
+ */
+export function updateRelationshipColumns(
+  table: Table,
+  tables: TableMap,
+  row: Row,
+  rows: { [key: string]: Row },
+  relationships: RelationshipsJson[],
+  opts?: { internal?: boolean }
+) {
+  const columns: { [key: string]: any } = {}
+  for (let relationship of relationships) {
+    const linkedTable = tables[relationship.tableName]
+    if (!linkedTable) {
+      continue
+    }
+    const fromColumn = `${table.name}.${relationship.from}`
+    const toColumn = `${linkedTable.name}.${relationship.to}`
+    // this is important when working with multiple relationships
+    // between the same tables, don't want to overlap/multiply the relations
+    if (
+      !relationship.through &&
+      row[fromColumn]?.toString() !== row[toColumn]?.toString()
+    ) {
+      continue
+    }
+
+    let linked = basicProcessing({
+      row,
+      table: linkedTable,
+      isLinked: true,
+      internal: opts?.internal,
+    })
+    if (!linked._id) {
+      continue
+    }
+    columns[relationship.column] = linked
+  }
+  for (let [column, related] of Object.entries(columns)) {
+    let rowId: string = row._id!
+    if (opts?.internal) {
+      const { _id } = basicProcessing({
+        row,
+        table,
+        isLinked: false,
+        internal: opts?.internal,
+      })
+      rowId = _id!
+    }
+    if (!rowId) {
+      continue
+    }
+    if (!Array.isArray(rows[rowId][column])) {
+      rows[rowId][column] = []
+    }
+    // make sure relationship hasn't been found already
+    if (
+      !rows[rowId][column].find((relation: Row) => relation._id === related._id)
+    ) {
+      rows[rowId][column].push(related)
+    }
+  }
+  return rows
+}
+
+/**
+ * Gets the list of relationship JSON structures based on the columns in the table,
+ * this will be used by the underlying library to build whatever relationship mechanism
+ * it has (e.g. SQL joins).
+ */
+export function buildExternalRelationships(
+  table: Table,
+  tables: TableMap
+): RelationshipsJson[] {
+  const relationships = []
+  for (let [fieldName, field] of Object.entries(table.schema)) {
+    if (field.type !== FieldType.LINK) {
+      continue
+    }
+    const { tableName: linkTableName } = breakExternalTableId(field.tableId)
+    // no table to link to, this is not a valid relationships
+    if (!linkTableName || !tables[linkTableName]) {
+      continue
+    }
+    const linkTable = tables[linkTableName]
+    if (!table.primary || !linkTable.primary) {
+      continue
+    }
+    const definition: any = {
+      // if no foreign key specified then use the name of the field in other table
+      from: field.foreignKey || table.primary[0],
+      to: field.fieldName,
+      tableName: linkTableName,
+      // need to specify where to put this back into
+      column: fieldName,
+    }
+    if (field.through) {
+      const { tableName: throughTableName } = breakExternalTableId(
+        field.through
+      )
+      definition.through = throughTableName
+      // don't support composite keys for relationships
+      definition.from = field.throughTo || table.primary[0]
+      definition.to = field.throughFrom || linkTable.primary[0]
+      definition.fromPrimary = table.primary[0]
+      definition.toPrimary = linkTable.primary[0]
+    }
+    relationships.push(definition)
+  }
+  return relationships
+}
+
+export function buildInternalRelationships(table: Table): RelationshipsJson[] {
+  const relationships: RelationshipsJson[] = []
+  const links = Object.values(table.schema).filter(
+    column => column.type === FieldType.LINK
+  )
+  const tableId = table._id!
+  for (let link of links) {
+    const linkTableId = link.tableId!
+    const junctionTableId = generateJunctionTableID(tableId, linkTableId)
+    const isFirstTable = tableId > linkTableId
+    relationships.push({
+      through: junctionTableId,
+      column: link.name,
+      tableName: linkTableId,
+      fromPrimary: "_id",
+      to: isFirstTable ? "doc2.rowId" : "doc1.rowId",
+      from: isFirstTable ? "doc1.rowId" : "doc2.rowId",
+      toPrimary: "_id",
+    })
+  }
+  return relationships
+}
+
+/**
+ * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
+ * you have column overlap in relationships, e.g. we join a few different tables and they all have the
+ * concept of an ID, but for some of them it will be null (if they say don't have a relationship).
+ * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
+ * is more performant and has the added benefit of protecting against this scenario.
+ */
+export function buildSqlFieldList(
+  table: Table,
+  tables: TableMap,
+  opts?: { relationships: boolean }
+) {
+  function extractRealFields(table: Table, existing: string[] = []) {
+    return Object.entries(table.schema)
+      .filter(
+        column =>
+          column[1].type !== FieldType.LINK &&
+          column[1].type !== FieldType.FORMULA &&
+          !existing.find((field: string) => field === column[0])
+      )
+      .map(column => `${table.name}.${column[0]}`)
+  }
+  let fields = extractRealFields(table)
+  for (let field of Object.values(table.schema)) {
+    if (field.type !== FieldType.LINK || !opts?.relationships) {
+      continue
+    }
+    const { tableName: linkTableName } = breakExternalTableId(field.tableId)
+    if (linkTableName) {
+      const linkTable = tables[linkTableName]
+      if (linkTable) {
+        const linkedFields = extractRealFields(linkTable, fields)
+        fields = fields.concat(linkedFields)
+      }
+    }
+  }
+  return fields
+}
@@ -1,11 +1,26 @@
-import { InternalTables } from "../../../db/utils"
-import * as userController from "../user"
-import { FieldTypes } from "../../../constants"
+import { InternalTables } from "../../../../db/utils"
+import * as userController from "../../user"
+import { FieldTypes } from "../../../../constants"
 import { context } from "@budibase/backend-core"
-import { makeExternalQuery } from "../../../integrations/base/query"
-import { FieldType, Row, Table, UserCtx } from "@budibase/types"
-import { Format } from "../view/exporters"
-import sdk from "../../../sdk"
+import { makeExternalQuery } from "../../../../integrations/base/query"
+import {
+  FieldType,
+  RelationshipsJson,
+  Row,
+  Table,
+  UserCtx,
+} from "@budibase/types"
+import { Format } from "../../view/exporters"
+import sdk from "../../../../sdk"
+import {
+  processDates,
+  processFormulas,
+} from "../../../../utilities/rowProcessor"
+import {
+  squashRelationshipColumns,
+  updateRelationshipColumns,
+} from "./sqlUtils"
+import { basicProcessing, generateIdForRow, fixArrayTypes } from "./basic"

 const validateJs = require("validate.js")
 const { cloneDeep } = require("lodash/fp")
@@ -177,3 +192,65 @@ export function getTableId(ctx: any) {
     return ctx.params.viewName
   }
 }
+
+export function sqlOutputProcessing(
+  rows: Row[] = [],
+  table: Table,
+  tables: Record<string, Table>,
+  relationships: RelationshipsJson[],
+  opts?: { internal?: boolean }
+) {
+  if (!rows || rows.length === 0 || rows[0].read === true) {
+    return []
+  }
+  let finalRows: { [key: string]: Row } = {}
+  for (let row of rows) {
+    let rowId = row._id
+    if (!rowId) {
+      rowId = generateIdForRow(row, table)
+      row._id = rowId
+    }
+    // this is a relationship of some sort
+    if (finalRows[rowId]) {
+      finalRows = updateRelationshipColumns(
+        table,
+        tables,
+        row,
+        finalRows,
+        relationships
+      )
+      continue
+    }
+    const thisRow = fixArrayTypes(
+      basicProcessing({
+        row,
+        table,
+        isLinked: false,
+        internal: opts?.internal,
+      }),
+      table
+    )
+    if (thisRow._id == null) {
+      throw "Unable to generate row ID for SQL rows"
+    }
+    finalRows[thisRow._id] = thisRow
+    // do this at end once its been added to the final rows
+    finalRows = updateRelationshipColumns(
+      table,
+      tables,
+      row,
+      finalRows,
+      relationships,
+      opts
+    )
+  }
+
+  // Process some additional data types
+  let finalRowArray = Object.values(finalRows)
+  finalRowArray = processDates(table, finalRowArray)
+  finalRowArray = processFormulas(table, finalRowArray) as Row[]
+
+  return finalRowArray.map((row: Row) =>
+    squashRelationshipColumns(table, tables, row, relationships)
+  )
+}
@@ -0,0 +1,75 @@
+import { context, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
+import { FieldType, SQLiteDefinition, SQLiteType, Table } from "@budibase/types"
+import { cloneDeep } from "lodash"
+import sdk from "../../../sdk"
+import { CONSTANT_INTERNAL_ROW_COLS } from "../../../db/utils"
+
+const BASIC_SQLITE_DOC: SQLiteDefinition = {
+  _id: SQLITE_DESIGN_DOC_ID,
+  language: "sqlite",
+  sql: {
+    tables: {},
+    options: {
+      table_name: "tableId",
+    },
+  },
+}
+
+const FieldTypeMap: Record<FieldType, SQLiteType> = {
+  [FieldType.BOOLEAN]: SQLiteType.NUMERIC,
+  [FieldType.DATETIME]: SQLiteType.TEXT,
+  [FieldType.FORMULA]: SQLiteType.TEXT,
+  [FieldType.LONGFORM]: SQLiteType.TEXT,
+  [FieldType.NUMBER]: SQLiteType.REAL,
+  [FieldType.STRING]: SQLiteType.TEXT,
+  [FieldType.AUTO]: SQLiteType.TEXT,
+  [FieldType.JSON]: SQLiteType.BLOB,
+  [FieldType.OPTIONS]: SQLiteType.BLOB,
+  [FieldType.INTERNAL]: SQLiteType.BLOB,
+  [FieldType.BARCODEQR]: SQLiteType.BLOB,
+  [FieldType.ATTACHMENT]: SQLiteType.BLOB,
+  [FieldType.ARRAY]: SQLiteType.BLOB,
+  [FieldType.LINK]: SQLiteType.BLOB,
+}
+
+function mapTable(table: Table): { [key: string]: SQLiteType } {
+  const fields: Record<string, SQLiteType> = {}
+  for (let [key, column] of Object.entries(table.schema)) {
+    fields[key] = FieldTypeMap[column.type]
+  }
+  // there are some extra columns to map - add these in
+  const constantMap: Record<string, SQLiteType> = {}
+  CONSTANT_INTERNAL_ROW_COLS.forEach(col => {
+    constantMap[col] = SQLiteType.TEXT
+  })
+  return {
+    ...constantMap,
+    ...fields,
+  }
+}
+
+// nothing exists, need to iterate though existing tables
+async function buildBaseDefinition(): Promise<SQLiteDefinition> {
+  const tables = await sdk.tables.getAllInternalTables()
+  const definition = cloneDeep(BASIC_SQLITE_DOC)
+  for (let table of tables) {
+    definition.sql.tables[table._id!] = {
+      fields: mapTable(table),
+    }
+  }
+  return definition
+}
+
+export async function addTableToSqlite(table: Table) {
+  const db = context.getAppDB()
+  let definition: SQLiteDefinition
+  try {
+    definition = await db.get(SQLITE_DESIGN_DOC_ID)
+  } catch (err) {
+    definition = await buildBaseDefinition()
+  }
+  definition.sql.tables[table._id!] = {
+    fields: mapTable(table),
+  }
+  await db.put(definition)
+}
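For illustration only: the design document that addTableToSqlite ends up writing might look roughly like the sketch below. The table ID and user columns are invented; the constant row columns come from CONSTANT_INTERNAL_ROW_COLS and the type mapping from FieldTypeMap.

    import { SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
    import { SQLiteDefinition, SQLiteType } from "@budibase/types"

    // Invented table ID and schema; real documents are produced by mapTable().
    const exampleDefinition: SQLiteDefinition = {
      _id: SQLITE_DESIGN_DOC_ID, // "_design/sqlite"
      language: "sqlite",
      sql: {
        options: { table_name: "tableId" },
        tables: {
          ta_example_table_id: {
            fields: {
              // constant internal row columns
              _id: SQLiteType.TEXT,
              _rev: SQLiteType.TEXT,
              type: SQLiteType.TEXT,
              createdAt: SQLiteType.TEXT,
              updatedAt: SQLiteType.TEXT,
              tableId: SQLiteType.TEXT,
              // the table's own schema, mapped through FieldTypeMap
              name: SQLiteType.TEXT, // FieldType.STRING
              age: SQLiteType.REAL,  // FieldType.NUMBER
            },
          },
        },
      },
    }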
@@ -27,6 +27,7 @@ import {
   SourceName,
   Table,
 } from "@budibase/types"
+import { addTableToSqlite } from "./sqlite"

 export async function clearColumns(table: any, columnNames: any) {
   const db: Database = context.getAppDB()
|
||||||
async after(table: any) {
|
async after(table: any) {
|
||||||
table = await handleSearchIndexes(table)
|
table = await handleSearchIndexes(table)
|
||||||
table = await handleDataImport(this.user, table, this.importRows)
|
table = await handleDataImport(this.user, table, this.importRows)
|
||||||
|
await addTableToSqlite(table)
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -1,4 +1,4 @@
-import { generateLinkID } from "../utils"
+import { generateLinkID, generateJunctionTableID } from "../utils"
 import { FieldTypes } from "../../constants"
 import { LinkDocument } from "@budibase/types"

|
||||||
class LinkDocumentImpl implements LinkDocument {
|
class LinkDocumentImpl implements LinkDocument {
|
||||||
_id: string
|
_id: string
|
||||||
type: string
|
type: string
|
||||||
|
tableId: string
|
||||||
doc1: {
|
doc1: {
|
||||||
rowId: string
|
rowId: string
|
||||||
fieldName: string
|
fieldName: string
|
||||||
|
@ -44,16 +45,20 @@ class LinkDocumentImpl implements LinkDocument {
|
||||||
fieldName2
|
fieldName2
|
||||||
)
|
)
|
||||||
this.type = FieldTypes.LINK
|
this.type = FieldTypes.LINK
|
||||||
this.doc1 = {
|
this.tableId = generateJunctionTableID(tableId1, tableId2)
|
||||||
|
const docA = {
|
||||||
tableId: tableId1,
|
tableId: tableId1,
|
||||||
fieldName: fieldName1,
|
fieldName: fieldName1,
|
||||||
rowId: rowId1,
|
rowId: rowId1,
|
||||||
}
|
}
|
||||||
this.doc2 = {
|
const docB = {
|
||||||
tableId: tableId2,
|
tableId: tableId2,
|
||||||
fieldName: fieldName2,
|
fieldName: fieldName2,
|
||||||
rowId: rowId2,
|
rowId: rowId2,
|
||||||
}
|
}
|
||||||
|
// have to determine which one will be doc1 - very important for SQL linking
|
||||||
|
this.doc1 = docA.tableId > docB.tableId ? docA : docB
|
||||||
|
this.doc2 = docA.tableId > docB.tableId ? docB : docA
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -1,5 +1,6 @@
 import newid from "./newid"
 import { db as dbCore } from "@budibase/backend-core"
+import { SQLiteType } from "@budibase/types"

 type Optional = string | null

|
||||||
export const generateUserMetadataID = dbCore.generateUserMetadataID
|
export const generateUserMetadataID = dbCore.generateUserMetadataID
|
||||||
export const getGlobalIDFromUserMetadataID =
|
export const getGlobalIDFromUserMetadataID =
|
||||||
dbCore.getGlobalIDFromUserMetadataID
|
dbCore.getGlobalIDFromUserMetadataID
|
||||||
|
export const CONSTANT_INTERNAL_ROW_COLS = [
|
||||||
|
"_id",
|
||||||
|
"_rev",
|
||||||
|
"type",
|
||||||
|
"createdAt",
|
||||||
|
"updatedAt",
|
||||||
|
"tableId",
|
||||||
|
]
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets parameters for retrieving tables, this is a utility function for the getDocParams function.
|
* Gets parameters for retrieving tables, this is a utility function for the getDocParams function.
|
||||||
|
@ -262,6 +271,12 @@ export function generatePluginID(name: string) {
|
||||||
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
|
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function generateJunctionTableID(tableId1: string, tableId2: string) {
|
||||||
|
const first = tableId1 > tableId2 ? tableId1 : tableId2
|
||||||
|
const second = tableId1 > tableId2 ? tableId2 : tableId1
|
||||||
|
return `${first}${SEPARATOR}${second}`
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This can be used with the db.allDocs to get a list of IDs
|
* This can be used with the db.allDocs to get a list of IDs
|
||||||
*/
|
*/
|
||||||
|
|
|
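For illustration only: the junction table ID is order-independent because the lexicographically greater table ID is always placed first, which is what lets LinkDocumentImpl (earlier in this diff) pick doc1/doc2 deterministically. A small sketch with invented IDs, assuming the helper above is in scope:

    // Both orderings of the same pair give the same junction ID, and the
    // greater of the two IDs is always the prefix.
    const idA = "ta_aaaaaaaaaaaa"
    const idB = "ta_bbbbbbbbbbbb"
    const junction1 = generateJunctionTableID(idA, idB)
    const junction2 = generateJunctionTableID(idB, idA)
    // junction1 === junction2, and both begin with idB followed by SEPARATOR and idA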
@@ -6,4 +6,5 @@

 export interface QueryOptions {
   disableReturning?: boolean
+  disablePreparedStatements?: boolean
 }
@@ -564,8 +564,12 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         throw `Operation type is not supported by SQL query builder`
     }

-    // @ts-ignore
-    return query.toSQL().toNative()
+    if (opts?.disablePreparedStatements) {
+      return query.toString()
+    } else {
+      // @ts-ignore
+      return query.toSQL().toNative()
+    }
   }

   async getReturningRow(queryFn: Function, json: QueryJson) {
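For illustration only: the two return paths differ in how bindings are handled. Assuming the builder wraps knex (as the SqlClient values suggest), toString() inlines the bindings into a single SQL string — which suits SQS, since db.sql() only forwards raw SQL text — while toSQL().toNative() keeps a parameterised statement with separate bindings. A rough sketch; table and column names are invented and the exact output strings are approximate:

    import knex from "knex"

    // Throwaway builder purely for illustration.
    const client = knex({ client: "better-sqlite3", useNullAsDefault: true })
    const query = client("ta_example").where({ type: "row" }).select("*")

    query.toString()
    // => roughly: select * from `ta_example` where `type` = 'row'

    query.toSQL().toNative()
    // => roughly: { sql: "select * from `ta_example` where `type` = ?", bindings: ["row"] }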
@@ -74,6 +74,7 @@ export enum SqlClient {
   POSTGRES = "pg",
   MY_SQL = "mysql2",
   ORACLE = "oracledb",
+  SQL_LITE = "better-sqlite3",
 }

 export function isExternalTable(tableId: string) {
@@ -14,3 +14,4 @@ export * from "./backup"
 export * from "./webhook"
 export * from "./links"
 export * from "./component"
+export * from "./sqlite"
@@ -2,6 +2,7 @@ import { Document } from "../document"

 export interface LinkDocument extends Document {
   type: string
+  tableId: string
   doc1: {
     rowId: string
     fieldName: string
@@ -0,0 +1,24 @@
+export enum SQLiteType {
+  REAL = "REAL",
+  TEXT = "VARCHAR",
+  INT = "INTEGER",
+  BLOB = "BLOB",
+  NUMERIC = "NUMERIC",
+}
+
+export interface SQLiteDefinition {
+  _id: string
+  language: string
+  sql: {
+    tables: {
+      [tableName: string]: {
+        fields: {
+          [key: string]: SQLiteType | { field: string; type: SQLiteType }
+        }
+      }
+    }
+    options: {
+      table_name: string
+    }
+  }
+}
@@ -74,6 +74,7 @@ export interface Table extends Document {
   type?: string
   views?: { [key: string]: View }
   name: string
+  originalName?: string
   primary?: string[]
   schema: TableSchema
   primaryDisplay?: string
@@ -100,6 +100,7 @@ export interface Database {
   ): Promise<Nano.DocumentInsertResponse>
   bulkDocs(documents: AnyDocument[]): Promise<Nano.DocumentBulkResponse[]>
   allDocs<T>(params: DatabaseQueryOpts): Promise<AllDocsResponse<T>>
+  sql<T>(sql: string): Promise<T>
   query<T>(
     viewName: string,
     params: DatabaseQueryOpts