AI functionally working E2E
parent 2d21afbe10
commit 0b9b9ab440
@@ -47,11 +47,11 @@
     prompt: OperationFieldTypes.BINDABLE_TEXT,
   },
   CLEAN_DATA: {
-    columns: OperationFieldTypes.MULTI_COLUMN,
+    column: OperationFieldTypes.COLUMN,
     prompt: OperationFieldTypes.BINDABLE_TEXT,
   },
   TRANSLATE: {
-    columns: OperationFieldTypes.MULTI_COLUMN,
+    column: OperationFieldTypes.COLUMN,
     language: OperationFieldTypes.BINDABLE_TEXT,
     prompt: OperationFieldTypes.BINDABLE_TEXT,
   },
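In this first hunk the CLEAN_DATA and TRANSLATE operations move from a multi-column field to a single-column field. Read together with the unchanged context lines, the resulting entries look roughly like the sketch below; the enclosing map's name is not shown in the hunk, so OperationFields here is an assumed placeholder.

// Sketch of the shape after this commit; "OperationFields" is an assumed name for the enclosing map.
const OperationFields = {
  CLEAN_DATA: {
    column: OperationFieldTypes.COLUMN, // was columns: OperationFieldTypes.MULTI_COLUMN
    prompt: OperationFieldTypes.BINDABLE_TEXT,
  },
  TRANSLATE: {
    column: OperationFieldTypes.COLUMN, // was columns: OperationFieldTypes.MULTI_COLUMN
    language: OperationFieldTypes.BINDABLE_TEXT,
    prompt: OperationFieldTypes.BINDABLE_TEXT,
  },
}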
@@ -102,8 +102,9 @@ export async function processAIColumns<T extends Row | Row[]>(
 ): Promise<T> {
   return tracer.trace("processAIColumns", {}, async span => {
     const numRows = Array.isArray(inputRows) ? inputRows.length : 1
-    span?.addTags({ table_id: table._id })
+    span?.addTags({ table_id: table._id, numRows })
     const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
+    const llm = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
     if (rows) {
       // Ensure we have snippet context
       await context.ensureSnippetContext()
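This hunk tags the processAIColumns span with the row count and hoists LLM construction out of the per-row work: a single tenant-scoped client is created once for the "gpt-4o-mini" model and reused for every row processed below. A minimal sketch of the intended call order, using only methods that appear in this commit (forCurrentTenant, buildPromptFromAIOperation, run); the other variables are assumed to be in scope as in the function body.

// Sketch only: call order for the client added above, not the full pro.ai surface.
const llm = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
const prompt = llm.buildPromptFromAIOperation({ schema, row }) // schema and row come from the column loop
const llmResponse = await llm.run(prompt) // one completion per row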
@@ -113,29 +114,34 @@ export async function processAIColumns<T extends Row | Row[]>(
           continue
         }

-        // const llm = pro.ai.LargeLanguageModel()
-        // if (
-        //   schema.formula == null ||
-        //   (dynamic && isStatic) ||
-        //   (!dynamic && !isStatic)
-        // ) {
-        //   continue
-        // }
-        // iterate through rows and process formula
-        for (let i = 0; i < rows.length; i++) {
-          let row = rows[i]
-          // let context = contextRows ? contextRows[i] : row
-          // let formula = schema.prompt
-          rows[i] = {
-            ...row,
-            [column]: tracer.trace("processAIColumn", {}, span => {
-              span?.addTags({ table_id: table._id, column })
-              // return processStringSync(formula, context)
-              // TODO: Add the AI stuff in to this
-              return "YEET AI"
-            }),
-          }
-        }
+        const rowUpdates = rows.map((row, i) => {
+          const contextRow = contextRows ? contextRows[i] : row
+          // TODO: Map the prompts with string-templates
+          // grab the operation based on the schema
+          // then check the types in the fields, and decide whether to pass them through string templates
+          // TODO: cleaner way to map to the schema, move things into BB types and check against the AI schema
+          for (const key in schema) {
+            if (["prompt", "categories"].includes(key)) {
+              schema[key] = processStringSync(schema[key], contextRow)
+            }
+          }
+
+          const prompt = llm.buildPromptFromAIOperation({ schema, row })
+
+          return tracer.trace("processAIColumn", {}, async span => {
+            span?.addTags({ table_id: table._id, column })
+            const llmResponse = await llm.run(prompt)
+            return {
+              ...row,
+              [column]: llmResponse
+            }
+          })
+        })
+
+        const processedRows = await Promise.all(rowUpdates)
+
+        // Promise.all is deterministic so can rely on the indexing here
+        processedRows.forEach((processedRow, index) => rows[index] = processedRow)
       }
     }
     return Array.isArray(inputRows) ? rows : rows[0]
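Taking only the added lines of the last hunk, the new per-row flow is: bind the user-supplied prompt and categories fields against the row context with processStringSync, build a prompt for the AI operation, run the LLM inside a processAIColumn trace span, and merge the results back by index once Promise.all resolves. The sketch below is a reassembly of those added lines for readability; it assumes rows, contextRows, schema, column, table, llm, tracer, and processStringSync are in scope exactly as in the diff.

// Reassembly of the added lines; no behaviour beyond what the diff shows.
const rowUpdates = rows.map((row, i) => {
  const contextRow = contextRows ? contextRows[i] : row
  // Bind the bindable fields (prompt, categories) against the row context
  for (const key in schema) {
    if (["prompt", "categories"].includes(key)) {
      schema[key] = processStringSync(schema[key], contextRow)
    }
  }
  const prompt = llm.buildPromptFromAIOperation({ schema, row })
  return tracer.trace("processAIColumn", {}, async span => {
    span?.addTags({ table_id: table._id, column })
    const llmResponse = await llm.run(prompt)
    return { ...row, [column]: llmResponse }
  })
})
const processedRows = await Promise.all(rowUpdates)
// Promise.all preserves input order, so index i maps back to rows[i]
processedRows.forEach((processedRow, index) => (rows[index] = processedRow))

One thing worth noting about the added lines as written: schema[key] is reassigned on the shared schema object inside the per-row map, so from the second row onward processStringSync appears to run over the already-bound value rather than the original template.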