diff --git a/package.json b/package.json index 75ceaba6c87..c597e90d4bd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "3.0.5", + "version": "3.0.6", "private": true, "homepage": "https://flowiseai.com", "workspaces": [ @@ -66,7 +66,7 @@ "sqlite3" ], "overrides": { - "axios": "1.10.0", + "axios": "1.12.0", "body-parser": "2.0.2", "braces": "3.0.3", "cross-spawn": "7.0.6", diff --git a/packages/components/credentials/CometApi.credential.ts b/packages/components/credentials/CometApi.credential.ts new file mode 100644 index 00000000000..58ec6061010 --- /dev/null +++ b/packages/components/credentials/CometApi.credential.ts @@ -0,0 +1,23 @@ +import { INodeCredential, INodeParams } from '../src/Interface' + +class CometApi implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Comet API' + this.name = 'cometApi' + this.version = 1.0 + this.inputs = [ + { + label: 'Comet API Key', + name: 'cometApiKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: CometApi } diff --git a/packages/components/credentials/SambanovaApi.credential.ts b/packages/components/credentials/SambanovaApi.credential.ts new file mode 100644 index 00000000000..60a7e13d8ae --- /dev/null +++ b/packages/components/credentials/SambanovaApi.credential.ts @@ -0,0 +1,23 @@ +import { INodeParams, INodeCredential } from '../src/Interface' + +class SambanovaApi implements INodeCredential { + label: string + name: string + version: number + inputs: INodeParams[] + + constructor() { + this.label = 'Sambanova API' + this.name = 'sambanovaApi' + this.version = 1.0 + this.inputs = [ + { + label: 'Sambanova Api Key', + name: 'sambanovaApiKey', + type: 'password' + } + ] + } +} + +module.exports = { credClass: SambanovaApi } diff --git a/packages/components/jest.config.js b/packages/components/jest.config.js new file mode 100644 index 00000000000..deffa4d4b11 --- 
/dev/null +++ b/packages/components/jest.config.js @@ -0,0 +1,15 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + roots: ['/nodes'], + transform: { + '^.+\\.tsx?$': 'ts-jest' + }, + testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$', + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + verbose: true, + testPathIgnorePatterns: ['/node_modules/', '/dist/'], + moduleNameMapper: { + '^../../../src/(.*)$': '/src/$1' + } +} diff --git a/packages/components/models.json b/packages/components/models.json index 3da8e990187..73a3c7bb7f2 100644 --- a/packages/components/models.json +++ b/packages/components/models.json @@ -3,6 +3,20 @@ { "name": "awsChatBedrock", "models": [ + { + "label": "openai.gpt-oss-20b-1:0", + "name": "openai.gpt-oss-20b-1:0", + "description": "21B parameters model optimized for lower latency, local, and specialized use cases", + "input_cost": 0.00007, + "output_cost": 0.0003 + }, + { + "label": "openai.gpt-oss-120b-1:0", + "name": "openai.gpt-oss-120b-1:0", + "description": "120B parameters model optimized for production, general purpose, and high-reasoning use cases", + "input_cost": 0.00015, + "output_cost": 0.0006 + }, { "label": "anthropic.claude-opus-4-1-20250805-v1:0", "name": "anthropic.claude-opus-4-1-20250805-v1:0", @@ -556,17 +570,23 @@ "name": "chatGoogleGenerativeAI", "models": [ { - "label": "gemini-2.5-flash-preview-05-20", - "name": "gemini-2.5-flash-preview-05-20", - "input_cost": 0.15e-6, - "output_cost": 6e-7 + "label": "gemini-2.5-pro", + "name": "gemini-2.5-pro", + "input_cost": 0.3e-6, + "output_cost": 0.000025 }, { - "label": "gemini-2.5-pro-preview-03-25", - "name": "gemini-2.5-pro-preview-03-25", + "label": "gemini-2.5-flash", + "name": "gemini-2.5-flash", "input_cost": 1.25e-6, "output_cost": 0.00001 }, + { + "label": "gemini-2.5-flash-lite", + "name": "gemini-2.5-flash-lite", + "input_cost": 1e-7, + "output_cost": 4e-7 + }, { "label": "gemini-2.0-flash", "name": "gemini-2.0-flash", 
@@ -613,18 +633,24 @@ { "name": "chatGoogleVertexAI", "models": [ - { - "label": "gemini-2.5-flash", - "name": "gemini-2.5-flash", - "input_cost": 0.15e-6, - "output_cost": 6e-7 - }, { "label": "gemini-2.5-pro", "name": "gemini-2.5-pro", + "input_cost": 0.3e-6, + "output_cost": 0.000025 + }, + { + "label": "gemini-2.5-flash", + "name": "gemini-2.5-flash", "input_cost": 1.25e-6, "output_cost": 0.00001 }, + { + "label": "gemini-2.5-flash-lite", + "name": "gemini-2.5-flash-lite", + "input_cost": 1e-7, + "output_cost": 4e-7 + }, { "label": "gemini-2.0-flash", "name": "gemini-2.0-flash-001", diff --git a/packages/components/nodes/agentflow/Agent/Agent.ts b/packages/components/nodes/agentflow/Agent/Agent.ts index e9ba9178950..39759f0e622 100644 --- a/packages/components/nodes/agentflow/Agent/Agent.ts +++ b/packages/components/nodes/agentflow/Agent/Agent.ts @@ -28,7 +28,7 @@ import { replaceBase64ImagesWithFileReferences, updateFlowState } from '../utils' -import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam } from '../../../src/utils' +import { convertMultiOptionsToStringArray, getCredentialData, getCredentialParam, processTemplateVariables } from '../../../src/utils' import { addSingleFileToStorage } from '../../../src/storageUtils' import fetch from 'node-fetch' @@ -797,7 +797,11 @@ class Agent_Agentflow implements INode { const role = msg.role const content = msg.content if (role && content) { - messages.push({ role, content }) + if (role === 'system') { + messages.unshift({ role, content }) + } else { + messages.push({ role, content }) + } } } @@ -1082,13 +1086,7 @@ class Agent_Agentflow implements INode { } // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = newState[key].replaceAll('{{ output }}', finalResponse) - } - } - } + newState = processTemplateVariables(newState, finalResponse) // 
Replace the actual messages array with one that includes the file references for images instead of base64 data const messagesWithFileReferences = replaceBase64ImagesWithFileReferences( @@ -1719,9 +1717,20 @@ class Agent_Agentflow implements INode { } console.error('Error invoking tool:', e) + const errMsg = getErrorMessage(e) + let toolInput = toolCall.args + if (typeof errMsg === 'string' && errMsg.includes(TOOL_ARGS_PREFIX)) { + const [_, args] = errMsg.split(TOOL_ARGS_PREFIX) + try { + toolInput = JSON.parse(args) + } catch (e) { + console.error('Error parsing tool input from tool:', e) + } + } + usedTools.push({ tool: selectedTool.name, - toolInput: toolCall.args, + toolInput, toolOutput: '', error: getErrorMessage(e) }) @@ -1991,9 +2000,20 @@ class Agent_Agentflow implements INode { } console.error('Error invoking tool:', e) + const errMsg = getErrorMessage(e) + let toolInput = toolCall.args + if (typeof errMsg === 'string' && errMsg.includes(TOOL_ARGS_PREFIX)) { + const [_, args] = errMsg.split(TOOL_ARGS_PREFIX) + try { + toolInput = JSON.parse(args) + } catch (e) { + console.error('Error parsing tool input from tool:', e) + } + } + usedTools.push({ tool: selectedTool.name, - toolInput: toolCall.args, + toolInput, toolOutput: '', error: getErrorMessage(e) }) diff --git a/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts b/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts index 3bdc60681b5..fbb1097f7bc 100644 --- a/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts +++ b/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts @@ -8,7 +8,7 @@ import { INodeParams, IServerSideEventStreamer } from '../../../src/Interface' -import { getVars, executeJavaScriptCode, createCodeExecutionSandbox } from '../../../src/utils' +import { getVars, executeJavaScriptCode, createCodeExecutionSandbox, processTemplateVariables } from '../../../src/utils' import { updateFlowState } from '../utils' interface 
ICustomFunctionInputVariables { @@ -145,19 +145,13 @@ class CustomFunction_Agentflow implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - // Update flow state if needed - let newState = { ...state } - if (_customFunctionUpdateState && Array.isArray(_customFunctionUpdateState) && _customFunctionUpdateState.length > 0) { - newState = updateFlowState(state, _customFunctionUpdateState) - } - const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, chatId: options.chatId, input, - state: newState + state } // Create additional sandbox variables for custom function inputs @@ -190,15 +184,14 @@ class CustomFunction_Agentflow implements INode { finalOutput = JSON.stringify(response, null, 2) } - // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = newState[key].replaceAll('{{ output }}', finalOutput) - } - } + // Update flow state if needed + let newState = { ...state } + if (_customFunctionUpdateState && Array.isArray(_customFunctionUpdateState) && _customFunctionUpdateState.length > 0) { + newState = updateFlowState(state, _customFunctionUpdateState) } + newState = processTemplateVariables(newState, finalOutput) + const returnOutput = { id: nodeData.id, name: this.name, diff --git a/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts b/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts index 492c00af1d3..06bc674643c 100644 --- a/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts +++ b/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts @@ -8,7 +8,8 @@ import { IServerSideEventStreamer } from '../../../src/Interface' import axios, { AxiosRequestConfig } from 'axios' -import { 
getCredentialData, getCredentialParam } from '../../../src/utils' +import { getCredentialData, getCredentialParam, processTemplateVariables } from '../../../src/utils' +import JSON5 from 'json5' import { DataSource } from 'typeorm' import { BaseMessageLike } from '@langchain/core/messages' import { updateFlowState } from '../utils' @@ -167,9 +168,7 @@ class ExecuteFlow_Agentflow implements INode { let overrideConfig = nodeData.inputs?.executeFlowOverrideConfig if (typeof overrideConfig === 'string' && overrideConfig.startsWith('{') && overrideConfig.endsWith('}')) { try { - // Handle escaped square brackets and other common escape sequences - const unescapedConfig = overrideConfig.replace(/\\(\[|\])/g, '$1') - overrideConfig = JSON.parse(unescapedConfig) + overrideConfig = JSON5.parse(overrideConfig) } catch (parseError) { throw new Error(`Invalid JSON in executeFlowOverrideConfig: ${parseError.message}`) } @@ -222,13 +221,7 @@ class ExecuteFlow_Agentflow implements INode { } // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = newState[key].replaceAll('{{ output }}', resultText) - } - } - } + newState = processTemplateVariables(newState, resultText) // Only add to runtime chat history if this is the first node const inputMessages = [] diff --git a/packages/components/nodes/agentflow/HTTP/HTTP.ts b/packages/components/nodes/agentflow/HTTP/HTTP.ts index f7635ce03ad..405881a4bbc 100644 --- a/packages/components/nodes/agentflow/HTTP/HTTP.ts +++ b/packages/components/nodes/agentflow/HTTP/HTTP.ts @@ -4,6 +4,7 @@ import FormData from 'form-data' import * as querystring from 'querystring' import { getCredentialData, getCredentialParam } from '../../../src/utils' import { secureAxiosRequest } from '../../../src/httpSecurity' +import JSON5 from 'json5' class HTTP_Agentflow implements INode { label: string @@ -19,34 +20,13 @@ 
class HTTP_Agentflow implements INode { credential: INodeParams inputs: INodeParams[] - private sanitizeJsonString(jsonString: string): string { - // Remove common problematic escape sequences that are not valid JSON - let sanitized = jsonString - // Remove escaped square brackets (not valid JSON) - .replace(/\\(\[|\])/g, '$1') - // Fix unquoted string values in JSON (simple case) - .replace(/:\s*([a-zA-Z][a-zA-Z0-9]*)\s*([,}])/g, ': "$1"$2') - // Fix trailing commas - .replace(/,(\s*[}\]])/g, '$1') - - return sanitized - } - private parseJsonBody(body: string): any { try { - // First try to parse as-is - return JSON.parse(body) + return JSON5.parse(body) } catch (error) { - try { - // If that fails, try to sanitize and parse - const sanitized = this.sanitizeJsonString(body) - return JSON.parse(sanitized) - } catch (sanitizeError) { - // If sanitization also fails, throw the original error with helpful message - throw new Error( - `Invalid JSON format in body. Original error: ${error.message}. Please ensure your JSON is properly formatted with quoted strings and valid escape sequences.` - ) - } + throw new Error( + `Invalid JSON format in body. Original error: ${error.message}. Please ensure your JSON is properly formatted with quoted strings and valid escape sequences.` + ) } } diff --git a/packages/components/nodes/agentflow/HumanInput/HumanInput.ts b/packages/components/nodes/agentflow/HumanInput/HumanInput.ts index 6fa388e26d7..8d5818ff36d 100644 --- a/packages/components/nodes/agentflow/HumanInput/HumanInput.ts +++ b/packages/components/nodes/agentflow/HumanInput/HumanInput.ts @@ -208,7 +208,7 @@ class HumanInput_Agentflow implements INode { humanInputDescription = (nodeData.inputs?.humanInputDescription as string) || 'Do you want to proceed?' 
const messages = [...pastChatHistory, ...runtimeChatHistory] // Find the last message in the messages array - const lastMessage = (messages[messages.length - 1] as any).content || '' + const lastMessage = messages.length > 0 ? (messages[messages.length - 1] as any).content || '' : '' humanInputDescription = `${lastMessage}\n\n${humanInputDescription}` if (isStreamable) { const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer diff --git a/packages/components/nodes/agentflow/Iteration/Iteration.ts b/packages/components/nodes/agentflow/Iteration/Iteration.ts index 048035fb2b1..04dfbb519d7 100644 --- a/packages/components/nodes/agentflow/Iteration/Iteration.ts +++ b/packages/components/nodes/agentflow/Iteration/Iteration.ts @@ -1,4 +1,5 @@ import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import JSON5 from 'json5' class Iteration_Agentflow implements INode { label: string @@ -39,12 +40,17 @@ class Iteration_Agentflow implements INode { const iterationInput = nodeData.inputs?.iterationInput // Helper function to clean JSON strings with redundant backslashes - const cleanJsonString = (str: string): string => { - return str.replace(/\\(["'[\]{}])/g, '$1') + const safeParseJson = (str: string): string => { + try { + return JSON5.parse(str) + } catch { + // Try parsing after cleaning + return JSON5.parse(str.replace(/\\(["'[\]{}])/g, '$1')) + } } const iterationInputArray = - typeof iterationInput === 'string' && iterationInput !== '' ? JSON.parse(cleanJsonString(iterationInput)) : iterationInput + typeof iterationInput === 'string' && iterationInput !== '' ? 
safeParseJson(iterationInput) : iterationInput if (!iterationInputArray || !Array.isArray(iterationInputArray)) { throw new Error('Invalid input array') diff --git a/packages/components/nodes/agentflow/LLM/LLM.ts b/packages/components/nodes/agentflow/LLM/LLM.ts index d0e684ab64d..8ad1d2aae34 100644 --- a/packages/components/nodes/agentflow/LLM/LLM.ts +++ b/packages/components/nodes/agentflow/LLM/LLM.ts @@ -12,7 +12,7 @@ import { replaceBase64ImagesWithFileReferences, updateFlowState } from '../utils' -import { get } from 'lodash' +import { processTemplateVariables } from '../../../src/utils' class LLM_Agentflow implements INode { label: string @@ -399,7 +399,11 @@ class LLM_Agentflow implements INode { const role = msg.role const content = msg.content if (role && content) { - messages.push({ role, content }) + if (role === 'system') { + messages.unshift({ role, content }) + } else { + messages.push({ role, content }) + } } } @@ -525,36 +529,7 @@ class LLM_Agentflow implements INode { } // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - const stateValue = newState[key].toString() - if (stateValue.includes('{{ output')) { - // Handle simple output replacement - if (stateValue === '{{ output }}') { - newState[key] = finalResponse - continue - } - - // Handle JSON path expressions like {{ output.item1 }} - // eslint-disable-next-line - const match = stateValue.match(/{{[\s]*output\.([\w\.]+)[\s]*}}/) - if (match) { - try { - // Parse the response if it's JSON - const jsonResponse = typeof finalResponse === 'string' ? JSON.parse(finalResponse) : finalResponse - // Get the value using lodash get - const path = match[1] - const value = get(jsonResponse, path) - newState[key] = value ?? 
stateValue // Fall back to original if path not found - } catch (e) { - // If JSON parsing fails, keep original template - console.warn(`Failed to parse JSON or find path in output: ${e}`) - newState[key] = stateValue - } - } - } - } - } + newState = processTemplateVariables(newState, finalResponse) // Replace the actual messages array with one that includes the file references for images instead of base64 data const messagesWithFileReferences = replaceBase64ImagesWithFileReferences( diff --git a/packages/components/nodes/agentflow/Retriever/Retriever.ts b/packages/components/nodes/agentflow/Retriever/Retriever.ts index b9af63b766c..e7ce426c230 100644 --- a/packages/components/nodes/agentflow/Retriever/Retriever.ts +++ b/packages/components/nodes/agentflow/Retriever/Retriever.ts @@ -8,6 +8,7 @@ import { IServerSideEventStreamer } from '../../../src/Interface' import { updateFlowState } from '../utils' +import { processTemplateVariables } from '../../../src/utils' import { DataSource } from 'typeorm' import { BaseRetriever } from '@langchain/core/retrievers' import { Document } from '@langchain/core/documents' @@ -197,14 +198,7 @@ class Retriever_Agentflow implements INode { sseStreamer.streamTokenEvent(chatId, finalOutput) } - // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = newState[key].replaceAll('{{ output }}', finalOutput) - } - } - } + newState = processTemplateVariables(newState, finalOutput) const returnOutput = { id: nodeData.id, diff --git a/packages/components/nodes/agentflow/Tool/Tool.ts b/packages/components/nodes/agentflow/Tool/Tool.ts index 59b81f5535b..300aaafa199 100644 --- a/packages/components/nodes/agentflow/Tool/Tool.ts +++ b/packages/components/nodes/agentflow/Tool/Tool.ts @@ -1,5 +1,6 @@ import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from 
'../../../src/Interface' import { updateFlowState } from '../utils' +import { processTemplateVariables } from '../../../src/utils' import { Tool } from '@langchain/core/tools' import { ARTIFACTS_PREFIX, TOOL_ARGS_PREFIX } from '../../../src/agents' import zodToJsonSchema from 'zod-to-json-schema' @@ -330,14 +331,7 @@ class Tool_Agentflow implements INode { sseStreamer.streamTokenEvent(chatId, toolOutput) } - // Process template variables in state - if (newState && Object.keys(newState).length > 0) { - for (const key in newState) { - if (newState[key].toString().includes('{{ output }}')) { - newState[key] = newState[key].replaceAll('{{ output }}', toolOutput) - } - } - } + newState = processTemplateVariables(newState, toolOutput) const returnOutput = { id: nodeData.id, diff --git a/packages/components/nodes/agentflow/utils.ts b/packages/components/nodes/agentflow/utils.ts index 1701b7e4dde..14d832c8abb 100644 --- a/packages/components/nodes/agentflow/utils.ts +++ b/packages/components/nodes/agentflow/utils.ts @@ -459,9 +459,9 @@ export const getPastChatHistoryImageMessages = async ( /** * Updates the flow state with new values */ -export const updateFlowState = (state: ICommonObject, llmUpdateState: IFlowState[]): ICommonObject => { +export const updateFlowState = (state: ICommonObject, updateState: IFlowState[]): ICommonObject => { let newFlowState: Record = {} - for (const state of llmUpdateState) { + for (const state of updateState) { newFlowState[state.key] = state.value } diff --git a/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts b/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts index 120ae416966..786a17d49ad 100644 --- a/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts @@ -273,10 +273,9 @@ class AzureChatOpenAI_ChatModels implements INode { console.error('Error parsing base options', exception) } } - if 
(modelName === 'o3-mini' || modelName.includes('o1')) { + if (modelName.includes('o1') || modelName.includes('o3') || modelName.includes('gpt-5')) { delete obj.temperature - } - if (modelName.includes('o1') || modelName.includes('o3')) { + delete obj.stop const reasoning: OpenAIClient.Reasoning = {} if (reasoningEffort) { reasoning.effort = reasoningEffort @@ -285,6 +284,11 @@ class AzureChatOpenAI_ChatModels implements INode { reasoning.summary = reasoningSummary } obj.reasoning = reasoning + + if (maxTokens) { + delete obj.maxTokens + obj.maxCompletionTokens = parseInt(maxTokens, 10) + } } const multiModalOption: IMultiModalOption = { diff --git a/packages/components/nodes/chatmodels/ChatCometAPI/ChatCometAPI.ts b/packages/components/nodes/chatmodels/ChatCometAPI/ChatCometAPI.ts new file mode 100644 index 00000000000..295c5e7ceac --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatCometAPI/ChatCometAPI.ts @@ -0,0 +1,176 @@ +import { BaseCache } from '@langchain/core/caches' +import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai' +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class ChatCometAPI_ChatModels implements INode { + readonly baseURL: string = 'https://api.cometapi.com/v1' + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'ChatCometAPI' + this.name = 'chatCometAPI' + this.version = 1.0 + this.type = 'ChatCometAPI' + this.icon = 'cometapi.svg' + this.category = 'Chat Models' + this.description = 'Wrapper around CometAPI large language models that use the Chat endpoint' + this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + 
credentialNames: ['cometApi'] + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model Name', + name: 'modelName', + type: 'string', + default: 'gpt-5-mini', + description: 'Enter the model name (e.g., gpt-5-mini, claude-sonnet-4-20250514, gemini-2.0-flash)' + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + step: 0.1, + default: 0.7, + optional: true + }, + { + label: 'Streaming', + name: 'streaming', + type: 'boolean', + default: true, + optional: true, + additionalParams: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + step: 1, + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Frequency Penalty', + name: 'frequencyPenalty', + type: 'number', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + step: 0.1, + optional: true, + additionalParams: true + }, + { + label: 'Base Options', + name: 'baseOptions', + type: 'json', + optional: true, + additionalParams: true, + description: 'Additional options to pass to the CometAPI client. This should be a JSON object.' 
+ } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = nodeData.inputs?.topP as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const streaming = nodeData.inputs?.streaming as boolean + const baseOptions = nodeData.inputs?.baseOptions + + if (nodeData.inputs?.credentialId) { + nodeData.credential = nodeData.inputs?.credentialId + } + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const openAIApiKey = getCredentialParam('cometApiKey', credentialData, nodeData) + + // Custom error handling for missing API key + if (!openAIApiKey || openAIApiKey.trim() === '') { + throw new Error( + 'CometAPI API Key is missing or empty. Please provide a valid CometAPI API key in the credential configuration.' + ) + } + + // Custom error handling for missing model name + if (!modelName || modelName.trim() === '') { + throw new Error('Model Name is required. Please enter a valid model name (e.g., gpt-5-mini, claude-sonnet-4-20250514).') + } + + const cache = nodeData.inputs?.cache as BaseCache + + const obj: ChatOpenAIFields = { + temperature: parseFloat(temperature), + modelName, + openAIApiKey, + apiKey: openAIApiKey, + streaming: streaming ?? true + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseFloat(topP) + if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty) + if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty) + if (cache) obj.cache = cache + + let parsedBaseOptions: any | undefined = undefined + + if (baseOptions) { + try { + parsedBaseOptions = typeof baseOptions === 'object' ? 
baseOptions : JSON.parse(baseOptions) + if (parsedBaseOptions.baseURL) { + console.warn("The 'baseURL' parameter is not allowed when using the ChatCometAPI node.") + parsedBaseOptions.baseURL = undefined + } + } catch (exception) { + throw new Error('Invalid JSON in the BaseOptions: ' + exception) + } + } + + const model = new ChatOpenAI({ + ...obj, + configuration: { + baseURL: this.baseURL, + ...parsedBaseOptions + } + }) + return model + } +} + +module.exports = { nodeClass: ChatCometAPI_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatCometAPI/cometapi.svg b/packages/components/nodes/chatmodels/ChatCometAPI/cometapi.svg new file mode 100644 index 00000000000..9f1d803d4ef --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatCometAPI/cometapi.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts b/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts new file mode 100644 index 00000000000..a62ebfb30d0 --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatSambanova/ChatSambanova.ts @@ -0,0 +1,123 @@ +import { BaseCache } from '@langchain/core/caches' +import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai' +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class ChatSambanova_ChatModels implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'ChatSambanova' + this.name = 'chatSambanova' + this.version = 1.0 + this.type = 'ChatSambanova' + this.icon = 'sambanova.png' + this.category = 'Chat Models' + this.description = 'Wrapper around Sambanova Chat Endpoints' + this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)] + 
this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['sambanovaApi'] + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model', + name: 'modelName', + type: 'string', + default: 'Meta-Llama-3.3-70B-Instruct', + placeholder: 'Meta-Llama-3.3-70B-Instruct' + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + step: 0.1, + default: 0.9, + optional: true + }, + { + label: 'Streaming', + name: 'streaming', + type: 'boolean', + default: true, + optional: true + }, + { + label: 'BasePath', + name: 'basepath', + type: 'string', + optional: true, + default: 'https://api.sambanova.ai/v1', + additionalParams: true + }, + { + label: 'BaseOptions', + name: 'baseOptions', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const cache = nodeData.inputs?.cache as BaseCache + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const streaming = nodeData.inputs?.streaming as boolean + const basePath = nodeData.inputs?.basepath as string + const baseOptions = nodeData.inputs?.baseOptions + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const sambanovaApiKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData) + + const obj: ChatOpenAIFields = { + temperature: temperature ? parseFloat(temperature) : undefined, + model: modelName, + apiKey: sambanovaApiKey, + openAIApiKey: sambanovaApiKey, + streaming: streaming ?? true + } + + if (cache) obj.cache = cache + + let parsedBaseOptions: any | undefined = undefined + + if (baseOptions) { + try { + parsedBaseOptions = typeof baseOptions === 'object' ? 
baseOptions : JSON.parse(baseOptions) + } catch (exception) { + throw new Error("Invalid JSON in the ChatSambanova's BaseOptions: " + exception) + } + } + + if (basePath || parsedBaseOptions) { + obj.configuration = { + baseURL: basePath, + defaultHeaders: parsedBaseOptions + } + } + + const model = new ChatOpenAI(obj) + return model + } +} + +module.exports = { nodeClass: ChatSambanova_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png b/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png new file mode 100644 index 00000000000..8bc16c5d5e3 Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatSambanova/sambanova.png differ diff --git a/packages/components/nodes/documentloaders/File/File.ts b/packages/components/nodes/documentloaders/File/File.ts index 53f5be5ea7e..80216323574 100644 --- a/packages/components/nodes/documentloaders/File/File.ts +++ b/packages/components/nodes/documentloaders/File/File.ts @@ -136,9 +136,10 @@ class File_DocumentLoaders implements INode { let files: string[] = [] const fileBlobs: { blob: Blob; ext: string }[] = [] + const processRaw = options.processRaw //FILE-STORAGE::["CONTRIBUTING.md","LICENSE.md","README.md"] - const totalFiles = getOverrideFileInputs(nodeData) || fileBase64 + const totalFiles = getOverrideFileInputs(nodeData, processRaw) || fileBase64 if (totalFiles.startsWith('FILE-STORAGE::')) { const fileName = totalFiles.replace('FILE-STORAGE::', '') if (fileName.startsWith('[') && fileName.endsWith(']')) { @@ -298,7 +299,7 @@ class File_DocumentLoaders implements INode { } } -const getOverrideFileInputs = (nodeData: INodeData) => { +const getOverrideFileInputs = (nodeData: INodeData, processRaw: boolean) => { const txtFileBase64 = nodeData.inputs?.txtFile as string const pdfFileBase64 = nodeData.inputs?.pdfFile as string const jsonFileBase64 = nodeData.inputs?.jsonFile as string @@ -347,6 +348,10 @@ const getOverrideFileInputs = (nodeData: INodeData) => { 
files.push(...removePrefix(powerpointFileBase64)) } + if (processRaw) { + return files.length ? JSON.stringify(files) : '' + } + return files.length ? `FILE-STORAGE::${JSON.stringify(files)}` : '' } diff --git a/packages/components/nodes/documentloaders/Oxylabs/Oxylabs.ts b/packages/components/nodes/documentloaders/Oxylabs/Oxylabs.ts index f90252d4441..51fb2613366 100644 --- a/packages/components/nodes/documentloaders/Oxylabs/Oxylabs.ts +++ b/packages/components/nodes/documentloaders/Oxylabs/Oxylabs.ts @@ -95,14 +95,20 @@ export class OxylabsLoader extends BaseDocumentLoader { } public async load(): Promise { - const response = await this.sendAPIRequest({ - url: this.params.query, + let isUrlSource = this.params.source == 'universal' + + const params = { source: this.params.source, geo_location: this.params.geo_location, render: this.params.render, parse: this.params.parse, - user_agent_type: this.params.user_agent_type - }) + user_agent_type: this.params.user_agent_type, + markdown: !this.params.parse, + url: isUrlSource ? this.params.query : null, + query: !isUrlSource ? 
this.params.query : null + } + + const response = await this.sendAPIRequest(params) const docs: OxylabsDocument[] = response.data.results.map((result, index) => ({ id: `${response.data.job.id.toString()}-${index}`, diff --git a/packages/components/nodes/llms/SambaNova/Sambanova.ts b/packages/components/nodes/llms/SambaNova/Sambanova.ts new file mode 100644 index 00000000000..4cb76aefa76 --- /dev/null +++ b/packages/components/nodes/llms/SambaNova/Sambanova.ts @@ -0,0 +1,71 @@ +import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { OpenAI } from '@langchain/openai' +import { BaseCache } from '@langchain/core/caches' + +class Sambanova_LLMs implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Sambanova' + this.name = 'sambanova' + this.version = 1.0 + this.type = 'Sambanova' + this.icon = 'sambanova.png' + this.category = 'LLMs' + this.description = 'Wrapper around Sambanova API for large language models' + this.baseClasses = [this.type, ...getBaseClasses(OpenAI)] + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['sambanovaApi'] + } + this.inputs = [ + { + label: 'Cache', + name: 'cache', + type: 'BaseCache', + optional: true + }, + { + label: 'Model Name', + name: 'modelName', + type: 'string', + default: 'Meta-Llama-3.3-70B-Instruct', + description: 'For more details see https://docs.sambanova.ai/cloud/docs/get-started/supported-models', + optional: true + } + ] + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const cache = nodeData.inputs?.cache as BaseCache + const modelName = nodeData.inputs?.modelName as string + + const credentialData = await getCredentialData(nodeData.credential 
?? '', options) + const sambanovaKey = getCredentialParam('sambanovaApiKey', credentialData, nodeData) + + const obj: any = { + model: modelName, + configuration: { + baseURL: 'https://api.sambanova.ai/v1', + apiKey: sambanovaKey + } + } + if (cache) obj.cache = cache + + const sambanova = new OpenAI(obj) + return sambanova + } +} + +module.exports = { nodeClass: Sambanova_LLMs } diff --git a/packages/components/nodes/llms/SambaNova/sambanova.png b/packages/components/nodes/llms/SambaNova/sambanova.png new file mode 100644 index 00000000000..8bc16c5d5e3 Binary files /dev/null and b/packages/components/nodes/llms/SambaNova/sambanova.png differ diff --git a/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.test.ts b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.test.ts new file mode 100644 index 00000000000..c69b48aebad --- /dev/null +++ b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.test.ts @@ -0,0 +1,479 @@ +// Mock AWS SDK DynamoDB client +jest.mock('@aws-sdk/client-dynamodb', () => { + const mockSend = jest.fn() + + // Create mock constructors that capture inputs + const PutItemCommandMock = jest.fn((input) => ({ input, _type: 'PutItemCommand' })) + const QueryCommandMock = jest.fn((input) => ({ input, _type: 'QueryCommand' })) + + return { + DynamoDBClient: jest.fn().mockImplementation(() => ({ + send: mockSend + })), + DescribeTableCommand: jest.fn(), + ListTablesCommand: jest.fn(), + PutItemCommand: PutItemCommandMock, + QueryCommand: QueryCommandMock, + __mockSend: mockSend + } +}) + +// Mock AWS credentials utility +jest.mock('../../../src/awsToolsUtils', () => ({ + AWS_REGIONS: [ + { label: 'US East (N. 
Virginia)', name: 'us-east-1' }, + { label: 'US West (Oregon)', name: 'us-west-2' } + ], + DEFAULT_AWS_REGION: 'us-east-1', + getAWSCredentials: jest.fn(() => + Promise.resolve({ + accessKeyId: 'test-access-key', + secretAccessKey: 'test-secret-key', + sessionToken: 'test-session-token' + }) + ) +})) + +// Mock getBaseClasses function +jest.mock('../../../src/utils', () => ({ + getBaseClasses: jest.fn(() => ['Tool', 'StructuredTool']) +})) + +describe('AWSDynamoDBKVStorage', () => { + let AWSDynamoDBKVStorage_Tools: any + let mockSend: jest.Mock + let PutItemCommandMock: jest.Mock + let QueryCommandMock: jest.Mock + + // Helper function to create a node instance + const createNode = () => new AWSDynamoDBKVStorage_Tools() + + // Helper function to create nodeData + const createNodeData = (overrides = {}) => ({ + inputs: { + region: 'us-east-1', + tableName: 'test-table', + keyPrefix: '', + operation: 'store', + ...overrides + } + }) + + beforeEach(async () => { + // Clear all mocks before each test + jest.clearAllMocks() + + // Get the mock functions + const dynamoDBModule = require('@aws-sdk/client-dynamodb') + mockSend = dynamoDBModule.__mockSend + PutItemCommandMock = dynamoDBModule.PutItemCommand + QueryCommandMock = dynamoDBModule.QueryCommand + + mockSend.mockReset() + PutItemCommandMock.mockClear() + QueryCommandMock.mockClear() + + // Dynamic import to get fresh module instance + const module = (await import('./AWSDynamoDBKVStorage')) as any + AWSDynamoDBKVStorage_Tools = module.nodeClass + }) + + describe('AWSDynamoDBKVStorage_Tools Node', () => { + it('should have correct input parameters', () => { + const node = createNode() + const inputNames = node.inputs.map((input: any) => input.name) + + expect(inputNames).toEqual(['region', 'tableName', 'keyPrefix', 'operation']) + }) + }) + + describe('loadMethods - listTables', () => { + it('should list valid DynamoDB tables with correct schema', async () => { + const node = createNode() + + // Mock responses for 
list and describe commands + mockSend + .mockResolvedValueOnce({ + TableNames: ['table1', 'table2', 'invalid-table'] + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [ + { AttributeName: 'pk', KeyType: 'HASH' }, + { AttributeName: 'sk', KeyType: 'RANGE' } + ] + } + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [ + { AttributeName: 'pk', KeyType: 'HASH' }, + { AttributeName: 'sk', KeyType: 'RANGE' } + ] + } + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [{ AttributeName: 'id', KeyType: 'HASH' }] + } + }) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toEqual([ + { + label: 'table1', + name: 'table1', + description: 'Table with pk (partition) and sk (sort) keys' + }, + { + label: 'table2', + name: 'table2', + description: 'Table with pk (partition) and sk (sort) keys' + } + ]) + }) + + it('should return error when no tables found', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + TableNames: [] + }) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toEqual([ + { + label: 'No tables found', + name: 'error', + description: 'No DynamoDB tables found in this region' + } + ]) + }) + + it('should return error when no compatible tables found', async () => { + const node = createNode() + + mockSend + .mockResolvedValueOnce({ + TableNames: ['invalid-table'] + }) + .mockResolvedValueOnce({ + Table: { + KeySchema: [{ AttributeName: 'id', KeyType: 'HASH' }] + } + }) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toHaveLength(1) + expect(result[0]).toMatchObject({ + label: 'No compatible tables found', + name: 'error' + }) + expect(result[0].description).toContain('Found 1 table(s) with different schema') + }) + + it('should handle AWS 
credentials error', async () => { + const node = createNode() + const { getAWSCredentials } = require('../../../src/awsToolsUtils') + + getAWSCredentials.mockRejectedValueOnce(new Error('AWS Access Key not found')) + + const nodeData = { inputs: { region: 'us-east-1' } } + + const result = await node.loadMethods.listTables(nodeData, {}) + + expect(result).toEqual([ + { + label: 'AWS Credentials Required', + name: 'error', + description: 'Enter AWS Access Key ID and Secret Access Key' + } + ]) + }) + }) + + describe('init method', () => { + it.each([ + ['store', 'test-prefix', 'dynamodb_kv_store', 'Store a text value with a key in DynamoDB'], + ['retrieve', '', 'dynamodb_kv_retrieve', 'Retrieve a value by key from DynamoDB'] + ])('should create correct tool for %s operation', async (operation, keyPrefix, expectedName, expectedDescription) => { + const node = createNode() + const nodeData = createNodeData({ keyPrefix, operation }) + + const tool = await node.init(nodeData, '', {}) + + expect(tool.name).toBe(expectedName) + expect(tool.description).toContain(expectedDescription) + }) + + it.each([ + ['error', '', 'Valid DynamoDB Table selection is required'], + ['test-table', 'prefix#invalid', 'Key prefix cannot contain "#" character'] + ])('should throw error for invalid config (table: %s, prefix: %s)', async (tableName, keyPrefix, expectedError) => { + const node = createNode() + const nodeData = createNodeData({ tableName, keyPrefix }) + + await expect(node.init(nodeData, '', {})).rejects.toThrow(expectedError) + }) + }) + + describe('DynamoDBStoreTool', () => { + it('should store value successfully', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({}) + + const nodeData = createNodeData({ keyPrefix: 'test' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey', value: 'myvalue' }) + + expect(result).toContain('Successfully stored value with key "mykey"') + 
expect(mockSend).toHaveBeenCalledTimes(1) + + // Verify PutItemCommand was called with correct parameters + expect(PutItemCommandMock).toHaveBeenCalledTimes(1) + const putCommandInput = PutItemCommandMock.mock.calls[0][0] + + expect(putCommandInput).toMatchObject({ + TableName: 'test-table', + Item: { + pk: { S: 'test#mykey' }, + value: { S: 'myvalue' } + } + }) + + // Verify timestamp fields exist + expect(putCommandInput.Item.sk).toBeDefined() + expect(putCommandInput.Item.timestamp).toBeDefined() + }) + + it.each([ + ['', 'Key must be a non-empty string'], + [' ', 'Key must be a non-empty string'], + ['a'.repeat(2049), 'Key too long'] + ])('should handle invalid key: "%s"', async (key, expectedError) => { + const node = createNode() + + const nodeData = createNodeData() + + const tool = await node.init(nodeData, '', {}) + await expect(tool._call({ key, value: 'myvalue' })).rejects.toThrow(expectedError) + }) + + it.each([ + ['store', { key: 'mykey', value: 'myvalue' }, 'Failed to store value: DynamoDB error'], + ['retrieve', { key: 'mykey' }, 'Failed to retrieve value: DynamoDB error'] + ])('should handle DynamoDB error for %s', async (operation, callParams, expectedError) => { + const node = createNode() + mockSend.mockRejectedValueOnce(new Error('DynamoDB error')) + + const nodeData = createNodeData({ operation }) + const tool = await node.init(nodeData, '', {}) + + await expect(tool._call(callParams)).rejects.toThrow(expectedError) + }) + }) + + describe('DynamoDBRetrieveTool', () => { + it('should retrieve latest value successfully', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [ + { + pk: { S: 'test#mykey' }, + sk: { S: '1234567890' }, + value: { S: 'myvalue' }, + timestamp: { S: '2024-01-01T00:00:00.000Z' } + } + ] + }) + + const nodeData = createNodeData({ keyPrefix: 'test', operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey' }) + const 
parsed = JSON.parse(result) + + expect(parsed).toEqual({ + value: 'myvalue', + timestamp: '2024-01-01T00:00:00.000Z' + }) + expect(mockSend).toHaveBeenCalledTimes(1) + + // Verify QueryCommand was called with correct parameters + expect(QueryCommandMock).toHaveBeenCalledTimes(1) + const queryCommandInput = QueryCommandMock.mock.calls[0][0] + + expect(queryCommandInput).toMatchObject({ + TableName: 'test-table', + KeyConditionExpression: 'pk = :pk', + ExpressionAttributeValues: { + ':pk': { S: 'test#mykey' } + }, + ScanIndexForward: false, + Limit: 1 + }) + }) + + it('should retrieve nth latest value', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [ + { + pk: { S: 'mykey' }, + sk: { S: '1234567892' }, + value: { S: 'newest' }, + timestamp: { S: '2024-01-03T00:00:00.000Z' } + }, + { + pk: { S: 'mykey' }, + sk: { S: '1234567891' }, + value: { S: 'second' }, + timestamp: { S: '2024-01-02T00:00:00.000Z' } + }, + { + pk: { S: 'mykey' }, + sk: { S: '1234567890' }, + value: { S: 'oldest' }, + timestamp: { S: '2024-01-01T00:00:00.000Z' } + } + ] + }) + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey', nthLatest: '2' }) + const parsed = JSON.parse(result) + + expect(parsed).toEqual({ + value: 'second', + timestamp: '2024-01-02T00:00:00.000Z' + }) + + // Verify QueryCommand was called with Limit: 2 + expect(QueryCommandMock).toHaveBeenCalledTimes(1) + const queryCommandInput = QueryCommandMock.mock.calls[0][0] + expect(queryCommandInput.Limit).toBe(2) + }) + + it('should return null when key not found', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [] + }) + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'nonexistent' }) + const parsed = JSON.parse(result) + + 
expect(parsed).toEqual({ + value: null, + timestamp: null + }) + }) + + it('should return null when nth version does not exist', async () => { + const node = createNode() + + mockSend.mockResolvedValueOnce({ + Items: [ + { + pk: { S: 'mykey' }, + sk: { S: '1234567890' }, + value: { S: 'only-one' }, + timestamp: { S: '2024-01-01T00:00:00.000Z' } + } + ] + }) + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + const result = await tool._call({ key: 'mykey', nthLatest: '3' }) + const parsed = JSON.parse(result) + + expect(parsed).toEqual({ + value: null, + timestamp: null + }) + }) + + it.each([ + ['0', 'nthLatest must be a positive number'], + ['-1', 'nthLatest must be a positive number'] + ])('should reject invalid nthLatest value "%s"', async (nthLatest, expectedError) => { + const node = createNode() + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + await expect(tool._call({ key: 'mykey', nthLatest })).rejects.toThrow(expectedError) + }) + + it.each([ + ['', 'Key must be a non-empty string'], + [' ', 'Key must be a non-empty string'] + ])('should handle invalid key for retrieve: "%s"', async (key, expectedError) => { + const node = createNode() + + const nodeData = createNodeData({ operation: 'retrieve' }) + + const tool = await node.init(nodeData, '', {}) + await expect(tool._call({ key })).rejects.toThrow(expectedError) + }) + }) + + describe('Helper Functions', () => { + it.each([ + ['myapp', 'userdata', 'myapp#userdata'], + ['', 'userdata', 'userdata'] + ])('should build full key correctly (prefix: "%s", key: "%s", expected: "%s")', async (keyPrefix, key, expectedFullKey) => { + const node = createNode() + mockSend.mockResolvedValueOnce({}) + const nodeData = createNodeData({ keyPrefix }) + + const tool = await node.init(nodeData, '', {}) + await tool._call({ key, value: 'test' }) + + // Verify the put command was called with the 
correct full key + expect(mockSend).toHaveBeenCalledTimes(1) + expect(PutItemCommandMock).toHaveBeenCalledTimes(1) + + const putCommandInput = PutItemCommandMock.mock.calls[0][0] + expect(putCommandInput.Item.pk.S).toBe(expectedFullKey) + }) + + it.each([ + [{ accessKeyId: 'test-key', secretAccessKey: 'test-secret', sessionToken: 'test-token' }, 'with session token'], + [{ accessKeyId: 'test-key', secretAccessKey: 'test-secret' }, 'without session token'] + ])('should work %s', async (credentials, _description) => { + const node = createNode() + const { getAWSCredentials } = require('../../../src/awsToolsUtils') + + getAWSCredentials.mockResolvedValueOnce(credentials) + mockSend.mockResolvedValueOnce({}) + + const nodeData = createNodeData() + + const tool = await node.init(nodeData, '', {}) + await tool._call({ key: 'test', value: 'value' }) + expect(getAWSCredentials).toHaveBeenCalled() + }) + }) +}) diff --git a/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.ts b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.ts new file mode 100644 index 00000000000..6d11415925b --- /dev/null +++ b/packages/components/nodes/tools/AWSDynamoDBKVStorage/AWSDynamoDBKVStorage.ts @@ -0,0 +1,375 @@ +import { z } from 'zod' +import { StructuredTool } from '@langchain/core/tools' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { AWS_REGIONS, DEFAULT_AWS_REGION, AWSCredentials, getAWSCredentials } from '../../../src/awsToolsUtils' +import { DynamoDBClient, DescribeTableCommand, ListTablesCommand, PutItemCommand, QueryCommand } from '@aws-sdk/client-dynamodb' + +// Operation enum +enum Operation { + STORE = 'store', + RETRIEVE = 'retrieve' +} + +// Constants +const ERROR_PLACEHOLDER = 'error' +const KEY_SEPARATOR = '#' +const MAX_KEY_LENGTH = 2048 // DynamoDB limit for partition key + +// Helper function to create 
DynamoDB client +function createDynamoDBClient(credentials: AWSCredentials, region: string): DynamoDBClient { + return new DynamoDBClient({ + region, + credentials: { + accessKeyId: credentials.accessKeyId, + secretAccessKey: credentials.secretAccessKey, + ...(credentials.sessionToken && { sessionToken: credentials.sessionToken }) + } + }) +} + +// Helper function to build full key with optional prefix +function buildFullKey(key: string, keyPrefix: string): string { + const fullKey = keyPrefix ? `${keyPrefix}${KEY_SEPARATOR}${key}` : key + + // Validate key length (DynamoDB limit) + if (fullKey.length > MAX_KEY_LENGTH) { + throw new Error(`Key too long. Maximum length is ${MAX_KEY_LENGTH} characters, got ${fullKey.length}`) + } + + return fullKey +} + +// Helper function to validate and sanitize input +function validateKey(key: string): void { + if (!key || key.trim().length === 0) { + throw new Error('Key must be a non-empty string') + } +} + +/** + * Tool for storing key-value pairs in DynamoDB with automatic versioning + */ +class DynamoDBStoreTool extends StructuredTool { + name = 'dynamodb_kv_store' + description = 'Store a text value with a key in DynamoDB. Input must be an object with "key" and "value" properties.' 
+ schema = z.object({ + key: z.string().min(1).describe('The key to store the value under'), + value: z.string().describe('The text value to store') + }) + private readonly dynamoClient: DynamoDBClient + private readonly tableName: string + private readonly keyPrefix: string + + constructor(dynamoClient: DynamoDBClient, tableName: string, keyPrefix: string = '') { + super() + this.dynamoClient = dynamoClient + this.tableName = tableName + this.keyPrefix = keyPrefix + } + + async _call({ key, value }: z.infer): Promise { + try { + validateKey(key) + const fullKey = buildFullKey(key, this.keyPrefix) + const timestamp = Date.now() + const isoTimestamp = new Date(timestamp).toISOString() + + const putCommand = new PutItemCommand({ + TableName: this.tableName, + Item: { + pk: { S: fullKey }, + sk: { S: timestamp.toString() }, + value: { S: value }, + timestamp: { S: isoTimestamp } + } + }) + + await this.dynamoClient.send(putCommand) + return `Successfully stored value with key "${key}" at ${isoTimestamp}` + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + throw new Error(`Failed to store value: ${errorMessage}`) + } + } +} + +/** + * Tool for retrieving key-value pairs from DynamoDB with version control + */ +class DynamoDBRetrieveTool extends StructuredTool { + name = 'dynamodb_kv_retrieve' + description = + 'Retrieve a value by key from DynamoDB. Returns JSON with value and timestamp. Specify which version to get (1=latest, 2=2nd latest, etc).' + schema = z.object({ + key: z.string().min(1).describe('The key to retrieve the value for'), + nthLatest: z + .string() + .regex(/^\d+$/, 'Must be a positive number') + .describe( + 'Which version to retrieve: "1" for latest, "2" for 2nd latest, "3" for 3rd latest, etc. Use "1" to get the most recent value.' 
+ ) + .optional() + .default('1') + }) + private readonly dynamoClient: DynamoDBClient + private readonly tableName: string + private readonly keyPrefix: string + + constructor(dynamoClient: DynamoDBClient, tableName: string, keyPrefix: string = '') { + super() + this.dynamoClient = dynamoClient + this.tableName = tableName + this.keyPrefix = keyPrefix + } + + async _call(input: z.infer): Promise { + try { + const { key, nthLatest = '1' } = input + validateKey(key) + const fullKey = buildFullKey(key, this.keyPrefix) + + // Convert string to number and validate + const nthLatestNum = parseInt(nthLatest, 10) + if (isNaN(nthLatestNum) || nthLatestNum < 1) { + throw new Error('nthLatest must be a positive number (1 or greater)') + } + + const queryCommand = new QueryCommand({ + TableName: this.tableName, + KeyConditionExpression: 'pk = :pk', + ExpressionAttributeValues: { + ':pk': { S: fullKey } + }, + ScanIndexForward: false, // Sort descending (newest first) + Limit: nthLatestNum + }) + + const result = await this.dynamoClient.send(queryCommand) + + if (!result.Items || result.Items.length === 0) { + return JSON.stringify({ + value: null, + timestamp: null + }) + } + + if (result.Items.length < nthLatestNum) { + return JSON.stringify({ + value: null, + timestamp: null + }) + } + + const item = result.Items[nthLatestNum - 1] + const value = item.value?.S || null + const timestamp = item.timestamp?.S || item.sk?.S || null + + // Return JSON with value and timestamp + return JSON.stringify({ + value: value, + timestamp: timestamp + }) + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : String(error) + throw new Error(`Failed to retrieve value: ${errorMessage}`) + } + } +} + +/** + * Node implementation for AWS DynamoDB KV Storage tools + */ +class AWSDynamoDBKVStorage_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'AWS DynamoDB KV Storage' + this.name = 'awsDynamoDBKVStorage' + this.version = 1.0 + this.type = 'AWSDynamoDBKVStorage' + this.icon = 'dynamodbkvstorage.svg' + this.category = 'Tools' + this.description = 'Store and retrieve versioned text values in AWS DynamoDB' + this.baseClasses = [this.type, ...getBaseClasses(DynamoDBStoreTool)] + this.credential = { + label: 'AWS Credentials', + name: 'credential', + type: 'credential', + credentialNames: ['awsApi'] + } + this.inputs = [ + { + label: 'AWS Region', + name: 'region', + type: 'options', + options: AWS_REGIONS, + default: DEFAULT_AWS_REGION, + description: 'AWS Region where your DynamoDB tables are located' + }, + { + label: 'DynamoDB Table', + name: 'tableName', + type: 'asyncOptions', + loadMethod: 'listTables', + description: 'Select a DynamoDB table with partition key "pk" and sort key "sk"', + refresh: true + }, + { + label: 'Key Prefix', + name: 'keyPrefix', + type: 'string', + description: 'Optional prefix to add to all keys (e.g., "myapp" would make keys like "myapp#userdata")', + optional: true, + additionalParams: true + }, + { + label: 'Operation', + name: 'operation', + type: 'options', + options: [ + { label: 'Store', name: Operation.STORE }, + { label: 'Retrieve', name: Operation.RETRIEVE } + ], + default: Operation.STORE, + description: 'Choose whether to store or retrieve data' + } + ] + } + + loadMethods: Record Promise> = { + listTables: async (nodeData: INodeData, options?: ICommonObject): Promise => { + try { + const credentials = await 
getAWSCredentials(nodeData, options ?? {}) + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION + const dynamoClient = createDynamoDBClient(credentials, region) + + const listCommand = new ListTablesCommand({}) + const listResponse = await dynamoClient.send(listCommand) + + if (!listResponse.TableNames || listResponse.TableNames.length === 0) { + return [ + { + label: 'No tables found', + name: ERROR_PLACEHOLDER, + description: 'No DynamoDB tables found in this region' + } + ] + } + + const validTables: INodeOptionsValue[] = [] + const invalidTables: string[] = [] + + // Check tables in parallel for better performance + const tableChecks = await Promise.allSettled( + listResponse.TableNames.map(async (tableName) => { + const describeCommand = new DescribeTableCommand({ + TableName: tableName + }) + const describeResponse = await dynamoClient.send(describeCommand) + + const keySchema = describeResponse.Table?.KeySchema + if (keySchema) { + const hasPk = keySchema.some((key) => key.AttributeName === 'pk' && key.KeyType === 'HASH') + const hasSk = keySchema.some((key) => key.AttributeName === 'sk' && key.KeyType === 'RANGE') + + if (hasPk && hasSk) { + return { + valid: true, + table: { + label: tableName, + name: tableName, + description: `Table with pk (partition) and sk (sort) keys` + } + } + } + } + return { valid: false, tableName } + }) + ) + + tableChecks.forEach((result) => { + if (result.status === 'fulfilled') { + if (result.value.valid) { + validTables.push(result.value.table!) + } else if (result.value.tableName) { + invalidTables.push(result.value.tableName) + } + } + }) + + if (validTables.length === 0) { + return [ + { + label: 'No compatible tables found', + name: ERROR_PLACEHOLDER, + description: `No tables with partition key "pk" and sort key "sk" found. ${ + invalidTables.length > 0 ? 
`Found ${invalidTables.length} table(s) with different schema.` : '' + } Please create a table with these keys.` + } + ] + } + + // Sort tables alphabetically + validTables.sort((a, b) => a.label.localeCompare(b.label)) + + return validTables + } catch (error) { + if (error instanceof Error && error.message.includes('AWS Access Key')) { + return [ + { + label: 'AWS Credentials Required', + name: ERROR_PLACEHOLDER, + description: 'Enter AWS Access Key ID and Secret Access Key' + } + ] + } + console.error('Error loading DynamoDB tables:', error) + return [ + { + label: 'Error Loading Tables', + name: ERROR_PLACEHOLDER, + description: `Failed to load tables: ${error instanceof Error ? error.message : String(error)}` + } + ] + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentials = await getAWSCredentials(nodeData, options) + + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION + const tableName = nodeData.inputs?.tableName as string + const keyPrefix = (nodeData.inputs?.keyPrefix as string) || '' + const operation = (nodeData.inputs?.operation as string) || Operation.STORE + + if (!tableName || tableName === ERROR_PLACEHOLDER) { + throw new Error('Valid DynamoDB Table selection is required') + } + + // Validate key prefix doesn't contain separator + if (keyPrefix && keyPrefix.includes(KEY_SEPARATOR)) { + throw new Error(`Key prefix cannot contain "${KEY_SEPARATOR}" character`) + } + + const dynamoClient = createDynamoDBClient(credentials, region) + + if (operation === Operation.STORE) { + return new DynamoDBStoreTool(dynamoClient, tableName, keyPrefix) + } else { + return new DynamoDBRetrieveTool(dynamoClient, tableName, keyPrefix) + } + } +} + +module.exports = { nodeClass: AWSDynamoDBKVStorage_Tools } diff --git a/packages/components/nodes/tools/AWSDynamoDBKVStorage/dynamodbkvstorage.svg b/packages/components/nodes/tools/AWSDynamoDBKVStorage/dynamodbkvstorage.svg new file mode 100644 
index 00000000000..3912d7a8f56 --- /dev/null +++ b/packages/components/nodes/tools/AWSDynamoDBKVStorage/dynamodbkvstorage.svg @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + K:V + \ No newline at end of file diff --git a/packages/components/nodes/tools/AWSSNS/AWSSNS.ts b/packages/components/nodes/tools/AWSSNS/AWSSNS.ts index a25d0464034..8dc09d8ee2f 100644 --- a/packages/components/nodes/tools/AWSSNS/AWSSNS.ts +++ b/packages/components/nodes/tools/AWSSNS/AWSSNS.ts @@ -1,6 +1,7 @@ import { Tool } from '@langchain/core/tools' import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' -import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getBaseClasses } from '../../../src/utils' +import { AWS_REGIONS, DEFAULT_AWS_REGION, getAWSCredentials } from '../../../src/awsToolsUtils' import { SNSClient, ListTopicsCommand, PublishCommand } from '@aws-sdk/client-sns' class AWSSNSTool extends Tool { @@ -62,30 +63,8 @@ class AWSSNS_Tools implements INode { label: 'AWS Region', name: 'region', type: 'options', - options: [ - { label: 'US East (N. Virginia) - us-east-1', name: 'us-east-1' }, - { label: 'US East (Ohio) - us-east-2', name: 'us-east-2' }, - { label: 'US West (N. 
California) - us-west-1', name: 'us-west-1' }, - { label: 'US West (Oregon) - us-west-2', name: 'us-west-2' }, - { label: 'Africa (Cape Town) - af-south-1', name: 'af-south-1' }, - { label: 'Asia Pacific (Hong Kong) - ap-east-1', name: 'ap-east-1' }, - { label: 'Asia Pacific (Mumbai) - ap-south-1', name: 'ap-south-1' }, - { label: 'Asia Pacific (Osaka) - ap-northeast-3', name: 'ap-northeast-3' }, - { label: 'Asia Pacific (Seoul) - ap-northeast-2', name: 'ap-northeast-2' }, - { label: 'Asia Pacific (Singapore) - ap-southeast-1', name: 'ap-southeast-1' }, - { label: 'Asia Pacific (Sydney) - ap-southeast-2', name: 'ap-southeast-2' }, - { label: 'Asia Pacific (Tokyo) - ap-northeast-1', name: 'ap-northeast-1' }, - { label: 'Canada (Central) - ca-central-1', name: 'ca-central-1' }, - { label: 'Europe (Frankfurt) - eu-central-1', name: 'eu-central-1' }, - { label: 'Europe (Ireland) - eu-west-1', name: 'eu-west-1' }, - { label: 'Europe (London) - eu-west-2', name: 'eu-west-2' }, - { label: 'Europe (Milan) - eu-south-1', name: 'eu-south-1' }, - { label: 'Europe (Paris) - eu-west-3', name: 'eu-west-3' }, - { label: 'Europe (Stockholm) - eu-north-1', name: 'eu-north-1' }, - { label: 'Middle East (Bahrain) - me-south-1', name: 'me-south-1' }, - { label: 'South America (São Paulo) - sa-east-1', name: 'sa-east-1' } - ], - default: 'us-east-1', + options: AWS_REGIONS, + default: DEFAULT_AWS_REGION, description: 'AWS Region where your SNS topics are located' }, { @@ -103,32 +82,8 @@ class AWSSNS_Tools implements INode { loadMethods = { listTopics: async (nodeData: INodeData, options?: ICommonObject): Promise => { try { - const credentialData = await getCredentialData(nodeData.credential ?? '', options ?? 
{}) - - const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) - const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) - const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) - - const region = (nodeData.inputs?.region as string) || 'us-east-1' - - if (!accessKeyId || !secretAccessKey) { - return [ - { - label: 'AWS Credentials Required', - name: 'placeholder', - description: 'Enter AWS Access Key ID and Secret Access Key' - } - ] - } - - const credentials: any = { - accessKeyId: accessKeyId, - secretAccessKey: secretAccessKey - } - - if (sessionToken) { - credentials.sessionToken = sessionToken - } + const credentials = await getAWSCredentials(nodeData, options ?? {}) + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION const snsClient = new SNSClient({ region: region, @@ -161,9 +116,9 @@ class AWSSNS_Tools implements INode { console.error('Error loading SNS topics:', error) return [ { - label: 'Error Loading Topics', - name: 'error', - description: `Failed to load topics: ${error}` + label: 'AWS Credentials Required', + name: 'placeholder', + description: 'Enter AWS Access Key ID and Secret Access Key' } ] } @@ -171,32 +126,14 @@ class AWSSNS_Tools implements INode { } async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) - - const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) - const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) - const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) - - const region = (nodeData.inputs?.region as string) || 'us-east-1' + const credentials = await getAWSCredentials(nodeData, options) + const region = (nodeData.inputs?.region as string) || DEFAULT_AWS_REGION const topicArn = nodeData.inputs?.topicArn as string - if (!accessKeyId || !secretAccessKey) { - throw new Error('AWS Access Key ID and Secret Access Key are required') - } - if (!topicArn) { throw new Error('SNS Topic ARN is required') } - const credentials: any = { - accessKeyId: accessKeyId, - secretAccessKey: secretAccessKey - } - - if (sessionToken) { - credentials.sessionToken = sessionToken - } - const snsClient = new SNSClient({ region: region, credentials: credentials diff --git a/packages/components/nodes/tools/AgentAsTool/AgentAsTool.ts b/packages/components/nodes/tools/AgentAsTool/AgentAsTool.ts index cedf338e6d4..8bb9c1595ec 100644 --- a/packages/components/nodes/tools/AgentAsTool/AgentAsTool.ts +++ b/packages/components/nodes/tools/AgentAsTool/AgentAsTool.ts @@ -69,7 +69,8 @@ class AgentAsTool_Tools implements INode { description: 'Override the config passed to the Agentflow.', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Base URL', @@ -362,11 +363,15 @@ try { const sandbox = createCodeExecutionSandbox('', [], {}, additionalSandbox) - const response = await executeJavaScriptCode(code, sandbox, { + let response = await executeJavaScriptCode(code, sandbox, { useSandbox: false, timeout: 10000 }) + if (typeof response === 'object') { + response = JSON.stringify(response) + } + return response } } diff --git a/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts 
b/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts index b9d0d449a6f..b42bf716b13 100644 --- a/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts +++ b/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts @@ -69,7 +69,8 @@ class ChatflowTool_Tools implements INode { description: 'Override the config passed to the Chatflow.', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Base URL', @@ -370,11 +371,15 @@ try { const sandbox = createCodeExecutionSandbox('', [], {}, additionalSandbox) - const response = await executeJavaScriptCode(code, sandbox, { + let response = await executeJavaScriptCode(code, sandbox, { useSandbox: false, timeout: 10000 }) + if (typeof response === 'object') { + response = JSON.stringify(response) + } + return response } } diff --git a/packages/components/nodes/tools/CustomTool/core.ts b/packages/components/nodes/tools/CustomTool/core.ts index f27f36ff661..1301c01b4c3 100644 --- a/packages/components/nodes/tools/CustomTool/core.ts +++ b/packages/components/nodes/tools/CustomTool/core.ts @@ -124,10 +124,14 @@ export class DynamicStructuredTool< const sandbox = createCodeExecutionSandbox('', this.variables || [], flow, additionalSandbox) - const response = await executeJavaScriptCode(this.code, sandbox, { + let response = await executeJavaScriptCode(this.code, sandbox, { timeout: 10000 }) + if (typeof response === 'object') { + response = JSON.stringify(response) + } + return response } diff --git a/packages/components/nodes/tools/Gmail/core.ts b/packages/components/nodes/tools/Gmail/core.ts index 14d242c84e1..00f053c035d 100644 --- a/packages/components/nodes/tools/Gmail/core.ts +++ b/packages/components/nodes/tools/Gmail/core.ts @@ -1,7 +1,7 @@ import { z } from 'zod' import fetch from 'node-fetch' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' -import { TOOL_ARGS_PREFIX } from '../../../src/agents' +import { TOOL_ARGS_PREFIX, 
formatToolError } from '../../../src/agents' export const desc = `Use this when you want to access Gmail API for managing drafts, messages, labels, and threads` @@ -140,7 +140,7 @@ class ListDraftsTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error listing drafts: ${error}` + return formatToolError(`Error listing drafts: ${error}`, params) } } } @@ -176,7 +176,7 @@ class CreateDraftTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', draftData, params) return response } catch (error) { - return `Error creating draft: ${error}` + return formatToolError(`Error creating draft: ${error}`, params) } } } @@ -199,7 +199,7 @@ class GetDraftTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const draftId = params.id || params.draftId + const draftId = params.draftId || params.id if (!draftId) { return 'Error: Draft ID is required' @@ -210,7 +210,7 @@ class GetDraftTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error getting draft: ${error}` + return formatToolError(`Error getting draft: ${error}`, params) } } } @@ -233,7 +233,7 @@ class UpdateDraftTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const draftId = params.id || params.draftId + const draftId = params.draftId || params.id if (!draftId) { return 'Error: Draft ID is required' @@ -251,7 +251,7 @@ class UpdateDraftTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'PUT', draftData, params) return response } catch (error) { - return `Error updating draft: ${error}` + return formatToolError(`Error updating draft: ${error}`, params) } } } @@ -274,7 +274,7 @@ class SendDraftTool extends BaseGmailTool { async _call(arg: any): Promise { 
const params = { ...arg, ...this.defaultParams } - const draftId = params.id || params.draftId + const draftId = params.draftId || params.id if (!draftId) { return 'Error: Draft ID is required' @@ -285,7 +285,7 @@ class SendDraftTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', { id: draftId }, params) return response } catch (error) { - return `Error sending draft: ${error}` + return formatToolError(`Error sending draft: ${error}`, params) } } } @@ -308,7 +308,7 @@ class DeleteDraftTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const draftId = params.id || params.draftId + const draftId = params.draftId || params.id if (!draftId) { return 'Error: Draft ID is required' @@ -319,7 +319,7 @@ class DeleteDraftTool extends BaseGmailTool { await this.makeGmailRequest(url, 'DELETE', undefined, params) return `Draft ${draftId} deleted successfully` } catch (error) { - return `Error deleting draft: ${error}` + return formatToolError(`Error deleting draft: ${error}`, params) } } } @@ -354,7 +354,7 @@ class ListMessagesTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error listing messages: ${error}` + return formatToolError(`Error listing messages: ${error}`, params) } } } @@ -377,7 +377,7 @@ class GetMessageTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const messageId = params.id || params.messageId + const messageId = params.messageId || params.id if (!messageId) { return 'Error: Message ID is required' @@ -388,7 +388,7 @@ class GetMessageTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error getting message: ${error}` + return formatToolError(`Error getting message: ${error}`, params) } } } @@ -422,7 +422,7 
@@ class SendMessageTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', messageData, params) return response } catch (error) { - return `Error sending message: ${error}` + return formatToolError(`Error sending message: ${error}`, params) } } } @@ -445,7 +445,7 @@ class ModifyMessageTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const messageId = params.id || params.messageId + const messageId = params.messageId || params.id if (!messageId) { return 'Error: Message ID is required' @@ -464,7 +464,7 @@ class ModifyMessageTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', modifyData, params) return response } catch (error) { - return `Error modifying message: ${error}` + return formatToolError(`Error modifying message: ${error}`, params) } } } @@ -487,7 +487,7 @@ class TrashMessageTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const messageId = params.id || params.messageId + const messageId = params.messageId || params.id if (!messageId) { return 'Error: Message ID is required' @@ -498,7 +498,7 @@ class TrashMessageTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', undefined, params) return response } catch (error) { - return `Error moving message to trash: ${error}` + return formatToolError(`Error moving message to trash: ${error}`, params) } } } @@ -521,7 +521,7 @@ class UntrashMessageTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const messageId = params.id || params.messageId + const messageId = params.messageId || params.id if (!messageId) { return 'Error: Message ID is required' @@ -532,7 +532,7 @@ class UntrashMessageTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', undefined, params) return response } catch (error) { - 
return `Error removing message from trash: ${error}` + return formatToolError(`Error removing message from trash: ${error}`, params) } } } @@ -555,7 +555,7 @@ class DeleteMessageTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const messageId = params.id || params.messageId + const messageId = params.messageId || params.id if (!messageId) { return 'Error: Message ID is required' @@ -566,7 +566,7 @@ class DeleteMessageTool extends BaseGmailTool { await this.makeGmailRequest(url, 'DELETE', undefined, params) return `Message ${messageId} deleted successfully` } catch (error) { - return `Error deleting message: ${error}` + return formatToolError(`Error deleting message: ${error}`, params) } } } @@ -594,7 +594,7 @@ class ListLabelsTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, {}) return response } catch (error) { - return `Error listing labels: ${error}` + return formatToolError(`Error listing labels: ${error}`, {}) } } } @@ -617,7 +617,7 @@ class GetLabelTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const labelId = params.id || params.labelId + const labelId = params.labelId || params.id if (!labelId) { return 'Error: Label ID is required' @@ -628,7 +628,7 @@ class GetLabelTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error getting label: ${error}` + return formatToolError(`Error getting label: ${error}`, params) } } } @@ -673,7 +673,7 @@ class CreateLabelTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', labelData, params) return response } catch (error) { - return `Error creating label: ${error}` + return formatToolError(`Error creating label: ${error}`, params) } } } @@ -696,7 +696,7 @@ class UpdateLabelTool extends BaseGmailTool { async _call(arg: 
any): Promise { const params = { ...arg, ...this.defaultParams } - const labelId = params.labelId + const labelId = params.labelId || params.id if (!labelId) { return 'Error: Label ID is required' @@ -717,7 +717,7 @@ class UpdateLabelTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'PUT', labelData, params) return response } catch (error) { - return `Error updating label: ${error}` + return formatToolError(`Error updating label: ${error}`, params) } } } @@ -740,7 +740,7 @@ class DeleteLabelTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const labelId = params.id || params.labelId + const labelId = params.labelId || params.id if (!labelId) { return 'Error: Label ID is required' @@ -751,7 +751,7 @@ class DeleteLabelTool extends BaseGmailTool { await this.makeGmailRequest(url, 'DELETE', undefined, params) return `Label ${labelId} deleted successfully` } catch (error) { - return `Error deleting label: ${error}` + return formatToolError(`Error deleting label: ${error}`, params) } } } @@ -786,7 +786,7 @@ class ListThreadsTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error listing threads: ${error}` + return formatToolError(`Error listing threads: ${error}`, params) } } } @@ -809,7 +809,7 @@ class GetThreadTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const threadId = params.id || params.threadId + const threadId = params.threadId || params.id if (!threadId) { return 'Error: Thread ID is required' @@ -820,7 +820,7 @@ class GetThreadTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error getting thread: ${error}` + return formatToolError(`Error getting thread: ${error}`, params) } } } @@ -843,7 +843,7 @@ class 
ModifyThreadTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const threadId = params.id || params.threadId + const threadId = params.threadId || params.id if (!threadId) { return 'Error: Thread ID is required' @@ -862,7 +862,7 @@ class ModifyThreadTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', modifyData, params) return response } catch (error) { - return `Error modifying thread: ${error}` + return formatToolError(`Error modifying thread: ${error}`, params) } } } @@ -885,7 +885,7 @@ class TrashThreadTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const threadId = params.id || params.threadId + const threadId = params.threadId || params.id if (!threadId) { return 'Error: Thread ID is required' @@ -896,7 +896,7 @@ class TrashThreadTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', undefined, params) return response } catch (error) { - return `Error moving thread to trash: ${error}` + return formatToolError(`Error moving thread to trash: ${error}`, params) } } } @@ -919,7 +919,7 @@ class UntrashThreadTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const threadId = params.id || params.threadId + const threadId = params.threadId || params.id if (!threadId) { return 'Error: Thread ID is required' @@ -930,7 +930,7 @@ class UntrashThreadTool extends BaseGmailTool { const response = await this.makeGmailRequest(url, 'POST', undefined, params) return response } catch (error) { - return `Error removing thread from trash: ${error}` + return formatToolError(`Error removing thread from trash: ${error}`, params) } } } @@ -953,7 +953,7 @@ class DeleteThreadTool extends BaseGmailTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const threadId = params.id || params.threadId + 
const threadId = params.threadId || params.id if (!threadId) { return 'Error: Thread ID is required' @@ -964,7 +964,7 @@ class DeleteThreadTool extends BaseGmailTool { await this.makeGmailRequest(url, 'DELETE', undefined, params) return `Thread ${threadId} deleted successfully` } catch (error) { - return `Error deleting thread: ${error}` + return formatToolError(`Error deleting thread: ${error}`, params) } } } @@ -977,222 +977,102 @@ export const createGmailTools = (args?: RequestParameters): DynamicStructuredToo // Draft tools if (actions.includes('listDrafts')) { - tools.push( - new ListDraftsTool({ - accessToken, - defaultParams: defaultParams.listDrafts - }) - ) + tools.push(new ListDraftsTool({ accessToken, defaultParams })) } if (actions.includes('createDraft')) { - tools.push( - new CreateDraftTool({ - accessToken, - defaultParams: defaultParams.createDraft - }) - ) + tools.push(new CreateDraftTool({ accessToken, defaultParams })) } if (actions.includes('getDraft')) { - tools.push( - new GetDraftTool({ - accessToken, - defaultParams: defaultParams.getDraft - }) - ) + tools.push(new GetDraftTool({ accessToken, defaultParams })) } if (actions.includes('updateDraft')) { - tools.push( - new UpdateDraftTool({ - accessToken, - defaultParams: defaultParams.updateDraft - }) - ) + tools.push(new UpdateDraftTool({ accessToken, defaultParams })) } if (actions.includes('sendDraft')) { - tools.push( - new SendDraftTool({ - accessToken, - defaultParams: defaultParams.sendDraft - }) - ) + tools.push(new SendDraftTool({ accessToken, defaultParams })) } if (actions.includes('deleteDraft')) { - tools.push( - new DeleteDraftTool({ - accessToken, - defaultParams: defaultParams.deleteDraft - }) - ) + tools.push(new DeleteDraftTool({ accessToken, defaultParams })) } // Message tools if (actions.includes('listMessages')) { - tools.push( - new ListMessagesTool({ - accessToken, - defaultParams: defaultParams.listMessages - }) - ) + tools.push(new ListMessagesTool({ accessToken, 
defaultParams })) } if (actions.includes('getMessage')) { - tools.push( - new GetMessageTool({ - accessToken, - defaultParams: defaultParams.getMessage - }) - ) + tools.push(new GetMessageTool({ accessToken, defaultParams })) } if (actions.includes('sendMessage')) { - tools.push( - new SendMessageTool({ - accessToken, - defaultParams: defaultParams.sendMessage - }) - ) + tools.push(new SendMessageTool({ accessToken, defaultParams })) } if (actions.includes('modifyMessage')) { - tools.push( - new ModifyMessageTool({ - accessToken, - defaultParams: defaultParams.modifyMessage - }) - ) + tools.push(new ModifyMessageTool({ accessToken, defaultParams })) } if (actions.includes('trashMessage')) { - tools.push( - new TrashMessageTool({ - accessToken, - defaultParams: defaultParams.trashMessage - }) - ) + tools.push(new TrashMessageTool({ accessToken, defaultParams })) } if (actions.includes('untrashMessage')) { - tools.push( - new UntrashMessageTool({ - accessToken, - defaultParams: defaultParams.untrashMessage - }) - ) + tools.push(new UntrashMessageTool({ accessToken, defaultParams })) } if (actions.includes('deleteMessage')) { - tools.push( - new DeleteMessageTool({ - accessToken, - defaultParams: defaultParams.deleteMessage - }) - ) + tools.push(new DeleteMessageTool({ accessToken, defaultParams })) } // Label tools if (actions.includes('listLabels')) { - tools.push( - new ListLabelsTool({ - accessToken, - defaultParams: defaultParams.listLabels - }) - ) + tools.push(new ListLabelsTool({ accessToken, defaultParams })) } if (actions.includes('getLabel')) { - tools.push( - new GetLabelTool({ - accessToken, - defaultParams: defaultParams.getLabel - }) - ) + tools.push(new GetLabelTool({ accessToken, defaultParams })) } if (actions.includes('createLabel')) { - tools.push( - new CreateLabelTool({ - accessToken, - defaultParams: defaultParams.createLabel - }) - ) + tools.push(new CreateLabelTool({ accessToken, defaultParams })) } if (actions.includes('updateLabel')) { - 
tools.push( - new UpdateLabelTool({ - accessToken, - defaultParams: defaultParams.updateLabel - }) - ) + tools.push(new UpdateLabelTool({ accessToken, defaultParams })) } if (actions.includes('deleteLabel')) { - tools.push( - new DeleteLabelTool({ - accessToken, - defaultParams: defaultParams.deleteLabel - }) - ) + tools.push(new DeleteLabelTool({ accessToken, defaultParams })) } // Thread tools if (actions.includes('listThreads')) { - tools.push( - new ListThreadsTool({ - accessToken, - defaultParams: defaultParams.listThreads - }) - ) + tools.push(new ListThreadsTool({ accessToken, defaultParams })) } if (actions.includes('getThread')) { - tools.push( - new GetThreadTool({ - accessToken, - defaultParams: defaultParams.getThread - }) - ) + tools.push(new GetThreadTool({ accessToken, defaultParams })) } if (actions.includes('modifyThread')) { - tools.push( - new ModifyThreadTool({ - accessToken, - defaultParams: defaultParams.modifyThread - }) - ) + tools.push(new ModifyThreadTool({ accessToken, defaultParams })) } if (actions.includes('trashThread')) { - tools.push( - new TrashThreadTool({ - accessToken, - defaultParams: defaultParams.trashThread - }) - ) + tools.push(new TrashThreadTool({ accessToken, defaultParams })) } if (actions.includes('untrashThread')) { - tools.push( - new UntrashThreadTool({ - accessToken, - defaultParams: defaultParams.untrashThread - }) - ) + tools.push(new UntrashThreadTool({ accessToken, defaultParams })) } if (actions.includes('deleteThread')) { - tools.push( - new DeleteThreadTool({ - accessToken, - defaultParams: defaultParams.deleteThread - }) - ) + tools.push(new DeleteThreadTool({ accessToken, defaultParams })) } return tools diff --git a/packages/components/nodes/tools/GoogleCalendar/core.ts b/packages/components/nodes/tools/GoogleCalendar/core.ts index 1c89c9c8819..b613d0d1f44 100644 --- a/packages/components/nodes/tools/GoogleCalendar/core.ts +++ b/packages/components/nodes/tools/GoogleCalendar/core.ts @@ -1,7 +1,7 @@ import 
{ z } from 'zod' import fetch from 'node-fetch' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' -import { TOOL_ARGS_PREFIX } from '../../../src/agents' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' export const desc = `Use this when you want to access Google Calendar API for managing events and calendars` @@ -208,7 +208,7 @@ class ListEventsTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, params }) return response } catch (error) { - return `Error listing events: ${error}` + return formatToolError(`Error listing events: ${error}`, params) } } } @@ -291,7 +291,7 @@ class CreateEventTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: eventData, params }) return response } catch (error) { - return `Error creating event: ${error}` + return formatToolError(`Error creating event: ${error}`, params) } } } @@ -323,7 +323,7 @@ class GetEventTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, params }) return response } catch (error) { - return `Error getting event: ${error}` + return formatToolError(`Error getting event: ${error}`, params) } } } @@ -400,7 +400,7 @@ class UpdateEventTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'PUT', body: updateData, params }) return response } catch (error) { - return `Error updating event: ${error}` + return formatToolError(`Error updating event: ${error}`, params) } } } @@ -432,7 +432,7 @@ class DeleteEventTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'DELETE', params }) return response || 'Event deleted successfully' } catch (error) { - return `Error deleting event: ${error}` + return formatToolError(`Error deleting event: ${error}`, params) } } } @@ -467,7 +467,7 @@ class 
QuickAddEventTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', params }) return response } catch (error) { - return `Error quick adding event: ${error}` + return formatToolError(`Error quick adding event: ${error}`, params) } } } @@ -505,7 +505,7 @@ class ListCalendarsTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, params }) return response } catch (error) { - return `Error listing calendars: ${error}` + return formatToolError(`Error listing calendars: ${error}`, params) } } } @@ -545,7 +545,7 @@ class CreateCalendarTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: calendarData, params }) return response } catch (error) { - return `Error creating calendar: ${error}` + return formatToolError(`Error creating calendar: ${error}`, params) } } } @@ -577,7 +577,7 @@ class GetCalendarTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, params }) return response } catch (error) { - return `Error getting calendar: ${error}` + return formatToolError(`Error getting calendar: ${error}`, params) } } } @@ -616,7 +616,7 @@ class UpdateCalendarTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'PUT', body: updateData, params }) return response } catch (error) { - return `Error updating calendar: ${error}` + return formatToolError(`Error updating calendar: ${error}`, params) } } } @@ -648,7 +648,7 @@ class DeleteCalendarTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'DELETE', params }) return response || 'Calendar deleted successfully' } catch (error) { - return `Error deleting calendar: ${error}` + return formatToolError(`Error deleting calendar: ${error}`, params) } } } @@ -680,7 +680,7 @@ class 
ClearCalendarTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', params }) return response || 'Calendar cleared successfully' } catch (error) { - return `Error clearing calendar: ${error}` + return formatToolError(`Error clearing calendar: ${error}`, params) } } } @@ -729,7 +729,7 @@ class QueryFreebusyTool extends BaseGoogleCalendarTool { const response = await this.makeGoogleCalendarRequest({ endpoint, method: 'POST', body: freebusyData, params }) return response } catch (error) { - return `Error querying freebusy: ${error}` + return formatToolError(`Error querying freebusy: ${error}`, params) } } } @@ -742,122 +742,57 @@ export const createGoogleCalendarTools = (args?: RequestParameters): DynamicStru // Event tools if (actions.includes('listEvents')) { - tools.push( - new ListEventsTool({ - accessToken, - defaultParams: defaultParams.listEvents - }) - ) + tools.push(new ListEventsTool({ accessToken, defaultParams })) } if (actions.includes('createEvent')) { - tools.push( - new CreateEventTool({ - accessToken, - defaultParams: defaultParams.createEvent - }) - ) + tools.push(new CreateEventTool({ accessToken, defaultParams })) } if (actions.includes('getEvent')) { - tools.push( - new GetEventTool({ - accessToken, - defaultParams: defaultParams.getEvent - }) - ) + tools.push(new GetEventTool({ accessToken, defaultParams })) } if (actions.includes('updateEvent')) { - tools.push( - new UpdateEventTool({ - accessToken, - defaultParams: defaultParams.updateEvent - }) - ) + tools.push(new UpdateEventTool({ accessToken, defaultParams })) } if (actions.includes('deleteEvent')) { - tools.push( - new DeleteEventTool({ - accessToken, - defaultParams: defaultParams.deleteEvent - }) - ) + tools.push(new DeleteEventTool({ accessToken, defaultParams })) } if (actions.includes('quickAddEvent')) { - tools.push( - new QuickAddEventTool({ - accessToken, - defaultParams: defaultParams.quickAddEvent - }) - ) + 
tools.push(new QuickAddEventTool({ accessToken, defaultParams })) } // Calendar tools if (actions.includes('listCalendars')) { - tools.push( - new ListCalendarsTool({ - accessToken, - defaultParams: defaultParams.listCalendars - }) - ) + tools.push(new ListCalendarsTool({ accessToken, defaultParams })) } if (actions.includes('createCalendar')) { - tools.push( - new CreateCalendarTool({ - accessToken, - defaultParams: defaultParams.createCalendar - }) - ) + tools.push(new CreateCalendarTool({ accessToken, defaultParams })) } if (actions.includes('getCalendar')) { - tools.push( - new GetCalendarTool({ - accessToken, - defaultParams: defaultParams.getCalendar - }) - ) + tools.push(new GetCalendarTool({ accessToken, defaultParams })) } if (actions.includes('updateCalendar')) { - tools.push( - new UpdateCalendarTool({ - accessToken, - defaultParams: defaultParams.updateCalendar - }) - ) + tools.push(new UpdateCalendarTool({ accessToken, defaultParams })) } if (actions.includes('deleteCalendar')) { - tools.push( - new DeleteCalendarTool({ - accessToken, - defaultParams: defaultParams.deleteCalendar - }) - ) + tools.push(new DeleteCalendarTool({ accessToken, defaultParams })) } if (actions.includes('clearCalendar')) { - tools.push( - new ClearCalendarTool({ - accessToken, - defaultParams: defaultParams.clearCalendar - }) - ) + tools.push(new ClearCalendarTool({ accessToken, defaultParams })) } // Freebusy tools if (actions.includes('queryFreebusy')) { - tools.push( - new QueryFreebusyTool({ - accessToken, - defaultParams: defaultParams.queryFreebusy - }) - ) + tools.push(new QueryFreebusyTool({ accessToken, defaultParams })) } return tools diff --git a/packages/components/nodes/tools/GoogleDocs/core.ts b/packages/components/nodes/tools/GoogleDocs/core.ts index 2b43114f648..51cfa6f8f4f 100644 --- a/packages/components/nodes/tools/GoogleDocs/core.ts +++ b/packages/components/nodes/tools/GoogleDocs/core.ts @@ -1,7 +1,7 @@ import { z } from 'zod' import fetch from 
'node-fetch' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' -import { TOOL_ARGS_PREFIX } from '../../../src/agents' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' export const desc = `Use this when you want to access Google Docs API for managing documents` @@ -256,7 +256,7 @@ class CreateDocumentTool extends BaseGoogleDocsTool { return createResponse } catch (error) { - return `Error creating document: ${error}` + return formatToolError(`Error creating document: ${error}`, params) } } } @@ -288,7 +288,7 @@ class GetDocumentTool extends BaseGoogleDocsTool { const response = await this.makeGoogleDocsRequest({ endpoint, params }) return response } catch (error) { - return `Error getting document: ${error}` + return formatToolError(`Error getting document: ${error}`, params) } } } @@ -381,7 +381,7 @@ class UpdateDocumentTool extends BaseGoogleDocsTool { return `No updates specified` + TOOL_ARGS_PREFIX + JSON.stringify(params) } } catch (error) { - return `Error updating document: ${error}` + return formatToolError(`Error updating document: ${error}`, params) } } } @@ -429,7 +429,7 @@ class InsertTextTool extends BaseGoogleDocsTool { }) return response } catch (error) { - return `Error inserting text: ${error}` + return formatToolError(`Error inserting text: ${error}`, params) } } } @@ -478,7 +478,7 @@ class ReplaceTextTool extends BaseGoogleDocsTool { }) return response } catch (error) { - return `Error replacing text: ${error}` + return formatToolError(`Error replacing text: ${error}`, params) } } } @@ -534,7 +534,7 @@ class AppendTextTool extends BaseGoogleDocsTool { }) return response } catch (error) { - return `Error appending text: ${error}` + return formatToolError(`Error appending text: ${error}`, params) } } } @@ -583,7 +583,7 @@ class GetTextContentTool extends BaseGoogleDocsTool { return JSON.stringify({ textContent }) + TOOL_ARGS_PREFIX + JSON.stringify(params) } catch (error) { - return `Error getting text content: 
${error}` + return formatToolError(`Error getting text content: ${error}`, params) } } } @@ -631,7 +631,7 @@ class InsertImageTool extends BaseGoogleDocsTool { }) return response } catch (error) { - return `Error inserting image: ${error}` + return formatToolError(`Error inserting image: ${error}`, params) } } } @@ -680,7 +680,7 @@ class CreateTableTool extends BaseGoogleDocsTool { }) return response } catch (error) { - return `Error creating table: ${error}` + return formatToolError(`Error creating table: ${error}`, params) } } } diff --git a/packages/components/nodes/tools/GoogleDrive/core.ts b/packages/components/nodes/tools/GoogleDrive/core.ts index 62377f5dc02..e67cc6ae7ea 100644 --- a/packages/components/nodes/tools/GoogleDrive/core.ts +++ b/packages/components/nodes/tools/GoogleDrive/core.ts @@ -1,7 +1,7 @@ import { z } from 'zod' import fetch from 'node-fetch' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' -import { TOOL_ARGS_PREFIX } from '../../../src/agents' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' export const desc = `Use this when you want to access Google Drive API for managing files and folders` @@ -202,7 +202,7 @@ class ListFilesTool extends BaseGoogleDriveTool { const response = await this.makeGoogleDriveRequest({ endpoint, params }) return response } catch (error) { - return `Error listing files: ${error}` + return formatToolError(`Error listing files: ${error}`, params) } } } @@ -240,7 +240,7 @@ class GetFileTool extends BaseGoogleDriveTool { const response = await this.makeGoogleDriveRequest({ endpoint, params }) return response } catch (error) { - return `Error getting file: ${error}` + return formatToolError(`Error getting file: ${error}`, params) } } } @@ -323,7 +323,7 @@ class CreateFileTool extends BaseGoogleDriveTool { } } } catch (error) { - return `Error creating file: ${error}` + return formatToolError(`Error creating file: ${error}`, params) } } @@ -452,7 +452,7 @@ class UpdateFileTool 
extends BaseGoogleDriveTool { }) return response } catch (error) { - return `Error updating file: ${error}` + return formatToolError(`Error updating file: ${error}`, params) } } } @@ -492,7 +492,7 @@ class DeleteFileTool extends BaseGoogleDriveTool { }) return `File deleted successfully` } catch (error) { - return `Error deleting file: ${error}` + return formatToolError(`Error deleting file: ${error}`, params) } } } @@ -541,7 +541,7 @@ class CopyFileTool extends BaseGoogleDriveTool { }) return response } catch (error) { - return `Error copying file: ${error}` + return formatToolError(`Error copying file: ${error}`, params) } } } @@ -579,7 +579,7 @@ class DownloadFileTool extends BaseGoogleDriveTool { const response = await this.makeGoogleDriveRequest({ endpoint, params }) return response } catch (error) { - return `Error downloading file: ${error}` + return formatToolError(`Error downloading file: ${error}`, params) } } } @@ -630,7 +630,7 @@ class CreateFolderTool extends BaseGoogleDriveTool { }) return response } catch (error) { - return `Error creating folder: ${error}` + return formatToolError(`Error creating folder: ${error}`, params) } } } @@ -671,7 +671,7 @@ class SearchFilesTool extends BaseGoogleDriveTool { const response = await this.makeGoogleDriveRequest({ endpoint, params }) return response } catch (error) { - return `Error searching files: ${error}` + return formatToolError(`Error searching files: ${error}`, params) } } } @@ -724,7 +724,7 @@ class ShareFileTool extends BaseGoogleDriveTool { }) return response } catch (error) { - return `Error sharing file: ${error}` + return formatToolError(`Error sharing file: ${error}`, params) } } } @@ -774,7 +774,7 @@ class ListFolderContentsTool extends BaseGoogleDriveTool { const response = await this.makeGoogleDriveRequest({ endpoint, params }) return response } catch (error) { - return `Error listing folder contents: ${error}` + return formatToolError(`Error listing folder contents: ${error}`, params) } } } @@ 
-820,7 +820,7 @@ class DeleteFolderTool extends BaseGoogleDriveTool { }) return `Folder deleted successfully` } catch (error) { - return `Error deleting folder: ${error}` + return formatToolError(`Error deleting folder: ${error}`, params) } } } @@ -862,7 +862,7 @@ class GetPermissionsTool extends BaseGoogleDriveTool { const response = await this.makeGoogleDriveRequest({ endpoint, params }) return response } catch (error) { - return `Error getting permissions: ${error}` + return formatToolError(`Error getting permissions: ${error}`, params) } } } @@ -911,7 +911,7 @@ class RemovePermissionTool extends BaseGoogleDriveTool { }) return `Permission removed successfully` } catch (error) { - return `Error removing permission: ${error}` + return formatToolError(`Error removing permission: ${error}`, params) } } } diff --git a/packages/components/nodes/tools/GoogleSheets/core.ts b/packages/components/nodes/tools/GoogleSheets/core.ts index 8b6359844b7..ad64ce49855 100644 --- a/packages/components/nodes/tools/GoogleSheets/core.ts +++ b/packages/components/nodes/tools/GoogleSheets/core.ts @@ -1,7 +1,7 @@ import { z } from 'zod' import fetch from 'node-fetch' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' -import { TOOL_ARGS_PREFIX } from '../../../src/agents' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' export const desc = `Use this when you want to access Google Sheets API for managing spreadsheets and values` @@ -183,33 +183,37 @@ class CreateSpreadsheetTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const body: any = { - properties: { - title: params.title + try { + const body: any = { + properties: { + title: params.title + } } - } - - if (params.locale) body.properties.locale = params.locale - if (params.timeZone) body.properties.timeZone = params.timeZone - // Add sheets if specified - if (params.sheetCount && params.sheetCount > 1) { - body.sheets = [] - 
for (let i = 0; i < params.sheetCount; i++) { - body.sheets.push({ - properties: { - title: i === 0 ? 'Sheet1' : `Sheet${i + 1}` - } - }) + if (params.locale) body.properties.locale = params.locale + if (params.timeZone) body.properties.timeZone = params.timeZone + + // Add sheets if specified + if (params.sheetCount && params.sheetCount > 1) { + body.sheets = [] + for (let i = 0; i < params.sheetCount; i++) { + body.sheets.push({ + properties: { + title: i === 0 ? 'Sheet1' : `Sheet${i + 1}` + } + }) + } } - } - return await this.makeGoogleSheetsRequest({ - endpoint: 'spreadsheets', - method: 'POST', - body, - params - }) + return await this.makeGoogleSheetsRequest({ + endpoint: 'spreadsheets', + method: 'POST', + body, + params + }) + } catch (error) { + return formatToolError(`Error creating spreadsheet: ${error}`, params) + } } } @@ -234,23 +238,28 @@ class GetSpreadsheetTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const queryParams = new URLSearchParams() - if (params.ranges) { - params.ranges.split(',').forEach((range: string) => { - queryParams.append('ranges', range.trim()) - }) - } - if (params.includeGridData) queryParams.append('includeGridData', 'true') + try { + const queryParams = new URLSearchParams() + + if (params.ranges) { + params.ranges.split(',').forEach((range: string) => { + queryParams.append('ranges', range.trim()) + }) + } + if (params.includeGridData) queryParams.append('includeGridData', 'true') - const queryString = queryParams.toString() - const endpoint = `spreadsheets/${params.spreadsheetId}${queryString ? `?${queryString}` : ''}` + const queryString = queryParams.toString() + const endpoint = `spreadsheets/${params.spreadsheetId}${queryString ? 
`?${queryString}` : ''}` - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'GET', - params - }) + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'GET', + params + }) + } catch (error) { + return formatToolError(`Error getting spreadsheet: ${error}`, params) + } } } @@ -276,29 +285,33 @@ class UpdateSpreadsheetTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const requests = [] - if (params.title || params.locale || params.timeZone) { - const updateProperties: any = {} - if (params.title) updateProperties.title = params.title - if (params.locale) updateProperties.locale = params.locale - if (params.timeZone) updateProperties.timeZone = params.timeZone - - requests.push({ - updateSpreadsheetProperties: { - properties: updateProperties, - fields: Object.keys(updateProperties).join(',') - } - }) - } + try { + const requests = [] + if (params.title || params.locale || params.timeZone) { + const updateProperties: any = {} + if (params.title) updateProperties.title = params.title + if (params.locale) updateProperties.locale = params.locale + if (params.timeZone) updateProperties.timeZone = params.timeZone + + requests.push({ + updateSpreadsheetProperties: { + properties: updateProperties, + fields: Object.keys(updateProperties).join(',') + } + }) + } - const body = { requests } + const body = { requests } - return await this.makeGoogleSheetsRequest({ - endpoint: `spreadsheets/${params.spreadsheetId}:batchUpdate`, - method: 'POST', - body, - params - }) + return await this.makeGoogleSheetsRequest({ + endpoint: `spreadsheets/${params.spreadsheetId}:batchUpdate`, + method: 'POST', + body, + params + }) + } catch (error) { + return formatToolError(`Error updating spreadsheet: ${error}`, params) + } } } @@ -324,21 +337,26 @@ class GetValuesTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const 
queryParams = new URLSearchParams() - if (params.valueRenderOption) queryParams.append('valueRenderOption', params.valueRenderOption) - if (params.dateTimeRenderOption) queryParams.append('dateTimeRenderOption', params.dateTimeRenderOption) - if (params.majorDimension) queryParams.append('majorDimension', params.majorDimension) + try { + const queryParams = new URLSearchParams() - const queryString = queryParams.toString() - const encodedRange = encodeURIComponent(params.range) - const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}${queryString ? `?${queryString}` : ''}` + if (params.valueRenderOption) queryParams.append('valueRenderOption', params.valueRenderOption) + if (params.dateTimeRenderOption) queryParams.append('dateTimeRenderOption', params.dateTimeRenderOption) + if (params.majorDimension) queryParams.append('majorDimension', params.majorDimension) - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'GET', - params - }) + const queryString = queryParams.toString() + const encodedRange = encodeURIComponent(params.range) + const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}${queryString ? 
`?${queryString}` : ''}` + + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'GET', + params + }) + } catch (error) { + return formatToolError(`Error getting values: ${error}`, params) + } } } @@ -364,30 +382,34 @@ class UpdateValuesTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - let values try { - values = JSON.parse(params.values) - } catch (error) { - throw new Error('Values must be a valid JSON array') - } + let values + try { + values = JSON.parse(params.values) + } catch (error) { + throw new Error('Values must be a valid JSON array') + } - const body = { - values, - majorDimension: params.majorDimension || 'ROWS' - } + const body = { + values, + majorDimension: params.majorDimension || 'ROWS' + } - const queryParams = new URLSearchParams() - queryParams.append('valueInputOption', params.valueInputOption || 'USER_ENTERED') + const queryParams = new URLSearchParams() + queryParams.append('valueInputOption', params.valueInputOption || 'USER_ENTERED') - const encodedRange = encodeURIComponent(params.range) - const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}?${queryParams.toString()}` + const encodedRange = encodeURIComponent(params.range) + const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}?${queryParams.toString()}` - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'PUT', - body, - params - }) + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'PUT', + body, + params + }) + } catch (error) { + return formatToolError(`Error updating values: ${error}`, params) + } } } @@ -413,31 +435,35 @@ class AppendValuesTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - let values try { - values = JSON.parse(params.values) - } catch (error) { - throw new Error('Values must be a valid JSON array') - } + let values + try { + values = 
JSON.parse(params.values) + } catch (error) { + throw new Error('Values must be a valid JSON array') + } - const body = { - values, - majorDimension: params.majorDimension || 'ROWS' - } + const body = { + values, + majorDimension: params.majorDimension || 'ROWS' + } - const queryParams = new URLSearchParams() - queryParams.append('valueInputOption', params.valueInputOption || 'USER_ENTERED') - queryParams.append('insertDataOption', params.insertDataOption || 'OVERWRITE') + const queryParams = new URLSearchParams() + queryParams.append('valueInputOption', params.valueInputOption || 'USER_ENTERED') + queryParams.append('insertDataOption', params.insertDataOption || 'OVERWRITE') - const encodedRange = encodeURIComponent(params.range) - const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}:append?${queryParams.toString()}` + const encodedRange = encodeURIComponent(params.range) + const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}:append?${queryParams.toString()}` - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'POST', - body, - params - }) + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'POST', + body, + params + }) + } catch (error) { + return formatToolError(`Error appending values: ${error}`, params) + } } } @@ -463,15 +489,19 @@ class ClearValuesTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const encodedRange = encodeURIComponent(params.range) - const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}:clear` - - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'POST', - body: {}, - params - }) + try { + const encodedRange = encodeURIComponent(params.range) + const endpoint = `spreadsheets/${params.spreadsheetId}/values/${encodedRange}:clear` + + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'POST', + body: {}, + params + }) + } catch (error) { + 
return formatToolError(`Error clearing values: ${error}`, params) + } } } @@ -496,24 +526,29 @@ class BatchGetValuesTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const queryParams = new URLSearchParams() - // Add ranges - params.ranges.split(',').forEach((range: string) => { - queryParams.append('ranges', range.trim()) - }) + try { + const queryParams = new URLSearchParams() - if (params.valueRenderOption) queryParams.append('valueRenderOption', params.valueRenderOption) - if (params.dateTimeRenderOption) queryParams.append('dateTimeRenderOption', params.dateTimeRenderOption) - if (params.majorDimension) queryParams.append('majorDimension', params.majorDimension) + // Add ranges + params.ranges.split(',').forEach((range: string) => { + queryParams.append('ranges', range.trim()) + }) - const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchGet?${queryParams.toString()}` + if (params.valueRenderOption) queryParams.append('valueRenderOption', params.valueRenderOption) + if (params.dateTimeRenderOption) queryParams.append('dateTimeRenderOption', params.dateTimeRenderOption) + if (params.majorDimension) queryParams.append('majorDimension', params.majorDimension) - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'GET', - params - }) + const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchGet?${queryParams.toString()}` + + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'GET', + params + }) + } catch (error) { + return formatToolError(`Error batch getting values: ${error}`, params) + } } } @@ -539,27 +574,31 @@ class BatchUpdateValuesTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - let valueRanges try { - valueRanges = JSON.parse(params.values) - } catch (error) { - throw new Error('Values must be a valid JSON array of value ranges') - } + let valueRanges + try { + 
valueRanges = JSON.parse(params.values) + } catch (error) { + throw new Error('Values must be a valid JSON array of value ranges') + } - const body = { - valueInputOption: params.valueInputOption || 'USER_ENTERED', - data: valueRanges, - includeValuesInResponse: params.includeValuesInResponse || false - } + const body = { + valueInputOption: params.valueInputOption || 'USER_ENTERED', + data: valueRanges, + includeValuesInResponse: params.includeValuesInResponse || false + } - const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchUpdate` + const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchUpdate` - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'POST', - body, - params - }) + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'POST', + body, + params + }) + } catch (error) { + return formatToolError(`Error batch updating values: ${error}`, params) + } } } @@ -585,17 +624,21 @@ class BatchClearValuesTool extends BaseGoogleSheetsTool { async _call(arg: any): Promise { const params = { ...arg, ...this.defaultParams } - const ranges = params.ranges.split(',').map((range: string) => range.trim()) - const body = { ranges } + try { + const ranges = params.ranges.split(',').map((range: string) => range.trim()) + const body = { ranges } - const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchClear` + const endpoint = `spreadsheets/${params.spreadsheetId}/values:batchClear` - return await this.makeGoogleSheetsRequest({ - endpoint, - method: 'POST', - body, - params - }) + return await this.makeGoogleSheetsRequest({ + endpoint, + method: 'POST', + body, + params + }) + } catch (error) { + return formatToolError(`Error batch clearing values: ${error}`, params) + } } } diff --git a/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.test.ts b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.test.ts new file mode 100644 index 00000000000..a1c6755f7b9 --- /dev/null +++ 
b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.test.ts @@ -0,0 +1,261 @@ +const { nodeClass: JSONPathExtractor_Tools } = require('./JSONPathExtractor') +import { INodeData } from '../../../src/Interface' + +// Mock the getBaseClasses function +jest.mock('../../../src/utils', () => ({ + getBaseClasses: jest.fn(() => ['Tool', 'StructuredTool']) +})) + +// Helper function to create a valid INodeData object +function createNodeData(id: string, inputs: any): INodeData { + return { + id: id, + label: 'JSON Path Extractor', + name: 'jsonPathExtractor', + type: 'JSONPathExtractor', + icon: 'jsonpathextractor.svg', + version: 1.0, + category: 'Tools', + baseClasses: ['JSONPathExtractor', 'Tool'], + inputs: inputs + } +} + +describe('JSONPathExtractor', () => { + let nodeClass: any + + beforeEach(() => { + nodeClass = new JSONPathExtractor_Tools() + }) + + describe('Tool Initialization', () => { + it('should throw error when path is not provided', async () => { + const nodeData = createNodeData('test-node-1', { + path: '' + }) + + await expect(nodeClass.init(nodeData, '')).rejects.toThrow('JSON Path is required') + }) + + it('should initialize tool with path and default returnNullOnError', async () => { + const nodeData = createNodeData('test-node-2', { + path: 'data.value' + }) + + const tool = await nodeClass.init(nodeData, '') + expect(tool).toBeDefined() + expect(tool.name).toBe('json_path_extractor') + }) + + it('should initialize tool with custom returnNullOnError', async () => { + const nodeData = createNodeData('test-node-3', { + path: 'data.value', + returnNullOnError: true + }) + + const tool = await nodeClass.init(nodeData, '') + expect(tool).toBeDefined() + }) + }) + + describe('JSONPathExtractorTool Functionality', () => { + describe('Positive test cases - Path extraction', () => { + const successCases = [ + { + name: 'simple path from object', + path: 'data.value', + input: { data: { value: 'test' } }, + expected: 'test' + }, + { + name: 
'nested path from object', + path: 'user.profile.name', + input: { user: { profile: { name: 'John' } } }, + expected: 'John' + }, + { + name: 'array index access', + path: 'items[0].name', + input: { items: [{ name: 'first' }, { name: 'second' }] }, + expected: 'first' + }, + { + name: 'multi-dimensional array', + path: 'matrix[0][1]', + input: { + matrix: [ + ['a', 'b'], + ['c', 'd'] + ] + }, + expected: 'b' + }, + { + name: 'object return (stringified)', + path: 'data', + input: { data: { nested: 'object' } }, + expected: '{"nested":"object"}' + }, + { + name: 'array return (stringified)', + path: 'tags', + input: { tags: ['a', 'b', 'c'] }, + expected: '["a","b","c"]' + }, + { + name: 'deep nesting', + path: 'a.b.c.d.e', + input: { a: { b: { c: { d: { e: 'deep' } } } } }, + expected: 'deep' + }, + { + name: 'array at root with index', + path: '[1]', + input: ['first', 'second', 'third'], + expected: 'second' + } + ] + + test.each(successCases)('should extract $name', async ({ path, input, expected }) => { + const nodeData = createNodeData(`test-node-${path}`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + expect(result).toBe(expected) + }) + }) + + describe('Primitive value handling', () => { + const primitiveTests = [ + { name: 'string', path: 'val', input: { val: 'text' }, expected: 'text' }, + { name: 'number', path: 'val', input: { val: 42 }, expected: '42' }, + { name: 'zero', path: 'val', input: { val: 0 }, expected: '0' }, + { name: 'boolean true', path: 'val', input: { val: true }, expected: 'true' }, + { name: 'boolean false', path: 'val', input: { val: false }, expected: 'false' }, + { name: 'null', path: 'val', input: { val: null }, expected: 'null' }, + { name: 'empty string', path: 'val', input: { val: '' }, expected: '' } + ] + + test.each(primitiveTests)('should handle $name value', async ({ path, input, expected }) => { + const nodeData = 
createNodeData(`test-primitive`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + expect(result).toBe(expected) + }) + }) + + describe('Special characters in keys', () => { + const specialCharTests = [ + { name: 'dashes', path: 'data.key-with-dash', input: { data: { 'key-with-dash': 'value' } } }, + { name: 'spaces', path: 'data.key with spaces', input: { data: { 'key with spaces': 'value' } } }, + { name: 'unicode', path: 'data.emoji🔑', input: { data: { 'emoji🔑': 'value' } } }, + { name: 'numeric strings', path: 'data.123', input: { data: { '123': 'value' } } } + ] + + test.each(specialCharTests)('should handle $name in keys', async ({ path, input }) => { + const nodeData = createNodeData(`test-special`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + expect(result).toBe('value') + }) + }) + + describe('Error handling - throw mode', () => { + const errorCases = [ + { + name: 'path not found', + path: 'data.value', + input: { data: { other: 'value' } }, + errorPattern: /Path "data.value" not found in JSON/ + }, + { + name: 'invalid JSON string', + path: 'data', + input: 'invalid json', + errorPattern: /Invalid JSON string/ + }, + { + name: 'array index on object', + path: 'data[0]', + input: { data: { key: 'value' } }, + errorPattern: /Path "data\[0\]" not found in JSON/ + }, + { + name: 'out of bounds array', + path: 'items[10]', + input: { items: ['a', 'b'] }, + errorPattern: /Path "items\[10\]" not found in JSON/ + } + ] + + test.each(errorCases)('should throw error for $name', async ({ path, input, errorPattern }) => { + const nodeData = createNodeData(`test-error`, { + path: path, + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + await expect(tool._call({ json: input })).rejects.toThrow(errorPattern) + }) + }) + + 
describe('Error handling - null mode', () => { + const nullCases = [ + { name: 'path not found', path: 'missing.path', input: { data: 'value' } }, + { name: 'invalid JSON string', path: 'data', input: 'invalid json' }, + { name: 'null in path', path: 'data.nested.value', input: { data: { nested: null } } }, + { name: 'empty array access', path: 'items[0]', input: { items: [] } }, + { name: 'property on primitive', path: 'value.nested', input: { value: 'string' } } + ] + + test.each(nullCases)('should return null for $name', async ({ path, input }) => { + const nodeData = createNodeData(`test-null`, { + path: path, + returnNullOnError: true + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ json: input }) + expect(result).toBe('null') + }) + + it('should still extract valid paths when returnNullOnError is true', async () => { + const nodeData = createNodeData('test-valid-null-mode', { + path: 'data.value', + returnNullOnError: true + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ + json: { data: { value: 'test' } } + }) + expect(result).toBe('test') + }) + }) + + describe('Complex structures', () => { + it('should handle deeply nested arrays and objects', async () => { + const nodeData = createNodeData('test-complex', { + path: 'users[0].addresses[1].city', + returnNullOnError: false + }) + const tool = await nodeClass.init(nodeData, '') + const result = await tool._call({ + json: { + users: [ + { + addresses: [{ city: 'New York' }, { city: 'Los Angeles' }] + } + ] + } + }) + expect(result).toBe('Los Angeles') + }) + }) + }) +}) diff --git a/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.ts b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.ts new file mode 100644 index 00000000000..4ab7adc1cb9 --- /dev/null +++ b/packages/components/nodes/tools/JSONPathExtractor/JSONPathExtractor.ts @@ -0,0 +1,125 @@ +import { z } from 'zod' +import { 
StructuredTool } from '@langchain/core/tools' +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { get } from 'lodash' + +/** + * Tool that extracts values from JSON using path + */ +class JSONPathExtractorTool extends StructuredTool { + name = 'json_path_extractor' + description = 'Extract value from JSON using configured path' + + schema = z.object({ + json: z + .union([z.string().describe('JSON string'), z.record(z.any()).describe('JSON object'), z.array(z.any()).describe('JSON array')]) + .describe('JSON data to extract value from') + }) + + private readonly path: string + private readonly returnNullOnError: boolean + + constructor(path: string, returnNullOnError: boolean = false) { + super() + this.path = path + this.returnNullOnError = returnNullOnError + } + + async _call({ json }: z.infer): Promise { + // Validate that path is configured + if (!this.path) { + if (this.returnNullOnError) { + return 'null' + } + throw new Error('No extraction path configured') + } + + let data: any + + // Parse JSON string if needed + if (typeof json === 'string') { + try { + data = JSON.parse(json) + } catch (error) { + if (this.returnNullOnError) { + return 'null' + } + throw new Error(`Invalid JSON string: ${error instanceof Error ? error.message : 'Parse error'}`) + } + } else { + data = json + } + + // Extract value using lodash get + const value = get(data, this.path) + + if (value === undefined) { + if (this.returnNullOnError) { + return 'null' + } + const jsonPreview = JSON.stringify(data, null, 2) + const preview = jsonPreview.length > 200 ? jsonPreview.substring(0, 200) + '...' : jsonPreview + throw new Error(`Path "${this.path}" not found in JSON. Received: ${preview}`) + } + + return typeof value === 'string' ? 
value : JSON.stringify(value) + } +} + +/** + * Node implementation for JSON Path Extractor tool + */ +class JSONPathExtractor_Tools implements INode { + label: string + name: string + version: number + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'JSON Path Extractor' + this.name = 'jsonPathExtractor' + this.version = 1.0 + this.type = 'JSONPathExtractor' + this.icon = 'jsonpathextractor.svg' + this.category = 'Tools' + this.description = 'Extract values from JSON using path expressions' + this.baseClasses = [this.type, ...getBaseClasses(JSONPathExtractorTool)] + this.inputs = [ + { + label: 'JSON Path', + name: 'path', + type: 'string', + description: 'Path to extract. Examples: data, user.name, items[0].id', + placeholder: 'data' + }, + { + label: 'Return Null on Error', + name: 'returnNullOnError', + type: 'boolean', + default: false, + description: 'Return null instead of throwing error when extraction fails', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData, _: string): Promise { + const path = (nodeData.inputs?.path as string) || '' + const returnNullOnError = (nodeData.inputs?.returnNullOnError as boolean) || false + + if (!path) { + throw new Error('JSON Path is required') + } + + return new JSONPathExtractorTool(path, returnNullOnError) + } +} + +module.exports = { nodeClass: JSONPathExtractor_Tools } diff --git a/packages/components/nodes/tools/JSONPathExtractor/jsonpathextractor.svg b/packages/components/nodes/tools/JSONPathExtractor/jsonpathextractor.svg new file mode 100644 index 00000000000..30b50a208c7 --- /dev/null +++ b/packages/components/nodes/tools/JSONPathExtractor/jsonpathextractor.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/Jira/core.ts b/packages/components/nodes/tools/Jira/core.ts index 09d73e0ca53..07cb078c539 
100644 --- a/packages/components/nodes/tools/Jira/core.ts +++ b/packages/components/nodes/tools/Jira/core.ts @@ -1,7 +1,7 @@ import { z } from 'zod' import fetch from 'node-fetch' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' -import { TOOL_ARGS_PREFIX } from '../../../src/agents' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' export const desc = `Use this when you want to access Jira API for managing issues, comments, and users` @@ -222,7 +222,7 @@ class ListIssuesTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, params }) return response } catch (error) { - return `Error listing issues: ${error}` + return formatToolError(`Error listing issues: ${error}`, params) } } } @@ -302,7 +302,7 @@ class CreateIssueTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint: 'issue', method: 'POST', body: issueData, params }) return response } catch (error) { - return `Error creating issue: ${error}` + return formatToolError(`Error creating issue: ${error}`, params) } } } @@ -337,7 +337,7 @@ class GetIssueTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, params }) return response } catch (error) { - return `Error getting issue: ${error}` + return formatToolError(`Error getting issue: ${error}`, params) } } } @@ -405,7 +405,7 @@ class UpdateIssueTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'PUT', body: updateData, params }) return response || 'Issue updated successfully' } catch (error) { - return `Error updating issue: ${error}` + return formatToolError(`Error updating issue: ${error}`, params) } } } @@ -440,7 +440,7 @@ class DeleteIssueTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'DELETE', params }) return response || 'Issue deleted successfully' } catch (error) { - return `Error deleting issue: ${error}` + return formatToolError(`Error deleting 
issue: ${error}`, params) } } } @@ -479,7 +479,7 @@ class AssignIssueTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'PUT', body: assignData, params }) return response || 'Issue assigned successfully' } catch (error) { - return `Error assigning issue: ${error}` + return formatToolError(`Error assigning issue: ${error}`, params) } } } @@ -520,7 +520,7 @@ class TransitionIssueTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'POST', body: transitionData, params }) return response || 'Issue transitioned successfully' } catch (error) { - return `Error transitioning issue: ${error}` + return formatToolError(`Error transitioning issue: ${error}`, params) } } } @@ -561,7 +561,7 @@ class ListCommentsTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, params }) return response } catch (error) { - return `Error listing comments: ${error}` + return formatToolError(`Error listing comments: ${error}`, params) } } } @@ -618,7 +618,7 @@ class CreateCommentTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'POST', body: commentData, params }) return response } catch (error) { - return `Error creating comment: ${error}` + return formatToolError(`Error creating comment: ${error}`, params) } } } @@ -653,7 +653,7 @@ class GetCommentTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, params }) return response } catch (error) { - return `Error getting comment: ${error}` + return formatToolError(`Error getting comment: ${error}`, params) } } } @@ -706,7 +706,7 @@ class UpdateCommentTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'PUT', body: commentData, params }) return response || 'Comment updated successfully' } catch (error) { - return `Error updating comment: ${error}` + return formatToolError(`Error updating comment: ${error}`, params) } } } @@ -741,7 
+741,7 @@ class DeleteCommentTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'DELETE', params }) return response || 'Comment deleted successfully' } catch (error) { - return `Error deleting comment: ${error}` + return formatToolError(`Error deleting comment: ${error}`, params) } } } @@ -783,7 +783,7 @@ class SearchUsersTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, params }) return response } catch (error) { - return `Error searching users: ${error}` + return formatToolError(`Error searching users: ${error}`, params) } } } @@ -822,7 +822,7 @@ class GetUserTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, params }) return response } catch (error) { - return `Error getting user: ${error}` + return formatToolError(`Error getting user: ${error}`, params) } } } @@ -866,7 +866,7 @@ class CreateUserTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'POST', body: userData, params }) return response } catch (error) { - return `Error creating user: ${error}` + return formatToolError(`Error creating user: ${error}`, params) } } } @@ -909,7 +909,7 @@ class UpdateUserTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'PUT', body: userData, params }) return response || 'User updated successfully' } catch (error) { - return `Error updating user: ${error}` + return formatToolError(`Error updating user: ${error}`, params) } } } @@ -947,7 +947,7 @@ class DeleteUserTool extends BaseJiraTool { const response = await this.makeJiraRequest({ endpoint, method: 'DELETE', params }) return response || 'User deleted successfully' } catch (error) { - return `Error deleting user: ${error}` + return formatToolError(`Error deleting user: ${error}`, params) } } } @@ -969,7 +969,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - 
defaultParams: defaultParams.listIssues + defaultParams }) ) } @@ -981,7 +981,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.createIssue + defaultParams }) ) } @@ -993,7 +993,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.getIssue + defaultParams }) ) } @@ -1005,7 +1005,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.updateIssue + defaultParams }) ) } @@ -1017,7 +1017,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.deleteIssue + defaultParams }) ) } @@ -1029,7 +1029,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.assignIssue + defaultParams }) ) } @@ -1041,7 +1041,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.transitionIssue + defaultParams }) ) } @@ -1054,7 +1054,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.listComments + defaultParams }) ) } @@ -1066,7 +1066,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.createComment + defaultParams }) ) } @@ -1078,7 +1078,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.getComment + defaultParams }) ) } @@ -1090,7 +1090,7 @@ export const createJiraTools = 
(args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.updateComment + defaultParams }) ) } @@ -1102,7 +1102,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.deleteComment + defaultParams }) ) } @@ -1115,7 +1115,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.searchUsers + defaultParams }) ) } @@ -1127,7 +1127,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.getUser + defaultParams }) ) } @@ -1139,7 +1139,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.createUser + defaultParams }) ) } @@ -1151,7 +1151,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.updateUser + defaultParams }) ) } @@ -1163,7 +1163,7 @@ export const createJiraTools = (args?: RequestParameters): DynamicStructuredTool accessToken, jiraHost, maxOutputLength, - defaultParams: defaultParams.deleteUser + defaultParams }) ) } diff --git a/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts b/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts index 3fa177bfb9a..8592692a6fc 100644 --- a/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts +++ b/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts @@ -4,6 +4,7 @@ import { MCPToolkit } from '../core' import { getVars, prepareSandboxVars } from '../../../../src/utils' import { DataSource } from 'typeorm' import hash from 'object-hash' +import JSON5 from 'json5' const mcpServerConfig = `{ "command": "npx", @@ -261,7 +262,7 @@ function 
substituteVariablesInString(str: string, sandbox: any): string { function convertToValidJSONString(inputString: string) { try { - const jsObject = Function('return ' + inputString)() + const jsObject = JSON5.parse(inputString) return JSON.stringify(jsObject, null, 2) } catch (error) { console.error('Error converting to JSON:', error) diff --git a/packages/components/nodes/tools/MicrosoftOutlook/core.ts b/packages/components/nodes/tools/MicrosoftOutlook/core.ts index ce6fe8ba8fc..0468da632c9 100644 --- a/packages/components/nodes/tools/MicrosoftOutlook/core.ts +++ b/packages/components/nodes/tools/MicrosoftOutlook/core.ts @@ -1,7 +1,7 @@ import { z } from 'zod' import fetch from 'node-fetch' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' -import { TOOL_ARGS_PREFIX } from '../../../src/agents' +import { TOOL_ARGS_PREFIX, formatToolError } from '../../../src/agents' export const desc = `Use this when you want to access Microsoft Outlook API for managing calendars, events, and messages` @@ -201,7 +201,7 @@ class ListCalendarsTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error listing calendars: ${error}` + return formatToolError(`Error listing calendars: ${error}`, {}) } } } @@ -230,7 +230,7 @@ class GetCalendarTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error getting calendar: ${error}` + return formatToolError(`Error getting calendar: ${error}`, params) } } } @@ -263,7 +263,7 @@ class CreateCalendarTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'POST', calendarData, params) return response } catch (error) { - return `Error creating calendar: ${error}` + return formatToolError(`Error creating calendar: ${error}`, params) } } } @@ -296,7 +296,7 @@ class UpdateCalendarTool extends BaseOutlookTool { const response 
= await this.makeGraphRequest(url, 'PATCH', calendarData, params) return response } catch (error) { - return `Error updating calendar: ${error}` + return formatToolError(`Error updating calendar: ${error}`, params) } } } @@ -325,7 +325,7 @@ class DeleteCalendarTool extends BaseOutlookTool { await this.makeGraphRequest(url, 'DELETE', undefined, params) return `Calendar ${params.calendarId} deleted successfully` } catch (error) { - return `Error deleting calendar: ${error}` + return formatToolError(`Error deleting calendar: ${error}`, params) } } } @@ -372,7 +372,7 @@ class ListEventsTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error listing events: ${error}` + return formatToolError(`Error listing events: ${error}`, params) } } } @@ -401,7 +401,7 @@ class GetEventTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error getting event: ${error}` + return formatToolError(`Error getting event: ${error}`, params) } } } @@ -452,7 +452,7 @@ class CreateEventTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'POST', eventData, params) return response } catch (error) { - return `Error creating event: ${error}` + return formatToolError(`Error creating event: ${error}`, params) } } } @@ -484,7 +484,7 @@ class UpdateEventTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'PATCH', eventData, params) return response } catch (error) { - return `Error updating event: ${error}` + return formatToolError(`Error updating event: ${error}`, params) } } } @@ -513,7 +513,7 @@ class DeleteEventTool extends BaseOutlookTool { await this.makeGraphRequest(url, 'DELETE', undefined, params) return `Event ${params.eventId} deleted successfully` } catch (error) { - return `Error deleting event: ${error}` + return formatToolError(`Error 
deleting event: ${error}`, params) } } } @@ -548,7 +548,7 @@ class ListMessagesTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error listing messages: ${error}` + return formatToolError(`Error listing messages: ${error}`, params) } } } @@ -577,7 +577,7 @@ class GetMessageTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'GET', undefined, params) return response } catch (error) { - return `Error getting message: ${error}` + return formatToolError(`Error getting message: ${error}`, params) } } } @@ -617,7 +617,7 @@ class CreateDraftMessageTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'POST', messageData, params) return response } catch (error) { - return `Error creating draft message: ${error}` + return formatToolError(`Error creating draft message: ${error}`, params) } } } @@ -658,7 +658,7 @@ class SendMessageTool extends BaseOutlookTool { await this.makeGraphRequest(url, 'POST', messageData, params) return 'Message sent successfully' } catch (error) { - return `Error sending message: ${error}` + return formatToolError(`Error sending message: ${error}`, params) } } } @@ -690,7 +690,7 @@ class UpdateMessageTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'PATCH', messageData, params) return response } catch (error) { - return `Error updating message: ${error}` + return formatToolError(`Error updating message: ${error}`, params) } } } @@ -719,7 +719,7 @@ class DeleteMessageTool extends BaseOutlookTool { await this.makeGraphRequest(url, 'DELETE', undefined, params) return `Message ${params.messageId} deleted successfully` } catch (error) { - return `Error deleting message: ${error}` + return formatToolError(`Error deleting message: ${error}`, params) } } } @@ -752,7 +752,7 @@ class CopyMessageTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 
'POST', copyData, params) return response } catch (error) { - return `Error copying message: ${error}` + return formatToolError(`Error copying message: ${error}`, params) } } } @@ -785,7 +785,7 @@ class MoveMessageTool extends BaseOutlookTool { const response = await this.makeGraphRequest(url, 'POST', moveData, params) return response } catch (error) { - return `Error moving message: ${error}` + return formatToolError(`Error moving message: ${error}`, params) } } } @@ -818,7 +818,7 @@ class ReplyMessageTool extends BaseOutlookTool { await this.makeGraphRequest(url, 'POST', replyData, params) return 'Reply sent successfully' } catch (error) { - return `Error replying to message: ${error}` + return formatToolError(`Error replying to message: ${error}`, params) } } } @@ -865,163 +865,103 @@ export const createOutlookTools = (args?: RequestParameters): DynamicStructuredT // Calendar tools if (actions.includes('listCalendars')) { - const listTool = new ListCalendarsTool({ - accessToken, - defaultParams: defaultParams.listCalendars - }) + const listTool = new ListCalendarsTool({ accessToken, defaultParams }) tools.push(listTool) } if (actions.includes('getCalendar')) { - const getTool = new GetCalendarTool({ - accessToken, - defaultParams: defaultParams.getCalendar - }) + const getTool = new GetCalendarTool({ accessToken, defaultParams }) tools.push(getTool) } if (actions.includes('createCalendar')) { - const createTool = new CreateCalendarTool({ - accessToken, - defaultParams: defaultParams.createCalendar - }) + const createTool = new CreateCalendarTool({ accessToken, defaultParams }) tools.push(createTool) } if (actions.includes('updateCalendar')) { - const updateTool = new UpdateCalendarTool({ - accessToken, - defaultParams: defaultParams.updateCalendar - }) + const updateTool = new UpdateCalendarTool({ accessToken, defaultParams }) tools.push(updateTool) } if (actions.includes('deleteCalendar')) { - const deleteTool = new DeleteCalendarTool({ - accessToken, - 
defaultParams: defaultParams.deleteCalendar - }) + const deleteTool = new DeleteCalendarTool({ accessToken, defaultParams }) tools.push(deleteTool) } if (actions.includes('listEvents')) { - const listTool = new ListEventsTool({ - accessToken, - defaultParams: defaultParams.listEvents - }) + const listTool = new ListEventsTool({ accessToken, defaultParams }) tools.push(listTool) } if (actions.includes('getEvent')) { - const getTool = new GetEventTool({ - accessToken, - defaultParams: defaultParams.getEvent - }) + const getTool = new GetEventTool({ accessToken, defaultParams }) tools.push(getTool) } if (actions.includes('createEvent')) { - const createTool = new CreateEventTool({ - accessToken, - defaultParams: defaultParams.createEvent - }) + const createTool = new CreateEventTool({ accessToken, defaultParams }) tools.push(createTool) } if (actions.includes('updateEvent')) { - const updateTool = new UpdateEventTool({ - accessToken, - defaultParams: defaultParams.updateEvent - }) + const updateTool = new UpdateEventTool({ accessToken, defaultParams }) tools.push(updateTool) } if (actions.includes('deleteEvent')) { - const deleteTool = new DeleteEventTool({ - accessToken, - defaultParams: defaultParams.deleteEvent - }) + const deleteTool = new DeleteEventTool({ accessToken, defaultParams }) tools.push(deleteTool) } // Message tools if (actions.includes('listMessages')) { - const listTool = new ListMessagesTool({ - accessToken, - defaultParams: defaultParams.listMessages - }) + const listTool = new ListMessagesTool({ accessToken, defaultParams }) tools.push(listTool) } if (actions.includes('getMessage')) { - const getTool = new GetMessageTool({ - accessToken, - defaultParams: defaultParams.getMessage - }) + const getTool = new GetMessageTool({ accessToken, defaultParams }) tools.push(getTool) } if (actions.includes('createDraftMessage')) { - const createTool = new CreateDraftMessageTool({ - accessToken, - defaultParams: defaultParams.createDraftMessage - }) + const 
createTool = new CreateDraftMessageTool({ accessToken, defaultParams }) tools.push(createTool) } if (actions.includes('sendMessage')) { - const sendTool = new SendMessageTool({ - accessToken, - defaultParams: defaultParams.sendMessage - }) + const sendTool = new SendMessageTool({ accessToken, defaultParams }) tools.push(sendTool) } if (actions.includes('updateMessage')) { - const updateTool = new UpdateMessageTool({ - accessToken, - defaultParams: defaultParams.updateMessage - }) + const updateTool = new UpdateMessageTool({ accessToken, defaultParams }) tools.push(updateTool) } if (actions.includes('deleteMessage')) { - const deleteTool = new DeleteMessageTool({ - accessToken, - defaultParams: defaultParams.deleteMessage - }) + const deleteTool = new DeleteMessageTool({ accessToken, defaultParams }) tools.push(deleteTool) } if (actions.includes('copyMessage')) { - const copyTool = new CopyMessageTool({ - accessToken, - defaultParams: defaultParams.copyMessage - }) + const copyTool = new CopyMessageTool({ accessToken, defaultParams }) tools.push(copyTool) } if (actions.includes('moveMessage')) { - const moveTool = new MoveMessageTool({ - accessToken, - defaultParams: defaultParams.moveMessage - }) + const moveTool = new MoveMessageTool({ accessToken, defaultParams }) tools.push(moveTool) } if (actions.includes('replyMessage')) { - const replyTool = new ReplyMessageTool({ - accessToken, - defaultParams: defaultParams.replyMessage - }) + const replyTool = new ReplyMessageTool({ accessToken, defaultParams }) tools.push(replyTool) } if (actions.includes('forwardMessage')) { - const forwardTool = new ForwardMessageTool({ - accessToken, - defaultParams: defaultParams.forwardMessage - }) + const forwardTool = new ForwardMessageTool({ accessToken, defaultParams }) tools.push(forwardTool) } diff --git a/packages/components/nodes/tools/MicrosoftTeams/core.ts b/packages/components/nodes/tools/MicrosoftTeams/core.ts index a0c08091469..77c6feaf9b7 100644 --- 
a/packages/components/nodes/tools/MicrosoftTeams/core.ts +++ b/packages/components/nodes/tools/MicrosoftTeams/core.ts @@ -119,7 +119,7 @@ class ListChannelsTool extends BaseTeamsTool { return this.formatResponse(responseData, params) } catch (error) { - return `Error listing channels: ${error}` + return this.formatResponse(`Error listing channels: ${error}`, params) } } } @@ -1519,236 +1519,149 @@ export function createTeamsTools(options: TeamsToolOptions): DynamicStructuredTo // Channel tools if (actions.includes('listChannels')) { - const listTool = new ListChannelsTool({ - accessToken, - defaultParams: defaultParams.listChannels - }) + const listTool = new ListChannelsTool({ accessToken, defaultParams }) tools.push(listTool) } if (actions.includes('getChannel')) { - const getTool = new GetChannelTool({ - accessToken, - defaultParams: defaultParams.getChannel - }) + const getTool = new GetChannelTool({ accessToken, defaultParams }) tools.push(getTool) } if (actions.includes('createChannel')) { - const createTool = new CreateChannelTool({ - accessToken, - defaultParams: defaultParams.createChannel - }) + const createTool = new CreateChannelTool({ accessToken, defaultParams }) tools.push(createTool) } if (actions.includes('updateChannel')) { - const updateTool = new UpdateChannelTool({ - accessToken, - defaultParams: defaultParams.updateChannel - }) + const updateTool = new UpdateChannelTool({ accessToken, defaultParams }) tools.push(updateTool) } if (actions.includes('deleteChannel')) { - const deleteTool = new DeleteChannelTool({ - accessToken, - defaultParams: defaultParams.deleteChannel - }) + const deleteTool = new DeleteChannelTool({ accessToken, defaultParams }) tools.push(deleteTool) } if (actions.includes('archiveChannel')) { - const archiveTool = new ArchiveChannelTool({ - accessToken, - defaultParams: defaultParams.archiveChannel - }) + const archiveTool = new ArchiveChannelTool({ accessToken, defaultParams }) tools.push(archiveTool) } if 
(actions.includes('unarchiveChannel')) { - const unarchiveTool = new UnarchiveChannelTool({ - accessToken, - defaultParams: defaultParams.unarchiveChannel - }) + const unarchiveTool = new UnarchiveChannelTool({ accessToken, defaultParams }) tools.push(unarchiveTool) } if (actions.includes('listChannelMembers')) { - const listMembersTool = new ListChannelMembersTool({ - accessToken, - defaultParams: defaultParams.listChannelMembers - }) + const listMembersTool = new ListChannelMembersTool({ accessToken, defaultParams }) tools.push(listMembersTool) } if (actions.includes('addChannelMember')) { - const addMemberTool = new AddChannelMemberTool({ - accessToken, - defaultParams: defaultParams.addChannelMember - }) + const addMemberTool = new AddChannelMemberTool({ accessToken, defaultParams }) tools.push(addMemberTool) } if (actions.includes('removeChannelMember')) { - const removeMemberTool = new RemoveChannelMemberTool({ - accessToken, - defaultParams: defaultParams.removeChannelMember - }) + const removeMemberTool = new RemoveChannelMemberTool({ accessToken, defaultParams }) tools.push(removeMemberTool) } // Chat tools if (actions.includes('listChats')) { - const listTool = new ListChatsTool({ - accessToken, - defaultParams: defaultParams.listChats - }) + const listTool = new ListChatsTool({ accessToken, defaultParams }) tools.push(listTool) } if (actions.includes('getChat')) { - const getTool = new GetChatTool({ - accessToken, - defaultParams: defaultParams.getChat - }) + const getTool = new GetChatTool({ accessToken, defaultParams }) tools.push(getTool) } if (actions.includes('createChat')) { - const createTool = new CreateChatTool({ - accessToken, - defaultParams: defaultParams.createChat - }) + const createTool = new CreateChatTool({ accessToken, defaultParams }) tools.push(createTool) } if (actions.includes('updateChat')) { - const updateTool = new UpdateChatTool({ - accessToken, - defaultParams: defaultParams.updateChat - }) + const updateTool = new 
UpdateChatTool({ accessToken, defaultParams }) tools.push(updateTool) } if (actions.includes('deleteChat')) { - const deleteTool = new DeleteChatTool({ - accessToken, - defaultParams: defaultParams.deleteChat - }) + const deleteTool = new DeleteChatTool({ accessToken, defaultParams }) tools.push(deleteTool) } if (actions.includes('listChatMembers')) { - const listMembersTool = new ListChatMembersTool({ - accessToken, - defaultParams: defaultParams.listChatMembers - }) + const listMembersTool = new ListChatMembersTool({ accessToken, defaultParams }) tools.push(listMembersTool) } if (actions.includes('addChatMember')) { - const addMemberTool = new AddChatMemberTool({ - accessToken, - defaultParams: defaultParams.addChatMember - }) + const addMemberTool = new AddChatMemberTool({ accessToken, defaultParams }) tools.push(addMemberTool) } if (actions.includes('removeChatMember')) { - const removeMemberTool = new RemoveChatMemberTool({ - accessToken, - defaultParams: defaultParams.removeChatMember - }) + const removeMemberTool = new RemoveChatMemberTool({ accessToken, defaultParams }) tools.push(removeMemberTool) } if (actions.includes('pinMessage')) { - const pinTool = new PinMessageTool({ - accessToken, - defaultParams: defaultParams.pinMessage - }) + const pinTool = new PinMessageTool({ accessToken, defaultParams }) tools.push(pinTool) } if (actions.includes('unpinMessage')) { - const unpinTool = new UnpinMessageTool({ - accessToken, - defaultParams: defaultParams.unpinMessage - }) + const unpinTool = new UnpinMessageTool({ accessToken, defaultParams }) tools.push(unpinTool) } // Chat message tools if (actions.includes('listMessages')) { - const listTool = new ListMessagesTool({ - accessToken, - defaultParams: defaultParams.listMessages - }) + const listTool = new ListMessagesTool({ accessToken, defaultParams }) tools.push(listTool) } if (actions.includes('getMessage')) { - const getTool = new GetMessageTool({ - accessToken, - defaultParams: defaultParams.getMessage - 
}) + const getTool = new GetMessageTool({ accessToken, defaultParams }) tools.push(getTool) } if (actions.includes('sendMessage')) { - const sendTool = new SendMessageTool({ - accessToken, - defaultParams: defaultParams.sendMessage - }) + const sendTool = new SendMessageTool({ accessToken, defaultParams }) tools.push(sendTool) } if (actions.includes('updateMessage')) { - const updateTool = new UpdateMessageTool({ - accessToken, - defaultParams: defaultParams.updateMessage - }) + const updateTool = new UpdateMessageTool({ accessToken, defaultParams }) tools.push(updateTool) } if (actions.includes('deleteMessage')) { - const deleteTool = new DeleteMessageTool({ - accessToken, - defaultParams: defaultParams.deleteMessage - }) + const deleteTool = new DeleteMessageTool({ accessToken, defaultParams }) tools.push(deleteTool) } if (actions.includes('replyToMessage')) { - const replyTool = new ReplyToMessageTool({ - accessToken, - defaultParams: defaultParams.replyToMessage - }) + const replyTool = new ReplyToMessageTool({ accessToken, defaultParams }) tools.push(replyTool) } if (actions.includes('setReaction')) { - const reactionTool = new SetReactionTool({ - accessToken, - defaultParams: defaultParams.setReaction - }) + const reactionTool = new SetReactionTool({ accessToken, defaultParams }) tools.push(reactionTool) } if (actions.includes('unsetReaction')) { - const unsetReactionTool = new UnsetReactionTool({ - accessToken, - defaultParams: defaultParams.unsetReaction - }) + const unsetReactionTool = new UnsetReactionTool({ accessToken, defaultParams }) tools.push(unsetReactionTool) } if (actions.includes('getAllMessages')) { - const getAllTool = new GetAllMessagesTool({ - accessToken, - defaultParams: defaultParams.getAllMessages - }) + const getAllTool = new GetAllMessagesTool({ accessToken, defaultParams }) tools.push(getAllTool) } diff --git a/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts 
b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts index ccc7ef7d36b..d4c664638d6 100644 --- a/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts +++ b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts @@ -46,7 +46,8 @@ class OpenAPIToolkit_Tools implements INode { type: 'json', description: 'Request headers to be sent with the API request. For example, {"Authorization": "Bearer token"}', additionalParams: true, - optional: true + optional: true, + acceptVariable: true }, { label: 'Remove null parameters', diff --git a/packages/components/nodes/tools/OpenAPIToolkit/core.ts b/packages/components/nodes/tools/OpenAPIToolkit/core.ts index 491033ce70d..f4b3499dace 100644 --- a/packages/components/nodes/tools/OpenAPIToolkit/core.ts +++ b/packages/components/nodes/tools/OpenAPIToolkit/core.ts @@ -253,10 +253,14 @@ export class DynamicStructuredTool< const sandbox = createCodeExecutionSandbox('', this.variables || [], flow, additionalSandbox) - const response = await executeJavaScriptCode(this.customCode || defaultCode, sandbox, { + let response = await executeJavaScriptCode(this.customCode || defaultCode, sandbox, { timeout: 10000 }) + if (typeof response === 'object') { + response = JSON.stringify(response) + } + return response } diff --git a/packages/components/nodes/tools/RequestsDelete/RequestsDelete.ts b/packages/components/nodes/tools/RequestsDelete/RequestsDelete.ts index 50c640c3fda..e66d40b5eae 100644 --- a/packages/components/nodes/tools/RequestsDelete/RequestsDelete.ts +++ b/packages/components/nodes/tools/RequestsDelete/RequestsDelete.ts @@ -1,6 +1,7 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils' import { desc, RequestParameters, RequestsDeleteTool } from './core' +import JSON5 from 'json5' const codeExample = `{ "id": { @@ -130,7 +131,7 @@ class RequestsDelete_Tools implements INode { if (queryParamsSchema) 
obj.queryParamsSchema = queryParamsSchema if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) if (headers) { - const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(stripHTMLFromToolInput(headers)) + const parsedHeaders = typeof headers === 'object' ? headers : JSON5.parse(stripHTMLFromToolInput(headers)) obj.headers = parsedHeaders } diff --git a/packages/components/nodes/tools/RequestsDelete/core.ts b/packages/components/nodes/tools/RequestsDelete/core.ts index a60dee95983..1c71911d668 100644 --- a/packages/components/nodes/tools/RequestsDelete/core.ts +++ b/packages/components/nodes/tools/RequestsDelete/core.ts @@ -1,6 +1,7 @@ import { z } from 'zod' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' import { secureFetch } from '../../../src/httpSecurity' +import JSON5 from 'json5' export const desc = `Use this when you need to execute a DELETE request to remove data from a website.` @@ -22,7 +23,7 @@ const createRequestsDeleteSchema = (queryParamsSchema?: string) => { // If queryParamsSchema is provided, parse it and add dynamic query params if (queryParamsSchema) { try { - const parsedSchema = JSON.parse(queryParamsSchema) + const parsedSchema = JSON5.parse(queryParamsSchema) const queryParamsObject: Record = {} Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { @@ -108,7 +109,7 @@ export class RequestsDeleteTool extends DynamicStructuredTool { if (this.queryParamsSchema && params.queryParams && Object.keys(params.queryParams).length > 0) { try { - const parsedSchema = JSON.parse(this.queryParamsSchema) + const parsedSchema = JSON5.parse(this.queryParamsSchema) const pathParams: Array<{ key: string; value: string }> = [] Object.entries(params.queryParams).forEach(([key, value]) => { diff --git a/packages/components/nodes/tools/RequestsGet/RequestsGet.ts b/packages/components/nodes/tools/RequestsGet/RequestsGet.ts index cb7c5a59479..258c3807aac 100644 --- 
a/packages/components/nodes/tools/RequestsGet/RequestsGet.ts +++ b/packages/components/nodes/tools/RequestsGet/RequestsGet.ts @@ -1,6 +1,7 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils' import { desc, RequestParameters, RequestsGetTool } from './core' +import JSON5 from 'json5' const codeExample = `{ "id": { @@ -130,7 +131,7 @@ class RequestsGet_Tools implements INode { if (queryParamsSchema) obj.queryParamsSchema = queryParamsSchema if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) if (headers) { - const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(stripHTMLFromToolInput(headers)) + const parsedHeaders = typeof headers === 'object' ? headers : JSON5.parse(stripHTMLFromToolInput(headers)) obj.headers = parsedHeaders } diff --git a/packages/components/nodes/tools/RequestsGet/core.ts b/packages/components/nodes/tools/RequestsGet/core.ts index 931a494e8f9..b519341ab64 100644 --- a/packages/components/nodes/tools/RequestsGet/core.ts +++ b/packages/components/nodes/tools/RequestsGet/core.ts @@ -1,6 +1,7 @@ import { z } from 'zod' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' import { secureFetch } from '../../../src/httpSecurity' +import JSON5 from 'json5' export const desc = `Use this when you need to execute a GET request to get data from a website.` @@ -22,7 +23,7 @@ const createRequestsGetSchema = (queryParamsSchema?: string) => { // If queryParamsSchema is provided, parse it and add dynamic query params if (queryParamsSchema) { try { - const parsedSchema = JSON.parse(queryParamsSchema) + const parsedSchema = JSON5.parse(queryParamsSchema) const queryParamsObject: Record = {} Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { @@ -108,7 +109,7 @@ export class RequestsGetTool extends DynamicStructuredTool { if (this.queryParamsSchema && params.queryParams && 
Object.keys(params.queryParams).length > 0) { try { - const parsedSchema = JSON.parse(this.queryParamsSchema) + const parsedSchema = JSON5.parse(this.queryParamsSchema) const pathParams: Array<{ key: string; value: string }> = [] Object.entries(params.queryParams).forEach(([key, value]) => { diff --git a/packages/components/nodes/tools/RequestsPost/RequestsPost.ts b/packages/components/nodes/tools/RequestsPost/RequestsPost.ts index 326084d9c52..ea9c8669443 100644 --- a/packages/components/nodes/tools/RequestsPost/RequestsPost.ts +++ b/packages/components/nodes/tools/RequestsPost/RequestsPost.ts @@ -1,6 +1,7 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils' import { RequestParameters, desc, RequestsPostTool } from './core' +import JSON5 from 'json5' const codeExample = `{ "name": { @@ -140,11 +141,11 @@ class RequestsPost_Tools implements INode { if (bodySchema) obj.bodySchema = stripHTMLFromToolInput(bodySchema) if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) if (headers) { - const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(stripHTMLFromToolInput(headers)) + const parsedHeaders = typeof headers === 'object' ? headers : JSON5.parse(stripHTMLFromToolInput(headers)) obj.headers = parsedHeaders } if (body) { - const parsedBody = typeof body === 'object' ? body : JSON.parse(body) + const parsedBody = typeof body === 'object' ? 
body : JSON5.parse(body) obj.body = parsedBody } diff --git a/packages/components/nodes/tools/RequestsPost/core.ts b/packages/components/nodes/tools/RequestsPost/core.ts index bbffe9379fb..5ee082c41bf 100644 --- a/packages/components/nodes/tools/RequestsPost/core.ts +++ b/packages/components/nodes/tools/RequestsPost/core.ts @@ -1,6 +1,7 @@ import { z } from 'zod' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' import { secureFetch } from '../../../src/httpSecurity' +import JSON5 from 'json5' export const desc = `Use this when you want to execute a POST request to create or update a resource.` @@ -27,7 +28,7 @@ const createRequestsPostSchema = (bodySchema?: string) => { // If bodySchema is provided, parse it and add dynamic body params if (bodySchema) { try { - const parsedSchema = JSON.parse(bodySchema) + const parsedSchema = JSON5.parse(bodySchema) const bodyParamsObject: Record = {} Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { diff --git a/packages/components/nodes/tools/RequestsPut/RequestsPut.ts b/packages/components/nodes/tools/RequestsPut/RequestsPut.ts index c269401571e..ce5bfe38b99 100644 --- a/packages/components/nodes/tools/RequestsPut/RequestsPut.ts +++ b/packages/components/nodes/tools/RequestsPut/RequestsPut.ts @@ -1,6 +1,7 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface' import { getBaseClasses, stripHTMLFromToolInput } from '../../../src/utils' import { RequestParameters, desc, RequestsPutTool } from './core' +import JSON5 from 'json5' const codeExample = `{ "name": { @@ -140,11 +141,11 @@ class RequestsPut_Tools implements INode { if (bodySchema) obj.bodySchema = stripHTMLFromToolInput(bodySchema) if (maxOutputLength) obj.maxOutputLength = parseInt(maxOutputLength, 10) if (headers) { - const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(stripHTMLFromToolInput(headers)) + const parsedHeaders = typeof headers === 'object' ? 
headers : JSON5.parse(stripHTMLFromToolInput(headers)) obj.headers = parsedHeaders } if (body) { - const parsedBody = typeof body === 'object' ? body : JSON.parse(body) + const parsedBody = typeof body === 'object' ? body : JSON5.parse(body) obj.body = parsedBody } diff --git a/packages/components/nodes/tools/RequestsPut/core.ts b/packages/components/nodes/tools/RequestsPut/core.ts index 28003f9d046..0984be332ec 100644 --- a/packages/components/nodes/tools/RequestsPut/core.ts +++ b/packages/components/nodes/tools/RequestsPut/core.ts @@ -1,6 +1,7 @@ import { z } from 'zod' import { DynamicStructuredTool } from '../OpenAPIToolkit/core' import { secureFetch } from '../../../src/httpSecurity' +import JSON5 from 'json5' export const desc = `Use this when you want to execute a PUT request to update or replace a resource.` @@ -27,7 +28,7 @@ const createRequestsPutSchema = (bodySchema?: string) => { // If bodySchema is provided, parse it and add dynamic body params if (bodySchema) { try { - const parsedSchema = JSON.parse(bodySchema) + const parsedSchema = JSON5.parse(bodySchema) const bodyParamsObject: Record = {} Object.entries(parsedSchema).forEach(([key, config]: [string, any]) => { diff --git a/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts b/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts index 0010bce9c50..d95ee11d619 100644 --- a/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts +++ b/packages/components/nodes/tools/RetrieverTool/RetrieverTool.ts @@ -173,7 +173,8 @@ class Retriever_Tools implements INode { hint: { label: 'What can you filter?', value: howToUse - } + }, + acceptVariable: true } ] } diff --git a/packages/components/nodes/vectorstores/Chroma/Chroma.ts b/packages/components/nodes/vectorstores/Chroma/Chroma.ts index 90fb2c552e6..80ddbe35b7a 100644 --- a/packages/components/nodes/vectorstores/Chroma/Chroma.ts +++ b/packages/components/nodes/vectorstores/Chroma/Chroma.ts @@ -74,7 +74,8 @@ class 
Chroma_VectorStores implements INode { name: 'chromaMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts b/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts index 83c45271e6e..cbc25fb4ac7 100644 --- a/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts +++ b/packages/components/nodes/vectorstores/Couchbase/Couchbase.ts @@ -96,7 +96,8 @@ class Couchbase_VectorStores implements INode { name: 'couchbaseMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/nodes/vectorstores/Kendra/Kendra.ts b/packages/components/nodes/vectorstores/Kendra/Kendra.ts new file mode 100644 index 00000000000..d870558c77d --- /dev/null +++ b/packages/components/nodes/vectorstores/Kendra/Kendra.ts @@ -0,0 +1,294 @@ +import { flatten } from 'lodash' +import { AmazonKendraRetriever } from '@langchain/aws' +import { KendraClient, BatchPutDocumentCommand, BatchDeleteDocumentCommand } from '@aws-sdk/client-kendra' +import { Document } from '@langchain/core/documents' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface' +import { FLOWISE_CHATID, getCredentialData, getCredentialParam } from '../../../src/utils' +import { howToUseFileUpload } from '../VectorStoreUtils' +import { MODEL_TYPE, getRegions } from '../../../src/modelLoader' + +class Kendra_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'AWS Kendra' + this.name = 'kendra' + this.version = 1.0 + 
this.type = 'Kendra' + this.icon = 'kendra.svg' + this.category = 'Vector Stores' + this.description = `Use AWS Kendra's intelligent search service for document retrieval and semantic search` + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.credential = { + label: 'AWS Credential', + name: 'credential', + type: 'credential', + credentialNames: ['awsApi'], + optional: true + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Region', + name: 'region', + type: 'asyncOptions', + loadMethod: 'listRegions', + default: 'us-east-1' + }, + { + label: 'Kendra Index ID', + name: 'indexId', + type: 'string', + placeholder: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx', + description: 'The ID of your AWS Kendra index' + }, + { + label: 'File Upload', + name: 'fileUpload', + description: 'Allow file upload on the chat', + hint: { + label: 'How to use', + value: howToUseFileUpload + }, + type: 'boolean', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. 
Default to 10', + placeholder: '10', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Attribute Filter', + name: 'attributeFilter', + description: 'Optional filter to apply when retrieving documents', + type: 'json', + optional: true, + additionalParams: true, + acceptVariable: true + } + ] + // Note: Kendra doesn't support MMR search, but keeping the structure consistent + this.outputs = [ + { + label: 'Kendra Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Kendra Vector Store', + name: 'vectorStore', + baseClasses: [this.type, 'BaseRetriever'] + } + ] + } + + loadMethods = { + async listRegions(): Promise { + return await getRegions(MODEL_TYPE.CHAT, 'awsChatBedrock') + } + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise> { + const indexId = nodeData.inputs?.indexId as string + const region = nodeData.inputs?.region as string + const docs = nodeData.inputs?.document as Document[] + const isFileUploadEnabled = nodeData.inputs?.fileUpload as boolean + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + let clientConfig: any = { region } + + if (credentialData && Object.keys(credentialData).length !== 0) { + const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) + const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) + const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) + + if (accessKeyId && secretAccessKey) { + clientConfig.credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }) + } + } + } + + const client = new KendraClient(clientConfig) + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + const kendraDocuments = [] + + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + if (isFileUploadEnabled && options.chatId) { + flattenDocs[i].metadata = { ...flattenDocs[i].metadata, [FLOWISE_CHATID]: options.chatId } + } + finalDocs.push(new Document(flattenDocs[i])) + + // Prepare document for Kendra + const docId = `doc_${Date.now()}_${i}` + const docTitle = flattenDocs[i].metadata?.title || flattenDocs[i].metadata?.source || `Document ${i + 1}` + + kendraDocuments.push({ + Id: docId, + Title: docTitle, + Blob: new Uint8Array(Buffer.from(flattenDocs[i].pageContent, 'utf-8')), + ContentType: 'PLAIN_TEXT' as any + }) + } + } + + try { + if (kendraDocuments.length > 0) { + // Kendra has a limit of 10 documents per batch + const batchSize = 10 + for (let i = 0; i < kendraDocuments.length; i += batchSize) { + const batch = kendraDocuments.slice(i, i + batchSize) + const command = new BatchPutDocumentCommand({ + IndexId: indexId, + Documents: batch + }) + + const response = await client.send(command) + + if (response.FailedDocuments && response.FailedDocuments.length > 0) { + console.error('Failed documents:', response.FailedDocuments) + throw new Error(`Failed to index some documents: ${JSON.stringify(response.FailedDocuments)}`) + } + } + } + + return { numAdded: finalDocs.length, addedDocs: finalDocs } + } catch (error) { + throw new Error(`Failed to index documents to Kendra: ${error}`) + } + }, + + async delete(nodeData: INodeData, ids: string[], options: ICommonObject): Promise { + const indexId = nodeData.inputs?.indexId as string + const region = nodeData.inputs?.region as string + + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + let clientConfig: any = { region } + + if (credentialData && Object.keys(credentialData).length !== 0) { + const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) + const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) + const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) + + if (accessKeyId && secretAccessKey) { + clientConfig.credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }) + } + } + } + + const client = new KendraClient(clientConfig) + + try { + // Kendra has a limit of 10 documents per batch delete + const batchSize = 10 + for (let i = 0; i < ids.length; i += batchSize) { + const batch = ids.slice(i, i + batchSize) + const command = new BatchDeleteDocumentCommand({ + IndexId: indexId, + DocumentIdList: batch + }) + await client.send(command) + } + } catch (error) { + throw new Error(`Failed to delete documents from Kendra: ${error}`) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const indexId = nodeData.inputs?.indexId as string + const region = nodeData.inputs?.region as string + const topK = nodeData.inputs?.topK as string + const attributeFilter = nodeData.inputs?.attributeFilter + const isFileUploadEnabled = nodeData.inputs?.fileUpload as boolean + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + let clientOptions: any = {} + + if (credentialData && Object.keys(credentialData).length !== 0) { + clientOptions.credentials = { + accessKeyId: getCredentialParam('awsKey', credentialData, nodeData), + secretAccessKey: getCredentialParam('awsSecret', credentialData, nodeData), + sessionToken: getCredentialParam('awsSession', credentialData, nodeData) + } + } + + let filter = undefined + if (attributeFilter) { + filter = typeof attributeFilter === 'object' ? 
attributeFilter : JSON.parse(attributeFilter) + } + + // Add chat-specific filtering if file upload is enabled + if (isFileUploadEnabled && options.chatId) { + if (!filter) { + filter = {} + } + filter.OrAllFilters = [ + ...(filter.OrAllFilters || []), + { + EqualsTo: { + Key: FLOWISE_CHATID, + Value: { + StringValue: options.chatId + } + } + } + ] + } + + const retriever = new AmazonKendraRetriever({ + topK: topK ? parseInt(topK) : 10, + indexId, + region, + attributeFilter: filter, + clientOptions + }) + + const output = nodeData.outputs?.output as string + + if (output === 'retriever') { + return retriever + } else if (output === 'vectorStore') { + // Kendra doesn't have a traditional vector store interface, + // but we can return the retriever with additional properties + ;(retriever as any).k = topK ? parseInt(topK) : 10 + ;(retriever as any).filter = filter + return retriever + } + } +} + +module.exports = { nodeClass: Kendra_VectorStores } diff --git a/packages/components/nodes/vectorstores/Kendra/kendra.svg b/packages/components/nodes/vectorstores/Kendra/kendra.svg new file mode 100644 index 00000000000..89f101bd5b2 --- /dev/null +++ b/packages/components/nodes/vectorstores/Kendra/kendra.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Kendra + + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts b/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts index eed428cdbcc..7be405c6724 100644 --- a/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts +++ b/packages/components/nodes/vectorstores/Meilisearch/Meilisearch.ts @@ -89,7 +89,8 @@ class MeilisearchRetriever_node implements INode { type: 'string', description: 'search filter to apply on searchable attributes', additionalParams: true, - optional: true + optional: true, + acceptVariable: true } ] this.outputs = [ diff --git a/packages/components/nodes/vectorstores/Milvus/Milvus.ts 
b/packages/components/nodes/vectorstores/Milvus/Milvus.ts index ac2e4bd2fef..527d57bf5d2 100644 --- a/packages/components/nodes/vectorstores/Milvus/Milvus.ts +++ b/packages/components/nodes/vectorstores/Milvus/Milvus.ts @@ -100,7 +100,8 @@ class Milvus_VectorStores implements INode { description: 'Filter data with a simple string query. Refer Milvus docs for more details.', placeholder: 'doc=="a"', - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts index 785c64484f6..8fb6efc749c 100644 --- a/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts +++ b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts @@ -90,7 +90,8 @@ class MongoDBAtlas_VectorStores implements INode { name: 'mongoMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts index c9ff257fb18..972f8b7125c 100644 --- a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts +++ b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts @@ -97,7 +97,8 @@ class Pinecone_VectorStores implements INode { name: 'pineconeMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/nodes/vectorstores/Postgres/Postgres.ts b/packages/components/nodes/vectorstores/Postgres/Postgres.ts index ad0f82bb057..ce4a7ba22e0 100644 --- a/packages/components/nodes/vectorstores/Postgres/Postgres.ts +++ b/packages/components/nodes/vectorstores/Postgres/Postgres.ts @@ -194,7 +194,8 @@ class Postgres_VectorStores implements INode { name: 'pgMetadataFilter', type: 'json', 
additionalParams: true, - optional: true + optional: true, + acceptVariable: true }, { label: 'Content Column Name', diff --git a/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts index 2bdbcf353e2..d6bd2157bb3 100644 --- a/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts +++ b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts @@ -171,7 +171,8 @@ class Qdrant_VectorStores implements INode { description: 'Only return points which satisfy the conditions', type: 'json', additionalParams: true, - optional: true + optional: true, + acceptVariable: true } ] this.outputs = [ diff --git a/packages/components/nodes/vectorstores/Supabase/Supabase.ts b/packages/components/nodes/vectorstores/Supabase/Supabase.ts index 72f84f20238..9a088eb24dc 100644 --- a/packages/components/nodes/vectorstores/Supabase/Supabase.ts +++ b/packages/components/nodes/vectorstores/Supabase/Supabase.ts @@ -79,7 +79,8 @@ class Supabase_VectorStores implements INode { name: 'supabaseMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Supabase RPC Filter', diff --git a/packages/components/nodes/vectorstores/Upstash/Upstash.ts b/packages/components/nodes/vectorstores/Upstash/Upstash.ts index d126daa869c..2886afcfa94 100644 --- a/packages/components/nodes/vectorstores/Upstash/Upstash.ts +++ b/packages/components/nodes/vectorstores/Upstash/Upstash.ts @@ -80,7 +80,8 @@ class Upstash_VectorStores implements INode { name: 'upstashMetadataFilter', type: 'string', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara.ts b/packages/components/nodes/vectorstores/Vectara/Vectara.ts index 9514bdc8f85..ae752602f05 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara.ts +++ 
b/packages/components/nodes/vectorstores/Vectara/Vectara.ts @@ -65,7 +65,8 @@ class Vectara_VectorStores implements INode { 'Filter to apply to Vectara metadata. Refer to the documentation on how to use Vectara filters with Flowise.', type: 'string', additionalParams: true, - optional: true + optional: true, + acceptVariable: true }, { label: 'Sentences Before', diff --git a/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts index 5d83eaa9a6a..05bfe23988c 100644 --- a/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts +++ b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts @@ -120,7 +120,8 @@ class Weaviate_VectorStores implements INode { name: 'weaviateFilter', type: 'json', additionalParams: true, - optional: true + optional: true, + acceptVariable: true } ] addMMRInputParams(this.inputs) diff --git a/packages/components/nodes/vectorstores/Zep/Zep.ts b/packages/components/nodes/vectorstores/Zep/Zep.ts index 5caa8e515d4..8158ccc898b 100644 --- a/packages/components/nodes/vectorstores/Zep/Zep.ts +++ b/packages/components/nodes/vectorstores/Zep/Zep.ts @@ -69,7 +69,8 @@ class Zep_VectorStores implements INode { name: 'zepMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Embedding Dimension', diff --git a/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts b/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts index c7e68f05f4d..862b496bdc5 100644 --- a/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts +++ b/packages/components/nodes/vectorstores/ZepCloud/ZepCloud.ts @@ -59,7 +59,8 @@ class Zep_CloudVectorStores implements INode { name: 'zepMetadataFilter', type: 'json', optional: true, - additionalParams: true + additionalParams: true, + acceptVariable: true }, { label: 'Top K', diff --git a/packages/components/package.json b/packages/components/package.json index 
85516b222d3..5eecc02c801 100644 --- a/packages/components/package.json +++ b/packages/components/package.json @@ -1,6 +1,6 @@ { "name": "flowise-components", - "version": "3.0.5", + "version": "3.0.6", "description": "Flowiseai Components", "main": "dist/src/index", "types": "dist/src/index.d.ts", @@ -8,7 +8,10 @@ "build": "tsc && gulp", "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0", "clean": "rimraf dist", - "nuke": "rimraf dist node_modules .turbo" + "nuke": "rimraf dist node_modules .turbo", + "test": "jest", + "test:watch": "jest --watch", + "test:coverage": "jest --coverage" }, "keywords": [], "homepage": "https://flowiseai.com", @@ -22,6 +25,7 @@ "@arizeai/openinference-instrumentation-langchain": "^2.0.0", "@aws-sdk/client-bedrock-runtime": "3.422.0", "@aws-sdk/client-dynamodb": "^3.360.0", + "@aws-sdk/client-kendra": "^3.750.0", "@aws-sdk/client-s3": "^3.844.0", "@aws-sdk/client-secrets-manager": "^3.699.0", "@aws-sdk/client-sns": "^3.699.0", @@ -79,7 +83,7 @@ "@zilliz/milvus2-sdk-node": "^2.2.24", "apify-client": "^2.7.1", "assemblyai": "^4.2.2", - "axios": "1.7.9", + "axios": "1.12.0", "cheerio": "^1.0.0-rc.12", "chromadb": "^1.10.0", "cohere-ai": "^7.7.5", @@ -101,6 +105,7 @@ "ioredis": "^5.3.2", "ipaddr.js": "^2.2.0", "jsdom": "^22.1.0", + "json5": "2.2.3", "jsonpointer": "^5.0.1", "jsonrepair": "^3.11.1", "langchain": "^0.3.5", @@ -150,6 +155,7 @@ "@swc/core": "^1.3.99", "@types/crypto-js": "^4.1.1", "@types/gulp": "4.0.9", + "@types/jest": "^29.5.14", "@types/lodash": "^4.17.20", "@types/node-fetch": "2.6.2", "@types/object-hash": "^3.0.2", @@ -157,7 +163,9 @@ "@types/pg": "^8.10.2", "@types/ws": "^8.5.3", "gulp": "^4.0.2", + "jest": "^29.7.0", "rimraf": "^5.0.5", + "ts-jest": "^29.3.2", "tsc-watch": "^6.0.4", "tslib": "^2.6.2", "typescript": "^5.4.5" diff --git a/packages/components/src/agents.ts b/packages/components/src/agents.ts index 022a5be09f6..7b23f4788e8 100644 --- a/packages/components/src/agents.ts +++ 
b/packages/components/src/agents.ts @@ -30,6 +30,16 @@ export const SOURCE_DOCUMENTS_PREFIX = '\n\n----FLOWISE_SOURCE_DOCUMENTS----\n\n export const ARTIFACTS_PREFIX = '\n\n----FLOWISE_ARTIFACTS----\n\n' export const TOOL_ARGS_PREFIX = '\n\n----FLOWISE_TOOL_ARGS----\n\n' +/** + * Utility function to format tool error messages with parameters for debugging + * @param errorMessage - The base error message + * @param params - The parameters that were passed to the tool + * @returns Formatted error message with tool arguments appended + */ +export const formatToolError = (errorMessage: string, params: any): string => { + return errorMessage + TOOL_ARGS_PREFIX + JSON.stringify(params) +} + export type AgentFinish = { returnValues: Record log: string diff --git a/packages/components/src/awsToolsUtils.ts b/packages/components/src/awsToolsUtils.ts new file mode 100644 index 00000000000..46edafeff39 --- /dev/null +++ b/packages/components/src/awsToolsUtils.ts @@ -0,0 +1,65 @@ +import { ICommonObject, INodeData } from './Interface' +import { getCredentialData, getCredentialParam } from './utils' + +// AWS Regions constant +export const AWS_REGIONS = [ + { label: 'US East (N. Virginia) - us-east-1', name: 'us-east-1' }, + { label: 'US East (Ohio) - us-east-2', name: 'us-east-2' }, + { label: 'US West (N. 
California) - us-west-1', name: 'us-west-1' }, + { label: 'US West (Oregon) - us-west-2', name: 'us-west-2' }, + { label: 'Africa (Cape Town) - af-south-1', name: 'af-south-1' }, + { label: 'Asia Pacific (Hong Kong) - ap-east-1', name: 'ap-east-1' }, + { label: 'Asia Pacific (Mumbai) - ap-south-1', name: 'ap-south-1' }, + { label: 'Asia Pacific (Osaka) - ap-northeast-3', name: 'ap-northeast-3' }, + { label: 'Asia Pacific (Seoul) - ap-northeast-2', name: 'ap-northeast-2' }, + { label: 'Asia Pacific (Singapore) - ap-southeast-1', name: 'ap-southeast-1' }, + { label: 'Asia Pacific (Sydney) - ap-southeast-2', name: 'ap-southeast-2' }, + { label: 'Asia Pacific (Tokyo) - ap-northeast-1', name: 'ap-northeast-1' }, + { label: 'Canada (Central) - ca-central-1', name: 'ca-central-1' }, + { label: 'Europe (Frankfurt) - eu-central-1', name: 'eu-central-1' }, + { label: 'Europe (Ireland) - eu-west-1', name: 'eu-west-1' }, + { label: 'Europe (London) - eu-west-2', name: 'eu-west-2' }, + { label: 'Europe (Milan) - eu-south-1', name: 'eu-south-1' }, + { label: 'Europe (Paris) - eu-west-3', name: 'eu-west-3' }, + { label: 'Europe (Stockholm) - eu-north-1', name: 'eu-north-1' }, + { label: 'Middle East (Bahrain) - me-south-1', name: 'me-south-1' }, + { label: 'South America (São Paulo) - sa-east-1', name: 'sa-east-1' } +] + +export const DEFAULT_AWS_REGION = 'us-east-1' + +// AWS Credentials interface +export interface AWSCredentials { + accessKeyId: string + secretAccessKey: string + sessionToken?: string +} + +/** + * Get AWS credentials from node data + * @param {INodeData} nodeData - Node data containing credential information + * @param {ICommonObject} options - Options containing appDataSource and databaseEntities + * @returns {Promise} - AWS credentials object + */ +export async function getAWSCredentials(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + + const accessKeyId = getCredentialParam('awsKey', credentialData, nodeData) + const secretAccessKey = getCredentialParam('awsSecret', credentialData, nodeData) + const sessionToken = getCredentialParam('awsSession', credentialData, nodeData) + + if (!accessKeyId || !secretAccessKey) { + throw new Error('AWS Access Key ID and Secret Access Key are required') + } + + const credentials: AWSCredentials = { + accessKeyId, + secretAccessKey + } + + if (sessionToken) { + credentials.sessionToken = sessionToken + } + + return credentials +} diff --git a/packages/components/src/handler.test.ts b/packages/components/src/handler.test.ts new file mode 100644 index 00000000000..333b2cba818 --- /dev/null +++ b/packages/components/src/handler.test.ts @@ -0,0 +1,51 @@ +import { getPhoenixTracer } from './handler' + +jest.mock('@opentelemetry/exporter-trace-otlp-proto', () => { + return { + ProtoOTLPTraceExporter: jest.fn().mockImplementation((args) => { + return { args } + }) + } +}) + +import { OTLPTraceExporter as ProtoOTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto' + +describe('URL Handling For Phoenix Tracer', () => { + const apiKey = 'test-api-key' + const projectName = 'test-project-name' + + const makeOptions = (baseUrl: string) => ({ + baseUrl, + apiKey, + projectName, + enableCallback: false + }) + + beforeEach(() => { + jest.clearAllMocks() + }) + + const cases: [string, string][] = [ + ['http://localhost:6006', 'http://localhost:6006/v1/traces'], + ['http://localhost:6006/v1/traces', 'http://localhost:6006/v1/traces'], + ['https://app.phoenix.arize.com', 'https://app.phoenix.arize.com/v1/traces'], + ['https://app.phoenix.arize.com/v1/traces', 'https://app.phoenix.arize.com/v1/traces'], + ['https://app.phoenix.arize.com/s/my-space', 'https://app.phoenix.arize.com/s/my-space/v1/traces'], + ['https://app.phoenix.arize.com/s/my-space/v1/traces', 'https://app.phoenix.arize.com/s/my-space/v1/traces'], + ['https://my-phoenix.com/my-slug', 
'https://my-phoenix.com/my-slug/v1/traces'], + ['https://my-phoenix.com/my-slug/v1/traces', 'https://my-phoenix.com/my-slug/v1/traces'] + ] + + it.each(cases)('baseUrl %s - exporterUrl %s', (input, expected) => { + getPhoenixTracer(makeOptions(input)) + expect(ProtoOTLPTraceExporter).toHaveBeenCalledWith( + expect.objectContaining({ + url: expected, + headers: expect.objectContaining({ + api_key: apiKey, + authorization: `Bearer ${apiKey}` + }) + }) + ) + }) +}) diff --git a/packages/components/src/handler.ts b/packages/components/src/handler.ts index 1a8830232ee..bed8c41f06d 100644 --- a/packages/components/src/handler.ts +++ b/packages/components/src/handler.ts @@ -1,4 +1,5 @@ import { Logger } from 'winston' +import { URL } from 'url' import { v4 as uuidv4 } from 'uuid' import { Client } from 'langsmith' import CallbackHandler from 'langfuse-langchain' @@ -91,14 +92,27 @@ interface PhoenixTracerOptions { enableCallback?: boolean } -function getPhoenixTracer(options: PhoenixTracerOptions): Tracer | undefined { +export function getPhoenixTracer(options: PhoenixTracerOptions): Tracer | undefined { const SEMRESATTRS_PROJECT_NAME = 'openinference.project.name' try { + const parsedURL = new URL(options.baseUrl) + const baseEndpoint = `${parsedURL.protocol}//${parsedURL.host}` + + // Remove trailing slashes + let path = parsedURL.pathname.replace(/\/$/, '') + + // Remove any existing /v1/traces suffix + path = path.replace(/\/v1\/traces$/, '') + + const exporterUrl = `${baseEndpoint}${path}/v1/traces` + const exporterHeaders = { + api_key: options.apiKey || '', + authorization: `Bearer ${options.apiKey || ''}` + } + const traceExporter = new ProtoOTLPTraceExporter({ - url: `${options.baseUrl}/v1/traces`, - headers: { - api_key: options.apiKey - } + url: exporterUrl, + headers: exporterHeaders }) const tracerProvider = new NodeTracerProvider({ resource: new Resource({ @@ -591,6 +605,15 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO 
}) const trace = langwatch.getTrace() + + if (nodeData?.inputs?.analytics?.langWatch) { + trace.update({ + metadata: { + ...nodeData?.inputs?.analytics?.langWatch + } + }) + } + callbacks.push(trace.getLangChainCallback()) } else if (provider === 'arize') { const arizeApiKey = getCredentialParam('arizeApiKey', credentialData, nodeData) diff --git a/packages/components/src/index.ts b/packages/components/src/index.ts index f2dc564042d..6b9f4cced45 100644 --- a/packages/components/src/index.ts +++ b/packages/components/src/index.ts @@ -13,3 +13,4 @@ export * from '../evaluation/EvaluationRunner' export * from './followUpPrompts' export * from './validator' export * from './agentflowv2Generator' +export * from './httpSecurity' diff --git a/packages/components/src/secureZodParser.ts b/packages/components/src/secureZodParser.ts index 999359d18bd..bd6afd0dfc7 100644 --- a/packages/components/src/secureZodParser.ts +++ b/packages/components/src/secureZodParser.ts @@ -135,6 +135,28 @@ export class SecureZodSchemaParser { } private static parseZodType(typeStr: string): any { + // Check if this is a nested object (not in an array) + if (typeStr.startsWith('z.object(') && !typeStr.startsWith('z.array(')) { + // Extract object content + const objectMatch = typeStr.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/) + if (!objectMatch) { + throw new Error('Invalid object syntax') + } + + const objectContent = objectMatch[1] + const objectProperties = this.parseObjectProperties(objectContent) + + return { + isNestedObject: true, + objectSchema: objectProperties + } + } + + // Check if this is any kind of array + if (typeStr.startsWith('z.array(')) { + return this.parseArray(typeStr) + } + const type: { base: string; modifiers: any[]; baseArgs?: any[] } = { base: '', modifiers: [] } // Handle chained methods like z.string().max(500).optional() @@ -181,6 +203,74 @@ export class SecureZodSchemaParser { return type } + private static parseArray(typeStr: string): any { + // Extract the content 
inside array() + const arrayContentMatch = typeStr.match(/z\.array\(\s*([\s\S]*)\s*\)$/) + if (!arrayContentMatch) { + throw new Error('Invalid array syntax') + } + + const arrayContent = arrayContentMatch[1].trim() + + // Parse the object inside the array + if (arrayContent.startsWith('z.object(')) { + // Extract object content + const objectMatch = arrayContent.match(/z\.object\(\s*\{([\s\S]*)\}\s*\)/) + if (!objectMatch) { + throw new Error('Invalid object syntax inside array') + } + + const objectContent = objectMatch[1] + const objectProperties = this.parseObjectProperties(objectContent) + + // Validate each property in the nested object + for (const propValue of Object.values(objectProperties)) { + this.validateTypeInfo(propValue) + } + + return { + isArrayOfObjects: true, + objectSchema: objectProperties + } + } + + // Handle simple arrays (e.g., z.array(z.string())) + const innerType = this.parseZodType(arrayContent) + + return { + isSimpleArray: true, + innerType: innerType + } + } + + private static validateTypeInfo(typeInfo: any): void { + // If it's a nested object or array of objects, validate each property + if (typeInfo.isNestedObject || typeInfo.isArrayOfObjects) { + for (const propValue of Object.values(typeInfo.objectSchema)) { + this.validateTypeInfo(propValue) + } + return + } + + // If it's a simple array, validate the inner type + if (typeInfo.isSimpleArray) { + this.validateTypeInfo(typeInfo.innerType) + return + } + + // Validate base type + if (!this.ALLOWED_TYPES.includes(typeInfo.base)) { + throw new Error(`Unsupported type: ${typeInfo.base}`) + } + + // Validate modifiers + for (const modifier of typeInfo.modifiers || []) { + if (!this.ALLOWED_TYPES.includes(modifier.name)) { + throw new Error(`Unsupported modifier: ${modifier.name}`) + } + } + } + private static parseArguments(argsStr: string): any[] { // Remove outer parentheses const inner = argsStr.slice(1, -1).trim() @@ -250,6 +340,23 @@ export class SecureZodSchemaParser { } 
private static buildZodType(typeInfo: any): z.ZodTypeAny { + // Special case for nested objects + if (typeInfo.isNestedObject) { + return this.buildZodSchema(typeInfo.objectSchema) + } + + // Special case for array of objects + if (typeInfo.isArrayOfObjects) { + const objectSchema = this.buildZodSchema(typeInfo.objectSchema) + return z.array(objectSchema) + } + + // Special case for simple arrays + if (typeInfo.isSimpleArray) { + const innerZodType = this.buildZodType(typeInfo.innerType) + return z.array(innerZodType) + } + let zodType: z.ZodTypeAny // Build base type diff --git a/packages/components/src/storageUtils.ts b/packages/components/src/storageUtils.ts index 954609a2801..ff48bb0569e 100644 --- a/packages/components/src/storageUtils.ts +++ b/packages/components/src/storageUtils.ts @@ -753,8 +753,8 @@ export const streamStorageFile = async ( } // Check for path traversal attempts - if (isPathTraversal(chatflowId)) { - throw new Error('Invalid path characters detected in chatflowId') + if (isPathTraversal(chatflowId) || isPathTraversal(chatId)) { + throw new Error('Invalid path characters detected in chatflowId or chatId') } const storageType = getStorageType() @@ -1036,15 +1036,12 @@ export const getGcsClient = () => { const projectId = process.env.GOOGLE_CLOUD_STORAGE_PROJ_ID const bucketName = process.env.GOOGLE_CLOUD_STORAGE_BUCKET_NAME - if (!pathToGcsCredential) { - throw new Error('GOOGLE_CLOUD_STORAGE_CREDENTIAL env variable is required') - } if (!bucketName) { throw new Error('GOOGLE_CLOUD_STORAGE_BUCKET_NAME env variable is required') } const storageConfig = { - keyFilename: pathToGcsCredential, + ...(pathToGcsCredential ? { keyFilename: pathToGcsCredential } : {}), ...(projectId ? 
{ projectId } : {}) } diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts index 940961ac758..7b0c4a25d5f 100644 --- a/packages/components/src/utils.ts +++ b/packages/components/src/utils.ts @@ -8,7 +8,7 @@ import TurndownService from 'turndown' import { DataSource, Equal } from 'typeorm' import { ICommonObject, IDatabaseEntity, IFileUpload, IMessage, INodeData, IVariable, MessageContentImageUrl } from './Interface' import { AES, enc } from 'crypto-js' -import { omit } from 'lodash' +import { omit, get } from 'lodash' import { AIMessage, HumanMessage, BaseMessage } from '@langchain/core/messages' import { Document } from '@langchain/core/documents' import { getFileFromStorage } from './storageUtils' @@ -18,6 +18,8 @@ import { TextSplitter } from 'langchain/text_splitter' import { DocumentLoader } from 'langchain/document_loaders/base' import { NodeVM } from '@flowiseai/nodevm' import { Sandbox } from '@e2b/code-interpreter' +import { secureFetch, checkDenyList } from './httpSecurity' +import JSON5 from 'json5' export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}} export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is not empty or blank @@ -421,7 +423,7 @@ async function crawl(baseURL: string, currentURL: string, pages: string[], limit if (process.env.DEBUG === 'true') console.info(`actively crawling ${currentURL}`) try { - const resp = await fetch(currentURL) + const resp = await secureFetch(currentURL) if (resp.status > 399) { if (process.env.DEBUG === 'true') console.error(`error in fetch with status code: ${resp.status}, on page: ${currentURL}`) @@ -452,6 +454,8 @@ async function crawl(baseURL: string, currentURL: string, pages: string[], limit * @returns {Promise} */ export async function webCrawl(stringURL: string, limit: number): Promise { + await checkDenyList(stringURL) + const URLObj = new URL(stringURL) const modifyURL = 
stringURL.slice(-1) === '/' ? stringURL.slice(0, -1) : stringURL return await crawl(URLObj.protocol + '//' + URLObj.hostname, modifyURL, [], limit) @@ -475,7 +479,7 @@ export async function xmlScrape(currentURL: string, limit: number): Promise 399) { if (process.env.DEBUG === 'true') console.error(`error in fetch with status code: ${resp.status}, on page: ${currentURL}`) @@ -1382,6 +1386,39 @@ const convertRequireToImport = (requireLine: string): string | null => { return null } +/** + * Parse output if it's a stringified JSON or array + * @param {any} output - The output to parse + * @returns {any} - The parsed output or original output if not parseable + */ +const parseOutput = (output: any): any => { + // If output is not a string, return as-is + if (typeof output !== 'string') { + return output + } + + // Trim whitespace + const trimmedOutput = output.trim() + + // Check if it's an empty string + if (!trimmedOutput) { + return output + } + + // Check if it looks like JSON (starts with { or [) + if ((trimmedOutput.startsWith('{') && trimmedOutput.endsWith('}')) || (trimmedOutput.startsWith('[') && trimmedOutput.endsWith(']'))) { + try { + const parsedOutput = JSON5.parse(trimmedOutput) + return parsedOutput + } catch (e) { + return output + } + } + + // Return the original string if it doesn't look like JSON + return output +} + /** * Execute JavaScript code using either Sandbox or NodeVM * @param {string} code - The JavaScript code to execute @@ -1400,7 +1437,7 @@ export const executeJavaScriptCode = async ( nodeVMOptions?: ICommonObject } = {} ): Promise => { - const { timeout = 10000, useSandbox = true, streamOutput, libraries = [], nodeVMOptions = {} } = options + const { timeout = 300000, useSandbox = true, streamOutput, libraries = [], nodeVMOptions = {} } = options const shouldUseSandbox = useSandbox && process.env.E2B_APIKEY if (shouldUseSandbox) { @@ -1458,7 +1495,7 @@ export const executeJavaScriptCode = async ( } } - const sbx = await Sandbox.create({ 
apiKey: process.env.E2B_APIKEY }) + const sbx = await Sandbox.create({ apiKey: process.env.E2B_APIKEY, timeoutMs: timeout }) // Install libraries for (const library of libraries) { @@ -1497,7 +1534,7 @@ export const executeJavaScriptCode = async ( // Clean up sandbox sbx.kill() - return output + return parseOutput(output) } catch (e) { throw new Error(`Sandbox Execution Error: ${e}`) } @@ -1529,16 +1566,17 @@ export const executeJavaScriptCode = async ( const response = await vm.run(`module.exports = async function() {${code}}()`, __dirname) let finalOutput = response - if (typeof response === 'object') { - finalOutput = JSON.stringify(response, null, 2) - } // Stream output if streaming function provided if (streamOutput && finalOutput) { - streamOutput(finalOutput) + let streamOutputString = finalOutput + if (typeof response === 'object') { + streamOutputString = JSON.stringify(finalOutput, null, 2) + } + streamOutput(streamOutputString) } - return finalOutput + return parseOutput(finalOutput) } catch (e) { throw new Error(`NodeVM Execution Error: ${e}`) } @@ -1574,3 +1612,50 @@ export const createCodeExecutionSandbox = ( return sandbox } + +/** + * Process template variables in state object, replacing {{ output }} and {{ output.property }} patterns + * @param {ICommonObject} state - The state object to process + * @param {any} finalOutput - The output value to substitute + * @returns {ICommonObject} - The processed state object + */ +export const processTemplateVariables = (state: ICommonObject, finalOutput: any): ICommonObject => { + if (!state || Object.keys(state).length === 0) { + return state + } + + const newState = { ...state } + + for (const key in newState) { + const stateValue = newState[key].toString() + if (stateValue.includes('{{ output') || stateValue.includes('{{output')) { + // Handle simple output replacement (with or without spaces) + if (stateValue === '{{ output }}' || stateValue === '{{output}}') { + newState[key] = finalOutput + continue + 
} + + // Handle JSON path expressions like {{ output.updated }} or {{output.updated}} + // eslint-disable-next-line + const match = stateValue.match(/\{\{\s*output\.([\w\.]+)\s*\}\}/) + if (match) { + try { + // Parse the response if it's JSON + const jsonResponse = typeof finalOutput === 'string' ? JSON.parse(finalOutput) : finalOutput + // Get the value using lodash get + const path = match[1] + const value = get(jsonResponse, path) + newState[key] = value ?? stateValue // Fall back to original if path not found + } catch (e) { + // If JSON parsing fails, keep original template + newState[key] = stateValue + } + } else { + // Handle simple {{ output }} replacement for backward compatibility + newState[key] = newState[key].replaceAll('{{ output }}', finalOutput) + } + } + } + + return newState +} diff --git a/packages/components/tsconfig.json b/packages/components/tsconfig.json index bda815f9abc..edac0ceea87 100644 --- a/packages/components/tsconfig.json +++ b/packages/components/tsconfig.json @@ -17,5 +17,5 @@ "module": "commonjs" }, "include": ["src", "nodes", "credentials"], - "exclude": ["gulpfile.ts", "node_modules", "dist"] + "exclude": ["gulpfile.ts", "node_modules", "dist", "**/*.test.ts", "**/*.test.js", "**/*.spec.ts", "**/*.spec.js"] } diff --git a/packages/server/package.json b/packages/server/package.json index 3262b605a04..9eb122e414f 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "3.0.5", + "version": "3.0.6", "description": "Flowiseai Server", "main": "dist/index", "types": "dist/index.d.ts", @@ -87,7 +87,7 @@ "@types/passport-local": "^1.0.38", "@types/uuid": "^9.0.7", "async-mutex": "^0.4.0", - "axios": "1.7.9", + "axios": "1.12.0", "bcryptjs": "^2.4.3", "bullmq": "5.45.2", "cache-manager": "^6.3.2", diff --git a/packages/server/src/Interface.DocumentStore.ts b/packages/server/src/Interface.DocumentStore.ts index 7eda1cc1a96..a7f84f92e98 100644 --- 
a/packages/server/src/Interface.DocumentStore.ts +++ b/packages/server/src/Interface.DocumentStore.ts @@ -81,6 +81,7 @@ export interface IDocumentStoreUpsertData { replaceExisting?: boolean createNewDocStore?: boolean docStore?: IDocumentStore + loaderName?: string loader?: { name: string config: ICommonObject diff --git a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts index 97f66d10920..57dacd73ea2 100644 --- a/packages/server/src/Interface.ts +++ b/packages/server/src/Interface.ts @@ -370,6 +370,7 @@ export interface ICustomTemplate { export interface IFlowConfig { chatflowid: string + chatflowId: string chatId: string sessionId: string chatHistory: IMessage[] @@ -395,6 +396,7 @@ export interface IExecuteFlowParams extends IPredictionQueueAppServer { orgId: string workspaceId: string subscriptionId: string + productId: string baseURL: string isInternal: boolean isEvaluation?: boolean diff --git a/packages/server/src/controllers/executions/index.ts b/packages/server/src/controllers/executions/index.ts index 7e3d80ae5da..074b0efa9a9 100644 --- a/packages/server/src/controllers/executions/index.ts +++ b/packages/server/src/controllers/executions/index.ts @@ -47,6 +47,7 @@ const getAllExecutions = async (req: Request, res: Response, next: NextFunction) // Flow and session filters if (req.query.agentflowId) filters.agentflowId = req.query.agentflowId as string + if (req.query.agentflowName) filters.agentflowName = req.query.agentflowName as string if (req.query.sessionId) filters.sessionId = req.query.sessionId as string // State filter diff --git a/packages/server/src/database/entities/ChatFlow.ts b/packages/server/src/database/entities/ChatFlow.ts index 4c14e99c1c4..7d047ba4a9a 100644 --- a/packages/server/src/database/entities/ChatFlow.ts +++ b/packages/server/src/database/entities/ChatFlow.ts @@ -2,6 +2,13 @@ import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn } from 'typeorm' import { ChatflowType, IChatFlow } 
from '../../Interface' +export enum EnumChatflowType { + CHATFLOW = 'CHATFLOW', + AGENTFLOW = 'AGENTFLOW', + MULTIAGENT = 'MULTIAGENT', + ASSISTANT = 'ASSISTANT' +} + @Entity() export class ChatFlow implements IChatFlow { @PrimaryGeneratedColumn('uuid') @@ -40,7 +47,7 @@ export class ChatFlow implements IChatFlow { @Column({ nullable: true, type: 'text' }) category?: string - @Column({ nullable: true, type: 'text' }) + @Column({ type: 'varchar', length: 20, default: EnumChatflowType.CHATFLOW }) type?: ChatflowType @Column({ type: 'timestamp' }) diff --git a/packages/server/src/database/migrations/mariadb/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/mariadb/1755066758601-ModifyChatflowType.ts new file mode 100644 index 00000000000..a1ff962c491 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,15 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + UPDATE \`chat_flow\` SET \`type\` = '${EnumChatflowType.CHATFLOW}' WHERE \`type\` IS NULL OR \`type\` = ''; + `) + await queryRunner.query(` + ALTER TABLE \`chat_flow\` MODIFY COLUMN \`type\` VARCHAR(20) NOT NULL DEFAULT '${EnumChatflowType.CHATFLOW}'; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/database/migrations/mariadb/1755748356008-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/mariadb/1755748356008-AddChatFlowNameIndex.ts new file mode 100644 index 00000000000..62c74f167d5 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1755748356008-AddChatFlowNameIndex.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1755748356008 implements MigrationInterface { + 
name = 'AddChatFlowNameIndex1755748356008' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX \`IDX_chatflow_name\` ON \`chat_flow\` (\`name\`)`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX \`IDX_chatflow_name\` ON \`chat_flow\``) + } +} diff --git a/packages/server/src/database/migrations/mariadb/index.ts b/packages/server/src/database/migrations/mariadb/index.ts index 272a6bb1ff2..55cfb420027 100644 --- a/packages/server/src/database/migrations/mariadb/index.ts +++ b/packages/server/src/database/migrations/mariadb/index.ts @@ -36,6 +36,8 @@ import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEnt import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' import { ModifyExecutionDataColumnType1747902489801 } from './1747902489801-ModifyExecutionDataColumnType' +import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType' +import { AddChatFlowNameIndex1755748356008 } from './1755748356008-AddChatFlowNameIndex' import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/mariadb/1720230151482-AddAuthTables' import { AddWorkspace1725437498242 } from '../../../enterprise/database/migrations/mariadb/1725437498242-AddWorkspace' @@ -98,5 +100,7 @@ export const mariadbMigrations = [ FixOpenSourceAssistantTable1743758056188, AddErrorToEvaluationRun1744964560174, ExecutionLinkWorkspaceId1746862866554, - ModifyExecutionDataColumnType1747902489801 + ModifyExecutionDataColumnType1747902489801, + ModifyChatflowType1755066758601, + AddChatFlowNameIndex1755748356008 ] diff --git a/packages/server/src/database/migrations/mysql/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/mysql/1755066758601-ModifyChatflowType.ts new file mode 100644 index 
00000000000..a1ff962c491 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,15 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + UPDATE \`chat_flow\` SET \`type\` = '${EnumChatflowType.CHATFLOW}' WHERE \`type\` IS NULL OR \`type\` = ''; + `) + await queryRunner.query(` + ALTER TABLE \`chat_flow\` MODIFY COLUMN \`type\` VARCHAR(20) NOT NULL DEFAULT '${EnumChatflowType.CHATFLOW}'; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/database/migrations/mysql/1755748356008-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/mysql/1755748356008-AddChatFlowNameIndex.ts new file mode 100644 index 00000000000..62c74f167d5 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1755748356008-AddChatFlowNameIndex.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1755748356008 implements MigrationInterface { + name = 'AddChatFlowNameIndex1755748356008' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX \`IDX_chatflow_name\` ON \`chat_flow\` (\`name\`)`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX \`IDX_chatflow_name\` ON \`chat_flow\``) + } +} diff --git a/packages/server/src/database/migrations/mysql/index.ts b/packages/server/src/database/migrations/mysql/index.ts index c51ebb8a945..8fb9804b42b 100644 --- a/packages/server/src/database/migrations/mysql/index.ts +++ b/packages/server/src/database/migrations/mysql/index.ts @@ -37,6 +37,8 @@ import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpe import { AddErrorToEvaluationRun1744964560174 
} from './1744964560174-AddErrorToEvaluationRun' import { FixErrorsColumnInEvaluationRun1746437114935 } from './1746437114935-FixErrorsColumnInEvaluationRun' import { ModifyExecutionDataColumnType1747902489801 } from './1747902489801-ModifyExecutionDataColumnType' +import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType' +import { AddChatFlowNameIndex1755748356008 } from './1755748356008-AddChatFlowNameIndex' import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/mysql/1720230151482-AddAuthTables' import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/mysql/1720230151484-AddWorkspace' @@ -100,5 +102,7 @@ export const mysqlMigrations = [ AddErrorToEvaluationRun1744964560174, FixErrorsColumnInEvaluationRun1746437114935, ExecutionLinkWorkspaceId1746862866554, - ModifyExecutionDataColumnType1747902489801 + ModifyExecutionDataColumnType1747902489801, + ModifyChatflowType1755066758601, + AddChatFlowNameIndex1755748356008 ] diff --git a/packages/server/src/database/migrations/postgres/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/postgres/1755066758601-ModifyChatflowType.ts new file mode 100644 index 00000000000..02c2c125066 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + UPDATE "chat_flow" SET "type" = '${EnumChatflowType.CHATFLOW}' WHERE "type" IS NULL OR "type" = ''; + `) + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "type" SET DEFAULT '${EnumChatflowType.CHATFLOW}'; + `) + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "type" TYPE VARCHAR(20); + `) + await 
queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "type" SET NOT NULL; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/database/migrations/postgres/1755748356008-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/postgres/1755748356008-AddChatFlowNameIndex.ts new file mode 100644 index 00000000000..75ebae63727 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1755748356008-AddChatFlowNameIndex.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1755748356008 implements MigrationInterface { + name = 'AddChatFlowNameIndex1755748356008' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX "IDX_chatflow_name" ON "chat_flow" ("name")`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX "IDX_chatflow_name"`) + } +} diff --git a/packages/server/src/database/migrations/postgres/index.ts b/packages/server/src/database/migrations/postgres/index.ts index 4da17daa4ab..f254427d0bf 100644 --- a/packages/server/src/database/migrations/postgres/index.ts +++ b/packages/server/src/database/migrations/postgres/index.ts @@ -36,6 +36,8 @@ import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEnt import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' import { ModifyExecutionSessionIdFieldType1748450230238 } from './1748450230238-ModifyExecutionSessionIdFieldType' +import { ModifyChatflowType1755066758601 } from './1755066758601-ModifyChatflowType' +import { AddChatFlowNameIndex1755748356008 } from './1755748356008-AddChatFlowNameIndex' import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/postgres/1720230151482-AddAuthTables' import { AddWorkspace1720230151484 } 
from '../../../enterprise/database/migrations/postgres/1720230151484-AddWorkspace' @@ -98,5 +100,7 @@ export const postgresMigrations = [ FixOpenSourceAssistantTable1743758056188, AddErrorToEvaluationRun1744964560174, ExecutionLinkWorkspaceId1746862866554, - ModifyExecutionSessionIdFieldType1748450230238 + ModifyExecutionSessionIdFieldType1748450230238, + ModifyChatflowType1755066758601, + AddChatFlowNameIndex1755748356008 ] diff --git a/packages/server/src/database/migrations/sqlite/1755066758601-ModifyChatflowType.ts b/packages/server/src/database/migrations/sqlite/1755066758601-ModifyChatflowType.ts new file mode 100644 index 00000000000..9af5602e796 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1755066758601-ModifyChatflowType.ts @@ -0,0 +1,40 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { EnumChatflowType } from '../../entities/ChatFlow' + +export class ModifyChatflowType1755066758601 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + CREATE TABLE "temp_chat_flow" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "deployed" boolean, + "isPublic" boolean, + "apikeyid" varchar, + "chatbotConfig" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "apiConfig" TEXT, + "analytic" TEXT, + "category" TEXT, + "speechToText" TEXT, + "type" VARCHAR(20) NOT NULL DEFAULT '${EnumChatflowType.CHATFLOW}', + "workspaceId" TEXT, + "followUpPrompts" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + await queryRunner.query(` + INSERT INTO "temp_chat_flow" ("id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts") + SELECT "id", "name", "flowData", "deployed", "isPublic", 
"apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", + CASE WHEN "type" IS NULL OR "type" = '' THEN '${EnumChatflowType.CHATFLOW}' ELSE "type" END, "workspaceId", "followUpPrompts" FROM "chat_flow"; + `) + + await queryRunner.query(`DROP TABLE "chat_flow";`) + + await queryRunner.query(`ALTER TABLE "temp_chat_flow" RENAME TO "chat_flow";`) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/database/migrations/sqlite/1755748356008-AddChatFlowNameIndex.ts b/packages/server/src/database/migrations/sqlite/1755748356008-AddChatFlowNameIndex.ts new file mode 100644 index 00000000000..75ebae63727 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1755748356008-AddChatFlowNameIndex.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddChatFlowNameIndex1755748356008 implements MigrationInterface { + name = 'AddChatFlowNameIndex1755748356008' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`CREATE INDEX "IDX_chatflow_name" ON "chat_flow" ("name")`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX "IDX_chatflow_name"`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/index.ts b/packages/server/src/database/migrations/sqlite/index.ts index 0b15e26938f..b611f7a3104 100644 --- a/packages/server/src/database/migrations/sqlite/index.ts +++ b/packages/server/src/database/migrations/sqlite/index.ts @@ -34,6 +34,8 @@ import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDat import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { ModifyChatflowType1755066758601 } from 
'./1755066758601-ModifyChatflowType' +import { AddChatFlowNameIndex1755748356008 } from './1755748356008-AddChatFlowNameIndex' import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/sqlite/1720230151482-AddAuthTables' import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/sqlite/1720230151484-AddWorkspace' @@ -94,5 +96,7 @@ export const sqliteMigrations = [ AddExecutionEntity1738090872625, FixOpenSourceAssistantTable1743758056188, AddErrorToEvaluationRun1744964560174, - ExecutionLinkWorkspaceId1746862866554 + ExecutionLinkWorkspaceId1746862866554, + ModifyChatflowType1755066758601, + AddChatFlowNameIndex1755748356008 ] diff --git a/packages/server/src/enterprise/services/account.service.ts b/packages/server/src/enterprise/services/account.service.ts index 5cc1107e699..a4cb46ad9d7 100644 --- a/packages/server/src/enterprise/services/account.service.ts +++ b/packages/server/src/enterprise/services/account.service.ts @@ -25,6 +25,7 @@ import { RoleErrorMessage, RoleService } from './role.service' import { UserErrorMessage, UserService } from './user.service' import { WorkspaceUserErrorMessage, WorkspaceUserService } from './workspace-user.service' import { WorkspaceErrorMessage, WorkspaceService } from './workspace.service' +import { sanitizeUser } from '../../utils/sanitize.util' type AccountDTO = { user: Partial @@ -175,7 +176,7 @@ export class AccountService { if (data.user.tempToken) { const user = await this.userService.readUserByToken(data.user.tempToken, queryRunner) if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) - if (user.email !== data.user.email) + if (user.email.toLowerCase() !== data.user.email?.toLowerCase()) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) const name = data.user.name if (data.user.credential) user.credential = this.userService.encryptUserCredential(data.user.credential) @@ -540,7 +541,7 
@@ export class AccountService { await queryRunner.release() } - return data + return sanitizeUser(data.user) } public async resetPassword(data: AccountDTO) { @@ -582,7 +583,7 @@ export class AccountService { await queryRunner.release() } - return data + return sanitizeUser(data.user) } public async logout(user: LoggedInUser) { diff --git a/packages/server/src/enterprise/services/user.service.ts b/packages/server/src/enterprise/services/user.service.ts index 0f15b392448..4fe80a04d96 100644 --- a/packages/server/src/enterprise/services/user.service.ts +++ b/packages/server/src/enterprise/services/user.service.ts @@ -5,10 +5,11 @@ import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { Telemetry, TelemetryEventType } from '../../utils/telemetry' import { User, UserStatus } from '../database/entities/user.entity' import { isInvalidEmail, isInvalidName, isInvalidPassword, isInvalidUUID } from '../utils/validation.util' -import { DataSource, QueryRunner } from 'typeorm' +import { DataSource, ILike, QueryRunner } from 'typeorm' import { generateId } from '../../utils' import { GeneralErrorMessage } from '../../utils/constants' import { getHash } from '../utils/encryption.util' +import { sanitizeUser } from '../../utils/sanitize.util' export const enum UserErrorMessage { EXPIRED_TEMP_TOKEN = 'Expired Temporary Token', @@ -53,8 +54,9 @@ export class UserService { } public async readUserByEmail(email: string | undefined, queryRunner: QueryRunner) { + if (!email) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) this.validateUserEmail(email) - return await queryRunner.manager.findOneBy(User, { email }) + return await queryRunner.manager.findOneBy(User, { email: ILike(email) }) } public async readUserByToken(token: string | undefined, queryRunner: QueryRunner) { @@ -95,7 +97,18 @@ export class UserService { data.updatedBy = data.id } - return queryRunner.manager.create(User, data) + const userObj = 
queryRunner.manager.create(User, data) + + this.telemetry.sendTelemetry( + TelemetryEventType.USER_CREATED, + { + userId: userObj.id, + createdBy: userObj.createdBy + }, + userObj.id + ) + + return userObj } public async saveUser(data: Partial, queryRunner: QueryRunner) { @@ -118,15 +131,6 @@ export class UserService { await queryRunner.release() } - this.telemetry.sendTelemetry( - TelemetryEventType.USER_CREATED, - { - userId: newUser.id, - createdBy: newUser.createdBy - }, - newUser.id - ) - return newUser } @@ -174,6 +178,6 @@ export class UserService { if (queryRunner && !queryRunner.isReleased) await queryRunner.release() } - return updatedUser + return sanitizeUser(updatedUser) } } diff --git a/packages/server/src/enterprise/utils/validation.util.ts b/packages/server/src/enterprise/utils/validation.util.ts index 96015ee68d0..c1738fc464f 100644 --- a/packages/server/src/enterprise/utils/validation.util.ts +++ b/packages/server/src/enterprise/utils/validation.util.ts @@ -18,6 +18,16 @@ export function isInvalidDateTime(dateTime: unknown): boolean { } export function isInvalidPassword(password: unknown): boolean { + // Minimum Length: At least 8 characters + // Maximum Length: No more than 128 characters + // Lowercase Letter: Must contain at least one lowercase letter (a-z) + // Uppercase Letter: Must contain at least one uppercase letter (A-Z) + // Digit: Must contain at least one number (0-9) + // Special Character: Must contain at least one special character (anything that's not a letter or number) + if (!password || typeof password !== 'string' || password.length > 128) { + return true + } + const regexPassword = /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[^a-zA-Z0-9]).{8,}$/ - return !password || typeof password !== 'string' || !regexPassword.test(password) + return !regexPassword.test(password) } diff --git a/packages/server/src/queue/BaseQueue.ts b/packages/server/src/queue/BaseQueue.ts index 49e63453547..87b3a9faef7 100644 --- 
a/packages/server/src/queue/BaseQueue.ts +++ b/packages/server/src/queue/BaseQueue.ts @@ -57,8 +57,6 @@ export abstract class BaseQueue { } public createWorker(concurrency: number = WORKER_CONCURRENCY): Worker { - logger.info(`[BaseQueue] Creating worker for queue "${this.queue.name}" with concurrency: ${concurrency}`) - try { this.worker = new Worker( this.queue.name, @@ -92,14 +90,6 @@ export abstract class BaseQueue { logger.error(`[BaseQueue] Worker error for queue "${this.queue.name}":`, { error: err }) }) - this.worker.on('ready', () => { - logger.info(`[BaseQueue] Worker ready for queue "${this.queue.name}"`) - }) - - this.worker.on('closing', () => { - logger.info(`[BaseQueue] Worker closing for queue "${this.queue.name}"`) - }) - this.worker.on('closed', () => { logger.info(`[BaseQueue] Worker closed for queue "${this.queue.name}"`) }) diff --git a/packages/server/src/queue/PredictionQueue.ts b/packages/server/src/queue/PredictionQueue.ts index af0c9d8c72d..10cc125f779 100644 --- a/packages/server/src/queue/PredictionQueue.ts +++ b/packages/server/src/queue/PredictionQueue.ts @@ -100,13 +100,6 @@ export class PredictionQueue extends BaseQueue { data.signal = signal } - if (this.redisPublisher) { - logger.info( - `[PredictionQueue] RedisPublisher is connected [orgId:${data.orgId}/flowId:${data.chatflow.id}/chatId:${data.chatId}]`, - this.redisPublisher.isConnected() - ) - } - return await executeFlow(data) } } diff --git a/packages/server/src/queue/QueueManager.ts b/packages/server/src/queue/QueueManager.ts index 7a17cfd16e3..eef90b33b94 100644 --- a/packages/server/src/queue/QueueManager.ts +++ b/packages/server/src/queue/QueueManager.ts @@ -12,7 +12,6 @@ import { BullMQAdapter } from '@bull-board/api/bullMQAdapter' import { Express } from 'express' import { UsageCacheManager } from '../UsageCacheManager' import { ExpressAdapter } from '@bull-board/express' -import logger from '../utils/logger' const QUEUE_NAME = process.env.QUEUE_NAME || 'flowise-queue' 
@@ -48,9 +47,6 @@ export class QueueManager { ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined } - logger.info( - `[QueueManager] Connecting to Redis using URL: ${process.env.REDIS_URL.replace(/\/\/[^:]+:[^@]+@/, '//[CREDENTIALS]@')}` - ) } else { let tlsOpts = undefined if (process.env.REDIS_TLS === 'true') { @@ -72,11 +68,6 @@ export class QueueManager { ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined } - logger.info( - `[QueueManager] Connecting to Redis using host:port: ${process.env.REDIS_HOST || 'localhost'}:${ - process.env.REDIS_PORT || '6379' - }` - ) } } @@ -149,15 +140,6 @@ export class QueueManager { }) this.registerQueue('prediction', predictionQueue) - // Add connection event logging for prediction queue - if (predictionQueue.getQueue().opts.connection) { - const connInfo = predictionQueue.getQueue().opts.connection || {} - const connInfoString = JSON.stringify(connInfo) - .replace(/"username":"[^"]*"/g, '"username":"[REDACTED]"') - .replace(/"password":"[^"]*"/g, '"password":"[REDACTED]"') - logger.info(`[QueueManager] Prediction queue connected to Redis: ${connInfoString}`) - } - this.predictionQueueEventsProducer = new QueueEventsProducer(predictionQueue.getQueueName(), { connection: this.connection }) @@ -172,15 +154,6 @@ export class QueueManager { }) this.registerQueue('upsert', upsertionQueue) - // Add connection event logging for upsert queue - if (upsertionQueue.getQueue().opts.connection) { - const connInfo = upsertionQueue.getQueue().opts.connection || {} - const connInfoString = JSON.stringify(connInfo) - .replace(/"username":"[^"]*"/g, '"username":"[REDACTED]"') - .replace(/"password":"[^"]*"/g, '"password":"[REDACTED]"') - logger.info(`[QueueManager] Upsert queue connected to Redis: ${connInfoString}`) - } - if (serverAdapter) { createBullBoard({ queues: [new BullMQAdapter(predictionQueue.getQueue()), new BullMQAdapter(upsertionQueue.getQueue())], diff --git a/packages/server/src/queue/RedisEventPublisher.ts 
b/packages/server/src/queue/RedisEventPublisher.ts index f2b601508ac..55b513b520c 100644 --- a/packages/server/src/queue/RedisEventPublisher.ts +++ b/packages/server/src/queue/RedisEventPublisher.ts @@ -77,20 +77,7 @@ export class RedisEventPublisher implements IServerSideEventStreamer { } async connect() { - logger.info(`[RedisEventPublisher] Connecting to Redis...`) await this.redisPublisher.connect() - - // Log connection details after successful connection - const connInfo = this.redisPublisher.options?.socket - const connInfoString = JSON.stringify(connInfo) - .replace(/"username":"[^"]*"/g, '"username":"[REDACTED]"') - .replace(/"password":"[^"]*"/g, '"password":"[REDACTED]"') - logger.info(`[RedisEventPublisher] Connected to Redis: ${connInfoString}`) - - // Add error event listener - this.redisPublisher.on('error', (err) => { - logger.error(`[RedisEventPublisher] Redis connection error`, { error: err }) - }) } streamCustomEvent(chatId: string, eventType: string, data: any) { diff --git a/packages/server/src/queue/RedisEventSubscriber.ts b/packages/server/src/queue/RedisEventSubscriber.ts index a202bdc6574..f9069264b0c 100644 --- a/packages/server/src/queue/RedisEventSubscriber.ts +++ b/packages/server/src/queue/RedisEventSubscriber.ts @@ -77,20 +77,7 @@ export class RedisEventSubscriber { } async connect() { - logger.info(`[RedisEventSubscriber] Connecting to Redis...`) await this.redisSubscriber.connect() - - // Log connection details after successful connection - const connInfo = this.redisSubscriber.options?.socket - const connInfoString = JSON.stringify(connInfo) - .replace(/"username":"[^"]*"/g, '"username":"[REDACTED]"') - .replace(/"password":"[^"]*"/g, '"password":"[REDACTED]"') - logger.info(`[RedisEventSubscriber] Connected to Redis: ${connInfoString}`) - - // Add error event listener - this.redisSubscriber.on('error', (err) => { - logger.error(`[RedisEventSubscriber] Redis connection error`, { error: err }) - }) } subscribe(channel: string) { 
diff --git a/packages/server/src/services/chatflows/index.ts b/packages/server/src/services/chatflows/index.ts index a525168daa5..0b2e651f35f 100644 --- a/packages/server/src/services/chatflows/index.ts +++ b/packages/server/src/services/chatflows/index.ts @@ -4,7 +4,7 @@ import { In } from 'typeorm' import { ChatflowType, IReactFlowObject } from '../../Interface' import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' import { UsageCacheManager } from '../../UsageCacheManager' -import { ChatFlow } from '../../database/entities/ChatFlow' +import { ChatFlow, EnumChatflowType } from '../../database/entities/ChatFlow' import { ChatMessage } from '../../database/entities/ChatMessage' import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback' import { UpsertHistory } from '../../database/entities/UpsertHistory' @@ -20,6 +20,15 @@ import { utilGetUploadsConfig } from '../../utils/getUploadsConfig' import logger from '../../utils/logger' import { updateStorageUsage } from '../../utils/quotaUsage' +export const enum ChatflowErrorMessage { + INVALID_CHATFLOW_TYPE = 'Invalid Chatflow Type' +} + +export function validateChatflowType(type: ChatflowType | undefined) { + if (!Object.values(EnumChatflowType).includes(type as EnumChatflowType)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, ChatflowErrorMessage.INVALID_CHATFLOW_TYPE) +} + // Check if chatflow valid for streaming const checkIfChatflowIsValidForStreaming = async (chatflowId: string): Promise => { try { @@ -254,57 +263,57 @@ const saveChatflow = async ( subscriptionId: string, usageCacheManager: UsageCacheManager ): Promise => { - try { - const appServer = getRunningExpressApp() - - let dbResponse: ChatFlow - if (containsBase64File(newChatFlow)) { - // we need a 2-step process, as we need to save the chatflow first and then update the file paths - // this is because we need the chatflow id to create the file paths - - // step 1 - save with empty 
flowData - const incomingFlowData = newChatFlow.flowData - newChatFlow.flowData = JSON.stringify({}) - const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) - const step1Results = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) - - // step 2 - convert base64 to file paths and update the chatflow - step1Results.flowData = await updateFlowDataWithFilePaths( - step1Results.id, - incomingFlowData, - orgId, - workspaceId, - subscriptionId, - usageCacheManager - ) - await _checkAndUpdateDocumentStoreUsage(step1Results, newChatFlow.workspaceId) - dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(step1Results) - } else { - const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) - dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) - } - await appServer.telemetry.sendTelemetry( - 'chatflow_created', - { - version: await getAppVersion(), - chatflowId: dbResponse.id, - flowGraph: getTelemetryFlowObj(JSON.parse(dbResponse.flowData)?.nodes, JSON.parse(dbResponse.flowData)?.edges) - }, - orgId - ) - - appServer.metricsProvider?.incrementCounter( - dbResponse?.type === 'MULTIAGENT' ? 
FLOWISE_METRIC_COUNTERS.AGENTFLOW_CREATED : FLOWISE_METRIC_COUNTERS.CHATFLOW_CREATED, - { status: FLOWISE_COUNTER_STATUS.SUCCESS } - ) - - return dbResponse - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: chatflowsService.saveChatflow - ${getErrorMessage(error)}` + validateChatflowType(newChatFlow.type) + const appServer = getRunningExpressApp() + + let dbResponse: ChatFlow + if (containsBase64File(newChatFlow)) { + // we need a 2-step process, as we need to save the chatflow first and then update the file paths + // this is because we need the chatflow id to create the file paths + + // step 1 - save with empty flowData + const incomingFlowData = newChatFlow.flowData + newChatFlow.flowData = JSON.stringify({}) + const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) + const step1Results = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) + + // step 2 - convert base64 to file paths and update the chatflow + step1Results.flowData = await updateFlowDataWithFilePaths( + step1Results.id, + incomingFlowData, + orgId, + workspaceId, + subscriptionId, + usageCacheManager ) + await _checkAndUpdateDocumentStoreUsage(step1Results, newChatFlow.workspaceId) + dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(step1Results) + } else { + const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) + dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) } + + const subscriptionDetails = await usageCacheManager.getSubscriptionDataFromCache(subscriptionId) + const productId = subscriptionDetails?.productId || '' + + await appServer.telemetry.sendTelemetry( + 'chatflow_created', + { + version: await getAppVersion(), + chatflowId: dbResponse.id, + flowGraph: getTelemetryFlowObj(JSON.parse(dbResponse.flowData)?.nodes, JSON.parse(dbResponse.flowData)?.edges), + productId, + subscriptionId + }, + orgId + ) + + 
appServer.metricsProvider?.incrementCounter( + dbResponse?.type === 'MULTIAGENT' ? FLOWISE_METRIC_COUNTERS.AGENTFLOW_CREATED : FLOWISE_METRIC_COUNTERS.CHATFLOW_CREATED, + { status: FLOWISE_COUNTER_STATUS.SUCCESS } + ) + + return dbResponse } const updateChatflow = async ( @@ -314,29 +323,27 @@ const updateChatflow = async ( workspaceId: string, subscriptionId: string ): Promise => { - try { - const appServer = getRunningExpressApp() - if (updateChatFlow.flowData && containsBase64File(updateChatFlow)) { - updateChatFlow.flowData = await updateFlowDataWithFilePaths( - chatflow.id, - updateChatFlow.flowData, - orgId, - workspaceId, - subscriptionId, - appServer.usageCacheManager - ) - } - const newDbChatflow = appServer.AppDataSource.getRepository(ChatFlow).merge(chatflow, updateChatFlow) - await _checkAndUpdateDocumentStoreUsage(newDbChatflow, chatflow.workspaceId) - const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(newDbChatflow) - - return dbResponse - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: chatflowsService.updateChatflow - ${getErrorMessage(error)}` + const appServer = getRunningExpressApp() + if (updateChatFlow.flowData && containsBase64File(updateChatFlow)) { + updateChatFlow.flowData = await updateFlowDataWithFilePaths( + chatflow.id, + updateChatFlow.flowData, + orgId, + workspaceId, + subscriptionId, + appServer.usageCacheManager ) } + if (updateChatFlow.type || updateChatFlow.type === '') { + validateChatflowType(updateChatFlow.type) + } else { + updateChatFlow.type = chatflow.type + } + const newDbChatflow = appServer.AppDataSource.getRepository(ChatFlow).merge(chatflow, updateChatFlow) + await _checkAndUpdateDocumentStoreUsage(newDbChatflow, chatflow.workspaceId) + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(newDbChatflow) + + return dbResponse } // Get specific chatflow chatbotConfig via id (PUBLIC endpoint, used to retrieve config for 
embedded chat) diff --git a/packages/server/src/services/documentstore/index.ts b/packages/server/src/services/documentstore/index.ts index a9a12fd47ce..05520523de4 100644 --- a/packages/server/src/services/documentstore/index.ts +++ b/packages/server/src/services/documentstore/index.ts @@ -573,7 +573,8 @@ const _splitIntoChunks = async (appDataSource: DataSource, componentNodes: IComp chatflowid: uuidv4(), appDataSource, databaseEntities, - logger + logger, + processRaw: true } const docNodeInstance = new nodeModule.nodeClass() let docs: IDocument[] = await docNodeInstance.init(nodeData, '', options) @@ -1727,6 +1728,11 @@ const upsertDocStore = async ( ...newLoader?.config } + // Override loaderName if it's provided directly in data + if (data.loaderName) { + loaderName = data.loaderName + } + splitterName = newSplitter?.name ? getComponentLabelFromName(newSplitter?.name) : splitterName splitterId = newSplitter?.name || splitterId splitterConfig = { diff --git a/packages/server/src/services/executions/index.ts b/packages/server/src/services/executions/index.ts index f54ee322088..062337aad02 100644 --- a/packages/server/src/services/executions/index.ts +++ b/packages/server/src/services/executions/index.ts @@ -11,6 +11,7 @@ import { getRunningExpressApp } from '../../utils/getRunningExpressApp' export interface ExecutionFilters { id?: string agentflowId?: string + agentflowName?: string sessionId?: string state?: ExecutionState startDate?: Date @@ -65,7 +66,7 @@ const getPublicExecutionById = async (executionId: string): Promise => { try { const appServer = getRunningExpressApp() - const { id, agentflowId, sessionId, state, startDate, endDate, page = 1, limit = 12, workspaceId } = filters + const { id, agentflowId, agentflowName, sessionId, state, startDate, endDate, page = 1, limit = 12, workspaceId } = filters // Handle UUID fields properly using raw parameters to avoid type conversion issues // This uses the query builder instead of direct objects for 
compatibility with UUID fields @@ -78,6 +79,8 @@ const getAllExecutions = async (filters: ExecutionFilters = {}): Promise<{ data: if (id) queryBuilder.andWhere('execution.id = :id', { id }) if (agentflowId) queryBuilder.andWhere('execution.agentflowId = :agentflowId', { agentflowId }) + if (agentflowName) + queryBuilder.andWhere('LOWER(agentflow.name) LIKE LOWER(:agentflowName)', { agentflowName: `%${agentflowName}%` }) if (sessionId) queryBuilder.andWhere('execution.sessionId = :sessionId', { sessionId }) if (state) queryBuilder.andWhere('execution.state = :state', { state }) if (workspaceId) queryBuilder.andWhere('execution.workspaceId = :workspaceId', { workspaceId }) diff --git a/packages/server/src/services/fetch-links/index.ts b/packages/server/src/services/fetch-links/index.ts index 53c6e94e94a..cf55515120e 100644 --- a/packages/server/src/services/fetch-links/index.ts +++ b/packages/server/src/services/fetch-links/index.ts @@ -1,4 +1,4 @@ -import { webCrawl, xmlScrape } from 'flowise-components' +import { webCrawl, xmlScrape, checkDenyList } from 'flowise-components' import { StatusCodes } from 'http-status-codes' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' @@ -6,6 +6,8 @@ import { getErrorMessage } from '../../errors/utils' const getAllLinks = async (requestUrl: string, relativeLinksMethod: string, queryLimit: string): Promise => { try { const url = decodeURIComponent(requestUrl) + await checkDenyList(url) + if (!relativeLinksMethod) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, diff --git a/packages/server/src/utils/buildAgentGraph.ts b/packages/server/src/utils/buildAgentGraph.ts index e79cd6947cb..31489170987 100644 --- a/packages/server/src/utils/buildAgentGraph.ts +++ b/packages/server/src/utils/buildAgentGraph.ts @@ -89,6 +89,7 @@ export const buildAgentGraph = async ({ chatId, sessionId, chatflowid, + chatflowId: chatflowid, logger, analytic, 
appDataSource, diff --git a/packages/server/src/utils/buildAgentflow.ts b/packages/server/src/utils/buildAgentflow.ts index b0d2eed6434..3f961a2585e 100644 --- a/packages/server/src/utils/buildAgentflow.ts +++ b/packages/server/src/utils/buildAgentflow.ts @@ -133,6 +133,7 @@ interface IExecuteNodeParams { orgId: string workspaceId: string subscriptionId: string + productId: string } interface IExecuteAgentFlowParams extends Omit { @@ -215,6 +216,7 @@ export const resolveVariables = async ( variableOverrides: IVariableOverride[], uploadedFilesContent: string, chatHistory: IMessage[], + componentNodes: IComponentNodes, agentFlowExecutedData?: IAgentflowExecutedData[], iterationContext?: ICommonObject ): Promise => { @@ -389,6 +391,135 @@ export const resolveVariables = async ( } const getParamValues = async (paramsObj: ICommonObject) => { + /* + * EXAMPLE SCENARIO: + * + * 1. Agent node has inputParam: { name: "agentTools", type: "array", array: [{ name: "agentSelectedTool", loadConfig: true }] } + * 2. Inputs contain: { agentTools: [{ agentSelectedTool: "requestsGet", agentSelectedToolConfig: { requestsGetHeaders: "Bearer {{ $vars.TOKEN }}" } }] } + * 3. 
We need to resolve the variable in requestsGetHeaders because RequestsGet node defines requestsGetHeaders with acceptVariable: true + * + * STEP 1: Find all parameters with loadConfig=true (e.g., "agentSelectedTool") + * STEP 2: Find their values in inputs (e.g., "requestsGet") + * STEP 3: Look up component node definition for "requestsGet" + * STEP 4: Find which of its parameters have acceptVariable=true (e.g., "requestsGetHeaders") + * STEP 5: Find the config object (e.g., "agentSelectedToolConfig") + * STEP 6: Resolve variables in config parameters that accept variables + */ + + // Helper function to find params with loadConfig recursively + // Example: Finds ["agentModel", "agentSelectedTool"] from the inputParams structure + const findParamsWithLoadConfig = (inputParams: any[]): string[] => { + const paramsWithLoadConfig: string[] = [] + + for (const param of inputParams) { + // Direct loadConfig param (e.g., agentModel with loadConfig: true) + if (param.loadConfig === true) { + paramsWithLoadConfig.push(param.name) + } + + // Check nested array parameters (e.g., agentTools.array contains agentSelectedTool with loadConfig: true) + if (param.type === 'array' && param.array && Array.isArray(param.array)) { + const nestedParams = findParamsWithLoadConfig(param.array) + paramsWithLoadConfig.push(...nestedParams) + } + } + + return paramsWithLoadConfig + } + + // Helper function to find value of a parameter recursively in nested objects/arrays + // Example: Searches for "agentSelectedTool" value in complex nested inputs structure + // Returns "requestsGet" when found in agentTools[0].agentSelectedTool + const findParamValue = (obj: any, paramName: string): any => { + if (typeof obj !== 'object' || obj === null) { + return undefined + } + + // Handle arrays (e.g., agentTools array) + if (Array.isArray(obj)) { + for (const item of obj) { + const result = findParamValue(item, paramName) + if (result !== undefined) { + return result + } + } + return undefined + } + + 
// Direct property match + if (Object.prototype.hasOwnProperty.call(obj, paramName)) { + return obj[paramName] + } + + // Recursively search nested objects + for (const value of Object.values(obj)) { + const result = findParamValue(value, paramName) + if (result !== undefined) { + return result + } + } + + return undefined + } + + // Helper function to process config parameters with acceptVariable + // Example: Processes agentSelectedToolConfig object, resolving variables in requestsGetHeaders + const processConfigParams = async (configObj: any, configParamWithAcceptVariables: string[]) => { + if (typeof configObj !== 'object' || configObj === null) { + return + } + + for (const [key, value] of Object.entries(configObj)) { + // Only resolve variables for parameters that accept them + // Example: requestsGetHeaders is in configParamWithAcceptVariables, so resolve "Bearer {{ $vars.TOKEN }}" + if (configParamWithAcceptVariables.includes(key)) { + configObj[key] = await resolveNodeReference(value) + } + } + } + + // STEP 1: Get all params with loadConfig from inputParams + // Example result: ["agentModel", "agentSelectedTool"] + const paramsWithLoadConfig = findParamsWithLoadConfig(reactFlowNodeData.inputParams) + + // STEP 2-6: Process each param with loadConfig + for (const paramWithLoadConfig of paramsWithLoadConfig) { + // STEP 2: Find the value of this parameter in the inputs + // Example: paramWithLoadConfig="agentSelectedTool", paramValue="requestsGet" + const paramValue = findParamValue(paramsObj, paramWithLoadConfig) + + if (paramValue && componentNodes[paramValue]) { + // STEP 3: Get the node instance inputs to find params with acceptVariable + // Example: componentNodes["requestsGet"] contains the RequestsGet node definition + const nodeInstance = componentNodes[paramValue] + const configParamWithAcceptVariables: string[] = [] + + // STEP 4: Find which parameters of the component accept variables + // Example: RequestsGet has inputs like { name: 
"requestsGetHeaders", acceptVariable: true } + if (nodeInstance.inputs && Array.isArray(nodeInstance.inputs)) { + for (const input of nodeInstance.inputs) { + if (input.acceptVariable === true) { + configParamWithAcceptVariables.push(input.name) + } + } + } + // Example result: configParamWithAcceptVariables = ["requestsGetHeaders", "requestsGetUrl", ...] + + // STEP 5: Look for the config object (paramName + "Config") + // Example: Look for "agentSelectedToolConfig" in the inputs + const configParamName = paramWithLoadConfig + 'Config' + const configValue = findParamValue(paramsObj, configParamName) + + // STEP 6: Process config object to resolve variables + // Example: Resolve "Bearer {{ $vars.TOKEN }}" in requestsGetHeaders + if (configValue && configParamWithAcceptVariables.length > 0) { + await processConfigParams(configValue, configParamWithAcceptVariables) + } + } + } + + // Original logic for direct acceptVariable params (maintains backward compatibility) + // Example: Direct params like agentUserMessage with acceptVariable: true for (const key in paramsObj) { const paramValue = paramsObj[key] const isAcceptVariable = reactFlowNodeData.inputParams.find((param) => param.name === key)?.acceptVariable ?? 
false @@ -838,7 +969,8 @@ const executeNode = async ({ iterationContext, orgId, workspaceId, - subscriptionId + subscriptionId, + productId }: IExecuteNodeParams): Promise<{ result: any shouldStop?: boolean @@ -879,6 +1011,7 @@ const executeNode = async ({ const chatHistory = [...pastChatHistory, ...runtimeChatHistory] const flowConfig: IFlowConfig = { chatflowid: chatflow.id, + chatflowId: chatflow.id, chatId, sessionId, apiMessageId, @@ -910,6 +1043,7 @@ const executeNode = async ({ variableOverrides, uploadedFilesContent, chatHistory, + componentNodes, agentFlowExecutedData, iterationContext ) @@ -961,6 +1095,7 @@ const executeNode = async ({ chatId, sessionId, chatflowid: chatflow.id, + chatflowId: chatflow.id, apiMessageId: flowConfig.apiMessageId, logger, appDataSource, @@ -1060,7 +1195,8 @@ const executeNode = async ({ }, orgId, workspaceId, - subscriptionId + subscriptionId, + productId }) // Store the result @@ -1287,7 +1423,8 @@ export const executeAgentFlow = async ({ isTool = false, orgId, workspaceId, - subscriptionId + subscriptionId, + productId }: IExecuteAgentFlowParams) => { logger.debug('\n🚀 Starting flow execution') @@ -1754,7 +1891,8 @@ export const executeAgentFlow = async ({ iterationContext, orgId, workspaceId, - subscriptionId + subscriptionId, + productId }) if (executionResult.agentFlowExecutedData) { @@ -2020,7 +2158,9 @@ export const executeAgentFlow = async ({ chatflowId: chatflowid, chatId, type: evaluationRunId ? ChatType.EVALUATION : isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) + flowGraph: getTelemetryFlowObj(nodes, edges), + productId, + subscriptionId }, orgId ) diff --git a/packages/server/src/utils/buildChatflow.ts b/packages/server/src/utils/buildChatflow.ts index be0bb6a105a..86154778c53 100644 --- a/packages/server/src/utils/buildChatflow.ts +++ b/packages/server/src/utils/buildChatflow.ts @@ -249,7 +249,8 @@ export const executeFlow = async ({ isTool, orgId, workspaceId, - subscriptionId + subscriptionId, + productId }: IExecuteFlowParams) => { // Ensure incomingInput has all required properties with default values incomingInput = { @@ -421,7 +422,8 @@ export const executeFlow = async ({ isTool, orgId, workspaceId, - subscriptionId + subscriptionId, + productId }) } @@ -478,6 +480,7 @@ export const executeFlow = async ({ const flowConfig: IFlowConfig = { chatflowid, + chatflowId: chatflow.id, chatId, sessionId, chatHistory, @@ -812,7 +815,9 @@ export const executeFlow = async ({ chatflowId: chatflowid, chatId, type: isEvaluation ? ChatType.EVALUATION : isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) + flowGraph: getTelemetryFlowObj(nodes, edges), + productId, + subscriptionId }, orgId ) @@ -954,6 +959,9 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals organizationId = orgId const subscriptionId = org.subscriptionId as string + const subscriptionDetails = await appServer.usageCacheManager.getSubscriptionDataFromCache(subscriptionId) + const productId = subscriptionDetails?.productId || '' + await checkPredictions(orgId, subscriptionId, appServer.usageCacheManager) const executeData: IExecuteFlowParams = { @@ -974,7 +982,8 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals usageCacheManager: appServer.usageCacheManager, orgId, workspaceId, - subscriptionId + subscriptionId, + productId } if (process.env.MODE === MODE.QUEUE) { diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index d99c0b546b7..9451a49b688 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -1490,7 +1490,9 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod 'chatTogetherAI', 'chatTogetherAI_LlamaIndex', 'chatFireworks', - 'chatBaiduWenxin' + 'ChatSambanova', + 'chatBaiduWenxin', + 'chatCometAPI' ], LLMs: ['azureOpenAI', 'openAI', 'ollama'] } diff --git a/packages/server/src/utils/sanitize.util.ts b/packages/server/src/utils/sanitize.util.ts index 410bc4c05e3..9d84478f592 100644 --- a/packages/server/src/utils/sanitize.util.ts +++ b/packages/server/src/utils/sanitize.util.ts @@ -1,3 +1,5 @@ +import { User } from '../enterprise/database/entities/user.entity' + export function sanitizeNullBytes(obj: any): any { const stack = [obj] @@ -30,3 +32,11 @@ export function sanitizeNullBytes(obj: any): any { return obj } + +export function sanitizeUser(user: Partial) { + delete user.credential + delete user.tempToken + delete user.tokenExpiry + + 
return user +} diff --git a/packages/server/src/utils/upsertVector.ts b/packages/server/src/utils/upsertVector.ts index 63dbfbdc0a8..29ebcbe9632 100644 --- a/packages/server/src/utils/upsertVector.ts +++ b/packages/server/src/utils/upsertVector.ts @@ -277,6 +277,9 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => const orgId = org.id const subscriptionId = org.subscriptionId as string + const subscriptionDetails = await appServer.usageCacheManager.getSubscriptionDataFromCache(subscriptionId) + const productId = subscriptionDetails?.productId || '' + const executeData: IExecuteFlowParams = { componentNodes: appServer.nodesPool.componentNodes, incomingInput, @@ -293,7 +296,8 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => isUpsert: true, orgId, workspaceId, - subscriptionId + subscriptionId, + productId } if (process.env.MODE === MODE.QUEUE) { diff --git a/packages/ui/package.json b/packages/ui/package.json index 7e3a0fccfee..0a60dfb8bb9 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "flowise-ui", - "version": "3.0.5", + "version": "3.0.6", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://flowiseai.com", "author": { @@ -34,7 +34,7 @@ "@uiw/codemirror-theme-sublime": "^4.21.21", "@uiw/codemirror-theme-vscode": "^4.21.21", "@uiw/react-codemirror": "^4.21.21", - "axios": "1.7.9", + "axios": "1.12.0", "clsx": "^1.1.1", "dompurify": "^3.2.6", "dotenv": "^16.0.0", diff --git a/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx b/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx index aba719726dd..40ed8c14f3c 100644 --- a/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx +++ b/packages/ui/src/ui-component/dialog/ViewMessagesDialog.jsx @@ -185,7 +185,6 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [chatlogs, setChatLogs] 
= useState([]) - const [allChatlogs, setAllChatLogs] = useState([]) const [chatMessages, setChatMessages] = useState([]) const [stats, setStats] = useState({}) const [selectedMessageIndex, setSelectedMessageIndex] = useState(0) @@ -361,6 +360,16 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { if ('windows' === getOS()) { fileSeparator = '\\' } + + const resp = await chatmessageApi.getAllChatmessageFromChatflow(dialogProps.chatflow.id, { + chatType: chatTypeFilter.length ? chatTypeFilter : undefined, + feedbackType: feedbackTypeFilter.length ? feedbackTypeFilter : undefined, + startDate: startDate, + endDate: endDate, + order: 'DESC' + }) + + const allChatlogs = resp.data ?? [] for (let i = 0; i < allChatlogs.length; i += 1) { const chatmsg = allChatlogs[i] const chatPK = getChatPK(chatmsg) @@ -748,7 +757,6 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { if (getChatmessageApi.data) { getStoragePathFromServer.request() - setAllChatLogs(getChatmessageApi.data) const chatPK = processChatLogs(getChatmessageApi.data) setSelectedMessageIndex(0) if (chatPK) { @@ -784,7 +792,6 @@ const ViewMessagesDialog = ({ show, dialogProps, onCancel }) => { return () => { setChatLogs([]) - setAllChatLogs([]) setChatMessages([]) setChatTypeFilter(['INTERNAL', 'EXTERNAL']) setFeedbackTypeFilter([]) diff --git a/packages/ui/src/ui-component/extended/FileUpload.jsx b/packages/ui/src/ui-component/extended/FileUpload.jsx index 2555ee147e4..bc1fd9962ad 100644 --- a/packages/ui/src/ui-component/extended/FileUpload.jsx +++ b/packages/ui/src/ui-component/extended/FileUpload.jsx @@ -1,4 +1,4 @@ -import { useDispatch } from 'react-redux' +import { useDispatch, useSelector } from 'react-redux' import { useState, useEffect } from 'react' import PropTypes from 'prop-types' import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction, SET_CHATFLOW } from '@/store/actions' @@ -41,6 +41,7 @@ const availableFileTypes = [ const FileUpload 
= ({ dialogProps }) => { const dispatch = useDispatch() + const customization = useSelector((state) => state.customization) useNotifier() @@ -234,14 +235,18 @@ const FileUpload = ({ dialogProps }) => { - PDF Configuration + + PDF Configuration + PDF Usage diff --git a/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx b/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx index 90fbc18a0d5..c89a1130f01 100644 --- a/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx +++ b/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx @@ -725,7 +725,8 @@ export const ExecutionDetails = ({ open, isPublic, execution, metadata, onClose, flex: '1 1 35%', padding: 2, borderRight: 1, - borderColor: 'divider' + borderColor: 'divider', + overflow: 'auto' }} > { return ( <> - {!isLoading ? ( + {isLoading ? ( + + + + ) : ( <> - {!execution || getExecutionByIdPublicApi.error ? ( + {getExecutionByIdPublicApi.error ? ( { /> )} - ) : null} + )} ) } diff --git a/packages/ui/src/views/agentexecutions/index.jsx b/packages/ui/src/views/agentexecutions/index.jsx index ccc18b36433..0f3663cd1df 100644 --- a/packages/ui/src/views/agentexecutions/index.jsx +++ b/packages/ui/src/views/agentexecutions/index.jsx @@ -68,6 +68,7 @@ const AgentExecutions = () => { startDate: null, endDate: null, agentflowId: '', + agentflowName: '', sessionId: '' }) @@ -132,6 +133,7 @@ const AgentExecutions = () => { } if (filters.agentflowId) params.agentflowId = filters.agentflowId + if (filters.agentflowName) params.agentflowName = filters.agentflowName if (filters.sessionId) params.sessionId = filters.sessionId getAllExecutions.request(params) @@ -143,6 +145,7 @@ const AgentExecutions = () => { startDate: null, endDate: null, agentflowId: '', + agentflowName: '', sessionId: '' }) setCurrentPage(1) @@ -312,6 +315,20 @@ const AgentExecutions = () => { } /> + + handleFilterChange('agentflowName', e.target.value)} + size='small' + sx={{ + '& .MuiOutlinedInput-notchedOutline': { + borderColor: 
borderColor + } + }} + /> + { }} /> - +