11 changes: 11 additions & 0 deletions packages/@n8n/api-types/src/chat-hub.ts
@@ -18,6 +18,7 @@ export const chatHubLLMProviderSchema = z.enum([
 	'azureOpenAi',
 	'ollama',
 	'awsBedrock',
+	'deepSeek',
 	'cohere',
 	'mistralCloud',
 ]);
@@ -44,6 +45,7 @@ export const PROVIDER_CREDENTIAL_TYPE_MAP: Record<
 	ollama: 'ollamaApi',
 	azureOpenAi: 'azureOpenAiApi',
 	awsBedrock: 'aws',
+	deepSeek: 'deepSeekApi',
 	cohere: 'cohereApi',
 	mistralCloud: 'mistralCloudApi',
 };
@@ -83,6 +85,11 @@ const awsBedrockModelSchema = z.object({
 	model: z.string(),
 });
 
+const deepSeekModelSchema = z.object({
+	provider: z.literal('deepSeek'),
+	model: z.string(),
+});
+
 const cohereModelSchema = z.object({
 	provider: z.literal('cohere'),
 	model: z.string(),
@@ -110,6 +117,7 @@ export const chatHubConversationModelSchema = z.discriminatedUnion('provider', [
 	azureOpenAIModelSchema,
 	ollamaModelSchema,
 	awsBedrockModelSchema,
+	deepSeekModelSchema,
 	cohereModelSchema,
 	mistralCloudModelSchema,
 	n8nModelSchema,
@@ -122,6 +130,7 @@ export type ChatHubGoogleModel = z.infer<typeof googleModelSchema>;
 export type ChatHubAzureOpenAIModel = z.infer<typeof azureOpenAIModelSchema>;
 export type ChatHubOllamaModel = z.infer<typeof ollamaModelSchema>;
 export type ChatHubAwsBedrockModel = z.infer<typeof awsBedrockModelSchema>;
+export type ChatHubDeepSeekModel = z.infer<typeof deepSeekModelSchema>;
 export type ChatHubCohereModel = z.infer<typeof cohereModelSchema>;
 export type ChatHubMistralCloudModel = z.infer<typeof mistralCloudModelSchema>;
 export type ChatHubBaseLLMModel =
@@ -131,6 +140,7 @@ export type ChatHubBaseLLMModel =
 	| ChatHubAzureOpenAIModel
 	| ChatHubOllamaModel
 	| ChatHubAwsBedrockModel
+	| ChatHubDeepSeekModel
 	| ChatHubCohereModel
 	| ChatHubMistralCloudModel;
 
@@ -175,6 +185,7 @@ export const emptyChatModelsResponse: ChatModelsResponse = {
 	azureOpenAi: { models: [] },
 	ollama: { models: [] },
 	awsBedrock: { models: [] },
+	deepSeek: { models: [] },
 	cohere: { models: [] },
 	mistralCloud: { models: [] },
 	n8n: { models: [] },
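Taken together, the enum entry, credential mapping, model schema, inferred type, and empty-response default wire 'deepSeek' through the whole chat-hub type surface. A minimal sketch of what the extended discriminated union now accepts — assuming chatHubConversationModelSchema is re-exported from @n8n/api-types, with 'deepseek-chat' as a placeholder model id:

import { chatHubConversationModelSchema } from '@n8n/api-types';

// Parses: 'deepSeek' is now a legal discriminator, and the model field
// is an unconstrained string in deepSeekModelSchema.
const selection = chatHubConversationModelSchema.parse({
	provider: 'deepSeek',
	model: 'deepseek-chat', // placeholder; any string satisfies z.string()
});

// Throws a ZodError: the discriminator is case-sensitive.
// chatHubConversationModelSchema.parse({ provider: 'deepseek', model: 'x' });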
@@ -512,6 +512,15 @@ export class ChatHubWorkflowService {
 				},
 			};
 		}
+		case 'deepSeek': {
+			return {
+				...common,
+				parameters: {
+					model,
+					options: {},
+				},
+			};
+		}
 		case 'cohere': {
 			return {
 				...common,
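The new branch follows the same pattern as the other minimal providers: only the model id is set and options stays empty, with common supplying the shared node fields. A sketch of the object this case produces, with hypothetical placeholders for the method's locals:

// Placeholders standing in for the surrounding method's locals.
declare const common: Record<string, unknown>; // shared node fields (name, type, typeVersion, credentials, ...)
declare const model: string; // the selected model id, e.g. 'deepseek-chat'

const deepSeekNode = {
	...common,
	parameters: {
		model,
		options: {}, // no provider-specific options are preset for DeepSeek
	},
};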
4 changes: 4 additions & 0 deletions packages/cli/src/modules/chat-hub/chat-hub.constants.ts
@@ -36,6 +36,10 @@ export const PROVIDER_NODE_TYPE_MAP: Record<ChatHubLLMProvider, INodeTypeNameVer
 		name: '@n8n/n8n-nodes-langchain.lmChatAwsBedrock',
 		version: 1.1,
 	},
+	deepSeek: {
+		name: '@n8n/n8n-nodes-langchain.lmChatDeepSeek',
+		version: 1,
+	},
 	cohere: {
 		name: '@n8n/n8n-nodes-langchain.lmChatCohere',
 		version: 1,
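This constant is what binds the chat-hub provider key to the LangChain node that ultimately runs the chat model; the service change below passes PROVIDER_NODE_TYPE_MAP.deepSeek into its load-options call. A trivial sketch of the lookup, assuming a relative import from the constants module:

import { PROVIDER_NODE_TYPE_MAP } from './chat-hub.constants';

// Resolves the node type executed for DeepSeek conversations.
const { name, version } = PROVIDER_NODE_TYPE_MAP.deepSeek;
// name    -> '@n8n/n8n-nodes-langchain.lmChatDeepSeek'
// version -> 1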
58 changes: 58 additions & 0 deletions packages/cli/src/modules/chat-hub/chat-hub.service.ts
@@ -164,6 +164,8 @@ export class ChatHubService {
 				return await this.fetchAzureOpenAiModels(credentials, additionalData);
 			case 'awsBedrock':
 				return await this.fetchAwsBedrockModels(credentials, additionalData);
+			case 'deepSeek':
+				return await this.fetchDeepSeekModels(credentials, additionalData);
 			case 'cohere':
 				return await this.fetchCohereModels(credentials, additionalData);
 			case 'mistralCloud':
@@ -593,6 +595,62 @@ export class ChatHubService {
 		};
 	}
 
+	private async fetchDeepSeekModels(
+		credentials: INodeCredentials,
+		additionalData: IWorkflowExecuteAdditionalData,
+	): Promise<ChatModelsResponse['deepSeek']> {
+		const results = await this.nodeParametersService.getOptionsViaLoadOptions(
+			{
+				routing: {
+					request: {
+						method: 'GET',
+						url: '/models',
+					},
+					output: {
+						postReceive: [
+							{
+								type: 'rootProperty',
+								properties: {
+									property: 'data',
+								},
+							},
+							{
+								type: 'setKeyValue',
+								properties: {
+									name: '={{$responseItem.id}}',
+									value: '={{$responseItem.id}}',
+								},
+							},
+							{
+								type: 'sort',
+								properties: {
+									key: 'name',
+								},
+							},
+						],
+					},
+				},
+			},
+			additionalData,
+			PROVIDER_NODE_TYPE_MAP.deepSeek,
+			{},
+			credentials,
+		);
+
+		return {
+			models: results.map((result) => ({
+				name: result.name,
+				description: result.description ?? String(result.value),
+				model: {
+					provider: 'deepSeek',
+					model: String(result.value),
+				},
+				createdAt: null,
+				updatedAt: null,
+			})),
+		};
+	}
+
 	private async fetchAgentWorkflowsAsModels(user: User): Promise<ChatModelsResponse['n8n']> {
 		const nodeTypes = [CHAT_TRIGGER_NODE_TYPE];
 		const workflows = await this.workflowService.getWorkflowsWithNodesIncluded(
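For context: DeepSeek exposes an OpenAI-compatible GET /models endpoint returning { data: [{ id, ... }] }, which the rootProperty, setKeyValue, and sort steps flatten into sorted name/value options before the map above builds the response. A sketch of the shape fetchDeepSeekModels then returns, with illustrative model ids and an assumed export location for the type:

import type { ChatModelsResponse } from '@n8n/api-types'; // assumed export path

// Hypothetical result for a credential that can list two models.
const deepSeekModels: ChatModelsResponse['deepSeek'] = {
	models: [
		{
			name: 'deepseek-chat',
			description: 'deepseek-chat', // description falls back to the model id
			model: { provider: 'deepSeek', model: 'deepseek-chat' },
			createdAt: null,
			updatedAt: null,
		},
		{
			name: 'deepseek-reasoner',
			description: 'deepseek-reasoner',
			model: { provider: 'deepSeek', model: 'deepseek-reasoner' },
			createdAt: null,
			updatedAt: null,
		},
	],
};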
1 change: 1 addition & 0 deletions packages/cli/src/modules/chat-hub/context-limits.ts
@@ -140,6 +140,7 @@ export const maxContextWindowTokens: Record<ChatHubLLMProvider, Record<string, n
 	azureOpenAi: {},
 	ollama: {},
 	awsBedrock: {},
+	deepSeek: {},
 	cohere: {},
 	mistralCloud: {},
 };
@@ -14,6 +14,7 @@ export const providerDisplayNames: Record<ChatHubProvider, string> = {
 	azureOpenAi: 'Azure OpenAI',
 	ollama: 'Ollama',
 	awsBedrock: 'AWS Bedrock',
+	deepSeek: 'DeepSeek',
 	cohere: 'Cohere',
 	mistralCloud: 'Mistral Cloud',
 	n8n: 'n8n',