
Commit 3da0fc8

Cadiac authored and konstantintieber committed
feat(core): Add Mistral cloud provider to chat hub (no-changelog) (#22067)
1 parent 3549887 · commit 3da0fc8

File tree

6 files changed (+91, -1 lines):

packages/@n8n/api-types/src/chat-hub.ts
packages/cli/src/modules/chat-hub/chat-hub-workflow.service.ts
packages/cli/src/modules/chat-hub/chat-hub.constants.ts
packages/cli/src/modules/chat-hub/chat-hub.service.ts
packages/cli/src/modules/chat-hub/context-limits.ts
packages/frontend/editor-ui/src/features/ai/chatHub/constants.ts

packages/@n8n/api-types/src/chat-hub.ts

Lines changed: 12 additions & 1 deletion

@@ -18,6 +18,7 @@ export const chatHubLLMProviderSchema = z.enum([
 	'azureOpenAi',
 	'ollama',
 	'awsBedrock',
+	'mistralCloud',
 ]);
 export type ChatHubLLMProvider = z.infer<typeof chatHubLLMProviderSchema>;

@@ -42,6 +43,7 @@ export const PROVIDER_CREDENTIAL_TYPE_MAP: Record<
 	ollama: 'ollamaApi',
 	azureOpenAi: 'azureOpenAiApi',
 	awsBedrock: 'aws',
+	mistralCloud: 'mistralCloudApi',
 };

 export type ChatHubAgentTool = typeof JINA_AI_TOOL_NODE_TYPE | typeof SEAR_XNG_TOOL_NODE_TYPE;
@@ -79,6 +81,11 @@ const awsBedrockModelSchema = z.object({
 	model: z.string(),
 });

+const mistralCloudModelSchema = z.object({
+	provider: z.literal('mistralCloud'),
+	model: z.string(),
+});
+
 const n8nModelSchema = z.object({
 	provider: z.literal('n8n'),
 	workflowId: z.string(),
@@ -96,6 +103,7 @@ export const chatHubConversationModelSchema = z.discriminatedUnion('provider', [
 	azureOpenAIModelSchema,
 	ollamaModelSchema,
 	awsBedrockModelSchema,
+	mistralCloudModelSchema,
 	n8nModelSchema,
 	chatAgentSchema,
 ]);
@@ -106,13 +114,15 @@ export type ChatHubGoogleModel = z.infer<typeof googleModelSchema>;
 export type ChatHubAzureOpenAIModel = z.infer<typeof azureOpenAIModelSchema>;
 export type ChatHubOllamaModel = z.infer<typeof ollamaModelSchema>;
 export type ChatHubAwsBedrockModel = z.infer<typeof awsBedrockModelSchema>;
+export type ChatHubMistralCloudModel = z.infer<typeof mistralCloudModelSchema>;
 export type ChatHubBaseLLMModel =
 	| ChatHubOpenAIModel
 	| ChatHubAnthropicModel
 	| ChatHubGoogleModel
 	| ChatHubAzureOpenAIModel
 	| ChatHubOllamaModel
-	| ChatHubAwsBedrockModel;
+	| ChatHubAwsBedrockModel
+	| ChatHubMistralCloudModel;

 export type ChatHubN8nModel = z.infer<typeof n8nModelSchema>;
 export type ChatHubCustomAgentModel = z.infer<typeof chatAgentSchema>;
@@ -155,6 +165,7 @@ export const emptyChatModelsResponse: ChatModelsResponse = {
 	azureOpenAi: { models: [] },
 	ollama: { models: [] },
 	awsBedrock: { models: [] },
+	mistralCloud: { models: [] },
 	n8n: { models: [] },
 	// eslint-disable-next-line @typescript-eslint/naming-convention
 	'custom-agent': { models: [] },
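
The new mistralCloudModelSchema slots into the existing discriminated union, so a Mistral Cloud selection is validated the same way as the other providers, keyed on the 'provider' field. A minimal sketch of that round trip, assuming the schema is re-exported from the package entry point and using a placeholder model ID:

// Sketch only: the import path and the model ID ('mistral-large-latest')
// are assumptions, not taken from this commit.
import { chatHubConversationModelSchema } from '@n8n/api-types';

// The discriminated union dispatches to mistralCloudModelSchema via 'provider'.
const parsed = chatHubConversationModelSchema.parse({
	provider: 'mistralCloud',
	model: 'mistral-large-latest',
});

// A provider value outside the enum still fails validation.
const bad = chatHubConversationModelSchema.safeParse({ provider: 'mistral', model: 'x' });
console.log(bad.success); // false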

packages/cli/src/modules/chat-hub/chat-hub-workflow.service.ts

Lines changed: 9 additions & 0 deletions

@@ -510,6 +510,15 @@ export class ChatHubWorkflowService {
 					},
 				};
 			}
+			case 'mistralCloud': {
+				return {
+					...common,
+					parameters: {
+						model,
+						options: {},
+					},
+				};
+			}
 			default:
 				throw new OperationalError('Unsupported model provider');
 		}

packages/cli/src/modules/chat-hub/chat-hub.constants.ts

Lines changed: 4 additions & 0 deletions

@@ -36,6 +36,10 @@ export const PROVIDER_NODE_TYPE_MAP: Record<ChatHubLLMProvider, INodeTypeNameVer
 		name: '@n8n/n8n-nodes-langchain.lmChatAwsBedrock',
 		version: 1.1,
 	},
+	mistralCloud: {
+		name: '@n8n/n8n-nodes-langchain.lmChatMistralCloud',
+		version: 1,
+	},
 };

 export const NODE_NAMES = {
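
Together with the case 'mistralCloud' branch added to chat-hub-workflow.service.ts above, this mapping determines the chat model node the hub wires into the generated workflow. A rough sketch of the resulting node, assuming `common` supplies the usual n8n node fields (which are not shown in this diff) and using a placeholder model ID:

// Hypothetical node JSON for a Mistral Cloud selection. Everything except
// type, typeVersion, parameters, and the credential type name is an assumption.
const mistralChatModelNode = {
	name: 'Mistral Cloud Chat Model',                    // assumed, supplied via `common`
	type: '@n8n/n8n-nodes-langchain.lmChatMistralCloud', // PROVIDER_NODE_TYPE_MAP.mistralCloud.name
	typeVersion: 1,                                      // PROVIDER_NODE_TYPE_MAP.mistralCloud.version
	parameters: {
		model: 'mistral-large-latest',                   // placeholder model ID
		options: {},
	},
	credentials: {
		// credential type from PROVIDER_CREDENTIAL_TYPE_MAP; id/name values assumed
		mistralCloudApi: { id: 'credential-id', name: 'Mistral Cloud account' },
	},
	position: [0, 0],                                    // assumed
};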

packages/cli/src/modules/chat-hub/chat-hub.service.ts

Lines changed: 64 additions & 0 deletions

@@ -164,6 +164,8 @@ export class ChatHubService {
 				return await this.fetchAzureOpenAiModels(credentials, additionalData);
 			case 'awsBedrock':
 				return await this.fetchAwsBedrockModels(credentials, additionalData);
+			case 'mistralCloud':
+				return await this.fetchMistralCloudModels(credentials, additionalData);
 			case 'n8n':
 				return await this.fetchAgentWorkflowsAsModels(user);
 			case 'custom-agent':
@@ -470,6 +472,68 @@ export class ChatHubService {
 		};
 	}

+	private async fetchMistralCloudModels(
+		credentials: INodeCredentials,
+		additionalData: IWorkflowExecuteAdditionalData,
+	): Promise<ChatModelsResponse['mistralCloud']> {
+		const results = await this.nodeParametersService.getOptionsViaLoadOptions(
+			{
+				routing: {
+					request: {
+						method: 'GET',
+						url: '/models',
+					},
+					output: {
+						postReceive: [
+							{
+								type: 'rootProperty',
+								properties: {
+									property: 'data',
+								},
+							},
+							{
+								type: 'filter',
+								properties: {
+									pass: "={{ !$responseItem.id.includes('embed') }}",
+								},
+							},
+							{
+								type: 'setKeyValue',
+								properties: {
+									name: '={{ $responseItem.id }}',
+									value: '={{ $responseItem.id }}',
+								},
+							},
+							{
+								type: 'sort',
+								properties: {
+									key: 'name',
+								},
+							},
+						],
+					},
+				},
+			},
+			additionalData,
+			PROVIDER_NODE_TYPE_MAP.mistralCloud,
+			{},
+			credentials,
+		);
+
+		return {
+			models: results.map((result) => ({
+				name: result.name,
+				description: result.description ?? String(result.value),
+				model: {
+					provider: 'mistralCloud',
+					model: String(result.value),
+				},
+				createdAt: null,
+				updatedAt: null,
+			})),
+		};
+	}
+
 	private async fetchAgentWorkflowsAsModels(user: User): Promise<ChatModelsResponse['n8n']> {
 		const nodeTypes = [CHAT_TRIGGER_NODE_TYPE];
 		const workflows = await this.workflowService.getWorkflowsWithNodesIncluded(
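
The postReceive steps in fetchMistralCloudModels are declarative, but their effect reads as a plain transformation of Mistral's /models response. A sketch of the equivalent logic, assuming the endpoint returns { data: [{ id: string, ... }] } (the shape implied by the rootProperty step and the $responseItem.id expressions):

// Equivalent of the rootProperty -> filter -> setKeyValue -> sort pipeline.
interface MistralModelsResponse {
	data: Array<{ id: string }>;
}

function toLoadOptions(response: MistralModelsResponse): Array<{ name: string; value: string }> {
	return response.data
		.filter((m) => !m.id.includes('embed'))        // drop embedding models
		.map((m) => ({ name: m.id, value: m.id }))     // name and value both come from the model id
		.sort((a, b) => a.name.localeCompare(b.name)); // sort ascending by name
}

// fetchMistralCloudModels then wraps each entry as
// { provider: 'mistralCloud', model: value } in the ChatModelsResponse.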

packages/cli/src/modules/chat-hub/context-limits.ts

Lines changed: 1 addition & 0 deletions

@@ -140,6 +140,7 @@ export const maxContextWindowTokens: Record<ChatHubLLMProvider, Record<string, n
 	azureOpenAi: {},
 	ollama: {},
 	awsBedrock: {},
+	mistralCloud: {},
 };

 export const getMaxContextWindowTokens = (

packages/frontend/editor-ui/src/features/ai/chatHub/constants.ts

Lines changed: 1 addition & 0 deletions

@@ -14,6 +14,7 @@ export const providerDisplayNames: Record<ChatHubProvider, string> = {
 	azureOpenAi: 'Azure OpenAI',
 	ollama: 'Ollama',
 	awsBedrock: 'AWS Bedrock',
+	mistralCloud: 'Mistral Cloud',
 	n8n: 'n8n',
 	'custom-agent': 'Custom Agent',
 };
