From edba1f049e00789cdbd8652e5406580e5fdf6af8 Mon Sep 17 00:00:00 2001
From: "guangfei.zhao"
Date: Fri, 24 Oct 2025 15:40:34 +0800
Subject: [PATCH] feat(models): implement model configuration dialogs for Azure, Bedrock and Ollama

---
 src/interfaces/request/llm.ts                 |   6 +-
 .../components/Dialog/AzureOpenAIDialog.tsx   | 183 +++++++++---
 .../components/Dialog/BedrockDialog.tsx       | 227 ++++++++++++---
 .../components/Dialog/OllamaDialog.tsx        | 270 ++++++++++++++++--
 src/pages/setting/hooks/useModelDialogs.ts    |  43 +--
 src/pages/setting/models.tsx                  |  18 +-
 6 files changed, 603 insertions(+), 144 deletions(-)

diff --git a/src/interfaces/request/llm.ts b/src/interfaces/request/llm.ts
index 9178a5a..2daa24e 100644
--- a/src/interfaces/request/llm.ts
+++ b/src/interfaces/request/llm.ts
@@ -9,9 +9,9 @@ export interface ISetApiKeyRequestBody {
 export interface IAddLlmRequestBody {
   llm_factory: string; // Ollama
   llm_name: string;
-  model_type: string;
-  api_base?: string; // chat|embedding|speech2text|image2text
-  api_key: string;
+  model_type: string; // chat|embedding|speech2text|image2text
+  api_base: string;
+  api_key?: string;
   max_tokens: number;
 }

diff --git a/src/pages/setting/components/Dialog/AzureOpenAIDialog.tsx b/src/pages/setting/components/Dialog/AzureOpenAIDialog.tsx
index 385288b..1dbd98e 100644
--- a/src/pages/setting/components/Dialog/AzureOpenAIDialog.tsx
+++ b/src/pages/setting/components/Dialog/AzureOpenAIDialog.tsx
@@ -11,14 +11,26 @@ import {
   IconButton,
   InputAdornment,
   CircularProgress,
+  MenuItem,
+  Select,
+  FormControl,
+  InputLabel,
+  FormHelperText,
+  Link,
 } from '@mui/material';
 import { Visibility, VisibilityOff } from '@mui/icons-material';
 import { Controller, useForm } from 'react-hook-form';
+import type { IAddLlmRequestBody } from '@/interfaces/request/llm';
+
+// 模型类型选项
+const MODEL_TYPE_OPTIONS = [
+  { value: 'chat', label: 'Chat' },
+  { value: 'embedding', label: 'Embedding' },
+  { value: 'image2text', label: 'Image2Text' },
+];

 // 表单数据接口
-export interface AzureOpenAIFormData {
-  api_key: string;
-  endpoint: string;
+export interface AzureOpenAIFormData extends IAddLlmRequestBody {
   api_version: string;
 }

@@ -35,7 +47,7 @@ export interface AzureOpenAIDialogProps {
 /**
  * Azure OpenAI 配置对话框
  */
-function AzureOpenAIDialog ({
+function AzureOpenAIDialog({
   open,
   onClose,
   onSubmit,
@@ -52,16 +64,28 @@
     formState: { errors },
   } = useForm({
     defaultValues: {
+      model_type: 'embedding',
+      llm_name: 'gpt-3.5-turbo',
+      api_base: '',
       api_key: '',
-      endpoint: '',
       api_version: '2024-02-01',
+      max_tokens: 4096,
+      llm_factory: 'Azure-OpenAI',
     },
   });

   // 当对话框打开或初始数据变化时重置表单
   useEffect(() => {
     if (open) {
-      reset(initialData || { api_key: '', endpoint: '', api_version: '2024-02-01' });
+      reset({
+        model_type: 'embedding',
+        llm_name: 'gpt-3.5-turbo',
+        api_base: '',
+        api_key: '',
+        api_version: '2024-02-01',
+        max_tokens: 4096,
+        llm_factory: initialData?.llm_factory || 'Azure-OpenAI',
+      });
     }
   }, [open, initialData, reset]);

@@ -80,19 +104,86 @@
+          {/* 模型类型选择 */}
+          (
+
+            模型类型
+
+            {errors.model_type && (
+              {errors.model_type.message}
+            )}
+
+          )}
+          />
+
+          {/* 模型名称 */}
+          (
+          )}
+          />
+
+          {/* 基础 URL */}
+          (
+
+          )}
+          />
+
+          {/* API Key */}
+          (
+
@@ -110,29 +201,7 @@
           )}
           />
-          (
-
-          )}
-          />
-
+          {/* API Version */}
           )}
           />
+
+          {/* 最大token数 */}
+          (
+            field.onChange(parseInt(e.target.value) || 0)}
+          />
+          )}
+          />
-
-
+          {/* 右侧按钮组 */}
+
+
+
   );

diff --git a/src/pages/setting/components/Dialog/BedrockDialog.tsx b/src/pages/setting/components/Dialog/BedrockDialog.tsx
index f85a886..6a66ceb 100644
--- a/src/pages/setting/components/Dialog/BedrockDialog.tsx
+++ b/src/pages/setting/components/Dialog/BedrockDialog.tsx
@@ -15,25 +15,63 @@ import {
   Select,
   MenuItem,
   CircularProgress,
+  FormHelperText,
+  Link,
 } from '@mui/material';
 import { Visibility, VisibilityOff } from '@mui/icons-material';
 import { Controller, useForm } from 'react-hook-form';
+import type { IAddLlmRequestBody } from '@/interfaces/request/llm';

 // AWS Bedrock 支持的区域列表
 export const BEDROCK_REGIONS = [
-  { value: 'us-east-1', label: 'US East (N. Virginia)' },
-  { value: 'us-west-2', label: 'US West (Oregon)' },
-  { value: 'ap-southeast-2', label: 'Asia Pacific (Sydney)' },
-  { value: 'ap-northeast-1', label: 'Asia Pacific (Tokyo)' },
-  { value: 'eu-central-1', label: 'Europe (Frankfurt)' },
-  { value: 'eu-west-3', label: 'Europe (Paris)' },
+  'us-east-2',
+  'us-east-1',
+  'us-west-1',
+  'us-west-2',
+  'af-south-1',
+  'ap-east-1',
+  'ap-south-2',
+  'ap-southeast-3',
+  'ap-southeast-5',
+  'ap-southeast-4',
+  'ap-south-1',
+  'ap-northeast-3',
+  'ap-northeast-2',
+  'ap-southeast-1',
+  'ap-southeast-2',
+  'ap-east-2',
+  'ap-southeast-7',
+  'ap-northeast-1',
+  'ca-central-1',
+  'ca-west-1',
+  'eu-central-1',
+  'eu-west-1',
+  'eu-west-2',
+  'eu-south-1',
+  'eu-west-3',
+  'eu-south-2',
+  'eu-north-1',
+  'eu-central-2',
+  'il-central-1',
+  'mx-central-1',
+  'me-south-1',
+  'me-central-1',
+  'sa-east-1',
+  'us-gov-east-1',
+  'us-gov-west-1',
+];
+
+// 模型类型选项
+const MODEL_TYPE_OPTIONS = [
+  { value: 'chat', label: 'Chat' },
+  { value: 'embedding', label: 'Embedding' },
 ];

 // 表单数据接口
-export interface BedrockFormData {
-  access_key_id: string;
-  secret_access_key: string;
-  region: string;
+export interface BedrockFormData extends IAddLlmRequestBody {
+  bedrock_ak: string;
+  bedrock_sk: string;
+  bedrock_region: string;
 }

 // 对话框 Props 接口
@@ -67,16 +105,28 @@ function BedrockDialog ({
     formState: { errors },
   } = useForm({
     defaultValues: {
-      access_key_id: '',
-      secret_access_key: '',
-      region: 'us-east-1',
+      model_type: 'chat',
+      llm_name: '',
+      bedrock_ak: '',
+      bedrock_sk: '',
+      bedrock_region: 'us-east-1',
+      max_tokens: 4096,
+      llm_factory: 'Bedrock',
     },
   });

   // 当对话框打开或初始数据变化时重置表单
   useEffect(() => {
     if (open) {
-      reset(initialData || { access_key_id: '', secret_access_key: '', region: 'us-east-1' });
+      reset({
+        model_type: 'chat',
+        llm_name: '',
+        bedrock_ak: '',
+        bedrock_sk: '',
+        bedrock_region: 'us-east-1',
+        max_tokens: 4096,
+        llm_factory: initialData?.llm_factory || 'Bedrock',
+      });
     }
   }, [open, initialData, reset]);

@@ -92,26 +142,73 @@ function BedrockDialog ({
     setShowSecretKey(!showSecretKey);
   };

+  const docInfo = {
+    url: 'https://console.aws.amazon.com/',
+    text: '如何集成 Bedrock',
+  };
+
   return (
-        {editMode ? '编辑' : '配置'} AWS Bedrock
+        {editMode ? '编辑' : '添加'} LLM
+          {/* 模型类型 */}
+          (
+
+            * 模型类型
+
+            {errors.model_type && (
+              {errors.model_type.message}
+            )}
+
+          )}
+          />
+
+          {/* 模型名称 */}
+          (
+          )}
+          />
+
+          {/* ACCESS KEY */}
+          (
+
@@ -129,19 +226,21 @@ function BedrockDialog ({
           )}
           />
+          {/* SECRET KEY */}
           (
@@ -159,42 +258,76 @@ function BedrockDialog ({
           )}
           />
+          {/* AWS Region */}
           (
-
-              Region
-              {BEDROCK_REGIONS.map((region) => (
-
-                  {region.label}
+
+                  {region}
                 ))}
-              {errors.region && (
-
-                  {errors.region.message}
-
+              {errors.bedrock_region && (
+                {errors.bedrock_region.message}
               )}
             )}
           />
+
+          {/* 最大token数 */}
+          (
+            field.onChange(Number(e.target.value))}
+          />
+          )}
+          />
-
-
+
+
+          {docInfo.text}
+
+
+
+
+
   );

diff --git a/src/pages/setting/components/Dialog/OllamaDialog.tsx b/src/pages/setting/components/Dialog/OllamaDialog.tsx
index 88e183b..d574c10 100644
--- a/src/pages/setting/components/Dialog/OllamaDialog.tsx
+++ b/src/pages/setting/components/Dialog/OllamaDialog.tsx
@@ -1,4 +1,4 @@
-import React, { useEffect } from 'react';
+import React, { useEffect, useMemo } from 'react';
 import {
   Dialog,
   DialogTitle,
@@ -9,14 +9,28 @@ import {
   Box,
   Typography,
   CircularProgress,
+  MenuItem,
+  Select,
+  FormControl,
+  InputLabel,
+  FormHelperText,
+  Link,
 } from '@mui/material';
 import { Controller, useForm } from 'react-hook-form';
+import logger from '@/utils/logger';
+import { LLM_FACTORY_LIST, type LLMFactory } from '@/constants/llm';

 // 表单数据接口
 export interface OllamaFormData {
-  base_url: string;
+  model_type: string;
+  llm_name: string;
+  api_base: string;
+  api_key?: string;
+  max_tokens: number;
+  llm_factory: string;
 }
+
 // 对话框 Props 接口
 export interface OllamaDialogProps {
   open: boolean;
@@ -27,10 +41,49 @@
   editMode?: boolean;
 }

+const llmFactoryToUrlMap: { [x: string]: string } = {
+  [LLM_FACTORY_LIST.Ollama]:
+    'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx',
+  [LLM_FACTORY_LIST.Xinference]:
+    'https://inference.readthedocs.io/en/latest/user_guide',
+  [LLM_FACTORY_LIST.ModelScope]:
+    'https://www.modelscope.cn/docs/model-service/API-Inference/intro',
+  [LLM_FACTORY_LIST.LocalAI]: 'https://localai.io/docs/getting-started/models/',
+  [LLM_FACTORY_LIST.LMStudio]: 'https://lmstudio.ai/docs/basics',
+  [LLM_FACTORY_LIST.OpenAiAPICompatible]:
+    'https://platform.openai.com/docs/models/gpt-4',
+  [LLM_FACTORY_LIST.TogetherAI]: 'https://docs.together.ai/docs/deployment-options',
+  [LLM_FACTORY_LIST.Replicate]: 'https://replicate.com/docs/topics/deployments',
+  [LLM_FACTORY_LIST.OpenRouter]: 'https://openrouter.ai/docs',
+  [LLM_FACTORY_LIST.HuggingFace]:
+    'https://huggingface.co/docs/text-embeddings-inference/quick_tour',
+  [LLM_FACTORY_LIST.GPUStack]: 'https://docs.gpustack.ai/latest/quickstart',
+  [LLM_FACTORY_LIST.VLLM]: 'https://docs.vllm.ai/en/latest/',
+} as const;
+
+function getURLByFactory(factory: LLMFactory) {
+  const url = llmFactoryToUrlMap[factory];
+  return {
+    textTip: `如何集成 ${factory}`,
+    url,
+  }
+}
+
+
+// 模型类型选项
+const MODEL_TYPE_OPTIONS = [
+  { value: 'chat', label: 'Chat' },
+  { value: 'embedding', label: 'Embedding' },
+  { value: 'rerank', label: 'Rerank' },
+  { value: 'image2text', label: 'Image2Text' },
+  { value: 'speech2text', label: 'Speech2Text' },
+];
+
+
 /**
- * Ollama 配置对话框
+ * Ollama / local llm 配置对话框
  */
-function OllamaDialog ({
+function OllamaDialog({
   open,
   onClose,
   onSubmit,
@@ -45,14 +98,60 @@ function OllamaDialog ({
     formState: { errors },
   } = useForm({
     defaultValues: {
-      base_url: 'http://localhost:11434',
+      model_type: 'chat',
+      llm_name: '',
+      api_base: 'http://localhost:11434',
+      api_key: '',
+      max_tokens: 4096,
+      llm_factory: 'Ollama',
     },
   });

+  const modelTypeOptions = useMemo(() => {
+    const factory = initialData?.llm_factory || LLM_FACTORY_LIST.Ollama;
+    if (factory == LLM_FACTORY_LIST.HuggingFace) {
+      return [
+        { value: 'embedding', label: 'Embedding' },
+        { value: 'chat', label: 'Chat' },
+        { value: 'rerank', label: 'Rerank' },
+      ]
+    } else if (factory == LLM_FACTORY_LIST.Xinference) {
+      return [
+        { value: 'chat', label: 'Chat' },
+        { value: 'embedding', label: 'Embedding' },
+        { value: 'rerank', label: 'Rerank' },
+        { value: 'image2text', label: 'Image2Text' },
+        { value: 'speech2text', label: 'Speech2Text' },
+        { value: 'tts', label: 'TTS' },
+      ]
+    } else if (factory == LLM_FACTORY_LIST.ModelScope) {
+      return [
+        { value: 'chat', label: 'Chat' },
+      ]
+    } else if (factory == LLM_FACTORY_LIST.GPUStack) {
+      return [
+        { value: 'chat', label: 'Chat' },
+        { value: 'embedding', label: 'Embedding' },
+        { value: 'rerank', label: 'Rerank' },
+        { value: 'image2text', label: 'Image2Text' },
+      ]
+    }
+    return MODEL_TYPE_OPTIONS;
+  }, [initialData])
+
+  logger.debug('OllamaDialog', { open, initialData, editMode });
+
   // 当对话框打开或初始数据变化时重置表单
   useEffect(() => {
     if (open) {
-      reset(initialData || { base_url: 'http://localhost:11434' });
+      reset({
+        model_type: 'chat',
+        llm_name: '',
+        api_base: initialData?.api_base,
+        api_key: initialData?.api_key,
+        max_tokens: initialData?.max_tokens,
+        llm_factory: initialData?.llm_factory || 'Ollama',
+      });
     }
   }, [open, initialData, reset]);

@@ -60,49 +159,168 @@
     onSubmit(data);
   };

+  // 获取文档链接信息
+  const docInfo = getURLByFactory((initialData?.llm_factory || LLM_FACTORY_LIST.Ollama) as LLMFactory);
+
   return (
-        {editMode ? '编辑' : '配置'} Ollama
+        {editMode ? `编辑 ${initialData?.llm_factory || LLM_FACTORY_LIST.Ollama}` : `配置 ${initialData?.llm_factory || LLM_FACTORY_LIST.Ollama}`}
+          {/* 模型类型选择 */}
+          (
+
+            模型类型 *
+
+            {errors.model_type && (
+              {errors.model_type.message}
+            )}
+
+          )}
+          />
+
+          {/* 模型名称 */}
+          (
+
+          )}
+          />
+
+          {/* 基础 URL */}
+          (
+          )}
+          />
+
+          {/* API Key (可选) */}
+          (
+
+          )}
+          />
+
+          {/* 最大 Token 数 */}
+          (
+            field.onChange(parseInt(e.target.value) || 0)}
           />
           )}
           />
-
-
+
+        {/* 左侧文档链接 */}
+
+          {docInfo.textTip}
+
+
+        {/* 右侧按钮组 */}
+
+
+
+
   );

diff --git a/src/pages/setting/hooks/useModelDialogs.ts b/src/pages/setting/hooks/useModelDialogs.ts
index 43f81dd..0a8c300 100644
--- a/src/pages/setting/hooks/useModelDialogs.ts
+++ b/src/pages/setting/hooks/useModelDialogs.ts
@@ -107,23 +107,25 @@ export const useAzureOpenAIDialog = () => {
       dialogState.setLoading(true);
       try {
         // 调用 Azure OpenAI 特定的 API
-        await userService.set_api_key({
-          llm_factory: 'AzureOpenAI',
-          // llm_name: data.deployment_name,
+        await userService.add_llm({
+          llm_factory: data.llm_factory,
+          llm_name: data.llm_name,
+          model_type: data.model_type,
+          api_base: data.api_base,
           api_key: data.api_key,
-          // azure_endpoint: data.azure_endpoint,
-          // api_version: data.api_version,
+          // @ts-ignore
+          api_version: data.api_version,
+          max_tokens: data.max_tokens,
         });
         showMessage.success('Azure OpenAI 配置成功');
         dialogState.closeDialog();
       } catch (error) {
         logger.error('Azure OpenAI 配置失败:', error);
-        showMessage.error('Azure OpenAI 配置失败');
         throw error;
       } finally {
         dialogState.setLoading(false);
       }
-  }, [dialogState]);
+  }, [dialogState, showMessage]);

   return {
     ...dialogState,
@@ -140,13 +142,15 @@ export const useBedrockDialog = () => {
       dialogState.setLoading(true);
       try {
         // 调用 Bedrock 特定的 API
-        await userService.set_api_key({
-          llm_factory: 'Bedrock',
-          llm_name: '',
-          api_key: '', // Bedrock 使用 access key
-          // access_key_id: data.access_key_id,
-          // secret_access_key: data.secret_access_key,
-          // region: data.region,
+        await userService.add_llm({
+          llm_factory: data.llm_factory,
+          llm_name: data.llm_name,
+          model_type: data.model_type,
+          // @ts-ignore
+          bedrock_ak: data.bedrock_ak,
+          bedrock_sk: data.bedrock_sk,
+          bedrock_region: data.bedrock_region,
+          max_tokens: data.max_tokens,
         });
         showMessage.success('AWS Bedrock 配置成功');
         dialogState.closeDialog();
@@ -175,9 +179,12 @@ export const useOllamaDialog = () => {
       try {
         // 调用添加 LLM 的 API
         await userService.add_llm({
-          llm_factory: 'Ollama',
-          // llm_name: data.model_name,
-          // base_url: data.base_url,
+          llm_factory: data.llm_factory,
+          llm_name: data.llm_name,
+          model_type: data.model_type,
+          api_base: data.api_base,
+          api_key: data.api_key || '',
+          max_tokens: data.max_tokens,
         });
         showMessage.success('Ollama 模型添加成功');
         dialogState.closeDialog();
       } finally {
         dialogState.setLoading(false);
       }
-  }, [dialogState]);
+  }, [dialogState, showMessage]);

   return {
     ...dialogState,

diff --git a/src/pages/setting/models.tsx b/src/pages/setting/models.tsx
index b89a174..b5cceda 100644
--- a/src/pages/setting/models.tsx
+++ b/src/pages/setting/models.tsx
@@ -69,7 +69,7 @@ function ModelsPage() {
   const { llmFactory, myLlm, refreshLlmModel } = useLlmModelSetting();
   const modelDialogs = useModelDialogs(refreshLlmModel);

-  // 折叠状态管理 - 使用 Map 来管理每个工厂的折叠状态
+  // 折叠状态管理 - 使用 Map 来管理每个工厂的折叠状态,默认所有工厂都是折叠的
   const [collapsedFactories, setCollapsedFactories] = useState>({});

   // 切换工厂折叠状态
@@ -120,11 +120,17 @@ function ModelsPage() {
     // 然后有很多自定义的配置项,需要单独用 dialog 来配置
     const factoryName = factory.name as LLMFactory;
     if (LocalLlmFactories.includes(factoryName)) {
-      // modelDialogs.localLlmDialog.openLocalLlmDialog(factoryName);
+      modelDialogs.ollamaDialog.openDialog({
+        llm_factory: factory.name,
+      });
     } else if (factoryName == LLM_FACTORY_LIST.AzureOpenAI) {
-
+      modelDialogs.azureDialog.openDialog({
+        llm_factory: factory.name,
+      });
     } else if (factoryName == LLM_FACTORY_LIST.Bedrock) {
-
+      modelDialogs.bedrockDialog.openDialog({
+        llm_factory: factory.name,
+      });
     } else if (factoryName == LLM_FACTORY_LIST.BaiduYiYan) {

     } else if (factoryName == LLM_FACTORY_LIST.GoogleCloud) {

@@ -241,7 +247,7 @@ function ModelsPage() {
                 {/* 折叠/展开图标 */}
-                {collapsedFactories[factoryName] ? : }
+                {collapsedFactories[factoryName] ? : }
                 {/* 模型工厂名称 */}
@@ -281,7 +287,7 @@ function ModelsPage() {
           {/* 模型列表 - 使用 Collapse 组件包装 */}
-
+
           {group.llm.map((model) => (
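The dialogs in this patch share one field-wiring pattern: each input is a MUI control bound to react-hook-form state through `Controller`, with choices drawn from a `MODEL_TYPE_OPTIONS` constant and validation errors surfaced via `errors`. A minimal sketch of one such field pair (a `model_type` Select plus an `llm_name` TextField), assuming the `IAddLlmRequestBody` shape from `src/interfaces/request/llm.ts`; the exact props, labels, and layout in the real components may differ, and `ModelFields` is only an illustrative name:

```tsx
import {
  FormControl,
  FormHelperText,
  InputLabel,
  MenuItem,
  Select,
  TextField,
} from '@mui/material';
import { Controller, useForm } from 'react-hook-form';
import type { IAddLlmRequestBody } from '@/interfaces/request/llm';

// Assumed subset of the options used by the dialogs above.
const MODEL_TYPE_OPTIONS = [
  { value: 'chat', label: 'Chat' },
  { value: 'embedding', label: 'Embedding' },
];

// Hypothetical, minimal form body: one Select bound to model_type and one
// TextField bound to llm_name, both driven by react-hook-form state.
function ModelFields() {
  const {
    control,
    formState: { errors },
  } = useForm<IAddLlmRequestBody>({
    defaultValues: {
      llm_factory: 'Ollama',
      llm_name: '',
      model_type: 'chat',
      api_base: '',
      max_tokens: 4096,
    },
  });

  return (
    <>
      <Controller
        name="model_type"
        control={control}
        rules={{ required: '请选择模型类型' }}
        render={({ field }) => (
          <FormControl fullWidth margin="normal" error={!!errors.model_type}>
            <InputLabel>模型类型</InputLabel>
            <Select {...field} label="模型类型">
              {MODEL_TYPE_OPTIONS.map((opt) => (
                <MenuItem key={opt.value} value={opt.value}>
                  {opt.label}
                </MenuItem>
              ))}
            </Select>
            {errors.model_type && (
              <FormHelperText>{errors.model_type.message}</FormHelperText>
            )}
          </FormControl>
        )}
      />
      <Controller
        name="llm_name"
        control={control}
        rules={{ required: '请输入模型名称' }}
        render={({ field }) => (
          <TextField
            {...field}
            fullWidth
            margin="normal"
            label="模型名称"
            error={!!errors.llm_name}
            helperText={errors.llm_name?.message}
          />
        )}
      />
    </>
  );
}
```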