From 9137ae30638b665f92820dc8ce274aec6affc99e Mon Sep 17 00:00:00 2001
From: "guangfei.zhao"
Date: Wed, 22 Oct 2025 15:27:31 +0800
Subject: [PATCH] feat(models): enhance model management with improved dialogs
 and state handling

---
 src/hooks/setting-hooks.ts                    |  54 +++--
 src/interfaces/request/llm.ts                 |   8 +
 src/pages/setting/components/ModelDialogs.tsx |   5 +-
 src/pages/setting/hooks/useModelDialogs.ts    |  67 ++++--
 src/pages/setting/models.tsx                  | 207 +++++++++---------
 src/services/user_service.ts                  |  11 +-
 6 files changed, 197 insertions(+), 155 deletions(-)

diff --git a/src/hooks/setting-hooks.ts b/src/hooks/setting-hooks.ts
index b790f3d..1f6425a 100644
--- a/src/hooks/setting-hooks.ts
+++ b/src/hooks/setting-hooks.ts
@@ -11,7 +11,7 @@ import type { LLMFactory } from "@/constants/llm";
  * 个人中心设置
  */
 export function useProfileSetting() {
-  const {fetchUserInfo, userInfo} = useUserData();
+  const { fetchUserInfo, userInfo } = useUserData();
 
   useEffect(() => {
     fetchUserInfo();
@@ -53,36 +53,42 @@ export function useLlmModelSetting() {
   const [llmFactory, setLlmFactory] = useState([]);
   const [myLlm, setMyLlm] = useState>();
 
+  const fetchLlmFactory = async () => {
+    try {
+      const res = await userService.llm_factories_list();
+      const arr = res.data.data || [];
+      setLlmFactory(arr);
+    } catch (error) {
+      logger.error('获取模型工厂失败:', error);
+      throw error;
+    }
+  }
+
+  const fetchMyLlm = async () => {
+    try {
+      const res = await userService.my_llm();
+      const llm_dic = res.data.data || {};
+      setMyLlm(llm_dic);
+    } catch (error) {
+      logger.error('获取我的模型失败:', error);
+      throw error;
+    }
+  }
   useEffect(() => {
-    const fetchLlmFactory = async () => {
-      try {
-        const res = await userService.llm_factories_list();
-        const arr = res.data.data || [];
-        setLlmFactory(arr);
-      } catch (error) {
-        logger.error('获取模型工厂失败:', error);
-        throw error;
-      }
-    }
-
-    const fetchMyLlm = async () => {
-      try {
-        const res = await userService.my_llm();
-        const llm_dic = res.data.data || {};
-        setMyLlm(llm_dic);
-      } catch (error) {
-        logger.error('获取我的模型失败:', error);
-        throw error;
-      }
-    }
-
     fetchLlmFactory();
     fetchMyLlm();
-  }, []); // 空依赖数组,只在组件挂载时执行一次
+  }, []);
+
+  const refreshLlmModel = async () => {
+    await fetchMyLlm();
+    // await fetchLlmFactory();
+    logger.info('刷新我的模型成功');
+  }
 
   return {
     llmFactory,
     myLlm,
+    refreshLlmModel,
   }
 }
diff --git a/src/interfaces/request/llm.ts b/src/interfaces/request/llm.ts
index 05f8f47..9178a5a 100644
--- a/src/interfaces/request/llm.ts
+++ b/src/interfaces/request/llm.ts
@@ -1,3 +1,11 @@
+export interface ISetApiKeyRequestBody {
+  llm_factory: string;
+  api_key: string;
+  llm_name?: string;
+  model_type?: string;
+  base_url?: string;
+}
+
 export interface IAddLlmRequestBody {
   llm_factory: string; // Ollama
   llm_name: string;
diff --git a/src/pages/setting/components/ModelDialogs.tsx b/src/pages/setting/components/ModelDialogs.tsx
index 34972e8..098b562 100644
--- a/src/pages/setting/components/ModelDialogs.tsx
+++ b/src/pages/setting/components/ModelDialogs.tsx
@@ -25,6 +25,7 @@ import { IconMap, type LLMFactory } from '@/constants/llm';
 import type { ITenantInfo } from '@/interfaces/database/knowledge';
 import type { LlmModelType } from '@/constants/knowledge';
 import type { IMyLlmModel, IThirdOAIModel } from '@/interfaces/database/llm';
+import logger from '@/utils/logger';
 
 // 基础对话框状态
 interface BaseDialogState {
@@ -164,6 +165,8 @@ export const ApiKeyDialog: React.FC = ({
   });
   const [showApiKey, setShowApiKey] = React.useState(false);
 
+  logger.info('ApiKeyDialog 初始化:', { open, editMode, factoryName, initialData });
+
   useEffect(() => {
     if (open && initialData) {
       reset(initialData);
@@ -664,6 +667,7 @@ export const SystemModelDialog: React.FC = ({
   onSubmit,
   loading,
   initialData,
+  editMode = false,
   allModelOptions
 }) => {
   const { control, handleSubmit, reset, formState: { errors } } = useForm({
@@ -676,7 +680,6 @@
   };
 
   // all model options 包含了全部的 options
-  const llmOptions = useMemo(() => allModelOptions?.llmOptions || [], [allModelOptions]);
   const embdOptions = useMemo(() => allModelOptions?.embeddingOptions || [], [allModelOptions]);
   const img2txtOptions = useMemo(() => allModelOptions?.image2textOptions || [], [allModelOptions]);
diff --git a/src/pages/setting/hooks/useModelDialogs.ts b/src/pages/setting/hooks/useModelDialogs.ts
index 1324da1..be5aa53 100644
--- a/src/pages/setting/hooks/useModelDialogs.ts
+++ b/src/pages/setting/hooks/useModelDialogs.ts
@@ -1,4 +1,4 @@
-import { useState, useCallback, useMemo } from 'react';
+import { useState, useCallback, useMemo, useEffect } from 'react';
 import { useMessage } from '@/hooks/useSnackbar';
 import userService from '@/services/user_service';
 import logger from '@/utils/logger';
@@ -11,6 +11,8 @@ import type {
 import type { ITenantInfo } from '@/interfaces/database/knowledge';
 import { useLlmList } from '@/hooks/llm-hooks';
 import type { LlmModelType } from '@/constants/knowledge';
+import { useUserData } from '@/hooks/useUserData';
+import type { ISetApiKeyRequestBody } from '@/interfaces/request/llm';
 
 // 对话框状态管理 hook
 export const useDialogState = () => {
@@ -20,7 +22,9 @@ export const useDialogState = () => {
   const [initialData, setInitialData] = useState(null);
 
   const openDialog = useCallback((data?: any, isEdit = false) => {
-    setInitialData(data);
+    if (data != null) {
+      setInitialData(data);
+    }
     setEditMode(isEdit);
     setOpen(true);
   }, []);
@@ -55,19 +59,26 @@ export const useApiKeyDialog = () => {
 
   const submitApiKey = useCallback(async (data: ApiKeyFormData) => {
     dialogState.setLoading(true);
+    logger.info('提交 API Key:', data);
     try {
-      await userService.set_api_key({
-        factory_name: factoryName,
-        model_name: '', // 根据实际需求调整
-        // api_key: data.api_key,
-        ...data
-      });
+      const params: ISetApiKeyRequestBody = {
+        llm_factory: factoryName,
+        api_key: data.api_key,
+      };
+
+      if (data.base_url && data.base_url.trim() !== '') {
+        params.base_url = data.base_url;
+      }
+
+      if (data.group_id && data.group_id.trim() !== '') {
+        // params.group_id = data.group_id;
+      }
+
+      await userService.set_api_key(params);
       showMessage.success('API Key 配置成功');
       dialogState.closeDialog();
     } catch (error) {
       logger.error('API Key 配置失败:', error);
-      showMessage.error('API Key 配置失败');
-      throw error;
     } finally {
       dialogState.setLoading(false);
     }
@@ -91,8 +102,8 @@ export const useAzureOpenAIDialog = () => {
     try {
       // 调用 Azure OpenAI 特定的 API
       await userService.set_api_key({
-        factory_name: 'AzureOpenAI',
-        model_name: data.deployment_name,
+        llm_factory: 'AzureOpenAI',
+        llm_name: data.deployment_name,
         api_key: data.api_key,
         // azure_endpoint: data.azure_endpoint,
         // api_version: data.api_version,
@@ -124,8 +135,8 @@ export const useBedrockDialog = () => {
     try {
       // 调用 Bedrock 特定的 API
       await userService.set_api_key({
-        factory_name: 'Bedrock',
-        model_name: '',
+        llm_factory: 'Bedrock',
+        llm_name: '',
         api_key: '', // Bedrock 使用 access key
         // access_key_id: data.access_key_id,
         // secret_access_key: data.secret_access_key,
@@ -158,8 +169,8 @@ export const useOllamaDialog = () => {
     try {
       // 调用添加 LLM 的 API
       await userService.add_llm({
-        factory_name: 'Ollama',
-        model_name: data.model_name,
+        llm_factory: 'Ollama',
+        llm_name: data.model_name,
         // base_url: data.base_url,
       });
       showMessage.success('Ollama 模型添加成功');
@@ -188,14 +199,12 @@ export const useDeleteOperations = () => {
     setLoading(true);
     try {
       await userService.delete_llm({
-        factory_name: factoryName,
-        model_name: modelName,
+        llm_factory: factoryName,
+        llm_name: modelName,
       });
       showMessage.success('模型删除成功');
     } catch (error) {
       logger.error('模型删除失败:', error);
-      showMessage.error('模型删除失败');
-      throw error;
     } finally {
       setLoading(false);
     }
@@ -205,13 +214,11 @@
     setLoading(true);
     try {
       await userService.deleteFactory({
-        factory_name: factoryName,
+        llm_factory: factoryName,
       });
       showMessage.success('模型工厂删除成功');
     } catch (error) {
       logger.error('模型工厂删除失败:', error);
-      showMessage.error('模型工厂删除失败');
-      throw error;
     } finally {
       setLoading(false);
     }
@@ -231,6 +238,12 @@ export const useSystemModelSetting = () => {
 
   const { data: llmList } = useLlmList();
 
+  const { tenantInfo, fetchTenantInfo } = useUserData();
+
+  useEffect(() => {
+    fetchTenantInfo();
+  }, []);
+
   const getOptionsByModelType = useCallback((modelType: LlmModelType) => {
     return Object.entries(llmList)
       .filter(([, value]) =>
@@ -278,11 +291,16 @@
 
   const submitSystemModelSetting = useCallback(async (data: Partial) => {
     dialogState.setLoading(true);
+    logger.debug('submitSystemModelSetting data:', data);
     try {
+      delete data.role;
       // 这里需要根据实际的 API 接口调整
-      // await userService.setSystemDefaultModel(data);
+      await userService.setTenantInfo({
+        ...data,
+      });
       showMessage.success('系统默认模型设置成功');
       dialogState.closeDialog();
+      fetchTenantInfo();
     } catch (error) {
       logger.error('系统默认模型设置失败:', error);
       showMessage.error('系统默认模型设置失败');
@@ -296,6 +314,7 @@
     ...dialogState,
     submitSystemModelSetting,
     allModelOptions,
+    initialData: tenantInfo,
   };
 };
diff --git a/src/pages/setting/models.tsx b/src/pages/setting/models.tsx
index 5faab22..337dfd2 100644
--- a/src/pages/setting/models.tsx
+++ b/src/pages/setting/models.tsx
@@ -25,61 +25,82 @@
 } from '@mui/icons-material';
 import { useLlmModelSetting } from '@/hooks/setting-hooks';
 import { useModelDialogs } from './hooks/useModelDialogs';
-import AppSvgIcon, { LlmSvgIcon } from '@/components/AppSvgIcon';
-import { LLM_FACTORY_LIST, IconMap, type LLMFactory } from '@/constants/llm';
 import type { IFactory, IMyLlmModel, ILlmItem } from '@/interfaces/database/llm';
 import LLMFactoryCard, { MODEL_TYPE_COLORS } from './components/LLMFactoryCard';
 import { ModelDialogs } from './components/ModelDialogs';
+import { useDialog } from '@/hooks/useDialog';
+import logger from '@/utils/logger';
+import { useMessage } from '@/hooks/useSnackbar';
+function MyLlmGridItem({ model, onDelete }: { model: ILlmItem, onDelete: (model: ILlmItem) => void }) {
+  return (
+
+
+
+        {model.name}
+
+
+        onDelete(model)} >
+
+
+
+
+
+
+
+  );
+}
 
 // 主页面组件
 function ModelsPage() {
-  const { llmFactory, myLlm } = useLlmModelSetting();
+  const { llmFactory, myLlm, refreshLlmModel } = useLlmModelSetting();
   const modelDialogs = useModelDialogs();
 
   // 处理配置模型工厂
   const handleConfigureFactory = useCallback((factory: IFactory) => {
-    // modelDialogs.openDialog(factory.name);
-  }, [modelDialogs]);
+    modelDialogs.apiKeyDialog.openApiKeyDialog(factory.name);
+  }, [modelDialogs, refreshLlmModel]);
 
-  // 处理删除模型工厂
-  const handleDeleteFactory = useCallback(async (factoryName: string) => {
-    try {
-      // await modelDialogs.deleteOperations.deleteFactory(factoryName);
-      // 刷新数据
-      window.location.reload();
-    } catch (error) {
-      console.error('删除工厂失败:', error);
-    }
-  }, []);
+  const dialog = useDialog();
 
   // 处理删除单个模型
   const handleDeleteModel = useCallback(async (factoryName: string, modelName: string) => {
-    try {
-      // await modelDialogs.deleteOperations.deleteLlm(factoryName, modelName);
-      // 刷新数据
-      window.location.reload();
-    } catch (error) {
-      console.error('删除模型失败:', error);
-    }
-  }, []);
+    dialog.confirm({
+      title: '确认删除',
+      content: `是否确认删除模型 ${modelName}?`,
+      showCancel: true,
+      onConfirm: async () => {
+        await modelDialogs.deleteOps.deleteLlm(factoryName, modelName);
+        await refreshLlmModel();
+      },
+    });
+  }, [dialog, refreshLlmModel]);
 
-  // 处理编辑模型
-  const handleEditModel = useCallback((factory: IFactory, model: ILlmItem) => {
-    // 设置编辑模式并打开对话框
-    // modelDialogs.openDialog(factory.name, {
-    //   model_name: model.name,
-    //   api_base: model.api_base,
-    //   max_tokens: model.max_tokens,
-    // });
-  }, [modelDialogs]);
-
-  // 根据工厂名称获取对应的模型列表
-  const getModelsForFactory = (factoryName: LLMFactory): ILlmItem[] => {
-    if (!myLlm) return [];
-    const factoryGroup = myLlm[factoryName];
-    return factoryGroup?.llm || [];
-  };
+  // 处理删除模型工厂
+  const handleDeleteFactory = useCallback(async (factoryName: string) => {
+    dialog.confirm({
+      title: '确认删除',
+      content: `是否确认删除模型工厂 ${factoryName}?`,
+      showCancel: true,
+      onConfirm: async () => {
+        await modelDialogs.deleteOps.deleteFactory(factoryName);
+        await refreshLlmModel();
+      },
+    });
+  }, [dialog, refreshLlmModel]);
 
   if (!llmFactory || !myLlm) {
     return (
@@ -98,12 +119,12 @@ function ModelsPage() {
         justifyContent: 'space-between',
       }}>
-
-          模型设置
-
-
-          管理您的 LLM 模型工厂和个人模型配置
-
+
+          模型设置
+
+
+          管理您的 LLM 模型工厂和个人模型配置
+
       {/* 设置默认模型 */}
+
+
+
+
       {/* 模型列表 */}
+
+
       {group.llm.map((model) => (
+              handleDeleteModel(factoryName, model.name)}
+            />
       ))}
@@ -230,6 +234,7 @@ function ModelsPage() {
       {/* 模型配置对话框 */}
+      {/* @ts-ignore */}
 
   );
diff --git a/src/services/user_service.ts b/src/services/user_service.ts
index 51b527b..2ce7519 100644
--- a/src/services/user_service.ts
+++ b/src/services/user_service.ts
@@ -3,6 +3,7 @@ import request, { post } from '@/utils/request';
 import type { ITenantInfo } from '@/interfaces/database/knowledge';
 import type { IUserInfo, ITenant } from '@/interfaces/database/user-setting';
 import type { LlmModelType } from '@/constants/knowledge';
+import type { IAddLlmRequestBody, IDeleteLlmRequestBody, ISetApiKeyRequestBody } from '@/interfaces/request/llm';
 
 // 用户相关API服务
 const userService = {
@@ -53,7 +54,7 @@ const userService = {
   },
 
   // 设置租户信息
-  setTenantInfo: (data: ITenantInfo) => {
+  setTenantInfo: (data: Partial>) => {
     return post(api.set_tenant_info, data);
   },
   // 租户用户管理
@@ -95,22 +96,22 @@ const userService = {
   },
 
   // add llm
-  add_llm: (data: { factory_name: string; model_name: string }) => {
+  add_llm: (data: Partial) => {
     return request.post(api.add_llm, data);
   },
 
   // delete llm
-  delete_llm: (data: { factory_name: string; model_name: string }) => {
+  delete_llm: (data: IDeleteLlmRequestBody) => {
     return request.post(api.delete_llm, data);
   },
 
   // delete factory
-  deleteFactory: (data: { factory_name: string }) => {
+  deleteFactory: (data: IDeleteLlmRequestBody) => {
     return request.post(api.deleteFactory, data);
   },
 
   // set api key
-  set_api_key: (data: { factory_name: string; model_name: string; api_key: string }) => {
+  set_api_key: (data: ISetApiKeyRequestBody) => {
     return request.post(api.set_api_key, data);
   },
 };