feat(models): enhance model management with improved dialogs and state handling
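
The core of the change is replacing the loosely typed `{ factory_name, model_name }` payloads with the request-body interfaces from `@/interfaces/request/llm`, and routing deletes through a confirmation dialog plus `refreshLlmModel` instead of `window.location.reload()`. A minimal sketch of the new API-key payload follows; the interface is copied from the diff, while the helper name `buildSetApiKeyPayload` and the sample values are illustrative only and not part of the commit.

// Request-body shape introduced by this commit (copied from the diff):
export interface ISetApiKeyRequestBody {
  llm_factory: string;
  api_key: string;
  llm_name?: string;
  model_type?: string;
  base_url?: string;
}

// Illustrative helper (not part of the commit): mirrors how submitApiKey now
// builds the typed payload and only attaches base_url when it is non-empty.
export function buildSetApiKeyPayload(
  factoryName: string,
  apiKey: string,
  baseUrl?: string,
): ISetApiKeyRequestBody {
  const params: ISetApiKeyRequestBody = {
    llm_factory: factoryName,
    api_key: apiKey,
  };
  if (baseUrl && baseUrl.trim() !== '') {
    params.base_url = baseUrl;
  }
  return params;
}

// Example (illustrative values): buildSetApiKeyPayload('Ollama', 'sk-xxx', 'http://localhost:11434')
// returns { llm_factory: 'Ollama', api_key: 'sk-xxx', base_url: 'http://localhost:11434' }
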
@@ -53,7 +53,6 @@ export function useLlmModelSetting() {
  const [llmFactory, setLlmFactory] = useState<IFactory[]>([]);
  const [myLlm, setMyLlm] = useState<Record<LLMFactory, IMyLlmModel>>();

  useEffect(() => {
    const fetchLlmFactory = async () => {
      try {
        const res = await userService.llm_factories_list();
@@ -75,14 +74,21 @@ export function useLlmModelSetting() {
      throw error;
    }
  }

  useEffect(() => {
    fetchLlmFactory();
    fetchMyLlm();
  }, []); // empty dependency array: run only once on mount
  }, []);

  const refreshLlmModel = async () => {
    await fetchMyLlm();
    // await fetchLlmFactory();
    logger.info('刷新我的模型成功');
  }

  return {
    llmFactory,
    myLlm,
    refreshLlmModel,
  }
}

@@ -1,3 +1,11 @@
export interface ISetApiKeyRequestBody {
  llm_factory: string;
  api_key: string;
  llm_name?: string;
  model_type?: string;
  base_url?: string;
}

export interface IAddLlmRequestBody {
  llm_factory: string; // Ollama
  llm_name: string;

@@ -25,6 +25,7 @@ import { IconMap, type LLMFactory } from '@/constants/llm';
import type { ITenantInfo } from '@/interfaces/database/knowledge';
import type { LlmModelType } from '@/constants/knowledge';
import type { IMyLlmModel, IThirdOAIModel } from '@/interfaces/database/llm';
import logger from '@/utils/logger';

// Base dialog state
interface BaseDialogState {
@@ -164,6 +165,8 @@ export const ApiKeyDialog: React.FC<ApiKeyDialogProps> = ({
  });
  const [showApiKey, setShowApiKey] = React.useState(false);

  logger.info('ApiKeyDialog 初始化:', { open, editMode, factoryName, initialData });

  useEffect(() => {
    if (open && initialData) {
      reset(initialData);
@@ -664,6 +667,7 @@ export const SystemModelDialog: React.FC<SystemModelDialogProps> = ({
  onSubmit,
  loading,
  initialData,
  editMode = false,
  allModelOptions
}) => {
  const { control, handleSubmit, reset, formState: { errors } } = useForm<ITenantInfo>({
@@ -676,7 +680,6 @@ export const SystemModelDialog: React.FC<SystemModelDialogProps> = ({
  };

  // allModelOptions contains all of the options

  const llmOptions = useMemo(() => allModelOptions?.llmOptions || [], [allModelOptions]);
  const embdOptions = useMemo(() => allModelOptions?.embeddingOptions || [], [allModelOptions]);
  const img2txtOptions = useMemo(() => allModelOptions?.image2textOptions || [], [allModelOptions]);

@@ -1,4 +1,4 @@
import { useState, useCallback, useMemo } from 'react';
import { useState, useCallback, useMemo, useEffect } from 'react';
import { useMessage } from '@/hooks/useSnackbar';
import userService from '@/services/user_service';
import logger from '@/utils/logger';
@@ -11,6 +11,8 @@ import type {
import type { ITenantInfo } from '@/interfaces/database/knowledge';
import { useLlmList } from '@/hooks/llm-hooks';
import type { LlmModelType } from '@/constants/knowledge';
import { useUserData } from '@/hooks/useUserData';
import type { ISetApiKeyRequestBody } from '@/interfaces/request/llm';

// Dialog state management hook
export const useDialogState = () => {
@@ -20,7 +22,9 @@ export const useDialogState = () => {
  const [initialData, setInitialData] = useState<any>(null);

  const openDialog = useCallback((data?: any, isEdit = false) => {
    if (data != null) {
      setInitialData(data);
    }
    setEditMode(isEdit);
    setOpen(true);
  }, []);
@@ -55,19 +59,26 @@ export const useApiKeyDialog = () => {

  const submitApiKey = useCallback(async (data: ApiKeyFormData) => {
    dialogState.setLoading(true);
    logger.info('提交 API Key:', data);
    try {
      await userService.set_api_key({
        factory_name: factoryName,
        model_name: '', // adjust to actual requirements
        // api_key: data.api_key,
        ...data
      });
      const params: ISetApiKeyRequestBody = {
        llm_factory: factoryName,
        api_key: data.api_key,
      };

      if (data.base_url && data.base_url.trim() !== '') {
        params.base_url = data.base_url;
      }

      if (data.group_id && data.group_id.trim() !== '') {
        // params.group_id = data.group_id;
      }

      await userService.set_api_key(params);
      showMessage.success('API Key 配置成功');
      dialogState.closeDialog();
    } catch (error) {
      logger.error('API Key 配置失败:', error);
      showMessage.error('API Key 配置失败');
      throw error;
    } finally {
      dialogState.setLoading(false);
    }
@@ -91,8 +102,8 @@ export const useAzureOpenAIDialog = () => {
    try {
      // Call the Azure OpenAI-specific API
      await userService.set_api_key({
        factory_name: 'AzureOpenAI',
        model_name: data.deployment_name,
        llm_factory: 'AzureOpenAI',
        llm_name: data.deployment_name,
        api_key: data.api_key,
        // azure_endpoint: data.azure_endpoint,
        // api_version: data.api_version,
@@ -124,8 +135,8 @@ export const useBedrockDialog = () => {
    try {
      // Call the Bedrock-specific API
      await userService.set_api_key({
        factory_name: 'Bedrock',
        model_name: '',
        llm_factory: 'Bedrock',
        llm_name: '',
        api_key: '', // Bedrock uses an access key
        // access_key_id: data.access_key_id,
        // secret_access_key: data.secret_access_key,
@@ -158,8 +169,8 @@ export const useOllamaDialog = () => {
    try {
      // Call the add-LLM API
      await userService.add_llm({
        factory_name: 'Ollama',
        model_name: data.model_name,
        llm_factory: 'Ollama',
        llm_name: data.model_name,
        // base_url: data.base_url,
      });
      showMessage.success('Ollama 模型添加成功');
@@ -188,14 +199,12 @@ export const useDeleteOperations = () => {
    setLoading(true);
    try {
      await userService.delete_llm({
        factory_name: factoryName,
        model_name: modelName,
        llm_factory: factoryName,
        llm_name: modelName,
      });
      showMessage.success('模型删除成功');
    } catch (error) {
      logger.error('模型删除失败:', error);
      showMessage.error('模型删除失败');
      throw error;
    } finally {
      setLoading(false);
    }
@@ -205,13 +214,11 @@ export const useDeleteOperations = () => {
    setLoading(true);
    try {
      await userService.deleteFactory({
        factory_name: factoryName,
        llm_factory: factoryName,
      });
      showMessage.success('模型工厂删除成功');
    } catch (error) {
      logger.error('模型工厂删除失败:', error);
      showMessage.error('模型工厂删除失败');
      throw error;
    } finally {
      setLoading(false);
    }
@@ -231,6 +238,12 @@ export const useSystemModelSetting = () => {

  const { data: llmList } = useLlmList();

  const { tenantInfo, fetchTenantInfo } = useUserData();

  useEffect(() => {
    fetchTenantInfo();
  }, []);

  const getOptionsByModelType = useCallback((modelType: LlmModelType) => {
    return Object.entries(llmList)
      .filter(([, value]) =>
@@ -278,11 +291,16 @@ export const useSystemModelSetting = () => {

  const submitSystemModelSetting = useCallback(async (data: Partial<ITenantInfo>) => {
    dialogState.setLoading(true);
    logger.debug('submitSystemModelSetting data:', data);
    try {
      delete data.role;
      // Adjust here to match the actual API contract
      // await userService.setSystemDefaultModel(data);
      await userService.setTenantInfo({
        ...data,
      });
      showMessage.success('系统默认模型设置成功');
      dialogState.closeDialog();
      fetchTenantInfo();
    } catch (error) {
      logger.error('系统默认模型设置失败:', error);
      showMessage.error('系统默认模型设置失败');
@@ -296,6 +314,7 @@ export const useSystemModelSetting = () => {
    ...dialogState,
    submitSystemModelSetting,
    allModelOptions,
    initialData: tenantInfo,
  };
};

@@ -25,61 +25,82 @@ import {
} from '@mui/icons-material';
import { useLlmModelSetting } from '@/hooks/setting-hooks';
import { useModelDialogs } from './hooks/useModelDialogs';
import AppSvgIcon, { LlmSvgIcon } from '@/components/AppSvgIcon';
import { LLM_FACTORY_LIST, IconMap, type LLMFactory } from '@/constants/llm';
import type { IFactory, IMyLlmModel, ILlmItem } from '@/interfaces/database/llm';
import LLMFactoryCard, { MODEL_TYPE_COLORS } from './components/LLMFactoryCard';
import { ModelDialogs } from './components/ModelDialogs';
import { useDialog } from '@/hooks/useDialog';
import logger from '@/utils/logger';
import { useMessage } from '@/hooks/useSnackbar';

function MyLlmGridItem({ model, onDelete }: { model: ILlmItem, onDelete: (model: ILlmItem) => void }) {
  return (
    <Grid size={{ xs: 6, sm: 4, md: 3 }} key={model.name}>
      <Card variant="outlined" sx={{ p: 2 }}>
        <Box display="flex" justifyContent="space-between" alignItems="flex-start" mb={1}>
          <Typography variant="body2" fontWeight="bold">
            {model.name}
          </Typography>
          <Box>
            <IconButton
              size="small"
              color="error"
              onClick={() => onDelete(model)}
            >
              <DeleteIcon fontSize="small" />
            </IconButton>
          </Box>
        </Box>
        <Chip
          label={model.type}
          size="small"
          sx={{
            backgroundColor: MODEL_TYPE_COLORS[model.type.toUpperCase()] || '#757575',
            color: 'white',
            mb: 1,
          }}
        />
      </Card>
    </Grid>
  );
}

// Main page component
function ModelsPage() {
  const { llmFactory, myLlm } = useLlmModelSetting();
  const { llmFactory, myLlm, refreshLlmModel } = useLlmModelSetting();
  const modelDialogs = useModelDialogs();

  // Handle configuring a model factory
  const handleConfigureFactory = useCallback((factory: IFactory) => {
    // modelDialogs.openDialog(factory.name);
  }, [modelDialogs]);
    modelDialogs.apiKeyDialog.openApiKeyDialog(factory.name);
  }, [modelDialogs, refreshLlmModel]);

  // Handle deleting a model factory
  const handleDeleteFactory = useCallback(async (factoryName: string) => {
    try {
      // await modelDialogs.deleteOperations.deleteFactory(factoryName);
      // Refresh data
      window.location.reload();
    } catch (error) {
      console.error('删除工厂失败:', error);
    }
  }, []);
  const dialog = useDialog();

  // Handle deleting a single model
  const handleDeleteModel = useCallback(async (factoryName: string, modelName: string) => {
    try {
      // await modelDialogs.deleteOperations.deleteLlm(factoryName, modelName);
      // Refresh data
      window.location.reload();
    } catch (error) {
      console.error('删除模型失败:', error);
    }
  }, []);
    dialog.confirm({
      title: '确认删除',
      content: `是否确认删除模型 ${modelName}?`,
      showCancel: true,
      onConfirm: async () => {
        await modelDialogs.deleteOps.deleteLlm(factoryName, modelName);
        await refreshLlmModel();
      },
    });
  }, [dialog, refreshLlmModel]);

  // Handle editing a model
  const handleEditModel = useCallback((factory: IFactory, model: ILlmItem) => {
    // Set edit mode and open the dialog
    // modelDialogs.openDialog(factory.name, {
    // model_name: model.name,
    // api_base: model.api_base,
    // max_tokens: model.max_tokens,
    // });
  }, [modelDialogs]);

  // Get the model list for a given factory name
  const getModelsForFactory = (factoryName: LLMFactory): ILlmItem[] => {
    if (!myLlm) return [];
    const factoryGroup = myLlm[factoryName];
    return factoryGroup?.llm || [];
  };
  // Handle deleting a model factory
  const handleDeleteFactory = useCallback(async (factoryName: string) => {
    dialog.confirm({
      title: '确认删除',
      content: `是否确认删除模型工厂 ${factoryName}?`,
      showCancel: true,
      onConfirm: async () => {
        await modelDialogs.deleteOps.deleteFactory(factoryName);
        await refreshLlmModel();
      },
    });
  }, [dialog, refreshLlmModel]);

  if (!llmFactory || !myLlm) {
    return (
@@ -129,9 +150,13 @@ function ModelsPage() {
      <Grid size={12} key={factoryName}>
        <Card variant="outlined">
          <CardContent>
            <Box sx={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
              <Box>
                {/* Model factory name */}
                <Typography variant="h6" gutterBottom>
                  {factoryName}
                </Typography>
                {/* Model tags */}
                <Box display="flex" gap={1} mb={2}>
                  {group.tags.split(',').map((tag) => (
                    <Chip
@@ -145,52 +170,31 @@ function ModelsPage() {
                    />
                  ))}
                </Box>
              </Box>
              {/* edit and delete factory button */}
              <Box sx={{ display: 'flex', gap: 1 }}>
                <Button
                  variant='contained' color='primary' startIcon={<EditIcon />}
                  onClick={() => modelDialogs.apiKeyDialog.openApiKeyDialog(factoryName)}
                >
                  编辑
                </Button>
                <Button
                  variant='outlined' color='primary' startIcon={<DeleteIcon />}
                  onClick={() => handleDeleteFactory(factoryName)}
                >
                  删除
                </Button>
              </Box>
            </Box>
            {/* Model list */}
            <Grid container spacing={2}>
              {group.llm.map((model) => (
                <Grid size={{ xs: 12, sm: 6, md: 4 }} key={model.name}>
                  <Card variant="outlined" sx={{ p: 2 }}>
                    <Box display="flex" justifyContent="space-between" alignItems="flex-start" mb={1}>
                      <Typography variant="body2" fontWeight="bold">
                        {model.name}
                      </Typography>
                      <Box>
                        <IconButton
                          size="small"
                          onClick={() => handleEditModel({ name: factoryName } as IFactory, model)}
                        >
                          <EditIcon fontSize="small" />
                        </IconButton>
                        <IconButton
                          size="small"
                          color="error"
                          onClick={() => handleDeleteModel(factoryName, model.name)}
                        >
                          <DeleteIcon fontSize="small" />
                        </IconButton>
                      </Box>
                    </Box>
                    <Chip
                      label={model.type}
                      size="small"
                      sx={{
                        backgroundColor: MODEL_TYPE_COLORS[model.type.toUpperCase()] || '#757575',
                        color: 'white',
                        mb: 1,
                      }}
                <MyLlmGridItem
                  key={model.name}
                  model={model}
                  onDelete={() => handleDeleteModel(factoryName, model.name)}
                />
                    <Typography variant="caption" display="block" color="text.secondary">
                      Max Tokens: {model.max_tokens}
                    </Typography>
                    <Typography variant="caption" display="block" color="text.secondary">
                      Used: {model.used_token}
                    </Typography>
                    {model.api_base && (
                      <Typography variant="caption" display="block" color="text.secondary">
                        Base URL: {model.api_base}
                      </Typography>
                    )}
                  </Card>
                </Grid>
              ))}
            </Grid>
          </CardContent>
@@ -230,6 +234,7 @@ function ModelsPage() {
      </Box>

      {/* Model configuration dialogs */}
      {/* @ts-ignore */}
      <ModelDialogs {...modelDialogs} />
    </Box>
  );

@@ -3,6 +3,7 @@ import request, { post } from '@/utils/request';
import type { ITenantInfo } from '@/interfaces/database/knowledge';
import type { IUserInfo, ITenant } from '@/interfaces/database/user-setting';
import type { LlmModelType } from '@/constants/knowledge';
import type { IAddLlmRequestBody, IDeleteLlmRequestBody, ISetApiKeyRequestBody } from '@/interfaces/request/llm';

// User-related API service
const userService = {
@@ -53,7 +54,7 @@ const userService = {
  },

  // Set tenant info
  setTenantInfo: (data: ITenantInfo) => {
  setTenantInfo: (data: Partial<Omit<ITenantInfo, 'role'>>) => {
    return post(api.set_tenant_info, data);
  },
  // Tenant user management
@@ -95,22 +96,22 @@ const userService = {
  },

  // add llm
  add_llm: (data: { factory_name: string; model_name: string }) => {
  add_llm: (data: Partial<IAddLlmRequestBody>) => {
    return request.post(api.add_llm, data);
  },

  // delete llm
  delete_llm: (data: { factory_name: string; model_name: string }) => {
  delete_llm: (data: IDeleteLlmRequestBody) => {
    return request.post(api.delete_llm, data);
  },

  // delete factory
  deleteFactory: (data: { factory_name: string }) => {
  deleteFactory: (data: IDeleteLlmRequestBody) => {
    return request.post(api.deleteFactory, data);
  },

  // set api key
  set_api_key: (data: { factory_name: string; model_name: string; api_key: string }) => {
  set_api_key: (data: ISetApiKeyRequestBody) => {
    return request.post(api.set_api_key, data);
  },
};
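
A short usage sketch of the retyped service methods, for reference. `IDeleteLlmRequestBody` is only visible through its call sites in this diff, so treating `llm_name` as optional below is an assumption; the payloads mirror `useDeleteOperations`, and the concrete values are illustrative.

// Assumed shape of IDeleteLlmRequestBody: only its call sites appear in this
// diff, so llm_name being optional (deleteFactory omits it) is an inference.
export interface IDeleteLlmRequestBody {
  llm_factory: string;
  llm_name?: string;
}

// Payload shapes mirroring useDeleteOperations above; the model and factory
// names are illustrative values, not taken from the commit.
export const deleteModelPayload: IDeleteLlmRequestBody = {
  llm_factory: 'Ollama',
  llm_name: 'llama3:8b',
};

export const deleteFactoryPayload: IDeleteLlmRequestBody = {
  llm_factory: 'Ollama',
};

// userService.delete_llm(deleteModelPayload);
// userService.deleteFactory(deleteFactoryPayload);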