feat(llm-config): add generic configuration dialog for LLM providers

This commit is contained in:
2025-10-24 17:49:25 +08:00
parent edba1f049e
commit fd256025b3
6 changed files with 1173 additions and 76 deletions

View File

@@ -0,0 +1,310 @@
import React, { useEffect, useMemo } from 'react';
import {
Dialog,
DialogTitle,
DialogContent,
DialogActions,
Button,
TextField,
Box,
IconButton,
InputAdornment,
CircularProgress,
MenuItem,
Select,
FormControl,
InputLabel,
FormHelperText,
Link,
} from '@mui/material';
import { Visibility, VisibilityOff } from '@mui/icons-material';
import { Controller, useForm } from 'react-hook-form';
import type { IAddLlmRequestBody } from '@/interfaces/request/llm';
// Form-item configuration: describes one dynamically rendered field of the dialog.
export interface ConfigFormItem {
  /** Visible field label; required fields are rendered with a leading `*`. */
  label: string;
  /** Field name, used as the react-hook-form registration key. */
  name: string;
  /** Widget kind; 'password' renders a text field with a visibility toggle. */
  type: 'text' | 'number' | 'select' | 'textarea' | 'password';
  /** Choices for type 'select' only. */
  options?: { label: string; value: string | number }[];
  required?: boolean;
  placeholder?: string;
  /** Static helper text, shown when the field has no validation error. */
  helperText?: string;
  /** Extra react-hook-form rules, merged on top of the implicit required rule. */
  validation?: {
    pattern?: { value: RegExp; message: string };
    min?: { value: number; message: string };
    max?: { value: number; message: string };
  };
  /** Initial value, applied when caller-supplied defaults leave this field falsy. */
  defaultValue?: string | number;
}
// Form data shape: the standard add-LLM request payload plus provider extras.
export interface ConfigurationFormData extends IAddLlmRequestBody {
  // Extension field supporting provider-specific configuration (e.g. Azure).
  api_version?: string;
  // Provider-specific dynamic fields (bedrock_ak, yiyan_sk, ...) flow through
  // this index signature; it intentionally widens the type, so values must be
  // validated by the per-item form rules before submission.
  [key: string]: any;
}
// Documentation link shown on the left side of the dialog actions bar.
export interface DocLinkConfig {
  /** Target URL, opened in a new tab. */
  url: string;
  /** Visible link text. */
  text: string;
}
// Props for the generic configuration dialog.
export interface ConfigurationDialogProps {
  /** Whether the dialog is visible. */
  open: boolean;
  /** Called when the dialog should close (backdrop click or cancel). */
  onClose: () => void;
  /** Receives the form values after validation passes. */
  onSubmit: (data: Partial<ConfigurationFormData>) => void;
  /** Disables the action buttons and shows a spinner while true. */
  loading?: boolean;
  /** Dialog title; defaults to a generic "configure LLM" caption. */
  title?: string;
  /** Field definitions driving the dynamic form. */
  formItems: ConfigFormItem[];
  /** Caller-supplied initial values, merged over the built-in defaults. */
  defaultValues?: Partial<ConfigurationFormData>;
  /** Optional "how to integrate" link rendered in the actions bar. */
  docLink?: DocLinkConfig;
  /** When true the title is prefixed with an "edit" marker. */
  editMode?: boolean;
}
/**
 * Generic configuration dialog component.
 * Renders a dynamic form described by `formItems`, suitable for any LLM
 * provider configuration scenario. Values are managed by react-hook-form;
 * defaults come from `defaultValues` first, then each item's `defaultValue`.
 */
function ConfigurationDialog({
  open,
  onClose,
  onSubmit,
  loading = false,
  title = '配置 LLM',
  formItems,
  defaultValues = {},
  docLink,
  editMode = false,
}: ConfigurationDialogProps) {
  // Visibility toggle state for password fields, keyed by field name.
  const [passwordVisibility, setPasswordVisibility] = React.useState<Record<string, boolean>>({});

  // Initial form values: fixed base payload fields, then caller-supplied
  // defaults, then per-item defaults for fields that are still empty/falsy.
  const formDefaultValues = useMemo(() => {
    const defaults: Partial<ConfigurationFormData> = {
      model_type: '',
      llm_name: '',
      api_base: '',
      api_key: '',
      max_tokens: 0,
      llm_factory: '',
      ...defaultValues,
    };
    formItems.forEach((item) => {
      // Falsy check on purpose: item defaults also replace the '' / 0
      // placeholders seeded above, not only missing keys.
      if (item.defaultValue !== undefined && !defaults[item.name]) {
        defaults[item.name] = item.defaultValue;
      }
    });
    return defaults;
    // BUGFIX: formItems was missing from the dependency list, so defaults
    // went stale when the dialog was reused for a different provider.
  }, [defaultValues, formItems]);

  const {
    control,
    handleSubmit,
    reset,
    formState: { errors },
  } = useForm<ConfigurationFormData>({
    defaultValues: formDefaultValues,
  });

  // Reset the form each time the dialog opens so stale input is discarded.
  // BUGFIX: formDefaultValues was missing from the deps, so reopening with
  // new defaults kept resetting to the previous provider's values.
  useEffect(() => {
    if (open) {
      reset(formDefaultValues);
    }
  }, [open, reset, formDefaultValues]);

  const handleFormSubmit = (data: ConfigurationFormData) => {
    onSubmit(data);
  };

  const togglePasswordVisibility = (fieldName: string) => {
    setPasswordVisibility((prev) => ({
      ...prev,
      [fieldName]: !prev[fieldName],
    }));
  };

  // Required fields are marked with a leading asterisk in their label.
  const fieldLabel = (item: ConfigFormItem) =>
    item.required ? `* ${item.label}` : item.label;

  // Error messages come through an index signature, so TS cannot narrow
  // `message` to string; one typed accessor replaces per-usage @ts-ignore.
  const fieldError = (name: string): string | undefined =>
    errors[name]?.message as string | undefined;

  // Render a single form field according to its declared type.
  const renderFormField = (item: ConfigFormItem) => {
    const isPassword = item.type === 'password';
    const showPassword = passwordVisibility[item.name] ?? false;

    // Merge the implicit required rule with caller-provided validation.
    // Kept as `any` because react-hook-form's RegisterOptions type is not
    // imported in this file.
    const rules: any = {};
    if (item.required) {
      rules.required = `${item.label}是必填项`;
    }
    if (item.validation) {
      Object.assign(rules, item.validation);
    }

    switch (item.type) {
      case 'select':
        return (
          <Controller
            key={item.name}
            name={item.name}
            control={control}
            rules={rules}
            render={({ field }) => (
              <FormControl fullWidth margin="normal" error={!!errors[item.name]}>
                <InputLabel>{fieldLabel(item)}</InputLabel>
                <Select {...field} label={fieldLabel(item)}>
                  {item.options?.map((option) => (
                    <MenuItem key={option.value} value={option.value}>
                      {option.label}
                    </MenuItem>
                  ))}
                </Select>
                {errors[item.name] && (
                  <FormHelperText>{fieldError(item.name)}</FormHelperText>
                )}
              </FormControl>
            )}
          />
        );
      case 'textarea':
        return (
          <Controller
            key={item.name}
            name={item.name}
            control={control}
            rules={rules}
            render={({ field }) => (
              <TextField
                {...field}
                fullWidth
                label={fieldLabel(item)}
                margin="normal"
                multiline
                rows={4}
                placeholder={item.placeholder}
                error={!!errors[item.name]}
                helperText={fieldError(item.name) || item.helperText}
              />
            )}
          />
        );
      case 'number':
        return (
          <Controller
            key={item.name}
            name={item.name}
            control={control}
            rules={rules}
            render={({ field }) => (
              <TextField
                {...field}
                fullWidth
                label={fieldLabel(item)}
                type="number"
                margin="normal"
                placeholder={item.placeholder}
                error={!!errors[item.name]}
                helperText={fieldError(item.name) || item.helperText}
                // Coerce the input's string value so min/max rules compare numbers.
                onChange={(e) => field.onChange(Number(e.target.value))}
              />
            )}
          />
        );
      case 'password':
      case 'text':
      default:
        return (
          <Controller
            key={item.name}
            name={item.name}
            control={control}
            rules={rules}
            render={({ field }) => (
              <TextField
                {...field}
                fullWidth
                label={fieldLabel(item)}
                type={isPassword && !showPassword ? 'password' : 'text'}
                margin="normal"
                placeholder={item.placeholder}
                error={!!errors[item.name]}
                helperText={fieldError(item.name) || item.helperText}
                InputProps={
                  isPassword
                    ? {
                        endAdornment: (
                          <InputAdornment position="end">
                            <IconButton
                              aria-label={`toggle ${item.name} visibility`}
                              onClick={() => togglePasswordVisibility(item.name)}
                              edge="end"
                            >
                              {showPassword ? <VisibilityOff /> : <Visibility />}
                            </IconButton>
                          </InputAdornment>
                        ),
                      }
                    : undefined
                }
              />
            )}
          />
        );
    }
  };

  return (
    <Dialog open={open} onClose={onClose} maxWidth="sm" fullWidth>
      <DialogTitle>
        {editMode ? '编辑' : ''} {title}
      </DialogTitle>
      <DialogContent>
        <Box component="form" sx={{ mt: 2 }}>
          {formItems.map(renderFormField)}
        </Box>
      </DialogContent>
      <DialogActions>
        <Box sx={{ display: 'flex', justifyContent: 'space-between', width: '100%' }}>
          {/* Documentation link on the left */}
          {docLink && (
            <Link
              href={docLink.url}
              target="_blank"
              rel="noopener noreferrer"
              sx={{ alignSelf: 'center', textDecoration: 'none', ml: 2 }}
            >
              {docLink.text}
            </Link>
          )}
          {/* Action buttons on the right */}
          <Box sx={{ ml: 'auto' }}>
            {/* BUGFIX: both buttons previously rendered with no label text */}
            <Button onClick={onClose} disabled={loading} sx={{ mr: 1 }}>
              取消
            </Button>
            <Button
              onClick={handleSubmit(handleFormSubmit)}
              variant="contained"
              disabled={loading}
              startIcon={loading ? <CircularProgress size={20} /> : null}
            >
              确定
            </Button>
          </Box>
        </Box>
      </DialogActions>
    </Dialog>
  );
}
export default ConfigurationDialog;

View File

@@ -91,19 +91,20 @@ function OllamaDialog({
initialData,
editMode = false,
}: OllamaDialogProps) {
const {
control,
handleSubmit,
reset,
formState: { errors },
reset,
} = useForm<OllamaFormData>({
defaultValues: {
model_type: 'chat',
llm_name: '',
api_base: 'http://localhost:11434',
api_key: '',
max_tokens: 4096,
llm_factory: 'Ollama',
api_base: initialData?.api_base,
api_key: initialData?.api_key,
max_tokens: initialData?.max_tokens,
llm_factory: initialData?.llm_factory || 'Ollama',
},
});
@@ -139,9 +140,6 @@ function OllamaDialog({
return MODEL_TYPE_OPTIONS;
}, [initialData])
logger.debug('OllamaDialog', { open, initialData, editMode });
// 当对话框打开或初始数据变化时重置表单
useEffect(() => {
if (open) {
reset({
@@ -153,7 +151,7 @@ function OllamaDialog({
llm_factory: initialData?.llm_factory || 'Ollama',
});
}
}, [open, initialData, reset]);
}, [open]);
const handleFormSubmit = (data: OllamaFormData) => {
onSubmit(data);

View File

@@ -0,0 +1,742 @@
import type { ConfigFormItem, DocLinkConfig } from './ConfigurationDialog';
import { LLM_FACTORY_LIST } from '@/constants/llm';
// AWS regions offered for Bedrock (options of the 'bedrock_region' select).
// List order is preserved as-is in the dropdown; 'us-east-1' is the default
// selected value (see BEDROCK_CONFIG).
export const BEDROCK_REGIONS = [
  'us-east-2',
  'us-east-1',
  'us-west-1',
  'us-west-2',
  'af-south-1',
  'ap-east-1',
  'ap-south-2',
  'ap-southeast-3',
  'ap-southeast-5',
  'ap-southeast-4',
  'ap-south-1',
  'ap-northeast-3',
  'ap-northeast-2',
  'ap-southeast-1',
  'ap-southeast-2',
  'ap-east-2',
  'ap-southeast-7',
  'ap-northeast-1',
  'ca-central-1',
  'ca-west-1',
  'eu-central-1',
  'eu-west-1',
  'eu-west-2',
  'eu-south-1',
  'eu-west-3',
  'eu-south-2',
  'eu-north-1',
  'eu-central-2',
  'il-central-1',
  'mx-central-1',
  'me-south-1',
  'me-central-1',
  'sa-east-1',
  'us-gov-east-1',
  'us-gov-west-1',
];
// Model-type choices shared by the provider configs.
// NOTE: order matters — several configs take prefixes of this list via
// slice() (e.g. Bedrock uses the first two, Baidu YiYan the first three).
export const MODEL_TYPE_OPTIONS = ([
  ['chat', 'Chat'],
  ['embedding', 'Embedding'],
  ['rerank', 'Rerank'],
  ['image2text', 'Image2Text'],
  ['speech2text', 'Speech2Text'],
  ['tts', 'TTS'],
] as const).map(([value, label]) => ({ value, label }));
// Maps an LLM factory id to its "how to integrate" documentation link,
// rendered in the dialog's actions bar. Factories absent from this map
// (e.g. BaiduYiYan, GoogleCloud, TencentHunYuan, XunFeiSpark) simply render
// no link — getLLMConfig passes docLink as undefined for them.
export const DOC_LINKS: Record<string, DocLinkConfig> = {
  [LLM_FACTORY_LIST.AzureOpenAI]: {
    url: 'https://azure.microsoft.com/en-us/products/ai-services/openai-service',
    text: '如何集成 Azure OpenAI',
  },
  [LLM_FACTORY_LIST.Bedrock]: {
    url: 'https://console.aws.amazon.com/',
    text: '如何集成 Bedrock',
  },
  [LLM_FACTORY_LIST.Ollama]: {
    url: 'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx',
    text: '如何集成 Ollama',
  },
  [LLM_FACTORY_LIST.Xinference]: {
    url: 'https://inference.readthedocs.io/en/latest/user_guide',
    text: '如何集成 Xinference',
  },
  [LLM_FACTORY_LIST.ModelScope]: {
    url: 'https://www.modelscope.cn/docs/model-service/API-Inference/intro',
    text: '如何集成 ModelScope',
  },
  [LLM_FACTORY_LIST.LocalAI]: {
    url: 'https://localai.io/docs/getting-started/models/',
    text: '如何集成 LocalAI',
  },
  [LLM_FACTORY_LIST.LMStudio]: {
    url: 'https://lmstudio.ai/docs/basics',
    text: '如何集成 LMStudio',
  },
  [LLM_FACTORY_LIST.OpenAiAPICompatible]: {
    url: 'https://platform.openai.com/docs/models/gpt-4',
    text: '如何集成 OpenAI API Compatible',
  },
  [LLM_FACTORY_LIST.TogetherAI]: {
    url: 'https://docs.together.ai/docs/deployment-options',
    text: '如何集成 TogetherAI',
  },
  [LLM_FACTORY_LIST.Replicate]: {
    url: 'https://replicate.com/docs/topics/deployments',
    text: '如何集成 Replicate',
  },
  [LLM_FACTORY_LIST.OpenRouter]: {
    url: 'https://openrouter.ai/docs',
    text: '如何集成 OpenRouter',
  },
  [LLM_FACTORY_LIST.HuggingFace]: {
    url: 'https://huggingface.co/docs/text-embeddings-inference/quick_tour',
    text: '如何集成 HuggingFace',
  },
  [LLM_FACTORY_LIST.GPUStack]: {
    url: 'https://docs.gpustack.ai/latest/quickstart',
    text: '如何集成 GPUStack',
  },
  [LLM_FACTORY_LIST.VLLM]: {
    url: 'https://docs.vllm.ai/en/latest/',
    text: '如何集成 VLLM',
  },
  [LLM_FACTORY_LIST.FishAudio]: {
    url: 'https://www.fish.audio/',
    text: '如何集成 Fish Audio',
  },
  [LLM_FACTORY_LIST.TencentCloud]: {
    url: 'https://cloud.tencent.com/document/api/1093/37823',
    text: '如何集成 腾讯云语音识别',
  },
  [LLM_FACTORY_LIST.VolcEngine]: {
    url: 'https://www.volcengine.com/docs/82379/1302008',
    text: '如何集成 VolcEngine',
  },
};
// Azure OpenAI configuration form definition.
export const AZURE_OPENAI_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    options: [
      { value: 'chat', label: 'Chat' },
      { value: 'embedding', label: 'Embedding' },
      { value: 'image2text', label: 'Image2Text' },
    ],
    // NOTE(review): default 'embedding' looks inconsistent with the default
    // llm_name 'gpt-3.5-turbo' (a chat model) below — confirm intended default.
    defaultValue: 'embedding',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: 'gpt-3.5-turbo',
    helperText: '请输入模型名称',
    defaultValue: 'gpt-3.5-turbo',
  },
  {
    name: 'api_base',
    label: '基础 Url',
    type: 'text',
    required: true,
    placeholder: 'https://your-resource.openai.azure.com/',
    helperText: 'Azure OpenAI 服务的端点 URL',
    validation: {
      // Accepts any http(s) URL.
      pattern: {
        value: /^https?:\/\/.+/,
        message: '基础 URL 必须是有效的 URL',
      },
    },
  },
  {
    // Optional: helper text tells users of locally deployed models to skip it.
    name: 'api_key',
    label: 'API-Key',
    type: 'password',
    helperText: '输入api key如果是本地部署的模型请忽略',
  },
  {
    name: 'api_version',
    label: 'API Version',
    type: 'text',
    required: true,
    placeholder: '2024-02-01',
    helperText: 'Azure OpenAI API 版本',
    defaultValue: '2024-02-01',
  },
  {
    name: 'max_tokens',
    label: '最大token数',
    type: 'number',
    required: true,
    placeholder: '设置了模型输出的最大长度以token单词片段的数量表示',
    helperText: '设置了模型输出的最大长度以token单词片段的数量表示',
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: '最大token数必须大于0' },
      max: { value: 100000, message: '最大token数不能超过100000' },
    },
  },
];
// AWS Bedrock configuration form definition.
export const BEDROCK_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    options: MODEL_TYPE_OPTIONS.slice(0, 2), // only chat and embedding are supported
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: '请输入模型名称',
  },
  {
    name: 'bedrock_ak',
    label: 'ACCESS KEY',
    type: 'password',
    required: true,
    placeholder: '请输入 ACCESS KEY',
  },
  {
    name: 'bedrock_sk',
    label: 'SECRET KEY',
    type: 'password',
    required: true,
    placeholder: '请输入 SECRET KEY',
  },
  {
    name: 'bedrock_region',
    label: 'AWS Region',
    type: 'select',
    required: true,
    options: BEDROCK_REGIONS.map(region => ({ value: region, label: region })),
    defaultValue: 'us-east-1',
  },
  {
    name: 'max_tokens',
    label: '最大token数',
    type: 'number',
    required: true,
    placeholder: '这设置了模型输出的最大长度以token单词或词片段的数量来衡量',
    helperText: '这设置了模型输出的最大长度以token单词或词片段的数量来衡量',
    defaultValue: 4096,
    // NOTE(review): unlike the other configs this one has no max bound —
    // confirm whether that is intentional for Bedrock.
    validation: {
      min: { value: 1, message: '最大token数必须大于0' },
    },
  },
];
// Ollama configuration form definition. getLLMConfig reuses this list for
// every local / OpenAI-compatible provider, overriding the items at array
// positions [0] (model_type options) and [2] (api_base default) — keep
// model_type first and api_base third, or update getLLMConfig accordingly.
export const OLLAMA_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    options: MODEL_TYPE_OPTIONS,
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: '例如: llama2, mistral',
    helperText: '请输入模型名称',
  },
  {
    name: 'api_base',
    label: '基础 URL',
    type: 'text',
    required: true,
    // NOTE(review): placeholder shows port 8888 while the default below is
    // Ollama's 11434 — confirm which port the placeholder should show.
    placeholder: 'http://localhost:8888',
    helperText: '基础 URL',
    defaultValue: 'http://localhost:11434',
    validation: {
      pattern: {
        value: /^https?:\/\/.+/,
        message: '基础 URL 必须是有效的 URL',
      },
    },
  },
  {
    // Optional credential for secured deployments (no `required` flag).
    name: 'api_key',
    label: 'API Key',
    type: 'text',
    placeholder: '如果需要认证,请输入 API Key',
    helperText: 'API Key (可选)',
  },
  {
    name: 'max_tokens',
    label: '最大 Token 数',
    type: 'number',
    required: true,
    placeholder: '4096',
    helperText: '模型支持的最大 Token 数',
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: '最大 Token 数必须大于 0' },
      max: { value: 100000, message: '最大 Token 数不能超过 100000' },
    },
  },
];
// Baidu YiYan configuration form definition.
export const BAIDU_YIYAN_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    // First three options of MODEL_TYPE_OPTIONS: chat, embedding, rerank.
    options: MODEL_TYPE_OPTIONS.slice(0, 3),
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: '请输入模型名称',
  },
  {
    name: 'yiyan_ak',
    label: '一言 API KEY',
    // NOTE(review): rendered as plain text while yiyan_sk below is masked —
    // confirm whether the API key should also use type 'password'.
    type: 'text',
    required: true,
    placeholder: '请输入 API KEY',
    helperText: 'Baidu YiYan API KEY',
  },
  {
    name: 'yiyan_sk',
    label: '一言 Secret KEY',
    type: 'password',
    required: true,
    placeholder: '请输入 Secret KEY',
    helperText: 'Baidu YiYan Secret KEY',
  },
  {
    name: 'max_tokens',
    label: '最大token数',
    type: 'number',
    required: true,
    placeholder: '设置了模型输出的最大长度以token单词片段的数量表示',
    helperText: '设置了模型输出的最大长度以token单词片段的数量表示',
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: '最大token数必须大于0' },
      max: { value: 100000, message: '最大token数不能超过100000' },
    },
  },
];
// Fish Audio (text-to-speech only) configuration form definition.
export const FISH_AUDIO_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    // Fish Audio provides TTS models only.
    options: [{ value: 'tts', label: 'TTS' }],
    defaultValue: 'tts',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: '请输入模型名称',
  },
  {
    name: 'fish_audio_ak',
    label: 'Fish Audio API KEY',
    // NOTE(review): rendered as plain text while other API keys use the
    // masked 'password' type — confirm whether it should be masked too.
    type: 'text',
    required: true,
    placeholder: '请输入 API KEY',
    helperText: 'Fish Audio API KEY',
  },
  {
    name: 'fish_audio_refid',
    // BUGFIX: user-facing typo "Refrence" -> "Reference" (label, placeholder
    // and helper text).
    label: 'FishAudio Reference ID',
    type: 'text',
    required: true,
    placeholder: '请输入 Reference ID',
    helperText: 'Fish Audio Reference ID',
  },
  {
    name: 'max_tokens',
    label: '最大token数',
    type: 'number',
    required: true,
    placeholder: '设置了模型输出的最大长度以token单词片段的数量表示',
    helperText: '设置了模型输出的最大长度以token单词片段的数量表示',
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: '最大token数必须大于0' },
      max: { value: 100000, message: '最大token数不能超过100000' },
    },
  },
];
// Google Cloud (Vertex AI) configuration form definition.
export const GOOGLE_CLOUD_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    options: [{ value: 'chat', label: 'Chat' }, { value: 'image2text', label: 'Image2Text' }],
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: '请输入模型名称',
  },
  {
    name: 'google_project_id',
    label: 'Project ID',
    type: 'text',
    required: true,
    placeholder: '请输入 Project ID',
    helperText: 'Google Cloud Project ID',
  },
  {
    name: 'google_region',
    label: 'Google Cloud 区域',
    type: 'text',
    required: true,
    placeholder: '请输入 Google Cloud 区域',
    helperText: 'Google Cloud 区域',
  },
  {
    name: 'google_service_account_key',
    label: 'Google Cloud Service Account Key',
    // NOTE(review): a single-line plain-text field; service-account keys are
    // secrets and often multi-line JSON — consider 'password' or 'textarea'.
    type: 'text',
    required: true,
    placeholder: '请输入 Google Cloud Service Account Key',
    helperText: 'Google Cloud Service Account Key',
  },
  {
    name: 'max_tokens',
    label: '最大token数',
    type: 'number',
    required: true,
    placeholder: '设置了模型输出的最大长度以token单词片段的数量表示',
    helperText: '设置了模型输出的最大长度以token单词片段的数量表示',
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: '最大token数必须大于0' },
      max: { value: 100000, message: '最大token数不能超过100000' },
    },
  },
]
// Tencent Cloud ASR (speech-to-text) configuration form definition.
// NOTE(review): no max_tokens item here — presumably irrelevant for ASR;
// confirm the backend accepts the payload without it.
export const TENCENT_CLOUD_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    options: [{ value: 'speech2text', label: 'Speech2Text' }],
    defaultValue: 'speech2text',
  },
  {
    // The "model name" is selected from Tencent ASR engine codes rather
    // than entered as free text.
    name: 'llm_name',
    label: '模型名称',
    type: 'select',
    required: true,
    options: [
      '16k_zh', '16k_zh_large', '16k_multi_lang', '16k_zh_dialect', '16k_en', '16k_yue', '16k_zh-PY',
      '16k_ja', '16k_ko', '16k_vi', '16k_ms', '16k_id', '16k_fil', '16k_th', '16k_pt', '16k_tr',
      '16k_ar', '16k_es', '16k_hi', '16k_fr', '16k_zh_medical', '16k_de'
    ].map((item) => ({ value: item, label: item })),
    defaultValue: '16k_zh',
  },
  {
    name: 'tencent_ak',
    label: '腾讯云 Secret ID',
    type: 'text',
    required: true,
    placeholder: '请输入 Secret ID',
    helperText: '腾讯云 Secret ID',
  },
  {
    name: 'tencent_sk',
    label: '腾讯云 Secret KEY',
    type: 'password',
    required: true,
    placeholder: '请输入 Secret KEY',
    helperText: '腾讯云 Secret KEY',
  },
]
// Tencent HunYuan configuration form definition. Unlike most providers this
// form only collects credentials (no model_type / llm_name / max_tokens
// items) — presumably the backend derives those; TODO confirm.
export const TENCENT_HUNYUAN_CONFIG: ConfigFormItem[] = [
  {
    name: 'hunyuan_sid',
    label: '混元 Secret ID',
    type: 'text',
    required: true,
    placeholder: '请输入 Secret ID',
    helperText: '混元 Secret ID',
  },
  {
    name: 'hunyuan_sk',
    label: '混元 Secret KEY',
    // BUGFIX: was 'text', which displayed the secret in plaintext; every
    // other *_sk secret field in this file uses the masked 'password' type.
    type: 'password',
    required: true,
    placeholder: '请输入 Secret KEY',
    helperText: '混元 Secret KEY',
  },
];
// XunFei Spark configuration form definition.
export const XUNFEI_SPARK_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    options: [{ value: 'chat', label: 'Chat' }, { value: 'tts', label: 'TTS' }],
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: '请输入模型名称',
  },
  {
    name: 'xunfei_spark_password',
    label: '讯飞星火 API Password',
    // BUGFIX: was 'text'; an API password is a secret and should be masked
    // like the other credential fields in this file.
    type: 'password',
    required: true,
    placeholder: '请输入 API Password',
    helperText: '讯飞星火 API Password',
  },
  {
    name: 'max_tokens',
    label: '最大token数',
    type: 'number',
    required: true,
    placeholder: '设置了模型输出的最大长度以token单词片段的数量表示',
    helperText: '设置了模型输出的最大长度以token单词片段的数量表示',
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: '最大token数必须大于0' },
      max: { value: 100000, message: '最大token数不能超过100000' },
    },
  },
];
// VolcEngine (ByteDance Ark) configuration form definition.
export const VOLC_ENGINE_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: '模型类型',
    type: 'select',
    required: true,
    options: [{ value: 'chat', label: 'Chat' }, { value: 'embedding', label: 'Embedding' }],
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: '模型名称',
    type: 'text',
    required: true,
    placeholder: '请输入模型名称',
  },
  {
    // VolcEngine addresses deployed models by endpoint id.
    name: 'endpoint_id',
    label: '模型 EndpointID',
    type: 'text',
    required: true,
    placeholder: '请输入 EndpointID',
    helperText: '模型 EndpointID',
  },
  {
    name: 'ark_api_key',
    label: '火山 ARK_API_KEY',
    type: 'password',
    required: true,
    placeholder: '请输入 ARK_API_KEY',
    helperText: '模型 ARK_API_KEY',
  },
  {
    name: 'max_tokens',
    label: '最大token数',
    type: 'number',
    required: true,
    placeholder: '设置了模型输出的最大长度以token单词片段的数量表示',
    helperText: '设置了模型输出的最大长度以token单词片段的数量表示',
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: '最大token数必须大于0' },
      max: { value: 100000, message: '最大token数不能超过100000' },
    },
  },
]
/**
 * Resolve the configuration-dialog definition for an LLM factory id: the
 * form items to render, the optional documentation link, the dialog title,
 * and the base default values (always includes llm_factory).
 *
 * Providers with bespoke forms are looked up directly; every other factory
 * (Ollama, Xinference, LocalAI, LMStudio, HuggingFace, GPUStack, VLLM,
 * OpenAI-compatible endpoints, unknown ids, ...) falls back to the shared
 * Ollama form with per-factory model-type options and default API base.
 */
export function getLLMConfig(factory: string): {
  formItems: ConfigFormItem[];
  docLink?: DocLinkConfig;
  title: string;
  defaultValues: Record<string, any>;
} {
  const docLink: DocLinkConfig | undefined = DOC_LINKS[factory];
  const defaultValues: Record<string, any> = { llm_factory: factory };

  // Providers with a fully bespoke form definition.
  const dedicated: Record<string, { formItems: ConfigFormItem[]; title: string }> = {
    [LLM_FACTORY_LIST.AzureOpenAI]: { formItems: AZURE_OPENAI_CONFIG, title: 'Azure OpenAI' },
    [LLM_FACTORY_LIST.Bedrock]: { formItems: BEDROCK_CONFIG, title: 'Bedrock' },
    [LLM_FACTORY_LIST.BaiduYiYan]: { formItems: BAIDU_YIYAN_CONFIG, title: 'Baidu YiYan' },
    [LLM_FACTORY_LIST.FishAudio]: { formItems: FISH_AUDIO_CONFIG, title: 'Fish Audio' },
    [LLM_FACTORY_LIST.GoogleCloud]: { formItems: GOOGLE_CLOUD_CONFIG, title: 'Google Cloud' },
    [LLM_FACTORY_LIST.TencentCloud]: { formItems: TENCENT_CLOUD_CONFIG, title: 'Tencent Cloud' },
    [LLM_FACTORY_LIST.TencentHunYuan]: { formItems: TENCENT_HUNYUAN_CONFIG, title: 'Tencent HunYuan' },
    [LLM_FACTORY_LIST.XunFeiSpark]: { formItems: XUNFEI_SPARK_CONFIG, title: 'XunFei Spark' },
    [LLM_FACTORY_LIST.VolcEngine]: { formItems: VOLC_ENGINE_CONFIG, title: 'Volc Engine' },
  };
  const bespoke = dedicated[factory];
  if (bespoke) {
    return { ...bespoke, docLink, defaultValues };
  }

  // Per-factory restrictions on the model-type dropdown; factories without
  // an entry get the full option list. (Xinference previously carried an
  // explicit list identical to MODEL_TYPE_OPTIONS, so it needs no entry.)
  const modelTypeOverrides: Record<string, { value: string; label: string }[]> = {
    [LLM_FACTORY_LIST.HuggingFace]: [
      { value: 'embedding', label: 'Embedding' },
      { value: 'chat', label: 'Chat' },
      { value: 'rerank', label: 'Rerank' },
    ],
    [LLM_FACTORY_LIST.ModelScope]: [{ value: 'chat', label: 'Chat' }],
    // First four options: chat, embedding, rerank, image2text.
    [LLM_FACTORY_LIST.GPUStack]: MODEL_TYPE_OPTIONS.slice(0, 4),
  };
  const modelTypeOptions = modelTypeOverrides[factory] ?? MODEL_TYPE_OPTIONS;

  // Default API base per factory; falls back to Ollama's local port.
  const apiBaseOverrides: Record<string, string> = {
    [LLM_FACTORY_LIST.Xinference]: 'http://localhost:9997',
    [LLM_FACTORY_LIST.LocalAI]: 'http://localhost:8080',
    [LLM_FACTORY_LIST.LMStudio]: 'http://localhost:1234',
  };
  const defaultApiBase = apiBaseOverrides[factory] ?? 'http://localhost:11434';

  // FIX: override items by field name instead of by array position
  // (previously ollamaConfig[0]/[2]), which broke silently if OLLAMA_CONFIG
  // was ever reordered. Also avoids `let` declarations inside an unbraced
  // switch case (no-case-declarations).
  const formItems = OLLAMA_CONFIG.map((item) => {
    if (item.name === 'model_type') {
      return { ...item, options: modelTypeOptions };
    }
    if (item.name === 'api_base') {
      return { ...item, defaultValue: defaultApiBase };
    }
    return item;
  });

  return {
    formItems,
    docLink,
    title: factory,
    defaultValues,
  };
}

View File

@@ -6,6 +6,7 @@ import AzureOpenAIDialog, { type AzureOpenAIFormData, type AzureOpenAIDialogProp
import BedrockDialog, { type BedrockFormData, type BedrockDialogProps, BEDROCK_REGIONS } from './Dialog/BedrockDialog';
import OllamaDialog, { type OllamaFormData, type OllamaDialogProps } from './Dialog/OllamaDialog';
import SystemModelDialog, { type SystemModelFormData, type SystemModelDialogProps, type ModelOption, type ModelGroup } from './Dialog/SystemModelDialog';
import ConfigurationDialog, { type ConfigurationFormData, type ConfigurationDialogProps, type ConfigFormItem, type DocLinkConfig } from './Dialog/ConfigurationDialog';
@@ -18,10 +19,10 @@ export interface BaseDialogProps {
}
// 导出所有表单数据接口
export type { ApiKeyFormData, AzureOpenAIFormData, BedrockFormData, OllamaFormData, SystemModelFormData };
export type { ApiKeyFormData, AzureOpenAIFormData, BedrockFormData, OllamaFormData, SystemModelFormData, ConfigurationFormData };
// 导出所有对话框 Props 接口
export type { ApiKeyDialogProps, AzureOpenAIDialogProps, BedrockDialogProps, OllamaDialogProps, SystemModelDialogProps };
export type { ApiKeyDialogProps, AzureOpenAIDialogProps, BedrockDialogProps, OllamaDialogProps, SystemModelDialogProps, ConfigurationDialogProps };
// 导出其他相关接口和常量
export type { ModelOption, ModelGroup };
@@ -62,6 +63,20 @@ export interface ModelDialogsProps {
initialData?: OllamaFormData;
editMode?: boolean;
};
configurationDialog: {
open: boolean;
closeDialog: () => void;
submitConfiguration: (data: Partial<ConfigurationFormData>) => void;
loading: boolean;
editMode?: boolean;
llmFactory: string;
config: {
formItems: ConfigFormItem[];
docLink: DocLinkConfig;
title: string;
defaultValues: Record<string, any>;
} | null;
};
systemDialog: {
open: boolean;
closeDialog: () => void;
@@ -81,6 +96,7 @@ export const ModelDialogs: React.FC<ModelDialogsProps> = ({
azureDialog,
bedrockDialog,
ollamaDialog,
configurationDialog,
systemDialog,
}) => {
return (
@@ -126,6 +142,18 @@ export const ModelDialogs: React.FC<ModelDialogsProps> = ({
editMode={ollamaDialog.editMode}
/>
{/* 通用配置对话框 */}
<ConfigurationDialog
open={configurationDialog.open}
onClose={configurationDialog.closeDialog}
onSubmit={configurationDialog.submitConfiguration}
loading={configurationDialog.loading}
editMode={configurationDialog.editMode}
formItems={configurationDialog.config?.formItems || []}
docLink={configurationDialog.config?.docLink}
title={configurationDialog.config?.title || configurationDialog.llmFactory}
/>
{/* 系统默认模型设置对话框 */}
<SystemModelDialog
open={systemDialog.open}

View File

@@ -13,7 +13,9 @@ import type { ITenantInfo } from '@/interfaces/database/knowledge';
import { useLlmList } from '@/hooks/llm-hooks';
import type { LlmModelType } from '@/constants/knowledge';
import { useUserData } from '@/hooks/useUserData';
import type { ISetApiKeyRequestBody } from '@/interfaces/request/llm';
import type { ISetApiKeyRequestBody, IAddLlmRequestBody } from '@/interfaces/request/llm';
import type { ConfigFormItem, ConfigurationFormData, DocLinkConfig } from '../components/Dialog/ConfigurationDialog';
import { getLLMConfig } from '../components/Dialog/llmConfigs';
// 对话框状态管理 hook
export const useDialogState = () => {
@@ -45,6 +47,68 @@ export const useDialogState = () => {
openDialog,
closeDialog,
};
}
// Generic configuration-dialog state hook: tracks which LLM factory is being
// configured, resolves its dialog definition via getLLMConfig, and submits
// the resulting form data through userService.add_llm.
export const useConfigurationDialog = (onSuccess?: () => void) => {
  const dialogState = useDialogState();
  const showMessage = useMessage();
  // Factory id currently being configured; also sent as llm_factory on submit.
  const [llmFactory, setLlmFactory] = useState('');
  // Resolved dialog definition for the current factory; null until opened.
  const [config, setConfig] = useState<{
    formItems: ConfigFormItem[];
    docLink?: DocLinkConfig;
    title: string;
    defaultValues: Record<string, any>;
  } | null>(null);
  // Open the dialog for `factory`, merging the factory's default values with
  // any caller-provided data (edit mode passes the existing model's values).
  // NOTE(review): if useDialogState returns a new object each render, this
  // callback is re-created every render — confirm whether its return value
  // should be memoized.
  const openConfigurationDialog = useCallback((factory: string, data?: Partial<ConfigurationFormData>, isEdit = false) => {
    setLlmFactory(factory);
    const llmConfig = getLLMConfig(factory);
    setConfig(llmConfig);
    // Merge defaults with the supplied data (caller data wins).
    const mergedData = {
      ...llmConfig.defaultValues,
      ...data,
    };
    dialogState.openDialog(mergedData, isEdit);
  }, [dialogState]);
  // Submit the configuration to the backend; closes the dialog and invokes
  // onSuccess on success, shows an error message and rethrows on failure.
  const submitConfiguration = useCallback(async (data: Partial<ConfigurationFormData>) => {
    dialogState.setLoading(true);
    logger.info('提交配置:', data);
    try {
      // Build the request payload; llm_factory always reflects the factory
      // the dialog was opened for.
      const params: Partial<IAddLlmRequestBody> = {
        ...data,
        llm_factory: llmFactory,
      };
      await userService.add_llm(params);
      showMessage.success(`${config?.title || llmFactory} 配置成功`);
      dialogState.closeDialog();
      // Success callback, e.g. to refresh the model list.
      if (onSuccess) {
        onSuccess();
      }
    } catch (error) {
      logger.error(`${config?.title || llmFactory} 配置失败:`, error);
      showMessage.error(`${config?.title || llmFactory} 配置失败`);
      throw error;
    } finally {
      dialogState.setLoading(false);
    }
  }, [llmFactory, config, dialogState, showMessage, onSuccess]);
  return {
    ...dialogState,
    llmFactory,
    config,
    openConfigurationDialog,
    submitConfiguration,
  };
};
// API Key 对话框管理
@@ -352,33 +416,22 @@ export const useModelDialogs = (onSuccess?: () => void) => {
const azureDialog = useAzureOpenAIDialog();
const bedrockDialog = useBedrockDialog();
const ollamaDialog = useOllamaDialog();
const configurationDialog = useConfigurationDialog(onSuccess);
const systemDialog = useSystemModelSetting(onSuccess);
const deleteOps = useDeleteOperations(onSuccess);
// 根据工厂类型打开对应的对话框
const openFactoryDialog = useCallback((factoryName: string, data?: any, isEdit = false) => {
switch (factoryName.toLowerCase()) {
case 'azureopenai':
azureDialog.openDialog(data, isEdit);
break;
case 'bedrock':
bedrockDialog.openDialog(data, isEdit);
break;
case 'ollama':
ollamaDialog.openDialog(data, isEdit);
break;
default:
// 默认使用 API Key 对话框
apiKeyDialog.openApiKeyDialog(factoryName, data, isEdit);
break;
}
}, [apiKeyDialog, azureDialog, bedrockDialog, ollamaDialog]);
// 使用通用的 ConfigurationDialog 替代特定的 Dialog
configurationDialog.openConfigurationDialog(factoryName, data, isEdit);
}, [configurationDialog]);
return {
apiKeyDialog,
azureDialog,
bedrockDialog,
ollamaDialog,
configurationDialog,
systemDialog,
deleteOps,
openFactoryDialog,

View File

@@ -86,31 +86,6 @@ function ModelsPage() {
return filterFactory || [];
}, [llmFactory, myLlm]);
// const ModalMap = useMemo(
// () => ({
// [LLMFactory.Bedrock]: showBedrockAddingModal,
// [LLMFactory.VolcEngine]: showVolcAddingModal,
// [LLMFactory.TencentHunYuan]: showHunyuanAddingModal,
// [LLMFactory.XunFeiSpark]: showSparkAddingModal,
// [LLMFactory.BaiduYiYan]: showyiyanAddingModal,
// [LLMFactory.FishAudio]: showFishAudioAddingModal,
// [LLMFactory.TencentCloud]: showTencentCloudAddingModal,
// [LLMFactory.GoogleCloud]: showGoogleAddingModal,
// [LLMFactory.AzureOpenAI]: showAzureAddingModal,
// }),
// [
// showBedrockAddingModal,
// showVolcAddingModal,
// showHunyuanAddingModal,
// showTencentCloudAddingModal,
// showSparkAddingModal,
// showyiyanAddingModal,
// showFishAudioAddingModal,
// showGoogleAddingModal,
// showAzureAddingModal,
// ],
// );
// 处理配置模型工厂
const handleConfigureFactory = useCallback((factory: IFactory) => {
if (factory == null) {
@@ -119,32 +94,23 @@ function ModelsPage() {
// llm 的配置很多,有很多种类型 首先是local llm 然后是配置项不一样的
// 然后有很多自定义的配置项,需要单独用 dialog 来配置
const factoryName = factory.name as LLMFactory;
const configurationFactories: LLMFactory[] = [
LLM_FACTORY_LIST.AzureOpenAI,
LLM_FACTORY_LIST.Bedrock,
LLM_FACTORY_LIST.BaiduYiYan,
LLM_FACTORY_LIST.FishAudio,
LLM_FACTORY_LIST.GoogleCloud,
LLM_FACTORY_LIST.TencentCloud,
LLM_FACTORY_LIST.TencentHunYuan,
LLM_FACTORY_LIST.XunFeiSpark,
LLM_FACTORY_LIST.VolcEngine,
]
if (LocalLlmFactories.includes(factoryName)) {
modelDialogs.ollamaDialog.openDialog({
llm_factory: factory.name,
});
} else if (factoryName == LLM_FACTORY_LIST.AzureOpenAI) {
modelDialogs.azureDialog.openDialog({
llm_factory: factory.name,
});
} else if (factoryName == LLM_FACTORY_LIST.Bedrock) {
modelDialogs.bedrockDialog.openDialog({
llm_factory: factory.name,
});
} else if (factoryName == LLM_FACTORY_LIST.BaiduYiYan) {
} else if (factoryName == LLM_FACTORY_LIST.GoogleCloud) {
} else if (factoryName == LLM_FACTORY_LIST.FishAudio) {
} else if (factoryName == LLM_FACTORY_LIST.TencentCloud) {
} else if (factoryName == LLM_FACTORY_LIST.TencentHunYuan) {
} else if (factoryName == LLM_FACTORY_LIST.XunFeiSpark) {
} else if (factoryName == LLM_FACTORY_LIST.VolcEngine) {
} else if (configurationFactories.includes(factoryName)) {
modelDialogs.configurationDialog.openConfigurationDialog(factory.name);
} else {
modelDialogs.apiKeyDialog.openApiKeyDialog(factoryName);
}
@@ -247,7 +213,7 @@ function ModelsPage() {
<Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
{/* 折叠/展开图标 */}
<IconButton size="small">
{collapsedFactories[factoryName] ? <ExpandLessIcon />: <ExpandMoreIcon /> }
{collapsedFactories[factoryName] ? <ExpandLessIcon /> : <ExpandMoreIcon />}
</IconButton>
<Box>
{/* 模型工厂名称 */}