feat(models): implement model configuration dialogs for Azure, Bedrock and Ollama

2025-10-24 15:40:34 +08:00
parent a9b47f776b
commit edba1f049e
6 changed files with 603 additions and 144 deletions


@@ -1,4 +1,4 @@
import React, { useEffect } from 'react';
import React, { useEffect, useMemo } from 'react';
import {
Dialog,
DialogTitle,
@@ -9,14 +9,28 @@ import {
Box,
Typography,
CircularProgress,
MenuItem,
Select,
FormControl,
InputLabel,
FormHelperText,
Link,
} from '@mui/material';
import { Controller, useForm } from 'react-hook-form';
import logger from '@/utils/logger';
import { LLM_FACTORY_LIST, type LLMFactory } from '@/constants/llm';
// Form data interface
export interface OllamaFormData {
base_url: string;
model_type: string;
llm_name: string;
api_base: string;
api_key?: string;
max_tokens: number;
llm_factory: string;
}
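// Illustrative example only (not part of this commit): a typical value this form
// might submit for a locally served Ollama chat model, assuming the defaults below.
// {
//   model_type: 'chat',
//   llm_name: 'llama2',
//   api_base: 'http://localhost:11434',
//   api_key: '',
//   max_tokens: 4096,
//   llm_factory: 'Ollama',
// }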
// Dialog props interface
export interface OllamaDialogProps {
open: boolean;
@@ -27,10 +41,49 @@ export interface OllamaDialogProps {
editMode?: boolean;
}
const llmFactoryToUrlMap: { [x: string]: string } = {
[LLM_FACTORY_LIST.Ollama]:
'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx',
[LLM_FACTORY_LIST.Xinference]:
'https://inference.readthedocs.io/en/latest/user_guide',
[LLM_FACTORY_LIST.ModelScope]:
'https://www.modelscope.cn/docs/model-service/API-Inference/intro',
[LLM_FACTORY_LIST.LocalAI]: 'https://localai.io/docs/getting-started/models/',
[LLM_FACTORY_LIST.LMStudio]: 'https://lmstudio.ai/docs/basics',
[LLM_FACTORY_LIST.OpenAiAPICompatible]:
'https://platform.openai.com/docs/models/gpt-4',
[LLM_FACTORY_LIST.TogetherAI]: 'https://docs.together.ai/docs/deployment-options',
[LLM_FACTORY_LIST.Replicate]: 'https://replicate.com/docs/topics/deployments',
[LLM_FACTORY_LIST.OpenRouter]: 'https://openrouter.ai/docs',
[LLM_FACTORY_LIST.HuggingFace]:
'https://huggingface.co/docs/text-embeddings-inference/quick_tour',
[LLM_FACTORY_LIST.GPUStack]: 'https://docs.gpustack.ai/latest/quickstart',
[LLM_FACTORY_LIST.VLLM]: 'https://docs.vllm.ai/en/latest/',
} as const;
function getURLByFactory(factory: LLMFactory) {
const url = llmFactoryToUrlMap[factory];
return {
textTip: `如何集成 ${factory}`,
url,
};
}
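// Example (illustrative, not in the commit): getURLByFactory(LLM_FACTORY_LIST.Ollama as LLMFactory)
// returns { textTip: '如何集成 Ollama', url: 'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx' }.
// For a factory missing from llmFactoryToUrlMap, url is undefined and the footer Link would render without an href.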
// Model type options
const MODEL_TYPE_OPTIONS = [
{ value: 'chat', label: 'Chat' },
{ value: 'embedding', label: 'Embedding' },
{ value: 'rerank', label: 'Rerank' },
{ value: 'image2text', label: 'Image2Text' },
{ value: 'speech2text', label: 'Speech2Text' },
];
/**
 * Ollama configuration dialog
 * Ollama / local LLM configuration dialog
*/
function OllamaDialog ({
function OllamaDialog({
open,
onClose,
onSubmit,
@@ -45,14 +98,60 @@ function OllamaDialog ({
formState: { errors },
} = useForm<OllamaFormData>({
defaultValues: {
base_url: 'http://localhost:11434',
model_type: 'chat',
llm_name: '',
api_base: 'http://localhost:11434',
api_key: '',
max_tokens: 4096,
llm_factory: 'Ollama',
},
});
const modelTypeOptions = useMemo(() => {
const factory = initialData?.llm_factory || LLM_FACTORY_LIST.Ollama;
if (factory === LLM_FACTORY_LIST.HuggingFace) {
return [
{ value: 'embedding', label: 'Embedding' },
{ value: 'chat', label: 'Chat' },
{ value: 'rerank', label: 'Rerank' },
];
} else if (factory === LLM_FACTORY_LIST.Xinference) {
return [
{ value: 'chat', label: 'Chat' },
{ value: 'embedding', label: 'Embedding' },
{ value: 'rerank', label: 'Rerank' },
{ value: 'image2text', label: 'Image2Text' },
{ value: 'speech2text', label: 'Speech2Text' },
{ value: 'tts', label: 'TTS' },
];
} else if (factory === LLM_FACTORY_LIST.ModelScope) {
return [
{ value: 'chat', label: 'Chat' },
];
} else if (factory === LLM_FACTORY_LIST.GPUStack) {
return [
{ value: 'chat', label: 'Chat' },
{ value: 'embedding', label: 'Embedding' },
{ value: 'rerank', label: 'Rerank' },
{ value: 'image2text', label: 'Image2Text' },
];
}
return MODEL_TYPE_OPTIONS;
}, [initialData]);
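// Example (illustrative, not in the commit): with initialData.llm_factory === 'ModelScope'
// the type select offers only Chat; with no initialData it falls back to the full
// MODEL_TYPE_OPTIONS list for the default Ollama factory.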
logger.debug('OllamaDialog', { open, initialData, editMode });
// Reset the form when the dialog opens or the initial data changes
useEffect(() => {
if (open) {
reset(initialData || { base_url: 'http://localhost:11434' });
reset({
model_type: 'chat',
llm_name: '',
api_base: initialData?.api_base,
api_key: initialData?.api_key,
max_tokens: initialData?.max_tokens,
llm_factory: initialData?.llm_factory || 'Ollama',
});
}
}, [open, initialData, reset]);
@@ -60,49 +159,168 @@ function OllamaDialog ({
onSubmit(data);
};
// Get the documentation link info
const docInfo = getURLByFactory((initialData?.llm_factory || LLM_FACTORY_LIST.Ollama) as LLMFactory);
return (
<Dialog open={open} onClose={onClose} maxWidth="sm" fullWidth>
<DialogTitle>
{editMode ? '编辑' : '配置'} Ollama
{editMode ? `编辑 ${initialData?.llm_factory || LLM_FACTORY_LIST.Ollama}` : `配置 ${initialData?.llm_factory || LLM_FACTORY_LIST.Ollama}`}
</DialogTitle>
<DialogContent>
<Box component="form" sx={{ mt: 2 }}>
{/* Model type selection */}
<Controller
name="base_url"
name="model_type"
control={control}
rules={{
required: 'Base URL 是必填项',
rules={{ required: '模型类型是必填项' }}
render={({ field }) => (
<FormControl fullWidth margin="normal" error={!!errors.model_type}>
<InputLabel>模型类型 *</InputLabel>
<Select
{...field}
label="模型类型 *"
>
{modelTypeOptions.map((option) => (
<MenuItem key={option.value} value={option.value}>
{option.label}
</MenuItem>
))}
</Select>
{errors.model_type && (
<FormHelperText>{errors.model_type.message}</FormHelperText>
)}
</FormControl>
)}
/>
{/* Model name */}
<Controller
name="llm_name"
control={control}
rules={{ required: '模型名称是必填项' }}
render={({ field }) => (
<TextField
{...field}
fullWidth
label="模型名称"
margin="normal"
required
error={!!errors.llm_name}
helperText={errors.llm_name?.message || '请输入模型名称'}
placeholder="例如: llama2, mistral"
/>
)}
/>
{/* Base URL */}
<Controller
name="api_base"
control={control}
rules={{
required: '基础 URL 是必填项',
pattern: {
value: /^https?:\/\/.+/,
message: 'Base URL 必须是有效的 URL'
message: '基础 URL 必须是有效的 URL'
}
}}
render={({ field }) => (
<TextField
{...field}
fullWidth
label="Base URL"
label="基础 URL"
margin="normal"
error={!!errors.base_url}
helperText={errors.base_url?.message || 'Ollama 服务的基础 URL'}
placeholder="http://localhost:11434"
required
error={!!errors.api_base}
helperText={errors.api_base?.message || '基础 URL'}
placeholder="http://localhost:8888"
/>
)}
/>
{/* API Key (optional) */}
<Controller
name="api_key"
control={control}
render={({ field }) => (
<TextField
{...field}
fullWidth
label="API Key"
margin="normal"
error={!!errors.api_key}
helperText={errors.api_key?.message || 'API Key (可选)'}
placeholder="如果需要认证,请输入 API Key"
/>
)}
/>
{/* Max tokens */}
<Controller
name="max_tokens"
control={control}
rules={{
required: '最大 Token 数是必填项',
min: {
value: 1,
message: '最大 Token 数必须大于 0'
},
max: {
value: 100000,
message: '最大 Token 数不能超过 100000'
}
}}
render={({ field }) => (
<TextField
{...field}
fullWidth
label="最大 Token 数"
margin="normal"
type="number"
required
error={!!errors.max_tokens}
helperText={errors.max_tokens?.message || '模型支持的最大 Token 数'}
placeholder="4096"
onChange={(e) => field.onChange(parseInt(e.target.value, 10) || 0)}
/>
)}
/>
</Box>
</DialogContent>
<DialogActions>
<Button onClick={onClose} disabled={loading}>
取消
</Button>
<Button
onClick={handleSubmit(handleFormSubmit)}
variant="contained"
disabled={loading}
startIcon={loading ? <CircularProgress size={20} /> : null}
>
确定
</Button>
<Box sx={{ display: 'flex', justifyContent: 'space-between', width: '100%', alignItems: 'center' }}>
{/* Documentation link on the left */}
<Link
href={docInfo.url}
target="_blank"
rel="noopener noreferrer"
sx={{
ml: 2,
fontSize: 16,
textDecoration: 'none',
'&:hover': {
textDecoration: 'underline'
}
}}
>
{docInfo.textTip}
</Link>
{/* Button group on the right */}
<Box sx={{ display: 'flex', gap: 1 }}>
<Button onClick={onClose} disabled={loading}>
取消
</Button>
<Button
onClick={handleSubmit(handleFormSubmit)}
variant="contained"
disabled={loading}
startIcon={loading ? <CircularProgress size={20} /> : null}
>
确定
</Button>
</Box>
</Box>
</DialogActions>
</Dialog>
);
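
A minimal sketch of how a parent view might drive this dialog (illustrative only; the import path, state names, submit handler, and API endpoint are assumptions, not part of this commit):

import React, { useState } from 'react';
// Assumes OllamaDialog is the file's default export and OllamaFormData is exported as shown above.
import OllamaDialog, { type OllamaFormData } from './OllamaDialog';

function ModelSettings() {
  const [open, setOpen] = useState(false);
  const [saving, setSaving] = useState(false);

  // Hypothetical submit handler: forward the form data to whatever endpoint registers the model.
  const handleSubmit = async (data: OllamaFormData) => {
    setSaving(true);
    try {
      await fetch('/api/llm', { method: 'POST', body: JSON.stringify(data) });
      setOpen(false);
    } finally {
      setSaving(false);
    }
  };

  return (
    <>
      <button onClick={() => setOpen(true)}>Add local model</button>
      <OllamaDialog
        open={open}
        onClose={() => setOpen(false)}
        onSubmit={handleSubmit}
        loading={saving}
      />
    </>
  );
}

export default ModelSettings;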