import type { ConfigFormItem, DocLinkConfig } from './ConfigurationDialog';
import { LLM_FACTORY_LIST } from '@/constants/llm';
import i18n from '@/locales';

// AWS regions supported by Bedrock
export const BEDROCK_REGIONS = [
  'us-east-2', 'us-east-1', 'us-west-1', 'us-west-2',
  'af-south-1',
  'ap-east-1', 'ap-south-2', 'ap-southeast-3', 'ap-southeast-5', 'ap-southeast-4',
  'ap-south-1', 'ap-northeast-3', 'ap-northeast-2', 'ap-southeast-1', 'ap-southeast-2',
  'ap-east-2', 'ap-southeast-7', 'ap-northeast-1',
  'ca-central-1', 'ca-west-1',
  'eu-central-1', 'eu-west-1', 'eu-west-2', 'eu-south-1', 'eu-west-3',
  'eu-south-2', 'eu-north-1', 'eu-central-2',
  'il-central-1', 'mx-central-1', 'me-south-1', 'me-central-1', 'sa-east-1',
  'us-gov-east-1', 'us-gov-west-1',
];

// Model type options
export const MODEL_TYPE_OPTIONS = [
  { value: 'chat', label: 'Chat' },
  { value: 'embedding', label: 'Embedding' },
  { value: 'rerank', label: 'Rerank' },
  { value: 'image2text', label: 'Image2Text' },
  { value: 'speech2text', label: 'Speech2Text' },
  { value: 'tts', label: 'TTS' },
];

// Documentation link mapping
export const DOC_LINKS: Record<string, DocLinkConfig> = {
  [LLM_FACTORY_LIST.AzureOpenAI]: {
    url: 'https://azure.microsoft.com/en-us/products/ai-services/openai-service',
    text: `${i18n.t('setting.howToIntegrate')} Azure OpenAI`,
  },
  [LLM_FACTORY_LIST.Bedrock]: {
    url: 'https://console.aws.amazon.com/',
    text: `${i18n.t('setting.howToIntegrate')} Bedrock`,
  },
  [LLM_FACTORY_LIST.Ollama]: {
    url: 'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx',
    text: `${i18n.t('setting.howToIntegrate')} Ollama`,
  },
  [LLM_FACTORY_LIST.Xinference]: {
    url: 'https://inference.readthedocs.io/en/latest/user_guide',
    text: `${i18n.t('setting.howToIntegrate')} Xinference`,
  },
  [LLM_FACTORY_LIST.ModelScope]: {
    url: 'https://www.modelscope.cn/docs/model-service/API-Inference/intro',
    text: `${i18n.t('setting.howToIntegrate')} ModelScope`,
  },
  [LLM_FACTORY_LIST.LocalAI]: {
    url: 'https://localai.io/docs/getting-started/models/',
    text: `${i18n.t('setting.howToIntegrate')} LocalAI`,
  },
  [LLM_FACTORY_LIST.LMStudio]: {
    url: 'https://lmstudio.ai/docs/basics',
    text: `${i18n.t('setting.howToIntegrate')} LMStudio`,
  },
  [LLM_FACTORY_LIST.OpenAiAPICompatible]: {
    url: 'https://platform.openai.com/docs/models/gpt-4',
    text: `${i18n.t('setting.howToIntegrate')} OpenAI API Compatible`,
  },
  [LLM_FACTORY_LIST.TogetherAI]: {
    url: 'https://docs.together.ai/docs/deployment-options',
    text: `${i18n.t('setting.howToIntegrate')} TogetherAI`,
  },
  [LLM_FACTORY_LIST.Replicate]: {
    url: 'https://replicate.com/docs/topics/deployments',
    text: `${i18n.t('setting.howToIntegrate')} Replicate`,
  },
  [LLM_FACTORY_LIST.OpenRouter]: {
    url: 'https://openrouter.ai/docs',
    text: `${i18n.t('setting.howToIntegrate')} OpenRouter`,
  },
  [LLM_FACTORY_LIST.HuggingFace]: {
    url: 'https://huggingface.co/docs/text-embeddings-inference/quick_tour',
    text: `${i18n.t('setting.howToIntegrate')} HuggingFace`,
  },
  [LLM_FACTORY_LIST.GPUStack]: {
    url: 'https://docs.gpustack.ai/latest/quickstart',
    text: `${i18n.t('setting.howToIntegrate')} GPUStack`,
  },
  [LLM_FACTORY_LIST.VLLM]: {
    url: 'https://docs.vllm.ai/en/latest/',
    text: `${i18n.t('setting.howToIntegrate')} VLLM`,
  },
  [LLM_FACTORY_LIST.FishAudio]: {
    url: 'https://www.fish.audio/',
    text: `${i18n.t('setting.howToIntegrate')} Fish Audio`,
  },
  [LLM_FACTORY_LIST.TencentCloud]: {
    url: 'https://cloud.tencent.com/document/api/1093/37823',
    text: `${i18n.t('setting.howToIntegrate')} Tencent Cloud ASR`,
  },
  [LLM_FACTORY_LIST.VolcEngine]: {
    url: 'https://www.volcengine.com/docs/82379/1302008',
    text: `${i18n.t('setting.howToIntegrate')} VolcEngine`,
  },
};
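
// Illustrative sketch (not used by this module): the provider configs below build
// their model-type subsets positionally, e.g. `MODEL_TYPE_OPTIONS.slice(0, 2)` for
// Bedrock. A value-based pick such as this hypothetical helper keeps a subset
// stable even if MODEL_TYPE_OPTIONS is ever reordered.
export const pickModelTypes = (...values: string[]) =>
  MODEL_TYPE_OPTIONS.filter((option) => values.includes(option.value));
// e.g. pickModelTypes('chat', 'embedding') is equivalent to MODEL_TYPE_OPTIONS.slice(0, 2)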
// Azure OpenAI configuration
export const AZURE_OPENAI_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: [
      { value: 'chat', label: 'Chat' },
      { value: 'embedding', label: 'Embedding' },
      { value: 'image2text', label: 'Image2Text' },
    ],
    defaultValue: 'embedding',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: 'gpt-3.5-turbo',
    helperText: i18n.t('setting.modelNameHelperText'),
    defaultValue: 'gpt-3.5-turbo',
  },
  {
    name: 'api_base',
    label: i18n.t('setting.baseUrl'),
    type: 'text',
    required: true,
    placeholder: 'https://your-resource.openai.azure.com/',
    helperText: i18n.t('setting.azureOpenAIEndpointHelperText'),
    validation: {
      pattern: {
        value: /^https?:\/\/.+/,
        message: i18n.t('setting.baseUrlValidationMessage'),
      },
    },
  },
  {
    name: 'api_key',
    label: 'API-Key',
    type: 'password',
    helperText: i18n.t('setting.apiKeyHelperText'),
  },
  {
    name: 'api_version',
    label: 'API Version',
    type: 'text',
    required: true,
    placeholder: '2024-02-01',
    helperText: i18n.t('setting.azureAPIVersionHelperText'),
    defaultValue: '2024-02-01',
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: i18n.t('setting.maxTokensPlaceholder'),
    helperText: i18n.t('setting.maxTokensHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
      max: { value: 100000, message: i18n.t('setting.maxTokensMaxMessage') },
    },
  },
];

// Bedrock configuration
export const BEDROCK_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: MODEL_TYPE_OPTIONS.slice(0, 2), // chat and embedding only
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.modelNamePlaceholder'),
  },
  {
    name: 'bedrock_ak',
    label: 'ACCESS KEY',
    type: 'password',
    required: true,
    placeholder: i18n.t('setting.accessKeyPlaceholder'),
  },
  {
    name: 'bedrock_sk',
    label: 'SECRET KEY',
    type: 'password',
    required: true,
    placeholder: i18n.t('setting.secretKeyPlaceholder'),
  },
  {
    name: 'bedrock_region',
    label: 'AWS Region',
    type: 'select',
    required: true,
    options: BEDROCK_REGIONS.map((region) => ({ value: region, label: region })),
    defaultValue: 'us-east-1',
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: i18n.t('setting.maxTokensPlaceholder'),
    helperText: i18n.t('setting.maxTokensHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
    },
  },
];

// Ollama configuration
export const OLLAMA_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: MODEL_TYPE_OPTIONS,
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.ollamaModelNamePlaceholder'),
    helperText: i18n.t('setting.modelNameHelperText'),
  },
  {
    name: 'api_base',
    label: i18n.t('setting.baseUrl'),
    type: 'text',
    required: true,
    placeholder: 'http://localhost:8888',
    helperText: i18n.t('setting.baseUrlHelperText'),
    defaultValue: 'http://localhost:11434',
    validation: {
      pattern: {
        value: /^https?:\/\/.+/,
        message: i18n.t('setting.baseUrlValidationMessage'),
      },
    },
  },
  {
    name: 'api_key',
    label: 'API Key',
    type: 'text',
    placeholder: i18n.t('setting.apiKeyOptionalPlaceholder'),
    helperText: i18n.t('setting.apiKeyOptional'),
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: '4096',
    helperText: i18n.t('setting.maxTokensSupportedHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
      max: { value: 100000, message: i18n.t('setting.maxTokensMaxMessage') },
    },
  },
];
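
// Illustrative sketch (not used by this module): getLLMConfig at the bottom of
// this file reuses OLLAMA_CONFIG for the local / OpenAI-compatible providers by
// patching entries positionally (ollamaConfig[0], ollamaConfig[2]), which breaks
// silently if the field order above ever changes. A name-based patch like this
// hypothetical helper avoids that coupling:
export const patchFormItem = (
  items: ConfigFormItem[],
  name: string,
  patch: Partial<ConfigFormItem>,
): ConfigFormItem[] =>
  items.map((item) => (item.name === name ? { ...item, ...patch } : item));
// e.g. patchFormItem(OLLAMA_CONFIG, 'api_base', { defaultValue: 'http://localhost:9997' })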

export const BAIDU_YIYAN_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: MODEL_TYPE_OPTIONS.slice(0, 3),
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.modelNamePlaceholder'),
  },
  {
    name: 'yiyan_ak',
    label: i18n.t('setting.baiduYiYanAPIKey'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.apiKeyPlaceholder'),
    helperText: 'Baidu YiYan API KEY',
  },
  {
    name: 'yiyan_sk',
    label: i18n.t('setting.baiduYiYanSecretKey'),
    type: 'password',
    required: true,
    placeholder: i18n.t('setting.secretKeyPlaceholder'),
    helperText: 'Baidu YiYan Secret KEY',
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: i18n.t('setting.maxTokensPlaceholder'),
    helperText: i18n.t('setting.maxTokensHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
      max: { value: 100000, message: i18n.t('setting.maxTokensMaxMessage') },
    },
  },
];

export const FISH_AUDIO_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: [{ value: 'tts', label: 'TTS' }],
    defaultValue: 'tts',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.modelNamePlaceholder'),
  },
  {
    name: 'fish_audio_ak',
    label: 'Fish Audio API KEY',
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.apiKeyPlaceholder'),
    helperText: 'Fish Audio API KEY',
  },
  {
    name: 'fish_audio_refid',
    label: 'Fish Audio Reference ID',
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.fishAudioRefIdPlaceholder'),
    helperText: 'Fish Audio Reference ID',
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: i18n.t('setting.maxTokensPlaceholder'),
    helperText: i18n.t('setting.maxTokensHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
      max: { value: 100000, message: i18n.t('setting.maxTokensMaxMessage') },
    },
  },
];

export const GOOGLE_CLOUD_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: [
      { value: 'chat', label: 'Chat' },
      { value: 'image2text', label: 'Image2Text' },
    ],
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.modelNamePlaceholder'),
  },
  {
    name: 'google_project_id',
    label: 'Project ID',
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.googleProjectIdPlaceholder'),
    helperText: 'Google Cloud Project ID',
  },
  {
    name: 'google_region',
    label: i18n.t('setting.googleCloudRegion'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.googleCloudRegionPlaceholder'),
    helperText: i18n.t('setting.googleCloudRegionHelperText'),
  },
  {
    name: 'google_service_account_key',
    label: 'Google Cloud Service Account Key',
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.googleServiceAccountKeyPlaceholder'),
    helperText: 'Google Cloud Service Account Key',
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: i18n.t('setting.maxTokensPlaceholder'),
    helperText: i18n.t('setting.maxTokensHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
      max: { value: 100000, message: i18n.t('setting.maxTokensMaxMessage') },
    },
  },
];

export const TENCENT_CLOUD_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: [{ value: 'speech2text', label: 'Speech2Text' }],
    defaultValue: 'speech2text',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'select',
    required: true,
    options: [
      '16k_zh', '16k_zh_large', '16k_multi_lang', '16k_zh_dialect', '16k_en',
      '16k_yue', '16k_zh-PY', '16k_ja', '16k_ko', '16k_vi', '16k_ms', '16k_id',
      '16k_fil', '16k_th', '16k_pt', '16k_tr', '16k_ar', '16k_es', '16k_hi',
      '16k_fr', '16k_zh_medical', '16k_de',
    ].map((item) => ({ value: item, label: item })),
    defaultValue: '16k_zh',
  },
  {
    name: 'tencent_ak',
    label: i18n.t('setting.tencentSecretId'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.secretIdPlaceholder'),
    helperText: i18n.t('setting.tencentSecretIdHelperText'),
  },
  {
    name: 'tencent_sk',
    label: i18n.t('setting.tencentSecretKey'),
    type: 'password',
    required: true,
    placeholder: i18n.t('setting.secretKeyPlaceholder'),
    helperText: i18n.t('setting.tencentSecretKeyHelperText'),
  },
];

export const TENCENT_HUNYUAN_CONFIG: ConfigFormItem[] = [
  {
    name: 'hunyuan_sid',
    label: i18n.t('setting.hunyuanSecretId'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.secretIdPlaceholder'),
    helperText: i18n.t('setting.hunyuanSecretIdHelperText'),
  },
  {
    name: 'hunyuan_sk',
    label: i18n.t('setting.hunyuanSecretKey'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.secretKeyPlaceholder'),
    helperText: i18n.t('setting.hunyuanSecretKeyHelperText'),
  },
];

// XunFeiSpark
export const XUNFEI_SPARK_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: [
      { value: 'chat', label: 'Chat' },
      { value: 'tts', label: 'TTS' },
    ],
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.modelNamePlaceholder'),
  },
  {
    name: 'xunfei_spark_password',
    label: i18n.t('setting.xunfeiSparkAPIPassword'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.apiPasswordPlaceholder'),
    helperText: i18n.t('setting.xunfeiSparkAPIPasswordHelperText'),
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: i18n.t('setting.maxTokensPlaceholder'),
    helperText: i18n.t('setting.maxTokensHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
      max: { value: 100000, message: i18n.t('setting.maxTokensMaxMessage') },
    },
  },
];

// VolcEngine
export const VOLC_ENGINE_CONFIG: ConfigFormItem[] = [
  {
    name: 'model_type',
    label: i18n.t('setting.modelType'),
    type: 'select',
    required: true,
    options: [
      { value: 'chat', label: 'Chat' },
      { value: 'embedding', label: 'Embedding' },
    ],
    defaultValue: 'chat',
  },
  {
    name: 'llm_name',
    label: i18n.t('setting.modelName'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.modelNamePlaceholder'),
  },
  {
    name: 'endpoint_id',
    label: i18n.t('setting.modelEndpointId'),
    type: 'text',
    required: true,
    placeholder: i18n.t('setting.endpointIdPlaceholder'),
    helperText: i18n.t('setting.modelEndpointIdHelperText'),
  },
  {
    name: 'ark_api_key',
    label: i18n.t('setting.volcEngineARKAPIKey'),
    type: 'password',
    required: true,
    placeholder: i18n.t('setting.arkApiKeyPlaceholder'),
    helperText: i18n.t('setting.modelARKAPIKeyHelperText'),
  },
  {
    name: 'max_tokens',
    label: i18n.t('setting.maxTokens'),
    type: 'number',
    required: true,
    placeholder: i18n.t('setting.maxTokensPlaceholder'),
    helperText: i18n.t('setting.maxTokensHelperText'),
    defaultValue: 4096,
    validation: {
      min: { value: 1, message: i18n.t('setting.maxTokensMinMessage') },
      max: { value: 100000, message: i18n.t('setting.maxTokensMaxMessage') },
    },
  },
];

// Resolve the form configuration for a given LLM factory
export function getLLMConfig(factory: string): {
  formItems: ConfigFormItem[];
  docLink?: DocLinkConfig;
  title: string;
  defaultValues: Record<string, string>;
} {
  const docLink: DocLinkConfig | undefined = DOC_LINKS[factory];

  switch (factory) {
    case LLM_FACTORY_LIST.AzureOpenAI:
      return {
        formItems: AZURE_OPENAI_CONFIG,
        docLink,
        title: 'Azure OpenAI',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.Bedrock:
      return {
        formItems: BEDROCK_CONFIG,
        docLink,
        title: 'Bedrock',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.BaiduYiYan:
      return {
        formItems: BAIDU_YIYAN_CONFIG,
        docLink,
        title: 'Baidu YiYan',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.FishAudio:
      return {
        formItems: FISH_AUDIO_CONFIG,
        docLink,
        title: 'Fish Audio',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.GoogleCloud:
      return {
        formItems: GOOGLE_CLOUD_CONFIG,
        docLink,
        title: 'Google Cloud',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.TencentCloud:
      return {
        formItems: TENCENT_CLOUD_CONFIG,
        docLink,
        title: 'Tencent Cloud',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.TencentHunYuan:
      return {
        formItems: TENCENT_HUNYUAN_CONFIG,
        docLink,
        title: 'Tencent HunYuan',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.XunFeiSpark:
      return {
        formItems: XUNFEI_SPARK_CONFIG,
        docLink,
        title: 'XunFei Spark',
        defaultValues: { llm_factory: factory },
      };
    case LLM_FACTORY_LIST.VolcEngine:
      return {
        formItems: VOLC_ENGINE_CONFIG,
        docLink,
        title: 'Volc Engine',
        defaultValues: { llm_factory: factory },
      };
    // local llm and OpenAI-compatible providers share the Ollama-style form
    case LLM_FACTORY_LIST.Ollama:
    case LLM_FACTORY_LIST.Xinference:
    case LLM_FACTORY_LIST.ModelScope:
    case LLM_FACTORY_LIST.LocalAI:
    case LLM_FACTORY_LIST.LMStudio:
    case LLM_FACTORY_LIST.OpenAiAPICompatible:
    case LLM_FACTORY_LIST.TogetherAI:
    case LLM_FACTORY_LIST.Replicate:
    case LLM_FACTORY_LIST.OpenRouter:
    case LLM_FACTORY_LIST.HuggingFace:
    case LLM_FACTORY_LIST.GPUStack:
    case LLM_FACTORY_LIST.VLLM:
    default: {
      // Adjust the model type options per factory
      let modelTypeOptions = MODEL_TYPE_OPTIONS;
      let defaultApiBase = 'http://localhost:11434';

      if (factory === LLM_FACTORY_LIST.HuggingFace) {
        modelTypeOptions = [
          { value: 'embedding', label: 'Embedding' },
          { value: 'chat', label: 'Chat' },
          { value: 'rerank', label: 'Rerank' },
        ];
      } else if (factory === LLM_FACTORY_LIST.Xinference) {
        modelTypeOptions = [
          { value: 'chat', label: 'Chat' },
          { value: 'embedding', label: 'Embedding' },
          { value: 'rerank', label: 'Rerank' },
          { value: 'image2text', label: 'Image2Text' },
          { value: 'speech2text', label: 'Speech2Text' },
          { value: 'tts', label: 'TTS' },
        ];
      } else if (factory === LLM_FACTORY_LIST.ModelScope) {
        modelTypeOptions = [{ value: 'chat', label: 'Chat' }];
      } else if (factory === LLM_FACTORY_LIST.GPUStack) {
        modelTypeOptions = [
          { value: 'chat', label: 'Chat' },
          { value: 'embedding', label: 'Embedding' },
          { value: 'rerank', label: 'Rerank' },
          { value: 'image2text', label: 'Image2Text' },
        ];
      }
      // Set a different default API base per factory
      if (factory === LLM_FACTORY_LIST.Xinference) {
        defaultApiBase = 'http://localhost:9997';
      } else if (factory === LLM_FACTORY_LIST.LocalAI) {
        defaultApiBase = 'http://localhost:8080';
      } else if (factory === LLM_FACTORY_LIST.LMStudio) {
        defaultApiBase = 'http://localhost:1234';
      }

      const ollamaConfig = [...OLLAMA_CONFIG];
      // Override the model type options
      ollamaConfig[0] = {
        ...ollamaConfig[0],
        options: modelTypeOptions,
      };
      // Override the default API base
      ollamaConfig[2] = {
        ...ollamaConfig[2],
        defaultValue: defaultApiBase,
      };

      return {
        formItems: ollamaConfig,
        docLink,
        title: factory,
        defaultValues: { llm_factory: factory },
      };
    }
  }
}
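
// Usage sketch (assumption: the exact rendering is up to ConfigurationDialog,
// which is not shown here). A caller typically looks up the schema for the
// factory the user picked and feeds it to the dialog, e.g.:
//
//   const { formItems, docLink, title, defaultValues } = getLLMConfig(
//     LLM_FACTORY_LIST.Ollama,
//   );
//   // formItems drives the rendered fields, defaultValues seeds the form state,
//   // and docLink (when present) renders the "how to integrate" link.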