refactor(build): improve caching strategy with content hashing

feat(header): add translation support for menu items
fix(models): enhance model factory handling with proper dialogs
2025-10-29 10:29:08 +08:00
parent 303715f82c
commit ef0a99ea30
9 changed files with 158 additions and 145 deletions
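In the models-related hunks below, an optional onSuccess callback is threaded into useOllamaDialog so that adding a local (Ollama) model refreshes the page the same way the API-key and configuration dialogs already do. A minimal caller-side sketch of the resulting API (not part of this commit; refetchMyLlm is a hypothetical refresh helper):

// Sketch only: one callback is shared by every dialog the manager wires up,
// and after this change the Ollama dialog invokes it as well.
const modelDialogs = useModelDialogs(() => {
  refetchMyLlm(); // hypothetical helper that re-fetches the model list
});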

@@ -161,7 +161,7 @@ export const useApiKeyDialog = (onSuccess?: () => void) => {
};
// Ollama dialog management
-export const useOllamaDialog = () => {
+export const useOllamaDialog = (onSuccess?: () => void) => {
  const dialogState = useDialogState();
  const showMessage = useMessage();
@@ -179,6 +179,11 @@ export const useOllamaDialog = () => {
      });
      showMessage.success('Ollama model added successfully');
      dialogState.closeDialog();
+     // Invoke the success callback
+     if (onSuccess) {
+       onSuccess();
+     }
    } catch (error) {
      logger.error('Failed to add Ollama model:', error);
      showMessage.error('Failed to add Ollama model');
@@ -186,7 +191,7 @@ export const useOllamaDialog = () => {
    } finally {
      dialogState.setLoading(false);
    }
-  }, [dialogState, showMessage]);
+  }, [dialogState, showMessage, onSuccess]);
  return {
    ...dialogState,
@@ -340,7 +345,7 @@ export const useSystemModelSetting = (onSuccess?: () => void) => {
// Unified model dialog manager
export const useModelDialogs = (onSuccess?: () => void) => {
  const apiKeyDialog = useApiKeyDialog(onSuccess);
- const ollamaDialog = useOllamaDialog();
+ const ollamaDialog = useOllamaDialog(onSuccess);
  const configurationDialog = useConfigurationDialog(onSuccess);
  const systemDialog = useSystemModelSetting(onSuccess);
  const deleteOps = useDeleteOperations(onSuccess);
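Because onSuccess now appears in the dependency list of the Ollama handleOk callback (see the hunk above), an inline arrow passed from the caller recreates that handler on every render. A hedged sketch of keeping the callback stable, assuming the caller sits inside a React component:

import { useCallback } from 'react';

// Sketch only: a memoized callback keeps the Ollama dialog's handleOk stable
// across renders now that onSuccess sits in its dependency list.
// refetchMyLlm is the same hypothetical refresh helper as above.
const onModelAdded = useCallback(() => {
  refetchMyLlm();
}, [refetchMyLlm]);
const modelDialogs = useModelDialogs(onModelAdded);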

@@ -88,6 +88,28 @@ function ModelsPage() {
    return filterFactory || [];
  }, [llmFactory, myLlm]);
+ const showAddModel = (factoryName: string) => {
+   const configurationFactories: LLMFactory[] = [
+     LLM_FACTORY_LIST.AzureOpenAI,
+     LLM_FACTORY_LIST.Bedrock,
+     LLM_FACTORY_LIST.BaiduYiYan,
+     LLM_FACTORY_LIST.FishAudio,
+     LLM_FACTORY_LIST.GoogleCloud,
+     LLM_FACTORY_LIST.TencentCloud,
+     LLM_FACTORY_LIST.TencentHunYuan,
+     LLM_FACTORY_LIST.XunFeiSpark,
+     LLM_FACTORY_LIST.VolcEngine,
+   ];
+   const fN = factoryName as LLMFactory;
+   if (!fN) {
+     return false;
+   }
+   return LocalLlmFactories.includes(fN) ||
+     configurationFactories.includes(fN);
+ };
  // Handle configuring a model factory
  const handleConfigureFactory = useCallback((factory: IFactory) => {
    if (factory == null) {
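showAddModel is a predicate: it returns true when the factory is either local (LocalLlmFactories) or one of the configuration-style providers, and the button label in the last hunk switches between addModel and edit based on it. The same configurationFactories list is declared again inside handleEditLlmFactory in the next hunk; a possible follow-up, not part of this commit, is to hoist it to a shared module-level constant:

// Sketch only: both showAddModel and handleEditLlmFactory could read from one
// constant instead of redeclaring the list. The name is hypothetical.
const CONFIGURATION_FACTORIES: LLMFactory[] = [
  LLM_FACTORY_LIST.AzureOpenAI,
  LLM_FACTORY_LIST.Bedrock,
  LLM_FACTORY_LIST.BaiduYiYan,
  LLM_FACTORY_LIST.FishAudio,
  LLM_FACTORY_LIST.GoogleCloud,
  LLM_FACTORY_LIST.TencentCloud,
  LLM_FACTORY_LIST.TencentHunYuan,
  LLM_FACTORY_LIST.XunFeiSpark,
  LLM_FACTORY_LIST.VolcEngine,
];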
@@ -126,7 +148,31 @@ function ModelsPage() {
    if (factoryName == null) {
      return;
    }
-   modelDialogs.apiKeyDialog.openApiKeyDialog(factoryName);
+   const factoryN = factoryName as LLMFactory;
+   const configurationFactories: LLMFactory[] = [
+     LLM_FACTORY_LIST.AzureOpenAI,
+     LLM_FACTORY_LIST.Bedrock,
+     LLM_FACTORY_LIST.BaiduYiYan,
+     LLM_FACTORY_LIST.FishAudio,
+     LLM_FACTORY_LIST.GoogleCloud,
+     LLM_FACTORY_LIST.TencentCloud,
+     LLM_FACTORY_LIST.TencentHunYuan,
+     LLM_FACTORY_LIST.XunFeiSpark,
+     LLM_FACTORY_LIST.VolcEngine,
+   ];
+   if (LocalLlmFactories.includes(factoryN)) {
+     // local llm
+     modelDialogs.ollamaDialog.openDialog({
+       llm_factory: factoryN,
+     }, true);
+   } else if (configurationFactories.includes(factoryN)) {
+     // custom configuration llm
+     modelDialogs.configurationDialog.openConfigurationDialog(factoryN);
+   } else {
+     // llm set api
+     modelDialogs.apiKeyDialog.openApiKeyDialog(factoryN, {}, true);
+   }
+   logger.debug('handleEditLlmFactory', factoryN);
  }, []);
  const dialog = useDialog();
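Worth noting: handleEditLlmFactory is memoized with an empty dependency array even though it now reads modelDialogs, so it captures whatever object was in scope on the first render. If that reference is not stable, a hedged fix (a suggestion, not part of the commit) is to list what the callback actually uses:

// Sketch only: declare what the callback reads so the memoized handler never
// closes over a stale modelDialogs object.
const handleEditLlmFactory = useCallback(
  (factoryName: string) => {
    // ...same body as in the hunk above
  },
  [modelDialogs], // add LocalLlmFactories too if it is not a module-level constant
);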
@@ -247,7 +293,7 @@ function ModelsPage() {
            variant='contained' color='primary' startIcon={<EditIcon />}
            onClick={() => handleEditLlmFactory(factoryName)}
          >
-           {t('setting.edit')}
+           {showAddModel(factoryName) ? t('setting.addModel') : t('setting.edit')}
          </Button>
          <Button
            variant='outlined' color='primary' startIcon={<DeleteIcon />}