// File: TERES_web_frontend/src/hooks/setting-hooks.ts
import { useUserData } from "./useUserData";
import { useEffect, useState } from "react";
import logger from "@/utils/logger";
import type { IUserInfo } from "@/interfaces/database/user-setting";
import userService from "@/services/user_service";
import { rsaPsw } from "../utils/encryption";
import type { IFactory, IMyLlmModel } from "@/interfaces/database/llm";
import type { LLMFactory } from "@/constants/llm";
/**
 * Hook for profile settings: loads the current user's info on mount and
 * exposes mutations for updating profile fields and changing the password.
 */
export function useProfileSetting() {
  const { fetchUserInfo, userInfo } = useUserData();

  // Load the user's info once on mount (fetchUserInfo is expected to be
  // referentially stable; otherwise this effect re-runs each render).
  useEffect(() => {
    fetchUserInfo();
  }, [fetchUserInfo]);

  /**
   * Persist a partial update of the user's profile.
   * @param newUserInfo fields to change; omitted fields are left untouched
   * @throws rethrows the service error after logging so callers can react
   */
  const updateUserInfo = async (newUserInfo: Partial<IUserInfo>) => {
    try {
      await userService.updateSetting(newUserInfo);
    } catch (error) {
      logger.error('更新用户信息失败:', error);
      throw error;
    }
  };

  /**
   * Change the current user's password. Both the old and new password are
   * RSA-encrypted client-side (rsaPsw) before being sent.
   * @throws rethrows the service error after logging so callers can react
   */
  const changeUserPassword = async (data: { password: string; new_password: string }) => {
    try {
      await userService.updatePassword({
        password: rsaPsw(data.password),
        new_password: rsaPsw(data.new_password),
      });
    } catch (error) {
      // Log with context before rethrowing, for parity with updateUserInfo
      // (the original swallowed the context and rethrew silently).
      logger.error('修改密码失败:', error);
      throw error;
    }
  };

  return {
    userInfo,
    updateUserInfo,
    changeUserPassword,
  };
}
/**
 * Hook for LLM model settings: loads the list of available model factories
 * and the user's configured models on mount, and exposes a refresh helper.
 */
export function useLlmModelSetting() {
  const [llmFactory, setLlmFactory] = useState<IFactory[]>([]);
  // undefined until the first successful fetch completes
  const [myLlm, setMyLlm] = useState<Record<LLMFactory, IMyLlmModel>>();

  /** Fetch all available model factories; rethrows after logging. */
  const fetchLlmFactory = async () => {
    try {
      const res = await userService.llm_factories_list();
      setLlmFactory(res.data.data || []);
    } catch (error) {
      logger.error('获取模型工厂失败:', error);
      throw error;
    }
  };

  /** Fetch the user's configured models; rethrows after logging. */
  const fetchMyLlm = async () => {
    try {
      const res = await userService.my_llm();
      setMyLlm(res.data.data || {});
    } catch (error) {
      logger.error('获取我的模型失败:', error);
      throw error;
    }
  };

  // Load both lists once on mount. The fetchers rethrow, so swallow the
  // rejection here to avoid unhandled promise rejections — the failures
  // are already logged inside each fetcher.
  useEffect(() => {
    void fetchLlmFactory().catch(() => {});
    void fetchMyLlm().catch(() => {});
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  /**
   * Re-fetch the user's configured models (factories rarely change, so
   * they are intentionally not refreshed here).
   * @throws rethrows the service error from fetchMyLlm
   */
  const refreshLlmModel = async () => {
    await fetchMyLlm();
    logger.info('刷新我的模型成功');
  };

  return {
    llmFactory,
    myLlm,
    refreshLlmModel,
  };
}