feat: add ragflow web project & add pnpm workspace file
213
ragflow_web/src/utils/api.ts
Normal file
@@ -0,0 +1,213 @@
let api_host = `/v1`;
const ExternalApi = `/api`;

export { api_host };

export default {
  // user
  login: `${api_host}/user/login`,
  logout: `${api_host}/user/logout`,
  register: `${api_host}/user/register`,
  setting: `${api_host}/user/setting`,
  user_info: `${api_host}/user/info`,
  tenant_info: `${api_host}/user/tenant_info`,
  set_tenant_info: `${api_host}/user/set_tenant_info`,
  login_channels: `${api_host}/user/login/channels`,
  login_channel: (channel: string) => `${api_host}/user/login/${channel}`,

  // team
  addTenantUser: (tenantId: string) => `${api_host}/tenant/${tenantId}/user`,
  listTenantUser: (tenantId: string) =>
    `${api_host}/tenant/${tenantId}/user/list`,
  deleteTenantUser: (tenantId: string, userId: string) =>
    `${api_host}/tenant/${tenantId}/user/${userId}`,
  listTenant: `${api_host}/tenant/list`,
  agreeTenant: (tenantId: string) => `${api_host}/tenant/agree/${tenantId}`,

  // llm model
  factories_list: `${api_host}/llm/factories`,
  llm_list: `${api_host}/llm/list`,
  my_llm: `${api_host}/llm/my_llms`,
  set_api_key: `${api_host}/llm/set_api_key`,
  add_llm: `${api_host}/llm/add_llm`,
  delete_llm: `${api_host}/llm/delete_llm`,
  deleteFactory: `${api_host}/llm/delete_factory`,

  // plugin
  llm_tools: `${api_host}/plugin/llm_tools`,

  // knowledge base
  kb_list: `${api_host}/kb/list`,
  create_kb: `${api_host}/kb/create`,
  update_kb: `${api_host}/kb/update`,
  rm_kb: `${api_host}/kb/rm`,
  get_kb_detail: `${api_host}/kb/detail`,
  getKnowledgeGraph: (knowledgeId: string) =>
    `${api_host}/kb/${knowledgeId}/knowledge_graph`,
  getMeta: `${api_host}/kb/get_meta`,
  getKnowledgeBasicInfo: `${api_host}/kb/basic_info`,
  // data pipeline log
  fetchDataPipelineLog: `${api_host}/kb/list_pipeline_logs`,
  get_pipeline_detail: `${api_host}/kb/pipeline_log_detail`,
  fetchPipelineDatasetLogs: `${api_host}/kb/list_pipeline_dataset_logs`,
  runGraphRag: `${api_host}/kb/run_graphrag`,
  traceGraphRag: `${api_host}/kb/trace_graphrag`,
  runRaptor: `${api_host}/kb/run_raptor`,
  traceRaptor: `${api_host}/kb/trace_raptor`,
  unbindPipelineTask: ({ kb_id, type }: { kb_id: string; type: string }) =>
    `${api_host}/kb/unbind_task?kb_id=${kb_id}&pipeline_task_type=${type}`,
  pipelineRerun: `${api_host}/canvas/rerun`,

  // tags
  listTag: (knowledgeId: string) => `${api_host}/kb/${knowledgeId}/tags`,
  listTagByKnowledgeIds: `${api_host}/kb/tags`,
  removeTag: (knowledgeId: string) => `${api_host}/kb/${knowledgeId}/rm_tags`,
  renameTag: (knowledgeId: string) =>
    `${api_host}/kb/${knowledgeId}/rename_tag`,

  // chunk
  chunk_list: `${api_host}/chunk/list`,
  create_chunk: `${api_host}/chunk/create`,
  set_chunk: `${api_host}/chunk/set`,
  get_chunk: `${api_host}/chunk/get`,
  switch_chunk: `${api_host}/chunk/switch`,
  rm_chunk: `${api_host}/chunk/rm`,
  retrieval_test: `${api_host}/chunk/retrieval_test`,
  knowledge_graph: `${api_host}/chunk/knowledge_graph`,

  // document
  get_document_list: `${api_host}/document/list`,
  document_change_status: `${api_host}/document/change_status`,
  document_rm: `${api_host}/document/rm`,
  document_delete: `${api_host}/api/document`,
  document_rename: `${api_host}/document/rename`,
  document_create: `${api_host}/document/create`,
  document_run: `${api_host}/document/run`,
  document_change_parser: `${api_host}/document/change_parser`,
  document_thumbnails: `${api_host}/document/thumbnails`,
  get_document_file: `${api_host}/document/get`,
  document_upload: `${api_host}/document/upload`,
  web_crawl: `${api_host}/document/web_crawl`,
  document_infos: `${api_host}/document/infos`,
  upload_and_parse: `${api_host}/document/upload_and_parse`,
  parse: `${api_host}/document/parse`,
  setMeta: `${api_host}/document/set_meta`,
  get_dataset_filter: `${api_host}/document/filter`,

  // chat
  setDialog: `${api_host}/dialog/set`,
  getDialog: `${api_host}/dialog/get`,
  removeDialog: `${api_host}/dialog/rm`,
  listDialog: `${api_host}/dialog/list`,
  setConversation: `${api_host}/conversation/set`,
  getConversation: `${api_host}/conversation/get`,
  getConversationSSE: `${api_host}/conversation/getsse`,
  listConversation: `${api_host}/conversation/list`,
  removeConversation: `${api_host}/conversation/rm`,
  completeConversation: `${api_host}/conversation/completion`,
  deleteMessage: `${api_host}/conversation/delete_msg`,
  thumbup: `${api_host}/conversation/thumbup`,
  tts: `${api_host}/conversation/tts`,
  ask: `${api_host}/conversation/ask`,
  mindmap: `${api_host}/conversation/mindmap`,
  getRelatedQuestions: `${api_host}/conversation/related_questions`,
  // chat for external
  createToken: `${api_host}/api/new_token`,
  listToken: `${api_host}/api/token_list`,
  removeToken: `${api_host}/api/rm`,
  getStats: `${api_host}/api/stats`,
  createExternalConversation: `${api_host}/api/new_conversation`,
  getExternalConversation: `${api_host}/api/conversation`,
  completeExternalConversation: `${api_host}/api/completion`,
  uploadAndParseExternal: `${api_host}/api/document/upload_and_parse`,

  // next chat
  listNextDialog: `${api_host}/dialog/next`,
  fetchExternalChatInfo: (id: string) =>
    `${ExternalApi}${api_host}/chatbots/${id}/info`,

  // file manager
  listFile: `${api_host}/file/list`,
  uploadFile: `${api_host}/file/upload`,
  removeFile: `${api_host}/file/rm`,
  renameFile: `${api_host}/file/rename`,
  getAllParentFolder: `${api_host}/file/all_parent_folder`,
  createFolder: `${api_host}/file/create`,
  connectFileToKnowledge: `${api_host}/file2document/convert`,
  getFile: `${api_host}/file/get`,
  moveFile: `${api_host}/file/mv`,

  // system
  getSystemVersion: `${api_host}/system/version`,
  getSystemStatus: `${api_host}/system/status`,
  getSystemTokenList: `${api_host}/system/token_list`,
  createSystemToken: `${api_host}/system/new_token`,
  listSystemToken: `${api_host}/system/token_list`,
  removeSystemToken: `${api_host}/system/token`,
  getSystemConfig: `${api_host}/system/config`,
  setLangfuseConfig: `${api_host}/langfuse/api_key`,

  // flow
  listTemplates: `${api_host}/canvas/templates`,
  listCanvas: `${api_host}/canvas/list`,
  getCanvas: `${api_host}/canvas/get`,
  getCanvasSSE: `${api_host}/canvas/getsse`,
  removeCanvas: `${api_host}/canvas/rm`,
  setCanvas: `${api_host}/canvas/set`,
  settingCanvas: `${api_host}/canvas/setting`,
  getListVersion: `${api_host}/canvas/getlistversion`,
  getVersion: `${api_host}/canvas/getversion`,
  resetCanvas: `${api_host}/canvas/reset`,
  runCanvas: `${api_host}/canvas/completion`,
  testDbConnect: `${api_host}/canvas/test_db_connect`,
  getInputElements: `${api_host}/canvas/input_elements`,
  debug: `${api_host}/canvas/debug`,
  uploadCanvasFile: `${api_host}/canvas/upload`,
  trace: `${api_host}/canvas/trace`,
  // agent
  inputForm: `${api_host}/canvas/input_form`,
  fetchVersionList: (id: string) => `${api_host}/canvas/getlistversion/${id}`,
  fetchVersion: (id: string) => `${api_host}/canvas/getversion/${id}`,
  fetchCanvas: (id: string) => `${api_host}/canvas/get/${id}`,
  fetchAgentAvatar: (id: string) => `${api_host}/canvas/getsse/${id}`,
  uploadAgentFile: (id?: string) => `${api_host}/canvas/upload/${id}`,
  fetchAgentLogs: (canvasId: string) =>
    `${api_host}/canvas/${canvasId}/sessions`,
  fetchExternalAgentInputs: (canvasId: string) =>
    `${ExternalApi}${api_host}/agentbots/${canvasId}/inputs`,
  prompt: `${api_host}/canvas/prompts`,
  cancelDataflow: (id: string) => `${api_host}/canvas/cancel/${id}`,
  downloadFile: `${api_host}/canvas/download`,

  // mcp server
  listMcpServer: `${api_host}/mcp_server/list`,
  getMcpServer: `${api_host}/mcp_server/detail`,
  createMcpServer: `${api_host}/mcp_server/create`,
  updateMcpServer: `${api_host}/mcp_server/update`,
  deleteMcpServer: `${api_host}/mcp_server/rm`,
  importMcpServer: `${api_host}/mcp_server/import`,
  exportMcpServer: `${api_host}/mcp_server/export`,
  listMcpServerTools: `${api_host}/mcp_server/list_tools`,
  testMcpServerTool: `${api_host}/mcp_server/test_tool`,
  cacheMcpServerTool: `${api_host}/mcp_server/cache_tools`,
  testMcpServer: `${api_host}/mcp_server/test_mcp`,

  // next-search
  createSearch: `${api_host}/search/create`,
  getSearchList: `${api_host}/search/list`,
  deleteSearch: `${api_host}/search/rm`,
  getSearchDetail: `${api_host}/search/detail`,
  getSearchDetailShare: `${ExternalApi}${api_host}/searchbots/detail`,
  updateSearchSetting: `${api_host}/search/update`,
  askShare: `${ExternalApi}${api_host}/searchbots/ask`,
  mindmapShare: `${ExternalApi}${api_host}/searchbots/mindmap`,
  getRelatedQuestionsShare: `${ExternalApi}${api_host}/searchbots/related_questions`,
  retrievalTestShare: `${ExternalApi}${api_host}/searchbots/retrieval_test`,

  // data pipeline
  fetchDataflow: (id: string) => `${api_host}/dataflow/get/${id}`,
  setDataflow: `${api_host}/dataflow/set`,
  removeDataflow: `${api_host}/dataflow/rm`,
  listDataflow: `${api_host}/dataflow/list`,
};
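The file above centralizes every backend route in a single exported map: plain endpoints are strings and per-resource endpoints are builder functions. A quick usage sketch (illustrative, not part of the commit; assumes the project's `@/utils` path alias and the axios wrapper added below in `next-request.ts`, and the canvas id is a placeholder):

import api from '@/utils/api';
import request from '@/utils/next-request';

async function loadKnowledgeBases() {
  // Static endpoints are plain strings; parameterized ones are builder functions.
  const { data } = await request.get(api.kb_list);
  const canvasUrl = api.fetchCanvas('canvas-id-123'); // '/v1/canvas/get/canvas-id-123'
  return { data, canvasUrl };
}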
63
ragflow_web/src/utils/authorization-util.ts
Normal file
@@ -0,0 +1,63 @@
import { Authorization, Token, UserInfo } from '@/constants/authorization';
import { getSearchValue } from './common-util';
const KeySet = [Authorization, Token, UserInfo];

const storage = {
  getAuthorization: () => {
    return localStorage.getItem(Authorization);
  },
  getToken: () => {
    return localStorage.getItem(Token);
  },
  getUserInfo: () => {
    return localStorage.getItem(UserInfo);
  },
  getUserInfoObject: () => {
    return JSON.parse(localStorage.getItem('userInfo') || '');
  },
  setAuthorization: (value: string) => {
    localStorage.setItem(Authorization, value);
  },
  setToken: (value: string) => {
    localStorage.setItem(Token, value);
  },
  setUserInfo: (value: string | Record<string, unknown>) => {
    let valueStr = typeof value !== 'string' ? JSON.stringify(value) : value;
    localStorage.setItem(UserInfo, valueStr);
  },
  setItems: (pairs: Record<string, string>) => {
    Object.entries(pairs).forEach(([key, value]) => {
      localStorage.setItem(key, value);
    });
  },
  removeAuthorization: () => {
    localStorage.removeItem(Authorization);
  },
  removeAll: () => {
    KeySet.forEach((x) => {
      localStorage.removeItem(x);
    });
  },
  setLanguage: (lng: string) => {
    localStorage.setItem('lng', lng);
  },
  getLanguage: (): string => {
    return localStorage.getItem('lng') as string;
  },
};

export const getAuthorization = () => {
  const auth = getSearchValue('auth');
  const authorization = auth
    ? 'Bearer ' + auth
    : storage.getAuthorization() || '';

  return authorization;
};

export default storage;

// Redirect to the login page
export function redirectToLogin() {
  window.location.href = location.origin + `/login`;
}
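A short sketch of how the storage helper and `getAuthorization` are typically combined (illustrative only; the token string is a placeholder). Note that a `?auth=...` URL parameter, when present, takes precedence over the value persisted in localStorage:

import storage, { getAuthorization } from '@/utils/authorization-util';

// After login, persist the credentials the backend returned.
storage.setAuthorization('Bearer <token-from-login-response>');
storage.setUserInfo({ nickname: 'demo' });

// Later requests read the header value back; the ?auth= query parameter wins if set.
const header = getAuthorization();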
75
ragflow_web/src/utils/canvas-util.tsx
Normal file
@@ -0,0 +1,75 @@
import { BaseNode } from '@/interfaces/database/agent';
import { Edge } from '@xyflow/react';
import { isEmpty } from 'lodash';
import { ComponentType, ReactNode } from 'react';

export function filterAllUpstreamNodeIds(edges: Edge[], nodeIds: string[]) {
  return nodeIds.reduce<string[]>((pre, nodeId) => {
    const currentEdges = edges.filter((x) => x.target === nodeId);

    const upstreamNodeIds: string[] = currentEdges.map((x) => x.source);

    const ids = upstreamNodeIds.concat(
      filterAllUpstreamNodeIds(edges, upstreamNodeIds),
    );

    ids.forEach((x) => {
      if (pre.every((y) => y !== x)) {
        pre.push(x);
      }
    });

    return pre;
  }, []);
}

export function buildOutputOptions(
  outputs: Record<string, any> = {},
  nodeId?: string,
  parentLabel?: string | ReactNode,
  icon?: ReactNode,
) {
  return Object.keys(outputs).map((x) => ({
    label: x,
    value: `${nodeId}@${x}`,
    parentLabel,
    icon,
    type: outputs[x]?.type,
  }));
}

export function buildNodeOutputOptions({
  nodes,
  edges,
  nodeId,
  Icon,
}: {
  nodes: BaseNode[];
  edges: Edge[];
  nodeId?: string;
  Icon: ComponentType<{ name: string }>;
}) {
  if (!nodeId) {
    return [];
  }
  const upstreamIds = filterAllUpstreamNodeIds(edges, [nodeId]);

  const nodeWithOutputList = nodes.filter(
    (x) =>
      upstreamIds.some((y) => y === x.id) && !isEmpty(x.data?.form?.outputs),
  );

  return nodeWithOutputList
    .filter((x) => x.id !== nodeId)
    .map((x) => ({
      label: x.data.name,
      value: x.id,
      title: x.data.name,
      options: buildOutputOptions(
        x.data.form.outputs,
        x.id,
        x.data.name,
        <Icon name={x.data.name} />,
      ),
    }));
}
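`filterAllUpstreamNodeIds` walks the edge list backwards from a node, collecting transitive sources and deduplicating them. A tiny illustration with hypothetical node ids (not part of the commit):

import { Edge } from '@xyflow/react';
import { filterAllUpstreamNodeIds } from '@/utils/canvas-util';

const edges = [
  { id: 'e1', source: 'begin', target: 'retrieval' },
  { id: 'e2', source: 'retrieval', target: 'generate' },
] as Edge[];

// Upstream of 'generate' is 'retrieval' and, transitively, 'begin'.
filterAllUpstreamNodeIds(edges, ['generate']); // ['retrieval', 'begin']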
86
ragflow_web/src/utils/chat.ts
Normal file
@@ -0,0 +1,86 @@
import {
  ChatVariableEnabledField,
  EmptyConversationId,
} from '@/constants/chat';
import { Message } from '@/interfaces/database/chat';
import { IMessage } from '@/pages/chat/interface';
import { omit } from 'lodash';
import { v4 as uuid } from 'uuid';

export const isConversationIdExist = (conversationId: string) => {
  return conversationId !== EmptyConversationId && conversationId !== '';
};

export const buildMessageUuid = (message: Partial<Message | IMessage>) => {
  if ('id' in message && message.id) {
    return message.id;
  }
  return uuid();
};

export const buildMessageListWithUuid = (messages?: Message[]) => {
  return (
    messages?.map((x: Message | IMessage) => ({
      ...omit(x, 'reference'),
      id: buildMessageUuid(x),
    })) ?? []
  );
};

export const getConversationId = () => {
  return uuid().replace(/-/g, '');
};

// When rendering each message, add a prefix to the id to ensure uniqueness.
export const buildMessageUuidWithRole = (
  message: Partial<Message | IMessage>,
) => {
  return `${message.role}_${message.id}`;
};

// Preprocess LaTeX equations to be rendered by KaTeX
// ref: https://github.com/remarkjs/react-markdown/issues/785

export const preprocessLaTeX = (content: string) => {
  const blockProcessedContent = content.replace(
    /\\\[([\s\S]*?)\\\]/g,
    (_, equation) => `$$${equation}$$`,
  );
  const inlineProcessedContent = blockProcessedContent.replace(
    /\\\(([\s\S]*?)\\\)/g,
    (_, equation) => `$${equation}$`,
  );
  return inlineProcessedContent;
};

export function replaceThinkToSection(text: string = '') {
  const pattern = /<think>([\s\S]*?)<\/think>/g;

  const result = text.replace(pattern, '<section class="think">$1</section>');

  return result;
}

export function setInitialChatVariableEnabledFieldValue(
  field: ChatVariableEnabledField,
) {
  return false;
  return field !== ChatVariableEnabledField.MaxTokensEnabled;
}

const ShowImageFields = ['image', 'table'];

export function showImage(field?: string) {
  return ShowImageFields.some((x) => x === field);
}

export function setChatVariableEnabledFieldValuePage() {
  const variableCheckBoxFieldMap = Object.values(
    ChatVariableEnabledField,
  ).reduce<Record<string, boolean>>((pre, cur) => {
    pre[cur] = cur !== ChatVariableEnabledField.MaxTokensEnabled;
    return pre;
  }, {});

  return variableCheckBoxFieldMap;
}
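`preprocessLaTeX` rewrites `\[...\]` and `\(...\)` delimiters into the `$$`/`$` form that KaTeX-enabled markdown expects, and `replaceThinkToSection` turns `<think>` blocks into styled sections. Example transformations (illustrative, not part of the commit):

import { preprocessLaTeX, replaceThinkToSection } from '@/utils/chat';

preprocessLaTeX('Euler: \\(e^{i\\pi} + 1 = 0\\)');
// -> 'Euler: $e^{i\pi} + 1 = 0$'

replaceThinkToSection('<think>draft reasoning</think>final answer');
// -> '<section class="think">draft reasoning</section>final answer'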
247
ragflow_web/src/utils/common-util.ts
Normal file
@@ -0,0 +1,247 @@
import { LLMFactory } from '@/constants/llm';
import { IFactory } from '@/interfaces/database/llm';
import isObject from 'lodash/isObject';
import snakeCase from 'lodash/snakeCase';

export const isFormData = (data: unknown): data is FormData => {
  return data instanceof FormData;
};

const excludedFields = ['img2txt_id', 'mcpServers'];

const isExcludedField = (key: string) => {
  return excludedFields.includes(key);
};

export const convertTheKeysOfTheObjectToSnake = (data: unknown) => {
  if (isObject(data) && !isFormData(data)) {
    return Object.keys(data).reduce<Record<string, any>>((pre, cur) => {
      const value = (data as Record<string, any>)[cur];
      pre[isFormData(value) || isExcludedField(cur) ? cur : snakeCase(cur)] =
        value;
      return pre;
    }, {});
  }
  return data;
};

export const getSearchValue = (key: string) => {
  const params = new URL(document.location as any).searchParams;
  return params.get(key);
};

// Format numbers by adding thousands separators
export const formatNumberWithThousandsSeparator = (numberStr: string) => {
  const formattedNumber = numberStr.replace(/\B(?=(\d{3})+(?!\d))/g, ',');
  return formattedNumber;
};

const orderFactoryList = [
  LLMFactory.OpenAI,
  LLMFactory.Moonshot,
  LLMFactory.PPIO,
  LLMFactory.ZhipuAI,
  LLMFactory.Ollama,
  LLMFactory.Xinference,
  LLMFactory.Ai302,
  LLMFactory.CometAPI,
  LLMFactory.DeerAPI,
];

export const sortLLmFactoryListBySpecifiedOrder = (list: IFactory[]) => {
  const finalList: IFactory[] = [];
  orderFactoryList.forEach((orderItem) => {
    const index = list.findIndex((item) => item.name === orderItem);
    if (index !== -1) {
      finalList.push(list[index]);
    }
  });

  list.forEach((item) => {
    if (finalList.every((x) => x.name !== item.name)) {
      finalList.push(item);
    }
  });

  return finalList;
};

export const filterOptionsByInput = (
  input: string,
  option: { label: string; value: string } | undefined,
) => (option?.label ?? '').toLowerCase().includes(input.toLowerCase());

export const toFixed = (value: unknown, fixed = 2) => {
  if (typeof value === 'number') {
    return value.toFixed(fixed);
  }
  return value;
};

export const stringToUint8Array = (str: string) => {
  // const byteString = str.replace(/b'|'/g, '');
  const byteString = str.slice(2, -1);

  const uint8Array = new Uint8Array(byteString.length);
  for (let i = 0; i < byteString.length; i++) {
    uint8Array[i] = byteString.charCodeAt(i);
  }

  return uint8Array;
};

export const hexStringToUint8Array = (hex: string) => {
  const arr = hex.match(/[\da-f]{2}/gi);
  if (Array.isArray(arr)) {
    return new Uint8Array(
      arr.map(function (h) {
        return parseInt(h, 16);
      }),
    );
  }
};

export function hexToArrayBuffer(input: string) {
  if (typeof input !== 'string') {
    throw new TypeError('Expected input to be a string');
  }

  if (input.length % 2 !== 0) {
    throw new RangeError('Expected string to be an even number of characters');
  }

  const view = new Uint8Array(input.length / 2);

  for (let i = 0; i < input.length; i += 2) {
    view[i / 2] = parseInt(input.substring(i, i + 2), 16);
  }

  return view.buffer;
}

export function formatFileSize(bytes: number, si = true, dp = 1) {
  let nextBytes = bytes;
  const thresh = si ? 1000 : 1024;

  if (Math.abs(bytes) < thresh) {
    return nextBytes + ' B';
  }

  const units = si
    ? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
    : ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
  let u = -1;
  const r = 10 ** dp;

  do {
    nextBytes /= thresh;
    ++u;
  } while (
    Math.round(Math.abs(nextBytes) * r) / r >= thresh &&
    u < units.length - 1
  );

  return nextBytes.toFixed(dp) + ' ' + units[u];
}

// Get the actual color value of a CSS variable
function getCSSVariableValue(variableName: string): string {
  const computedStyle = getComputedStyle(document.documentElement);
  const value = computedStyle.getPropertyValue(variableName).trim();
  if (!value) {
    throw new Error(`CSS variable ${variableName} is not defined`);
  }
  return value;
}

/** Parse the color and convert to RGB,
 * #fff -> [255, 255, 255]
 * var(--text-primary) -> [var(--text-primary-r), var(--text-primary-g), var(--text-primary-b)]
 * */
export function parseColorToRGB(color: string): [number, number, number] {
  // Handling CSS variables (e.g. var(--accent-primary))
  let colorStr = color;
  if (colorStr.startsWith('var(')) {
    const varMatch = color.match(/var\(([^)]+)\)/);
    if (!varMatch) {
      console.error(`Invalid CSS variable: ${color}`);
      return [0, 0, 0];
    }
    const varName = varMatch[1];
    if (!varName) {
      console.error(`Invalid CSS variable: ${colorStr}`);
      return [0, 0, 0];
    }
    colorStr = getCSSVariableValue(varName);
  }

  // Handle rgb(var(--accent-primary)) format
  if (colorStr.startsWith('rgb(var(')) {
    const varMatch = colorStr.match(/rgb\(var\(([^)]+)\)\)/);
    if (!varMatch) {
      console.error(`Invalid nested CSS variable: ${color}`);
      return [0, 0, 0];
    }
    const varName = varMatch[1];
    if (!varName) {
      console.error(`Invalid nested CSS variable: ${colorStr}`);
      return [0, 0, 0];
    }
    // Get the CSS variable value which should be in format "r, g, b"
    const rgbValues = getCSSVariableValue(varName);
    const rgbMatch = rgbValues.match(/^(\d+),?\s*(\d+),?\s*(\d+)$/);
    if (rgbMatch) {
      return [
        parseInt(rgbMatch[1]),
        parseInt(rgbMatch[2]),
        parseInt(rgbMatch[3]),
      ];
    }
    console.error(`Unsupported RGB CSS variable format: ${rgbValues}`);
    return [0, 0, 0];
  }

  // Handles hexadecimal colors (e.g. #FF5733)
  if (colorStr.startsWith('#')) {
    const cleanedHex = colorStr.replace(/^#/, '');
    if (cleanedHex.length === 3) {
      return [
        parseInt(cleanedHex[0] + cleanedHex[0], 16),
        parseInt(cleanedHex[1] + cleanedHex[1], 16),
        parseInt(cleanedHex[2] + cleanedHex[2], 16),
      ];
    }
    return [
      parseInt(cleanedHex.slice(0, 2), 16),
      parseInt(cleanedHex.slice(2, 4), 16),
      parseInt(cleanedHex.slice(4, 6), 16),
    ];
  }

  // Handling RGB colors (e.g., rgb(255, 87, 51))
  if (colorStr.startsWith('rgb')) {
    const rgbMatch = colorStr.match(/rgb\((\d+),\s*(\d+),\s*(\d+)\)/);
    if (rgbMatch) {
      return [
        parseInt(rgbMatch[1]),
        parseInt(rgbMatch[2]),
        parseInt(rgbMatch[3]),
      ];
    }
    console.error(`Unsupported RGB format: ${colorStr}`);
    return [0, 0, 0];
  }
  console.error(`Unsupported colorStr format: ${colorStr}`);
  return [0, 0, 0];
}

/**
 *
 * @param color eg: #fff, or var(--color-text-primary)
 * @param opcity opacity, 0~1
 * @return rgba(r,g,b,opcity)
 */
export function parseColorToRGBA(color: string, opcity = 1): string {
  const [r, g, b] = parseColorToRGB(color);
  return `rgba(${r},${g},${b},${opcity})`;
}
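Two of the helpers above in action: `convertTheKeysOfTheObjectToSnake` (used by both request layers to snake_case outgoing payload keys) and `formatFileSize`. Expected values shown in comments (illustrative, not part of the commit):

import {
  convertTheKeysOfTheObjectToSnake,
  formatFileSize,
} from '@/utils/common-util';

convertTheKeysOfTheObjectToSnake({ kbId: '1', parserConfig: {} });
// -> { kb_id: '1', parser_config: {} } (only top-level keys are converted)

formatFileSize(1536);        // '1.5 kB' (SI units by default)
formatFileSize(1536, false); // '1.5 KiB'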
13
ragflow_web/src/utils/component-util.ts
Normal file
@@ -0,0 +1,13 @@
export function buildSelectOptions(
  list: Array<any>,
  keyName?: string,
  valueName?: string,
) {
  if (!Array.isArray(list) || !list.length) {
    return [];
  }
  if (keyName && valueName) {
    return list.map((x) => ({ label: x[valueName], value: x[keyName] }));
  }
  return list.map((x) => ({ label: x, value: x }));
}
9
ragflow_web/src/utils/dataset-util.ts
Normal file
@@ -0,0 +1,9 @@
import { DocumentParserType } from '@/constants/knowledge';

export function isKnowledgeGraphParser(parserId: DocumentParserType) {
  return parserId === DocumentParserType.KnowledgeGraph;
}

export function isNaiveParser(parserId: DocumentParserType) {
  return parserId === DocumentParserType.Naive;
}
63
ragflow_web/src/utils/date.ts
Normal file
@@ -0,0 +1,63 @@
import dayjs from 'dayjs';

export function formatDate(date: any) {
  if (!date) {
    return '';
  }
  return dayjs(date).format('DD/MM/YYYY HH:mm:ss');
}

export function formatTime(date: any) {
  if (!date) {
    return '';
  }
  return dayjs(date).format('HH:mm:ss');
}

export function today() {
  return formatDate(dayjs());
}

export function lastDay() {
  return formatDate(dayjs().subtract(1, 'days'));
}

export function lastWeek() {
  return formatDate(dayjs().subtract(1, 'weeks'));
}

export function formatPureDate(date: any) {
  if (!date) {
    return '';
  }
  return dayjs(date).format('DD/MM/YYYY');
}

export function formatStandardDate(date: any) {
  if (!date) {
    return '';
  }
  const parsedDate = dayjs(date);
  if (!parsedDate.isValid()) {
    return '';
  }
  return parsedDate.format('YYYY-MM-DD');
}

export function formatSecondsToHumanReadable(seconds: number): string {
  if (isNaN(seconds) || seconds < 0) {
    return '0s';
  }

  const h = Math.floor(seconds / 3600);
  const m = Math.floor((seconds % 3600) / 60);
  // const s = toFixed(seconds % 60, 3);
  const s = seconds % 60;
  const formattedSeconds = s === 0 ? '0' : s.toFixed(3).replace(/\.?0+$/, '');
  const parts = [];
  if (h > 0) parts.push(`${h}h `);
  if (m > 0) parts.push(`${m}m `);
  if (s || parts.length === 0) parts.push(`${formattedSeconds}s`);

  return parts.join('');
}
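Sample outputs for the duration formatter (illustrative, not part of the commit):

import { formatSecondsToHumanReadable } from '@/utils/date';

formatSecondsToHumanReadable(3723.5); // '1h 2m 3.5s'
formatSecondsToHumanReadable(45);     // '45s'
formatSecondsToHumanReadable(-1);     // '0s'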
63
ragflow_web/src/utils/document-util.ts
Normal file
@@ -0,0 +1,63 @@
import { Images, SupportedPreviewDocumentTypes } from '@/constants/common';
import { IReferenceChunk } from '@/interfaces/database/chat';
import { IChunk } from '@/interfaces/database/knowledge';
import { UploadFile } from 'antd';
import { get } from 'lodash';
import { v4 as uuid } from 'uuid';

export const buildChunkHighlights = (
  selectedChunk: IChunk | IReferenceChunk,
  size: { width: number; height: number },
) => {
  return Array.isArray(selectedChunk?.positions) &&
    selectedChunk.positions.every((x) => Array.isArray(x))
    ? selectedChunk?.positions?.map((x) => {
        const boundingRect = {
          width: size.width,
          height: size.height,
          x1: x[1],
          x2: x[2],
          y1: x[3],
          y2: x[4],
        };
        return {
          id: uuid(),
          comment: {
            text: '',
            emoji: '',
          },
          content: {
            text:
              get(selectedChunk, 'content_with_weight') ||
              get(selectedChunk, 'content', ''),
          },
          position: {
            boundingRect: boundingRect,
            rects: [boundingRect],
            pageNumber: x[0],
          },
        };
      })
    : [];
};

export const isFileUploadDone = (file: UploadFile) => file.status === 'done';

export const getExtension = (name: string) =>
  name?.slice(name.lastIndexOf('.') + 1).toLowerCase() ?? '';

export const isPdf = (name: string) => {
  return getExtension(name) === 'pdf';
};

export const getUnSupportedFilesCount = (message: string) => {
  return message.split('\n').length;
};

export const isSupportedPreviewDocumentType = (fileExtension: string) => {
  return SupportedPreviewDocumentTypes.includes(fileExtension);
};

export const isImage = (image: string) => {
  return [...Images, 'svg'].some((x) => x === image);
};
3
ragflow_web/src/utils/dom-util.ts
Normal file
@@ -0,0 +1,3 @@
export const scrollToBottom = (element: HTMLElement) => {
  element.scrollTo(0, element.scrollHeight);
};
183
ragflow_web/src/utils/file-util.ts
Normal file
@@ -0,0 +1,183 @@
import { FileMimeType } from '@/constants/common';
import fileManagerService from '@/services/file-manager-service';
import { UploadFile } from 'antd';

export const transformFile2Base64 = (val: any): Promise<any> => {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(val);
    reader.onload = (): void => {
      // Create image object
      const img = new Image();
      img.src = reader.result as string;

      img.onload = () => {
        // Create canvas
        const canvas = document.createElement('canvas');
        const ctx = canvas.getContext('2d');

        // Calculate compressed dimensions, capping width/height at maxSize (100px)
        let width = img.width;
        let height = img.height;
        const maxSize = 100;

        if (width > height && width > maxSize) {
          height = (height * maxSize) / width;
          width = maxSize;
        } else if (height > maxSize) {
          width = (width * maxSize) / height;
          height = maxSize;
        }

        // Set canvas dimensions
        canvas.width = width;
        canvas.height = height;

        // Draw image
        ctx?.drawImage(img, 0, 0, width, height);

        // Convert to base64 (PNG output keeps transparency)
        const compressedBase64 = canvas.toDataURL('image/png');
        resolve(compressedBase64);
      };

      img.onerror = reject;
    };
    reader.onerror = reject;
  });
};

export const transformBase64ToFile = (
  dataUrl: string,
  filename: string = 'file',
) => {
  let arr = dataUrl.split(','),
    bstr = atob(arr[1]),
    n = bstr.length,
    u8arr = new Uint8Array(n);

  const mime = arr[0].match(/:(.*?);/);
  const mimeType = mime ? mime[1] : 'image/png';

  while (n--) {
    u8arr[n] = bstr.charCodeAt(n);
  }
  return new File([u8arr], filename, { type: mimeType });
};

export const normFile = (e: any) => {
  if (Array.isArray(e)) {
    return e;
  }
  return e?.fileList;
};

export const getUploadFileListFromBase64 = (avatar: string) => {
  let fileList: UploadFile[] = [];

  if (avatar) {
    fileList = [{ uid: '1', name: 'file', thumbUrl: avatar, status: 'done' }];
  }

  return fileList;
};

export const getBase64FromUploadFileList = async (fileList?: UploadFile[]) => {
  if (Array.isArray(fileList) && fileList.length > 0) {
    const file = fileList[0];
    const originFileObj = file.originFileObj;
    if (originFileObj) {
      const base64 = await transformFile2Base64(originFileObj);
      return base64;
    } else {
      return file.thumbUrl;
    }
    // return fileList[0].thumbUrl; TODO: Even JPG files will be converted to base64 parameters in png format
  }

  return '';
};

async function fetchDocumentBlob(id: string, mimeType?: FileMimeType) {
  const response = await fileManagerService.getDocumentFile({}, id);
  const blob = new Blob([response.data], {
    type: mimeType || response.data.type,
  });

  return blob;
}

export async function previewHtmlFile(id: string) {
  const blob = await fetchDocumentBlob(id, FileMimeType.Html);
  const url = URL.createObjectURL(blob);
  const link = document.createElement('a');
  link.href = url;
  link.click();
  URL.revokeObjectURL(url);
}

export const downloadFileFromBlob = (blob: Blob, name?: string) => {
  const url = window.URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.href = url;
  if (name) {
    a.download = name;
  }
  a.click();
  window.URL.revokeObjectURL(url);
};

export const downloadDocument = async ({
  id,
  filename,
}: {
  id: string;
  filename?: string;
}) => {
  const blob = await fetchDocumentBlob(id);
  downloadFileFromBlob(blob, filename);
};

const Units = ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];

export const formatBytes = (x: string | number) => {
  let l = 0,
    n = (typeof x === 'string' ? parseInt(x, 10) : x) || 0;

  while (n >= 1024 && ++l) {
    n = n / 1024;
  }

  return n.toFixed(n < 10 && l > 0 ? 1 : 0) + ' ' + Units[l];
};

export const downloadJsonFile = async (
  data: Record<string, any>,
  fileName: string,
) => {
  const blob = new Blob([JSON.stringify(data)], { type: FileMimeType.Json });
  downloadFileFromBlob(blob, fileName);
};

export function transformBase64ToFileWithPreview(
  dataUrl: string,
  filename: string = 'file',
) {
  const file = transformBase64ToFile(dataUrl, filename);

  (file as any).preview = dataUrl;

  return file;
}

export const getBase64FromFileList = async (fileList?: File[]) => {
  if (Array.isArray(fileList) && fileList.length > 0) {
    const file = fileList[0];
    if (file) {
      const base64 = await transformFile2Base64(file);
      return base64;
    }
  }

  return '';
};
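A small sketch of the download helpers above (illustrative, not part of the commit; the object and file name are placeholders):

import { downloadJsonFile, formatBytes } from '@/utils/file-util';

// Save an object as a .json download and render a human-readable size.
downloadJsonFile({ name: 'ragflow', kind: 'export' }, 'export.json');
formatBytes(10 * 1024 * 1024); // '10 MB'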
64
ragflow_web/src/utils/form.ts
Normal file
@@ -0,0 +1,64 @@
import { variableEnabledFieldMap } from '@/constants/chat';
import { TFunction } from 'i18next';
import omit from 'lodash/omit';

// chat model setting and generate operator
export const excludeUnEnabledVariables = (
  values: any = {},
  prefix = 'llm_setting.',
) => {
  const unEnabledFields: Array<keyof typeof variableEnabledFieldMap> =
    Object.keys(variableEnabledFieldMap).filter((key) => !values[key]) as Array<
      keyof typeof variableEnabledFieldMap
    >;

  return unEnabledFields.map(
    (key) => `${prefix}${variableEnabledFieldMap[key]}`,
  );
};

// chat model setting and generate operator
export const removeUselessFieldsFromValues = (values: any, prefix?: string) => {
  const nextValues: any = omit(values, [
    ...Object.keys(variableEnabledFieldMap),
    'parameter',
    ...excludeUnEnabledVariables(values, prefix),
  ]);

  return nextValues;
};

export function buildOptions(
  data: Record<string, any>,
  t?: TFunction<['translation', ...string[]], undefined>,
  prefix?: string,
) {
  if (t) {
    return Object.values(data).map((val) => ({
      label: t(
        `${prefix ? prefix + '.' : ''}${typeof val === 'string' ? val.toLowerCase() : val}`,
      ),
      value: val,
    }));
  }
  return Object.values(data).map((val) => ({ label: val, value: val }));
}

export function setLLMSettingEnabledValues(
  initialLlmSetting?: Record<string, any>,
) {
  const values = Object.keys(variableEnabledFieldMap).reduce<
    Record<string, boolean>
  >((pre, field) => {
    pre[field] =
      initialLlmSetting === undefined
        ? false
        : !!initialLlmSetting[
            variableEnabledFieldMap[
              field as keyof typeof variableEnabledFieldMap
            ]
          ];
    return pre;
  }, {});
  return values;
}
30
ragflow_web/src/utils/index.ts
Normal file
@@ -0,0 +1,30 @@
/**
 * @param {String} url
 * @param {Boolean} isNoCaseSensitive whether to ignore case
 * @return {Object}
 */
// import numeral from 'numeral';

import { Base64 } from 'js-base64';
import JSEncrypt from 'jsencrypt';

export const getWidth = () => {
  return { width: window.innerWidth };
};
export const rsaPsw = (password: string) => {
  const pub =
    '-----BEGIN PUBLIC KEY-----MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArq9XTUSeYr2+N1h3Afl/z8Dse/2yD0ZGrKwx+EEEcdsBLca9Ynmx3nIB5obmLlSfmskLpBo0UACBmB5rEjBp2Q2f3AG3Hjd4B+gNCG6BDaawuDlgANIhGnaTLrIqWrrcm4EMzJOnAOI1fgzJRsOOUEfaS318Eq9OVO3apEyCCt0lOQK6PuksduOjVxtltDav+guVAA068NrPYmRNabVKRNLJpL8w4D44sfth5RvZ3q9t+6RTArpEtc5sh5ChzvqPOzKGMXW83C95TxmXqpbK6olN4RevSfVjEAgCydH6HN6OhtOQEcnrU97r9H0iZOWwbw3pVrZiUkuRD1R56Wzs2wIDAQAB-----END PUBLIC KEY-----';
  const encryptor = new JSEncrypt();

  encryptor.setPublicKey(pub);

  return encryptor.encrypt(Base64.encode(password));
};

export default {
  getWidth,
  rsaPsw,
};

export const getFileExtension = (filename: string) =>
  filename.slice(filename.lastIndexOf('.') + 1).toLowerCase();
29
ragflow_web/src/utils/list-filter-util.ts
Normal file
@@ -0,0 +1,29 @@
export type FilterType = {
  id: string;
  label: string;
  count: number;
};

export function groupListByType<T extends Record<string, any>>(
  list: T[],
  idField: string,
  labelField: string,
) {
  const fileTypeList: FilterType[] = [];
  list.forEach((x) => {
    const item = fileTypeList.find((y) => y.id === x[idField]);
    if (!item) {
      fileTypeList.push({ id: x[idField], label: x[labelField], count: 1 });
    } else {
      item.count += 1;
    }
  });

  return fileTypeList;
}

export function buildOwnersFilter<T extends Record<string, any>>(list: T[]) {
  const owners = groupListByType(list, 'tenant_id', 'nickname');

  return { field: 'owner', list: owners, label: 'Owner' };
}
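`groupListByType` counts rows per distinct value of a field, and `buildOwnersFilter` uses it to build the owner facet. A tiny example with hypothetical rows (not part of the commit):

import { buildOwnersFilter } from '@/utils/list-filter-util';

const rows = [
  { tenant_id: 't1', nickname: 'alice' },
  { tenant_id: 't1', nickname: 'alice' },
  { tenant_id: 't2', nickname: 'bob' },
];

buildOwnersFilter(rows);
// -> { field: 'owner', label: 'Owner',
//      list: [{ id: 't1', label: 'alice', count: 2 }, { id: 't2', label: 'bob', count: 1 }] }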
24
ragflow_web/src/utils/llm-util.ts
Normal file
@@ -0,0 +1,24 @@
import { IThirdOAIModel } from '@/interfaces/database/llm';

export const getLLMIconName = (fid: string, llm_name: string) => {
  if (fid === 'FastEmbed') {
    return llm_name.split('/').at(0) ?? '';
  }

  return fid;
};

export const getLlmNameAndFIdByLlmId = (llmId?: string) => {
  const [llmName, fId] = llmId?.split('@') || [];

  return { fId, llmName };
};

// Model names returned by the API look like "deepseek-r1___OpenAI-API"
export function getRealModelName(llmName: string) {
  return llmName.split('__').at(0) ?? '';
}

export function buildLlmUuid(llm: IThirdOAIModel) {
  return `${llm.llm_name}@${llm.fid}`;
}
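The id helpers assume the `<model>@<factory>` convention used by the backend. Illustrative calls (not part of the commit):

import { getLlmNameAndFIdByLlmId, getRealModelName } from '@/utils/llm-util';

getLlmNameAndFIdByLlmId('deepseek-r1@DeepSeek');
// -> { llmName: 'deepseek-r1', fId: 'DeepSeek' }

getRealModelName('deepseek-r1___OpenAI-API');
// -> 'deepseek-r1' (everything from the underscore separator on is dropped)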
146
ragflow_web/src/utils/next-request.ts
Normal file
@@ -0,0 +1,146 @@
import message from '@/components/ui/message';
import { Authorization } from '@/constants/authorization';
import i18n from '@/locales/config';
import authorizationUtil, {
  getAuthorization,
  redirectToLogin,
} from '@/utils/authorization-util';
import { notification } from 'antd';
import axios from 'axios';
import { convertTheKeysOfTheObjectToSnake } from './common-util';

const FAILED_TO_FETCH = 'Failed to fetch';

export const RetcodeMessage = {
  200: i18n.t('message.200'),
  201: i18n.t('message.201'),
  202: i18n.t('message.202'),
  204: i18n.t('message.204'),
  400: i18n.t('message.400'),
  401: i18n.t('message.401'),
  403: i18n.t('message.403'),
  404: i18n.t('message.404'),
  406: i18n.t('message.406'),
  410: i18n.t('message.410'),
  413: i18n.t('message.413'),
  422: i18n.t('message.422'),
  500: i18n.t('message.500'),
  502: i18n.t('message.502'),
  503: i18n.t('message.503'),
  504: i18n.t('message.504'),
};
export type ResultCode =
  | 200
  | 201
  | 202
  | 204
  | 400
  | 401
  | 403
  | 404
  | 406
  | 410
  | 413
  | 422
  | 500
  | 502
  | 503
  | 504;

const errorHandler = (error: {
  response: Response;
  message: string;
}): Response => {
  const { response } = error;
  if (error.message === FAILED_TO_FETCH) {
    notification.error({
      description: i18n.t('message.networkAnomalyDescription'),
      message: i18n.t('message.networkAnomaly'),
    });
  } else {
    if (response && response.status) {
      const errorText =
        RetcodeMessage[response.status as ResultCode] || response.statusText;
      const { status, url } = response;
      notification.error({
        message: `${i18n.t('message.requestError')} ${status}: ${url}`,
        description: errorText,
      });
    }
  }
  return response ?? { data: { code: 1999 } };
};

const request = axios.create({
  // errorHandler,
  timeout: 300000,
  // getResponse: true,
});

request.interceptors.request.use(
  (config) => {
    const data = convertTheKeysOfTheObjectToSnake(config.data);
    const params = convertTheKeysOfTheObjectToSnake(config.params);

    const newConfig = { ...config, data, params };

    if (!newConfig.skipToken) {
      newConfig.headers.set(Authorization, getAuthorization());
    }

    return newConfig;
  },
  function (error) {
    return Promise.reject(error);
  },
);

request.interceptors.response.use(
  async (response) => {
    if (response?.status === 413 || response?.status === 504) {
      message.error(RetcodeMessage[response?.status as ResultCode]);
    }

    if (response.config.responseType === 'blob') {
      return response;
    }
    const data = response?.data;
    if (data?.code === 100) {
      message.error(data?.message);
    } else if (data?.code === 401) {
      notification.error({
        message: data?.message,
        description: data?.message,
        duration: 3,
      });
      authorizationUtil.removeAll();
      redirectToLogin();
    } else if (data?.code !== 0) {
      notification.error({
        message: `${i18n.t('message.hint')} : ${data?.code}`,
        description: data?.message,
        duration: 3,
      });
    }
    return response;
  },
  function (error) {
    console.log('🚀 ~ error:', error);
    errorHandler(error);
    return Promise.reject(error);
  },
);

export default request;

export const get = (url: string) => {
  return request.get(url);
};

export const post = (url: string, body: any) => {
  return request.post(url, { data: body });
};

export const drop = () => {};

export const put = () => {};
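The axios wrapper above snake_cases outgoing keys and attaches the Authorization header unless a custom `skipToken` flag is set on the config. A minimal sketch (illustrative only; whether a given endpoint should skip the token is an assumption, and the credentials are placeholders):

import request from '@/utils/next-request';

request.post(
  '/v1/user/login',
  { email: 'demo@example.com', password: '<encrypted>' },
  { skipToken: true } as any, // custom flag read by the request interceptor
);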
81
ragflow_web/src/utils/register-server.ts
Normal file
@@ -0,0 +1,81 @@
/* eslint-disable guard-for-in */
import { AxiosRequestConfig, AxiosResponse } from 'axios';
import { isObject } from 'lodash';
import omit from 'lodash/omit';
import { RequestMethod } from 'umi-request';
import request from './next-request';

type Service<T extends string> = Record<
  T,
  (params?: any, urlAppendix?: string) => any
>;

const Methods = ['post', 'delete', 'put'];

const registerServer = <T extends string>(
  opt: Record<T, { url: string; method: string }>,
  request: RequestMethod,
) => {
  const server: Service<T> = {} as Service<T>;
  for (let key in opt) {
    server[key] = (params?: any, urlAppendix?: string) => {
      let url = opt[key].url;
      const requestOptions = opt[key];
      if (urlAppendix) {
        url = url + '/' + urlAppendix;
      }
      if (Methods.some((x) => x === opt[key].method.toLowerCase())) {
        return request(url, {
          method: opt[key].method,
          data: params,
        });
      }

      if (opt[key].method === 'get' || opt[key].method === 'GET') {
        return request.get(url, {
          ...omit(requestOptions, ['method', 'url']),
          params,
        });
      }
    };
  }
  return server;
};

export default registerServer;

export function registerNextServer<T extends string>(
  requestRecord: Record<
    T,
    { url: string | ((...args: Array<any>) => string); method: string }
  >,
) {
  type Server = Record<
    T,
    (
      config?:
        | AxiosRequestConfig<any>
        | Record<string, any>
        | string
        | number
        | boolean
        | undefined,
      useAxiosNativeConfig?: boolean,
    ) => Promise<AxiosResponse<any, any>>
  >;
  const server: Server = {} as Server;

  for (const name in requestRecord) {
    if (Object.prototype.hasOwnProperty.call(requestRecord, name)) {
      const { url, method } = requestRecord[name];
      server[name] = (config, useAxiosNativeConfig = false) => {
        const nextConfig = useAxiosNativeConfig ? config : { data: config };
        const finalConfig = isObject(nextConfig) ? nextConfig : {};
        const nextUrl = typeof url === 'function' ? url(config) : url;
        return request({ url: nextUrl, method, ...finalConfig });
      };
    }
  }

  return server;
}
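`registerNextServer` turns a declarative map of `{ url, method }` entries into callable service methods backed by the axios instance from `next-request.ts`. A minimal sketch using two endpoints defined in `api.ts` above (illustrative, not part of the commit):

import api from '@/utils/api';
import { registerNextServer } from '@/utils/register-server';

const kbService = registerNextServer({
  listKb: { url: api.kb_list, method: 'get' },
  createKb: { url: api.create_kb, method: 'post' },
});

// By default the argument is sent as the request body (data).
kbService.createKb({ name: 'my dataset' });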
143
ragflow_web/src/utils/request.ts
Normal file
@@ -0,0 +1,143 @@
import { Authorization } from '@/constants/authorization';
import { ResponseType } from '@/interfaces/database/base';
import i18n from '@/locales/config';
import authorizationUtil, {
  getAuthorization,
  redirectToLogin,
} from '@/utils/authorization-util';
import { message, notification } from 'antd';
import { RequestMethod, extend } from 'umi-request';
import { convertTheKeysOfTheObjectToSnake } from './common-util';

const FAILED_TO_FETCH = 'Failed to fetch';

export const RetcodeMessage = {
  200: i18n.t('message.200'),
  201: i18n.t('message.201'),
  202: i18n.t('message.202'),
  204: i18n.t('message.204'),
  400: i18n.t('message.400'),
  401: i18n.t('message.401'),
  403: i18n.t('message.403'),
  404: i18n.t('message.404'),
  406: i18n.t('message.406'),
  410: i18n.t('message.410'),
  413: i18n.t('message.413'),
  422: i18n.t('message.422'),
  500: i18n.t('message.500'),
  502: i18n.t('message.502'),
  503: i18n.t('message.503'),
  504: i18n.t('message.504'),
};
export type ResultCode =
  | 200
  | 201
  | 202
  | 204
  | 400
  | 401
  | 403
  | 404
  | 406
  | 410
  | 413
  | 422
  | 500
  | 502
  | 503
  | 504;

const errorHandler = (error: {
  response: Response;
  message: string;
}): Response => {
  const { response } = error;
  if (error.message === FAILED_TO_FETCH) {
    notification.error({
      description: i18n.t('message.networkAnomalyDescription'),
      message: i18n.t('message.networkAnomaly'),
    });
  } else {
    if (response && response.status) {
      const errorText =
        RetcodeMessage[response.status as ResultCode] || response.statusText;
      const { status, url } = response;
      notification.error({
        message: `${i18n.t('message.requestError')} ${status}: ${url}`,
        description: errorText,
      });
    }
  }
  return response ?? { data: { code: 1999 } };
};

const request: RequestMethod = extend({
  errorHandler,
  timeout: 300000,
  getResponse: true,
});

request.interceptors.request.use((url: string, options: any) => {
  const data = convertTheKeysOfTheObjectToSnake(options.data);
  const params = convertTheKeysOfTheObjectToSnake(options.params);

  return {
    url,
    options: {
      ...options,
      data,
      params,
      headers: {
        ...(options.skipToken
          ? undefined
          : { [Authorization]: getAuthorization() }),
        ...options.headers,
      },
      interceptors: true,
    },
  };
});

request.interceptors.response.use(async (response: Response, options) => {
  if (response?.status === 413 || response?.status === 504) {
    message.error(RetcodeMessage[response?.status as ResultCode]);
  }

  if (options.responseType === 'blob') {
    return response;
  }

  const data: ResponseType = await response?.clone()?.json();
  if (data?.code === 100) {
    message.error(data?.message);
  } else if (data?.code === 401) {
    notification.error({
      message: data?.message,
      description: data?.message,
      duration: 3,
    });
    authorizationUtil.removeAll();
    redirectToLogin();
  } else if (data?.code !== 0) {
    notification.error({
      message: `${i18n.t('message.hint')} : ${data?.code}`,
      description: data?.message,
      duration: 3,
    });
  }
  return response;
});

export default request;

export const get = (url: string) => {
  return request.get(url);
};

export const post = (url: string, body: any) => {
  return request.post(url, { data: body });
};

export const drop = () => {};

export const put = () => {};
14
ragflow_web/src/utils/store-util.ts
Normal file
@@ -0,0 +1,14 @@
export const getOneNamespaceEffectsLoading = (
  namespace: string,
  effects: Record<string, boolean>,
  effectNames: Array<string>,
) => {
  return effectNames.some(
    (effectName) => effects[`${namespace}/${effectName}`],
  );
};

export const delay = (ms: number) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });