6 changes: 6 additions & 0 deletions packages/global/common/error/utils.ts
@@ -1,4 +1,5 @@
import { replaceSensitiveText } from '../string/tools';
import { ERROR_RESPONSE } from './errorCode';

export const getErrText = (err: any, def = ''): any => {
const msg: string =
@@ -12,6 +13,11 @@ export const getErrText = (err: any, def = ''): any => {
err?.msg ||
err?.error ||
def;

if (ERROR_RESPONSE[msg]) {
return ERROR_RESPONSE[msg].message;
}

// msg && console.log('error =>', msg);
return replaceSensitiveText(msg);
};
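For reference, a minimal usage sketch of the new lookup (the error key below is hypothetical; the real keys are whatever ERROR_RESPONSE in errorCode.ts defines):

```ts
import { getErrText } from '@fastgpt/global/common/error/utils';

// When the extracted message matches a key in ERROR_RESPONSE, the mapped,
// user-facing message is returned instead of the raw error string.
const mapped = getErrText({ message: 'some.known.error.key' }); // hypothetical key

// Any other message still falls through to replaceSensitiveText(msg), as before.
const fallback = getErrText(new Error('connect ECONNREFUSED 127.0.0.1:27017'), 'Request failed');
```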
2 changes: 2 additions & 0 deletions packages/web/i18n/en/dataset.json
@@ -166,6 +166,8 @@
"request_headers": "Request headers, will automatically append 'Bearer '",
"retain_collection": "Adjust Training Parameters",
"retrain_task_submitted": "The retraining task has been submitted",
"retry_all": "Retry all",
"retry_failed": "Retry Failed",
"rootDirectoryFormatError": "Root directory data format is incorrect",
"rootdirectory": "/rootdirectory",
"same_api_collection": "The same API set exists",
2 changes: 2 additions & 0 deletions packages/web/i18n/zh-CN/dataset.json
@@ -166,6 +166,8 @@
"request_headers": "请求头参数,会自动补充 Bearer",
"retain_collection": "调整训练参数",
"retrain_task_submitted": "重新训练任务已提交",
"retry_all": "全部重试",
"retry_failed": "重试失败",
"rootDirectoryFormatError": "根目录数据格式不正确",
"rootdirectory": "/根目录",
"same_api_collection": "存在相同的 API 集合",
2 changes: 2 additions & 0 deletions packages/web/i18n/zh-Hant/dataset.json
@@ -166,6 +166,8 @@
"request_headers": "請求頭",
"retain_collection": "調整訓練參數",
"retrain_task_submitted": "重新訓練任務已提交",
"retry_all": "全部重試",
"retry_failed": "重試失敗",
"rootDirectoryFormatError": "根目錄資料格式不正確",
"rootdirectory": "/根目錄",
"same_api_collection": "存在相同的 API 集合",
@@ -37,6 +37,9 @@ import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import MyImage from '@/components/MyImage';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import React from 'react';

enum TrainingStatus {
NotStart = 'NotStart',
@@ -391,7 +394,7 @@ const ErrorView = ({
<Td>{item.chunkIndex + 1}</Td>
<Td>{TrainingText[item.mode]}</Td>
<Td maxW={50}>
<MyTooltip label={item.errorMsg}>{item.errorMsg}</MyTooltip>
<MyTooltip label={t(item.errorMsg)}>{t(item.errorMsg)}</MyTooltip>
</Td>
<Td>
<Flex alignItems={'center'}>
@@ -514,6 +517,7 @@ const TrainingStates = ({
onClose: () => void;
}) => {
const { t } = useTranslation();
const { toast } = useToast();
const [tab, setTab] = useState<typeof defaultTab>(defaultTab);

const {
@@ -526,6 +530,23 @@
manual: false
});

// Retry all failed data in the collection
const { runAsync: handleRetryAll, loading: retrying } = useRequest2(
() => updateTrainingData({ datasetId, collectionId }),
{
manual: true,
onSuccess: () => {
refreshTrainingDetail();
},
onError: (err) => {
toast({
status: 'error',
title: getErrText(err, t('dataset:retry_failed'))
});
}
}
);

const errorCounts = (Object.values(trainingDetail?.errorCounts || {}) as number[]).reduce(
(acc, count) => acc + count,
0
@@ -541,21 +562,25 @@
isLoading={!trainingDetail && loading && tab === 'states'}
>
<ModalBody px={9} minH={['90vh', '500px']}>
<FillRowTabs
py={1}
mb={6}
value={tab}
onChange={(e) => setTab(e as 'states' | 'errors')}
list={[
{ label: t('dataset:dataset.Training Process'), value: 'states' },
{
label: t('dataset:dataset.Training_Errors', {
count: errorCounts
}),
value: 'errors'
}
]}
/>
<Flex align="center" justify="space-between" mb={4}>
<FillRowTabs
py={1}
value={tab}
onChange={(e) => setTab(e as 'states' | 'errors')}
list={[
{ label: t('dataset:dataset.Training Process'), value: 'states' },
{
label: t('dataset:dataset.Training_Errors', { count: errorCounts }),
value: 'errors'
}
]}
/>
{tab === 'errors' && errorCounts > 0 && (
<Button colorScheme="primary" size="sm" isLoading={retrying} onClick={handleRetryAll}>
{t('dataset:retry_all')}
</Button>
)}
</Flex>
{tab === 'states' && trainingDetail && <ProgressView trainingDetail={trainingDetail} />}
{tab === 'errors' && (
<ErrorView
@@ -8,7 +8,7 @@ import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
export type updateTrainingDataBody = {
datasetId: string;
collectionId: string;
dataId?: string; // 改为可选,不传则重试所有错误数据
dataId?: string; // Now optional: if omitted, all failed data in the collection is retried.
q?: string;
a?: string;
chunkIndex?: number;
@@ -31,7 +31,7 @@ async function handler(
per: WritePermissionVal
});

// 如果没有传 dataId,则重试该集合下的所有错误数据
// If no dataId is passed, retry all failed data in this collection.
if (!dataId) {
await MongoDatasetTraining.updateMany(
{
@@ -49,7 +49,7 @@
return {};
}

// 单个数据重试逻辑
// Single data retry logic
const data = await MongoDatasetTraining.findOne({ teamId, datasetId, _id: dataId });

if (!data) {
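For reference, a rough sketch of how the two retry paths are reached from the client, assuming the `updateTrainingData` web helper used by the modal above (its import path and the underlying API route are not shown in this diff):

```ts
// Sketch only: `updateTrainingData` stands in for the web-client helper the modal calls.
declare function updateTrainingData(body: {
  datasetId: string;
  collectionId: string;
  dataId?: string;
  q?: string;
  a?: string;
  chunkIndex?: number;
}): Promise<void>;

// Retry every failed item in a collection (new behaviour: no dataId).
export async function retryAllFailed(datasetId: string, collectionId: string) {
  await updateTrainingData({ datasetId, collectionId });
}

// Retry a single item (original behaviour), optionally overriding its content via q/a/chunkIndex.
export async function retrySingle(datasetId: string, collectionId: string, dataId: string) {
  await updateTrainingData({ datasetId, collectionId, dataId });
}
```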