improve error messages

commit 447f4572b1 (parent 5c9b4a4c05)
@@ -157,5 +157,10 @@
   "Localhost": "本地",
   "Retry": "重试",
   "Delete": "删除",
-  "Edit": "编辑"
+  "Edit": "编辑",
+  "Memory is not enough, try to increase the virtual memory or use a smaller model.": "内存不足,尝试增加虚拟内存,或使用一个更小规模的模型",
+  "Bad pytorch version, please reinstall pytorch with cuda.": "错误的Pytorch版本,请重新安装CUDA版本的Pytorch",
+  "The model file is corrupted, please download again.": "模型文件损坏,请重新下载",
+  "Found no NVIDIA driver, please install the latest driver.": "没有找到NVIDIA驱动,请安装最新驱动",
+  "VRAM is not enough, please reduce stored layers or use a lower precision in Configs page.": "显存不足,请在配置页面减少载入显存层数,或使用更低的精度"
 }
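Note: the English sentences added above double as the i18next translation keys, so they must match the strings placed in errorsMap in the RunButton hunk below character for character. A minimal lookup sketch, assuming an i18next setup with the key and namespace separators disabled so the full-sentence keys (which contain '.') resolve as written; this config is illustrative only, not the project's actual initialization:

import i18next from 'i18next';

// Illustrative config only: the full-sentence keys contain '.', so the default
// key separator must be turned off for the lookup to hit the JSON entries above.
i18next.init({
  lng: 'zh',
  keySeparator: false,
  nsSeparator: false,
  resources: {
    zh: {
      translation: {
        'Memory is not enough, try to increase the virtual memory or use a smaller model.':
          '内存不足,尝试增加虚拟内存,或使用一个更小规模的模型'
      }
    }
  }
}).then(() => {
  // Resolves to the Chinese text above; an unknown key falls back to the key itself,
  // so a drifted string still produces a readable English toast.
  console.log(i18next.t('Memory is not enough, try to increase the virtual memory or use a smaller model.'));
});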
@@ -207,11 +207,21 @@ export const RunButton: FC<{ onClickRun?: MouseEventHandler, iconMode?: boolean
         toast(t('Loading Model'), { type: 'info' });
       } else {
         commonStore.setStatus({ status: ModelStatus.Offline });
-        toast(t('Failed to switch model') + ' - ' + await r.text(), { type: 'error' });
+        const error = await r.text();
+        const errorsMap = {
+          'not enough memory': 'Memory is not enough, try to increase the virtual memory or use a smaller model.',
+          'not compiled with CUDA': 'Bad pytorch version, please reinstall pytorch with cuda.',
+          'invalid header or archive is corrupted': 'The model file is corrupted, please download again.',
+          'no NVIDIA driver': 'Found no NVIDIA driver, please install the latest driver.',
+          'CUDA out of memory': 'VRAM is not enough, please reduce stored layers or use a lower precision in Configs page.'
+        };
+        const matchedError = Object.entries(errorsMap).find(([key, _]) => error.includes(key));
+        const message = matchedError ? t(matchedError[1]) : error;
+        toast(t('Failed to switch model') + ' - ' + message, { autoClose: 5000, type: 'error' });
       }
     }).catch((e) => {
       commonStore.setStatus({ status: ModelStatus.Offline });
-      toast(t('Failed to switch model') + ' - ' + e.message || e, { type: 'error' });
+      toast(t('Failed to switch model') + ' - ' + (e.message || e), { type: 'error' });
     });
   }
 }).catch(() => {
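The `(e.message || e)` change here, and repeated in the ChatPanel and checkUpdate hunks below, fixes an operator-precedence bug: `+` binds tighter than `||`, so the fallback to the error object could never apply. A standalone sketch; the error value is made up for illustration:

// '+' binds tighter than '||', so without parentheses the fallback never fires.
const e = new TypeError('');          // an error whose `message` is empty

const before = 'Failed to switch model' + ' - ' + e.message || e;
// parsed as ('Failed to switch model - ' + e.message) || e
// -> "Failed to switch model - "      (the concatenated string is always truthy,
//    so the `|| e` branch is unreachable)

const after = 'Failed to switch model' + ' - ' + (e.message || e);
// -> "Failed to switch model - TypeError"  (when `message` is falsy, the error
//    object itself is stringified, so something useful still reaches the toast)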
@@ -409,7 +409,7 @@ const ChatPanel: FC = observer(() => {
           OpenFileFolder(path, false);
         });
     }).catch(e => {
-      toast(t('Error') + ' - ' + e.message || e, { type: 'error', autoClose: 2500 });
+      toast(t('Error') + ' - ' + (e.message || e), { type: 'error', autoClose: 2500 });
     });
   }} />
 </div>
@@ -270,7 +270,7 @@ export async function checkUpdate(notifyEvenLatest: boolean = false) {
       }
     );
   }).catch((e) => {
-    toast(t('Update Error') + ' - ' + e.message || e, {
+    toast(t('Update Error') + ' - ' + (e.message || e), {
       type: 'error',
       position: 'bottom-left',
       autoClose: false
@@ -302,7 +302,7 @@ export async function checkUpdate(notifyEvenLatest: boolean = false) {
       }
     }
   ).catch((e) => {
-    toast(t('Updates Check Error') + ' - ' + e.message || e, { type: 'error', position: 'bottom-left' });
+    toast(t('Updates Check Error') + ' - ' + (e.message || e), { type: 'error', position: 'bottom-left' });
   });
 }