diff --git a/frontend/src/components/MarkdownRender.tsx b/frontend/src/components/MarkdownRender.tsx
index 73a8b64..1c15056 100644
--- a/frontend/src/components/MarkdownRender.tsx
+++ b/frontend/src/components/MarkdownRender.tsx
@@ -5,6 +5,21 @@ import remarkGfm from 'remark-gfm';
 import remarkBreaks from 'remark-breaks';
 import {FC} from 'react';
 import {ReactMarkdownOptions} from 'react-markdown/lib/react-markdown';
+import {BrowserOpenURL} from '../../wailsjs/runtime';
+
+const Hyperlink: FC<any> = ({href, children}) => {
+  return (
+    <a
+      href={href}
+      onClick={() => {
+        BrowserOpenURL(href);
+      }}
+    >
+      {/*@ts-ignore*/}
+      {children}
+    </a>
+  );
+};
 
 export const MarkdownRender: FC<ReactMarkdownOptions> = (props) => {
   return (
@@ -21,6 +36,9 @@ export const MarkdownRender: FC<ReactMarkdownOptions> = (props) => {
           }
         ]
       ]}
+      components={{
+        a: Hyperlink
+      }}
     >
       {props.children}
     </ReactMarkdown>
diff --git a/frontend/src/pages/About.tsx b/frontend/src/pages/About.tsx
index df5683d..4f81c5e 100644
--- a/frontend/src/pages/About.tsx
+++ b/frontend/src/pages/About.tsx
@@ -1,13 +1,21 @@
 import React, {FC} from 'react';
-import {Text} from '@fluentui/react-components';
 import {useTranslation} from 'react-i18next';
+import {Page} from '../components/Page';
+import MarkdownRender from '../components/MarkdownRender';
+import {observer} from 'mobx-react-lite';
+import commonStore from '../stores/commonStore';
 
-export const About: FC = () => {
+export const About: FC = observer(() => {
   const {t} = useTranslation();
+  const lang: string = commonStore.settings.language;
 
   return (
-    <div>
-      <Text>{t('In Development')}</Text>
-    </div>
+    <Page title={t('About')} content={
+      <div>
+        <MarkdownRender>
+          {lang in commonStore.about ? commonStore.about[lang] : commonStore.about['en']}
+        </MarkdownRender>
+      </div>
+    }/>
   );
-};
+});
diff --git a/frontend/src/pages/Home.tsx b/frontend/src/pages/Home.tsx
index 44705e3..c72a1be 100644
--- a/frontend/src/pages/Home.tsx
+++ b/frontend/src/pages/Home.tsx
@@ -14,6 +14,8 @@ import manifest from '../../../manifest.json';
 import {BrowserOpenURL} from '../../wailsjs/runtime';
 import {useTranslation} from 'react-i18next';
 import {ConfigSelector} from '../components/ConfigSelector';
+import MarkdownRender from '../components/MarkdownRender';
+import commonStore from '../stores/commonStore';
 
 type NavCard = {
   label: string;
@@ -52,6 +54,7 @@ export const Home: FC = observer(() => {
   const {t} = useTranslation();
   const navigate = useNavigate();
+  const lang: string = commonStore.settings.language;
 
   const onClickNavCard = (path: string) => {
     navigate({pathname: path});
   };
@@ -62,9 +65,10 @@ export const Home: FC = observer(() => {
   return (
     <div>
       {t('Introduction')}
-      <div>
-        {t('RWKV is an RNN with Transformer-level LLM performance, which can also be directly trained like a GPT transformer (parallelizable). And it\'s 100% attention-free. You only need the hidden state at position t to compute the state at position t+1. You can use the "GPT" mode to quickly compute the hidden state for the "RNN" mode.<br/>So it\'s combining the best of RNN and transformer - great performance, fast inference, saves VRAM, fast training, "infinite" ctx_len, and free sentence embedding (using the final hidden state).')}
-        {/*TODO Markdown*/}
+      <div>
+        <MarkdownRender>
+          {lang in commonStore.introduction ? commonStore.introduction[lang] : commonStore.introduction['en']}
+        </MarkdownRender>
       </div>
     </div>
diff --git a/frontend/src/pages/Models.tsx b/frontend/src/pages/Models.tsx
index d1b6ff0..c2c011e 100644
--- a/frontend/src/pages/Models.tsx
+++ b/frontend/src/pages/Models.tsx
@@ -46,10 +46,15 @@ const columns: TableColumnDefinition[] = [
   createTableColumn({
     columnId: 'desc',
     compare: (a, b) => {
-      if (a.desc && b.desc)
-        return a.desc['en'].localeCompare(b.desc['en']);
-      else
-        return 0;
+      const lang: string = commonStore.settings.language;
+
+      if (a.desc && b.desc) {
+        if (lang in a.desc && lang in b.desc)
+          return a.desc[lang].localeCompare(b.desc[lang]);
+        else if ('en' in a.desc && 'en' in b.desc)
+          return a.desc['en'].localeCompare(b.desc['en']);
+      }
+      return 0;
     },
     renderHeaderCell: () => {
       const {t} = useTranslation();
diff --git a/frontend/src/startup.ts b/frontend/src/startup.ts
index 133db29..eebc578 100644
--- a/frontend/src/startup.ts
+++ b/frontend/src/startup.ts
@@ -5,14 +5,27 @@ import {getStatus} from './apis';
 
 export async function startup() {
   downloadProgramFiles();
+
+  initRemoteText();
   initCache();
   await initConfig();
+
   getStatus(500).then(status => {
     if (status)
       commonStore.setModelStatus(status);
   });
 }
 
+async function initRemoteText() {
+  await fetch('https://cdn.jsdelivr.net/gh/josstorer/RWKV-Runner/manifest.json', {cache: 'no-cache'})
+    .then(r => r.json()).then((data) => {
+      if (data.introduction)
+        commonStore.setIntroduction(data.introduction);
+      if (data.about)
+        commonStore.setAbout(data.about);
+    });
+}
+
 async function initConfig() {
   await ReadJson('config.json').then((configData: LocalConfig) => {
     if (configData.modelSourceManifestList)
diff --git a/frontend/src/stores/commonStore.ts b/frontend/src/stores/commonStore.ts
index 8c9c87c..66db475 100644
--- a/frontend/src/stores/commonStore.ts
+++ b/frontend/src/stores/commonStore.ts
@@ -1,6 +1,7 @@
 import {makeAutoObservable} from 'mobx';
 import {getUserLanguage, isSystemLightMode, saveConfigs, Settings} from '../utils';
 import {WindowSetDarkTheme, WindowSetLightTheme} from '../../wailsjs/runtime';
+import manifest from '../../../manifest.json';
 
 export enum ModelStatus {
   Offline,
@@ -88,6 +89,8 @@ class CommonStore {
     darkMode: !isSystemLightMode(),
     autoUpdatesCheck: true
   };
+  introduction: { [lang: string]: string } = manifest.introduction;
+  about: { [lang: string]: string } = manifest.about;
 
   getCurrentModelConfig = () => {
     return this.modelConfigs[this.currentModelConfigIndex];
   };
@@ -157,6 +160,14 @@ class CommonStore {
     if (saveConfig)
       saveConfigs();
   };
+
+  setIntroduction = (value: { [lang: string]: string }) => {
+    this.introduction = value;
+  };
+
+  setAbout = (value: { [lang: string]: string }) => {
+    this.about = value;
+  };
 }
 
 export default new CommonStore();
\ No newline at end of file
diff --git a/manifest.json b/manifest.json
index 3cf8115..56c8e80 100644
--- a/manifest.json
+++ b/manifest.json
@@ -1,5 +1,13 @@
 {
   "version": "1.0.0",
+  "introduction": {
+    "en": "RWKV is an RNN with Transformer-level LLM performance, which can also be directly trained like a GPT transformer (parallelizable). And it's 100% attention-free. You only need the hidden state at position t to compute the state at position t+1. You can use the \"GPT\" mode to quickly compute the hidden state for the \"RNN\" mode.<br/>So it's combining the best of RNN and transformer - great performance, fast inference, saves VRAM, fast training, \"infinite\" ctx_len, and free sentence embedding (using the final hidden state).",
+    "zh": "RWKV是具有Transformer级别LLM性能的RNN，也可以像GPT Transformer一样直接进行训练（可并行化）。而且它是100% attention-free的。你只需在位置t处获得隐藏状态即可计算位置t + 1处的状态。你可以使用“GPT”模式快速计算用于“RNN”模式的隐藏状态。<br/>因此，它将RNN和Transformer的优点结合起来 - 高性能、快速推理、节省显存、快速训练、“无限”上下文长度以及免费的语句嵌入（使用最终隐藏状态）。"
+  },
+  "about": {
+    "en": "\n\nProject Source Code:\nhttps://github.com/josStorer/RWKV-Runner\nAuthor: [@josStorer](https://github.com/josStorer)\n\nRelated Repositories:\nRWKV-4-Raven: https://huggingface.co/BlinkDL/rwkv-4-raven/tree/main\nChatRWKV: https://github.com/BlinkDL/ChatRWKV\nRWKV-LM: https://github.com/BlinkDL/RWKV-LM\n\n",
+    "zh": "\n\n本项目源码:\nhttps://github.com/josStorer/RWKV-Runner\n作者: [@josStorer](https://github.com/josStorer)\n\n相关仓库:\nRWKV-4-Raven: https://huggingface.co/BlinkDL/rwkv-4-raven/tree/main\nChatRWKV: https://github.com/BlinkDL/ChatRWKV\nRWKV-LM: https://github.com/BlinkDL/RWKV-LM\n\n"
+  },
   "localModelDir": "models",
   "programFiles": [
     {