remote text

josc146 2023-05-19 22:18:38 +08:00
parent 21c200d767
commit 5a0514c72d
7 changed files with 80 additions and 13 deletions

View File

@@ -5,6 +5,21 @@ import remarkGfm from 'remark-gfm';
 import remarkBreaks from 'remark-breaks';
 import {FC} from 'react';
 import {ReactMarkdownOptions} from 'react-markdown/lib/react-markdown';
+import {BrowserOpenURL} from '../../wailsjs/runtime';
+
+const Hyperlink: FC<any> = ({href, children}) => {
+  return (
+    <span
+      style={{color: '#8ab4f8', cursor: 'pointer'}}
+      onClick={() => {
+        BrowserOpenURL(href);
+      }}
+    >
+      {/*@ts-ignore*/}
+      {children}
+    </span>
+  );
+};
 
 export const MarkdownRender: FC<ReactMarkdownOptions> = (props) => {
   return (
@@ -21,6 +36,9 @@ export const MarkdownRender: FC<ReactMarkdownOptions> = (props) => {
           }
         ]
       ]}
+      components={{
+        a: Hyperlink
+      }}
    >
      {props.children}
    </ReactMarkdown>
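
Note: react-markdown's components prop lets a caller swap the renderer used for any element, so mapping a to the Hyperlink wrapper above means every Markdown link is drawn as a styled span whose click handler goes through the Wails BrowserOpenURL binding, opening the target in the system browser instead of navigating the embedded webview. A minimal usage sketch (the Example component and the sample Markdown string are illustrative, not part of this commit):

import {FC} from 'react';
import MarkdownRender from '../components/MarkdownRender';

// Any link in the Markdown below is rendered by the custom Hyperlink component,
// so clicking it calls BrowserOpenURL instead of changing the webview location.
const Example: FC = () => {
  return (
    <MarkdownRender>
      {'See the [RWKV-Runner repository](https://github.com/josStorer/RWKV-Runner).'}
    </MarkdownRender>
  );
};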

View File

@@ -1,13 +1,21 @@
 import React, {FC} from 'react';
-import {Text} from '@fluentui/react-components';
 import {useTranslation} from 'react-i18next';
+import {Page} from '../components/Page';
+import MarkdownRender from '../components/MarkdownRender';
+import {observer} from 'mobx-react-lite';
+import commonStore from '../stores/commonStore';
 
-export const About: FC = () => {
+export const About: FC = observer(() => {
   const {t} = useTranslation();
+  const lang: string = commonStore.settings.language;
 
   return (
-    <div className="flex flex-col box-border gap-5 p-2">
-      <Text size={600}>{t('In Development')}</Text>
-    </div>
+    <Page title={t('About')} content={
+      <div className="overflow-y-auto overflow-x-hidden p-1">
+        <MarkdownRender>
+          {lang in commonStore.about ? commonStore.about[lang] : commonStore.about['en']}
+        </MarkdownRender>
+      </div>
+    }/>
   );
-};
+});

View File

@@ -14,6 +14,8 @@ import manifest from '../../../manifest.json';
 import {BrowserOpenURL} from '../../wailsjs/runtime';
 import {useTranslation} from 'react-i18next';
 import {ConfigSelector} from '../components/ConfigSelector';
+import MarkdownRender from '../components/MarkdownRender';
+import commonStore from '../stores/commonStore';
 
 type NavCard = {
   label: string;
@@ -52,6 +54,7 @@ const navCards: NavCard[] = [
 export const Home: FC = observer(() => {
   const {t} = useTranslation();
   const navigate = useNavigate();
+  const lang: string = commonStore.settings.language;
 
   const onClickNavCard = (path: string) => {
     navigate({pathname: path});
@@ -62,9 +65,10 @@ export const Home: FC = observer(() => {
       <img className="rounded-xl select-none hidden sm:block" src={banner}/>
       <div className="flex flex-col gap-2">
         <Text size={600} weight="medium">{t('Introduction')}</Text>
-        <div className="h-40 overflow-y-auto p-1">
-          {t('RWKV is an RNN with Transformer-level LLM performance, which can also be directly trained like a GPT transformer (parallelizable). And it\'s 100% attention-free. You only need the hidden state at position t to compute the state at position t+1. You can use the "GPT" mode to quickly compute the hidden state for the "RNN" mode. <br/> So it\'s combining the best of RNN and transformer - great performance, fast inference, saves VRAM, fast training, "infinite" ctx_len, and free sentence embedding (using the final hidden state).')}
-          {/*TODO Markdown*/}
+        <div className="h-40 overflow-y-auto overflow-x-hidden p-1">
+          <MarkdownRender>
+            {lang in commonStore.introduction ? commonStore.introduction[lang] : commonStore.introduction['en']}
+          </MarkdownRender>
         </div>
       </div>
       <div className="grid grid-cols-2 sm:grid-cols-4 gap-5">

View File

@@ -46,9 +46,14 @@ const columns: TableColumnDefinition<ModelSourceItem>[] = [
   createTableColumn<ModelSourceItem>({
     columnId: 'desc',
     compare: (a, b) => {
-      if (a.desc && b.desc)
+      const lang: string = commonStore.settings.language;
+
+      if (a.desc && b.desc) {
+        if (lang in a.desc && lang in b.desc)
+          return a.desc[lang].localeCompare(b.desc[lang]);
+        else if ('en' in a.desc && 'en' in b.desc)
           return a.desc['en'].localeCompare(b.desc['en']);
-      else
+      }
       return 0;
     },
     renderHeaderCell: () => {
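
The desc comparator above applies the same language-fallback rule that About.tsx and Home.tsx use for the remote texts: prefer the entry for the user's configured language, otherwise fall back to the English entry. A hypothetical helper capturing that rule (not part of this commit) might look like:

// Hypothetical helper, not in the commit: resolve a localized text map the way
// the pages and the desc comparator do, preferring lang and falling back to 'en'.
function pickLocalized(texts: { [lang: string]: string }, lang: string): string {
  if (lang in texts)
    return texts[lang];
  return 'en' in texts ? texts['en'] : '';
}

// e.g. pickLocalized(commonStore.about, commonStore.settings.language)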

View File

@@ -5,14 +5,27 @@ import {getStatus} from './apis';
 export async function startup() {
   downloadProgramFiles();
+  initRemoteText();
   initCache();
 
   await initConfig();
 
   getStatus(500).then(status => {
     if (status)
       commonStore.setModelStatus(status);
   });
 }
 
+async function initRemoteText() {
+  await fetch('https://cdn.jsdelivr.net/gh/josstorer/RWKV-Runner/manifest.json', {cache: 'no-cache'})
+    .then(r => r.json()).then((data) => {
+      if (data.introduction)
+        commonStore.setIntroduction(data.introduction);
+      if (data.about)
+        commonStore.setAbout(data.about);
+    });
+}
+
 async function initConfig() {
   await ReadJson('config.json').then((configData: LocalConfig) => {
     if (configData.modelSourceManifestList)
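
initRemoteText() fetches the latest manifest.json from the jsDelivr CDN with caching disabled and overwrites the bundled introduction/about texts in the store. Because the store fields are initialized from the bundled manifest.json, the UI still has text to show if the request never completes, and startup() calls the function without awaiting it, so the pages render immediately and update once the response arrives. Below is a sketch of the same fetch with explicit error handling; it is an illustrative variant, not the commit's code, and assumes the same commonStore import already used by startup.ts:

// Illustrative variant with explicit error handling: on any network or parse
// failure the bundled manifest.json texts simply stay in place.
async function initRemoteTextSafe() {
  try {
    const r = await fetch('https://cdn.jsdelivr.net/gh/josstorer/RWKV-Runner/manifest.json', {cache: 'no-cache'});
    const data = await r.json();
    if (data.introduction)
      commonStore.setIntroduction(data.introduction);
    if (data.about)
      commonStore.setAbout(data.about);
  } catch (e) {
    console.warn('failed to refresh remote text, keeping bundled defaults', e);
  }
}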

View File

@@ -1,6 +1,7 @@
 import {makeAutoObservable} from 'mobx';
 import {getUserLanguage, isSystemLightMode, saveConfigs, Settings} from '../utils';
 import {WindowSetDarkTheme, WindowSetLightTheme} from '../../wailsjs/runtime';
+import manifest from '../../../manifest.json';
 
 export enum ModelStatus {
   Offline,
@@ -88,6 +89,8 @@ class CommonStore {
     darkMode: !isSystemLightMode(),
     autoUpdatesCheck: true
   };
+  introduction: { [lang: string]: string } = manifest.introduction;
+  about: { [lang: string]: string } = manifest.about;
 
   getCurrentModelConfig = () => {
     return this.modelConfigs[this.currentModelConfigIndex];
@@ -157,6 +160,14 @@ class CommonStore {
     if (saveConfig)
       saveConfigs();
   };
+
+  setIntroduction = (value: { [lang: string]: string }) => {
+    this.introduction = value;
+  };
+
+  setAbout = (value: { [lang: string]: string }) => {
+    this.about = value;
+  };
 }
 
 export default new CommonStore();
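
Because CommonStore is built with makeAutoObservable, the new introduction and about fields are observables: initializing them from the bundled manifest.json gives the pages immediate content, and calling setIntroduction/setAbout with the CDN copy re-renders any observer component that reads them, which lines up with About.tsx being wrapped in observer() in this commit. A minimal hypothetical consumer illustrating that reactivity (not part of the commit):

import {FC} from 'react';
import {observer} from 'mobx-react-lite';
import commonStore from '../stores/commonStore';

// Hypothetical component: re-renders automatically once initRemoteText()
// replaces commonStore.about via setAbout().
const AboutPreview: FC = observer(() => {
  const lang: string = commonStore.settings.language;
  return <p>{lang in commonStore.about ? commonStore.about[lang] : commonStore.about['en']}</p>;
});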

View File

@@ -1,5 +1,13 @@
 {
   "version": "1.0.0",
+  "introduction": {
+    "en": "RWKV is an RNN with Transformer-level LLM performance, which can also be directly trained like a GPT transformer (parallelizable). And it's 100% attention-free. You only need the hidden state at position t to compute the state at position t+1. You can use the \"GPT\" mode to quickly compute the hidden state for the \"RNN\" mode.<br/>So it's combining the best of RNN and transformer - great performance, fast inference, saves VRAM, fast training, \"infinite\" ctx_len, and free sentence embedding (using the final hidden state).",
+    "zh": "RWKV是具有Transformer级别LLM性能的RNN也可以像GPT Transformer一样直接进行训练可并行化。而且它是100% attention-free的。你只需在位置t处获得隐藏状态即可计算位置t + 1处的状态。你可以使用“GPT”模式快速计算用于“RNN”模式的隐藏状态。<br/>因此它将RNN和Transformer的优点结合起来 - 高性能、快速推理、节省显存、快速训练、“无限”上下文长度以及免费的语句嵌入(使用最终隐藏状态)。"
+  },
+  "about": {
+    "en": "<div align=\"center\">\n\nProject Source Code:\nhttps://github.com/josStorer/RWKV-Runner\nAuthor: [@josStorer](https://github.com/josStorer)\n\nRelated Repositories:\nRWKV-4-Raven: https://huggingface.co/BlinkDL/rwkv-4-raven/tree/main\nChatRWKV: https://github.com/BlinkDL/ChatRWKV\nRWKV-LM: https://github.com/BlinkDL/RWKV-LM\n\n</div>",
+    "zh": "<div align=\"center\">\n\n本项目源码:\nhttps://github.com/josStorer/RWKV-Runner\n作者: [@josStorer](https://github.com/josStorer)\n\n相关仓库:\nRWKV-4-Raven: https://huggingface.co/BlinkDL/rwkv-4-raven/tree/main\nChatRWKV: https://github.com/BlinkDL/ChatRWKV\nRWKV-LM: https://github.com/BlinkDL/RWKV-LM\n\n</div>"
+  },
   "localModelDir": "models",
   "programFiles": [
     {