Compare commits

...

6 Commits

Author   SHA1         Message                    Date
josc146  edc6ac7297   chore                      2023-06-03 20:34:33 +08:00
josc146  e89e23621c   update readme              2023-06-03 20:28:21 +08:00
josc146  6b9ec4c6fa   add strategy guides        2023-06-03 20:18:57 +08:00
josc146  ced0966ffc   display current strategy   2023-06-03 19:38:24 +08:00
josc146  966b912013   improve logs               2023-06-03 19:28:37 +08:00
josc146  dc71054e61   improve logs               2023-06-03 17:36:50 +08:00
10 changed files with 82 additions and 24 deletions

View File

@@ -14,7 +14,7 @@ API兼容的接口这意味着一切ChatGPT客户端都是RWKV客户端。
[English](README.md) | 简体中文
[视频演示](https://www.bilibili.com/video/BV1hM4y1v76R) | [疑难解答](https://www.bilibili.com/read/cv23921171) | [预览](#Preview) | [下载][download-url]
[视频演示](https://www.bilibili.com/video/BV1hM4y1v76R) | [疑难解答](https://www.bilibili.com/read/cv23921171) | [预览](#Preview) | [下载][download-url] | [懒人包](https://pan.baidu.com/s/1wchIUHgne3gncIiLIeKBEQ?pwd=1111)
[license-image]: http://img.shields.io/badge/license-MIT-blue.svg

View File

@@ -1,7 +1,6 @@
import GPUtil
import torch
import rwkv
import langchain
import fastapi
import uvicorn
import sse_starlette
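
This hunk drops langchain from what appears to be an import-based dependency check: the file consists solely of imports, so any missing package fails loudly at startup, and langchain is no longer required. Purely as an illustration of that pattern (the module list, messages, and exit behavior below are assumptions, not the project's actual file), a check that reports all missing packages at once could look like:

import importlib
import sys

# Hypothetical sketch of an import-based dependency check.
REQUIRED = ["GPUtil", "torch", "rwkv", "fastapi", "uvicorn", "sse_starlette"]

missing = []
for name in REQUIRED:
    try:
        importlib.import_module(name)  # same effect as a bare `import name`
    except ImportError:
        missing.append(name)

if missing:
    print("Missing dependencies: " + ", ".join(missing), file=sys.stderr)
    sys.exit(1)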

View File

@@ -115,13 +115,26 @@ The following is a coherent verbose detailed conversation between a girl named {
while completion_lock.locked():
if await request.is_disconnected():
requests_num = requests_num - 1
print(f"{request.client} Stop Waiting (Lock)")
quick_log(
request, None, "Stop Waiting. RequestsNum: " + str(requests_num)
request,
None,
"Stop Waiting (Lock). RequestsNum: " + str(requests_num),
)
return
await asyncio.sleep(0.1)
else:
completion_lock.acquire()
if await request.is_disconnected():
completion_lock.release()
requests_num = requests_num - 1
print(f"{request.client} Stop Waiting (Lock)")
quick_log(
request,
None,
"Stop Waiting (Lock). RequestsNum: " + str(requests_num),
)
return
set_rwkv_config(model, global_var.get(global_var.Model_Config))
set_rwkv_config(model, body)
if body.stream:
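
The hunk above tightens the request gating around completion_lock: while another generation holds the lock, the handler polls, decrements requests_num, and writes a "Stop Waiting (Lock)" entry via quick_log if the client disconnects; once the lock is acquired, it now re-checks the connection and releases immediately if the client has already gone. A condensed sketch of that pattern, assuming completion_lock is a plain threading.Lock (consistent with the synchronous locked/acquire/release calls in the diff) and with the wrapper function name invented for illustration:

import asyncio
from threading import Lock

completion_lock = Lock()
requests_num = 0

async def wait_for_turn(request, quick_log) -> bool:
    # Return True once the lock is held and the client is still connected.
    global requests_num
    while completion_lock.locked():
        if await request.is_disconnected():
            requests_num -= 1
            quick_log(request, None, "Stop Waiting (Lock). RequestsNum: " + str(requests_num))
            return False
        await asyncio.sleep(0.1)  # poll until the running generation releases the lock
    completion_lock.acquire()
    # New in this change: the client may have dropped while we were waiting,
    # so check again and release right away instead of generating for nobody.
    if await request.is_disconnected():
        completion_lock.release()
        requests_num -= 1
        quick_log(request, None, "Stop Waiting (Lock). RequestsNum: " + str(requests_num))
        return False
    return True

The extra check matters because a slot is only freed when the holder releases the lock; without it, a generation would run to completion for a client that is no longer listening.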
@@ -184,14 +197,20 @@ The following is a coherent verbose detailed conversation between a girl named {
break
# torch_gc()
requests_num = requests_num - 1
completion_lock.release()
if await request.is_disconnected():
print(f"{request.client} Stop Waiting")
quick_log(
request,
body,
response + "\nStop Waiting. RequestsNum: " + str(requests_num),
)
return
quick_log(
request,
body,
response + "\nFinished. RequestsNum: " + str(requests_num),
)
completion_lock.release()
if await request.is_disconnected():
return
yield {
"response": response,
"model": "rwkv",
@@ -213,7 +232,6 @@ The following is a coherent verbose detailed conversation between a girl named {
try:
return await eval_rwkv().__anext__()
except StopAsyncIteration:
print(f"{request.client} Stop Waiting")
return None
@@ -241,13 +259,26 @@ async def completions(body: CompletionBody, request: Request):
while completion_lock.locked():
if await request.is_disconnected():
requests_num = requests_num - 1
print(f"{request.client} Stop Waiting (Lock)")
quick_log(
request, None, "Stop Waiting. RequestsNum: " + str(requests_num)
request,
None,
"Stop Waiting (Lock). RequestsNum: " + str(requests_num),
)
return
await asyncio.sleep(0.1)
else:
completion_lock.acquire()
if await request.is_disconnected():
completion_lock.release()
requests_num = requests_num - 1
print(f"{request.client} Stop Waiting (Lock)")
quick_log(
request,
None,
"Stop Waiting (Lock). RequestsNum: " + str(requests_num),
)
return
set_rwkv_config(model, global_var.get(global_var.Model_Config))
set_rwkv_config(model, body)
if body.stream:
@@ -304,14 +335,20 @@ async def completions(body: CompletionBody, request: Request):
break
# torch_gc()
requests_num = requests_num - 1
completion_lock.release()
if await request.is_disconnected():
print(f"{request.client} Stop Waiting")
quick_log(
request,
body,
response + "\nStop Waiting. RequestsNum: " + str(requests_num),
)
return
quick_log(
request,
body,
response + "\nFinished. RequestsNum: " + str(requests_num),
)
completion_lock.release()
if await request.is_disconnected():
return
yield {
"response": response,
"model": "rwkv",
@@ -330,5 +367,4 @@ async def completions(body: CompletionBody, request: Request):
try:
return await eval_rwkv().__anext__()
except StopAsyncIteration:
print(f"{request.client} Stop Waiting")
return None
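
The /completions hunks repeat the changes made to /chat/completions above: each bare print(f"{request.client} Stop Waiting") becomes a quick_log call that records the response text produced so far and the live requests_num counter, and a disconnect check after the generation distinguishes a "Stop Waiting" abandonment from a normal "Finished" entry. quick_log itself is defined elsewhere in the project; purely to illustrate the call shape used at these sites, quick_log(request, body_or_None, message), a hypothetical minimal stand-in might be:

import datetime
import json

# Hypothetical stand-in for the project's quick_log helper; the real one
# lives elsewhere in the codebase and may differ substantially.
def quick_log(request, body, message: str) -> None:
    entry = {
        "time": datetime.datetime.now().isoformat(),
        "client": str(getattr(request, "client", None)) if request is not None else None,
        "body": None if body is None else str(body),
        "message": message,
    }
    print(json.dumps(entry, ensure_ascii=False))

The call sites in this diff pass None for the body while a request is still waiting on the lock (no generation has started yet) and the full request body once a generation has finished or been abandoned.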

View File

@@ -137,5 +137,6 @@
"MacOS is not supported yet, please convert manually.": "暂不支持MacOS, 请手动转换",
"Microsoft Visual C++ Redistributable is not installed, would you like to download it?": "微软VC++组件未安装, 是否下载?",
"Path Cannot Contain Space": "路径不能包含空格",
"Failed to switch model, please try starting the program with administrator privileges.": "切换模型失败, 请尝试以管理员权限启动程序"
"Failed to switch model, please try starting the program with administrator privileges.": "切换模型失败, 请尝试以管理员权限启动程序",
"Current Strategy": "当前Strategy"
}

View File

[Binary image file: before and after previews, 4.4 KiB each]

[Binary image file added, preview 132 KiB]

[Binary image file added, preview 115 KiB]

View File

@@ -5,17 +5,23 @@ import classnames from 'classnames';
export const Labeled: FC<{
label: string;
desc?: string | null,
descComponent?: ReactElement,
content: ReactElement,
flex?: boolean,
spaceBetween?: boolean,
breakline?: boolean
breakline?: boolean,
onMouseEnter?: () => void
onMouseLeave?: () => void
}> = ({
label,
desc,
descComponent,
content,
flex,
spaceBetween,
breakline
breakline,
onMouseEnter,
onMouseLeave
}) => {
return (
<div className={classnames(
@@ -24,11 +30,11 @@ export const Labeled: FC<{
breakline ? 'flex-col' : '',
spaceBetween && 'justify-between')
}>
{desc ?
<Tooltip content={desc} showDelay={0} hideDelay={0} relationship="description">
<Label>{label}</Label>
{(desc || descComponent) ?
<Tooltip content={descComponent ? descComponent : desc!} showDelay={0} hideDelay={0} relationship="description">
<Label onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave}>{label}</Label>
</Tooltip> :
<Label>{label}</Label>
<Label onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave}>{label}</Label>
}
{content}
</div>
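
The Labeled component gains three optional props: descComponent, a rich tooltip body that takes precedence over the plain desc string, and onMouseEnter/onMouseLeave, which are forwarded to the inner <Label> whether or not a tooltip is rendered. The Configs page below uses the mouse handlers to show a strategy cheat-sheet image while the Strategy field is hovered. A minimal usage sketch of the new props; the component name, import paths, and image asset here are assumptions, only Labeled and its props come from the diff:

import React, { FC, useState } from 'react';
import { Input } from '@fluentui/react-components';
import { Labeled } from '../components/Labeled';
import previewImg from '../assets/images/strategy.jpg';

export const HoverPreviewExample: FC = () => {
  const [showPreview, setShowPreview] = useState(false);

  return (
    <div>
      <Labeled
        label="Strategy"
        onMouseEnter={() => setShowPreview(true)}   // new prop, forwarded to the <Label>
        onMouseLeave={() => setShowPreview(false)}  // new prop, forwarded to the <Label>
        content={<Input className="grow" placeholder="cuda:0 fp16 *20 -> cuda:1 fp16" />} />
      {showPreview &&
        <img style={{ width: '80vh', height: 'auto', zIndex: 100 }} className="fixed left-0 top-0"
          src={previewImg} />}
    </div>
  );
};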

View File

@@ -7,7 +7,7 @@ import { v4 as uuid } from 'uuid';
import classnames from 'classnames';
import { fetchEventSource } from '@microsoft/fetch-event-source';
import { ConversationPair, getConversationPairs, Record } from '../utils/get-conversation-pairs';
import logo from '../../../build/appicon.jpg';
import logo from '../assets/images/logo.jpg';
import MarkdownRender from '../components/MarkdownRender';
import { ToolTipButton } from '../components/ToolTipButton';
import { ArrowCircleUp28Regular, Delete28Regular, RecordStop28Regular } from '@fluentui/react-icons';

View File

@@ -12,7 +12,8 @@ import {
Label,
Option,
Select,
Switch
Switch,
Text
} from '@fluentui/react-components';
import {
AddCircle20Regular,
@@ -38,6 +39,8 @@ import { ConvertModel, FileExists } from '../../wailsjs/go/backend_golang/App';
import { getStrategy, refreshLocalModels } from '../utils';
import { useTranslation } from 'react-i18next';
import { WindowShow } from '../../wailsjs/runtime/runtime';
import strategyImg from '../assets/images/strategy.jpg';
import strategyZhImg from '../assets/images/strategy_zh.jpg';
export type ApiParameters = {
apiPort: number
@@ -632,6 +635,7 @@ export const Configs: FC = observer(() => {
const { t } = useTranslation();
const [selectedIndex, setSelectedIndex] = React.useState(commonStore.currentModelConfigIndex);
const [selectedConfig, setSelectedConfig] = React.useState(commonStore.modelConfigs[selectedIndex]);
const [displayStrategyImg, setDisplayStrategyImg] = React.useState(false);
const navigate = useNavigate();
const port = selectedConfig.apiParameters.apiPort;
@@ -895,9 +899,14 @@ export const Configs: FC = observer(() => {
</Dropdown>
} />
}
{selectedConfig.modelParameters.device == 'CUDA' && <div />}
{
selectedConfig.modelParameters.device == 'CUDA' && <Labeled label={t('Stored Layers')}
selectedConfig.modelParameters.device == 'CUDA' &&
<Labeled label={t('Current Strategy')}
content={<Text> {getStrategy(selectedConfig)} </Text>} />
}
{
selectedConfig.modelParameters.device == 'CUDA' &&
<Labeled label={t('Stored Layers')}
desc={t('Number of the neural network layers loaded into VRAM, the more you load, the faster the speed, but it consumes more VRAM.')}
content={
<ValuedSlider value={selectedConfig.modelParameters.storedLayers} min={0}
@@ -922,9 +931,16 @@ export const Configs: FC = observer(() => {
}} />
} />
}
{
displayStrategyImg &&
<img style={{ width: '80vh', height: 'auto', zIndex: 100 }} className="fixed left-0 top-0"
src={commonStore.settings.language === 'zh' ? strategyZhImg : strategyImg} />
}
{
selectedConfig.modelParameters.device == 'Custom' &&
<Labeled label="Strategy" desc="https://github.com/BlinkDL/ChatRWKV/blob/main/ChatRWKV-strategy.png"
<Labeled label="Strategy"
onMouseEnter={() => setDisplayStrategyImg(true)}
onMouseLeave={() => setDisplayStrategyImg(false)}
content={
<Input className="grow" placeholder="cuda:0 fp16 *20 -> cuda:1 fp16"
value={selectedConfig.modelParameters.customStrategy}
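
Taken together, the Configs changes add a read-only "Current Strategy" row for CUDA configs, rendered as <Text> {getStrategy(selectedConfig)} </Text>, plus a hover preview of the strategy guide image (strategy.jpg, or strategy_zh.jpg when commonStore.settings.language is 'zh') while the custom Strategy field is hovered. getStrategy is imported from ../utils and its implementation is not part of this diff; a rough sketch of what such a helper plausibly does, with field names and output format assumed from the "cuda:0 fp16 *20 -> cuda:1 fp16" placeholder and the ChatRWKV strategy syntax:

// Hypothetical sketch only; the project's real getStrategy lives in ../utils.
type ModelParametersSketch = {
  device: 'CPU' | 'CUDA' | 'Custom';
  precision?: 'fp16' | 'fp32' | 'int8'; // assumed field, not shown in this diff
  storedLayers: number;                 // shown in this diff
  customStrategy?: string;              // shown in this diff
};

function getStrategySketch(p: ModelParametersSketch): string {
  if (p.device === 'Custom') return p.customStrategy ?? '';
  if (p.device === 'CPU') return 'cpu fp32';
  // For CUDA, keep `storedLayers` layers in VRAM and stream the remainder,
  // following the ChatRWKV strategy string format (e.g. "cuda fp16 *20+").
  return 'cuda ' + (p.precision ?? 'fp16') + ' *' + p.storedLayers + '+';
}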