custom strategy mode

josc146 2023-05-31 12:26:10 +08:00
parent 8291c50058
commit 9f5d15a7d5
4 changed files with 100 additions and 62 deletions

View File

@@ -124,5 +124,6 @@
   "There is currently a game of Werewolf with six players, including a Seer (who can check identities at night), two Werewolves (who can choose someone to kill at night), a Bodyguard (who can choose someone to protect at night), two Villagers (with no special abilities), and a game host. Bob will play as Player 1, Alice will play as Players 2-6 and the game host, and they will begin playing together. Every night, the host will ask Bob for his action and simulate the actions of the other players. During the day, the host will oversee the voting process and ask Bob for his vote. \n\nAlice: Next, I will act as the game host and assign everyone their roles, including randomly assigning yours. Then, I will simulate the actions of Players 2-6 and let you know what happens each day. Based on your assigned role, you can tell me your actions and I will let you know the corresponding results each day.\n\nBob: Okay, I understand. Let's begin. Please assign me a role. Am I the Seer, Werewolf, Villager, or Bodyguard?\n\nAlice: You are the Seer. Now that night has fallen, please choose a player to check his identity.\n\nBob: Tonight, I want to check Player 2 and find out his role.": "现在有一场六人狼人杀游戏,包括一名预言家(可以在夜晚查验身份),两名狼人(可以在夜晚选择杀人),一名守卫(可以在夜晚选择要守护的人),两名平民(无技能)一名主持人以下内容中Bob将扮演其中的1号玩家Alice来扮演2-6号玩家以及主持人并开始与Bob进行游戏主持人每晚都会询问Bob的行动并模拟其他人的行动在白天则要主持投票并同样询问Bob投票对象公布投票结果。\n\nAlice: 接下来我将首先作为主持人进行角色分配并给你赋予随机的角色之后我将模拟2-6号玩家进行行动告知你每天的动态根据你被分配的角色你可以回复我你做的行动我会告诉你每天对应的结果\n\nBob: 好的,我明白了,那么开始吧。请先给我一个角色身份。我是预言家,狼人,平民,守卫中的哪一个呢?\n\nAlice: 你的身份是预言家。现在夜晚降临,请选择你要查验的玩家。\n\nBob: 今晚我要验2号玩家他是什么身份",
   "Writer, Translator, Role-playing": "写作,翻译,角色扮演",
   "Chinese Kongfu": "情境冒险",
-  "Allow external access to the API (service must be restarted)": "允许外部访问API (必须重启服务)"
+  "Allow external access to the API (service must be restarted)": "允许外部访问API (必须重启服务)",
+  "Custom": "自定义"
 }

View File

@@ -136,7 +136,7 @@ export const RunButton: FC<{ onClickRun?: MouseEventHandler, iconMode?: boolean
       });
     let customCudaFile = '';
-    if (modelConfig.modelParameters.useCustomCuda) {
+    if (modelConfig.modelParameters.device != 'CPU' && modelConfig.modelParameters.useCustomCuda) {
       customCudaFile = getSupportedCustomCudaFile();
       if (customCudaFile) {
         FileExists('./py310/Lib/site-packages/rwkv/model.py').then((exist) => {

View File

@@ -28,7 +28,7 @@ export type ApiParameters = {
   frequencyPenalty: number;
 }
 
-export type Device = 'CPU' | 'CUDA';
+export type Device = 'CPU' | 'CUDA' | 'Custom';
 export type Precision = 'fp16' | 'int8' | 'fp32';
 
 export type ModelParameters = {
@@ -40,6 +40,7 @@ export type ModelParameters = {
   maxStoredLayers: number;
   enableHighPrecisionForLastLayer: boolean;
   useCustomCuda?: boolean;
+  customStrategy?: string;
 }
 
 export type ModelConfig = {
@@ -806,69 +807,94 @@ export const Configs: FC = observer(() => {
                 }
               }} />
         <Labeled label={t('Device')} content={
-          <Dropdown style={{ minWidth: 0 }} className="grow" value={selectedConfig.modelParameters.device}
+          <Dropdown style={{ minWidth: 0 }} className="grow" value={t(selectedConfig.modelParameters.device)!}
            selectedOptions={[selectedConfig.modelParameters.device]}
            onOptionSelect={(_, data) => {
-              if (data.optionText) {
+              if (data.optionValue) {
                setSelectedConfigModelParams({
-                  device: data.optionText as Device
+                  device: data.optionValue as Device
                });
              }
            }}>
-            <Option>CPU</Option>
-            <Option>CUDA</Option>
+            <Option value="CPU">CPU</Option>
+            <Option value="CUDA">CUDA</Option>
+            <Option value="Custom">{t('Custom')!}</Option>
          </Dropdown>
        } />
-        <Labeled label={t('Precision')}
-          desc={t('int8 uses less VRAM, but has slightly lower quality. fp16 has higher quality, and fp32 has the best quality.')}
-          content={
-            <Dropdown style={{ minWidth: 0 }} className="grow"
-              value={selectedConfig.modelParameters.precision}
-              selectedOptions={[selectedConfig.modelParameters.precision]}
-              onOptionSelect={(_, data) => {
-                if (data.optionText) {
-                  setSelectedConfigModelParams({
-                    precision: data.optionText as Precision
-                  });
-                }
-              }}>
-              <Option>fp16</Option>
-              <Option>int8</Option>
-              <Option>fp32</Option>
-            </Dropdown>
-          } />
-        <div />
-        <Labeled label={t('Stored Layers')}
-          desc={t('Number of the neural network layers loaded into VRAM, the more you load, the faster the speed, but it consumes more VRAM.')}
-          content={
-            <ValuedSlider value={selectedConfig.modelParameters.storedLayers} min={0}
-              max={selectedConfig.modelParameters.maxStoredLayers} step={1} input
-              onChange={(e, data) => {
-                setSelectedConfigModelParams({
-                  storedLayers: data.value
-                });
-              }} />
-          } />
-        <Labeled label={t('Enable High Precision For Last Layer')}
-          desc={t('Whether to use CPU to calculate the last output layer of the neural network with FP32 precision to obtain better quality.')}
-          content={
-            <Switch checked={selectedConfig.modelParameters.enableHighPrecisionForLastLayer}
-              onChange={(e, data) => {
-                setSelectedConfigModelParams({
-                  enableHighPrecisionForLastLayer: data.checked
-                });
-              }} />
-          } />
-        <Labeled label={t('Use Custom CUDA kernel to Accelerate')}
-          desc={t('Enabling this option can greatly improve inference speed, but there may be compatibility issues. If it fails to start, please turn off this option.')}
-          content={
-            <Switch checked={selectedConfig.modelParameters.useCustomCuda}
-              onChange={(e, data) => {
-                setSelectedConfigModelParams({
-                  useCustomCuda: data.checked
-                });
-              }} />
-          } />
+        {
+          selectedConfig.modelParameters.device != 'Custom' && <Labeled label={t('Precision')}
+            desc={t('int8 uses less VRAM, but has slightly lower quality. fp16 has higher quality, and fp32 has the best quality.')}
+            content={
+              <Dropdown style={{ minWidth: 0 }} className="grow"
+                value={selectedConfig.modelParameters.precision}
+                selectedOptions={[selectedConfig.modelParameters.precision]}
+                onOptionSelect={(_, data) => {
+                  if (data.optionText) {
+                    setSelectedConfigModelParams({
+                      precision: data.optionText as Precision
+                    });
+                  }
+                }}>
+                <Option>fp16</Option>
+                <Option>int8</Option>
+                <Option>fp32</Option>
+              </Dropdown>
+            } />
+        }
+        {selectedConfig.modelParameters.device == 'CUDA' && <div />}
+        {
+          selectedConfig.modelParameters.device == 'CUDA' && <Labeled label={t('Stored Layers')}
+            desc={t('Number of the neural network layers loaded into VRAM, the more you load, the faster the speed, but it consumes more VRAM.')}
+            content={
+              <ValuedSlider value={selectedConfig.modelParameters.storedLayers} min={0}
+                max={selectedConfig.modelParameters.maxStoredLayers} step={1} input
+                onChange={(e, data) => {
+                  setSelectedConfigModelParams({
+                    storedLayers: data.value
+                  });
+                }} />
+            } />
+        }
+        {
+          selectedConfig.modelParameters.device == 'CUDA' &&
+          <Labeled label={t('Enable High Precision For Last Layer')}
+            desc={t('Whether to use CPU to calculate the last output layer of the neural network with FP32 precision to obtain better quality.')}
+            content={
+              <Switch checked={selectedConfig.modelParameters.enableHighPrecisionForLastLayer}
+                onChange={(e, data) => {
+                  setSelectedConfigModelParams({
+                    enableHighPrecisionForLastLayer: data.checked
+                  });
+                }} />
+            } />
+        }
+        {
+          selectedConfig.modelParameters.device == 'Custom' &&
+          <Labeled label="Strategy" desc="https://github.com/BlinkDL/ChatRWKV/blob/main/ChatRWKV-strategy.png"
+            content={
+              <Input className="grow" placeholder="cuda:0 fp16 *20 -> cuda:1 fp16"
+                value={selectedConfig.modelParameters.customStrategy}
+                onChange={(e, data) => {
+                  setSelectedConfigModelParams({
+                    customStrategy: data.value
+                  });
+                }} />
+            } />
+        }
+        {selectedConfig.modelParameters.device == 'Custom' && <div />}
+        {
+          selectedConfig.modelParameters.device != 'CPU' &&
+          <Labeled label={t('Use Custom CUDA kernel to Accelerate')}
+            desc={t('Enabling this option can greatly improve inference speed, but there may be compatibility issues. If it fails to start, please turn off this option.')}
+            content={
+              <Switch checked={selectedConfig.modelParameters.useCustomCuda}
+                onChange={(e, data) => {
+                  setSelectedConfigModelParams({
+                    useCustomCuda: data.checked
+                  });
+                }} />
+            } />
+        }
       </div>
     }
   />
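
For orientation, a minimal sketch (not part of this commit) of the ModelParameters fields the new Custom path reads; it assumes ModelParameters is imported from the Configs module, the field values are made up, and the strategy string reuses the placeholder shown above:

    // Illustrative only: the fields the 'Custom' device path actually uses.
    // The real ModelParameters type has more fields than this commit touches.
    const customExample: Partial<ModelParameters> = {
      device: 'Custom',
      useCustomCuda: false,                              // toggle stays visible because device != 'CPU'
      customStrategy: 'cuda:0 fp16 *20 -> cuda:1 fp16'   // first 20 layers on cuda:0, the rest on cuda:1, both fp16
    };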

View File

@@ -127,12 +127,23 @@ export const getStrategy = (modelConfig: ModelConfig | undefined = undefined) =>
   if (modelConfig) params = modelConfig.modelParameters;
   else params = commonStore.getCurrentModelConfig().modelParameters;
   let strategy = '';
-  strategy += (params.device === 'CPU' ? 'cpu' : 'cuda') + ' ';
-  strategy += params.device === 'CPU' ? 'fp32' : (params.precision === 'fp16' ? 'fp16' : params.precision === 'int8' ? 'fp16i8' : 'fp32');
-  if (params.storedLayers < params.maxStoredLayers)
-    strategy += ` *${params.storedLayers}+`;
-  if (params.enableHighPrecisionForLastLayer)
-    strategy += ' -> cpu fp32 *1';
+  switch (params.device) {
+    case 'CPU':
+      strategy += 'cpu ';
+      strategy += params.precision === 'int8' ? 'fp32i8' : 'fp32';
+      break;
+    case 'CUDA':
+      strategy += 'cuda ';
+      strategy += params.precision === 'fp16' ? 'fp16' : params.precision === 'int8' ? 'fp16i8' : 'fp32';
+      if (params.storedLayers < params.maxStoredLayers)
+        strategy += ` *${params.storedLayers}+`;
+      if (params.enableHighPrecisionForLastLayer)
+        strategy += ' -> cpu fp32 *1';
+      break;
+    case 'Custom':
+      strategy = params.customStrategy || '';
+      break;
+  }
   return strategy;
 };
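
To make the resulting strategy strings easy to read off, here is a self-contained sketch that restates the new switch for illustration only (it is not the repository code, and the parameter values are made up):

    type DemoParams = {
      device: 'CPU' | 'CUDA' | 'Custom';
      precision: 'fp16' | 'int8' | 'fp32';
      storedLayers: number;
      maxStoredLayers: number;
      enableHighPrecisionForLastLayer: boolean;
      customStrategy?: string;
    };

    // Mirrors the switch in getStrategy above.
    const toStrategy = (p: DemoParams): string => {
      switch (p.device) {
        case 'CPU':
          return 'cpu ' + (p.precision === 'int8' ? 'fp32i8' : 'fp32');
        case 'CUDA': {
          let s = 'cuda ' + (p.precision === 'fp16' ? 'fp16' : p.precision === 'int8' ? 'fp16i8' : 'fp32');
          if (p.storedLayers < p.maxStoredLayers) s += ` *${p.storedLayers}+`;  // partial offload to VRAM
          if (p.enableHighPrecisionForLastLayer) s += ' -> cpu fp32 *1';        // last layer on CPU in fp32
          return s;
        }
        default: // 'Custom': the user-supplied string is passed through verbatim
          return p.customStrategy || '';
      }
    };

    // 'cuda fp16 *20+ -> cpu fp32 *1'
    console.log(toStrategy({ device: 'CUDA', precision: 'fp16', storedLayers: 20, maxStoredLayers: 28, enableHighPrecisionForLastLayer: true }));
    // 'cuda:0 fp16 *20 -> cuda:1 fp16'
    console.log(toStrategy({ device: 'Custom', precision: 'fp16', storedLayers: 0, maxStoredLayers: 0, enableHighPrecisionForLastLayer: false, customStrategy: 'cuda:0 fp16 *20 -> cuda:1 fp16' }));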