Improve API docs: add request-body examples to the OpenAPI schema

This commit is contained in:
josc146 2023-06-15 21:52:22 +08:00
parent 3f77762fda
commit 21c3009945
4 changed files with 55 additions and 3 deletions

View File

@ -24,6 +24,21 @@ class ChatCompletionBody(ModelConfigBody):
stream: bool = False
stop: str = None
class Config:
    """Pydantic model config: example chat-completion payload rendered in the OpenAPI docs."""

    schema_extra = {
        "example": dict(
            messages=[{"role": "user", "content": "hello"}],
            model="rwkv",
            stream=False,
            stop=None,
            max_tokens=1000,
            temperature=1.2,
            top_p=0.5,
            presence_penalty=0.4,
            frequency_penalty=0.4,
        )
    }
completion_lock = Lock()
@ -242,6 +257,21 @@ class CompletionBody(ModelConfigBody):
stream: bool = False
stop: str = None
class Config:
    """Pydantic model config: example text-completion payload rendered in the OpenAPI docs."""

    # Prompt text is part of the served schema example — kept verbatim.
    _example_prompt = "The following is an epic science fiction masterpiece that is immortalized, with delicate descriptions and grand depictions of interstellar civilization wars.\nChapter 1.\n"
    schema_extra = {
        "example": dict(
            prompt=_example_prompt,
            model="rwkv",
            stream=False,
            stop=None,
            max_tokens=100,
            temperature=1.2,
            top_p=0.5,
            presence_penalty=0.4,
            frequency_penalty=0.4,
        )
    }
@router.post("/v1/completions")
@router.post("/completions")

View File

@ -29,6 +29,15 @@ class SwitchModelBody(BaseModel):
strategy: str
customCuda: bool = False
class Config:
    """Pydantic model config: example switch-model request rendered in the OpenAPI docs."""

    schema_extra = {
        "example": dict(
            model="models/RWKV-4-World-3B-v1-OnlyForTest_80%_trained-20230612-ctx4096.pth",
            strategy="cuda fp16",
            customCuda=False,
        )
    }
@router.post("/switch-model")
def switch_model(body: SwitchModelBody, response: Response, request: Request):
@ -59,7 +68,9 @@ def switch_model(body: SwitchModelBody, response: Response, request: Request):
print(e)
quick_log(request, body, f"Exception: {e}")
global_var.set(global_var.Model_Status, global_var.ModelStatus.Offline)
raise HTTPException(Status.HTTP_500_INTERNAL_SERVER_ERROR, "failed to load")
raise HTTPException(
Status.HTTP_500_INTERNAL_SERVER_ERROR, f"failed to load: {e}"
)
if global_var.get(global_var.Model_Config) is None:
global_var.set(

View File

@ -201,6 +201,17 @@ class ModelConfigBody(BaseModel):
presence_penalty: float = Field(default=None, ge=-2, le=2)
frequency_penalty: float = Field(default=None, ge=-2, le=2)
class Config:
    """Pydantic model config: example sampling-parameter payload rendered in the OpenAPI docs."""

    schema_extra = {
        "example": dict(
            max_tokens=1000,
            temperature=1.2,
            top_p=0.5,
            presence_penalty=0.4,
            frequency_penalty=0.4,
        )
    }
def set_rwkv_config(model: RWKV, body: ModelConfigBody):
if body.max_tokens is not None:

View File

# NOTE(review): this span is a rendered git diff, not a runnable script — the
# 20230607 model lines below are the REMOVED side and the "fixed-20230612"
# lines are the ADDED side; only one download/switch pair belongs in the
# final script. Kept verbatim here.
@ -11,8 +11,8 @@ start python ./RWKV-Runner/backend-python/main.py
# Create the models directory if missing, then load the BITS transfer module.
powershell -Command "(Test-Path ./RWKV-Runner/models) -or (mkdir RWKV-Runner/models)"
powershell -Command "Import-Module BitsTransfer"
# Removed lines: download the original 20230607 checkpoint (skipped if present)
# and POST it to the backend's /switch-model endpoint.
powershell -Command "(Test-Path ./RWKV-Runner/models/RWKV-4-World-1.5B-v1-20230607-ctx4096.pth) -or (Start-BitsTransfer https://huggingface.co/BlinkDL/rwkv-4-world/resolve/main/RWKV-4-World-1.5B-v1-20230607-ctx4096.pth ./RWKV-Runner/models/RWKV-4-World-1.5B-v1-20230607-ctx4096.pth)"
powershell -Command "Invoke-WebRequest http://127.0.0.1:8000/switch-model -Method POST -ContentType 'application/json' -Body '{\"model\":\"./RWKV-Runner/models/RWKV-4-World-1.5B-v1-20230607-ctx4096.pth\",\"strategy\":\"cuda fp32 *20+\"}'"
# Added lines: same two steps, pointing at the fixed-20230612 checkpoint.
powershell -Command "(Test-Path ./RWKV-Runner/models/RWKV-4-World-1.5B-v1-fixed-20230612-ctx4096.pth) -or (Start-BitsTransfer https://huggingface.co/BlinkDL/rwkv-4-world/resolve/main/RWKV-4-World-1.5B-v1-fixed-20230612-ctx4096.pth ./RWKV-Runner/models/RWKV-4-World-1.5B-v1-fixed-20230612-ctx4096.pth)"
powershell -Command "Invoke-WebRequest http://127.0.0.1:8000/switch-model -Method POST -ContentType 'application/json' -Body '{\"model\":\"./RWKV-Runner/models/RWKV-4-World-1.5B-v1-fixed-20230612-ctx4096.pth\",\"strategy\":\"cuda fp32 *20+\"}'"
# Clone the ChatGPT-Next-Web frontend (shallow) and enter it.
git clone https://github.com/Yidadaa/ChatGPT-Next-Web --depth=1
cd ChatGPT-Next-Web