log Generation Prompt

This commit is contained in:
josc146 2023-06-12 13:41:51 +08:00
parent bbd1ac1484
commit 8431b5d24f
3 changed files with 4 additions and 2 deletions

View File

@@ -87,7 +87,7 @@ def longest_prefix_state(body: LongestPrefixStateBody, request: Request):
v = dtrie[id] v = dtrie[id]
device = v["device"] device = v["device"]
prompt = trie[id] prompt = trie[id]
quick_log(request, body, "Hit: " + prompt) quick_log(request, body, "Hit:\n" + prompt)
return { return {
"prompt": prompt, "prompt": prompt,
"tokens": v["tokens"], "tokens": v["tokens"],

View File

@@ -22,7 +22,7 @@ def quick_log(request: Request, body: Any, response: str):
if body if body
else "" else ""
) )
+ (f"Response:\n{response}\n" if response else "") + (f"Data:\n{response}\n" if response else "")
) )

View File

@@ -2,6 +2,7 @@ import os
import pathlib import pathlib
import copy import copy
from typing import Dict, List from typing import Dict, List
from utils.log import quick_log
from fastapi import HTTPException from fastapi import HTTPException
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from rwkv_pip.utils import PIPELINE from rwkv_pip.utils import PIPELINE
@@ -101,6 +102,7 @@ The following is a coherent verbose detailed conversation between a girl named {
return out return out
def generate(self, prompt: str, stop: str = None): def generate(self, prompt: str, stop: str = None):
quick_log(None, None, "Generation Prompt:\n" + prompt)
cache = None cache = None
delta_prompt = prompt delta_prompt = prompt
try: try: