log Generation Prompt

josc146 2023-06-12 13:41:51 +08:00
parent bbd1ac1484
commit 8431b5d24f
3 changed files with 4 additions and 2 deletions

View File

@@ -87,7 +87,7 @@ def longest_prefix_state(body: LongestPrefixStateBody, request: Request):
     v = dtrie[id]
     device = v["device"]
     prompt = trie[id]
-    quick_log(request, body, "Hit: " + prompt)
+    quick_log(request, body, "Hit:\n" + prompt)
     return {
         "prompt": prompt,
         "tokens": v["tokens"],

View File

@@ -22,7 +22,7 @@ def quick_log(request: Request, body: Any, response: str):
             if body
             else ""
         )
-        + (f"Response:\n{response}\n" if response else "")
+        + (f"Data:\n{response}\n" if response else "")
     )
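For context, here is a self-contained sketch of how quick_log plausibly assembles its message around this hunk. Only the parameter names and the renamed "Data:" fragment come from the diff; the logger setup, the request and body fragments, and their field names are assumptions, and the Request type is replaced by Optional[Any] to keep the sketch dependency-free:

import json
import logging
from typing import Any, Optional

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("quick_log_sketch")

def quick_log(request: Optional[Any], body: Any, response: str) -> None:
    # Every fragment is guarded, so None or empty arguments contribute nothing.
    logger.info(
        (f"Url: {request.url}\n" if request else "")
        + (
            f"Body: {json.dumps(body.__dict__, ensure_ascii=False)}\n"
            if body
            else ""
        )
        + (f"Data:\n{response}\n" if response else "")  # renamed from "Response:"
    )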

View File

@@ -2,6 +2,7 @@ import os
 import pathlib
 import copy
 from typing import Dict, List
+from utils.log import quick_log
 from fastapi import HTTPException
 from pydantic import BaseModel, Field
 from rwkv_pip.utils import PIPELINE
@@ -101,6 +102,7 @@ The following is a coherent verbose detailed conversation between a girl named {
         return out
 
     def generate(self, prompt: str, stop: str = None):
+        quick_log(None, None, "Generation Prompt:\n" + prompt)
         cache = None
         delta_prompt = prompt
         try:
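The call added in generate passes None for both request and body, which only works because every fragment inside quick_log is guarded by a condition (see the second file's hunk). A brief usage sketch against the quick_log sketch above, with a hypothetical prompt:

quick_log(None, None, "Generation Prompt:\nUser: Hello!\nAssistant:")
# Expected log message, roughly:
#   Data:
#   Generation Prompt:
#   User: Hello!
#   Assistant: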