fix a tps error

josc146 2024-05-16 13:48:06 +08:00
parent e1c12202aa
commit b24a18cd3a
2 changed files with 8 additions and 2 deletions


@@ -191,7 +191,10 @@ async def eval_rwkv(
     # torch_gc()
     requests_num = requests_num - 1
     completion_end_time = time.time()
-    tps = completion_tokens / (completion_end_time - completion_start_time)
+    completion_interval = completion_end_time - completion_start_time
+    tps = 0
+    if completion_interval > 0:
+        tps = completion_tokens / completion_interval
     print(f"Generation TPS: {tps:.2f}")
     if await request.is_disconnected():
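
Why the guard is needed: the removed line divides by the raw elapsed time, so an interval of exactly zero (possible when a run finishes within the timer's resolution) raises ZeroDivisionError. A minimal reproduction of that failure mode, using placeholder values rather than the project's runtime state:

import time

completion_start_time = time.time()
completion_end_time = completion_start_time  # zero-length interval
completion_tokens = 128  # placeholder token count

# Pre-change expression from the removed line: crashes on a zero interval.
try:
    tps = completion_tokens / (completion_end_time - completion_start_time)
except ZeroDivisionError:
    print("ZeroDivisionError: completion interval was 0")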


@@ -257,7 +257,10 @@ class AbstractRWKV(ABC):
             self.fix_tokens(self.pipeline.encode(delta_prompt))
         )
         prompt_end_time = time.time()
-        tps = prompt_token_len / (prompt_end_time - prompt_start_time)
+        prompt_interval = prompt_end_time - prompt_start_time
+        tps = 0
+        if prompt_interval > 0:
+            tps = prompt_token_len / prompt_interval
         print(f"Prompt Prefill TPS: {tps:.2f}", end=" ", flush=True)
         try:
             state_cache.add_state(
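
Both hunks apply the same guard to different TPS measurements. A standalone sketch of that pattern for reference; the safe_tps helper name is hypothetical and not part of this commit:

def safe_tps(token_count: int, start: float, end: float) -> float:
    # Same guard as the added lines: report 0 TPS instead of dividing
    # by a zero-length interval.
    interval = end - start
    if interval > 0:
        return token_count / interval
    return 0.0

print(f"TPS: {safe_tps(128, 10.0, 10.5):.2f}")  # 256.00
print(f"TPS: {safe_tps(128, 10.0, 10.0):.2f}")  # 0.00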