import copy
import gc
from typing import Any, Dict, List

import torch
from fastapi import APIRouter, HTTPException, Request, Response, status
from pydantic import BaseModel

from utils.log import quick_log

router = APIRouter()

trie = None
dtrie: Dict = {}
max_trie_len = 3000
loop_start_id = 1  # to prevent preloaded prompts from being deleted
loop_del_trie_id = loop_start_id
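
# Cache design: `trie` maps prompt strings to integer ids and `dtrie` maps
# those ids to the cached payload (tokens, model state, logits, device).
# When the trie reaches max_trie_len, eviction advances loop_del_trie_id
# through the ids as a ring, wrapping back to loop_start_id, so ids below
# loop_start_id (preloaded prompts) are never evicted.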


def init():
    global trie
    try:
        # cyac is optional; if it is missing, trie stays None and the
        # endpoints below respond with 400 "trie not loaded"
        import cyac

        # import mmap
        # import os
        #
        # if os.path.exists("state_cache.trie"):
        #     with open("state_cache.trie", "r") as bf:
        #         buff_object = mmap.mmap(bf.fileno(), 0, access=mmap.ACCESS_READ)
        #     trie = cyac.Trie.from_buff(buff_object, copy=False)
        # else:
        trie = cyac.Trie()
    except ModuleNotFoundError:
        print("cyac not found")


class AddStateBody(BaseModel):
    prompt: str
    tokens: List[str]
    state: Any  # expected to be a list of per-layer torch tensors (see add_state)
    logits: Any
@router.post("/add-state")
|
|
|
|
def add_state(body: AddStateBody):
|
2023-06-12 15:22:17 +08:00
|
|
|
global trie, dtrie, loop_del_trie_id
|
2023-05-28 23:52:38 +08:00
|
|
|
if trie is None:
|
|
|
|
raise HTTPException(status.HTTP_400_BAD_REQUEST, "trie not loaded")
|
|
|
|
|
2023-06-15 22:37:00 +08:00
|
|
|
try:
|
2023-06-19 22:32:02 +08:00
|
|
|
id: int = trie.insert(body.prompt)
|
|
|
|
device: torch.device = body.state[0].device
|
2023-06-15 22:37:00 +08:00
|
|
|
dtrie[id] = {
|
|
|
|
"tokens": copy.deepcopy(body.tokens),
|
|
|
|
"state": [tensor.cpu() for tensor in body.state]
|
|
|
|
if device != torch.device("cpu")
|
|
|
|
else copy.deepcopy(body.state),
|
|
|
|
"logits": copy.deepcopy(body.logits),
|
|
|
|
"device": device,
|
|
|
|
}
|
2023-06-12 15:22:17 +08:00
|
|
|
|
2023-06-15 22:37:00 +08:00
|
|
|
if len(trie) >= max_trie_len:
|
|
|
|
del_prompt = trie[loop_del_trie_id]
|
|
|
|
trie.remove(del_prompt)
|
|
|
|
dtrie[loop_del_trie_id] = None
|
|
|
|
loop_del_trie_id = loop_del_trie_id + 1
|
|
|
|
if loop_del_trie_id >= max_trie_len:
|
|
|
|
loop_del_trie_id = loop_start_id
|
|
|
|
|
|
|
|
quick_log(
|
|
|
|
None,
|
|
|
|
None,
|
|
|
|
f"New Trie Id: {id}\nTrie Len: {len(trie)}\nTrie Buff Size: {trie.buff_size()}\nDtrie Buff Size Of Id: {_get_a_dtrie_buff_size(dtrie[id])}",
|
|
|
|
)
|
|
|
|
return "success"
|
|
|
|
except Exception as e:
|
|
|
|
raise HTTPException(
|
|
|
|
status.HTTP_400_BAD_REQUEST, f"insert failed, bad prompt.\n{e}"
|
|
|
|
)
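
# A minimal in-process usage sketch (the tensor shapes below are hypothetical;
# real shapes depend on the loaded model):
#
#   state = [torch.zeros(5, 768) for _ in range(24)]
#   add_state(AddStateBody(prompt="Hello", tokens=["Hello"],
#                          state=state, logits=torch.zeros(65536)))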


@router.post("/reset-state")
def reset_state():
    global trie, dtrie
    if trie is None:
        raise HTTPException(status.HTTP_400_BAD_REQUEST, "trie not loaded")

    # import locally, as in init(); reaching this point implies the import succeeds
    import cyac

    trie = cyac.Trie()
    dtrie = {}
    gc.collect()

    return "success"


class LongestPrefixStateBody(BaseModel):
    prompt: str


def _get_a_dtrie_buff_size(dtrie_v):
    # Rough, model-specific estimate of one entry's memory footprint:
    # ~54 bytes per cached token string plus fixed sizes observed for the
    # state tensors (491520 bytes), the logits tensor (262144 bytes), and
    # container overhead (28 bytes).
    # TODO: compute from the actual tensors instead, e.g.
    # sum(t.nelement() * t.element_size() for t in dtrie_v["state"])
    # + dtrie_v["logits"].nelement() * dtrie_v["logits"].element_size()
    return 54 * len(dtrie_v["tokens"]) + 491520 + 262144 + 28
@router.post("/longest-prefix-state")
|
2023-06-09 20:46:19 +08:00
|
|
|
def longest_prefix_state(body: LongestPrefixStateBody, request: Request):
|
2023-05-28 23:52:38 +08:00
|
|
|
global trie
|
|
|
|
if trie is None:
|
|
|
|
raise HTTPException(status.HTTP_400_BAD_REQUEST, "trie not loaded")
|
|
|
|
|
|
|
|
id = -1
|
2023-06-15 22:37:00 +08:00
|
|
|
try:
|
|
|
|
for id, len in trie.prefix(body.prompt):
|
|
|
|
pass
|
|
|
|
except:
|
2023-05-28 23:52:38 +08:00
|
|
|
pass
|
|
|
|
if id != -1:
|
|
|
|
v = dtrie[id]
|
2023-06-19 22:32:02 +08:00
|
|
|
device: torch.device = v["device"]
|
|
|
|
prompt: str = trie[id]
|
|
|
|
|
2023-06-12 13:41:51 +08:00
|
|
|
quick_log(request, body, "Hit:\n" + prompt)
|
2023-05-28 23:52:38 +08:00
|
|
|
return {
|
2023-06-09 20:46:19 +08:00
|
|
|
"prompt": prompt,
|
2023-05-28 23:52:38 +08:00
|
|
|
"tokens": v["tokens"],
|
2023-06-02 21:33:57 +08:00
|
|
|
"state": [tensor.to(device) for tensor in v["state"]]
|
|
|
|
if device != torch.device("cpu")
|
|
|
|
else v["state"],
|
2023-05-28 23:52:38 +08:00
|
|
|
"logits": v["logits"],
|
2023-06-19 22:32:02 +08:00
|
|
|
"device": device.type,
|
2023-05-28 23:52:38 +08:00
|
|
|
}
|
|
|
|
else:
|
2023-06-12 12:32:50 +08:00
|
|
|
return {
|
|
|
|
"prompt": "",
|
|
|
|
"tokens": [],
|
|
|
|
"state": None,
|
|
|
|
"logits": None,
|
|
|
|
"device": None,
|
|
|
|
}
|
2023-05-28 23:52:38 +08:00
|
|
|
|
|
|
|
|
|
|
|
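
# A minimal sketch of the intended caller flow (hypothetical variable names;
# the real completion route may differ):
#
#   hit = longest_prefix_state(LongestPrefixStateBody(prompt=prompt), request)
#   if hit["prompt"]:  # cache hit: resume generation from the cached state
#       state, logits = hit["state"], hit["logits"]
#   ...
#   add_state(AddStateBody(prompt=new_prompt, tokens=tokens, state=state, logits=logits))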
@router.post("/save-state")
|
|
|
|
def save_state():
|
|
|
|
global trie
|
|
|
|
if trie is None:
|
|
|
|
raise HTTPException(status.HTTP_400_BAD_REQUEST, "trie not loaded")
|
|
|
|
|
2023-06-12 12:32:50 +08:00
|
|
|
# trie.save("state_cache.trie")
|
2023-05-28 23:52:38 +08:00
|
|
|
|
2023-06-12 12:32:50 +08:00
|
|
|
return "not implemented"
|
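
# Note: persisting only the trie (the commented trie.save above, paired with the
# mmap loading sketch commented out in init()) would restore prompt ids but not
# the tensors held in dtrie, which would need their own serialization; the
# endpoint is left unimplemented for now.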