import app.commons.globals as glb
import app.commons.utilities as utils
import app.loader as loader

from app.function_calling.model_handler import ArchFunctionHandler
from app.prompt_guard.model_handler import ArchGuardHanlder
# Module-wide logger for the model server.
logger = utils.get_model_server_logger()

# Handler that builds function-calling prompts and parses tool-call output
# from the Arch-Function model.
# NOTE(review): "hanlder" is a typo, but renaming this module-level name
# would break any importers — fix in a coordinated change, not here.
arch_function_hanlder = ArchFunctionHandler()
# Leading words used to recognize a conversational "prefill" style opening
# in model output (e.g. "Sure, ...", "Certainly, ...").
PREFILL_LIST = ["May", "Could", "Sure", "Definitely", "Certainly", "Of course", "Can"]

# Feature flag: whether prefill handling is active.
PREFILL_ENABLED = True

# Sentinel token that opens a tool-call section in the model's output stream.
TOOL_CALL_TOKEN = "<tool_call>"
# Remote endpoint serving the Arch-Function model.
# NOTE(review): the "/v1" suffix suggests an OpenAI-compatible API — confirm
# against utils.get_client before relying on that.
arch_function_endpoint = "https://api.fc.archgw.com/v1"

# Client instance created once at import time and shared module-wide.
arch_function_client = utils.get_client(arch_function_endpoint)
# Sampling parameters passed with every Arch-Function generation request.
arch_function_generation_params = {
    "temperature": 0.2,  # low temperature: near-deterministic tool-call output
    "top_p": 1.0,
    "top_k": 50,
    "max_tokens": 512,
    # 151645 is presumably the model's end-of-turn token id — verify against
    # the served model's tokenizer before changing.
    "stop_token_ids": [151645],
    # "top_logprobs": 10,
}
# Maps the runtime device kind to the Arch-Guard model variant to load;
# the CPU build is a distinct (presumably quantized/optimized) artifact,
# while CUDA and Apple-Silicon (mps) share the full model.
arch_guard_model_type = {
    "cpu": "katanemo/Arch-Guard-cpu",
    "cuda": "katanemo/Arch-Guard",
    "mps": "katanemo/Arch-Guard",
}
# Model definition: models are loaded eagerly at import time so that the
# first request does not pay the load cost.
embedding_model = loader.get_embedding_model()
zero_shot_model = loader.get_zero_shot_model()
# Load the Arch-Guard variant matching the detected device ("cpu"/"cuda"/"mps").
# Raises KeyError at import time if glb.DEVICE is not one of those keys.
prompt_guard_dict = loader.get_prompt_guard(arch_guard_model_type[glb.DEVICE])

# NOTE(review): "Hanlder" typo originates in the imported class name
# (app.prompt_guard.model_handler.ArchGuardHanlder) — fix upstream, not here.
arch_guard_handler = ArchGuardHanlder(model_dict=prompt_guard_dict)
# Patterns for function name and parameter parsing