from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from ctransformers import AutoModelForCausalLM
# Registry of supported models: Hugging Face repo, GGUF file, and the
# model-specific chat template pieces wrapped around the user prompt
llms = {
    "tinyllama": {"name": "TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF", "file": "tinyllama-1.1b-1t-openorca.Q4_K_M.gguf", "suffix": "<|im_end|><|im_start|>assistant", "prefix": "<|im_start|>system You are a helpful assistant <|im_end|><|im_start|>user"},
    "orca2": {"name": "TheBloke/Orca-2-7B-GGUF", "file": "orca-2-7b.Q4_K_M.gguf", "suffix": "<|im_end|><|im_start|>assistant", "prefix": "<|im_start|>system You are a helpful assistant<|im_end|><|im_start|>user "},
    "zephyr": {"name": "TheBloke/zephyr-7B-beta-GGUF", "file": "zephyr-7b-beta.Q4_K_M.gguf", "suffix": "</s><|assistant|>", "prefix": "<|system|>You are a helpful assistant</s><|user|> "},
    "mistral": {"name": "TheBloke/Mistral-7B-Instruct-v0.1-GGUF", "file": "mistral-7b-instruct-v0.1.Q4_K_M.gguf", "suffix": "[/INST]", "prefix": "<s>[INST] "},
    "llama2": {"name": "TheBloke/Llama-2-7B-Chat-GGUF", "file": "llama-2-7b-chat.Q4_K_M.gguf", "suffix": "[/INST]", "prefix": "[INST] <<SYS>> You are a helpful assistant <</SYS>>"},
    "solar": {"name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GGUF", "file": "solar-10.7b-instruct-v1.0.Q4_K_M.gguf", "suffix": "\n### Assistant:\n", "prefix": "### User:\n"}
}
# Download every model once at startup so the weights are cached locally
# before the first request arrives
for k in llms.keys():
    AutoModelForCausalLM.from_pretrained(llms[k]["name"], model_file=llms[k]["file"])
# Pydantic request body: the user prompt plus the registry key of the model to run
class validation(BaseModel):
    prompt: str
    llm: str
# FastAPI app
app = FastAPI()

@app.post("/llm_on_cpu")  # the route path is illustrative; choose any path
async def stream(item: validation):
    if item.llm not in llms:
        raise HTTPException(status_code=400, detail=f"Unknown llm '{item.llm}'")
    config = llms[item.llm]
    # Loads from the local cache populated at startup, so no re-download happens here
    llm = AutoModelForCausalLM.from_pretrained(config["name"], model_file=config["file"])
    # Wrap the raw prompt in the model-specific chat template
    prompt = f"{config['prefix']}\n{item.prompt}{config['suffix']}"
    return llm(prompt)
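
The handler above is called `stream`, yet it returns the whole completion in a single response. ctransformers can also yield text chunks incrementally when called with `stream=True`, which plugs directly into FastAPI's `StreamingResponse`. A minimal sketch; the `/llm_on_cpu/stream` path and handler name are my own additions:

from fastapi.responses import StreamingResponse

@app.post("/llm_on_cpu/stream")  # hypothetical second route; rename to suit your app
async def stream_tokens(item: validation):
    config = llms[item.llm]
    llm = AutoModelForCausalLM.from_pretrained(config["name"], model_file=config["file"])
    prompt = f"{config['prefix']}\n{item.prompt}{config['suffix']}"
    # With stream=True, ctransformers returns a generator of text chunks,
    # which StreamingResponse forwards to the client as they are produced
    return StreamingResponse(llm(prompt, stream=True), media_type="text/plain")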
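
Once the server is up (for example with `uvicorn main:app`, or as a Docker Space on Hugging Face), any HTTP client can call it. A quick test with `requests`; the localhost URL assumes a local run and the `/llm_on_cpu` path from above:

import requests

# Assumes the API is running locally on the default uvicorn port;
# replace the URL with your Space's endpoint once deployed
response = requests.post(
    "http://localhost:8000/llm_on_cpu",
    json={"prompt": "Explain gravity to a five-year-old.", "llm": "tinyllama"},
)
print(response.json())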