- ChatML for story/RP (a ChatML sketch follows below)
- Mistral for anything
- Alpaca for story
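
For reference, ChatML wraps every turn in `<|im_start|>`/`<|im_end|>` markers. A minimal sketch of that format for a single-turn prompt; the `to_chatml` helper name and the example text are illustrative only, not part of this repo:

```python
def to_chatml(system: str, user: str) -> str:
    """Build a single-turn ChatML prompt string (hypothetical helper)."""
    return (
        f"<|im_start|>system\n{system}<|im_end|>\n"
        f"<|im_start|>user\n{user}<|im_end|>\n"
        "<|im_start|>assistant\n"  # leave the assistant turn open for generation
    )

print(to_chatml("You are a creative storytelling assistant.",
                "Write the opening scene of a heist story."))
```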
Usage with llama-cpp-python:

```python
from llama_cpp import Llama

# Downloads the GGUF file from the Hugging Face Hub and loads it
llm = Llama.from_pretrained(
    repo_id="BeaverAI/Theia-21B-v2b-GGUF",
    filename="Theia-21B-v2b-BF16.gguf",
)

llm.create_chat_completion(
    messages=[
        {
            "role": "user",
            "content": "How do I delete my digital footprint?"
        }
    ]
)
```
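
`create_chat_completion` returns an OpenAI-style response dict; a short sketch of reading the reply, where the `response` variable name is just for illustration:

```python
# Capture the return value to get at the generated text
response = llm.create_chat_completion(
    messages=[{"role": "user", "content": "How do I delete my digital footprint?"}]
)
# The reply text sits in the first choice's message
print(response["choices"][0]["message"]["content"])
```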