# CLI chat demo: interactive read-eval loop that streams model replies.
from llmtuner import ChatModel
from llmtuner.extras.misc import torch_gc

# Enable line editing / input history for `input()` on platforms that
# support GNU readline; degrade gracefully when it is not installed.
try:
    import platform

    if platform.system() != "Windows":
        import readline  # noqa: F401
except ImportError:
    print("Install `readline` for a better experience.")
def main():
    """Run an interactive chat session against the loaded model.

    Prompt commands:
      - ``exit``:  leave the loop and end the program.
      - ``clear``: discard the conversation history and free cached GPU memory.

    Any other input is appended to the history, sent to the model, and the
    reply is streamed token-by-token to stdout.
    """
    chat_model = ChatModel()
    messages = []
    print("Welcome to the CLI application, use `clear` to remove the history, use `exit` to exit the application.")

    while True:
        try:
            query = input("\nUser: ")
        except UnicodeDecodeError:
            # Terminal sent bytes that are not valid UTF-8; ask the user to
            # fix the encoding rather than crashing the session.
            print("Detected decoding error at the inputs, please set the terminal encoding to utf-8.")
            continue
        # NOTE(review): the original `except Exception: raise` clause was a
        # no-op re-raise (identical to not catching) and has been removed.

        command = query.strip()  # strip once instead of per comparison
        if command == "exit":
            break
        if command == "clear":
            messages = []
            torch_gc()  # release GPU cache held by the discarded history
            print("History has been removed.")
            continue

        messages.append({"role": "user", "content": query})
        print("Assistant: ", end="", flush=True)

        response = ""
        for new_text in chat_model.stream_chat(messages):
            print(new_text, end="", flush=True)
            response += new_text
        print()
        messages.append({"role": "assistant", "content": response})
# Script entry point: only run the chat loop when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()