Upload folder using huggingface_hub
app.py CHANGED
@@ -2,7 +2,8 @@ import gradio as gr
 from transformers import pipeline
 
 # Load the llama2 LLM model
-model = pipeline("text-generation", model="llamalanguage/llama2", tokenizer="llamalanguage/llama2")
+# model = pipeline("text-generation", model="llamalanguage/llama2", tokenizer="llamalanguage/llama2")
+model = pipeline("text-generation", model="meta-llama/Llama-2-7b-chat-hf", tokenizer="meta-llama/Llama-2-7b-chat-hf")
 
 # Define the chat function that uses the LLM model
 def chat_interface(input_text):
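The diff ends at the head of chat_interface, so the rest of app.py is not shown here. Below is a minimal sketch of how the updated pipeline call might be wired into a Gradio app; the function body, the generation parameters, and the gr.Interface launch code are illustrative assumptions, not the repository's actual implementation. Note that meta-llama/Llama-2-7b-chat-hf is a gated model, so loading it requires accepting Meta's license and authenticating with a Hugging Face token.

import gradio as gr
from transformers import pipeline

# Load the Llama 2 chat model (gated: requires license acceptance and an HF token)
model = pipeline(
    "text-generation",
    model="meta-llama/Llama-2-7b-chat-hf",
    tokenizer="meta-llama/Llama-2-7b-chat-hf",
)

# Define the chat function that uses the LLM model
def chat_interface(input_text):
    # Illustrative body: generate a continuation and return the text
    outputs = model(input_text, max_new_tokens=256, do_sample=True, temperature=0.7)
    return outputs[0]["generated_text"]

# Hypothetical wiring: expose the function as a simple text-to-text Gradio app
demo = gr.Interface(fn=chat_interface, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()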