import requests
import gradio as gr
from transformers import pipeline

model_id = "meta-llama/Meta-Llama-3-8B"  # You can replace this with any model of your choice

# Download a plain-text file from a public URL and return its contents, or None on failure
def fetch_s3_text_file(url):
    try:
        response = requests.get(url)
        response.raise_for_status()  # Raise an HTTPError for bad responses (4xx and 5xx)
        return response.text.strip()  # Strip trailing whitespace/newlines so the value is usable as-is
    except requests.exceptions.RequestException as e:
        print(f"Error fetching the file: {e}")
        return None

# Fetch the Hugging Face access token from a text file hosted on S3
access_token = fetch_s3_text_file("https://mybookbooks.s3.amazonaws.com/key.txt")

# Build a text-generation pipeline, authenticating with the fetched token
generator = pipeline("text-generation", model=model_id, token=access_token)

# Define the function to process the input and generate text
def generate_text(prompt):
    response = generator(prompt, max_length=100, num_return_sequences=1)
    generated_text = response[0]['generated_text']
    return generated_text

# Wire the generator into a simple text-in/text-out Gradio interface and launch it
demo = gr.Interface(fn=generate_text, inputs="text", outputs="text")
demo.launch()