import gradio as gr
from transformers import pipeline
import requests
model_id = "gpt2" # You can replace this with any model of your choice
def fetch_s3_text_file(url):
    try:
        response = requests.get(url)
        response.raise_for_status()  # Raise an HTTPError for bad responses (4xx and 5xx)
        return response.text
    except requests.exceptions.RequestException as e:
        print(f"Error fetching the file: {e}")
        return None
#access_token = fetch_s3_text_file("https://mybookbooks.s3.amazonaws.com/key.txt")
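# Load a Hugging Face text-generation pipeline for the selected model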
generator = pipeline("text-generation", model=model_id)
# Define the function to process the input and generate text
def generate_text(prompt):
    response = generator(prompt, max_length=100, num_return_sequences=1)
    generated_text = response[0]['generated_text']
    return generated_text
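# Wire the generator into a simple Gradio UI: one text input, one text output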
demo = gr.Interface(fn=generate_text, inputs="text", outputs="text")
demo.launch()