import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline, set_seed

#===========================================#
#         Loads Model and Pipeline          #
#===========================================#

# AutoModelForCausalLM replaces the deprecated AutoModelWithLMHead for GPT-2-style generation;
# the unused GPT2* / Flax imports and the unused torch import have been dropped.
tokenizer = AutoTokenizer.from_pretrained("flax-community/swe-gpt-wiki")
model = AutoModelForCausalLM.from_pretrained("flax-community/swe-gpt-wiki")

generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
set_seed(42)  # fixed seed so generations are reproducible across reruns

#===========================================#
#              Streamlit Code               #
#===========================================#

st.title('We use Wikipedia to generate text.')

desc = ("A Swedish GPT model trained on Wikipedia. "
        "Enter a prompt in the field below and choose how much text should be generated.")
st.write(desc)

# max_length is measured in tokens (prompt included), not characters or sentences.
max_length = st.number_input('Maximum length (tokens)', min_value=1, max_value=150, value=30)

user_input = st.text_input('Starting text (can be left blank)')

if st.button('Generate Text'):
    generated_text = generator(user_input, max_length=max_length, num_return_sequences=1)
    st.write(generated_text[0]["generated_text"])