# Streamlit app that translates text between languages using LangChain and an OpenAI chat model.
import os

import streamlit as st
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

# Read the OpenAI API key from the environment (standard OPENAI_API_KEY variable).
openai_api_key = os.getenv("OPENAI_API_KEY")

# Chat model used for translation; temperature=0 keeps the output deterministic.
llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo", openai_api_key=openai_api_key)

# Prompt template: the system message frames the translation task,
# the human message carries the text to translate.
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant that translates {il} to {ol}."),
    ("human", "{input}"),
])

# Streamlit UI: page title, language fields, and the text to translate.
st.title("Language Translator")

input_language = st.text_input("Input Language")
output_language = st.text_input("Output Language")
input_text = st.text_area("Input Text:", height=100)

if st.button("Translate"):
    if input_language and output_language and input_text:
        # Pipe the prompt into the model and run the translation.
        chain = prompt | llm
        response = chain.invoke({
            "il": input_language,
            "ol": output_language,
            "input": input_text,
        })
        st.text_area("Translated Text:", value=response.content, height=100)
    else:
        st.warning("Please fill in the input language, output language, and text to translate.")
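
# To run this app locally (assuming this file is saved as app.py):
#   streamlit run app.py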