import os

import gradio as gr
import weaviate

# Weaviate collection (class) that stores the text chunks we search over.
collection_name = "Chunk"


def predict(input_text):
    # Connect to Weaviate; the OpenAI key is forwarded so Weaviate's
    # OpenAI-backed vectorizer and generative modules can use it.
    client = weaviate.Client(
        url=os.environ["WEAVIATE_URL"],
        auth_client_secret=weaviate.AuthApiKey(api_key=os.environ["WEAVIATE_API_KEY"]),
        additional_headers={
            "X-OpenAI-Api-Key": os.environ["OPENAI_API_KEY"],
        },
    )
    # Semantic search for the single closest chunk, then have the generative
    # module run a prompt consisting of the retrieved text itself.
    return (
        client.query
        .get(class_name=collection_name, properties=["text"])
        .with_near_text({"concepts": [input_text]})
        .with_limit(1)
        .with_generate(single_prompt="{text}")
        .do()
    )
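
# For reference, a rough sketch of the dict shape .do() is expected to return
# for this query (illustrative only; the values below are made up):
#
#   {
#       "data": {
#           "Get": {
#               "Chunk": [
#                   {
#                       "text": "...retrieved chunk...",
#                       "_additional": {"generate": {"singleResult": "...", "error": None}},
#                   }
#               ]
#           }
#       }
#   }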

iface = gr.Interface(
    fn=predict,      # the function to wrap
    inputs="text",   # the input type
    outputs="text",  # the output type
)

if __name__ == "__main__":
    iface.launch()
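
# Local usage sketch (assumes this file is saved as app.py, the conventional
# entry point for a Gradio Space; the env var names come from the code above):
#
#   export WEAVIATE_URL="https://<your-cluster>.weaviate.network"
#   export WEAVIATE_API_KEY="<weaviate api key>"
#   export OPENAI_API_KEY="<openai api key>"
#   python app.py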