from os import getenv
from textwrap import dedent

import gradio as gr
from torch import cuda

from webui import build_ui, remove_darkness, get_banner

PUBLIC_DEMO = getenv("SPACE_ID") == "waleko/TikZ-Assistant"

if PUBLIC_DEMO and not cuda.is_available():
    # On the public Space without a GPU, only show a notice asking users to duplicate the Space.
    center = ".gradio-container {text-align: center}"
    with gr.Blocks(css=center, theme=remove_darkness(gr.themes.Soft()), title="TikZ-Assistant") as demo:
        badge = "https://huggingface.co/datasets/huggingface/badges/resolve/main/duplicate-this-space-xl.svg"
        link = "https://huggingface.co/spaces/waleko/TikZ-Assistant?duplicate=true"
        # "Duplicate this Space" badge linking to the duplication page
        html = f'<a href="{link}"><img src="{badge}" alt="Duplicate this Space"></a>'
        message = dedent("""\
            The size of our models exceeds the resource constraints offered by the free tier
            of Hugging Face Spaces. For full functionality, we recommend duplicating this
            space on a paid private GPU runtime.
        """)
        gr.Markdown(f'{get_banner()}\n{message}\n{html}')
    demo.launch()
else:
    # With a GPU available (or outside the public demo), launch the full UI.
    build_ui(lock=False, force_light=True).queue().launch(server_name="0.0.0.0", server_port=7860)