"""Streamlit demo: clone a GitHub repository and ask a Hugging Face model what it is for."""

import logging
import os
import tempfile

import streamlit as st
from git import Repo
from langchain import HuggingFaceHub, LLMChain
from langchain.prompts import PromptTemplate

# NOTE(review): the original ran `pip freeze > requirements.txt` followed by
# `pip install -r requirements.txt` at import time. That reinstalls exactly
# the packages already present — a no-op at best, a slow startup and a
# supply-chain risk at worst — so it has been removed.

# `theme` is not a valid st.set_page_config() keyword (themes are configured
# via .streamlit/config.toml); passing it raised TypeError in the original.
st.set_page_config(layout="wide", initial_sidebar_state="auto")

st.title("Hugging Face Space Demo")

repository_url = st.text_input("Enter GitHub repository URL:", "")
access_token = st.text_input("Enter GitHub access token (optional):", "")
debug_logging = st.checkbox("Enable debug logging")

if st.button("Run"):
    if debug_logging:
        logging.basicConfig(filename='log.txt', level=logging.DEBUG,
                            format='%(asctime)s %(message)s')
        logging.debug('Starting the process')

    # Clone into a fresh temporary directory: the original's fixed path
    # /tmp/repository made every run after the first fail with
    # "destination path already exists".
    local_path = tempfile.mkdtemp(prefix="repository-")

    # SECURITY NOTE(review): GIT_TERMINAL_PROMPT=0 prevents git from hanging
    # on a credential prompt, but GIT_SSL_NO_VERIFY=true disables TLS
    # certificate checking for the clone. Kept for behavioral compatibility —
    # remove it unless a self-signed host is genuinely required.
    Repo.clone_from(
        repository_url,
        local_path,
        branch="main",
        env={"GIT_TERMINAL_PROMPT": "0", "GIT_SSL_NO_VERIFY": "true"},
    )

    # Initialize the Hugging Face model.
    # NOTE(review): the input field is labeled as a *GitHub* access token but
    # the value is used as the Hugging Face Hub API token — confirm which
    # credential is actually intended.
    os.environ['HUGGINGFACEHUB_API_TOKEN'] = access_token
    hub_llm = HuggingFaceHub(repo_id='google/flan-t5-xl',
                             model_kwargs={'temperature': 1e-10})

    # LLMChain requires a PromptTemplate, not a plain string: the original
    # passed a pre-formatted str, which LLMChain rejects with a validation
    # error. The rendered prompt text is unchanged.
    prompt = PromptTemplate(
        input_variables=["repository_url"],
        template="What is the main purpose of the repository at {repository_url}?",
    )
    llm_chain = LLMChain(prompt=prompt, llm=hub_llm)

    # run() must receive the template's input variable; the original called
    # it with no arguments, which raises at runtime.
    answer = llm_chain.run(repository_url=repository_url)
    st.write("Answer:", answer)

    if debug_logging:
        logging.debug('Finished the process')