import logging
import os
import shutil

import streamlit as st
from langchain import HuggingFaceHub, LLMChain, PromptTemplate
from git import Repo
# Snapshot the installed packages into requirements.txt and reinstall them
# (reinstalling an exact freeze of the current environment is effectively a no-op)
os.system("pip freeze > requirements.txt")
os.system("pip install -r requirements.txt")
st.set_page_config(layout="wide", initial_sidebar_state="auto")  # set_page_config has no "theme" argument
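# The dark theme the original call attempted can instead be configured in the Space's
# .streamlit/config.toml, e.g. (a minimal sketch, not a file shown in this listing):
#   [theme]
#   base = "dark"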
st.title("Hugging Face Space Demo")
repository_url = st.text_input("Enter GitHub repository URL:", "")
access_token = st.text_input("Enter Hugging Face Hub API token (optional):", "", type="password")
debug_logging = st.checkbox("Enable debug logging")
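# Streamlit re-runs this script from top to bottom on every interaction; the block
# below executes only on the rerun triggered by clicking "Run".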
if st.button("Run"):
if debug_logging:
import logging
logging.basicConfig(filename='log.txt', level=logging.DEBUG, format='%(asctime)s %(message)s')
logging.debug('Starting the process')
# Clone the repository
local_path = "/tmp/repository"
Repo.clone_from(repository_url, local_path, branch="main", env={"GIT_TERMINAL_PROMPT": "0", "GIT_SSL_NO_VERIFY": "true"})
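    # Note: GIT_TERMINAL_PROMPT=0 keeps git from blocking on credential prompts for
    # private or invalid URLs, and GIT_SSL_NO_VERIFY disables TLS certificate checks.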
    # Initialize the Hugging Face Hub model via LangChain; the near-zero temperature
    # keeps generation essentially deterministic
    if access_token:
        os.environ['HUGGINGFACEHUB_API_TOKEN'] = access_token
    hub_llm = HuggingFaceHub(repo_id='google/flan-t5-xl', model_kwargs={'temperature': 1e-10})
    # Create a prompt template and LLM chain; LLMChain expects a PromptTemplate, not a
    # plain string. Only the URL text is sent to the model, not the cloned repo contents.
    prompt = PromptTemplate(
        input_variables=["repository_url"],
        template="What is the main purpose of the repository at {repository_url}?",
    )
    llm_chain = LLMChain(prompt=prompt, llm=hub_llm)

    # Get the result and display it
    answer = llm_chain.run(repository_url=repository_url)
    st.write("Answer:", answer)
    if debug_logging:
        logging.debug('Finished the process')