import os
from typing import List, Optional

import gradio as gr

from git_commd import GitCommandWrapper

# Read API tokens once at startup; an empty string means "not configured".
# HF_TOKEN is mandatory (needed to push), WM_TOKEN is optional (public pulls work without it).
HF_TOKEN = os.environ.get('HF_TOKEN', '')
WiseModel_TOKEN = os.environ.get('WM_TOKEN', '')


def get_cache_dir():
    """Return a random English word to use as a (probably unique) temp clone dir."""
    from random_word import RandomWords
    return RandomWords().get_random_word()


def check_disk():
    """Return the output of ``df -h /`` so free disk space shows in the UI.

    Bug fix: the old implementation returned ``os.system("df -h /")``, which
    is the command's exit status (normally 0), not the disk report itself.
    """
    import subprocess
    result = subprocess.run(["df", "-h", "/"], capture_output=True, text=True, check=False)
    return result.stdout


def pull_from_wisemodel(token: str, url: str, repo_name: str, cache_dir,
                        branch: Optional[str] = None):
    """Clone branch *branch* of *url* from WiseModel into *cache_dir*.

    Installs git-lfs first if it is not already installed, since model repos
    typically store their weights via LFS.
    """
    print(cache_dir)
    os.makedirs(cache_dir, exist_ok=True)
    git_cmd = GitCommandWrapper()
    if not git_cmd.is_lfs_installed():
        git_cmd.git_lfs_install()
    git_cmd.clone(cache_dir, token, url, repo_name, branch)
    return f'Pulled {branch} to temp folder {cache_dir}: {url}'


def remove_file(cache_dir, repo_name):
    """Delete the cloned repo's README.md (its metadata is not HF-compatible).

    Best-effort: a missing file (or any OS error) is ignored so the pipeline
    keeps going.
    """
    try:
        # Bug fix: the old path f'{cache_dir}/{repo_name}' pointed at the repo
        # directory itself, so os.remove() always failed and the README was
        # never actually deleted.
        os.remove(f'{cache_dir}/{repo_name}/README.md')
    except OSError:
        return ''
    return 'README.md file removed'


def push_to_hf(cache_dir, WiseModel_repo_name, hf_repo_id):
    """Upload the cloned folder to the pre-created HF model repo *hf_repo_id*.

    Raises:
        gr.Error: if HF_TOKEN is not configured.
    """
    from huggingface_hub import HfApi
    if not HF_TOKEN:
        raise gr.Error("Please enter your HF_TOKEN")
    api = HfApi(token=HF_TOKEN)  # Token is not persisted on the machine.
    api.upload_folder(
        folder_path=f"{cache_dir}/{WiseModel_repo_name}",
        repo_id=hf_repo_id,
        repo_type="model",
    )
    return f'Pushed to {hf_repo_id}'


def _parse_wisemodel_link(wisemodel_link):
    """Turn a copied ``git clone <url>[.git]`` snippet into ``(url, repo_name)``.

    Bug fix: the old chain of ``.replace("git", "")`` calls stripped the
    substring "git" *anywhere* in the link, corrupting repo names that
    contain it (e.g. "digit-model" -> "dimodel").
    """
    url = wisemodel_link.replace("git clone", "").replace(" ", "")
    if url.endswith(".git"):
        url = url[:-len(".git")]
    repo_name = url.rstrip("/").split("/")[-1]
    return url, repo_name


def handle(wisemodel_link, hf_repo_id):
    """Run the pull-from-WiseModel / push-to-HF pipeline.

    Returns:
        (joined stage outputs, joined errors); stops at the first failing stage.
    """
    cache_dir = get_cache_dir()
    repo_url, repo_name = _parse_wisemodel_link(wisemodel_link)
    print(repo_url, repo_name)
    stages = [
        (check_disk, (), {}),
        (pull_from_wisemodel,
         (WiseModel_TOKEN, repo_url, repo_name, cache_dir, "main"), {}),
        # Bug fix: arguments were previously passed as (repo_name, cache_dir),
        # i.e. swapped relative to remove_file's (cache_dir, repo_name) signature.
        (remove_file, (cache_dir, repo_name), {}),
        (check_disk, (), {}),
        (push_to_hf, (cache_dir, repo_name, hf_repo_id), {}),
        (check_disk, (), {}),
    ]
    results = []
    errors = []
    for func, args, kwargs in stages:
        try:
            results.append(str(func(*args, **kwargs)))
        except Exception as e:
            errors.append(str(e))
            break  # abort the pipeline on the first failure
    return '\n\n'.join(results), '\n\n'.join(errors)


with gr.Blocks() as demo:
    gr.Markdown('''
    This space uploads model from WiseModel to Huggingface.
    **Please make sure that you're the owner of the repo or have permission from the owner to do so!**
    # How to use this Space?
    - Duplicate this Space and provide WiseModel token (optional) and your read/write HF token (mandatory)
    - Create your target model repo on HF. This step needs to be done manually. The Space doesn't create an empty repo for you.
    - In your own private Space, fill in information below.
    - Click submit then watch for output in container log for progress.
    - Create README.md file (since the metadata is not compatible with HF)
    ''')
    wisemodel_link = gr.Textbox(
        label="Copy the git download link from the model detail page of wisemodel ")
    hf_repo_id = gr.Textbox(
        label="Target HF Model Repo ID (case sensitive). "
              "\nPlease make sure that this model has already been created")
    with gr.Row():
        button = gr.Button("Submit", variant="primary")
        clear = gr.Button("Clear")
    error = gr.Textbox(label="Error")
    output = gr.Textbox(label="Output")
    button.click(handle, [wisemodel_link, hf_repo_id], [output, error])

if __name__ == "__main__":
    demo.launch(debug=True)