echarlaix HF staff committed on
Commit
5172e09
1 Parent(s): 6396bac

update description

Browse files
Files changed (1) hide show
  1. app.py +15 -9
app.py CHANGED
@@ -32,21 +32,15 @@ from optimum.intel import (
32
  OVModelForPix2Struct,
33
  OVWeightQuantizationConfig,
34
  )
35
- from optimum.intel.openvino.modeling_diffusion import OVStableDiffusionPipelineBase
36
 
37
 
38
- def export(
39
- model_id: str,
40
- private_repo: bool,
41
- oauth_token: gr.OAuthToken,
42
- ):
43
  if oauth_token.token is None:
44
  raise ValueError("You must be logged in to use this space")
45
 
46
  model_name = model_id.split("/")[-1]
47
  username = whoami(oauth_token.token)["name"]
48
  new_repo_id = f"{username}/{model_name}-openvino"
49
-
50
  task = TasksManager.infer_task_from_model(model_id)
51
  if task not in _HEAD_TO_AUTOMODELS:
52
  raise ValueError(
@@ -80,7 +74,18 @@ def export(
80
  print("Repo created successfully!", new_repo_url)
81
 
82
  folder = Path(folder)
83
- for dir_name in ("", "vae_encoder", "vae_decoder", "text_encoder", "text_encoder_2", "unet", "tokenizer", "tokenizer_2", "scheduler", "feature_extractor"):
 
 
 
 
 
 
 
 
 
 
 
84
  if not (folder / dir_name).is_dir():
85
  continue
86
  for file_path in (folder / dir_name).iterdir():
@@ -139,7 +144,8 @@ def export(
139
  DESCRIPTION = """
140
  This Space uses [Optimum Intel](https://huggingface.co/docs/optimum/main/en/intel/openvino/export) to automatically export a model from the [Hub](https://huggingface.co/models) to the [OpenVINO format](https://docs.openvino.ai/2024/documentation/openvino-ir-format.html).
141
 
142
- The resulting model will then be pushed under your HF user namespace. For now we only support conversion for models that are hosted on public repositories.
 
143
  """
144
 
145
  model_id = HuggingfaceHubSearch(
 
32
  OVModelForPix2Struct,
33
  OVWeightQuantizationConfig,
34
  )
 
35
 
36
 
37
+ def export(model_id: str, private_repo: bool, oauth_token: gr.OAuthToken):
 
 
 
 
38
  if oauth_token.token is None:
39
  raise ValueError("You must be logged in to use this space")
40
 
41
  model_name = model_id.split("/")[-1]
42
  username = whoami(oauth_token.token)["name"]
43
  new_repo_id = f"{username}/{model_name}-openvino"
 
44
  task = TasksManager.infer_task_from_model(model_id)
45
  if task not in _HEAD_TO_AUTOMODELS:
46
  raise ValueError(
 
74
  print("Repo created successfully!", new_repo_url)
75
 
76
  folder = Path(folder)
77
+ for dir_name in (
78
+ "",
79
+ "vae_encoder",
80
+ "vae_decoder",
81
+ "text_encoder",
82
+ "text_encoder_2",
83
+ "unet",
84
+ "tokenizer",
85
+ "tokenizer_2",
86
+ "scheduler",
87
+ "feature_extractor",
88
+ ):
89
  if not (folder / dir_name).is_dir():
90
  continue
91
  for file_path in (folder / dir_name).iterdir():
 
144
  DESCRIPTION = """
145
  This Space uses [Optimum Intel](https://huggingface.co/docs/optimum/main/en/intel/openvino/export) to automatically export a model from the [Hub](https://huggingface.co/models) to the [OpenVINO format](https://docs.openvino.ai/2024/documentation/openvino-ir-format.html).
146
 
147
+ The resulting model will then be pushed under your HF user namespace. For now we only support conversion for models that are hosted on public repositories; support for gated or private models will be added in a future version of this space.
148
+ Custom models that need to be loaded with `trust_remote_code=True` are also not supported.
149
  """
150
 
151
  model_id = HuggingfaceHubSearch(