Spaces:
Runtime error
Bad Request Error.
BadRequestError: (Request ID: qIMnjPLkmEiwezvpewbMH) Bad request: No mask_token (
File "/usr/local/lib/python3.10/site-packages/streamlit/runtime/scriptrunner/script_runner.py", line 542, in _run_script
exec(code, module.__dict__)
File "/home/user/app/app.py", line 3, in <module>
gr.load("models/s2w-ai/DarkBERT").launch()
File "/usr/local/lib/python3.10/site-packages/gradio/external.py", line 60, in load
return load_blocks_from_repo(
File "/usr/local/lib/python3.10/site-packages/gradio/external.py", line 99, in load_blocks_from_repo
blocks: gradio.Blocks = factory_methods[src](name, hf_token, alias, **kwargs)
File "/usr/local/lib/python3.10/site-packages/gradio/external.py", line 387, in from_model
interface = gradio.Interface(**kwargs)
File "/usr/local/lib/python3.10/site-packages/gradio/interface.py", line 527, in __init__
self.render_examples()
File "/usr/local/lib/python3.10/site-packages/gradio/interface.py", line 876, in render_examples
self.examples_handler = Examples(
File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 79, in create_examples
examples_obj.create()
File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 332, in create
self._start_caching()
File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 383, in _start_caching
client_utils.synchronize_async(self.cache)
File "/usr/local/lib/python3.10/site-packages/gradio_client/utils.py", line 855, in synchronize_async
return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) # type: ignore
File "/usr/local/lib/python3.10/site-packages/fsspec/asyn.py", line 103, in sync
raise return_result
File "/usr/local/lib/python3.10/site-packages/fsspec/asyn.py", line 56, in _runner
result[0] = await coro
File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 505, in cache
prediction = await Context.root_block.process_api(
File "/usr/local/lib/python3.10/site-packages/gradio/blocks.py", line 1897, in process_api
result = await self.call_function(
File "/usr/local/lib/python3.10/site-packages/gradio/blocks.py", line 1483, in call_function
prediction = await anyio.to_thread.run_sync(
File "/usr/local/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
return await get_async_backend().run_sync_in_worker_thread(
File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2177, in run_sync_in_worker_thread
return await future
File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 859, in run
result = context.run(func, *args)
File "/usr/local/lib/python3.10/site-packages/gradio/utils.py", line 816, in wrapper
response = f(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/gradio/external.py", line 371, in query_huggingface_inference_endpoints
data = fn(*data) # type: ignore
File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_client.py", line 985, in fill_mask
response = self.post(json={"inputs": text}, model=model, task="fill-mask")
File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_client.py", line 273, in post
hf_raise_for_status(response)
File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py", line 358, in hf_raise_for_status
raise BadRequestError(message, response=response) from e
Can you help me run this model? I am a beginner (a college student) and I have no idea how to access it.