Skip to content
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,8 @@ No changes to highlight.
No changes to highlight.

## Full Changelog:
No changes to highlight.
* Allows loading private Spaces by passing an `api_key` to `gr.Interface.load()`
by [@abidlabs](https://github.com/abidlabs) in [PR 2568](https://github.com/gradio-app/gradio/pull/2568)

## Contributors Shoutout:
No changes to highlight.
Expand Down
8 changes: 4 additions & 4 deletions gradio/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -1054,10 +1054,10 @@ def load(

Instance method: adds event that runs as soon as the demo loads in the browser. Example usage below.
Parameters:
name: Class Method - the name of the model (e.g. "gpt2"), can include the `src` as prefix (e.g. "models/gpt2")
src: Class Method - the source of the model: `models` or `spaces` (or empty if source is provided as a prefix in `name`)
api_key: Class Method - optional api key for use with Hugging Face Hub
alias: Class Method - optional string used as the name of the loaded model instead of the default name
name: Class Method - the name of the model (e.g. "gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. "models/facebook/bart-base")
src: Class Method - the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`)
api_key: Class Method - optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens
Comment thread
abidlabs marked this conversation as resolved.
alias: Class Method - optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x)
fn: Instance Method - Callable function
inputs: Instance Method - input list
outputs: Instance Method - output list
Expand Down
8 changes: 8 additions & 0 deletions gradio/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
class DuplicateBlockError(ValueError):
    """Raised when a Blocks contains more than one Block with the same id."""


class TooManyRequestsError(Exception):
    """Raised when the Hugging Face API returns a 429 status code."""


Expand Down
72 changes: 49 additions & 23 deletions gradio/external.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,19 +21,18 @@

import gradio
from gradio import components, exceptions, utils
from gradio.exceptions import TooManyRequestsError
from gradio.processing_utils import to_binary

if TYPE_CHECKING:
from gradio.blocks import Blocks
from gradio.components import DataframeData
from gradio.interface import Interface


class TooManyRequestsError(Exception):
"""Raised when the Hugging Face API returns a 429 status code."""

pass


def load_blocks_from_repo(name, src=None, api_key=None, alias=None, **kwargs):
def load_blocks_from_repo(
name: str, src: str = None, api_key: str = None, alias: str = None, **kwargs
) -> Blocks:
"""Creates and returns a Blocks instance from several kinds of Hugging Face repos:
1) A model repo
2) A Spaces repo running Gradio 2.x
Expand All @@ -55,7 +54,7 @@ def load_blocks_from_repo(name, src=None, api_key=None, alias=None, **kwargs):
return blocks


def get_tabular_examples(model_name) -> Dict[str, List[float]]:
def get_tabular_examples(model_name: str) -> Dict[str, List[float]]:
readme = requests.get(f"https://huggingface.co/{model_name}/resolve/main/README.md")
if readme.status_code != 200:
warnings.warn(f"Cannot load examples from README for {model_name}", UserWarning)
Expand Down Expand Up @@ -107,7 +106,7 @@ def rows_to_cols(
return {"inputs": {"data": data_column_wise}}


def get_models_interface(model_name, api_key, alias, **kwargs):
def get_models_interface(model_name: str, api_key: str | None, alias: str, **kwargs):
model_url = "https://huggingface.co/{}".format(model_name)
api_url = "https://api-inference.huggingface.co/models/{}".format(model_name)
print("Fetching model from: {}".format(model_url))
Expand Down Expand Up @@ -394,23 +393,37 @@ def query_huggingface_api(*params):
return interface


def get_spaces(model_name, api_key, alias, **kwargs):
space_url = "https://huggingface.co/spaces/{}".format(model_name)
print("Fetching interface from: {}".format(space_url))
iframe_url = "https://hf.space/embed/{}/+".format(model_name)
def get_spaces(space_name: str, api_key: str | None, alias: str, **kwargs) -> Blocks:
space_url = "https://huggingface.co/spaces/{}".format(space_name)
print("Fetching Space from: {}".format(space_url))

headers = {}
if api_key is not None:
headers["Authorization"] = f"Bearer {api_key}"

iframe_url = (
requests.get(
f"https://huggingface.co/api/spaces/{space_name}/host", headers=headers
)
.json()
.get("host")
)

r = requests.get(iframe_url, headers=headers)

r = requests.get(iframe_url)
result = re.search(
r"window.gradio_config = (.*?);[\s]*</script>", r.text
) # some basic regex to extract the config
try:
config = json.loads(result.group(1))
except AttributeError:
raise ValueError("Could not load the Space: {}".format(model_name))
raise ValueError("Could not load the Space: {}".format(space_name))
if "allow_flagging" in config: # Create an Interface for Gradio 2.x Spaces
return get_spaces_interface(model_name, config, alias, **kwargs)
return get_spaces_interface(
space_name, config, alias, api_key, iframe_url, **kwargs
)
else: # Create a Blocks for Gradio 3.x Spaces
return get_spaces_blocks(model_name, config)
return get_spaces_blocks(space_name, config, api_key, iframe_url)


async def get_pred_from_ws(
Expand Down Expand Up @@ -447,7 +460,9 @@ def use_websocket(config, dependency):
return queue_enabled and queue_uses_websocket and dependency_uses_queue


def get_spaces_blocks(model_name, config):
def get_spaces_blocks(
model_name: str, config: Dict, api_key: str | None, iframe_url: str
) -> Blocks:
def streamline_config(config: dict) -> dict:
"""Streamlines the blocks config dictionary to fix components that don't render correctly."""
# TODO(abidlabs): Need a better way to fix relative paths in dataset component
Expand All @@ -457,8 +472,10 @@ def streamline_config(config: dict) -> dict:
return config

config = streamline_config(config)
api_url = "https://hf.space/embed/{}/api/predict/".format(model_name)
api_url = "{}/api/predict/".format(iframe_url)
headers = {"Content-Type": "application/json"}
if api_key is not None:
headers["Authorization"] = f"Bearer {api_key}"
ws_url = "wss://spaces.huggingface.tech/{}/queue/join".format(model_name)
Comment thread
abidlabs marked this conversation as resolved.
Outdated

ws_fn = get_ws_fn(ws_url)
Expand Down Expand Up @@ -504,8 +521,15 @@ def fn(*data):
return gradio.Blocks.from_config(config, fns)


def get_spaces_interface(model_name, config, alias, **kwargs):
def streamline_config(config: dict) -> dict:
def get_spaces_interface(
model_name: str,
config: Dict,
alias: str,
api_key: str | None,
iframe_url: str,
**kwargs,
) -> Interface:
def streamline_config(config: Dict) -> Dict:
"""Streamlines the interface config dictionary to remove unnecessary keys."""
config["inputs"] = [
components.get_component_instance(component)
Expand All @@ -528,8 +552,10 @@ def streamline_config(config: dict) -> dict:
return config

config = streamline_config(config)
api_url = "https://hf.space/embed/{}/api/predict/".format(model_name)
api_url = "{}/api/predict/".format(iframe_url)
headers = {"Content-Type": "application/json"}
if api_key is not None:
headers["Authorization"] = f"Bearer {api_key}"

# The function should call the API with preprocessed data
def fn(*data):
Expand Down Expand Up @@ -571,7 +597,7 @@ def fn(*data):
}


def load_from_pipeline(pipeline):
def load_from_pipeline(pipeline) -> Dict:
"""
Gets the appropriate Interface kwargs for a given Hugging Face transformers.Pipeline.
pipeline (transformers.Pipeline): the transformers.Pipeline from which to create an interface
Expand Down
8 changes: 4 additions & 4 deletions gradio/interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,10 +92,10 @@ def load(
model repos (if src is "models") or Space repos (if src is "spaces"). The input
and output components are automatically loaded from the repo.
Parameters:
name: the name of the model (e.g. "gpt2"), can include the `src` as prefix (e.g. "models/gpt2")
src: the source of the model: `models` or `spaces` (or empty if source is provided as a prefix in `name`)
api_key: optional api key for use with Hugging Face Hub
alias: optional string used as the name of the loaded model instead of the default name
name: the name of the model (e.g. "gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. "models/facebook/bart-base")
src: the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`)
api_key: optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens
alias: optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x)
Returns:
a Gradio Interface object for the given model
Example:
Expand Down
11 changes: 11 additions & 0 deletions test/test_external.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,17 @@ def test_text_to_image_model(self):
except TooManyRequestsError:
pass

def test_private_space(self):
api_key = "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" # Intentionally revealing this key for testing purposes
io = gr.Interface.load(
"spaces/gradio-tests/not-actually-private-space", api_key=api_key
)
try:
output = io("abc")
self.assertEqual(output, "abc")
except TooManyRequestsError:
pass


class TestLoadFromPipeline(unittest.TestCase):
def test_text_to_text_model_from_pipeline(self):
Expand Down
4 changes: 3 additions & 1 deletion website/homepage/restart_demos.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import requests
from upload_demos import demos, upload_demo_to_space, AUTH_TOKEN, gradio_version
from gradio.networking import url_ok


for demo in demos:
space_id = "gradio/" + demo
if not url_ok(f"https://hf.space/embed/{space_id}/+"):
space_url = requests.get(f"https://huggingface.co/api/spaces/{space_id}/host").json().get("host")
if not url_ok(space_url):
print(f"{space_id} was down, restarting")
upload_demo_to_space(demo_name=demo, space_id=space_id, hf_token=AUTH_TOKEN, gradio_version=gradio_version)