code | docstring | func_name | language | repo | path | url | license |
---|---|---|---|---|---|---|---|
def from_config(
cls,
config: BlocksConfigDict,
fns: list[Callable],
proxy_url: str,
) -> Blocks:
"""
Factory method that creates a Blocks from a config and list of functions. Used
internally by the gradio.external.load() method.
Parameters:
config: a dictionary containing the configuration of the Blocks.
fns: a list of functions that are used in the Blocks. Must be in the same order as the dependencies in the config.
proxy_url: an external url to use as a root URL when serving files for components in the Blocks.
"""
config = copy.deepcopy(config)
components_config = config["components"]
theme = config.get("theme", "default")
original_mapping: dict[int, Block] = {}
proxy_urls = {proxy_url}
def get_block_instance(id: int) -> Block:
for block_config in components_config:
if block_config["id"] == id:
break
else:
raise ValueError(f"Cannot find block with id {id}")
cls = component_or_layout_class(block_config["props"]["name"])
# If a Gradio app B is loaded into a Gradio app A, and B itself loads a
# Gradio app C, then the proxy_urls of the components in A need to be the
# URL of C, not B. Leaving an already-set proxy_url untouched (the implicit
# else of the check below) handles this case.
if block_config["props"].get("proxy_url") is None:
block_config["props"]["proxy_url"] = f"{proxy_url}/"
postprocessed_value = block_config["props"].pop("value", None)
constructor_args = cls.recover_kwargs(block_config["props"])
block = cls(**constructor_args)
if postprocessed_value is not None:
block.value = postprocessed_value # type: ignore
block_proxy_url = block_config["props"]["proxy_url"]
block.proxy_url = block_proxy_url
proxy_urls.add(block_proxy_url)
if (
_selectable := block_config["props"].pop("_selectable", None)
) is not None:
block._selectable = _selectable # type: ignore
return block
def iterate_over_children(children_list):
for child_config in children_list:
id = child_config["id"]
block = get_block_instance(id)
original_mapping[id] = block
children = child_config.get("children")
if children is not None:
if not isinstance(block, BlockContext):
raise ValueError(
f"Invalid config, Block with id {id} has children but is not a BlockContext."
)
with block:
iterate_over_children(children)
derived_fields = ["types"]
with Blocks(theme=theme) as blocks:
# ID 0 should be the root Blocks component
original_mapping[0] = root_block = Context.root_block or blocks
if "layout" in config:
iterate_over_children(config["layout"].get("children", []))
first_dependency = None
# add the event triggers
if "dependencies" not in config:
raise ValueError(
"This config is missing the 'dependencies' field and cannot be loaded."
)
for dependency, fn in zip(config["dependencies"], fns, strict=False):
# We used to add a "fake_event" to the config to cache examples
# without removing it, which caused bugs when calling gr.load.
# We fixed the issue by removing "fake_event" from the config in examples.py,
# but we still need to skip these events when loading the config to support
# older demos.
if "trigger" in dependency and dependency["trigger"] == "fake_event":
continue
for field in derived_fields:
dependency.pop(field, None)
# older versions had a separate trigger field, but now it is part of the
# targets field
_targets = dependency.pop("targets")
trigger = dependency.pop("trigger", None)
is_then_event = False
# This assumes that you cannot combine multiple .then() events in a single
# gr.on() event, which is true for now. If this changes, we will need to
# update this code.
if not isinstance(_targets[0], int) and _targets[0][1] in [
"then",
"success",
]:
if len(_targets) != 1:
raise ValueError(
"This logic assumes that .then() events are not combined with other events in a single gr.on() event"
)
is_then_event = True
dependency.pop("backend_fn")
dependency.pop("documentation", None)
dependency["inputs"] = [
original_mapping[i] for i in dependency["inputs"]
]
dependency["outputs"] = [
original_mapping[o] for o in dependency["outputs"]
]
dependency.pop("status_tracker", None)
dependency.pop("zerogpu", None)
dependency.pop("id", None)
dependency.pop("rendered_in", None)
dependency.pop("render_id", None)
dependency.pop("every", None)
dependency["preprocess"] = False
dependency["postprocess"] = False
if is_then_event:
targets = [EventListenerMethod(None, "then")]
dependency["trigger_after"] = dependency.pop("trigger_after")
dependency["trigger_only_on_success"] = dependency.pop(
"trigger_only_on_success"
)
dependency["no_target"] = True
else:
targets = [
EventListenerMethod(
t.__self__ if t.has_trigger else None,
t.event_name, # type: ignore
)
for t in Blocks.get_event_targets(
original_mapping, _targets, trigger
)
]
dependency = root_block.default_config.set_event_trigger(
targets=targets, fn=fn, **dependency
)[0]
if first_dependency is None:
first_dependency = dependency
# Allows some use of Interface-specific methods with loaded Spaces
if first_dependency and get_blocks_context():
blocks.predict = [fns[0]]
blocks.input_components = first_dependency.inputs
blocks.output_components = first_dependency.outputs
blocks.__name__ = "Interface"
blocks.api_mode = True
blocks.proxy_urls = proxy_urls
return blocks | Factory method that creates a Blocks from a config and list of functions. Used
internally by the gradio.external.load() method.
Parameters:
config: a dictionary containing the configuration of the Blocks.
fns: a list of functions that are used in the Blocks. Must be in the same order as the dependencies in the config.
proxy_url: an external url to use as a root URL when serving files for components in the Blocks. | from_config | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
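For context, a minimal sketch of the public call path that reaches this factory: `gr.load()` downloads a running app's config and hands it to `Blocks.from_config()` together with the proxy functions. The Space name below is illustrative, not required.

```python
import gradio as gr

# gr.load() fetches the remote app's config and reconstructs it via
# Blocks.from_config(); the returned object is a regular Blocks app.
demo = gr.load("spaces/gradio/hello_world")  # illustrative Space name
demo.launch()
```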
def expects_oauth(self):
"""Return whether the app expects user to authenticate via OAuth."""
return any(
isinstance(block, components.LoginButton) for block in self.blocks.values()
) | Return whether the app expects user to authenticate via OAuth. | expects_oauth | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
def unload(self, fn: Callable[..., Any]) -> None:
"""This listener is triggered when the user closes or refreshes the tab, ending the user session.
It is useful for cleaning up resources when the app is closed.
Parameters:
fn: Callable function to run to clear resources. The function should not take any arguments and the output is not used.
Example:
import gradio as gr
with gr.Blocks() as demo:
gr.Markdown("# When you close the tab, hello will be printed to the console")
demo.unload(lambda: print("hello"))
demo.launch()
"""
self.default_config.set_event_trigger(
targets=[EventListenerMethod(None, "unload")],
fn=fn,
inputs=None,
outputs=None,
preprocess=False,
postprocess=False,
show_progress="hidden",
api_name=None,
js=None,
no_target=True,
batch=False,
max_batch_size=4,
cancels=None,
collects_event_data=None,
trigger_after=None,
trigger_only_on_success=False,
trigger_mode="once",
concurrency_limit="default",
concurrency_id=None,
show_api=False,
) | This listener is triggered when the user closes or refreshes the tab, ending the user session.
It is useful for cleaning up resources when the app is closed.
Parameters:
fn: Callable function to run to clear resources. The function should not take any arguments and the output is not used.
Example:
import gradio as gr
with gr.Blocks() as demo:
gr.Markdown("# When you close the tab, hello will be printed to the console")
demo.unload(lambda: print("hello"))
demo.launch() | unload | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
def is_callable(self, fn_index: int = 0) -> bool:
"""Checks if a particular Blocks function is callable (i.e. not stateful or a generator)."""
block_fn = self.fns[fn_index]
dependency = self.fns[fn_index]
if inspect.isasyncgenfunction(block_fn.fn):
return False
if inspect.isgeneratorfunction(block_fn.fn):
return False
if any(block.stateful for block in dependency.inputs):
return False
return not any(block.stateful for block in dependency.outputs) | Checks if a particular Blocks function is callable (i.e. not stateful or a generator). | is_callable | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
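A small usage sketch, assuming a stateless, non-generator prediction function, of guarding a direct call with this check:

```python
import gradio as gr

demo = gr.Interface(fn=lambda text: text[::-1], inputs="text", outputs="text")

# The default prediction function is neither stateful nor a generator,
# so the Blocks object can be invoked directly as a function.
if demo.is_callable(fn_index=0):
    print(demo("hello"))  # -> "olleh"
```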
def __call__(self, *inputs, fn_index: int = 0, api_name: str | None = None):
"""
Allows Blocks objects to be called as functions. Supply the parameters to the
function as positional arguments. To choose which function to call, use the
fn_index parameter, which must be a keyword argument.
Parameters:
*inputs: the parameters to pass to the function
fn_index: the index of the function to call (defaults to 0, which for Interfaces, is the default prediction function)
api_name: The api_name of the dependency to call. Will take precedence over fn_index.
"""
if api_name is not None:
inferred_fn_index = next(
(i for i, d in self.fns.items() if d.api_name == api_name),
None,
)
if inferred_fn_index is None:
raise InvalidApiNameError(
f"Cannot find a function with api_name {api_name}"
)
fn_index = inferred_fn_index
if not (self.is_callable(fn_index)):
raise ValueError(
"This function is not callable because it is either stateful or is a generator. Please use the .launch() method instead to create an interactive user interface."
)
inputs = list(inputs)
processed_inputs = self.serialize_data(fn_index, inputs)
fn = self.fns[fn_index]
if fn.batch:
processed_inputs = [[inp] for inp in processed_inputs]
outputs = client_utils.synchronize_async(
self.process_api,
block_fn=fn,
inputs=processed_inputs,
request=None,
state={},
explicit_call=True,
)
outputs = outputs["data"]
if fn.batch:
outputs = [out[0] for out in outputs]
outputs = self.deserialize_data(fn_index, outputs)
processed_outputs = utils.resolve_singleton(outputs)
return processed_outputs | Allows Blocks objects to be called as functions. Supply the parameters to the
function as positional arguments. To choose which function to call, use the
fn_index parameter, which must be a keyword argument.
Parameters:
*inputs: the parameters to pass to the function
fn_index: the index of the function to call (defaults to 0, which for Interfaces, is the default prediction function)
api_name: The api_name of the dependency to call. Will take precedence over fn_index. | __call__ | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
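An illustrative sketch (function and component names assumed) of selecting the dependency by `api_name` rather than by positional `fn_index`:

```python
import gradio as gr

def greet(name: str) -> str:
    return f"Hello, {name}!"

with gr.Blocks() as demo:
    inp, out = gr.Textbox(), gr.Textbox()
    inp.submit(greet, inp, out, api_name="greet")

# api_name takes precedence over fn_index when both are given.
print(demo("Gradio", api_name="greet"))  # -> "Hello, Gradio!"
```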
async def call_function(
self,
block_fn: BlockFunction | int,
processed_input: list[Any],
iterator: AsyncIterator[Any] | None = None,
requests: Request | list[Request] | None = None,
event_id: str | None = None,
event_data: EventData | None = None,
in_event_listener: bool = False,
state: SessionState | None = None,
):
"""
Calls function with given index and preprocessed input, and measures process time.
Parameters:
block_fn: the BlockFunction to call (or the index of the function)
processed_input: preprocessed input to pass to function
iterator: iterator to use if function is a generator
requests: requests to pass to function
event_id: id of event in queue
event_data: data associated with event trigger
"""
if isinstance(block_fn, int):
block_fn = self.fns[block_fn]
if not block_fn.fn:
raise IndexError("function has no backend method.")
is_generating = False
request = requests[0] if isinstance(requests, list) else requests
start = time.time()
fn = utils.get_function_with_locals(
fn=block_fn.fn,
blocks=self,
event_id=event_id,
in_event_listener=in_event_listener,
request=request,
state=state,
)
if iterator is None: # If not a generator function that has already run
if block_fn.inputs_as_dict:
processed_input = [
dict(zip(block_fn.inputs, processed_input, strict=False))
]
fn_to_analyze = (
block_fn.renderable.fn if block_fn.renderable else block_fn.fn
)
processed_input, progress_index, _ = special_args(
fn_to_analyze, processed_input, request, event_data
)
progress_tracker = (
processed_input[progress_index] if progress_index is not None else None
)
if progress_tracker is not None and progress_index is not None:
progress_tracker, fn = create_tracker(fn, progress_tracker.track_tqdm)
processed_input[progress_index] = progress_tracker
if inspect.iscoroutinefunction(fn):
prediction = await fn(*processed_input)
else:
prediction = await anyio.to_thread.run_sync( # type: ignore
fn, *processed_input, limiter=self.limiter
)
else:
prediction = None
if inspect.isgeneratorfunction(fn) or inspect.isasyncgenfunction(fn):
try:
if iterator is None:
iterator = cast(AsyncIterator[Any], prediction)
if inspect.isgenerator(iterator):
iterator = utils.SyncToAsyncIterator(iterator, self.limiter)
prediction = await utils.async_iteration(iterator)
is_generating = True
except StopAsyncIteration:
n_outputs = len(block_fn.outputs)
prediction = (
components._Keywords.FINISHED_ITERATING
if n_outputs == 1
else (components._Keywords.FINISHED_ITERATING,) * n_outputs
)
iterator = None
duration = time.time() - start
return {
"prediction": prediction,
"duration": duration,
"is_generating": is_generating,
"iterator": iterator,
} | Calls function with given index and preprocessed input, and measures process time.
Parameters:
block_fn: the BlockFunction to call (or the index of the function)
processed_input: preprocessed input to pass to function
iterator: iterator to use if function is a generator
requests: requests to pass to function
event_id: id of event in queue
event_data: data associated with event trigger | call_function | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
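For context, a minimal sketch (component names assumed) of the kind of generator event that exercises the iterator branch above: each `yield` becomes one pull through the async iterator until the generator is exhausted.

```python
import gradio as gr
import time

def countdown(n: float):
    # A sync generator event: call_function wraps it in a SyncToAsyncIterator
    # and yields one update per step until iteration finishes.
    for i in range(int(n), -1, -1):
        time.sleep(0.5)
        yield str(i)

with gr.Blocks() as demo:
    num = gr.Number(value=5)
    out = gr.Textbox()
    btn = gr.Button("Start")
    btn.click(countdown, num, out)
```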
async def process_api(
self,
block_fn: BlockFunction | int,
inputs: list[Any],
state: SessionState | None = None,
request: Request | list[Request] | None = None,
iterator: AsyncIterator | None = None,
session_hash: str | None = None,
event_id: str | None = None,
event_data: EventData | None = None,
in_event_listener: bool = True,
simple_format: bool = False,
explicit_call: bool = False,
root_path: str | None = None,
) -> dict[str, Any]:
"""
Processes API calls from the frontend. First preprocesses the data,
then runs the relevant function, then postprocesses the output.
Parameters:
block_fn: The BlockFunction to run (or the index of the function to run).
inputs: input data received from the frontend
state: data stored from stateful components for session (key is input block id)
request: the gr.Request object containing information about the network request (e.g. IP address, headers, query parameters, username)
iterator: the in-progress iterator for the generator function, if one has already started
event_id: id of event that triggered this API call
event_data: data associated with the event trigger itself
in_event_listener: whether this API call is being made in response to an event listener
explicit_call: whether this call is being made directly by calling the Blocks function, instead of through an event listener or API route
root_path: if provided, the root path of the server. All file URLs will be prefixed with this path.
Returns: a dictionary containing the output data plus metadata about the run (duration, generator state, changed state ids, and any render config)
"""
if isinstance(block_fn, int):
block_fn = self.fns[block_fn]
batch = block_fn.batch
state_ids_to_track, hashed_values = self.get_state_ids_to_track(block_fn, state)
changed_state_ids = []
LocalContext.blocks.set(self)
if batch:
max_batch_size = block_fn.max_batch_size
batch_sizes = [len(inp) for inp in inputs]
batch_size = batch_sizes[0]
if inspect.isasyncgenfunction(block_fn.fn) or inspect.isgeneratorfunction(
block_fn.fn
):
raise ValueError("Gradio does not support generators in batch mode.")
if not all(x == batch_size for x in batch_sizes):
raise ValueError(
f"All inputs to a batch function must have the same length but instead have sizes: {batch_sizes}."
)
if batch_size > max_batch_size:
raise ValueError(
f"Batch size ({batch_size}) exceeds the max_batch_size for this function ({max_batch_size})"
)
inputs = [
await self.preprocess_data(block_fn, list(i), state, explicit_call)
for i in zip(*inputs, strict=False)
]
result = await self.call_function(
block_fn,
list(zip(*inputs, strict=False)),
None,
request,
event_id,
event_data,
in_event_listener,
state,
)
preds = result["prediction"]
data = [
await self.postprocess_data(block_fn, list(o), state)
for o in zip(*preds, strict=False)
]
if root_path is not None:
data = processing_utils.add_root_url(data, root_path, None) # type: ignore
data = list(zip(*data, strict=False))
is_generating, iterator = None, None
else:
old_iterator = iterator
if old_iterator:
inputs = []
else:
inputs = await self.preprocess_data(
block_fn, inputs, state, explicit_call
)
was_generating = old_iterator is not None
result = await self.call_function(
block_fn,
inputs,
old_iterator,
request,
event_id,
event_data,
in_event_listener,
state,
)
data = await self.postprocess_data(block_fn, result["prediction"], state)
if state:
changed_state_ids = [
state_id
for hash_value, state_id in zip(
hashed_values, state_ids_to_track, strict=False
)
if hash_value != utils.deep_hash(state[state_id])
]
if root_path is not None:
data = processing_utils.add_root_url(data, root_path, None)
is_generating, iterator = result["is_generating"], result["iterator"]
if is_generating or was_generating:
run = id(old_iterator) if was_generating else id(iterator)
data = await self.handle_streaming_outputs(
block_fn,
data,
session_hash=session_hash,
run=run,
root_path=root_path,
final=not is_generating,
)
data = self.handle_streaming_diffs(
block_fn,
data,
session_hash=session_hash,
run=run,
final=not is_generating,
simple_format=simple_format,
)
block_fn.total_runtime += result["duration"]
block_fn.total_runs += 1
output = {
"data": data,
"is_generating": is_generating,
"iterator": iterator,
"duration": result["duration"],
"average_duration": block_fn.total_runtime / block_fn.total_runs,
"render_config": None,
"changed_state_ids": changed_state_ids,
}
if block_fn.renderable and state:
output["render_config"] = state.blocks_config.get_config(
block_fn.renderable
)
output["render_config"]["render_id"] = block_fn.renderable._id
if root_path is not None:
output["render_config"] = processing_utils.add_root_url(
output["render_config"], root_path, None
)
return output | Processes API calls from the frontend. First preprocesses the data,
then runs the relevant function, then postprocesses the output.
Parameters:
block_fn: The BlockFunction to run (or the index of the function to run).
inputs: input data received from the frontend
state: data stored from stateful components for session (key is input block id)
request: the gr.Request object containing information about the network request (e.g. IP address, headers, query parameters, username)
iterator: the in-progress iterator for the generator function, if one has already started
event_id: id of event that triggered this API call
event_data: data associated with the event trigger itself
in_event_listener: whether this API call is being made in response to an event listener
explicit_call: whether this call is being made directly by calling the Blocks function, instead of through an event listener or API route
root_path: if provided, the root path of the server. All file URLs will be prefixed with this path.
Returns: a dictionary containing the output data plus metadata about the run (duration, generator state, changed state ids, and any render config) | process_api | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
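A hedged illustration of the batch-mode contract enforced above (equal-length input lists, no generators, length capped by `max_batch_size`); the function and components below are assumptions, not part of the source.

```python
import gradio as gr

def reverse_batch(words: list[str]) -> list[list[str]]:
    # Receives one list per input component and must return one list per
    # output component, each with the same number of samples.
    return [[w[::-1] for w in words]]

with gr.Blocks() as demo:
    inp = gr.Textbox()
    out = gr.Textbox()
    inp.submit(reverse_batch, inp, out, batch=True, max_batch_size=8)
```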
def clear(self):
"""Resets the layout of the Blocks object."""
self.default_config.blocks = {}
self.default_config.fns = {}
self.children = []
return self | Resets the layout of the Blocks object. | clear | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
def queue(
self,
status_update_rate: float | Literal["auto"] = "auto",
api_open: bool | None = None,
max_size: int | None = None,
*,
default_concurrency_limit: int | None | Literal["not_set"] = "not_set",
):
"""
By enabling the queue you can control when users know their position in the queue, and set a limit on maximum number of events allowed.
Parameters:
status_update_rate: If "auto", Queue will send status estimations to all clients whenever a job is finished. Otherwise Queue will send status at regular intervals set by this parameter as the number of seconds.
api_open: If True, the REST routes of the backend will be open, allowing requests made directly to those endpoints to skip the queue.
max_size: The maximum number of events the queue will store at any given moment. If the queue is full, new events will not be added and a user will receive a message saying that the queue is full. If None, the queue size will be unlimited.
default_concurrency_limit: The default value of `concurrency_limit` to use for event listeners that don't specify a value. Can be set by environment variable GRADIO_DEFAULT_CONCURRENCY_LIMIT. Defaults to 1 if not set otherwise.
Example: (Blocks)
with gr.Blocks() as demo:
button = gr.Button(label="Generate Image")
button.click(fn=image_generator, inputs=gr.Textbox(), outputs=gr.Image())
demo.queue(max_size=10)
demo.launch()
Example: (Interface)
demo = gr.Interface(image_generator, gr.Textbox(), gr.Image())
demo.queue(max_size=20)
demo.launch()
"""
if api_open is not None:
self.api_open = api_open
if utils.is_zero_gpu_space():
max_size = 1 if max_size is None else max_size
self._queue = queueing.Queue(
live_updates=status_update_rate == "auto",
concurrency_count=self.max_threads,
update_intervals=status_update_rate if status_update_rate != "auto" else 1,
max_size=max_size,
blocks=self,
default_concurrency_limit=default_concurrency_limit,
)
self.app = App.create_app(self)
return self | By enabling the queue you can control when users know their position in the queue, and set a limit on maximum number of events allowed.
Parameters:
status_update_rate: If "auto", Queue will send status estimations to all clients whenever a job is finished. Otherwise Queue will send status at regular intervals set by this parameter as the number of seconds.
api_open: If True, the REST routes of the backend will be open, allowing requests made directly to those endpoints to skip the queue.
max_size: The maximum number of events the queue will store at any given moment. If the queue is full, new events will not be added and a user will receive a message saying that the queue is full. If None, the queue size will be unlimited.
default_concurrency_limit: The default value of `concurrency_limit` to use for event listeners that don't specify a value. Can be set by environment variable GRADIO_DEFAULT_CONCURRENCY_LIMIT. Defaults to 1 if not set otherwise.
Example: (Blocks)
with gr.Blocks() as demo:
button = gr.Button(label="Generate Image")
button.click(fn=image_generator, inputs=gr.Textbox(), outputs=gr.Image())
demo.queue(max_size=10)
demo.launch()
Example: (Interface)
demo = gr.Interface(image_generator, gr.Textbox(), gr.Image())
demo.queue(max_size=20)
demo.launch() | queue | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
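The docstring examples above omit `default_concurrency_limit`; here is a hedged sketch of using it, where `slow_fn` is a placeholder for real work:

```python
import gradio as gr
import time

def slow_fn(text: str) -> str:
    time.sleep(2)  # placeholder for real work
    return text.upper()

with gr.Blocks() as demo:
    box = gr.Textbox()
    out = gr.Textbox()
    box.submit(slow_fn, box, out)

# Listeners that don't set their own concurrency_limit get up to 4 workers;
# at most 20 events can wait in the queue at once.
demo.queue(default_concurrency_limit=4, max_size=20)
demo.launch()
```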
def launch(
self,
inline: bool | None = None,
inbrowser: bool = False,
share: bool | None = None,
debug: bool = False,
max_threads: int = 40,
auth: (
Callable[[str, str], bool] | tuple[str, str] | list[tuple[str, str]] | None
) = None,
auth_message: str | None = None,
prevent_thread_lock: bool = False,
show_error: bool = False,
server_name: str | None = None,
server_port: int | None = None,
*,
height: int = 500,
width: int | str = "100%",
favicon_path: str | None = None,
ssl_keyfile: str | None = None,
ssl_certfile: str | None = None,
ssl_keyfile_password: str | None = None,
ssl_verify: bool = True,
quiet: bool = False,
show_api: bool = not wasm_utils.IS_WASM,
allowed_paths: list[str] | None = None,
blocked_paths: list[str] | None = None,
root_path: str | None = None,
app_kwargs: dict[str, Any] | None = None,
state_session_capacity: int = 10000,
share_server_address: str | None = None,
share_server_protocol: Literal["http", "https"] | None = None,
share_server_tls_certificate: str | None = None,
auth_dependency: Callable[[fastapi.Request], str | None] | None = None,
max_file_size: str | int | None = None,
enable_monitoring: bool | None = None,
strict_cors: bool = True,
node_server_name: str | None = None,
node_port: int | None = None,
ssr_mode: bool | None = None,
pwa: bool | None = None,
_frontend: bool = True,
) -> tuple[App, str, str]:
"""
Launches a simple web server that serves the demo. Can also be used to create a
public link used by anyone to access the demo from their browser by setting share=True.
Parameters:
inline: whether to display in the gradio app inline in an iframe. Defaults to True in python notebooks; False otherwise.
inbrowser: whether to automatically launch the gradio app in a new tab on the default browser.
share: whether to create a publicly shareable link for the gradio app. Creates an SSH tunnel to make your UI accessible from anywhere. If not provided, it is set to False by default every time, except when running in Google Colab. When localhost is not accessible (e.g. Google Colab), setting share=False is not supported. Can be set by environment variable GRADIO_SHARE=True.
debug: if True, blocks the main thread from running. If running in Google Colab, this is needed to print the errors in the cell output.
auth: If provided, username and password (or list of username-password tuples) required to access app. Can also provide function that takes username and password and returns True if valid login.
auth_message: If provided, HTML message provided on login page.
prevent_thread_lock: By default, the gradio app blocks the main thread while the server is running. If set to True, the gradio app will not block and the gradio server will terminate as soon as the script finishes.
show_error: If True, any errors in the gradio app will be displayed in an alert modal and printed in the browser console log
server_port: will start gradio app on this port (if available). Can be set by environment variable GRADIO_SERVER_PORT. If None, will search for an available port starting at 7860.
server_name: to make app accessible on local network, set this to "0.0.0.0". Can be set by environment variable GRADIO_SERVER_NAME. If None, will use "127.0.0.1".
max_threads: the maximum number of total threads that the Gradio app can generate in parallel. The default is inherited from the starlette library (currently 40).
width: The width in pixels of the iframe element containing the gradio app (used if inline=True)
height: The height in pixels of the iframe element containing the gradio app (used if inline=True)
favicon_path: If a path to a file (.png, .gif, or .ico) is provided, it will be used as the favicon for the web page.
ssl_keyfile: If a path to a file is provided, will use this as the private key file to create a local server running on https.
ssl_certfile: If a path to a file is provided, will use this as the signed certificate for https. Needs to be provided if ssl_keyfile is provided.
ssl_keyfile_password: If a password is provided, will use this with the ssl certificate for https.
ssl_verify: If False, skips certificate validation which allows self-signed certificates to be used.
quiet: If True, suppresses most print statements.
show_api: If True, shows the api docs in the footer of the app. Default True.
allowed_paths: List of complete filepaths or parent directories that gradio is allowed to serve. Must be absolute paths. Warning: if you provide directories, any files in these directories or their subdirectories are accessible to all users of your app. Can be set by comma separated environment variable GRADIO_ALLOWED_PATHS. These files are generally assumed to be secure and will be displayed in the browser when possible.
blocked_paths: List of complete filepaths or parent directories that gradio is not allowed to serve (i.e. users of your app are not allowed to access). Must be absolute paths. Warning: takes precedence over `allowed_paths` and all other directories exposed by Gradio by default. Can be set by comma separated environment variable GRADIO_BLOCKED_PATHS.
root_path: The root path (or "mount point") of the application, if it's not served from the root ("/") of the domain. Often used when the application is behind a reverse proxy that forwards requests to the application. For example, if the application is served at "https://example.com/myapp", the `root_path` should be set to "/myapp". A full URL beginning with http:// or https:// can be provided, which will be used as the root path in its entirety. Can be set by environment variable GRADIO_ROOT_PATH. Defaults to "".
app_kwargs: Additional keyword arguments to pass to the underlying FastAPI app as a dictionary of parameter keys and argument values. For example, `{"docs_url": "/docs"}`
state_session_capacity: The maximum number of sessions whose information to store in memory. If the number of sessions exceeds this number, the oldest sessions will be removed. Reduce capacity to reduce memory usage when using gradio.State or returning updated components from functions. Defaults to 10000.
share_server_address: Use this to specify a custom FRP server and port for sharing Gradio apps (only applies if share=True). If not provided, will use the default FRP server at https://gradio.live. See https://github.com/huggingface/frp for more information.
share_server_protocol: Use this to specify the protocol to use for the share links. Defaults to "https", unless a custom share_server_address is provided, in which case it defaults to "http". If you are using a custom share_server_address and want to use https, you must set this to "https".
share_server_tls_certificate: The path to a TLS certificate file to use when connecting to a custom share server. This parameter is not used with the default FRP server at https://gradio.live. Otherwise, you must provide a valid TLS certificate file (e.g. a "cert.pem") relative to the current working directory, or the connection will not use TLS encryption, which is insecure.
auth_dependency: A function that takes a FastAPI request and returns a string user ID or None. If the function returns None for a specific request, that user is not authorized to access the app (they will see a 401 Unauthorized response). To be used with external authentication systems like OAuth. Cannot be used with `auth`.
max_file_size: The maximum file size in bytes that can be uploaded. Can be a string of the form "<value><unit>", where value is any positive integer and unit is one of "b", "kb", "mb", "gb", "tb". If None, no limit is set.
enable_monitoring: Enables traffic monitoring of the app through the /monitoring endpoint. By default is None, which enables this endpoint. If explicitly True, will also print the monitoring URL to the console. If False, will disable monitoring altogether.
strict_cors: If True, prevents external domains from making requests to a Gradio server running on localhost. If False, allows requests to localhost that originate from localhost but also, crucially, from "null". This parameter should normally be True to prevent CSRF attacks but may need to be False when embedding a *locally-running Gradio app* using web components.
ssr_mode: If True, the Gradio app will be rendered using server-side rendering mode, which is typically more performant and provides better SEO, but this requires Node 20+ to be installed on the system. If False, the app will be rendered using client-side rendering mode. If None, will use GRADIO_SSR_MODE environment variable or default to False.
pwa: If True, the Gradio app will be set up as an installable PWA (Progressive Web App). If set to None (default behavior), then the PWA feature will be enabled if this Gradio app is launched on Spaces, but not otherwise.
Returns:
app: FastAPI app object that is running the demo
local_url: Locally accessible link to the demo
share_url: Publicly accessible link to the demo (if share=True, otherwise None)
Example: (Blocks)
import gradio as gr
def reverse(text):
return text[::-1]
with gr.Blocks() as demo:
button = gr.Button(value="Reverse")
button.click(reverse, gr.Textbox(), gr.Textbox())
demo.launch(share=True, auth=("username", "password"))
Example: (Interface)
import gradio as gr
def reverse(text):
return text[::-1]
demo = gr.Interface(reverse, "text", "text")
demo.launch(share=True, auth=("username", "password"))
"""
from gradio.routes import App
if self._is_running_in_reload_thread:
# We have already launched the demo
return None, None, None # type: ignore
if not self.exited:
self.__exit__()
if auth is not None and auth_dependency is not None:
raise ValueError(
"You cannot provide both `auth` and `auth_dependency` in launch(). Please choose one."
)
if (
auth
and not callable(auth)
and not isinstance(auth[0], tuple)
and not isinstance(auth[0], list)
):
self.auth = [auth]
else:
self.auth = auth
if self.auth and not callable(self.auth):
if any(not authenticable[0] for authenticable in self.auth):
warnings.warn(
"You have provided an empty username in `auth`. Please provide a valid username."
)
if any(not authenticable[1] for authenticable in self.auth):
warnings.warn(
"You have provided an empty password in `auth`. Please provide a valid password."
)
self.auth_message = auth_message
self.show_error = show_error
self.height = height
self.width = width
self.favicon_path = favicon_path
self.ssl_verify = ssl_verify
self.state_session_capacity = state_session_capacity
if root_path is None:
self.root_path = os.environ.get("GRADIO_ROOT_PATH", "")
else:
self.root_path = root_path
self.show_api = show_api
if allowed_paths:
self.allowed_paths = allowed_paths
else:
allowed_paths_env = os.environ.get("GRADIO_ALLOWED_PATHS", "")
if len(allowed_paths_env) > 0:
self.allowed_paths = [
item.strip() for item in allowed_paths_env.split(",")
]
else:
self.allowed_paths = []
if blocked_paths:
self.blocked_paths = blocked_paths
else:
blocked_paths_env = os.environ.get("GRADIO_BLOCKED_PATHS", "")
if len(blocked_paths_env) > 0:
self.blocked_paths = [
item.strip() for item in blocked_paths_env.split(",")
]
else:
self.blocked_paths = []
if not isinstance(self.allowed_paths, list):
raise ValueError("`allowed_paths` must be a list of directories.")
if not isinstance(self.blocked_paths, list):
raise ValueError("`blocked_paths` must be a list of directories.")
self.validate_queue_settings()
self.max_file_size = utils._parse_file_size(max_file_size)
if self.dev_mode:
for block in self.blocks.values():
if block.key is None:
block.key = f"__{block._id}__"
self.pwa = utils.get_space() is not None if pwa is None else pwa
self.max_threads = max_threads
self._queue.max_thread_count = max_threads
self.transpile_to_js(quiet=quiet)
self.config = self.get_config_file()
self.ssr_mode = (
False
if wasm_utils.IS_WASM
else (
ssr_mode
if ssr_mode is not None
else os.getenv("GRADIO_SSR_MODE", "False").lower() == "true"
)
)
if self.ssr_mode:
self.node_path = os.environ.get(
"GRADIO_NODE_PATH", "" if wasm_utils.IS_WASM else get_node_path()
)
self.node_server_name, self.node_process, self.node_port = (
start_node_server(
server_name=node_server_name,
server_port=node_port,
node_path=self.node_path,
)
)
else:
self.node_server_name = self.node_port = self.node_process = None
# self.server_app is included for backwards compatibility
self.server_app = self.app = App.create_app(
self,
auth_dependency=auth_dependency,
app_kwargs=app_kwargs,
strict_cors=strict_cors,
ssr_mode=self.ssr_mode,
)
if self.is_running:
if not isinstance(self.local_url, str):
raise ValueError(f"Invalid local_url: {self.local_url}")
if not (quiet):
print(
"Rerunning server... use `close()` to stop if you need to change `launch()` parameters.\n----"
)
else:
if wasm_utils.IS_WASM:
server_name = "xxx"
server_port = 99999
local_url = ""
server = None
# In the Wasm environment, we only need the app object
# which the frontend app will directly communicate with through the Worker API,
# and we don't need to start a server.
wasm_utils.register_app(self.app)
else:
from gradio import http_server
(
server_name,
server_port,
local_url,
server,
) = http_server.start_server(
app=self.app,
server_name=server_name,
server_port=server_port,
ssl_keyfile=ssl_keyfile,
ssl_certfile=ssl_certfile,
ssl_keyfile_password=ssl_keyfile_password,
)
self.server_name = server_name
self.local_url = local_url
self.local_api_url = f"{self.local_url.rstrip('/')}{API_PREFIX}/"
self.server_port = server_port
self.server = server
self.is_running = True
self.is_colab = utils.colab_check()
self.is_kaggle = utils.kaggle_check()
self.share_server_address = share_server_address
self.share_server_protocol = share_server_protocol or (
"http" if share_server_address is not None else "https"
)
self.share_server_tls_certificate = share_server_tls_certificate
self.has_launched = True
self.protocol = (
"https"
if self.local_url.startswith("https") or self.is_colab
else "http"
)
if not wasm_utils.IS_WASM and not self.is_colab and not quiet:
s = (
strings.en["RUNNING_LOCALLY_SSR"]
if self.ssr_mode
else strings.en["RUNNING_LOCALLY"]
)
print(s.format(self.protocol, self.server_name, self.server_port))
self._queue.set_server_app(self.server_app)
if not wasm_utils.IS_WASM:
# Async functions cannot be run in the background outside the app's scope,
# so as a workaround we trigger the app's startup endpoint over HTTP.
resp = httpx.get(
f"{self.local_api_url}startup-events",
verify=ssl_verify,
timeout=None,
)
if not resp.is_success:
raise Exception(
f"Couldn’t start the app because '{resp.url}' failed (code {resp.status_code}). Check your network or proxy settings to ensure localhost is accessible."
)
else:
# NOTE: One benefit of the code above dispatching `startup_events()` via a self HTTP request is
# that `self._queue.start()` is called in another thread which is managed by the HTTP server, `uvicorn`
# so all the asyncio tasks created by the queue runs in an event loop in that thread and
# will be cancelled just by stopping the server.
# In contrast, in the Wasm env, we can't do that because `threading` is not supported and all async tasks will run in the same event loop, `pyodide.webloop.WebLoop` in the main thread.
# So we need to manually cancel them. See `self.close()`.
self.run_startup_events()
# In the normal mode, self.run_extra_startup_events() is awaited like https://github.com/gradio-app/gradio/blob/2afcad80abd489111e47cf586a2a8221cc3dc9b6/gradio/routes.py#L1442.
# But in the Wasm env, we need to call the start up events here as described above, so we can't await it as here is not in an async function.
# So we use create_task() instead. This is a best-effort fallback in the Wasm env but it doesn't guarantee that all the tasks are completed before they are needed.
asyncio.create_task(self.run_extra_startup_events())
self.is_sagemaker = (
False # TODO: fix Gradio's behavior in sagemaker and other hosted notebooks
)
if share is None:
if self.is_colab:
if not quiet:
print(
"Running Gradio in a Colab notebook requires sharing enabled. Automatically setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n"
)
self.share = True
elif self.is_kaggle:
if not quiet:
print(
"Kaggle notebooks require sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n"
)
self.share = True
elif self.is_sagemaker:
if not quiet:
print(
"Sagemaker notebooks may require sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n"
)
self.share = True
else:
self.share = False
# GRADIO_SHARE environment variable for forcing 'share=True'
# GRADIO_SHARE=True => share=True
share_env = os.getenv("GRADIO_SHARE")
if share_env is not None and share_env.lower() == "true":
self.share = True
else:
self.share = share
if enable_monitoring:
print(
f"Monitoring URL: {self.local_url}monitoring/{self.app.analytics_key}"
)
self.enable_monitoring = enable_monitoring in [True, None]
# If running in a colab or not able to access localhost,
# a shareable link must be created.
if (
_frontend
and not wasm_utils.IS_WASM
and not networking.url_ok(self.local_url)
and not self.share
):
raise ValueError(
"When localhost is not accessible, a shareable link must be created. Please set share=True or check your proxy settings to allow access to localhost."
)
if self.is_colab and not quiet:
if debug:
print(strings.en["COLAB_DEBUG_TRUE"])
else:
print(strings.en["COLAB_DEBUG_FALSE"])
if not self.share:
print(strings.en["COLAB_WARNING"].format(self.server_port))
if self.share:
if self.space_id:
warnings.warn(
"Setting share=True is not supported on Hugging Face Spaces"
)
self.share = False
if wasm_utils.IS_WASM:
warnings.warn(
"Setting share=True is not supported in the Wasm environment"
)
self.share = False
if self.share:
try:
if self.share_url is None:
share_url = networking.setup_tunnel(
local_host=self.server_name,
local_port=self.server_port,
share_token=self.share_token,
share_server_address=self.share_server_address,
share_server_tls_certificate=self.share_server_tls_certificate,
)
parsed_url = urlparse(share_url)
self.share_url = urlunparse(
(self.share_server_protocol,) + parsed_url[1:]
)
print(strings.en["SHARE_LINK_DISPLAY"].format(self.share_url))
if not (quiet):
print(strings.en["SHARE_LINK_MESSAGE"])
except Exception as e:
if self.analytics_enabled:
analytics.error_analytics("Not able to set up tunnel")
self.share_url = None
self.share = False
if isinstance(e, ChecksumMismatchError):
print(
strings.en["COULD_NOT_GET_SHARE_LINK_CHECKSUM"].format(
BINARY_PATH
)
)
elif Path(BINARY_PATH).exists():
print(strings.en["COULD_NOT_GET_SHARE_LINK"])
else:
print(
strings.en["COULD_NOT_GET_SHARE_LINK_MISSING_FILE"].format(
BINARY_PATH,
BINARY_URL,
BINARY_FILENAME,
BINARY_FOLDER,
)
)
else:
if not quiet and not wasm_utils.IS_WASM:
print(strings.en["PUBLIC_SHARE_TRUE"])
self.share_url = None
if inbrowser and not wasm_utils.IS_WASM:
link = self.share_url if self.share and self.share_url else self.local_url
webbrowser.open(link)
# Check if running in a Python notebook in which case, display inline
if inline is None:
inline = utils.ipython_check()
if inline:
try:
from IPython.display import HTML, Javascript, display # type: ignore
if self.share and self.share_url:
while not networking.url_ok(self.share_url):
time.sleep(0.25)
artifact = HTML(
f'<div><iframe src="{self.share_url}" width="{self.width}" height="{self.height}" allow="autoplay; camera; microphone; clipboard-read; clipboard-write;" frameborder="0" allowfullscreen></iframe></div>'
)
elif self.is_colab:
# modified from /usr/local/lib/python3.7/dist-packages/google/colab/output/_util.py within Colab environment
code = """(async (port, path, width, height, cache, element) => {
if (!google.colab.kernel.accessAllowed && !cache) {
return;
}
element.appendChild(document.createTextNode(''));
const url = await google.colab.kernel.proxyPort(port, {cache});
const external_link = document.createElement('div');
external_link.innerHTML = `
<div style="font-family: monospace; margin-bottom: 0.5rem">
Running on <a href=${new URL(path, url).toString()} target="_blank">
https://localhost:${port}${path}
</a>
</div>
`;
element.appendChild(external_link);
const iframe = document.createElement('iframe');
iframe.src = new URL(path, url).toString();
iframe.height = height;
iframe.allow = "autoplay; camera; microphone; clipboard-read; clipboard-write;"
iframe.width = width;
iframe.style.border = 0;
element.appendChild(iframe);
})""" + "({port}, {path}, {width}, {height}, {cache}, window.element)".format(
port=json.dumps(self.server_port),
path=json.dumps("/"),
width=json.dumps(self.width),
height=json.dumps(self.height),
cache=json.dumps(False),
)
artifact = Javascript(code)
else:
artifact = HTML(
f'<div><iframe src="{self.local_url}" width="{self.width}" height="{self.height}" allow="autoplay; camera; microphone; clipboard-read; clipboard-write;" frameborder="0" allowfullscreen></iframe></div>'
)
self.artifact = artifact
display(artifact)
except ImportError:
pass
if getattr(self, "analytics_enabled", False):
data = {
"launch_method": "browser" if inbrowser else "inline",
"is_google_colab": self.is_colab,
"is_sharing_on": self.share,
"is_space": self.space_id is not None,
"mode": self.mode,
}
analytics.launched_analytics(self, data)
is_in_interactive_mode = bool(getattr(sys, "ps1", sys.flags.interactive))
# Block main thread if debug==True
if (
debug
or int(os.getenv("GRADIO_DEBUG", "0")) == 1
and not wasm_utils.IS_WASM
or (
# Block main thread if running in a script to stop script from exiting
not prevent_thread_lock
and not is_in_interactive_mode
# In the Wasm env, we don't have to block the main thread because the server won't be shut down after the execution finishes.
# Moreover, we MUST NOT do it because there is only one thread in the Wasm env and blocking it will stop the subsequent code from running.
and not wasm_utils.IS_WASM
)
):
self.block_thread()
return TupleNoPrint((self.server_app, self.local_url, self.share_url)) # type: ignore | Launches a simple web server that serves the demo. Can also be used to create a
public link used by anyone to access the demo from their browser by setting share=True.
Parameters:
inline: whether to display in the gradio app inline in an iframe. Defaults to True in python notebooks; False otherwise.
inbrowser: whether to automatically launch the gradio app in a new tab on the default browser.
share: whether to create a publicly shareable link for the gradio app. Creates an SSH tunnel to make your UI accessible from anywhere. If not provided, it is set to False by default every time, except when running in Google Colab. When localhost is not accessible (e.g. Google Colab), setting share=False is not supported. Can be set by environment variable GRADIO_SHARE=True.
debug: if True, blocks the main thread from running. If running in Google Colab, this is needed to print the errors in the cell output.
auth: If provided, username and password (or list of username-password tuples) required to access app. Can also provide function that takes username and password and returns True if valid login.
auth_message: If provided, HTML message provided on login page.
prevent_thread_lock: By default, the gradio app blocks the main thread while the server is running. If set to True, the gradio app will not block and the gradio server will terminate as soon as the script finishes.
show_error: If True, any errors in the gradio app will be displayed in an alert modal and printed in the browser console log
server_port: will start gradio app on this port (if available). Can be set by environment variable GRADIO_SERVER_PORT. If None, will search for an available port starting at 7860.
server_name: to make app accessible on local network, set this to "0.0.0.0". Can be set by environment variable GRADIO_SERVER_NAME. If None, will use "127.0.0.1".
max_threads: the maximum number of total threads that the Gradio app can generate in parallel. The default is inherited from the starlette library (currently 40).
width: The width in pixels of the iframe element containing the gradio app (used if inline=True)
height: The height in pixels of the iframe element containing the gradio app (used if inline=True)
favicon_path: If a path to a file (.png, .gif, or .ico) is provided, it will be used as the favicon for the web page.
ssl_keyfile: If a path to a file is provided, will use this as the private key file to create a local server running on https.
ssl_certfile: If a path to a file is provided, will use this as the signed certificate for https. Needs to be provided if ssl_keyfile is provided.
ssl_keyfile_password: If a password is provided, will use this with the ssl certificate for https.
ssl_verify: If False, skips certificate validation which allows self-signed certificates to be used.
quiet: If True, suppresses most print statements.
show_api: If True, shows the api docs in the footer of the app. Default True.
allowed_paths: List of complete filepaths or parent directories that gradio is allowed to serve. Must be absolute paths. Warning: if you provide directories, any files in these directories or their subdirectories are accessible to all users of your app. Can be set by comma separated environment variable GRADIO_ALLOWED_PATHS. These files are generally assumed to be secure and will be displayed in the browser when possible.
blocked_paths: List of complete filepaths or parent directories that gradio is not allowed to serve (i.e. users of your app are not allowed to access). Must be absolute paths. Warning: takes precedence over `allowed_paths` and all other directories exposed by Gradio by default. Can be set by comma separated environment variable GRADIO_BLOCKED_PATHS.
root_path: The root path (or "mount point") of the application, if it's not served from the root ("/") of the domain. Often used when the application is behind a reverse proxy that forwards requests to the application. For example, if the application is served at "https://example.com/myapp", the `root_path` should be set to "/myapp". A full URL beginning with http:// or https:// can be provided, which will be used as the root path in its entirety. Can be set by environment variable GRADIO_ROOT_PATH. Defaults to "".
app_kwargs: Additional keyword arguments to pass to the underlying FastAPI app as a dictionary of parameter keys and argument values. For example, `{"docs_url": "/docs"}`
state_session_capacity: The maximum number of sessions whose information to store in memory. If the number of sessions exceeds this number, the oldest sessions will be removed. Reduce capacity to reduce memory usage when using gradio.State or returning updated components from functions. Defaults to 10000.
share_server_address: Use this to specify a custom FRP server and port for sharing Gradio apps (only applies if share=True). If not provided, will use the default FRP server at https://gradio.live. See https://github.com/huggingface/frp for more information.
share_server_protocol: Use this to specify the protocol to use for the share links. Defaults to "https", unless a custom share_server_address is provided, in which case it defaults to "http". If you are using a custom share_server_address and want to use https, you must set this to "https".
share_server_tls_certificate: The path to a TLS certificate file to use when connecting to a custom share server. This parameter is not used with the default FRP server at https://gradio.live. Otherwise, you must provide a valid TLS certificate file (e.g. a "cert.pem") relative to the current working directory, or the connection will not use TLS encryption, which is insecure.
auth_dependency: A function that takes a FastAPI request and returns a string user ID or None. If the function returns None for a specific request, that user is not authorized to access the app (they will see a 401 Unauthorized response). To be used with external authentication systems like OAuth. Cannot be used with `auth`.
max_file_size: The maximum file size in bytes that can be uploaded. Can be a string of the form "<value><unit>", where value is any positive integer and unit is one of "b", "kb", "mb", "gb", "tb". If None, no limit is set.
enable_monitoring: Enables traffic monitoring of the app through the /monitoring endpoint. By default is None, which enables this endpoint. If explicitly True, will also print the monitoring URL to the console. If False, will disable monitoring altogether.
strict_cors: If True, prevents external domains from making requests to a Gradio server running on localhost. If False, allows requests to localhost that originate from localhost but also, crucially, from "null". This parameter should normally be True to prevent CSRF attacks but may need to be False when embedding a *locally-running Gradio app* using web components.
ssr_mode: If True, the Gradio app will be rendered using server-side rendering mode, which is typically more performant and provides better SEO, but this requires Node 20+ to be installed on the system. If False, the app will be rendered using client-side rendering mode. If None, will use GRADIO_SSR_MODE environment variable or default to False.
pwa: If True, the Gradio app will be set up as an installable PWA (Progressive Web App). If set to None (default behavior), then the PWA feature will be enabled if this Gradio app is launched on Spaces, but not otherwise.
Returns:
app: FastAPI app object that is running the demo
local_url: Locally accessible link to the demo
share_url: Publicly accessible link to the demo (if share=True, otherwise None)
Example: (Blocks)
import gradio as gr
def reverse(text):
return text[::-1]
with gr.Blocks() as demo:
button = gr.Button(value="Reverse")
button.click(reverse, gr.Textbox(), gr.Textbox())
demo.launch(share=True, auth=("username", "password"))
Example: (Interface)
import gradio as gr
def reverse(text):
return text[::-1]
demo = gr.Interface(reverse, "text", "text")
demo.launch(share=True, auth=("username", "password")) | launch | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
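A sketch combining a callable `auth`, an upload size cap, and surfaced errors; the credential check is a placeholder, not a recommended scheme:

```python
import gradio as gr

def check_login(username: str, password: str) -> bool:
    # Placeholder check -- swap in a real credential store.
    return username == "admin" and password == "change-me"

demo = gr.Interface(lambda s: s.upper(), "text", "text")
demo.launch(auth=check_login, auth_message="Staff access only",
            max_file_size="10mb", show_error=True)
```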
def integrate(
self,
comet_ml=None,
wandb: ModuleType | None = None,
mlflow: ModuleType | None = None,
) -> None:
"""
A catch-all method for integrating with other libraries. This method should be run after launch()
Parameters:
comet_ml: If a comet_ml Experiment object is provided, will integrate with the experiment and appear on Comet dashboard
wandb: If the wandb module is provided, will integrate with it and appear on WandB dashboard
mlflow: If the mlflow module is provided, will integrate with the experiment and appear on ML Flow dashboard
"""
analytics_integration = ""
if comet_ml is not None:
analytics_integration = "CometML"
comet_ml.log_other("Created from", "Gradio")
if self.share_url is not None:
comet_ml.log_text(f"gradio: {self.share_url}")
comet_ml.end()
elif self.local_url:
comet_ml.log_text(f"gradio: {self.local_url}")
comet_ml.end()
else:
raise ValueError("Please run `launch()` first.")
if wandb is not None:
analytics_integration = "WandB"
if self.share_url is not None:
wandb.log(
{
"Gradio panel": wandb.Html(
'<iframe src="'
+ self.share_url
+ '" width="'
+ str(self.width)
+ '" height="'
+ str(self.height)
+ '" frameBorder="0"></iframe>'
)
}
)
else:
print(
"The WandB integration requires you to `launch(share=True)` first."
)
if mlflow is not None:
analytics_integration = "MLFlow"
if self.share_url is not None:
mlflow.log_param("Gradio Interface Share Link", self.share_url)
else:
mlflow.log_param("Gradio Interface Local Link", self.local_url)
if self.analytics_enabled and analytics_integration:
data = {"integration": analytics_integration}
analytics.integration_analytics(data) | A catch-all method for integrating with other libraries. This method should be run after launch()
Parameters:
comet_ml: If a comet_ml Experiment object is provided, will integrate with the experiment and appear on Comet dashboard
wandb: If the wandb module is provided, will integrate with it and appear on WandB dashboard
mlflow: If the mlflow module is provided, will integrate with the experiment and appear on ML Flow dashboard | integrate | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
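A hedged WandB sketch (assumes `wandb` is installed and you are logged in); `integrate()` must come after `launch()`, and the WandB panel requires a share link:

```python
import gradio as gr
import wandb

wandb.init(project="gradio-demo")  # illustrative project name

demo = gr.Interface(lambda s: s[::-1], "text", "text")
demo.launch(share=True, prevent_thread_lock=True)  # keep the script running
demo.integrate(wandb=wandb)
```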
def close(self, verbose: bool = True) -> None:
"""
Closes the Interface that was launched and frees the port.
"""
try:
if wasm_utils.IS_WASM:
# NOTE:
# Normally, queue-related async tasks whose async tasks are started at the `/queue/data` endpoint function)
# are running in an event loop in the server thread,
# so they will be cancelled by `self.server.close()` below.
# However, in the Wasm env, we don't have the `server` and
# all async tasks are running in the same event loop, `pyodide.webloop.WebLoop` in the main thread,
# so we have to cancel them explicitly so that these tasks won't run after a new app is launched.
self._queue._cancel_asyncio_tasks()
self.server_app._cancel_asyncio_tasks()
self._queue.close()
# set this before closing server to shut down heartbeats
self.is_running = False
self.app.stop_event.set()
if self.server:
self.server.close()
# So that the startup events (starting the queue)
# happen the next time the app is launched
self.app.startup_events_triggered = False
if verbose:
print(f"Closing server running on port: {self.server_port}")
except (AttributeError, OSError): # can't close if not running
pass | Closes the Interface that was launched and frees the port. | close | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
def block_thread(
self,
) -> None:
"""Block main thread until interrupted by user."""
try:
while True:
time.sleep(0.1)
except (KeyboardInterrupt, OSError):
print("Keyboard interruption in main thread... closing server.")
if self.server:
self.server.close()
for tunnel in CURRENT_TUNNELS:
tunnel.kill() | Block main thread until interrupted by user. | block_thread | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
def run_startup_events(self):
"""Events that should be run when the app containing this block starts up."""
self._queue.start()
# So that processing can resume in case the queue was stopped
self._queue.stopped = False
self.is_running = True
self.create_limiter() | Events that should be run when the app containing this block starts up. | run_startup_events | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
def get_api_info(self, all_endpoints: bool = False) -> dict[str, Any] | None:
"""
Gets the information needed to generate the API docs from a Blocks.
Parameters:
all_endpoints: If True, returns information about all endpoints, including those with show_api=False.
"""
config = self.config
api_info = {"named_endpoints": {}, "unnamed_endpoints": {}}
for fn in self.fns.values():
if not fn.fn or fn.api_name is False:
continue
if not all_endpoints and not fn.show_api:
continue
dependency_info = {"parameters": [], "returns": [], "show_api": fn.show_api}
fn_info = utils.get_function_params(fn.fn) # type: ignore
skip_endpoint = False
inputs = fn.inputs
for index, input_block in enumerate(inputs):
for component in config["components"]:
if component["id"] == input_block._id:
break
else:
skip_endpoint = True # if component not found, skip endpoint
break
type = component["props"]["name"]
if self.blocks[component["id"]].skip_api:
continue
label = component["props"].get("label", f"parameter_{input_block._id}")
comp = self.get_component(component["id"])
if not isinstance(comp, components.Component):
raise TypeError(f"{comp!r} is not a Component")
info = component.get("api_info_as_input", component.get("api_info"))
example = comp.example_inputs()
python_type = client_utils.json_schema_to_python_type(info)
# Since the clients use "api_name" and "fn_index" to designate the endpoint and
# "result_callbacks" to specify the callbacks, we need to make sure that no parameters
# have those names. Hence the final checks.
if (
fn.fn
and index < len(fn_info)
and fn_info[index][0]
not in ["api_name", "fn_index", "result_callbacks"]
):
parameter_name = fn_info[index][0]
else:
parameter_name = f"param_{index}"
# How default values are set for the client: if a component has an initial value, then that parameter
# is optional in the client and the initial value from the config is used as default in the client.
# If the component does not have an initial value, but if the corresponding argument in the predict function has
# a default value of None, then that parameter is also optional in the client and the None is used as default in the client.
if component["props"].get("value") is not None:
parameter_has_default = True
parameter_default = component["props"]["value"]
elif (
fn.fn
and index < len(fn_info)
and fn_info[index][1]
and fn_info[index][2] is None
):
parameter_has_default = True
parameter_default = None
else:
parameter_has_default = False
parameter_default = None
dependency_info["parameters"].append(
{
"label": label,
"parameter_name": parameter_name,
"parameter_has_default": parameter_has_default,
"parameter_default": parameter_default,
"type": info,
"python_type": {
"type": python_type,
"description": info.get("additional_description", ""),
},
"component": type.capitalize(),
"example_input": example,
}
)
outputs = fn.outputs
for o in outputs:
for component in config["components"]:
if component["id"] == o._id:
break
else:
skip_endpoint = True # if component not found, skip endpoint
break
type = component["props"]["name"]
if self.blocks[component["id"]].skip_api:
continue
label = component["props"].get("label", f"value_{o._id}")
comp = self.get_component(component["id"])
if not isinstance(comp, components.Component):
raise TypeError(f"{comp!r} is not a Component")
info = component.get("api_info_as_output", component["api_info"])
example = comp.example_inputs()
python_type = client_utils.json_schema_to_python_type(info)
dependency_info["returns"].append(
{
"label": label,
"type": info,
"python_type": {
"type": python_type,
"description": info.get("description", ""),
},
"component": type.capitalize(),
}
)
if not skip_endpoint:
api_info["named_endpoints"][f"/{fn.api_name}"] = dependency_info
return api_info | Gets the information needed to generate the API docs from a Blocks.
Parameters:
all_endpoints: If True, returns information about all endpoints, including those with show_api=False. | get_api_info | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
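A rough illustration of the structure get_api_info() returns, assuming a single event registered with api_name="greet" (the expected values follow from the code above):
import gradio as gr

with gr.Blocks() as demo:
    name = gr.Textbox(label="Name")
    greeting = gr.Textbox(label="Greeting")
    name.submit(lambda n: f"Hello {n}", name, greeting, api_name="greet")

info = demo.get_api_info()
print(list(info["named_endpoints"]))  # expected: ['/greet']
print(info["named_endpoints"]["/greet"]["parameters"][0]["label"])  # expected: 'Name'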
def route(self, name: str, path: str | None = None) -> Blocks:
"""
Adds a new page to the Blocks app.
Parameters:
name: The name of the page as it appears in the nav bar.
path: The URL suffix appended after your Gradio app's root URL to access this page (e.g. if path="/test", the page will be accessible at http://localhost:7860/test). If not provided, the path is generated from the name by converting to lowercase and replacing spaces with hyphens. Any leading or trailing forward slashes are stripped.
Example:
with gr.Blocks() as demo:
name = gr.Textbox(label="Name")
...
with demo.route("Test", "/test"):
num = gr.Number()
...
"""
if get_blocks_context():
raise ValueError(
"You cannot create a route while inside a Blocks() context. Call route() outside the Blocks() context (unindent this line)."
)
if path:
path = path.strip("/")
valid_path_regex = re.compile(r"^[a-zA-Z0-9-._~!$&'()*+,;=:@\[\]]+$")
if not valid_path_regex.match(path):
raise ValueError(
f"Path '{path}' contains invalid characters. Paths can only contain alphanumeric characters and the following special characters: -._~!$&'()*+,;=:@[]"
)
if path in INTERNAL_ROUTES:
raise ValueError(f"Route with path '{path}' already exists")
if path is None:
path = name.lower().replace(" ", "-")
path = "".join(
[letter for letter in path if letter.isalnum() or letter == "-"]
)
while path in INTERNAL_ROUTES or path in [page[0] for page in self.pages]:
path = "_" + path
self.pages.append((path, name))
self.current_page = path
return self | Adds a new page to the Blocks app.
Parameters:
name: The name of the page as it appears in the nav bar.
path: The URL suffix appended after your Gradio app's root URL to access this page (e.g. if path="/test", the page will be accessible at http://localhost:7860/test). If not provided, the path is generated from the name by converting to lowercase and replacing spaces with hyphens. Any leading or trailing forward slashes are stripped.
Example:
with gr.Blocks() as demo:
name = gr.Textbox(label="Name")
...
with demo.route("Test", "/test"):
num = gr.Number()
... | route | python | gradio-app/gradio | gradio/blocks.py | https://github.com/gradio-app/gradio/blob/master/gradio/blocks.py | Apache-2.0 |
def blocks(line, cell, local_ns):
"""Launch a demo defined in a cell in reload mode."""
args = parse_argstring(blocks, line) # type: ignore
exec(cell, None, local_ns)
demo: gr.Blocks = local_ns[args.demo_name]
if not reloader.demo_tracked():
demo.launch(share=args.share)
reloader.track(demo)
else:
reloader.swap_blocks(demo)
return reloader.running_demo.artifact | Launch a demo defined in a cell in reload mode. | load_ipython_extension.blocks | python | gradio-app/gradio | gradio/ipython_ext.py | https://github.com/gradio-app/gradio/blob/master/gradio/ipython_ext.py | Apache-2.0 |
def load_ipython_extension(ipython):
reloader = JupyterReloader(ipython)
@magic_arguments() # type: ignore
@argument("--demo-name", default="demo", help="Name of gradio blocks instance.") # type: ignore
@argument( # type: ignore
"--share",
default=False,
const=True,
nargs="?",
help="Whether to launch with sharing. Will slow down reloading.",
)
@register_cell_magic # type: ignore
@needs_local_scope # type: ignore
def blocks(line, cell, local_ns):
"""Launch a demo defined in a cell in reload mode."""
args = parse_argstring(blocks, line) # type: ignore
exec(cell, None, local_ns)
demo: gr.Blocks = local_ns[args.demo_name]
if not reloader.demo_tracked():
demo.launch(share=args.share)
reloader.track(demo)
else:
reloader.swap_blocks(demo)
return reloader.running_demo.artifact | Launch a demo defined in a cell in reload mode. | load_ipython_extension | python | gradio-app/gradio | gradio/ipython_ext.py | https://github.com/gradio-app/gradio/blob/master/gradio/ipython_ext.py | Apache-2.0 |
def safe_get_lock() -> asyncio.Lock:
"""Get asyncio.Lock() without fear of getting an Exception.
Needed because in reload mode we import the Blocks object outside
the main thread.
"""
try:
asyncio.get_event_loop()
return asyncio.Lock()
except RuntimeError:
return None # type: ignore | Get asyncio.Lock() without fear of getting an Exception.
Needed because in reload mode we import the Blocks object outside
the main thread. | safe_get_lock | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def _is_if_name_main(expr: ast.AST) -> bool:
"""Find the if __name__ == '__main__': block."""
return (
isinstance(expr, ast.If)
and isinstance(expr.test, ast.Compare)
and isinstance(expr.test.left, ast.Name)
and expr.test.left.id == "__name__"
and len(expr.test.ops) == 1
and isinstance(expr.test.ops[0], ast.Eq)
and isinstance(expr.test.comparators[0], ast.Constant)
and expr.test.comparators[0].s == "__main__"
) | Find the if __name__ == '__main__': block. | _remove_if_name_main_codeblock._is_if_name_main | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def _remove_if_name_main_codeblock(file_path: str):
"""Parse the file, remove the gr.no_reload code blocks, and write the file back to disk.
Parameters:
file_path (str): The path to the file whose if __name__ == '__main__': block should be replaced with a no-op.
"""
with open(file_path, encoding="utf-8") as file:
code = file.read()
tree = ast.parse(code)
def _is_if_name_main(expr: ast.AST) -> bool:
"""Find the if __name__ == '__main__': block."""
return (
isinstance(expr, ast.If)
and isinstance(expr.test, ast.Compare)
and isinstance(expr.test.left, ast.Name)
and expr.test.left.id == "__name__"
and len(expr.test.ops) == 1
and isinstance(expr.test.ops[0], ast.Eq)
and isinstance(expr.test.comparators[0], ast.Constant)
and expr.test.comparators[0].s == "__main__"
)
# Find the positions of the code blocks to load
for node in ast.walk(tree):
if _is_if_name_main(node):
assert isinstance(node, ast.If) # noqa: S101
node.body = [ast.Pass(lineno=node.lineno, col_offset=node.col_offset)]
# convert tree to string
code_removed = compile(tree, filename=file_path, mode="exec")
return code_removed | Parse the file, replace the body of any if __name__ == '__main__': block with pass, and return the compiled code.
Parameters:
file_path (str): The path to the file whose if __name__ == '__main__': block should be replaced with a no-op. | _remove_if_name_main_codeblock | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0
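The AST rewrite above can be illustrated in isolation; this sketch applies the same if __name__ == "__main__" detection to a source string and prints the result:
import ast

source = (
    "x = 1\n"
    "if __name__ == '__main__':\n"
    "    print('running as a script')\n"
)
tree = ast.parse(source)
for node in ast.walk(tree):
    if (
        isinstance(node, ast.If)
        and isinstance(node.test, ast.Compare)
        and isinstance(node.test.left, ast.Name)
        and node.test.left.id == "__name__"
    ):
        # replace the guarded body with a single no-op statement
        node.body = [ast.Pass(lineno=node.lineno, col_offset=node.col_offset)]
print(ast.unparse(tree))  # the guarded block is reduced to `pass` (ast.unparse needs Python 3.9+)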
def watchfn(reloader: SourceFileReloader):
"""Watch python files in a given module.
get_changes is taken from uvicorn's default file watcher.
"""
NO_RELOAD.set(False)
# The thread running watchfn will be the thread reloading
# the app. So we need to modify this thread_data attr here
# so that subsequent calls to reload don't launch the app
from gradio.cli.commands.reload import reload_thread
reload_thread.running_reload = True
def get_changes() -> Path | None:
for file in iter_py_files():
try:
mtime = file.stat().st_mtime
except OSError: # pragma: nocover
continue
old_time = mtimes.get(file)
if old_time is None:
mtimes[file] = mtime
continue
elif mtime > old_time:
return file
return None
def iter_py_files() -> Iterator[Path]:
for reload_dir in reload_dirs:
for path in list(reload_dir.rglob("*.py")):
yield path.resolve()
for path in list(reload_dir.rglob("*.css")):
yield path.resolve()
reload_dirs = [Path(dir_) for dir_ in reloader.watch_dirs]
import sys
for dir_ in reload_dirs:
sys.path.insert(0, str(dir_))
mtimes = {}
# Need to import the module in this thread so that the
# module is available in the namespace of this thread
module = reloader.watch_module
no_reload_source_code = _remove_if_name_main_codeblock(str(reloader.demo_file))
exec(no_reload_source_code, module.__dict__)
sys.modules[reloader.watch_module_name] = module
while reloader.should_watch():
changed = get_changes()
if changed:
print(f"Changes detected in: {changed}")
try:
# How source file reloading works
# 1. Remove the gr.no_reload code blocks from the temp file
# 2. Execute the changed source code in the original module's namespace
# 3. Delete the package the module is in from sys.modules.
# This is so that the updated module is available in the entire package
# 4. Do 1-2 for the main demo file even if it did not change.
# This is because the main demo file may import the changed file and we need the
# changes to be reflected in the main demo file.
if changed.suffix == ".py":
changed_in_copy = _remove_if_name_main_codeblock(str(changed))
if changed != reloader.demo_file:
changed_module = _find_module(changed)
if changed_module:
exec(changed_in_copy, changed_module.__dict__)
top_level_parent = sys.modules[
changed_module.__name__.split(".")[0]
]
if top_level_parent != changed_module:
importlib.reload(top_level_parent)
changed_demo_file = _remove_if_name_main_codeblock(
str(reloader.demo_file)
)
exec(changed_demo_file, module.__dict__)
except Exception as error:
print(
f"Reloading {reloader.watch_module_name} failed with the following exception: "
)
if not isinstance(error, Error) or error.print_exception:
traceback.print_exc()
mtimes = {}
reloader.alert_change("error")
reloader.app.reload_error_message = traceback.format_exc()
continue
demo = getattr(module, reloader.demo_name)
reloader.swap_blocks(demo)
mtimes = {}
time.sleep(0.05) | Watch python files in a given module.
get_changes is taken from uvicorn's default file watcher. | watchfn | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def deep_equal(a: Any, b: Any) -> bool:
"""
Deep equality check for component values.
Prefer orjson for performance and compatibility with numpy arrays/dataframes/torch tensors.
If objects are not serializable by orjson, fall back to regular equality check.
"""
def _serialize(a: Any) -> bytes:
return orjson.dumps(
a,
option=orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_PASSTHROUGH_DATETIME,
)
try:
return _serialize(a) == _serialize(b)
except TypeError:
try:
return a == b
except Exception:
return False | Deep equality check for component values.
Prefer orjson for performance and compatibility with numpy arrays/dataframes/torch tensors.
If objects are not serializable by orjson, fall back to regular equality check. | deep_equal | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
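A quick check of the orjson path in deep_equal, using numpy values that plain dict equality cannot reduce to a single bool (the import path is taken from the row's path column):
import numpy as np
from gradio.utils import deep_equal

a = {"image": np.zeros((2, 2)), "label": "cat"}
b = {"image": np.zeros((2, 2)), "label": "cat"}
print(deep_equal(a, b))  # True: the arrays are compared via their orjson-serialized bytes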
def colab_check() -> bool:
"""
Check if interface is launching from Google Colab
:return is_colab (bool): True or False
"""
is_colab = False
try: # Check if running interactively using ipython.
from IPython.core.getipython import get_ipython
from_ipynb = get_ipython()
if "google.colab" in str(from_ipynb):
is_colab = True
except (ImportError, NameError):
pass
return is_colab | Check if interface is launching from Google Colab
:return is_colab (bool): True or False | colab_check | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def ipython_check() -> bool:
"""
Check if interface is launching from iPython (not colab)
:return is_ipython (bool): True or False
"""
is_ipython = False
try: # Check if running interactively using ipython.
from IPython.core.getipython import get_ipython
if get_ipython() is not None:
is_ipython = True
except (ImportError, NameError):
pass
return is_ipython | Check if interface is launching from iPython (not colab)
:return is_ipython (bool): True or False | ipython_check | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def assert_configs_are_equivalent_besides_ids(
config1: BlocksConfigDict, config2: BlocksConfigDict, root_keys: tuple = ("mode",)
):
"""Allows you to test if two different Blocks configs produce the same demo.
Parameters:
config1 (dict): nested dict with config from the first Blocks instance
config2 (dict): nested dict with config from the second Blocks instance
root_keys (Tuple): an iterable consisting of which keys to test for equivalence at
the root level of the config. By default, only "mode" is tested,
so keys like "version" are ignored.
"""
config1 = copy.deepcopy(config1)
config2 = copy.deepcopy(config2)
config1 = json.loads(json.dumps(config1)) # convert tuples to lists
config2 = json.loads(json.dumps(config2))
for key in root_keys:
if config1[key] != config2[key]:
raise ValueError(f"Configs have different: {key}")
if len(config1["components"]) != len(config2["components"]):
raise ValueError("# of components are different")
def assert_same_components(config1_id, config2_id):
c1 = list(filter(lambda c: c["id"] == config1_id, config1["components"]))
if len(c1) == 0:
raise ValueError(f"Could not find component with id {config1_id}")
c1 = c1[0]
c2 = list(filter(lambda c: c["id"] == config2_id, config2["components"]))
if len(c2) == 0:
raise ValueError(f"Could not find component with id {config2_id}")
c2 = c2[0]
c1 = copy.deepcopy(c1)
c1.pop("id")
c2 = copy.deepcopy(c2)
c2.pop("id")
if c1 != c2:
raise ValueError(f"{c1} does not match {c2}")
def same_children_recursive(children1, children2):
for child1, child2 in zip(children1, children2, strict=False):
assert_same_components(child1["id"], child2["id"])
if "children" in child1 or "children" in child2:
same_children_recursive(child1["children"], child2["children"])
if "layout" in config1:
if "layout" not in config2:
raise ValueError(
"The first config has a layout key, but the second does not"
)
children1 = config1["layout"].get("children", [])
children2 = config2["layout"].get("children", [])
same_children_recursive(children1, children2)
if "dependencies" in config1:
if "dependencies" not in config2:
raise ValueError(
"The first config has a dependencies key, but the second does not"
)
for d1, d2 in zip(
config1["dependencies"], config2["dependencies"], strict=False
):
for t1, t2 in zip(d1.pop("targets"), d2.pop("targets"), strict=False):
assert_same_components(t1[0], t2[0])
for i1, i2 in zip(d1.pop("inputs"), d2.pop("inputs"), strict=False):
assert_same_components(i1, i2)
for o1, o2 in zip(d1.pop("outputs"), d2.pop("outputs"), strict=False):
assert_same_components(o1, o2)
if d1 != d2:
raise ValueError(f"{d1} does not match {d2}")
return True | Allows you to test if two different Blocks configs produce the same demo.
Parameters:
config1 (dict): nested dict with config from the first Blocks instance
config2 (dict): nested dict with config from the second Blocks instance
root_keys (Tuple): an iterable consisting of which keys to test for equivalence at
the root level of the config. By default, only "mode" is tested,
so keys like "version" are ignored. | assert_configs_are_equivalent_besides_ids | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def delete_none(_dict: dict, skip_value: bool = False) -> dict:
"""
Delete keys whose values are None from a dictionary
"""
for key, value in list(_dict.items()):
if skip_value and key == "value":
continue
elif value is None:
del _dict[key]
return _dict | Delete keys whose values are None from a dictionary | delete_none | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
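For example (the import path is taken from the row's path column):
from gradio.utils import delete_none

print(delete_none({"label": "Name", "info": None, "value": None}, skip_value=True))
# {'label': 'Name', 'value': None}: "value" is kept even when None, other None entries are dropped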
def component_or_layout_class(cls_name: str) -> type[Component] | type[BlockContext]:
"""
Returns the component, template, or layout class with the given class name, or
raises a ValueError if not found.
Parameters:
cls_name (str): lower-case string class name of a component
Returns:
cls: the component class
"""
import gradio.components as components_module
from gradio.components import Component
components = {c.__name__.lower(): c for c in get_all_components()}
# add aliases such as 'text'
for name, cls in components_module.__dict__.items():
if isinstance(cls, type) and issubclass(cls, Component):
components[name.lower()] = cls
if cls_name.replace("_", "") in components:
return components[cls_name.replace("_", "")]
raise ValueError(
f"No such component or layout: {cls_name}. "
"It is possible it is a custom component, "
"in which case make sure it is installed and imported in your python session."
) | Returns the component, template, or layout class with the given class name, or
raises a ValueError if not found.
Parameters:
cls_name (str): lower-case string class name of a component
Returns:
cls: the component class | component_or_layout_class | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
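A short usage sketch; the lookup expects lower-case names and ignores underscores (the component names shown are illustrative):
from gradio.utils import component_or_layout_class

print(component_or_layout_class("textbox"))           # the Textbox component class
print(component_or_layout_class("highlighted_text"))  # underscores are stripped before the lookup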
def run_coro_in_background(func: Callable, *args, **kwargs):
"""
Runs coroutines in background.
Warning: be careful not to use this function outside of FastAPI scope, because the event_loop has not started yet.
You can use it in any scope reached by FastAPI app.
correct scope examples: endpoints in routes, Blocks.process_api
incorrect scope examples: Blocks.launch
Use startup_events in routes.py if you need to run a coro in background in Blocks.launch().
Example:
utils.run_coro_in_background(fn, *args, **kwargs)
Args:
func:
*args:
**kwargs:
Returns:
"""
event_loop = asyncio.get_event_loop()
return event_loop.create_task(func(*args, **kwargs)) | Runs coroutines in background.
Warning: be careful not to use this function outside of FastAPI scope, because the event_loop has not started yet.
You can use it in any scope reached by FastAPI app.
correct scope examples: endpoints in routes, Blocks.process_api
incorrect scope examples: Blocks.launch
Use startup_events in routes.py if you need to run a coro in background in Blocks.launch().
Example:
utils.run_coro_in_background(fn, *args, **kwargs)
Args:
func:
*args:
**kwargs:
Returns: | run_coro_in_background | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def run_sync_iterator_async(iterator):
"""Helper for yielding StopAsyncIteration from sync iterators."""
try:
return next(iterator)
except StopIteration:
# raise a ValueError here because co-routines can't raise StopIteration themselves
raise StopAsyncIteration() from None | Helper for yielding StopAsyncIteration from sync iterators. | run_sync_iterator_async | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def set_directory(path: Path | str):
"""Context manager that sets the working directory to the given path."""
origin = Path().absolute()
try:
os.chdir(path)
yield
finally:
os.chdir(origin) | Context manager that sets the working directory to the given path. | set_directory | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def no_raise_exception():
"""Context manager that suppresses exceptions."""
try:
yield
except Exception:
pass | Context manager that suppresses exceptions. | no_raise_exception | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def sanitize_value_for_csv(value: str | float) -> str | float:
"""
Sanitizes a value that is being written to a CSV file to prevent CSV injection attacks.
Reference: https://owasp.org/www-community/attacks/CSV_Injection
"""
if isinstance(value, (float, int)):
return value
unsafe_prefixes = ["=", "+", "-", "@", "\t", "\n"]
unsafe_sequences = [",=", ",+", ",-", ",@", ",\t", ",\n"]
if any(value.startswith(prefix) for prefix in unsafe_prefixes) or any(
sequence in value for sequence in unsafe_sequences
):
value = f"'{value}"
return value | Sanitizes a value that is being written to a CSV file to prevent CSV injection attacks.
Reference: https://owasp.org/www-community/attacks/CSV_Injection | sanitize_value_for_csv | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
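For example, a spreadsheet formula is neutralized while ordinary values pass through unchanged:
from gradio.utils import sanitize_value_for_csv

print(sanitize_value_for_csv("=SUM(A1:A4)"))  # "'=SUM(A1:A4)": the leading quote defuses the formula
print(sanitize_value_for_csv("hello"))        # "hello"
print(sanitize_value_for_csv(3.14))           # 3.14, numbers are returned as-is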
def sanitize_list_for_csv(values: list[Any]) -> list[Any]:
"""
Sanitizes a list of values (or a list of list of values) that is being written to a
CSV file to prevent CSV injection attacks.
"""
sanitized_values = []
for value in values:
if isinstance(value, list):
sanitized_value = [sanitize_value_for_csv(v) for v in value]
sanitized_values.append(sanitized_value)
else:
sanitized_value = sanitize_value_for_csv(value)
sanitized_values.append(sanitized_value)
return sanitized_values | Sanitizes a list of values (or a list of list of values) that is being written to a
CSV file to prevent CSV injection attacks. | sanitize_list_for_csv | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def append_unique_suffix(name: str, list_of_names: list[str]):
"""Appends a numerical suffix to `name` so that it does not appear in `list_of_names`."""
set_of_names: set[str] = set(list_of_names) # for O(1) lookup
if name not in set_of_names:
return name
else:
suffix_counter = 1
new_name = f"{name}_{suffix_counter}"
while new_name in set_of_names:
suffix_counter += 1
new_name = f"{name}_{suffix_counter}"
return new_name | Appends a numerical suffix to `name` so that it does not appear in `list_of_names`. | append_unique_suffix | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def is_special_typed_parameter(name, parameter_types):
from gradio.helpers import EventData
from gradio.oauth import OAuthProfile, OAuthToken
from gradio.routes import Request
"""Checks if parameter has a type hint designating it as a gr.Request, gr.EventData, gr.OAuthProfile or gr.OAuthToken."""
hint = parameter_types.get(name)
if not hint:
return False
is_request = hint in (Request, Optional[Request])
is_oauth_arg = hint in (
OAuthProfile,
Optional[OAuthProfile],
OAuthToken,
Optional[OAuthToken],
)
is_event_data = inspect.isclass(hint) and issubclass(hint, EventData)
return is_request or is_event_data or is_oauth_arg | Checks if parameter has a type hint designating it as a gr.Request, gr.EventData, gr.OAuthProfile or gr.OAuthToken. | is_special_typed_parameter | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def check_function_inputs_match(fn: Callable, inputs: Sequence, inputs_as_dict: bool):
"""
Checks if the input component set matches the function
Returns: None if valid or if the function does not have a signature (e.g. is a built in),
or a string error message if mismatch
"""
try:
signature = inspect.signature(fn)
except ValueError:
return None
parameter_types = get_type_hints(fn)
min_args = 0
max_args = 0
infinity = -1
for name, param in signature.parameters.items():
has_default = param.default != param.empty
if param.kind in [param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD]:
if not is_special_typed_parameter(name, parameter_types):
if not has_default:
min_args += 1
max_args += 1
elif param.kind == param.VAR_POSITIONAL:
max_args = infinity
elif param.kind == param.KEYWORD_ONLY and not has_default:
return f"Keyword-only args must have default values for function {fn}"
arg_count = 1 if inputs_as_dict else len(inputs)
if min_args == max_args and max_args != arg_count:
warnings.warn(
f"Expected {max_args} arguments for function {fn}, received {arg_count}."
)
if arg_count < min_args:
warnings.warn(
f"Expected at least {min_args} arguments for function {fn}, received {arg_count}."
)
if max_args != infinity and arg_count > max_args:
warnings.warn(
f"Expected maximum {max_args} arguments for function {fn}, received {arg_count}."
) | Checks if the input component set matches the function
Returns: None if valid or if the function does not have a signature (e.g. is a built in),
or a string error message if mismatch | check_function_inputs_match | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def is_in_or_equal(path_1: str | Path, path_2: str | Path) -> bool:
"""
True if path_1 is a descendant (i.e. located within) path_2 or if the paths are the
same, returns False otherwise.
Parameters:
path_1: str or Path (to file or directory)
path_2: str or Path (to file or directory)
"""
path_1, path_2 = abspath(path_1).resolve(), abspath(path_2).resolve()
try:
path_1.relative_to(path_2)
return True
except ValueError:
return False | True if path_1 is a descendant (i.e. located within) path_2 or if the paths are the
same, returns False otherwise.
Parameters:
path_1: str or Path (to file or directory)
path_2: str or Path (to file or directory) | is_in_or_equal | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
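For example (the paths are illustrative, POSIX-style):
from gradio.utils import is_in_or_equal

print(is_in_or_equal("/tmp/app/assets/logo.png", "/tmp/app"))  # True: inside the base directory
print(is_in_or_equal("/tmp/app", "/tmp/app"))                  # True: identical paths
print(is_in_or_equal("/etc/passwd", "/tmp/app"))               # False: outside the base directory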
def set_static_paths(paths: str | Path | list[str | Path]) -> None:
"""
Set the static paths to be served by the gradio app.
Static files are served directly from the file system instead of being copied. They are served to users with the Content-Disposition HTTP header set to "inline"
when sending these files to users. This indicates that the file should be displayed directly in the browser window if possible.
This function is useful when you want to serve files that you know will not be modified during the lifetime of the gradio app (like files used in gr.Examples).
By setting static paths, your app will launch faster and it will consume less disk space.
Calling this function will set the static paths for all gradio applications defined in the same interpreter session until it is called again or the session ends.
Parameters:
paths: filepath or list of filepaths or directory names to be served by the gradio app. If it is a directory name, ALL files located within that directory will be considered static and not moved to the gradio cache. This also means that ALL files in that directory will be accessible over the network.
Example:
import gradio as gr
# Paths can be a list of strings or pathlib.Path objects
# corresponding to filenames or directories.
gr.set_static_paths(paths=["test/test_files/"])
# The example files and the default value of the input
# will not be copied to the gradio cache and will be served directly.
demo = gr.Interface(
lambda s: s.rotate(45),
gr.Image(value="test/test_files/cheetah1.jpg", type="pil"),
gr.Image(),
examples=["test/test_files/bus.png"],
)
demo.launch()
"""
from gradio.data_classes import _StaticFiles
if isinstance(paths, (str, Path)):
paths = [Path(paths)]
_StaticFiles.all_paths.extend([Path(p).resolve() for p in paths]) | Set the static paths to be served by the gradio app.
Static files are served directly from the file system instead of being copied. They are served to users with the Content-Disposition HTTP header set to "inline"
when sending these files to users. This indicates that the file should be displayed directly in the browser window if possible.
This function is useful when you want to serve files that you know will not be modified during the lifetime of the gradio app (like files used in gr.Examples).
By setting static paths, your app will launch faster and it will consume less disk space.
Calling this function will set the static paths for all gradio applications defined in the same interpreter session until it is called again or the session ends.
Parameters:
paths: filepath or list of filepaths or directory names to be served by the gradio app. If it is a directory name, ALL files located within that directory will be considered static and not moved to the gradio cache. This also means that ALL files in that directory will be accessible over the network.
Example:
import gradio as gr
# Paths can be a list of strings or pathlib.Path objects
# corresponding to filenames or directories.
gr.set_static_paths(paths=["test/test_files/"])
# The example files and the default value of the input
# will not be copied to the gradio cache and will be served directly.
demo = gr.Interface(
lambda s: s.rotate(45),
gr.Image(value="test/test_files/cheetah1.jpg", type="pil"),
gr.Image(),
examples=["test/test_files/bus.png"],
)
demo.launch() | set_static_paths | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def is_static_file(file_path: Any):
"""Returns True if the file is a static file (and not moved to cache)"""
from gradio.data_classes import _StaticFiles
return _is_static_file(file_path, _StaticFiles.all_paths) | Returns True if the file is a static file (and not moved to cache) | is_static_file | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def _is_static_file(file_path: Any, static_files: list[Path]) -> bool:
"""
Returns True if the file is a static file (i.e. it is in the static files list).
"""
if not isinstance(file_path, (str, Path, FileData)):
return False
if isinstance(file_path, FileData):
file_path = file_path.path
if isinstance(file_path, str):
file_path = Path(file_path)
if not file_path.exists():
return False
return any(is_in_or_equal(file_path, static_file) for static_file in static_files) | Returns True if the file is a static file (i.e. it is in the static files list). | _is_static_file | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0
def find_user_stack_level() -> int:
"""
Find the first stack frame not inside Gradio.
"""
frame = inspect.currentframe()
n = 0
while frame:
fname = inspect.getfile(frame)
if "/gradio/" not in fname.replace(os.sep, "/"):
break
frame = frame.f_back
n += 1
return n | Find the first stack frame not inside Gradio. | find_user_stack_level | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def default_input_labels():
"""
A generator that provides default input labels for components when the user's function
does not have named parameters. The labels are of the form "input 0", "input 1", etc.
"""
n = 0
while True:
yield f"input {n}"
n += 1 | A generator that provides default input labels for components when the user's function
does not have named parameters. The labels are of the form "input 0", "input 1", etc. | default_input_labels | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
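The generator can be sampled like any other iterator:
import itertools
from gradio.utils import default_input_labels

print(list(itertools.islice(default_input_labels(), 3)))  # ['input 0', 'input 1', 'input 2']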
def get_extension_from_file_path_or_url(file_path_or_url: str) -> str:
"""
Returns the file extension (without the dot) from a file path or URL. If the file path or URL does not have a file extension, returns an empty string.
For example, "https://example.com/avatar/xxxx.mp4?se=2023-11-16T06:51:23Z&sp=r" would return "mp4".
"""
parsed_url = urllib.parse.urlparse(file_path_or_url)
file_extension = os.path.splitext(os.path.basename(parsed_url.path))[1]
return file_extension[1:] if file_extension else "" | Returns the file extension (without the dot) from a file path or URL. If the file path or URL does not have a file extension, returns an empty string.
For example, "https://example.com/avatar/xxxx.mp4?se=2023-11-16T06:51:23Z&sp=r" would return "mp4". | get_extension_from_file_path_or_url | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def get_function_params(func: Callable) -> list[tuple[str, bool, Any, Any]]:
"""
Gets the parameters of a function as a list of tuples of the form (name, has_default, default_value, type_hint).
Excludes *args and **kwargs, as well as args that are Gradio-specific, such as gr.Request, gr.EventData, gr.OAuthProfile, and gr.OAuthToken.
"""
params_info = []
signature = inspect.signature(func)
type_hints = get_type_hints(func)
for name, parameter in signature.parameters.items():
if parameter.kind in (
inspect.Parameter.VAR_POSITIONAL,
inspect.Parameter.VAR_KEYWORD,
):
break
if is_special_typed_parameter(name, type_hints):
continue
if parameter.default is inspect.Parameter.empty:
params_info.append((name, False, None, type_hints.get(name, None)))
else:
params_info.append(
(name, True, parameter.default, type_hints.get(name, None))
)
return params_info | Gets the parameters of a function as a list of tuples of the form (name, has_default, default_value, type_hint).
Excludes *args and **kwargs, as well as args that are Gradio-specific, such as gr.Request, gr.EventData, gr.OAuthProfile, and gr.OAuthToken. | get_function_params | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def simplify_file_data_in_str(s):
"""
If a FileData dictionary has been dumped as part of a string, this function will replace the dict with just the str filepath
"""
try:
payload = json.loads(s)
except json.JSONDecodeError:
return s
payload = client_utils.traverse(
payload, lambda x: x["path"], client_utils.is_file_obj_with_meta
)
if isinstance(payload, str):
return payload
return json.dumps(payload) | If a FileData dictionary has been dumped as part of a string, this function will replace the dict with just the str filepath | simplify_file_data_in_str | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def async_lambda(f: Callable) -> Callable:
"""Turn a function into an async function.
Useful for internal event handlers defined as lambda functions used in the codebase
"""
@wraps(f)
async def function_wrapper(*args, **kwargs):
return f(*args, **kwargs)
return function_wrapper | Turn a function into an async function.
Useful for internal event handlers defined as lambda functions used in the codebase | async_lambda | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def connect_heartbeat(config: BlocksConfigDict, blocks) -> bool:
"""
Determines whether a heartbeat is required for a given config.
"""
from gradio.components import State
any_state = any(isinstance(block, State) for block in blocks)
any_unload = False
any_stream = False
if "dependencies" not in config:
raise ValueError(
"Dependencies not found in config. Cannot determine whether"
"heartbeat is required."
)
for dep in config["dependencies"]:
for target in dep["targets"]:
if isinstance(target, (list, tuple)) and len(target) == 2:
any_unload = target[1] == "unload"
if any_unload:
break
any_stream = target[1] == "stream"
if any_stream:
break
return any_state or any_unload or any_stream | Determines whether a heartbeat is required for a given config. | connect_heartbeat | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def deep_hash(obj):
"""Compute a hash for a deeply nested data structure."""
hasher = hashlib.sha256()
if isinstance(obj, (int, float, str, bytes)):
items = obj
elif isinstance(obj, dict):
items = tuple(
[
(k, deep_hash(v))
for k, v in sorted(obj.items(), key=lambda x: hash(x[0]))
]
)
elif isinstance(obj, (list, tuple)):
items = tuple(deep_hash(x) for x in obj)
elif isinstance(obj, set):
items = tuple(deep_hash(x) for x in sorted(obj, key=hash))
elif isinstance(obj, Hashable):
items = str(hash(obj)).encode("utf-8")
else:
items = str(id(obj)).encode("utf-8")
hasher.update(repr(items).encode("utf-8"))
return hasher.hexdigest() | Compute a hash for a deeply nested data structure. | deep_hash | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def safe_join(directory: DeveloperPath, path: UserProvidedPath) -> str:
"""Safely path to a base directory to avoid escaping the base directory.
Borrowed from: werkzeug.security.safe_join"""
_os_alt_seps: list[str] = [
sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/"
]
filename = posixpath.normpath(path)
fullpath = os.path.join(directory, filename)
if (
any(sep in filename for sep in _os_alt_seps)
or os.path.isabs(filename)
or filename == ".."
or filename.startswith("../")
):
raise InvalidPathError()
return fullpath | Safely join a path to a base directory to avoid escaping the base directory.
Borrowed from: werkzeug.security.safe_join | safe_join | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def get_icon_path(icon_name: str) -> str:
"""Get the path to an icon file in the "gradio/icons/" directory
and return it as a static file path so that it can be used by components.
Parameters:
icon_name: Name of the icon file (e.g. "plus.svg")
Returns:
str: Full path to the icon file served as a static file
"""
icon_path = str(
importlib.resources.files("gradio").joinpath(str(Path("icons") / icon_name))
)
if Path(icon_path).exists():
set_static_paths(icon_path)
return icon_path
raise ValueError(f"Icon file not found: {icon_name}") | Get the path to an icon file in the "gradio/icons/" directory
and return it as a static file path so that it can be used by components.
Parameters:
icon_name: Name of the icon file (e.g. "plus.svg")
Returns:
str: Full path to the icon file served as a static file | get_icon_path | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def dict_factory(items):
"""
A utility function to convert a dataclass that includes pydantic fields to a dictionary.
"""
d = {}
for key, value in items:
if hasattr(value, "model_dump"):
d[key] = value.model_dump()
else:
d[key] = value
return d | A utility function to convert a dataclass that includes pydantic fields to a dictionary. | dict_factory | python | gradio-app/gradio | gradio/utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/utils.py | Apache-2.0 |
def reset(self, blocks: Blocks):
"""Reset the state holder with new blocks. Used during reload mode."""
self.session_data = OrderedDict()
# Call set blocks again to set new ids
self.set_blocks(blocks) | Reset the state holder with new blocks. Used during reload mode. | reset | python | gradio-app/gradio | gradio/state_holder.py | https://github.com/gradio-app/gradio/blob/master/gradio/state_holder.py | Apache-2.0 |
def format_image(
im: PIL.Image.Image | None,
type: Literal["numpy", "pil", "filepath"],
cache_dir: str,
name: str = "image",
format: str = "webp",
) -> np.ndarray | PIL.Image.Image | str | None:
"""Helper method to format an image based on self.type"""
if im is None:
return im
if type == "pil":
return im
elif type == "numpy":
return np.array(im)
elif type == "filepath":
try:
path = processing_utils.save_pil_to_cache(
im, cache_dir=cache_dir, name=name, format=format
)
# Catch error if format is not supported by PIL
except (KeyError, ValueError):
path = processing_utils.save_pil_to_cache(
im,
cache_dir=cache_dir,
name=name,
format="png", # type: ignore
)
return path
else:
raise ValueError(
"Unknown type: "
+ str(type)
+ ". Please choose from: 'numpy', 'pil', 'filepath'."
) | Helper method to format an image based on self.type | format_image | python | gradio-app/gradio | gradio/image_utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/image_utils.py | Apache-2.0 |
def extract_svg_content(image_file: str | Path) -> str:
"""
Provided a path or URL to an SVG file, return the SVG content as a string.
Parameters:
image_file: Local file path or URL to an SVG file
Returns:
str: The SVG content as a string
"""
image_file = str(image_file)
if is_http_url_like(image_file):
response = httpx.get(image_file)
response.raise_for_status() # Raise an error for bad status codes
return response.text
else:
with open(image_file) as file:
svg_content = file.read()
return svg_content | Provided a path or URL to an SVG file, return the SVG content as a string.
Parameters:
image_file: Local file path or URL to an SVG file
Returns:
str: The SVG content as a string | extract_svg_content | python | gradio-app/gradio | gradio/image_utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/image_utils.py | Apache-2.0 |
def create_examples(
examples: list[Any] | list[list[Any]] | str,
inputs: Component | Sequence[Component],
outputs: Component | Sequence[Component] | None = None,
fn: Callable | None = None,
cache_examples: bool | None = None,
cache_mode: Literal["eager", "lazy"] | None = None,
examples_per_page: int = 10,
_api_mode: bool = False,
label: str | None = None,
elem_id: str | None = None,
run_on_click: bool = False,
preprocess: bool = True,
postprocess: bool = True,
api_name: str | Literal[False] = "load_example",
batch: bool = False,
*,
example_labels: list[str] | None = None,
visible: bool = True,
):
"""Top-level synchronous function that creates Examples. Provided for backwards compatibility, i.e. so that gr.Examples(...) can be used to create the Examples component."""
examples_obj = Examples(
examples=examples,
inputs=inputs,
outputs=outputs,
fn=fn,
cache_examples=cache_examples,
cache_mode=cache_mode,
examples_per_page=examples_per_page,
_api_mode=_api_mode,
label=label,
elem_id=elem_id,
run_on_click=run_on_click,
preprocess=preprocess,
postprocess=postprocess,
api_name=api_name,
batch=batch,
example_labels=example_labels,
visible=visible,
_initiated_directly=False,
)
examples_obj.create()
return examples_obj | Top-level synchronous function that creates Examples. Provided for backwards compatibility, i.e. so that gr.Examples(...) can be used to create the Examples component. | create_examples | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def __init__(
self,
examples: list[Any] | list[list[Any]] | str,
inputs: Component | Sequence[Component],
outputs: Component | Sequence[Component] | None = None,
fn: Callable | None = None,
cache_examples: bool | None = None,
cache_mode: Literal["eager", "lazy"] | None = None,
examples_per_page: int = 10,
_api_mode: bool = False,
label: str | None = "Examples",
elem_id: str | None = None,
run_on_click: bool = False,
preprocess: bool = True,
postprocess: bool = True,
api_name: str | Literal[False] = "load_example",
batch: bool = False,
*,
example_labels: list[str] | None = None,
visible: bool = True,
_initiated_directly: bool = True,
):
"""
Parameters:
examples: example inputs that can be clicked to populate specific components. Should be nested list, in which the outer list consists of samples and each inner list consists of an input corresponding to each input component. A string path to a directory of examples can also be provided but it should be within the directory with the python file running the gradio app. If there are multiple input components and a directory is provided, a log.csv file must be present in the directory to link corresponding inputs.
inputs: the component or list of components corresponding to the examples
outputs: optionally, provide the component or list of components corresponding to the output of the examples. Required if `cache_examples` is not False.
fn: optionally, provide the function to run to generate the outputs corresponding to the examples. Required if `cache_examples` is not False. Also required if `run_on_click` is True.
cache_examples: If True, caches examples in the server for fast runtime in examples. If "lazy", then examples are cached (for all users of the app) after their first use (by any user of the app). If None, will use the GRADIO_CACHE_EXAMPLES environment variable, which should be either "true" or "false". In HuggingFace Spaces, this parameter is True (as long as `fn` and `outputs` are also provided). The default option otherwise is False.
cache_mode: if "lazy", examples are cached after their first use. If "eager", all examples are cached at app launch. If None, will use the GRADIO_CACHE_MODE environment variable if defined, or default to "eager".
examples_per_page: how many examples to show per page.
label: the label to use for the examples component (by default, "Examples")
elem_id: an optional string that is assigned as the id of this component in the HTML DOM.
run_on_click: if cache_examples is False, clicking on an example does not run the function when an example is clicked. Set this to True to run the function when an example is clicked. Has no effect if cache_examples is True.
preprocess: if True, preprocesses the example input before running the prediction function and caching the output. Only applies if `cache_examples` is not False.
postprocess: if True, postprocesses the example output after running the prediction function and before caching. Only applies if `cache_examples` is not False.
api_name: Defines how the event associated with clicking on the examples appears in the API docs. Can be a string or False. If set to a string, the endpoint will be exposed in the API docs with the given name. If False, the endpoint will not be exposed in the API docs and downstream apps (including those that `gr.load` this app) will not be able to use the example function.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. Used only if cache_examples is not False.
example_labels: A list of labels for each example. If provided, the length of this list should be the same as the number of examples, and these labels will be used in the UI instead of rendering the example values.
visible: If False, the examples component will be hidden in the UI.
"""
if _initiated_directly:
warnings.warn(
"Please use gr.Examples(...) instead of gr.examples.Examples(...) to create the Examples.",
)
self.cache_examples = False
if cache_examples is None:
if (
os.getenv("GRADIO_CACHE_EXAMPLES", "").lower() in ["true", "lazy"]
and fn is not None
and outputs is not None
):
self.cache_examples = True
elif cache_examples == "lazy":
warnings.warn(
"In future versions of Gradio, the `cache_examples` parameter will no longer accept a value of 'lazy'. To enable lazy caching in "
"Gradio, you should set `cache_examples=True`, and `cache_mode='lazy'` instead."
)
self.cache_examples = "lazy"
elif cache_examples in [True, False]:
self.cache_examples = cache_examples
else:
raise ValueError(
f"The `cache_examples` parameter should be either True or False, not {cache_examples}"
)
if self.cache_examples and (fn is None or outputs is None):
raise ValueError("If caching examples, `fn` and `outputs` must be provided")
if (cache_mode_env := os.getenv("GRADIO_CACHE_MODE")) and cache_mode is None:
if cache_mode_env.lower() == "eager":
cache_mode = "eager"
elif cache_mode_env.lower() == "lazy":
cache_mode = "lazy"
else:
cache_mode = "eager"
warnings.warn(
"The `GRADIO_CACHE_MODE` environment variable must be either 'eager' or 'lazy'. "
"Defaulting to 'eager'."
)
if self.cache_examples and cache_mode == "lazy":
self.cache_examples = "lazy"
if not isinstance(inputs, Sequence):
inputs = [inputs]
if outputs and not isinstance(outputs, Sequence):
outputs = [outputs]
working_directory = Path().absolute()
if examples is None:
raise ValueError("The parameter `examples` cannot be None")
elif isinstance(examples, list) and (
len(examples) == 0 or isinstance(examples[0], list)
):
pass
elif (
isinstance(examples, list) and len(inputs) == 1
): # If there is only one input component, examples can be provided as a regular list instead of a list of lists
examples = [[e] for e in examples]
elif isinstance(examples, str):
if not Path(examples).exists():
raise FileNotFoundError(
f"Could not find examples directory: {examples}"
)
working_directory = examples
if not (Path(examples) / LOG_FILE).exists():
if len(inputs) == 1:
examples = [[e] for e in os.listdir(examples)]
else:
raise FileNotFoundError(
"Could not find log file (required for multiple inputs): "
+ LOG_FILE
)
else:
with open(Path(examples) / LOG_FILE) as logs:
examples = list(csv.reader(logs))
examples = [
examples[i][: len(inputs)] for i in range(1, len(examples))
] # remove header and unnecessary columns
else:
raise ValueError(
"The parameter `examples` must either be a string directory or a list"
"(if there is only 1 input component) or (more generally), a nested "
"list, where each sublist represents a set of inputs."
)
input_has_examples = [False] * len(inputs)
for example in examples:
for idx, example_for_input in enumerate(example):
if example_for_input is not None:
try:
input_has_examples[idx] = True
except IndexError:
pass # If there are more example components than inputs, ignore. This can sometimes be intentional (e.g. loading from a log file where outputs and timestamps are also logged)
inputs_with_examples = [
inp for (inp, keep) in zip(inputs, input_has_examples, strict=False) if keep
]
non_none_examples = [
[
ex
for (ex, keep) in zip(example, input_has_examples, strict=False)
if keep
]
for example in examples
]
if example_labels is not None and len(example_labels) != len(examples):
raise ValueError(
"If `example_labels` are provided, the length of `example_labels` must be the same as the number of examples."
)
self.examples = examples
self.non_none_examples = non_none_examples
self.inputs = inputs
self.input_has_examples = input_has_examples
self.inputs_with_examples = inputs_with_examples
self.outputs = outputs or []
self.fn = fn
self._api_mode = _api_mode
self.preprocess = preprocess
self.postprocess = postprocess
self.api_name: str | Literal[False] = api_name
self.batch = batch
self.example_labels = example_labels
self.working_directory = working_directory
from gradio import components
with utils.set_directory(working_directory):
self.dataset = components.Dataset(
components=inputs_with_examples,
samples=copy.deepcopy(non_none_examples),
type="tuple",
label=label,
samples_per_page=examples_per_page,
elem_id=elem_id,
visible=visible,
sample_labels=example_labels,
)
self.cache_logger = CSVLogger(
simplify_file_data=False, verbose=False, dataset_file_name="log.csv"
)
self.cached_folder = utils.get_cache_folder() / str(self.dataset._id)
if (
os.environ.get("GRADIO_RESET_EXAMPLES_CACHE") == "True"
and self.cached_folder.exists()
):
shutil.rmtree(self.cached_folder)
self.cached_file = Path(self.cached_folder) / "log.csv"
self.cached_indices_file = Path(self.cached_folder) / "indices.csv"
self.run_on_click = run_on_click
self.cache_event: Dependency | None = None
self.non_none_processed_examples = UnhashableKeyDict()
if self.dataset.samples:
for index, example in enumerate(self.non_none_examples):
self.non_none_processed_examples[self.dataset.samples[index]] = (
self._get_processed_example(example)
)
if self.cache_examples == "lazy":
print(
f"Will cache examples in '{utils.abspath(self.cached_folder)}' directory at first use.",
end="",
)
if Path(self.cached_file).exists():
print(
"If method or examples have changed since last caching, delete this folder to reset cache."
)
print("\n") | Parameters:
examples: example inputs that can be clicked to populate specific components. Should be nested list, in which the outer list consists of samples and each inner list consists of an input corresponding to each input component. A string path to a directory of examples can also be provided but it should be within the directory with the python file running the gradio app. If there are multiple input components and a directory is provided, a log.csv file must be present in the directory to link corresponding inputs.
inputs: the component or list of components corresponding to the examples
outputs: optionally, provide the component or list of components corresponding to the output of the examples. Required if `cache_examples` is not False.
fn: optionally, provide the function to run to generate the outputs corresponding to the examples. Required if `cache_examples` is not False. Also required if `run_on_click` is True.
cache_examples: If True, caches examples in the server for fast runtime in examples. If "lazy", then examples are cached (for all users of the app) after their first use (by any user of the app). If None, will use the GRADIO_CACHE_EXAMPLES environment variable, which should be either "true" or "false". In HuggingFace Spaces, this parameter is True (as long as `fn` and `outputs` are also provided). The default option otherwise is False.
cache_mode: if "lazy", examples are cached after their first use. If "eager", all examples are cached at app launch. If None, will use the GRADIO_CACHE_MODE environment variable if defined, or default to "eager".
examples_per_page: how many examples to show per page.
label: the label to use for the examples component (by default, "Examples")
elem_id: an optional string that is assigned as the id of this component in the HTML DOM.
run_on_click: if cache_examples is False, clicking on an example does not run the function by default. Set this to True to run the function when an example is clicked. Has no effect if cache_examples is True.
preprocess: if True, preprocesses the example input before running the prediction function and caching the output. Only applies if `cache_examples` is not False.
postprocess: if True, postprocesses the example output after running the prediction function and before caching. Only applies if `cache_examples` is not False.
api_name: Defines how the event associated with clicking on the examples appears in the API docs. Can be a string or False. If set to a string, the endpoint will be exposed in the API docs with the given name. If False, the endpoint will not be exposed in the API docs and downstream apps (including those that `gr.load` this app) will not be able to use the example function.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. Used only if cache_examples is not False.
example_labels: A list of labels for each example. If provided, the length of this list should be the same as the number of examples, and these labels will be used in the UI instead of rendering the example values.
visible: If False, the examples component will be hidden in the UI. | __init__ | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
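A minimal usage sketch of the `gr.Examples` helper documented above, assuming a simple echo function; the component labels and example values are illustrative, not taken from the source.

import gradio as gr

def echo(text):
    return text

with gr.Blocks() as demo:
    inp = gr.Textbox(label="Input")
    out = gr.Textbox(label="Output")
    # Clicking an example populates `inp`; with cache_examples left at False,
    # the function only runs because run_on_click=True.
    gr.Examples(
        examples=[["hello"], ["world"]],
        inputs=inp,
        outputs=out,
        fn=echo,
        run_on_click=True,
    )

demo.launch()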
def _get_processed_example(self, example):
"""
This function is used to get the post-processed example values, ready to be used
in the frontend for each input component. For example, if the input components are
image components, the post-processed example values will be a list of ImageData dictionaries
with the path, url, size, mime_type, orig_name, and is_stream keys. For any input components
that should be skipped (because they are None for all samples), they will simply be absent
from the returned list.
Parameters:
example: a list of example values for each input component, excluding those components
that have all None values
"""
if example in self.non_none_processed_examples:
return self.non_none_processed_examples[example]
with utils.set_directory(self.working_directory):
sub = []
for component, sample in zip(
self.inputs_with_examples, example, strict=False
):
prediction_value = component.postprocess(sample)
if isinstance(prediction_value, (GradioRootModel, GradioModel)):
prediction_value = prediction_value.model_dump()
prediction_value = processing_utils.move_files_to_cache(
prediction_value,
component,
postprocess=True,
)
sub.append(prediction_value)
return sub | This function is used to get the post-processed example values, ready to be used
in the frontend for each input component. For example, if the input components are
image components, the post-processed example values will be a list of ImageData dictionaries
with the path, url, size, mime_type, orig_name, and is_stream keys. For any input components
that should be skipped (because they are None for all samples), they will simply be absent
from the returned list.
Parameters:
example: a list of example values for each input component, excluding those components
that have all None values | _get_processed_example | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def create(self) -> None:
"""Creates the Dataset component to hold the examples"""
blocks_config = get_blocks_context()
self.root_block = Context.root_block or (
blocks_config.root_block if blocks_config else None
)
if blocks_config:
if self.root_block:
self.root_block.extra_startup_events.append(self._start_caching)
if self.cache_examples:
def load_example_with_output(example_tuple):
example_id, example_value = example_tuple
processed_example = self._get_processed_example(
example_value
) + self.load_from_cache(example_id)
return utils.resolve_singleton(processed_example)
self.cache_event = self.load_input_event = self.dataset.click(
load_example_with_output,
inputs=[self.dataset],
outputs=self.inputs + self.outputs, # type: ignore
show_progress="hidden",
postprocess=False,
queue=False,
api_name=self.api_name,
show_api=False,
)
else:
def load_example(example_tuple):
_, example_value = example_tuple
processed_example = self._get_processed_example(example_value)
if len(self.inputs_with_examples) == 1:
return update(
value=processed_example[0],
**self.dataset.component_props[0], # type: ignore
)
return [
update(
value=processed_example[i],
**self.dataset.component_props[i], # type: ignore
)
for i in range(len(self.inputs_with_examples))
]
self.load_input_event = self.dataset.click(
load_example,
inputs=[self.dataset],
outputs=self.inputs_with_examples,
show_progress="hidden",
postprocess=False,
queue=False,
api_name=self.api_name,
show_api=False,
)
if self.run_on_click:
if self.fn is None:
raise ValueError(
"Cannot run_on_click if no function is provided"
)
self.load_input_event.then(
self.fn,
inputs=self.inputs,
outputs=self.outputs,
show_api=False,
)
else:
warnings.warn(
f"If an Examples object is created outside a Blocks Context, make sure to call `examples.dataset.render()`{'and `examples.create()`' if self.cache_examples else ''} to render the examples in the interface."
) | Creates the Dataset component to hold the examples | create | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
async def _postprocess_output(self, output) -> list:
"""
This is a way that we can postprocess the data manually, since we set postprocess=False in the lazy_cache
event handler. The reason we did that is because we don't want to postprocess data if we are loading from
the cache, since that has already been postprocessed. We postprocess this data manually if we are calling
the function using the _handle_callable_as_generator() method.
"""
import gradio as gr
with gr.Blocks() as demo:
[output.render() for output in self.outputs]
demo.load(self.fn, self.inputs, self.outputs)
demo.unrender()
return await demo.postprocess_data(demo.default_config.fns[0], output, None) | This is a way that we can postprocess the data manually, since we set postprocess=False in the lazy_cache
event handler. The reason we did that is because we don't want to postprocess data if we are loading from
the cache, since that has already been postprocessed. We postprocess this data manually if we are calling
the function using the _handle_callable_as_generator() method. | _postprocess_output | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
async def cache(self, example_id: int | None = None) -> None:
"""
Caches examples so that their predictions can be shown immediately.
Parameters:
example_id: The id of the example to process (zero-indexed). If None, all examples are cached.
"""
if self.root_block is None:
raise Error("Cannot cache examples if not in a Blocks context.")
if Path(self.cached_file).exists() and example_id is None:
print(
f"Using cache from '{utils.abspath(self.cached_folder)}' directory. If method or examples have changed since last caching, delete this folder to clear cache.\n"
)
else:
print(f"Caching examples at: '{utils.abspath(self.cached_folder)}'")
self.cache_logger.setup(self.outputs, self.cached_folder)
generated_values = []
if inspect.isgeneratorfunction(self.fn):
def get_final_item(*args): # type: ignore
x = None
generated_values.clear()
for x in self.fn(*args): # noqa: B007 # type: ignore
generated_values.append(x)
return x
fn = get_final_item
elif inspect.isasyncgenfunction(self.fn):
async def get_final_item(*args):
x = None
generated_values.clear()
async for x in self.fn(*args): # noqa: B007 # type: ignore
generated_values.append(x)
return x
fn = get_final_item
else:
fn = self.fn
# create a fake dependency to process the examples and get the predictions
from gradio.events import EventListenerMethod
_, fn_index = self.root_block.default_config.set_event_trigger(
[EventListenerMethod(Context.root_block, "load")],
fn=fn,
inputs=self.inputs,
outputs=self.outputs,
preprocess=self.preprocess and not self._api_mode,
postprocess=self.postprocess and not self._api_mode,
batch=self.batch,
)
if self.outputs is None:
raise ValueError("self.outputs is missing")
for i, example in enumerate(self.non_none_examples):
if example_id is not None and i != example_id:
continue
processed_input = self._get_processed_example(example)
for index, keep in enumerate(self.input_has_examples):
if not keep:
processed_input.insert(index, None)
if self.batch:
processed_input = [[value] for value in processed_input]
with utils.MatplotlibBackendMananger():
prediction = await self.root_block.process_api(
block_fn=self.root_block.default_config.fns[fn_index],
inputs=processed_input,
request=None,
)
output = prediction["data"]
if len(generated_values):
output = await merge_generated_values_into_output(
self.outputs, generated_values, output
)
if self.batch:
output = [value[0] for value in output]
self.cache_logger.flag(output)
# Remove the "fake_event" to prevent bugs in loading interfaces from spaces
self.root_block.default_config.fns.pop(fn_index) | Caches examples so that their predictions can be shown immediately.
Parameters:
example_id: The id of the example to process (zero-indexed). If None, all examples are cached. | cache | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def load_from_cache(self, example_id: int) -> list[Any]:
"""Loads a particular cached example for the interface.
Parameters:
example_id: The id of the example to process (zero-indexed).
"""
if self.cache_examples == "lazy":
if (cached_index := self._get_cached_index_if_cached(example_id)) is None:
client_utils.synchronize_async(self.cache, example_id)
with open(self.cached_indices_file, "a") as f:
f.write(f"{example_id}\n")
with open(self.cached_indices_file) as f:
example_id = len(f.readlines()) - 1
else:
example_id = cached_index
with open(self.cached_file, encoding="utf-8") as cache:
examples = list(csv.reader(cache))
example = examples[example_id + 1] # +1 to adjust for header
output = []
if self.outputs is None:
raise ValueError("self.outputs is missing")
for component, value in zip(self.outputs, example, strict=False):
value_to_use = value
try:
value_as_dict = ast.literal_eval(value)
# File components that output multiple files get saved as a python list
# need to pass the parsed list to serialize
# TODO: Better file serialization in 4.0
if isinstance(value_as_dict, list) and isinstance(
component, components.File
):
value_to_use = value_as_dict
if not utils.is_prop_update(value_as_dict):
raise TypeError("value wasn't an update") # caught below
output.append(value_as_dict)
except (ValueError, TypeError, SyntaxError):
output.append(component.read_from_flag(value_to_use))
return output | Loads a particular cached example for the interface.
Parameters:
example_id: The id of the example to process (zero-indexed). | load_from_cache | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def __init__(
self,
track_tqdm: bool = False,
):
"""
Parameters:
track_tqdm: If True, the Progress object will track any tqdm.tqdm iterations with the tqdm library in the function.
"""
if track_tqdm:
patch_tqdm()
self.track_tqdm = track_tqdm
self.iterables: list[TrackedIterable] = [] | Parameters:
track_tqdm: If True, the Progress object will track any tqdm.tqdm iterations with the tqdm library in the function. | __init__ | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def __next__(self):
"""
Updates progress tracker with next item in iterable.
"""
callback = self._progress_callback()
if callback:
current_iterable = self.iterables[-1]
while (
not hasattr(current_iterable.iterable, "__next__")
and len(self.iterables) > 0
):
current_iterable = self.iterables.pop()
callback(self.iterables)
if current_iterable.index is None:
raise IndexError("Index not set.")
current_iterable.index += 1
try:
return next(current_iterable.iterable) # type: ignore
except StopIteration:
self.iterables.pop()
raise
else:
return self | Updates progress tracker with next item in iterable. | __next__ | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def __call__(
self,
progress: float | tuple[int, int | None] | None,
desc: str | None = None,
total: int | None = None,
unit: str = "steps",
_tqdm=None,
):
"""
Updates progress tracker with progress and message text.
Parameters:
progress: If float, should be between 0 and 1 representing completion. If Tuple, first number represents steps completed, and second value represents total steps or None if unknown. If None, hides progress bar.
desc: description to display.
total: estimated total number of steps.
unit: unit of iterations.
"""
callback = self._progress_callback()
if callback:
if isinstance(progress, tuple):
index, total = progress
progress = None
else:
index = None
callback(
self.iterables
+ [TrackedIterable(None, index, total, desc, unit, _tqdm, progress)]
)
else:
return progress | Updates progress tracker with progress and message text.
Parameters:
progress: If float, should be between 0 and 1 representing completion. If Tuple, first number represents steps completed, and second value represents total steps or None if unknown. If None, hides progress bar.
desc: description to display.
total: estimated total number of steps.
unit: unit of iterations. | __call__ | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
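A hedged sketch of calling a Progress tracker directly inside an event function, based on the `__call__` signature documented above; the function and component choices are illustrative.

import time
import gradio as gr

def process(text, progress=gr.Progress()):
    progress(0, desc="Starting")      # float between 0 and 1
    time.sleep(0.5)
    progress((2, 5), unit="files")    # tuple: (steps completed, total steps)
    time.sleep(0.5)
    progress(1.0, desc="Done")
    return text.upper()

demo = gr.Interface(process, gr.Textbox(), gr.Textbox())
demo.launch()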
def tqdm(
self,
iterable: Iterable | None,
desc: str | None = None,
total: int | None = None,
unit: str = "steps",
_tqdm=None,
):
"""
Attaches progress tracker to iterable, like tqdm.
Parameters:
iterable: iterable to attach progress tracker to.
desc: description to display.
total: estimated total number of steps.
unit: unit of iterations.
"""
callback = self._progress_callback()
if callback:
if iterable is None:
new_iterable = TrackedIterable(None, 0, total, desc, unit, _tqdm)
self.iterables.append(new_iterable)
callback(self.iterables)
return self
length = len(iterable) if hasattr(iterable, "__len__") else total # type: ignore
self.iterables.append(
TrackedIterable(iter(iterable), 0, length, desc, unit, _tqdm)
)
return self | Attaches progress tracker to iterable, like tqdm.
Parameters:
iterable: iterable to attach progress tracker to.
desc: description to display.
total: estimated total number of steps.
unit: unit of iterations. | tqdm | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
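For comparison, a sketch using `Progress.tqdm` to attach the tracker to an iterable, as described above; the function name and sleep duration are illustrative.

import time
import gradio as gr

def slow_reverse(word, progress=gr.Progress()):
    out = ""
    # Each iteration advances the progress bar, tqdm-style.
    for letter in progress.tqdm(list(word), desc="Reversing", unit="chars"):
        time.sleep(0.2)
        out = letter + out
    return out

gr.Interface(slow_reverse, gr.Textbox(), gr.Textbox()).launch()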
def update(self, n=1):
"""
Increases the latest iterable by the specified number of steps.
Parameters:
n: number of steps completed.
"""
callback = self._progress_callback()
if callback and len(self.iterables) > 0:
current_iterable = self.iterables[-1]
if current_iterable.index is None:
raise IndexError("Index not set.")
current_iterable.index += n
callback(self.iterables)
else:
return | Increases the latest iterable by the specified number of steps.
Parameters:
n: number of steps completed. | update | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def close(self, _tqdm):
"""
Removes iterable with given _tqdm.
"""
callback = self._progress_callback()
if callback:
for i in range(len(self.iterables)):
if id(self.iterables[i]._tqdm) == id(_tqdm):
self.iterables.pop(i)
break
callback(self.iterables)
else:
return | Removes iterable with given _tqdm. | close | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def special_args(
fn: Callable,
inputs: list[Any] | None = None,
request: routes.Request | None = None,
event_data: EventData | None = None,
) -> tuple[list, int | None, int | None]:
"""
Checks if the function has special arguments Request or EventData (via annotation) or Progress (via default value).
If inputs is provided, these values will be loaded into the inputs array.
Parameters:
fn: function to check.
inputs: array to load special arguments into.
request: request to load into inputs.
event_data: event-related data to load into inputs.
Returns:
updated inputs, progress index, event data index.
"""
try:
signature = inspect.signature(fn)
except ValueError:
return inputs or [], None, None
type_hints = utils.get_type_hints(fn)
positional_args = []
for param in signature.parameters.values():
if param.kind not in (param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD):
break
positional_args.append(param)
progress_index = None
event_data_index = None
for i, param in enumerate(positional_args):
type_hint = type_hints.get(param.name)
if isinstance(param.default, Progress):
progress_index = i
if inputs is not None:
inputs.insert(i, param.default)
elif type_hint in (routes.Request, Optional[routes.Request]):
if inputs is not None:
inputs.insert(i, request)
elif type_hint in (
oauth.OAuthProfile | None,
oauth.OAuthToken | None,
oauth.OAuthProfile,
oauth.OAuthToken,
):
if inputs is not None:
# Retrieve session from gr.Request, if it exists (i.e. if user is logged in)
session = (
# request.session (if fastapi.Request obj i.e. direct call)
getattr(request, "session", {})
or
# or request.request.session (if gr.Request obj i.e. websocket call)
getattr(getattr(request, "request", None), "session", {})
)
# Inject user profile
if type_hint in (Optional[oauth.OAuthProfile], oauth.OAuthProfile):
oauth_profile = (
session["oauth_info"]["userinfo"]
if "oauth_info" in session
else None
)
if oauth_profile is not None:
oauth_profile = oauth.OAuthProfile(oauth_profile)
elif type_hint == oauth.OAuthProfile:
raise Error(
"This action requires a logged in user. Please sign in and retry."
)
inputs.insert(i, oauth_profile)
# Inject user token
elif type_hint in (Optional[oauth.OAuthToken], oauth.OAuthToken):
oauth_info = session.get("oauth_info", None)
oauth_token = (
oauth.OAuthToken(
token=oauth_info["access_token"],
scope=oauth_info["scope"],
expires_at=oauth_info["expires_at"],
)
if oauth_info is not None
else None
)
if oauth_token is None and type_hint == oauth.OAuthToken:
raise Error(
"This action requires a logged in user. Please sign in and retry."
)
inputs.insert(i, oauth_token)
elif (
type_hint
and inspect.isclass(type_hint)
and issubclass(type_hint, EventData)
):
event_data_index = i
if inputs is not None and event_data is not None:
processing_utils.check_all_files_in_cache(event_data._data)
inputs.insert(i, type_hint(event_data.target, event_data._data))
elif (
param.default is not param.empty and inputs is not None and len(inputs) <= i
):
inputs.insert(i, param.default)
if inputs is not None:
while len(inputs) < len(positional_args):
i = len(inputs)
param = positional_args[i]
if param.default == param.empty:
warnings.warn("Unexpected argument. Filling with None.")
inputs.append(None)
else:
inputs.append(param.default)
return inputs or [], progress_index, event_data_index | Checks if the function has special arguments Request or EventData (via annotation) or Progress (via default value).
If inputs is provided, these values will be loaded into the inputs array.
Parameters:
fn: function to check.
inputs: array to load special arguments into.
request: request to load into inputs.
event_data: event-related data to load into inputs.
Returns:
updated inputs, progress index, event data index. | special_args | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
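The special-argument detection above is what lets user functions declare `gr.Request` (via annotation) or `gr.Progress` (via default value) alongside component inputs; a hedged sketch of such a signature follows, with the handler name and components chosen for illustration.

import gradio as gr

# `request` is injected via its type annotation, `progress` via its default value;
# only `text` is bound to a component input.
def greet(text, request: gr.Request, progress=gr.Progress()):
    progress(0.5, desc="Working")
    host = request.client.host if request else "unknown"
    return f"Hello {text}, from client {host}"

demo = gr.Interface(greet, gr.Textbox(), gr.Textbox())
demo.launch()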
def update(
elem_id: str | None = None,
elem_classes: list[str] | str | None = None,
visible: bool | None = None,
**kwargs: Any,
) -> dict[str, Any]:
"""
Updates a component's properties. When a function passed into a Gradio Interface or a Blocks
event returns a value, it typically updates the value of the output component. But it is also possible
to update the *properties* of an output component (such as the number of lines of a `Textbox` or
the visibility of a `Row`) by returning a component and passing in the parameters to update in
the constructor of the component. Alternatively, you can return `gr.update(...)` with any arbitrary
parameters to update. (This is useful as a shorthand or if the same function can be called with different
components to update.) This method does not work with the `gr.State` component.
Parameters:
elem_id: Use this to update the id of the component in the HTML DOM
elem_classes: Use this to update the classes of the component in the HTML DOM
visible: Use this to update the visibility of the component
kwargs: Any other keyword arguments to update the component's properties.
Example:
import gradio as gr
with gr.Blocks() as demo:
radio = gr.Radio([1, 2, 4], label="Set the value of the number")
number = gr.Number(value=2, interactive=True)
radio.change(fn=lambda value: gr.update(value=value), inputs=radio, outputs=number)
demo.launch()
"""
kwargs["__type__"] = "update"
if elem_id is not None:
kwargs["elem_id"] = elem_id
if elem_classes is not None:
kwargs["elem_classes"] = elem_classes
if visible is not None:
kwargs["visible"] = visible
return kwargs | Updates a component's properties. When a function passed into a Gradio Interface or a Blocks
events returns a value, it typically updates the value of the output component. But it is also possible
to update the *properties* of an output component (such as the number of lines of a `Textbox` or
the visibility of an `Row`) by returning a component and passing in the parameters to update in
the constructor of the component. Alternatively, you can return `gr.update(...)` with any arbitrary
parameters to update. (This is useful as a shorthand or if the same function can be called with different
components to update.) This method does not work with the `gr.State` component.
Parameters:
elem_id: Use this to update the id of the component in the HTML DOM
elem_classes: Use this to update the classes of the component in the HTML DOM
visible: Use this to update the visibility of the component
kwargs: Any other keyword arguments to update the component's properties.
Example:
import gradio as gr
with gr.Blocks() as demo:
radio = gr.Radio([1, 2, 4], label="Set the value of the number")
number = gr.Number(value=2, interactive=True)
radio.change(fn=lambda value: gr.update(value=value), inputs=radio, outputs=number)
demo.launch() | update | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def skip() -> dict:
"""
A special function that can be returned from a Gradio function to skip updating the output component. This may be useful when
you want to update the output component conditionally, and in some cases, you want to skip updating the output component.
If you have multiple output components, you can return `gr.skip()` as part of a tuple to skip updating a specific output component,
or you can return a single `gr.skip()` to skip updating all output components.
"""
return {"__type__": "update"} | A special function that can be returned from a Gradio function to skip updating the output component. This may be useful when
you want to update the output component conditionally, and in some cases, you want to skip updating the output component.
If you have multiple output components, you can return `gr.skip()` as part of a tuple to skip updating a specific output component,
or you can return a single `gr.skip()` to skip updating all output components. | skip | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
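A hedged sketch of conditionally skipping one output with `gr.skip()`, as described above; the components and the condition are illustrative.

import gradio as gr

def maybe_update(n):
    # Only update the second output when n is even; skip it otherwise.
    if n % 2 == 0:
        return n * 2, f"{n} is even"
    return n * 2, gr.skip()

with gr.Blocks() as demo:
    num = gr.Number(value=1)
    doubled = gr.Number()
    note = gr.Textbox()
    num.change(maybe_update, inputs=num, outputs=[doubled, note])

demo.launch()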
def Warning( # noqa: N802
message: str = "Warning issued.",
duration: float | None = 10,
visible: bool = True,
title: str = "Warning",
):
"""
This function allows you to pass custom warning messages to the user. You can do so simply by writing `gr.Warning('message here')` in your function, and when that line is executed the custom message will appear in a modal on the demo. The modal is yellow by default and has the heading: "Warning." Queue must be enabled for this behavior; otherwise, the warning will be printed to the console using the `warnings` library.
Demos: blocks_chained_events
Parameters:
message: The warning message to be displayed to the user. Can be HTML, which will be rendered in the modal.
duration: The duration in seconds that the warning message should be displayed for. If None or 0, the message will be displayed indefinitely until the user closes it.
visible: Whether the warning message should be displayed in the UI.
title: The title to be displayed to the user at the top of the modal.
Example:
import gradio as gr
def hello_world():
gr.Warning('This is a warning message.')
return "hello world"
with gr.Blocks() as demo:
md = gr.Markdown()
demo.load(hello_world, inputs=None, outputs=[md])
demo.launch()
"""
log_message(
message, title=title, level="warning", duration=duration, visible=visible
) | This function allows you to pass custom warning messages to the user. You can do so simply by writing `gr.Warning('message here')` in your function, and when that line is executed the custom message will appear in a modal on the demo. The modal is yellow by default and has the heading: "Warning." Queue must be enabled for this behavior; otherwise, the warning will be printed to the console using the `warnings` library.
Demos: blocks_chained_events
Parameters:
message: The warning message to be displayed to the user. Can be HTML, which will be rendered in the modal.
duration: The duration in seconds that the warning message should be displayed for. If None or 0, the message will be displayed indefinitely until the user closes it.
visible: Whether the warning message should be displayed in the UI.
title: The title to be displayed to the user at the top of the modal.
Example:
import gradio as gr
def hello_world():
gr.Warning('This is a warning message.')
return "hello world"
with gr.Blocks() as demo:
md = gr.Markdown()
demo.load(hello_world, inputs=None, outputs=[md])
demo.launch() | Warning | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def Info( # noqa: N802
message: str = "Info issued.",
duration: float | None = 10,
visible: bool = True,
title: str = "Info",
):
"""
This function allows you to pass custom info messages to the user. You can do so simply by writing `gr.Info('message here')` in your function, and when that line is executed the custom message will appear in a modal on the demo. The modal is gray by default and has the heading: "Info." Queue must be enabled for this behavior; otherwise, the message will be printed to the console.
Demos: blocks_chained_events
Parameters:
message: The info message to be displayed to the user. Can be HTML, which will be rendered in the modal.
duration: The duration in seconds that the info message should be displayed for. If None or 0, the message will be displayed indefinitely until the user closes it.
visible: Whether the info message should be displayed in the UI.
title: The title to be displayed to the user at the top of the modal.
Example:
import gradio as gr
def hello_world():
gr.Info('This is some info.')
return "hello world"
with gr.Blocks() as demo:
md = gr.Markdown()
demo.load(hello_world, inputs=None, outputs=[md])
demo.launch()
"""
log_message(message, title=title, level="info", duration=duration, visible=visible) | This function allows you to pass custom info messages to the user. You can do so simply by writing `gr.Info('message here')` in your function, and when that line is executed the custom message will appear in a modal on the demo. The modal is gray by default and has the heading: "Info." Queue must be enabled for this behavior; otherwise, the message will be printed to the console.
Demos: blocks_chained_events
Parameters:
message: The info message to be displayed to the user. Can be HTML, which will be rendered in the modal.
duration: The duration in seconds that the info message should be displayed for. If None or 0, the message will be displayed indefinitely until the user closes it.
visible: Whether the error message should be displayed in the UI.
title: The title to be displayed to the user at the top of the modal.
Example:
import gradio as gr
def hello_world():
gr.Info('This is some info.')
return "hello world"
with gr.Blocks() as demo:
md = gr.Markdown()
demo.load(hello_world, inputs=None, outputs=[md])
demo.launch() | Info | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def Success( # noqa: N802
message: str = "Success.",
duration: float | None = 10,
visible: bool = True,
title: str = "Success",
):
"""
This function allows you to pass custom success messages to the user. You can do so simply by writing `gr.Success('message here')` in your function, and when that line is executed the custom message will appear in a modal on the demo. The modal is green by default and has the heading: "Success." Queue must be enabled for this behavior; otherwise, the message will be printed to the console.
Parameters:
message: The success message to be displayed to the user. Can be HTML, which will be rendered in the modal.
duration: The duration in seconds that the success message should be displayed for. If None or 0, the message will be displayed indefinitely until the user closes it.
visible: Whether the success message should be displayed in the UI.
title: The title to be displayed to the user at the top of the modal.
Example:
def hello_world():
gr.Success('Operation completed successfully!')
return "hello world"
with gr.Blocks() as demo:
md = gr.Markdown()
demo.load(hello_world, inputs=None, outputs=[md])
demo.launch()
"""
log_message(
message, title=title, level="success", duration=duration, visible=visible
) | This function allows you to pass custom success messages to the user. You can do so simply by writing `gr.Success('message here')` in your function, and when that line is executed the custom message will appear in a modal on the demo. The modal is green by default and has the heading: "Success." Queue must be enabled for this behavior; otherwise, the message will be printed to the console.
Parameters:
message: The success message to be displayed to the user. Can be HTML, which will be rendered in the modal.
duration: The duration in seconds that the success message should be displayed for. If None or 0, the message will be displayed indefinitely until the user closes it.
visible: Whether the success message should be displayed in the UI.
title: The title to be displayed to the user at the top of the modal.
Example:
def hello_world():
gr.Success('Operation completed successfully!')
return "hello world"
with gr.Blocks() as demo:
md = gr.Markdown()
demo.load(hello_world, inputs=None, outputs=[md])
demo.launch() | Success | python | gradio-app/gradio | gradio/helpers.py | https://github.com/gradio-app/gradio/blob/master/gradio/helpers.py | Apache-2.0 |
def is_none(self) -> bool:
"""
Checks if the FileData object is empty, i.e., all attributes are None.
Returns:
bool: True if all attributes (except 'is_stream' and 'meta') are None, False otherwise.
"""
return all(
f is None
for f in [
self.path,
self.url,
self.size,
self.orig_name,
self.mime_type,
]
) | Checks if the FileData object is empty, i.e., all attributes are None.
Returns:
bool: True if all attributes (except 'is_stream' and 'meta') are None, False otherwise. | is_none | python | gradio-app/gradio | gradio/data_classes.py | https://github.com/gradio-app/gradio/blob/master/gradio/data_classes.py | Apache-2.0 |
def from_path(cls, path: str) -> FileData:
"""
Creates a FileData object from a given file path.
Args:
path: The file path.
Returns:
FileData: An instance of FileData representing the file at the specified path.
"""
return cls(path=path) | Creates a FileData object from a given file path.
Args:
path: The file path.
Returns:
FileData: An instance of FileData representing the file at the specified path. | from_path | python | gradio-app/gradio | gradio/data_classes.py | https://github.com/gradio-app/gradio/blob/master/gradio/data_classes.py | Apache-2.0 |
def _copy_to_dir(self, dir: str) -> FileData:
"""
Copies the file to a specified directory and returns a new FileData object representing the copied file.
Args:
dir: The destination directory.
Returns:
FileData: A new FileData object representing the copied file.
Raises:
ValueError: If the source file path is not set.
"""
pathlib.Path(dir).mkdir(exist_ok=True)
new_obj = dict(self)
if not self.path:
raise ValueError("Source file path is not set")
new_name = shutil.copy(self.path, dir)
new_obj["path"] = new_name
return self.__class__(**new_obj) | Copies the file to a specified directory and returns a new FileData object representing the copied file.
Args:
dir: The destination directory.
Returns:
FileData: A new FileData object representing the copied file.
Raises:
ValueError: If the source file path is not set. | _copy_to_dir | python | gradio-app/gradio | gradio/data_classes.py | https://github.com/gradio-app/gradio/blob/master/gradio/data_classes.py | Apache-2.0 |
def is_file_data(cls, obj: Any) -> bool:
"""
Checks if an object is a valid FileData instance.
Args:
obj: The object to check.
Returns:
bool: True if the object is a valid FileData instance, False otherwise.
"""
if isinstance(obj, dict):
try:
return not FileData(**obj).is_none
except (TypeError, ValidationError):
return False
return False | Checks if an object is a valid FileData instance.
Args:
obj: The object to check.
Returns:
bool: True if the object is a valid FileData instance, False otherwise. | is_file_data | python | gradio-app/gradio | gradio/data_classes.py | https://github.com/gradio-app/gradio/blob/master/gradio/data_classes.py | Apache-2.0 |
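A small illustration of the `FileData` helpers defined above, imported from the module path shown in the metadata; the file path and dict literals are hypothetical.

from gradio.data_classes import FileData

fd = FileData.from_path("assets/cat.png")                  # hypothetical path
print(fd.is_none)                                          # False: path is set
print(FileData.is_file_data({"path": "assets/cat.png"}))   # True: valid, non-empty FileData dict
print(FileData.is_file_data({"foo": "bar"}))               # False: not a valid FileData dict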
def __init__(
self, trigger, key_vals, dep_index, fn, associated_timer: Timer | None = None
):
"""
The Dependency object is usually not created directly but is returned when an event listener is set up. It contains the configuration
data for the event listener, and can be used to set up additional event listeners that depend on the completion of the current event
listener using .then() and .success().
Demos: chatbot_consecutive, blocks_chained_events
"""
super().__init__(key_vals)
self.fn = fn
self.associated_timer = associated_timer
self.then = partial(
EventListener(
"then",
trigger_after=dep_index,
trigger_only_on_success=False,
has_trigger=False,
).listener,
trigger,
)
"""
Triggered after the directly preceding event is completed, regardless of success or failure.
"""
self.success = partial(
EventListener(
"success",
trigger_after=dep_index,
trigger_only_on_success=True,
has_trigger=False,
).listener,
trigger,
)
"""
Triggered after the directly preceding event is completed, if it was successful.
""" | The Dependency object is usualy not created directly but is returned when an event listener is set up. It contains the configuration
data for the event listener, and can be used to set up additional event listeners that depend on the completion of the current event
listener using .then() and .success().
Demos: chatbot_consecutive, blocks_chained_events | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, _data: Any):
"""
Parameters:
target: The component object that triggered the event. Can be used to distinguish multiple components bound to the same listener.
"""
self.target = target
self._data = _data | Parameters:
target: The component object that triggered the event. Can be used to distinguish multiple components bound to the same listener. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: Any):
super().__init__(target, data)
self.index: Any = data["index"]
"""
The index of the selected item. Is a tuple if the component is two dimensional or selection is a range.
"""
self.value: Any = data["value"]
"""
The value of the selected item.
"""
self.row_value: Any = data.get("row_value")
"""
The value of the entire row that the selected item belongs to, as a 1-D list. Only implemented for the `Dataframe` component, returns None for other components.
"""
self.col_value: Any = data.get("col_value")
"""
The value of the entire column that the selected item belongs to, as a 1-D list. Only implemented for the `Dataframe` component, returns None for other components.
"""
self.selected: bool = data.get("selected", True)
"""
True if the item was selected, False if deselected.
""" | The index of the selected item. Is a tuple if the component is two dimensional or selection is a range. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: Any):
super().__init__(target, data)
self.key: str = data["key"]
"""
The key that was pressed.
"""
self.input_value: str = data["input_value"]
"""
The displayed value in the input textbox after the key was pressed. This may be different than the `value`
attribute of the component itself, as the `value` attribute of some components (e.g. Dropdown) are not updated
until the user presses Enter.
""" | The key that was pressed. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: FileDataDict):
super().__init__(target, data)
self.file: FileData = FileData(**data)
"""
The file that was deleted, as a FileData object.
""" | The file that was deleted, as a FileData object. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: Any):
super().__init__(target, data)
self.index: int | tuple[int, int] = data["index"]
"""
The index of the liked/disliked item. Is a tuple if the component is two dimensional.
"""
self.value: Any = data["value"]
"""
The value of the liked/disliked item.
"""
self.liked: bool | str = data.get("liked", True)
"""
True if the item was liked, False if disliked, or string value if any other feedback.
""" | The index of the liked/disliked item. Is a tuple if the component is two dimensional. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: Any):
super().__init__(target, data)
self.index: int | tuple[int, int] = data["index"]
"""
The index of the user message that should be retried.
"""
self.value: Any = data["value"]
"""
The value of the user message that should be retried.
""" | The index of the user message that should be retried. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: Any):
super().__init__(target, data)
self.index: int | tuple[int, int] = data["index"]
"""
The index of the user message that should be undone.
"""
self.value: Any = data["value"]
"""
The value of the user message that should be undone.
""" | The index of the user message that should be undone. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: Any):
super().__init__(target, data)
self.index: int | tuple[int, int] = data["index"]
"""
The index of the message that was edited.
"""
self.previous_value: Any = data["previous_value"]
"""
The previous content of the message that was edited.
"""
self.value: Any = data["value"]
"""
The new content of the message that was edited.
""" | The index of the message that was edited. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: FileDataDict):
super().__init__(target, data)
self.file: FileData = FileData(**data)
"""
The file that was downloaded, as a FileData object.
""" | The file that was downloaded, as a FileData object. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def __init__(self, target: Block | None, data: Any):
super().__init__(target, data)
self.value: Any = data["value"]
"""
The value that was copied.
""" | The value that was copied. | __init__ | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
def event_trigger(
block: Block | None,
fn: Callable | None | Literal["decorator"] = "decorator",
inputs: Component
| BlockContext
| Sequence[Component | BlockContext]
| Set[Component | BlockContext]
| None = None,
outputs: Component
| BlockContext
| Sequence[Component | BlockContext]
| Set[Component | BlockContext]
| None = None,
api_name: str | None | Literal[False] = None,
scroll_to_output: bool = False,
show_progress: Literal["full", "minimal", "hidden"] = _show_progress,
show_progress_on: Component | Sequence[Component] | None = None,
queue: bool = True,
batch: bool = False,
max_batch_size: int = 4,
preprocess: bool = True,
postprocess: bool = True,
cancels: dict[str, Any] | list[dict[str, Any]] | None = None,
trigger_mode: Literal["once", "multiple", "always_last"] | None = None,
js: str | Literal[True] | None = None,
concurrency_limit: int | None | Literal["default"] = "default",
concurrency_id: str | None = None,
show_api: bool = True,
time_limit: int | None = None,
stream_every: float = 0.5,
like_user_message: bool = False,
) -> Dependency:
"""
Parameters:
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list.
outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list.
api_name: defines how the endpoint appears in the API docs. Can be a string, None, or False. If set to a string, the endpoint will be exposed in the API docs with the given name. If None (default), the name of the function will be used as the API endpoint. If False, the endpoint will not be exposed in the API docs and downstream apps (including those that `gr.load` this app) will not be able to use this event.
scroll_to_output: If True, will scroll to output component on completion
show_progress: how to show the progress animation while event is running: "full" shows a spinner which covers the output component area as well as a runtime display in the upper right corner, "minimal" only shows the runtime display, "hidden" shows no progress animation at all
show_progress_on: Component or list of components to show the progress animation on. If None, will show the progress animation on all of the output components.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component).
postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser.
cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another components .click method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish.
trigger_mode: If "once" (default for all events except `.change()`) would not allow any submissions while an event is pending. If set to "multiple", unlimited submissions are allowed while pending, and "always_last" (default for `.change()` and `.key_up()` events) would allow a second submission after the pending event is complete.
js: Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs' and 'outputs', return should be a list of values for output components.
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False.
"""
if fn == "decorator":
def wrapper(func):
event_trigger(
block=block,
fn=func,
inputs=inputs,
outputs=outputs,
api_name=api_name,
scroll_to_output=scroll_to_output,
show_progress=show_progress,
show_progress_on=show_progress_on,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
preprocess=preprocess,
postprocess=postprocess,
cancels=cancels,
trigger_mode=trigger_mode,
js=js,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
show_api=show_api,
)
@wraps(func)
def inner(*args, **kwargs):
return func(*args, **kwargs)
return inner
return Dependency(None, {}, None, wrapper)
from gradio.components.base import StreamingInput
if isinstance(block, StreamingInput) and "stream" in block.events:
block.check_streamable() # type: ignore
if isinstance(show_progress, bool):
show_progress = "full" if show_progress else "hidden"
root_block = get_blocks_context()
if root_block is None:
raise AttributeError(
f"Cannot call {_event_name} outside of a gradio.Blocks context."
)
event_target = EventListenerMethod(
block if _has_trigger else None, _event_name
)
dep, dep_index = root_block.set_event_trigger(
[event_target],
fn,
inputs,
outputs,
preprocess=preprocess,
postprocess=postprocess,
scroll_to_output=scroll_to_output,
show_progress=show_progress,
show_progress_on=show_progress_on,
api_name=api_name,
js=js,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
trigger_after=_trigger_after,
trigger_only_on_success=_trigger_only_on_success,
trigger_mode=trigger_mode,
show_api=show_api,
connection=_connection,
time_limit=time_limit,
stream_every=stream_every,
like_user_message=like_user_message,
event_specific_args=[
d["name"]
for d in _event_specific_args
if d.get("component_prop", "true") != "false"
]
if _event_specific_args
else None,
)
set_cancel_events(
[event_target],
cancels,
)
if _callback:
_callback(block)
return Dependency(block, dep.get_config(), dep_index, fn) | Parameters:
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list.
outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list.
api_name: defines how the endpoint appears in the API docs. Can be a string, None, or False. If set to a string, the endpoint will be exposed in the API docs with the given name. If None (default), the name of the function will be used as the API endpoint. If False, the endpoint will not be exposed in the API docs and downstream apps (including those that `gr.load` this app) will not be able to use this event.
scroll_to_output: If True, will scroll to output component on completion
show_progress: how to show the progress animation while event is running: "full" shows a spinner which covers the output component area as well as a runtime display in the upper right corner, "minimal" only shows the runtime display, "hidden" shows no progress animation at all
show_progress_on: Component or list of components to show the progress animation on. If None, will show the progress animation on all of the output components.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component).
postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser.
cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another components .click method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish.
trigger_mode: If "once" (default for all events except `.change()`) would not allow any submissions while an event is pending. If set to "multiple", unlimited submissions are allowed while pending, and "always_last" (default for `.change()` and `.key_up()` events) would allow a second submission after the pending event is complete.
js: Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs' and 'outputs', return should be a list of values for output components.
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False. | _setup.event_trigger | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
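An illustrative sketch applying several of the listener parameters documented above to a `.click()` event; the specific values chosen (api_name, concurrency settings, the stop button, cancels) are assumptions for demonstration, not taken from the source.

import time
import gradio as gr

def generate(prompt):
    for i in range(5):
        time.sleep(1)
        yield f"{prompt} ... step {i + 1}"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Textbox(label="Output")
    go = gr.Button("Generate")
    stop = gr.Button("Stop")
    gen_event = go.click(
        generate,
        inputs=prompt,
        outputs=output,
        api_name="generate",
        show_progress="minimal",
        concurrency_limit=2,
        concurrency_id="gpu",
    )
    # Cancels the queued or still-iterating generation event, per the `cancels` doc above.
    stop.click(None, None, None, cancels=[gen_event])

demo.launch()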
def _setup(
_event_name: str,
_has_trigger: bool,
_show_progress: Literal["full", "minimal", "hidden"],
_callback: Callable | None,
_trigger_after: int | None,
_trigger_only_on_success: bool,
_event_specific_args: list[dict[str, str]],
_connection: Literal["sse", "stream"] = "sse",
):
def event_trigger(
block: Block | None,
fn: Callable | None | Literal["decorator"] = "decorator",
inputs: Component
| BlockContext
| Sequence[Component | BlockContext]
| Set[Component | BlockContext]
| None = None,
outputs: Component
| BlockContext
| Sequence[Component | BlockContext]
| Set[Component | BlockContext]
| None = None,
api_name: str | None | Literal[False] = None,
scroll_to_output: bool = False,
show_progress: Literal["full", "minimal", "hidden"] = _show_progress,
show_progress_on: Component | Sequence[Component] | None = None,
queue: bool = True,
batch: bool = False,
max_batch_size: int = 4,
preprocess: bool = True,
postprocess: bool = True,
cancels: dict[str, Any] | list[dict[str, Any]] | None = None,
trigger_mode: Literal["once", "multiple", "always_last"] | None = None,
js: str | Literal[True] | None = None,
concurrency_limit: int | None | Literal["default"] = "default",
concurrency_id: str | None = None,
show_api: bool = True,
time_limit: int | None = None,
stream_every: float = 0.5,
like_user_message: bool = False,
) -> Dependency:
"""
Parameters:
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list.
outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list.
api_name: defines how the endpoint appears in the API docs. Can be a string, None, or False. If set to a string, the endpoint will be exposed in the API docs with the given name. If None (default), the name of the function will be used as the API endpoint. If False, the endpoint will not be exposed in the API docs and downstream apps (including those that `gr.load` this app) will not be able to use this event.
scroll_to_output: If True, will scroll to output component on completion
show_progress: how to show the progress animation while event is running: "full" shows a spinner which covers the output component area as well as a runtime display in the upper right corner, "minimal" only shows the runtime display, "hidden" shows no progress animation at all
show_progress_on: Component or list of components to show the progress animation on. If None, will show the progress animation on all of the output components.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component).
postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser.
cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another component's .click() method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish.
trigger_mode: If "once" (default for all events except `.change()`), no new submissions are allowed while an event is pending. If "multiple", unlimited submissions are allowed while pending. If "always_last" (default for `.change()` and `.key_up()` events), a second submission is allowed after the pending event is complete.
js: Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs' and 'outputs', return should be a list of values for output components.
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False.
"""
if fn == "decorator":
def wrapper(func):
event_trigger(
block=block,
fn=func,
inputs=inputs,
outputs=outputs,
api_name=api_name,
scroll_to_output=scroll_to_output,
show_progress=show_progress,
show_progress_on=show_progress_on,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
preprocess=preprocess,
postprocess=postprocess,
cancels=cancels,
trigger_mode=trigger_mode,
js=js,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
show_api=show_api,
)
@wraps(func)
def inner(*args, **kwargs):
return func(*args, **kwargs)
return inner
return Dependency(None, {}, None, wrapper)
from gradio.components.base import StreamingInput
if isinstance(block, StreamingInput) and "stream" in block.events:
block.check_streamable() # type: ignore
if isinstance(show_progress, bool):
show_progress = "full" if show_progress else "hidden"
root_block = get_blocks_context()
if root_block is None:
raise AttributeError(
f"Cannot call {_event_name} outside of a gradio.Blocks context."
)
event_target = EventListenerMethod(
block if _has_trigger else None, _event_name
)
dep, dep_index = root_block.set_event_trigger(
[event_target],
fn,
inputs,
outputs,
preprocess=preprocess,
postprocess=postprocess,
scroll_to_output=scroll_to_output,
show_progress=show_progress,
show_progress_on=show_progress_on,
api_name=api_name,
js=js,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
trigger_after=_trigger_after,
trigger_only_on_success=_trigger_only_on_success,
trigger_mode=trigger_mode,
show_api=show_api,
connection=_connection,
time_limit=time_limit,
stream_every=stream_every,
like_user_message=like_user_message,
event_specific_args=[
d["name"]
for d in _event_specific_args
if d.get("component_prop", "true") != "false"
]
if _event_specific_args
else None,
)
set_cancel_events(
[event_target],
cancels,
)
if _callback:
_callback(block)
return Dependency(block, dep.get_config(), dep_index, fn)
event_trigger.event_name = _event_name # type: ignore
event_trigger.has_trigger = _has_trigger # type: ignore
event_trigger.callback = _callback # type: ignore
event_trigger.connection = _connection # type: ignore
event_specific_args = (
[
d["name"]
for d in _event_specific_args
if d.get("component_prop", "true") != "false"
]
if _event_specific_args
else None
)
event_trigger.event_specific_args = event_specific_args # type: ignore
return event_trigger | Parameters:
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list.
outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list.
api_name: defines how the endpoint appears in the API docs. Can be a string, None, or False. If set to a string, the endpoint will be exposed in the API docs with the given name. If None (default), the name of the function will be used as the API endpoint. If False, the endpoint will not be exposed in the API docs and downstream apps (including those that `gr.load` this app) will not be able to use this event.
scroll_to_output: If True, will scroll to output component on completion
show_progress: how to show the progress animation while event is running: "full" shows a spinner which covers the output component area as well as a runtime display in the upper right corner, "minimal" only shows the runtime display, "hidden" shows no progress animation at all
show_progress_on: Component or list of components to show the progress animation on. If None, will show the progress animation on all of the output components.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component).
postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser.
cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another component's .click() method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish.
trigger_mode: If "once" (default for all events except `.change()`), no new submissions are allowed while an event is pending. If "multiple", unlimited submissions are allowed while pending. If "always_last" (default for `.change()` and `.key_up()` events), a second submission is allowed after the pending event is complete.
js: Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs' and 'outputs', return should be a list of values for output components.
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False. | _setup | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
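Note: the `fn == "decorator"` branch above is what lets every listener produced by `_setup` be used either as a regular method call or as a decorator. A minimal sketch of both styles, assuming an ordinary `gr.Blocks` app (the component names are illustrative, not taken from the source):

import gradio as gr

with gr.Blocks() as demo:
    name = gr.Textbox(label="Name")
    greeting = gr.Textbox(label="Greeting")
    btn = gr.Button("Greet")

    # Direct form: fn is passed explicitly.
    btn.click(lambda n: f"Hello {n}!", inputs=name, outputs=greeting)

    # Decorator form: fn is omitted, so event_trigger returns a wrapper that
    # registers the decorated function as the handler (the wrapper() path above).
    @name.submit(inputs=name, outputs=greeting)
    def greet(n):
        return f"Hello {n}!"

demo.launch()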
def on(
triggers: Sequence[EventListenerCallable] | EventListenerCallable | None = None,
fn: Callable | None | Literal["decorator"] = "decorator",
inputs: Component
| BlockContext
| Sequence[Component | BlockContext]
| Set[Component | BlockContext]
| None = None,
outputs: Component
| BlockContext
| Sequence[Component | BlockContext]
| Set[Component | BlockContext]
| None = None,
*,
api_name: str | None | Literal[False] = None,
scroll_to_output: bool = False,
show_progress: Literal["full", "minimal", "hidden"] = "full",
show_progress_on: Component | Sequence[Component] | None = None,
queue: bool = True,
batch: bool = False,
max_batch_size: int = 4,
preprocess: bool = True,
postprocess: bool = True,
cancels: dict[str, Any] | list[dict[str, Any]] | None = None,
trigger_mode: Literal["once", "multiple", "always_last"] | None = None,
js: str | Literal[True] | None = None,
concurrency_limit: int | None | Literal["default"] = "default",
concurrency_id: str | None = None,
show_api: bool = True,
time_limit: int | None = None,
stream_every: float = 0.5,
) -> Dependency:
"""
Sets up an event listener that triggers a function when the specified event(s) occur. This is especially
useful when the same function should be triggered by multiple events. Only a single API endpoint is generated
for all events in the triggers list.
Parameters:
triggers: List of triggers to listen to, e.g. [btn.click, number.change]. If None, will run on app load and changes to any inputs.
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list.
outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list.
api_name: Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the API docs. If set to None, will use the function's name as the endpoint route. If set to a string, the endpoint will be exposed in the API docs with the given name.
scroll_to_output: If True, will scroll to output component on completion
show_progress: how to show the progress animation while event is running: "full" shows a spinner which covers the output component area as well as a runtime display in the upper right corner, "minimal" only shows the runtime display, "hidden" shows no progress animation at all,
show_progress_on: Component or list of components to show the progress animation on. If None, will show the progress animation on all of the output components.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component).
postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser.
cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another component's .click() method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish.
trigger_mode: If "once" (default for all events except `.change()`), no new submissions are allowed while an event is pending. If "multiple", unlimited submissions are allowed while pending. If "always_last" (default for `.change()` and `.key_up()` events), a second submission is allowed after the pending event is complete.
js: Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs', return should be a list of values for output components.
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False.
time_limit: The time limit for the function to run. Parameter only used for the `.stream()` event.
stream_every: The latency (in seconds) at which stream chunks are sent to the backend. Defaults to 0.5 seconds. Parameter only used for the `.stream()` event.
Example:
import gradio as gr
with gr.Blocks() as demo:
with gr.Row():
input = gr.Textbox()
button = gr.Button("Submit")
output = gr.Textbox()
gr.on(
triggers=[button.click, input.submit],
fn=lambda x: x,
inputs=[input],
outputs=[output]
)
demo.launch()
"""
from gradio.blocks import Block
if not isinstance(triggers, Sequence) and triggers is not None:
triggers = [triggers]
triggers_typed = cast(Sequence[EventListener], triggers)
if isinstance(inputs, Block):
inputs = [inputs]
if fn == "decorator":
def wrapper(func):
on(
triggers,
fn=func,
inputs=inputs,
outputs=outputs,
api_name=api_name,
scroll_to_output=scroll_to_output,
show_progress=show_progress,
show_progress_on=show_progress_on,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
preprocess=preprocess,
postprocess=postprocess,
cancels=cancels,
js=js,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
show_api=show_api,
trigger_mode=trigger_mode,
time_limit=time_limit,
stream_every=stream_every,
)
@wraps(func)
def inner(*args, **kwargs):
return func(*args, **kwargs)
return inner
return Dependency(None, {}, None, wrapper)
root_block = get_blocks_context()
if root_block is None:
raise Exception("Cannot call on() outside of a gradio.Blocks context.")
if triggers is None:
methods = (
[EventListenerMethod(input, "change") for input in inputs]
if inputs is not None
else []
) + [EventListenerMethod(root_block, "load")] # type: ignore
else:
methods = [
EventListenerMethod(t.__self__ if t.has_trigger else None, t.event_name) # type: ignore
for t in triggers_typed
]
if triggers:
for trigger in triggers:
if trigger.callback: # type: ignore
trigger.callback(trigger.__self__) # type: ignore
dep, dep_index = root_block.set_event_trigger(
methods,
fn,
inputs,
outputs,
preprocess=preprocess,
postprocess=postprocess,
scroll_to_output=scroll_to_output,
show_progress=show_progress,
show_progress_on=show_progress_on,
api_name=api_name,
js=js,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
show_api=show_api,
trigger_mode=trigger_mode,
connection="stream"
if any(t.connection == "stream" for t in (triggers_typed or []))
else "sse",
event_specific_args=[
a
for t in (triggers_typed or [])
for a in cast(list[str], t.event_specific_args or [])
],
time_limit=time_limit,
stream_every=stream_every,
)
set_cancel_events(methods, cancels)
return Dependency(None, dep.get_config(), dep_index, fn) | Sets up an event listener that triggers a function when the specified event(s) occur. This is especially
useful when the same function should be triggered by multiple events. Only a single API endpoint is generated
for all events in the triggers list.
Parameters:
triggers: List of triggers to listen to, e.g. [btn.click, number.change]. If None, will run on app load and changes to any inputs.
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list.
outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list.
api_name: Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the API docs. If set to None, will use the function's name as the endpoint route. If set to a string, the endpoint will be exposed in the API docs with the given name.
scroll_to_output: If True, will scroll to output component on completion
show_progress: how to show the progress animation while event is running: "full" shows a spinner which covers the output component area as well as a runtime display in the upper right corner, "minimal" only shows the runtime display, "hidden" shows no progress animation at all,
show_progress_on: Component or list of components to show the progress animation on. If None, will show the progress animation on all of the output components.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component).
postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser.
cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another component's .click() method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish.
trigger_mode: If "once" (default for all events except `.change()`), no new submissions are allowed while an event is pending. If "multiple", unlimited submissions are allowed while pending. If "always_last" (default for `.change()` and `.key_up()` events), a second submission is allowed after the pending event is complete.
js: Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs', return should be a list of values for output components.
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False.
time_limit: The time limit for the function to run. Parameter only used for the `.stream()` event.
stream_every: The latency (in seconds) at which stream chunks are sent to the backend. Defaults to 0.5 seconds. Parameter only used for the `.stream()` event.
Example:
import gradio as gr
with gr.Blocks() as demo:
with gr.Row():
input = gr.Textbox()
button = gr.Button("Submit")
output = gr.Textbox()
gr.on(
triggers=[button.click, input.submit],
fn=lambda x: x,
inputs=[input],
outputs=[output]
)
demo.launch() | on | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0 |
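As with the individual listeners, `gr.on` returns a decorator when `fn` is left at its "decorator" default (the wrapper() path above). A short sketch of that form, with illustrative component names:

import gradio as gr

with gr.Blocks() as demo:
    text = gr.Textbox()
    btn = gr.Button("Echo")
    out = gr.Textbox()

    # fn is omitted, so gr.on(...) returns a decorator that registers `echo`
    # for both triggers under a single API endpoint.
    @gr.on(triggers=[btn.click, text.submit], inputs=text, outputs=out)
    def echo(value):
        return value

demo.launch()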
def api(
fn: Callable | Literal["decorator"] = "decorator",
*,
api_name: str | None | Literal[False] = None,
queue: bool = True,
batch: bool = False,
max_batch_size: int = 4,
concurrency_limit: int | None | Literal["default"] = "default",
concurrency_id: str | None = None,
show_api: bool = True,
time_limit: int | None = None,
stream_every: float = 0.5,
) -> Dependency:
"""
Sets up an API endpoint for a generic function that can be called via the gradio client. Derives its type from type-hints in the function signature.
Parameters:
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
api_name: Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the API docs. If set to None, will use the function's name as the endpoint route. If set to a string, the endpoint will be exposed in the API docs with the given name.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False.
time_limit: The time limit for the function to run. Parameter only used for the `.stream()` event.
stream_every: The latency (in seconds) at which stream chunks are sent to the backend. Defaults to 0.5 seconds. Parameter only used for the `.stream()` event.
Example:
    import gradio as gr
    with gr.Blocks() as demo:
        def add(a: int, b: int) -> int:
            return a + b
        gr.api(add, api_name="add")
    demo.launch()
"""
if fn == "decorator":
def wrapper(func):
api(
fn=func,
api_name=api_name,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
show_api=show_api,
time_limit=time_limit,
stream_every=stream_every,
)
@wraps(func)
def inner(*args, **kwargs):
return func(*args, **kwargs)
return inner
return Dependency(None, {}, None, wrapper)
root_block = get_blocks_context()
if root_block is None:
raise Exception("Cannot call api() outside of a gradio.Blocks context.")
from gradio.components.api_component import Api
fn_params = get_function_params(fn)
return_types = get_return_types(fn)
def ordinal(n):
return f"{n}{'th' if 10 <= n % 100 <= 20 else {1: 'st', 2: 'nd', 3: 'rd'}.get(n % 10, 'th')}"
if any(param[3] is None for param in fn_params):
raise ValueError(
"API endpoints must have type hints. Please specify a type hint for all parameters."
)
inputs = [
Api(
default_value if has_default else None,
python_type_to_json_schema(_type),
ordinal(i + 1),
)
for i, (_, has_default, default_value, _type) in enumerate(fn_params)
]
outputs = [
Api(None, python_type_to_json_schema(type), ordinal(i + 1))
for i, type in enumerate(return_types)
]
dep, dep_index = root_block.set_event_trigger(
[],
fn,
inputs,
outputs,
preprocess=False,
postprocess=False,
scroll_to_output=False,
show_progress="hidden",
api_name=api_name,
js=None,
concurrency_limit=concurrency_limit,
concurrency_id=concurrency_id,
queue=queue,
batch=batch,
max_batch_size=max_batch_size,
show_api=show_api,
trigger_mode=None,
time_limit=time_limit,
stream_every=stream_every,
)
return Dependency(None, dep.get_config(), dep_index, fn) | Sets up an API endpoint for a generic function that can be called via the gradio client. Derives its type from type-hints in the function signature.
Parameters:
fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component.
api_name: Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the API docs. If set to None, will use the function's name as the endpoint route. If set to a string, the endpoint will be exposed in the API docs with the given name.
queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component.
max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True)
concurrency_limit: If set, this is the maximum number of this event that can be running simultaneously. Can be set to None to mean no concurrency_limit (any number of this event can be running simultaneously). Set to "default" to use the default concurrency limit (defined by the `default_concurrency_limit` parameter in `Blocks.queue()`, which itself is 1 by default).
concurrency_id: If set, this is the id of the concurrency group. Events with the same concurrency_id will be limited by the lowest set concurrency_limit.
show_api: whether to show this event in the "view API" page of the Gradio app, or in the ".view_api()" method of the Gradio clients. Unlike setting api_name to False, setting show_api to False will still allow downstream apps as well as the Clients to use this event. If fn is None, show_api will automatically be set to False.
time_limit: The time limit for the function to run. Parameter only used for the `.stream()` event.
stream_every: The latency (in seconds) at which stream chunks are sent to the backend. Defaults to 0.5 seconds. Parameter only used for the `.stream()` event.
Example:
    import gradio as gr
    with gr.Blocks() as demo:
        def add(a: int, b: int) -> int:
            return a + b
        gr.api(add, api_name="add")
    demo.launch() | api | python | gradio-app/gradio | gradio/events.py | https://github.com/gradio-app/gradio/blob/master/gradio/events.py | Apache-2.0
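Because `api()` registers an ordinary dependency with an `api_name`, the endpoint can be called from the Python client like any other. A hedged sketch, assuming an app that exposed a type-hinted function via `gr.api(add, api_name="add")` and is reachable at the address below:

from gradio_client import Client

url = "http://127.0.0.1:7860/"                  # assumed address of the running app
client = Client(url)
result = client.predict(1, 2, api_name="/add")  # positional args map to the type-hinted parameters
print(result)                                   # expected: 3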
def setup(self, components: Sequence[Component], flagging_dir: str):
"""
This method should be overridden to ensure that everything is set up correctly for flag().
This method gets called once at the beginning of the Interface.launch() method.
Parameters:
components: Set of components that will provide flagged data.
flagging_dir: A string, typically containing the path to the directory where the flagging file should be stored (provided as an argument to Interface.__init__()).
"""
pass | This method should be overridden to ensure that everything is set up correctly for flag().
This method gets called once at the beginning of the Interface.launch() method.
Parameters:
components: Set of components that will provide flagged data.
flagging_dir: A string, typically containing the path to the directory where the flagging file should be stored (provided as an argument to Interface.__init__()). | setup | python | gradio-app/gradio | gradio/flagging.py | https://github.com/gradio-app/gradio/blob/master/gradio/flagging.py | Apache-2.0 |
def flag(
self,
flag_data: list[Any],
flag_option: str | None = None,
username: str | None = None,
) -> int:
"""
This method should be overridden by the FlaggingCallback subclass and may contain optional additional arguments.
This gets called every time the <flag> button is pressed.
Parameters:
flag_data: The data to be flagged.
flag_option (optional): In the case that flagging_options are provided, the flag option that is being used.
username (optional): The username of the user that is flagging the data, if logged in.
Returns:
(int) The total number of samples that have been flagged.
"""
pass | This method should be overridden by the FlaggingCallback subclass and may contain optional additional arguments.
This gets called every time the <flag> button is pressed.
Parameters:
interface: The Interface object that is being used to launch the flagging interface.
flag_data: The data to be flagged.
flag_option (optional): In the case that flagging_options are provided, the flag option that is being used.
username (optional): The username of the user that is flagging the data, if logged in.
Returns:
(int) The total number of samples that have been flagged. | flag | python | gradio-app/gradio | gradio/flagging.py | https://github.com/gradio-app/gradio/blob/master/gradio/flagging.py | Apache-2.0 |
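Taken together with setup() above, these two methods are the whole FlaggingCallback contract. A minimal, hedged sketch of a custom callback that appends flagged rows to a JSON-lines file; the JSONLinesLogger class, the file name, and the `flagging_callback` wiring are illustrative assumptions, not from the source:

import json
from pathlib import Path

import gradio as gr


class JSONLinesLogger(gr.FlaggingCallback):
    def setup(self, components, flagging_dir):
        self.components = components
        self.path = Path(flagging_dir) / "flagged.jsonl"   # assumed file name
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.count = 0

    def flag(self, flag_data, flag_option=None, username=None):
        row = {"data": flag_data, "option": flag_option, "user": username}
        with open(self.path, "a") as f:
            f.write(json.dumps(row, default=str) + "\n")
        self.count += 1
        return self.count   # total number of samples flagged so far


demo = gr.Interface(lambda x: x, "textbox", "textbox", flagging_callback=JSONLinesLogger())
demo.launch()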
def __init__(
self,
simplify_file_data: bool = True,
verbose: bool = True,
dataset_file_name: str | None = None,
):
"""
Parameters:
simplify_file_data: If True, the file data will be simplified before being written to the CSV file. If CSVLogger is being used to cache examples, this is set to False to preserve the original FileData class
verbose: If True, prints messages to the console about the dataset file creation
dataset_file_name: The name of the dataset file to be created (should end in ".csv"). If None, the dataset file will be named "dataset1.csv" or the next available number.
"""
self.simplify_file_data = simplify_file_data
self.verbose = verbose
self.dataset_file_name = dataset_file_name
self.lock = (
Lock() if not wasm_utils.IS_WASM else contextlib.nullcontext()
) # The multiprocessing module doesn't work on Lite. | Parameters:
simplify_file_data: If True, the file data will be simplified before being written to the CSV file. If CSVLogger is being used to cache examples, this is set to False to preserve the original FileData class
verbose: If True, prints messages to the console about the dataset file creation
dataset_file_name: The name of the dataset file to be created (should end in ".csv"). If None, the dataset file will be named "dataset1.csv" or the next available number. | __init__ | python | gradio-app/gradio | gradio/flagging.py | https://github.com/gradio-app/gradio/blob/master/gradio/flagging.py | Apache-2.0 |
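For reference, a hedged sketch of wiring this logger into an Interface; the constructor arguments mirror the parameters documented above, and the file name and demo function are illustrative:

import gradio as gr

logger = gr.CSVLogger(simplify_file_data=True, verbose=True, dataset_file_name="my_flags.csv")
demo = gr.Interface(lambda x: x.upper(), "textbox", "textbox", flagging_callback=logger)
demo.launch()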
def _resolve_concurrency_limit(
self, default_concurrency_limit: int | None | Literal["not_set"]
) -> int | None:
"""
Handles the logic of resolving the default_concurrency_limit as this can be specified via a combination
of the `default_concurrency_limit` parameter of the `Blocks.queue()` or the `GRADIO_DEFAULT_CONCURRENCY_LIMIT`
environment variable. The parameter in `Blocks.queue()` takes precedence over the environment variable.
Parameters:
default_concurrency_limit: The default concurrency limit, as specified by a user in `Blocks.queue()`.
"""
if default_concurrency_limit != "not_set":
return default_concurrency_limit
if default_concurrency_limit_env := os.environ.get(
"GRADIO_DEFAULT_CONCURRENCY_LIMIT"
):
if default_concurrency_limit_env.lower() == "none":
return None
else:
return int(default_concurrency_limit_env)
else:
return 1 | Handles the logic of resolving the default_concurrency_limit as this can be specified via a combination
of the `default_concurrency_limit` parameter of the `Blocks.queue()` or the `GRADIO_DEFAULT_CONCURRENCY_LIMIT`
environment variable. The parameter in `Blocks.queue()` takes precedence over the environment variable.
Parameters:
default_concurrency_limit: The default concurrency limit, as specified by a user in `Blocks.queue()`. | _resolve_concurrency_limit | python | gradio-app/gradio | gradio/queueing.py | https://github.com/gradio-app/gradio/blob/master/gradio/queueing.py | Apache-2.0
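The precedence described above is: an explicit value passed to `Blocks.queue()` wins, otherwise the `GRADIO_DEFAULT_CONCURRENCY_LIMIT` environment variable applies (with "none" meaning unlimited), otherwise the limit defaults to 1. A short hedged sketch:

import os

import gradio as gr

os.environ["GRADIO_DEFAULT_CONCURRENCY_LIMIT"] = "4"   # used only if queue() leaves the limit "not_set"

with gr.Blocks() as demo:
    gr.Textbox()

demo.queue(default_concurrency_limit=8)   # explicit argument takes precedence: limit is 8
# demo.queue()                            # left unset: the env var applies, limit would be 4
# GRADIO_DEFAULT_CONCURRENCY_LIMIT="none" # would mean no per-event concurrency limit
demo.launch()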
async def start_progress_updates(self) -> None:
"""
Because progress updates can be very frequent, we do not necessarily want to send a message per update.
Rather, we check for progress updates at regular intervals, and send a message if there is a pending update.
Consecutive progress updates between sends will overwrite each other so only the most recent update will be sent.
"""
while not self.stopped:
events = [evt for job in self.active_jobs if job is not None for evt in job]
if len(events) == 0:
await asyncio.sleep(self.progress_update_sleep_when_free)
continue
for event in events:
if event.progress_pending and event.progress:
event.progress_pending = False
self.send_message(event, event.progress)
await asyncio.sleep(self.progress_update_sleep_when_free) | Because progress updates can be very frequent, we do not necessarily want to send a message per update.
Rather, we check for progress updates at regular intervals, and send a message if there is a pending update.
Consecutive progress updates between sends will overwrite each other so only the most recent update will be sent. | start_progress_updates | python | gradio-app/gradio | gradio/queueing.py | https://github.com/gradio-app/gradio/blob/master/gradio/queueing.py | Apache-2.0 |
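From the application side this throttling is transparent: a handler may emit progress updates far more often than the send interval, and only the most recent pending value is forwarded on each pass of the loop above. A sketch using the public `gr.Progress` helper (the demo wiring is illustrative):

import time

import gradio as gr


def long_task(steps, progress=gr.Progress()):
    # progress.tqdm emits an update on every iteration -- far more often than
    # the queue's send interval, so intermediate values are coalesced.
    for _ in progress.tqdm(range(int(steps))):
        time.sleep(0.01)
    return f"done after {int(steps)} steps"


demo = gr.Interface(long_task, gr.Slider(1, 500, value=200, step=1), "textbox")
demo.launch()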
async def notify_clients(self) -> None:
"""
Notify clients about event statuses in the queue periodically.
"""
while not self.stopped:
await asyncio.sleep(self.update_intervals)
if len(self) > 0:
for concurrency_id in self.event_queue_per_concurrency_id:
self.broadcast_estimations(concurrency_id) | Notify clients about event statuses in the queue periodically. | notify_clients | python | gradio-app/gradio | gradio/queueing.py | https://github.com/gradio-app/gradio/blob/master/gradio/queueing.py | Apache-2.0
def to_binary(x: str | dict) -> bytes:
"""Converts a base64 string or dictionary to a binary string that can be sent in a POST."""
if isinstance(x, dict):
if x.get("data"):
base64str = x["data"]
else:
base64str = client_utils.encode_url_or_file_to_base64(x["path"])
else:
base64str = x
return base64.b64decode(extract_base64_data(base64str)) | Converts a base64 string or dictionary to a binary string that can be sent in a POST. | to_binary | python | gradio-app/gradio | gradio/processing_utils.py | https://github.com/gradio-app/gradio/blob/master/gradio/processing_utils.py | Apache-2.0 |
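A brief, hedged sketch of the two input shapes this helper accepts (it is an internal utility, so the import path and the example file are assumptions):

import base64

from gradio import processing_utils

# A dict carrying a base64 "data" payload (built here from a known byte string).
payload = "data:application/octet-stream;base64," + base64.b64encode(b"hello").decode()
raw = processing_utils.to_binary({"data": payload})   # -> b"hello"

# A dict carrying only a "path": the file is read and base64-encoded first.
raw = processing_utils.to_binary({"path": "example.png"})   # assumed local file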