3v324v23 committed
Commit 7d96495 · 1 Parent(s): af44f8f

adding code documentation

Files changed (1)
  1. app.py +42 -0
app.py CHANGED
@@ -43,6 +43,22 @@ client_cache = OrderedDict()
 MAX_CACHE_SIZE = 15
 default_client=Client("pi19404/ai-worker", hf_token=API_TOKEN)
 def get_client_for_ip(ip_address,x_ip_token):
+    """
+    Retrieve or create a client for the given IP address.
+
+    This function implements a caching mechanism to store up to MAX_CACHE_SIZE clients.
+    If a client for the given IP exists in the cache, it's returned and moved to the end
+    of the cache (marking it as most recently used). If not, a new client is created,
+    added to the cache, and the least recently used client is removed if the cache is full.
+
+    Args:
+        ip_address (str): The IP address of the client.
+        x_ip_token (str): The X-IP-Token header value for the client.
+
+    Returns:
+        Client: A Gradio client instance for the given IP address.
+    """
+
     if x_ip_token is None:
         x_ip_token=ip_address
 
@@ -55,6 +71,7 @@ def get_client_for_ip(ip_address,x_ip_token):
         # Move the accessed item to the end (most recently used)
         client_cache.move_to_end(x_ip_token)
         return client_cache[x_ip_token]
+
     # Create a new client
     new_client = Client("pi19404/ai-worker", hf_token=API_TOKEN, headers={"X-IP-Token": x_ip_token})
     # Add to cache, removing oldest if necessary
@@ -66,6 +83,21 @@ def get_client_for_ip(ip_address,x_ip_token):
     return new_client
 
 def set_client_for_session(request: gr.Request):
+    """
+    Set up a client for the current session and collect request headers.
+
+    This function is called when a new session is initiated. It retrieves or creates
+    a client for the session's IP address and collects all request headers for debugging.
+
+    Args:
+        request (gr.Request): The Gradio request object for the current session.
+
+    Returns:
+        tuple: A tuple containing:
+            - Client: The Gradio client instance for the session.
+            - str: A JSON string of all request headers.
+    """
+
     # Collect all headers in a dictionary
     all_headers = {header: value for header, value in request.headers.items()}
 
@@ -125,6 +157,16 @@ def my_inference_function(client,input_data, output_data,mode, max_length, max_n
         return json.dumps({"error": str(e)})
 
 with gr.Blocks() as demo:
+    """
+    Main Gradio interface setup.
+
+    This block sets up the Gradio interface, including:
+    - A State component to store the client for the session.
+    - A JSON component to display request headers for debugging.
+    - Other UI components (not shown in this snippet).
+    - A load event that calls set_client_for_session when the interface is loaded.
+    """
+
     gr.Markdown("## LLM Safety Evaluation")
     client = gr.State()
     with gr.Tab("ShieldGemma2"):
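
For readers unfamiliar with the pattern the new get_client_for_ip docstring describes, below is a minimal standalone sketch of an OrderedDict-based LRU cache. It is illustrative only: make_client is a hypothetical stand-in for the Space's Client("pi19404/ai-worker", hf_token=API_TOKEN, headers={"X-IP-Token": x_ip_token}) call, so the sketch runs without a token or network access.

from collections import OrderedDict

MAX_CACHE_SIZE = 15
client_cache = OrderedDict()

def make_client(x_ip_token):
    # Hypothetical stand-in for the real gradio_client.Client(...) call;
    # returns a plain dict so this sketch runs offline.
    return {"X-IP-Token": x_ip_token}

def get_client_for_ip(ip_address, x_ip_token):
    # Fall back to the IP address when no X-IP-Token header was provided.
    if x_ip_token is None:
        x_ip_token = ip_address
    if x_ip_token in client_cache:
        # Cache hit: mark as most recently used and return it.
        client_cache.move_to_end(x_ip_token)
        return client_cache[x_ip_token]
    # Cache miss: create a new client, then evict the least recently
    # used entry if the cache has grown past MAX_CACHE_SIZE.
    new_client = make_client(x_ip_token)
    client_cache[x_ip_token] = new_client
    if len(client_cache) > MAX_CACHE_SIZE:
        client_cache.popitem(last=False)
    return new_client

OrderedDict.move_to_end and popitem(last=False) give O(1) least-recently-used bookkeeping without any extra dependency, which is why the commit's docstring describes the cache in those terms.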
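
The set_client_for_session and gr.Blocks docstrings describe wiring this per-session client into the UI through a gr.State and a load event. A minimal sketch of that wiring, assuming the get_client_for_ip helper from the previous sketch; the headers_box component name is illustrative, and the Space's real layout also includes the ShieldGemma2 tab and other components not shown here.

import json
import gradio as gr

def set_client_for_session(request: gr.Request):
    # Collect all request headers in a dictionary (useful for debugging).
    all_headers = {header: value for header, value in request.headers.items()}
    # The X-IP-Token header identifies the visitor; get_client_for_ip falls
    # back to the client IP address when the header is absent.
    client = get_client_for_ip(request.client.host, all_headers.get("x-ip-token"))
    # Return the client for the gr.State slot and the headers as a JSON string.
    return client, json.dumps(all_headers)

with gr.Blocks() as demo:
    gr.Markdown("## LLM Safety Evaluation")
    client = gr.State()
    headers_box = gr.JSON(label="Request headers (debug)")  # illustrative name
    # Populate the session state and the debug panel when the page loads.
    demo.load(set_client_for_session, inputs=None, outputs=[client, headers_box])

Calling demo.launch() would then serve the app, with each new session receiving its own cached client.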