ThorbenFroehlking committed on
Commit c396e30 · 1 Parent(s): 65c015c
.gradio/cached_examples/43/log.csv ADDED
@@ -0,0 +1,4 @@
+ PyMOL Visualization Commands,Interactive 3D Structure,Download Results,component 3,timestamp
+ Failed to create chain-specific PDB: invalid literal for int() with base 10: 'THR',,,"{'visible': False, '__type__': 'update'}",2025-04-09 13:42:40.355486
+ Failed to create chain-specific PDB: invalid literal for int() with base 10: 'YR',,,"{'visible': False, '__type__': 'update'}",2025-04-09 13:42:41.822673
+ Failed to create chain-specific PDB: invalid literal for int() with base 10: 'LU',,,"{'visible': False, '__type__': 'update'}",2025-04-09 13:42:43.191727
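The three cached-example rows above all fail the same way: a residue-name fragment ('THR', 'YR', 'LU') reaches `int()` where a numeric residue identifier is expected. A minimal sketch of the failure mode, plus a hypothetical defensive parse (`parse_residue_number` is illustrative only and not part of the app):

```python
import re

# Failure mode seen in the cached-example log: int() receives a residue-name
# fragment such as 'THR' instead of a numeric residue id.
try:
    int("THR")
except ValueError as exc:
    print(exc)  # invalid literal for int() with base 10: 'THR'

# Hypothetical defensive parse (not part of the app): extract the numeric part
# of a token like 'THR123' before converting, and fail with a clear message otherwise.
def parse_residue_number(token: str) -> int:
    match = re.search(r"-?\d+", token)
    if match is None:
        raise ValueError(f"no residue number found in {token!r}")
    return int(match.group())

print(parse_residue_number("THR123"))  # -> 123
```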
.ipynb_checkpoints/app-checkpoint.py CHANGED
@@ -30,7 +30,8 @@ from scipy.special import expit
 
 # Load model and move to device
 #checkpoint = 'ThorbenF/prot_t5_xl_uniref50'
- checkpoint = 'ThorbenF/prot_t5_xl_uniref50_cryptic'
+ #checkpoint = 'ThorbenF/prot_t5_xl_uniref50_cryptic'
+ checkpoint = 'ThorbenF/prot_t5_xl_uniref50_database'
 max_length = 1500
 model, tokenizer = load_model(checkpoint, max_length)
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
@@ -445,7 +446,7 @@ with gr.Blocks(css="""
 "color": "whiteCarbon",
 "residue_range": "",
 "around": 0,
- "byres": False,
+ #"byres": False,
 }
 ])
 
@@ -532,4 +533,4 @@ with gr.Blocks(css="""
 outputs=[predictions_output, molecule_output, download_output]
 )
 
- demo.launch(share=True)
+ demo.launch(share=True)
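For context, the checkpoint string edited above feeds directly into the repository's `load_model` helper (imported from `model_loader` in the deleted backup further down). A minimal sketch of the loading step as the diff context shows it, assuming `load_model` returns a `(model, tokenizer)` pair:

```python
import torch
from model_loader import load_model  # repository helper; signature assumed from the diff context

# New default after this commit; the older checkpoints remain as commented-out options.
checkpoint = 'ThorbenF/prot_t5_xl_uniref50_database'
max_length = 1500

model, tokenizer = load_model(checkpoint, max_length)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.to(device)
model.eval()  # inference only, as in the app
```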
app.py CHANGED
@@ -446,7 +446,7 @@ with gr.Blocks(css="""
 "color": "whiteCarbon",
 "residue_range": "",
 "around": 0,
- "byres": False,
+ #"byres": False,
 }
 ])
 
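Both copies of the app make the same change to the initial `Molecule3D` view: the `byres` key in the representation dict is commented out rather than removed. A sketch of the resulting `reps` configuration, assembled from the full component definition in the deleted backup below:

```python
from gradio_molecule3d import Molecule3D

# Representation list as used after this commit; keys mirror the backup file's definition.
reps = [
    {
        "model": 0,
        "style": "cartoon",
        "color": "whiteCarbon",
        "residue_range": "",
        "around": 0,
        # "byres": False,  # commented out by this commit
    }
]

molecule_output2 = Molecule3D(label="Protein Structure", reps=reps)
```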
app.py.backup DELETED
@@ -1,542 +0,0 @@
- from datetime import datetime
- import gradio as gr
- import requests
- from Bio.PDB import PDBParser, MMCIFParser, PDBIO, Select
- from Bio.PDB.Polypeptide import is_aa
- from Bio.SeqUtils import seq1
- from typing import Optional, Tuple
- import numpy as np
- import os
- from gradio_molecule3d import Molecule3D
-
- from model_loader import load_model
-
- import torch
- import torch.nn as nn
- import torch.nn.functional as F
- from torch.utils.data import DataLoader
-
- import re
- import pandas as pd
- import copy
-
- import transformers
- from transformers import AutoTokenizer, DataCollatorForTokenClassification
-
- from datasets import Dataset
-
- from scipy.special import expit
-
-
- # Load model and move to device
- checkpoint = 'ThorbenF/prot_t5_xl_uniref50'
- max_length = 1500
- model, tokenizer = load_model(checkpoint, max_length)
- device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
- model.to(device)
- model.eval()
-
- def normalize_scores(scores):
- min_score = np.min(scores)
- max_score = np.max(scores)
- return (scores - min_score) / (max_score - min_score) if max_score > min_score else scores
-
- def read_mol(pdb_path):
- """Read PDB file and return its content as a string"""
- with open(pdb_path, 'r') as f:
- return f.read()
-
- def fetch_structure(pdb_id: str, output_dir: str = ".") -> Optional[str]:
- """
- Fetch the structure file for a given PDB ID. Prioritizes CIF files.
- If a structure file already exists locally, it uses that.
- """
- file_path = download_structure(pdb_id, output_dir)
- if file_path:
- return file_path
- else:
- return None
-
- def download_structure(pdb_id: str, output_dir: str) -> Optional[str]:
- """
- Attempt to download the structure file in CIF or PDB format.
- Returns the path to the downloaded file, or None if download fails.
- """
- for ext in ['.cif', '.pdb']:
- file_path = os.path.join(output_dir, f"{pdb_id}{ext}")
- if os.path.exists(file_path):
- return file_path
- url = f"https://files.rcsb.org/download/{pdb_id}{ext}"
- try:
- response = requests.get(url, timeout=10)
- if response.status_code == 200:
- with open(file_path, 'wb') as f:
- f.write(response.content)
- return file_path
- except Exception as e:
- print(f"Download error for {pdb_id}{ext}: {e}")
- return None
-
- def convert_cif_to_pdb(cif_path: str, output_dir: str = ".") -> str:
- """
- Convert a CIF file to PDB format using BioPython and return the PDB file path.
- """
- pdb_path = os.path.join(output_dir, os.path.basename(cif_path).replace('.cif', '.pdb'))
- parser = MMCIFParser(QUIET=True)
- structure = parser.get_structure('protein', cif_path)
- io = PDBIO()
- io.set_structure(structure)
- io.save(pdb_path)
- return pdb_path
-
- def fetch_pdb(pdb_id):
- pdb_path = fetch_structure(pdb_id)
- if not pdb_path:
- return None
- _, ext = os.path.splitext(pdb_path)
- if ext == '.cif':
- pdb_path = convert_cif_to_pdb(pdb_path)
- return pdb_path
-
- def create_chain_specific_pdb(input_pdb: str, chain_id: str, residue_scores: list, protein_residues: list) -> str:
- """
- Create a PDB file with only the selected chain and residues, replacing B-factor with prediction scores
- """
- # Read the original PDB file
- parser = PDBParser(QUIET=True)
- structure = parser.get_structure('protein', input_pdb)
-
- # Prepare a new structure with only the specified chain and selected residues
- output_pdb = f"{os.path.splitext(input_pdb)[0]}_{chain_id}_predictions_scores.pdb"
-
- # Create scores dictionary for easy lookup
- scores_dict = {resi: score for resi, score in residue_scores}
-
- # Create a custom Select class
- class ResidueSelector(Select):
- def __init__(self, chain_id, selected_residues, scores_dict):
- self.chain_id = chain_id
- self.selected_residues = selected_residues
- self.scores_dict = scores_dict
-
- def accept_chain(self, chain):
- return chain.id == self.chain_id
-
- def accept_residue(self, residue):
- return residue.id[1] in self.selected_residues
-
- def accept_atom(self, atom):
- if atom.parent.id[1] in self.scores_dict:
- atom.bfactor = np.absolute(1-self.scores_dict[atom.parent.id[1]]) * 100
- return True
-
- # Prepare output PDB with selected chain and residues, modified B-factors
- io = PDBIO()
- selector = ResidueSelector(chain_id, [res.id[1] for res in protein_residues], scores_dict)
-
- io.set_structure(structure[0])
- io.save(output_pdb, selector)
-
- return output_pdb
-
- def calculate_geometric_center(pdb_path: str, high_score_residues: list, chain_id: str):
- """
- Calculate the geometric center of high-scoring residues
- """
- parser = PDBParser(QUIET=True)
- structure = parser.get_structure('protein', pdb_path)
-
- # Collect coordinates of CA atoms from high-scoring residues
- coords = []
- for model in structure:
- for chain in model:
- if chain.id == chain_id:
- for residue in chain:
- if residue.id[1] in high_score_residues:
- if 'CA' in residue: # Use alpha carbon as representative
- ca_atom = residue['CA']
- coords.append(ca_atom.coord)
-
- # Calculate geometric center
- if coords:
- center = np.mean(coords, axis=0)
- return center
- return None
-
- def process_pdb(pdb_id_or_file, segment):
- # Determine if input is a PDB ID or file path
- if pdb_id_or_file.endswith('.pdb'):
- pdb_path = pdb_id_or_file
- pdb_id = os.path.splitext(os.path.basename(pdb_path))[0]
- else:
- pdb_id = pdb_id_or_file
- pdb_path = fetch_pdb(pdb_id)
-
- if not pdb_path:
- return "Failed to fetch PDB file", None, None
-
- # Determine the file format and choose the appropriate parser
- _, ext = os.path.splitext(pdb_path)
- parser = MMCIFParser(QUIET=True) if ext == '.cif' else PDBParser(QUIET=True)
-
- try:
- # Parse the structure file
- structure = parser.get_structure('protein', pdb_path)
- except Exception as e:
- return f"Error parsing structure file: {e}", None, None
-
- # Extract the specified chain
- try:
- chain = structure[0][segment]
- except KeyError:
- return "Invalid Chain ID", None, None
-
- protein_residues = [res for res in chain if is_aa(res)]
- sequence = "".join(seq1(res.resname) for res in protein_residues)
- sequence_id = [res.id[1] for res in protein_residues]
-
- input_ids = tokenizer(" ".join(sequence), return_tensors="pt").input_ids.to(device)
- with torch.no_grad():
- outputs = model(input_ids).logits.detach().cpu().numpy().squeeze()
-
- # Calculate scores and normalize them
- scores = expit(outputs[:, 1] - outputs[:, 0])
-
- normalized_scores = normalize_scores(scores)
-
- # Zip residues with scores to track the residue ID and score
- residue_scores = [(resi, score) for resi, score in zip(sequence_id, normalized_scores)]
-
-
- # Define the score brackets
- score_brackets = {
- "0.0-0.2": (0.0, 0.2),
- "0.2-0.4": (0.2, 0.4),
- "0.4-0.6": (0.4, 0.6),
- "0.6-0.8": (0.6, 0.8),
- "0.8-1.0": (0.8, 1.0)
- }
-
- # Initialize a dictionary to store residues by bracket
- residues_by_bracket = {bracket: [] for bracket in score_brackets}
-
- # Categorize residues into brackets
- for resi, score in residue_scores:
- for bracket, (lower, upper) in score_brackets.items():
- if lower <= score < upper:
- residues_by_bracket[bracket].append(resi)
- break
-
- # Preparing the result string
- current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- result_str = f"Prediction for PDB: {pdb_id}, Chain: {segment}\nDate: {current_time}\n\n"
- result_str += "Residues by Score Brackets:\n\n"
-
- # Add residues for each bracket
- for bracket, residues in residues_by_bracket.items():
- result_str += f"Bracket {bracket}:\n"
- result_str += "Columns: Residue Name, Residue Number, One-letter Code, Normalized Score\n"
- result_str += "\n".join([
- f"{res.resname} {res.id[1]} {sequence[i]} {normalized_scores[i]:.2f}"
- for i, res in enumerate(protein_residues) if res.id[1] in residues
- ])
- result_str += "\n\n"
-
- # Create chain-specific PDB with scores in B-factor
- scored_pdb = create_chain_specific_pdb(pdb_path, segment, residue_scores, protein_residues)
-
- # Molecule visualization with updated script with color mapping
- mol_vis = molecule(pdb_path, residue_scores, segment)#, color_map)
-
- # Improved PyMOL command suggestions
- current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- pymol_commands = f"Prediction for PDB: {pdb_id}, Chain: {segment}\nDate: {current_time}\n\n"
-
- pymol_commands += f"""
- # PyMOL Visualization Commands
- load {os.path.abspath(pdb_path)}, protein
- hide everything, all
- show cartoon, chain {segment}
- color white, chain {segment}
- """
-
- # Define colors for each score bracket
- bracket_colors = {
- "0.0-0.2": "white",
- "0.2-0.4": "lightorange",
- "0.4-0.6": "orange",
- "0.6-0.8": "orangered",
- "0.8-1.0": "red"
- }
-
- # Add PyMOL commands for each score bracket
- for bracket, residues in residues_by_bracket.items():
- if residues: # Only add commands if there are residues in this bracket
- color = bracket_colors[bracket]
- resi_list = '+'.join(map(str, residues))
- pymol_commands += f"""
- select bracket_{bracket.replace('.', '').replace('-', '_')}, resi {resi_list} and chain {segment}
- show sticks, bracket_{bracket.replace('.', '').replace('-', '_')}
- color {color}, bracket_{bracket.replace('.', '').replace('-', '_')}
- """
- # Create prediction and scored PDB files
- prediction_file = f"{pdb_id}_binding_site_residues.txt"
- with open(prediction_file, "w") as f:
- f.write(result_str)
-
- return pymol_commands, mol_vis, [prediction_file,scored_pdb]
-
- def molecule(input_pdb, residue_scores=None, segment='A'):
- # More granular scoring for visualization
- mol = read_mol(input_pdb) # Read PDB file content
-
- # Prepare high-scoring residues script if scores are provided
- high_score_script = ""
- if residue_scores is not None:
- # Filter residues based on their scores
- class1_score_residues = [resi for resi, score in residue_scores if 0.0 < score <= 0.2]
- class2_score_residues = [resi for resi, score in residue_scores if 0.2 < score <= 0.4]
- class3_score_residues = [resi for resi, score in residue_scores if 0.4 < score <= 0.6]
- class4_score_residues = [resi for resi, score in residue_scores if 0.6 < score <= 0.8]
- class5_score_residues = [resi for resi, score in residue_scores if 0.8 < score <= 1.0]
-
-
- high_score_script = """
- // Load the original model and apply white cartoon style
- let chainModel = viewer.addModel(pdb, "pdb");
- chainModel.setStyle({}, {});
- chainModel.setStyle(
- {"chain": "%s"},
- {"cartoon": {"color": "white"}}
- );
-
- // Create a new model for high-scoring residues and apply red sticks style
- let class1Model = viewer.addModel(pdb, "pdb");
- class1Model.setStyle({}, {});
- class1Model.setStyle(
- {"chain": "%s", "resi": [%s]},
- {"stick": {"color": "0xFFFFFF", "opacity": 0.5}}
- );
-
- // Create a new model for high-scoring residues and apply red sticks style
- let class2Model = viewer.addModel(pdb, "pdb");
- class2Model.setStyle({}, {});
- class2Model.setStyle(
- {"chain": "%s", "resi": [%s]},
- {"stick": {"color": "0xFFD580", "opacity": 0.7}}
- );
-
- // Create a new model for high-scoring residues and apply red sticks style
- let class3Model = viewer.addModel(pdb, "pdb");
- class3Model.setStyle({}, {});
- class3Model.setStyle(
- {"chain": "%s", "resi": [%s]},
- {"stick": {"color": "0xFFA500", "opacity": 1}}
- );
-
- // Create a new model for high-scoring residues and apply red sticks style
- let class4Model = viewer.addModel(pdb, "pdb");
- class4Model.setStyle({}, {});
- class4Model.setStyle(
- {"chain": "%s", "resi": [%s]},
- {"stick": {"color": "0xFF4500", "opacity": 1}}
- );
-
- // Create a new model for high-scoring residues and apply red sticks style
- let class5Model = viewer.addModel(pdb, "pdb");
- class5Model.setStyle({}, {});
- class5Model.setStyle(
- {"chain": "%s", "resi": [%s]},
- {"stick": {"color": "0xFF0000", "alpha": 1}}
- );
-
- """ % (
- segment,
- segment,
- ", ".join(str(resi) for resi in class1_score_residues),
- segment,
- ", ".join(str(resi) for resi in class2_score_residues),
- segment,
- ", ".join(str(resi) for resi in class3_score_residues),
- segment,
- ", ".join(str(resi) for resi in class4_score_residues),
- segment,
- ", ".join(str(resi) for resi in class5_score_residues)
- )
-
- # Generate the full HTML content
- html_content = f"""
- <!DOCTYPE html>
- <html>
- <head>
- <meta http-equiv="content-type" content="text/html; charset=UTF-8" />
- <style>
- .mol-container {{
- width: 100%;
- height: 700px;
- position: relative;
- }}
- </style>
- <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.3/jquery.min.js"></script>
- <script src="https://3Dmol.csb.pitt.edu/build/3Dmol-min.js"></script>
- </head>
- <body>
- <div id="container" class="mol-container"></div>
- <script>
- let pdb = `{mol}`; // Use template literal to properly escape PDB content
- $(document).ready(function () {{
- let element = $("#container");
- let config = {{ backgroundColor: "white" }};
- let viewer = $3Dmol.createViewer(element, config);
-
- {high_score_script}
-
- // Add hover functionality
- viewer.setHoverable(
- {{}},
- true,
- function(atom, viewer, event, container) {{
- if (!atom.label) {{
- atom.label = viewer.addLabel(
- atom.resn + ":" +atom.resi + ":" + atom.atom,
- {{
- position: atom,
- backgroundColor: 'mintcream',
- fontColor: 'black',
- fontSize: 18,
- padding: 4
- }}
- );
- }}
- }},
- function(atom, viewer) {{
- if (atom.label) {{
- viewer.removeLabel(atom.label);
- delete atom.label;
- }}
- }}
- );
-
- viewer.zoomTo();
- viewer.render();
- viewer.zoom(0.8, 2000);
- }});
- </script>
- </body>
- </html>
- """
-
- # Return the HTML content within an iframe safely encoded for special characters
- return f'<iframe width="100%" height="700" srcdoc="{html_content.replace(chr(34), "&quot;").replace(chr(39), "&#39;")}"></iframe>'
-
- # Gradio UI
- with gr.Blocks() as demo:
- gr.Markdown("# Protein Binding Site Prediction")
-
- # Mode selection
- mode = gr.Radio(
- choices=["PDB ID", "Upload File"],
- value="PDB ID",
- label="Input Mode",
- info="Choose whether to input a PDB ID or upload a PDB/CIF file."
- )
-
- # Input components based on mode
- pdb_input = gr.Textbox(value="4BDU", label="PDB ID", placeholder="Enter PDB ID here...")
- pdb_file = gr.File(label="Upload PDB/CIF File", visible=False)
- visualize_btn = gr.Button("Visualize Structure")
-
- molecule_output2 = Molecule3D(label="Protein Structure", reps=[
- {
- "model": 0,
- "style": "cartoon",
- "color": "whiteCarbon",
- "residue_range": "",
- "around": 0,
- "byres": False,
- }
- ])
-
- with gr.Row():
- segment_input = gr.Textbox(value="A", label="Chain ID", placeholder="Enter Chain ID here...")
- prediction_btn = gr.Button("Predict Binding Site")
-
- molecule_output = gr.HTML(label="Protein Structure")
- explanation_vis = gr.Markdown("""
- Score dependent colorcoding:
- - 0.0-0.2: white
- - 0.2–0.4: light orange
- - 0.4–0.6: orange
- - 0.6–0.8: orangered
- - 0.8–1.0: red
- """)
- predictions_output = gr.Textbox(label="Visualize Prediction with PyMol")
- gr.Markdown("### Download:\n- List of predicted binding site residues\n- PDB with score in beta factor column")
- download_output = gr.File(label="Download Files", file_count="multiple")
-
- def process_interface(mode, pdb_id, pdb_file, chain_id):
- if mode == "PDB ID":
- return process_pdb(pdb_id, chain_id)
- elif mode == "Upload File":
- _, ext = os.path.splitext(pdb_file.name)
- file_path = os.path.join('./', f"{_}{ext}")
- if ext == '.cif':
- pdb_path = convert_cif_to_pdb(file_path)
- else:
- pdb_path= file_path
- return process_pdb(pdb_path, chain_id)
- else:
- return "Error: Invalid mode selected", None, None
-
- def fetch_interface(mode, pdb_id, pdb_file):
- if mode == "PDB ID":
- return fetch_pdb(pdb_id)
- elif mode == "Upload File":
- _, ext = os.path.splitext(pdb_file.name)
- file_path = os.path.join('./', f"{_}{ext}")
- #print(ext)
- if ext == '.cif':
- pdb_path = convert_cif_to_pdb(file_path)
- else:
- pdb_path= file_path
- #print(pdb_path)
- return pdb_path
- else:
- return "Error: Invalid mode selected"
-
- def toggle_mode(selected_mode):
- if selected_mode == "PDB ID":
- return gr.update(visible=True), gr.update(visible=False)
- else:
- return gr.update(visible=False), gr.update(visible=True)
-
- mode.change(
- toggle_mode,
- inputs=[mode],
- outputs=[pdb_input, pdb_file]
- )
-
- prediction_btn.click(
- process_interface,
- inputs=[mode, pdb_input, pdb_file, segment_input],
- outputs=[predictions_output, molecule_output, download_output]
- )
-
- visualize_btn.click(
- fetch_interface,
- inputs=[mode, pdb_input, pdb_file],
- outputs=molecule_output2
- )
-
- gr.Markdown("## Examples")
- gr.Examples(
- examples=[
- ["7RPZ", "A"],
- ["2IWI", "B"],
- ["2F6V", "A"]
- ],
- inputs=[pdb_input, segment_input],
- outputs=[predictions_output, molecule_output, download_output]
- )
-
- demo.launch(share=True)
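One detail of the deleted `create_chain_specific_pdb` worth keeping in mind when reading the scored PDB downloads: `accept_atom` writes `abs(1 - score) * 100` into the B-factor column, so higher prediction scores end up as lower B-factors. A standalone sketch of that mapping, mirroring the expression above:

```python
import numpy as np

def score_to_bfactor(score: float) -> float:
    # Same expression as ResidueSelector.accept_atom in the deleted file above.
    return float(np.absolute(1 - score) * 100)

for s in (0.05, 0.50, 0.95):
    print(f"score {s:.2f} -> B-factor {score_to_bfactor(s):.1f}")
# score 0.05 -> B-factor 95.0
# score 0.50 -> B-factor 50.0
# score 0.95 -> B-factor 5.0
```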
test.ipynb CHANGED
@@ -2,7 +2,7 @@
 "cells": [
 {
 "cell_type": "code",
- "execution_count": 29,
+ "execution_count": 2,
 "id": "e776d9d6-417e-46d4-8061-846c055e1f8a",
 "metadata": {},
 "outputs": [
@@ -10,8 +10,8 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
- "* Running on local URL: http://127.0.0.1:7873\n",
- "* Running on public URL: https://120000a6aa9d78e04c.gradio.live\n",
+ "* Running on local URL: http://127.0.0.1:7860\n",
+ "* Running on public URL: https://17e7c7978ce0b4df12.gradio.live\n",
 "\n",
 "This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from the terminal in the working directory to deploy to Hugging Face Spaces (https://huggingface.co/spaces)\n"
 ]
@@ -19,7 +19,7 @@
 {
 "data": {
 "text/html": [
- "<div><iframe src=\"https://120000a6aa9d78e04c.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+ "<div><iframe src=\"https://17e7c7978ce0b4df12.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
 ],
 "text/plain": [
 "<IPython.core.display.HTML object>"
@@ -32,7 +32,7 @@
 "data": {
 "text/plain": []
 },
- "execution_count": 29,
+ "execution_count": 2,
 "metadata": {},
 "output_type": "execute_result"
 }
@@ -570,36 +570,30 @@
 },
 {
 "cell_type": "code",
- "execution_count": 21,
+ "execution_count": 3,
 "id": "d70c40b9-5d5a-4795-b2a2-149c4a57d16e",
 "metadata": {},
 "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py:441: UserWarning: Examples will be cached but not all input components have example values. This may result in an exception being thrown by your function. If you do get an error while caching examples, make sure all of your inputs have example values for all of your examples or you provide default values for those particular parameters in your function.\n",
- " warnings.warn(\n",
- "INFO:__main__:Using cached structure: ./7rpz.cif\n",
- "INFO:__main__:Using cached structure: ./2iwi.cif\n",
- "INFO:__main__:Using cached structure: ./2f6v.cif\n",
- "INFO:httpx:HTTP Request: GET http://127.0.0.1:7862/gradio_api/startup-events \"HTTP/1.1 200 OK\"\n"
- ]
- },
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
- "* Running on local URL: http://127.0.0.1:7862\n",
- "Caching examples at: '/home/frohlkin/Projects/LargeLanguageModels/Publication/test_webpage/.gradio/cached_examples/148'\n"
+ "* Running on local URL: http://127.0.0.1:7861\n",
+ "Caching examples at: '/Users/thorben_froehlking/Desktop/PostDoc/Projects/CrypticPockets/test_webpage/.gradio/cached_examples/43'\n"
 ]
 },
 {
 "name": "stderr",
 "output_type": "stream",
 "text": [
- "INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7862/ \"HTTP/1.1 200 OK\"\n",
+ "/Users/thorben_froehlking/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py:441: UserWarning: Examples will be cached but not all input components have example values. This may result in an exception being thrown by your function. If you do get an error while caching examples, make sure all of your inputs have example values for all of your examples or you provide default values for those particular parameters in your function.\n",
+ " warnings.warn(\n",
 "INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version \"HTTP/1.1 200 OK\"\n",
+ "INFO:__main__:Successfully downloaded: https://files.rcsb.org/download/7RPZ.cif\n",
+ "INFO:__main__:Successfully downloaded: https://files.rcsb.org/download/2IWI.cif\n",
+ "INFO:__main__:Successfully downloaded: https://files.rcsb.org/download/2F6V.cif\n",
+ "INFO:httpx:HTTP Request: GET http://127.0.0.1:7861/gradio_api/startup-events \"HTTP/1.1 200 OK\"\n",
+ "INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7861/ \"HTTP/1.1 200 OK\"\n",
 "INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request \"HTTP/1.1 200 OK\"\n"
 ]
 },
@@ -607,7 +601,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
- "* Running on public URL: https://de785d7cce806497e9.gradio.live\n",
+ "* Running on public URL: https://c7596f9c4fd96dd249.gradio.live\n",
 "\n",
 "This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from the terminal in the working directory to deploy to Hugging Face Spaces (https://huggingface.co/spaces)\n"
 ]
@@ -616,13 +610,13 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
- "INFO:httpx:HTTP Request: HEAD https://de785d7cce806497e9.gradio.live \"HTTP/1.1 200 OK\"\n"
+ "INFO:httpx:HTTP Request: HEAD https://c7596f9c4fd96dd249.gradio.live \"HTTP/1.1 200 OK\"\n"
 ]
 },
 {
 "data": {
 "text/html": [
- "<div><iframe src=\"https://de785d7cce806497e9.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+ "<div><iframe src=\"https://c7596f9c4fd96dd249.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
 ],
 "text/plain": [
 "<IPython.core.display.HTML object>"
@@ -635,135 +629,7 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
- "Traceback (most recent call last):\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/routes.py\", line 990, in predict\n",
- " output = await route_utils.call_process_api(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/route_utils.py\", line 322, in call_process_api\n",
- " output = await app.get_blocks().process_api(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/blocks.py\", line 2047, in process_api\n",
- " result = await self.call_function(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/blocks.py\", line 1594, in call_function\n",
- " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
- " return await get_async_backend().run_sync_in_worker_thread(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/_backends/_asyncio.py\", line 2405, in run_sync_in_worker_thread\n",
- " return await future\n",
- " ^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/_backends/_asyncio.py\", line 914, in run\n",
- " result = context.run(func, *args)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/utils.py\", line 869, in wrapper\n",
- " response = f(*args, **kwargs)\n",
- " ^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py\", line 355, in load_example_with_output\n",
- " ) + self.load_from_cache(example_id)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py\", line 579, in load_from_cache\n",
- " output.append(component.read_from_flag(value_to_use))\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/components/base.py\", line 366, in read_from_flag\n",
- " return self.data_model.from_json(json.loads(payload))\n",
- " ^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/__init__.py\", line 346, in loads\n",
- " return _default_decoder.decode(s)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/decoder.py\", line 337, in decode\n",
- " obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/decoder.py\", line 355, in raw_decode\n",
- " raise JSONDecodeError(\"Expecting value\", s, err.value) from None\n",
- "json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)\n",
- "Traceback (most recent call last):\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/routes.py\", line 990, in predict\n",
- " output = await route_utils.call_process_api(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/route_utils.py\", line 322, in call_process_api\n",
- " output = await app.get_blocks().process_api(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/blocks.py\", line 2047, in process_api\n",
- " result = await self.call_function(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/blocks.py\", line 1594, in call_function\n",
- " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
- " return await get_async_backend().run_sync_in_worker_thread(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/_backends/_asyncio.py\", line 2405, in run_sync_in_worker_thread\n",
- " return await future\n",
- " ^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/_backends/_asyncio.py\", line 914, in run\n",
- " result = context.run(func, *args)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/utils.py\", line 869, in wrapper\n",
- " response = f(*args, **kwargs)\n",
- " ^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py\", line 355, in load_example_with_output\n",
- " ) + self.load_from_cache(example_id)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py\", line 579, in load_from_cache\n",
- " output.append(component.read_from_flag(value_to_use))\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/components/base.py\", line 366, in read_from_flag\n",
- " return self.data_model.from_json(json.loads(payload))\n",
- " ^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/__init__.py\", line 346, in loads\n",
- " return _default_decoder.decode(s)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/decoder.py\", line 337, in decode\n",
- " obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/decoder.py\", line 355, in raw_decode\n",
- " raise JSONDecodeError(\"Expecting value\", s, err.value) from None\n",
- "json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)\n",
- "Traceback (most recent call last):\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/routes.py\", line 990, in predict\n",
- " output = await route_utils.call_process_api(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/route_utils.py\", line 322, in call_process_api\n",
- " output = await app.get_blocks().process_api(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/blocks.py\", line 2047, in process_api\n",
- " result = await self.call_function(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/blocks.py\", line 1594, in call_function\n",
- " prediction = await anyio.to_thread.run_sync( # type: ignore\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/to_thread.py\", line 56, in run_sync\n",
- " return await get_async_backend().run_sync_in_worker_thread(\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/_backends/_asyncio.py\", line 2405, in run_sync_in_worker_thread\n",
- " return await future\n",
- " ^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/anyio/_backends/_asyncio.py\", line 914, in run\n",
- " result = context.run(func, *args)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/utils.py\", line 869, in wrapper\n",
- " response = f(*args, **kwargs)\n",
- " ^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py\", line 355, in load_example_with_output\n",
- " ) + self.load_from_cache(example_id)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/helpers.py\", line 579, in load_from_cache\n",
- " output.append(component.read_from_flag(value_to_use))\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/site-packages/gradio/components/base.py\", line 366, in read_from_flag\n",
- " return self.data_model.from_json(json.loads(payload))\n",
- " ^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/__init__.py\", line 346, in loads\n",
- " return _default_decoder.decode(s)\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/decoder.py\", line 337, in decode\n",
- " obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n",
- " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
- " File \"/home/frohlkin/anaconda3/envs/LLM/lib/python3.12/json/decoder.py\", line 355, in raw_decode\n",
- " raise JSONDecodeError(\"Expecting value\", s, err.value) from None\n",
- "json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)\n"
+ "INFO:__main__:Using cached structure: ./2f6v.cif\n"
 ]
 }
 ],
@@ -1491,9 +1357,9 @@
 ],
 "metadata": {
 "kernelspec": {
- "display_name": "Python (LLM)",
+ "display_name": "Python [conda env:LLM]",
 "language": "python",
- "name": "llm"
+ "name": "conda-env-LLM-py"
 },
 "language_info": {
 "codemirror_mode": {
@@ -1505,7 +1371,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
- "version": "3.12.2"
+ "version": "3.12.7"
 }
 },
 "nbformat": 4,