simonduerr committed (verified)
Commit 2e385df · 1 Parent(s): 340be60

Update inference_app.py

Files changed (1):
  1. inference_app.py  +22 -13
inference_app.py CHANGED
@@ -82,15 +82,12 @@ def get_metrics(
     )
     end_time = time.time()
     run_time = end_time - start_time
-    return metrics, run_time
+    return gr.DataFrame(metrics, visible=True), run_time
 
 
 def get_metrics_pinder(
     system_id: str,
-    receptor_file: Path,
-    ligand_file: Path,
-    flexible: bool = True,
-    posebusters: bool = True,
+    complex_file: Path,
     methodname: str = "",
     store: bool = True
 ) -> tuple[pd.DataFrame, float]:
@@ -101,10 +98,20 @@ with gr.Blocks() as app:
     with gr.Row():
         with gr.Column():
             input_system_id_pinder = gr.Textbox(label="PINDER system ID")
-            input_receptor_file_pinder = gr.File(label="Receptor file")
-            input_ligand_file_pinder = gr.File(label="Ligand file")
+            input_complex_pinder = gr.File(label="Receptor file")
             methodname_pinder = gr.Textbox(label="Name of your method in the format mlsb/spacename")
             store_pinder = gr.Checkbox(label="Store on huggingface for leaderboard", value=False)
+            gr.Examples(
+                [
+                    [
+                        "4neh__1__1.B__1.H",
+                        "input_protein_test.cif",
+                        "mlsb/test",
+                        False
+                    ],
+                ],
+                [input_system_id, input_complex_pinder, methodname_pinder, store_pinder],
+            )
             eval_btn_pinder = gr.Button("Run Evaluation")
 
 
@@ -120,9 +127,7 @@ with gr.Blocks() as app:
             posebusters = gr.Checkbox(label="PoseBusters", value=True)
             methodname = gr.Textbox(label="Name of your method in the format mlsb/spacename")
             store = gr.Checkbox(label="Store on huggingface for leaderboard", value=False)
-
-            eval_btn = gr.Button("Run Evaluation")
-            gr.Examples(
+            gr.Examples(
                 [
                     [
                         "4neh__1__1.B__1.H",
@@ -130,17 +135,21 @@ with gr.Blocks() as app:
                         "input_ligand_test.sdf",
                         True,
                         True,
+                        "mlsb/test",
+                        False
                     ],
                 ],
                 [input_system_id, input_receptor_file, input_ligand_file, flexible, posebusters, methodname, store],
             )
+            eval_btn = gr.Button("Run Evaluation")
+
     eval_run_time = gr.Textbox(label="Evaluation runtime")
     metric_table = gr.DataFrame(
-        pd.DataFrame([], columns=EVAL_METRICS), label="Evaluation metrics"
+        pd.DataFrame([], columns=EVAL_METRICS), label="Evaluation metrics", visible=False
     )
 
     metric_table_pinder = gr.DataFrame(
-        pd.DataFrame([], columns=EVAL_METRICS_PINDER), label="Evaluation metrics"
+        pd.DataFrame([], columns=EVAL_METRICS_PINDER), label="Evaluation metrics", visible=False
     )
 
     eval_btn.click(
@@ -150,7 +159,7 @@ with gr.Blocks() as app:
     )
     eval_btn_pinder.click(
         get_metrics_pinder,
-        inputs=[input_system_id_pinder, input_receptor_file_pinder, input_ligand_file_pinder, methodname_pinder, store_pinder],
+        inputs=[input_system_id_pinder, input_complex_pinder, methodname_pinder, store_pinder],
         outputs=[metric_table_pinder, eval_run_time],
     )
 
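
The main UI change in this commit is the show-on-click pattern: both metric tables are now created with visible=False, and the click handlers return gr.DataFrame(metrics, visible=True) so a table only appears once an evaluation has produced results, alongside new gr.Examples blocks that pre-fill the inputs. Below is a minimal, self-contained sketch of that pattern, assuming Gradio 4.x (where returning a component instance from an event handler updates the bound output component); the EVAL_METRICS columns, the dummy metrics row, and the single-input example are placeholders, not the app's real values.

import time

import gradio as gr
import pandas as pd

# Placeholder column names; the real app defines EVAL_METRICS elsewhere.
EVAL_METRICS = ["LRMSD", "iRMSD", "DockQ"]


def get_metrics(system_id: str) -> tuple[gr.DataFrame, str]:
    start_time = time.time()
    # A dummy row stands in for the real evaluation of `system_id`.
    metrics = pd.DataFrame([[1.0, 2.0, 0.5]], columns=EVAL_METRICS)
    run_time = time.time() - start_time
    # Returning a component with visible=True both fills the hidden table
    # below and reveals it in one step.
    return gr.DataFrame(metrics, visible=True), f"{run_time:.2f} s"


with gr.Blocks() as demo:
    input_system_id = gr.Textbox(label="PINDER system ID")
    # Pre-filled example inputs, analogous to the gr.Examples blocks added here.
    gr.Examples([["4neh__1__1.B__1.H"]], [input_system_id])
    eval_btn = gr.Button("Run Evaluation")

    eval_run_time = gr.Textbox(label="Evaluation runtime")
    # Created hidden; shown only after the first evaluation finishes.
    metric_table = gr.DataFrame(
        pd.DataFrame([], columns=EVAL_METRICS),
        label="Evaluation metrics",
        visible=False,
    )

    eval_btn.click(
        get_metrics,
        inputs=[input_system_id],
        outputs=[metric_table, eval_run_time],
    )

if __name__ == "__main__":
    demo.launch()

Starting the tables hidden avoids rendering an empty DataFrame before the first run; the commit applies the same idea to both metric_table and metric_table_pinder.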