Update app.py
app.py CHANGED
@@ -5,6 +5,7 @@ import traceback
 import pandas as pd
 import torch
 import gradio as gr
+import gc
 from transformers import (
     logging,
     AutoProcessor,
@@ -106,6 +107,27 @@ def generate_and_export():
             "outputs/inference.tsv",
             "outputs/eval.csv"
         )
+        # Add this to your generate_and_export function
+
+        # ... existing code ...
+
+        # Better memory management
+        for i in range(1, 21):
+            doc = to_soap("Generate a realistic, concise doctor's progress note...")
+            docs.append(doc)
+            gts.append(to_soap(doc))
+
+            # More aggressive cleanup every 3 iterations
+            if i % 3 == 0:
+                torch.cuda.empty_cache()
+                gc.collect()
+
+        # Clean up model after use
+        del model
+        torch.cuda.empty_cache()
+        gc.collect()
+
+

    except Exception as e:
        traceback.print_exc()
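The added block assumes to_soap, docs, gts, and model are already defined earlier in generate_and_export; the cleanup pattern itself is generic. A minimal, self-contained sketch of the same idea (cleanup_cuda and run_batch are hypothetical helper names, and the generate callable stands in for the app's to_soap() call):

import gc

import torch


def cleanup_cuda():
    # Drop unreferenced Python objects first, then release PyTorch's cached
    # CUDA blocks back to the driver. empty_cache() only helps once nothing
    # references the tensors anymore.
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()


def run_batch(generate, n_docs=20, cleanup_every=3):
    # generate: any zero-argument callable that produces one document
    # (a stand-in for the app's to_soap() call).
    docs = []
    for i in range(1, n_docs + 1):
        docs.append(generate())
        if i % cleanup_every == 0:  # periodic cleanup, as in the diff
            cleanup_cuda()
    return docs


# After the batch, drop the last reference to the model before flushing:
#     del model
#     cleanup_cuda()

Calling gc.collect() before torch.cuda.empty_cache() matters because empty_cache() can only return allocator blocks whose tensors are no longer referenced, which is also why the diff deletes model before flushing the cache.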