Upload app.py
app.py
CHANGED
@@ -6,6 +6,7 @@ from datasets import Dataset
 import torch
 from flask import Flask, request, jsonify, render_template
 from threading import Thread
+import gradio as gr
 
 app = Flask(__name__)
 
@@ -118,7 +119,19 @@ def run_flask():
 tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")
 model = None
 labels = []
-app.run()
+app.run(port=5000)
+
+# Run Gradio
+def run_gradio():
+    def classify(text):
+        inputs = tokenizer(text, return_tensors="pt", padding="max_length", truncation=True)
+        outputs = model(**inputs)
+        predictions = torch.argmax(outputs.logits, dim=-1)
+        label = labels[predictions.item()]
+        return label
+
+    gr.Interface(fn=classify, inputs="text", outputs="text").launch(server_name="0.0.0.0", server_port=7860)
 
 if __name__ == '__main__':
     Thread(target=run_flask).start()
+    Thread(target=run_gradio).start()
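For reference, here is a minimal, self-contained sketch of the pattern this commit introduces: a Flask server and a Gradio interface for the BERT classifier, each started on its own thread. It is not the actual app.py; the /classify route, the label names, and the placeholder classification head are assumptions added for illustration, and app.run() is kept inside run_flask() so that execution reaches the __main__ block instead of blocking at import time.

# Sketch of the Flask + Gradio threading pattern from the commit above.
# Assumptions: the real app loads a fine-tuned `model` and real `labels`;
# the /classify route and the two label names here are placeholders.
import torch
import gradio as gr
from flask import Flask, jsonify, request
from threading import Thread
from transformers import AutoTokenizer, AutoModelForSequenceClassification

app = Flask(__name__)

tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")
# Placeholder: the base checkpoint's classification head is randomly
# initialised; a fine-tuned checkpoint would be loaded here in practice.
model = AutoModelForSequenceClassification.from_pretrained(
    "bert-base-multilingual-cased", num_labels=2
)
labels = ["negative", "positive"]  # placeholder label names


def classify(text):
    # Tokenize, run the model, and map the argmax logit to a label name.
    inputs = tokenizer(text, return_tensors="pt", padding="max_length", truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    prediction = torch.argmax(outputs.logits, dim=-1)
    return labels[prediction.item()]


@app.route("/classify", methods=["POST"])  # hypothetical route for illustration
def classify_endpoint():
    return jsonify({"label": classify(request.json["text"])})


def run_flask():
    # Calling app.run() inside this function (rather than at module level)
    # keeps it from blocking before the __main__ block starts the threads.
    app.run(host="0.0.0.0", port=5000)


def run_gradio():
    gr.Interface(fn=classify, inputs="text", outputs="text").launch(
        server_name="0.0.0.0", server_port=7860
    )


if __name__ == "__main__":
    Thread(target=run_flask).start()
    Thread(target=run_gradio).start()

With this layout the Flask API listens on port 5000 and the Gradio UI on port 7860, matching the ports added in the commit, and both share the same tokenizer, model, and labels defined at module level.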