mokarnoub committed
Commit b4df929 · verified · 1 parent: 5da6e3c

Upload 5 files

Files changed (5)
  1. Dockerfile +17 -0
  2. app.py +72 -0
  3. docker-compose.yml +23 -0
  4. dockerignore +11 -0
  5. requirements.txt +14 -0
Dockerfile ADDED
@@ -0,0 +1,17 @@
+ FROM python:3.9-slim-buster
+
+ ENV PYTHONUNBUFFERED=1 \
+     HF_HOME=/app/.cache \
+     TORCH_HOME=/app/.cache \
+     USE_CUDA=false
+
+ RUN apt-get update && apt-get install -y --no-install-recommends gcc python3-dev
+
+ WORKDIR /app
+ COPY . .
+
+ RUN pip install --upgrade pip && \
+     pip install --no-cache-dir -r requirements.txt && \
+     python -c "from transformers import pipeline; pipeline('zero-shot-classification', model='joeddav/xlm-roberta-large-xnli')"
+
+ CMD ["gunicorn", "--bind", "0.0.0.0:7860", "--workers", "1", "--threads", "4", "app:app"]
app.py ADDED
@@ -0,0 +1,72 @@
+ import os
+ import logging
+ from flask import Flask, request, jsonify
+ from flask_cors import CORS
+ from transformers import pipeline
+ import torch
+
+ app = Flask(__name__)
+ CORS(app)
+
+ # Configure logging
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ # Predefined labels
+ PREDEFINED_LABELS = [
+     "مطعم", "حديقة", "جامعة", "مطبخ", "غرفة نوم",
+     "حمام", "غرفة معيشة", "شرفة", "مكتب", "صف دراسي"
+ ]
+
+ # Global variables for the model and initialization
+ MODEL = None
+ DEVICE = None
+ LABELS_ENCODED = None  # the encoded label representation will be stored here
+
+ def initialize():
+     global MODEL, DEVICE, LABELS_ENCODED
+     DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
+     logger.info(f"جار التحميل على الجهاز: {DEVICE}")
+
+     MODEL = pipeline(
+         task="zero-shot-classification",
+         model="joeddav/xlm-roberta-large-xnli",
+         device=DEVICE,
+         torch_dtype=torch.float16 if DEVICE == "cuda" else torch.float32,
+     )
+
+     # Warm up the model with the full set of labels
+     logger.info("جار تهيئة النموذج مع الـ Labels...")
+     dummy_result = MODEL(
+         "تهيئة النموذج",
+         PREDEFINED_LABELS,
+         multi_label=False
+     )
+     logger.info("تم تحميل النموذج والـ Labels بنجاح")
+
+ # Call initialization at startup
+ initialize()
+
+ @app.route('/classify', methods=['POST'])
+ def classify():
+     try:
+         text = request.json.get('text', '').strip()
+         if not text:
+             return jsonify({"error": "يجب تقديم نص للتصنيف"}), 400
+
+         # Run the model with the predefined labels
+         result = MODEL(text, PREDEFINED_LABELS, multi_label=False)
+
+         return jsonify({
+             "prediction": {
+                 "label": result["labels"][0],
+                 "score": float(result["scores"][0])
+             }
+         })
+
+     except Exception as e:
+         logger.error(f"خطأ في التصنيف: {str(e)}")
+         return jsonify({"error": "حدث خطأ أثناء المعالجة"}), 500
+
+ if __name__ == '__main__':
+     app.run(host='0.0.0.0', port=7860)
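
For reference, a minimal client sketch for the /classify endpoint defined above, assuming the service is exposed on localhost:7860 (the port mapped in docker-compose.yml) and that `requests` is installed on the calling side; the input text is an illustrative example:

import requests

# POST Arabic (or any XLM-R-supported) text; the service responds with the top label and its score.
resp = requests.post(
    "http://localhost:7860/classify",
    json={"text": "طاولات وقائمة طعام ونادل يأخذ الطلبات"},  # "tables, a menu, and a waiter taking orders"
    timeout=120,  # the first request after a cold start can be slow on CPU
)
resp.raise_for_status()
print(resp.json())  # e.g. {"prediction": {"label": "...", "score": 0.9}}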
docker-compose.yml ADDED
@@ -0,0 +1,23 @@
+ version: '3.8'
+
+ services:
+   classifier:
+     build: .
+     ports:
+       - "7860:7860"
+     environment:
+       - USE_CUDA=false
+       - HF_HOME=/app/.cache/huggingface
+       - TORCH_HOME=/app/.cache/torch
+       - TOKENIZERS_PARALLELISM=true
+     volumes:
+       - ./logs:/app/logs
+       - ./model_cache:/app/.cache
+     restart: unless-stopped
+     deploy:
+       resources:
+         limits:
+           cpus: '4'
+           memory: 16G
+         reservations:
+           memory: 8G
dockerignore ADDED
@@ -0,0 +1,11 @@
+ .git
+ __pycache__
+ *.pyc
+ *.pyo
+ *.pyd
+ .DS_Store
+ .env
+ venv
+ logs/*
+ *.log
+ test.py
requirements.txt ADDED
@@ -0,0 +1,14 @@
+ flask==3.0.2
+ flask-cors==4.0.0
+ transformers==4.40.1
+ torch==2.1.0
+ accelerate==0.30.1
+ gunicorn==21.2.0
+ sentencepiece==0.2.0
+ huggingface-hub==0.23.1
+ cachetools==5.3.3
+ protobuf==3.20.3
+ onnxruntime==1.16.0
+ optimum==1.16.0
+ torchvision==0.16.0
+ numpy==1.26.4