# app.py
from flask import Flask, send_file, request, jsonify
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

app = Flask(__name__)

# The model and tokenizer are loaded lazily on first use
model = None
tokenizer = None

def load_model():
    """Load the AMD-OLMo-1B model and tokenizer once."""
    global model, tokenizer
    if model is None:
        print("Loading model...")
        tokenizer = AutoTokenizer.from_pretrained("amd/AMD-OLMo-1B")
        model = AutoModelForCausalLM.from_pretrained(
            "amd/AMD-OLMo-1B",
            torch_dtype=torch.float16,
            device_map="auto"
        )
        print("Model loaded successfully!")

def generate_response(prompt):
    """Generate a response from the model for the given prompt."""
    global model, tokenizer
    try:
        if model is None:
            load_model()
        inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
        with torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_length=200,
                num_return_sequences=1,
                do_sample=True,  # required for temperature/top_p to take effect
                temperature=0.7,
                top_p=0.9,
                repetition_penalty=1.2,
                pad_token_id=tokenizer.eos_token_id
            )
        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
        # Strip the echoed prompt so only the newly generated text is returned
        return response.replace(prompt, "").strip()
    except Exception as e:
        print(f"Error generating response: {str(e)}")
        # Arabic: "Sorry, an error occurred while processing your message."
        return "عذراً، حدث خطأ في معالجة رسالتك."

@app.route('/')
def home():
    return send_file('index.html')

@app.route('/api/chat', methods=['POST'])
def chat():
    """Handle a chat request: expects JSON {"message": "..."}, returns JSON {"response": "..."}."""
    try:
        data = request.json
        if not data:
            # Arabic: "No data was received"
            return jsonify({"response": "لم يتم استلام أي بيانات"}), 400
        user_message = data.get('message', '')
        if not user_message:
            # Arabic: "The message is empty"
            return jsonify({"response": "الرسالة فارغة"}), 400
        print(f"Received message: {user_message}")
        response = generate_response(user_message)
        print(f"Response: {response}")
        return jsonify({"response": response})
    except Exception as e:
        print(f"Error handling message: {str(e)}")
        # Arabic: "Sorry, an error occurred while processing your message"
        return jsonify({"response": "عذراً، حدث خطأ في معالجة رسالتك"}), 500

if __name__ == "__main__":
    # Run the app locally (Flask development server)
    app.run()
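
# A quick way to exercise the /api/chat endpoint once the app is running —
# a minimal sketch assuming the Flask development server's default address
# (http://127.0.0.1:5000); adjust the host/port if you run it differently:
#
#   curl -X POST http://127.0.0.1:5000/api/chat \
#        -H "Content-Type: application/json" \
#        -d '{"message": "Hello"}'
#
# The endpoint replies with JSON of the form {"response": "..."}.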