E-slam committed on
Commit 6823db2 · verified · 1 Parent(s): 1674ee4

Update main.py

Files changed (1)
  1. main.py +30 -42
main.py CHANGED
@@ -1,56 +1,44 @@
- import re
- import urllib
- import json
- from fastapi import FastAPI, HTTPException, Query
- from fastapi.middleware.cors import CORSMiddleware
- from transformers import AutoTokenizer, AutoModel
- import torch
- from torch import Tensor
- import torch.nn.functional as F
- import os
- os.environ['HF_HOME'] = '/'
-
- app = FastAPI()
-
- # Enable CORS
- app.add_middleware(
-     CORSMiddleware,
-     allow_origins=["*"],
-     allow_credentials=True,
-     allow_methods=["*"],
-     allow_headers=["*"],
- )
-
- model_name = "intfloat/multilingual-e5-large"
- tokenizer = AutoTokenizer.from_pretrained(model_name)
- model = AutoModel.from_pretrained(model_name)
-
- def average_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
-     last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
-     return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]
-
- def embed_single_text(text: str) -> Tensor:
-     tokenizer = AutoTokenizer.from_pretrained('intfloat/multilingual-e5-large')
-     model = AutoModel.from_pretrained('intfloat/multilingual-e5-large').cpu()
-
-     batch_dict = tokenizer(text, max_length=512, padding=True, truncation=True, return_tensors='pt')
-
-     with torch.no_grad():
-         outputs = model(**batch_dict)
-
-     embedding = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])
-
-     embedding = F.normalize(embedding, p=2, dim=1)
-
-     return embedding
-
-
- @app.get("/e5_embeddings")
- def e5_embeddings(query: str = Query(...)):
-
-     result = embed_single_text([query])
-
-     if result is not None:
-         return result.tolist()
-     else:
-         raise HTTPException(status_code=500)
+ import os
+ import time
+ import shutil
+ import subprocess
+
+ # command = ["pip", "install", "flet==0.23.2"]
+
+ # subprocess.run(command, check=True)
+
+ # command = ["pip", "install", "flet-fastapi==0.19.0", "--no-deps"]
+
+ # subprocess.run(command, check=True)
+
+
+ gh_token = os.getenv("gh_token")
+
+ url_with_token = f"https://{gh_token}@github.com/Eslam-Magdy-1297/ES-BatchAPI-Backend.git"
+
+ os.system(f"git clone {url_with_token}")
+
+ time.sleep(10)
+
+ source_dir = "ES-BatchAPI-Backend"
+ destination_dir = "."
+
+ import os
+
+ # current_directory = os.getcwd()
+
+ # items = os.listdir(current_directory)
+
+ # for item in items:
+ #     print(item)
+
+ for item in os.listdir(source_dir):
+     s = os.path.join(source_dir, item)
+     d = os.path.join(destination_dir, item)
+     shutil.move(s, d)
+
+ os.rmdir(source_dir)
+
+ with open("index.py", "r") as file:
+     code = file.read()
+ exec(code)
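
For reference, the endpoint removed by this commit returned an L2-normalised multilingual-e5-large embedding for a single query string, serialised as a nested list. A minimal client sketch of how it could have been called (the base URL and port are placeholder assumptions, not part of this diff):

import requests  # assumes the requests package is available

# Hypothetical address of the old deployment; adjust to wherever the Space was served.
BASE_URL = "http://localhost:7860"

resp = requests.get(f"{BASE_URL}/e5_embeddings", params={"query": "hello world"})
resp.raise_for_status()

vectors = resp.json()       # nested list: one normalised embedding per input text
print(len(vectors[0]))      # multilingual-e5-large produces 1024-dimensional vectors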