Commit f5769cf
Parent(s): de4975e
Update main.py

main.py CHANGED
@@ -1,36 +1,7 @@
+from fastapi import FastAPI
 
-from huggingface_hub import Repository
-
-# app = Flask(__name__)
-
-#define model
-tokenizer = AutoTokenizer.from_pretrained("UBC-NLP/MARBERT")
-
-sarcasm_adapter = Repository(local_dir="sarcasm_adapter", clone_from="nehalelkaref/sarcasm_adapter")
-aoc3_adapter = Repository(local_dir="aoc3_adapter", clone_from="nehalelkaref/aoc3_adapter")
-aoc4_adapter = Repository(local_dir="aoc4_adapter", clone_from="nehalelkaref/aoc4_adapter")
-fusion_adapter = Repository(local_dir="fusion_adapter", clone_from="nehalelkaref/region_fusion")
-
-model = AutoAdapterModel.from_pretrained("UBC-NLP/MARBERT")
-
-model.load_adapter("/aoc3_adapter", set_active=True, with_head=False)
-model.load_adapter("/aoc4_adapter", set_active=True, with_head=False)
-model.load_adapter("/sarcasm_adapter", set_active=True, with_head=False)
-
-model.load_adapter_fusion("/fusion_adapter/aoc(3),aoc(4),sarcasm",with_head=True, set_active=True)
-
-pipe = TextClassificationPipeline(tokenizer=tokenizer, model=model)
-
-# @app.route('/predict', methods=['POST'])
-# def predict():
-#     text = request.json['inputs']
-
-#     prediction = pipe(text)
-#     labels = {"LABEL_0":"GULF", "LABEL_1":"LEVANT","LABEL_2":"EGYPT"}
-#     regions = []
-#     for res in prediction:
-#         regions.append(labels[res['label']])
-
-#     return jsonify({'response': regions})
+app = FastAPI()
 
+@app.get("/")
+def read_root():
+    return {"Hello": "World!"}