AlGe committed (verified)
Commit 2f3854c · 1 parent: 9b2d559

Update app.py

Files changed (1): app.py (+2 −2)
app.py CHANGED
@@ -9,13 +9,13 @@ auth_token = os.environ['HF_TOKEN']
 tokenizer_ext = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
 model_ext = AutoModelForTokenClassification.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
 tokenizer_ext.model_max_length = 512
-pipe_ext = gr.pipeline("ner", model=model_ext, tokenizer=tokenizer_ext)
+pipe_ext = pipeline("ner", model=model_ext, tokenizer=tokenizer_ext)
 
 # Load the tokenizer and models for the second pipeline
 tokenizer_ais = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_AIS-token", token=auth_token)
 model_ais = AutoModelForTokenClassification.from_pretrained("AlGe/deberta-v3-large_AIS-token", token=auth_token)
 tokenizer_ais.model_max_length = 512
-pipe_ais = gr.pipeline("ner", model=model_ais, tokenizer=tokenizer_ais)
+pipe_ais = pipeline("ner", model=model_ais, tokenizer=tokenizer_ais)
 
 # Load the tokenizer and models for the third pipeline
 model1 = AutoModelForSequenceClassification.from_pretrained("AlGe/deberta-v3-large_Int_segment", num_labels=1, token=auth_token)
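
Both changed lines fix the same bug: `pipeline` is the Transformers factory function, while `gradio` (imported as `gr`) has no `pipeline` attribute, so the old `gr.pipeline(...)` calls would raise an `AttributeError` as soon as the Space started. Below is a minimal sketch of the corrected setup for the first pipeline, assuming `pipeline` is imported from `transformers` at the top of app.py (the import itself is outside this hunk) and that `HF_TOKEN` grants access to the referenced models:

```python
import os

from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

auth_token = os.environ["HF_TOKEN"]

# Load the tokenizer and model once, as app.py does.
tokenizer_ext = AutoTokenizer.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
model_ext = AutoModelForTokenClassification.from_pretrained("AlGe/deberta-v3-large_token", token=auth_token)
tokenizer_ext.model_max_length = 512

# The Transformers factory accepts already-loaded model/tokenizer objects,
# so nothing is downloaded or instantiated twice.
pipe_ext = pipeline("ner", model=model_ext, tokenizer=tokenizer_ext)

# A token-classification pipeline returns one dict per tagged token/span,
# e.g. {'entity': ..., 'score': ..., 'word': ..., 'start': ..., 'end': ...};
# the label set depends on the model. Example input is illustrative only.
print(pipe_ext("I remember walking to the old harbour with my sister."))
```

Passing the preloaded objects rather than a model id also means the 512-token cap set on the tokenizer carries into the pipeline.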