Commit 3c9ff9d (parent: 957d368): Update app.py

app.py CHANGED
@@ -5,11 +5,13 @@ from transformers import pipeline, GPTJForCausalLM
 # load fp 16 model
 model = GPTJForCausalLM.from_pretrained("hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es", torch_dtype=torch.float16)
 
+config = AutoConfig.from_pretrained("hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es", name_or_path="adapter_model.bin")
+
 # load tokenizer
 tokenizer = AutoTokenizer.from_pretrained("hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es")
 
 # create pipeline
-pipe = pipeline("text-generation", model=model,
+pipe = pipeline("text-generation", model=model, config=config, tokenizer=tokenizer, device=0,)
 
 def predict(text):
     return pipe(f"text: {text}, entities:")["generated_text"]
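For reference, a minimal sketch of what app.py could look like after this commit. The import block is an assumption: the hunk context only shows `from transformers import pipeline, GPTJForCausalLM`, and lines 1-4 of the file are not part of the diff, so torch, AutoConfig, and AutoTokenizer may or may not already be imported there; any Gradio/UI wiring that presumably follows predict in the Space is also omitted. Two hedged notes: passing name_or_path="adapter_model.bin" to AutoConfig.from_pretrained appears to only record that string on the config object rather than loading adapter weights into the model, and a text-generation pipeline returns a list of dicts, so the sketch indexes the first element before reading "generated_text" (the return line in the diff indexes the list with a string, which raises a TypeError when predict is called).

import torch
from transformers import AutoConfig, AutoTokenizer, GPTJForCausalLM, pipeline

# load fp16 model (half-precision weights, typically run on a GPU)
model = GPTJForCausalLM.from_pretrained(
    "hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es", torch_dtype=torch.float16
)

# load config as in the commit; name_or_path is stored on the config only and
# does not attach adapter weights to the model
config = AutoConfig.from_pretrained(
    "hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es", name_or_path="adapter_model.bin"
)

# load tokenizer
tokenizer = AutoTokenizer.from_pretrained("hackathon-somos-nlp-2023/bertin-gpt-j-6b-ner-es")

# create pipeline on GPU 0
pipe = pipeline("text-generation", model=model, config=config, tokenizer=tokenizer, device=0)

def predict(text):
    # the pipeline returns [{"generated_text": ...}], so take the first result
    return pipe(f"text: {text}, entities:")[0]["generated_text"]

With this in place, calling predict("Juan vive en Madrid") would return the prompt followed by the model's generated entity annotation, assuming the Space has a GPU and the model weights load successfully.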