Update cord_inference.py

cord_inference.py  (CHANGED, +3 -3)
@@ -10,9 +10,9 @@ id2label = {v: k for v, k in enumerate(labels)}
 label2id = {k: v for v, k in enumerate(labels)}

 # nielsr/layoutlmv3-finetuned-cord
-tokenizer = LayoutLMv3TokenizerFast.from_pretrained("mp-02/layoutlmv3-finetuned-
-processor = LayoutLMv3Processor.from_pretrained("mp-02/layoutlmv3-finetuned-
-model = LayoutLMv3ForTokenClassification.from_pretrained("mp-02/layoutlmv3-finetuned-
+tokenizer = LayoutLMv3TokenizerFast.from_pretrained("mp-02/layoutlmv3-finetuned-cord", apply_ocr=False)
+processor = LayoutLMv3Processor.from_pretrained("mp-02/layoutlmv3-finetuned-cord", apply_ocr=False)
+model = LayoutLMv3ForTokenClassification.from_pretrained("mp-02/layoutlmv3-finetuned-cord")

 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 model.to(device)
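Usage note (not part of the commit): a minimal inference sketch assuming the updated loading code above. With apply_ocr=False the processor no longer runs its built-in Tesseract step, so the caller has to supply the words and their 0-1000-normalized bounding boxes. The image path "receipt.png" and the example words/boxes are hypothetical placeholders.

# Sketch only; assumes pre-extracted OCR words and boxes.
from PIL import Image
import torch
from transformers import LayoutLMv3Processor, LayoutLMv3ForTokenClassification

processor = LayoutLMv3Processor.from_pretrained("mp-02/layoutlmv3-finetuned-cord", apply_ocr=False)
model = LayoutLMv3ForTokenClassification.from_pretrained("mp-02/layoutlmv3-finetuned-cord")
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.to(device)

image = Image.open("receipt.png").convert("RGB")        # hypothetical input image
words = ["TOTAL", "9.50"]                               # hypothetical OCR output
boxes = [[70, 820, 180, 860], [700, 820, 790, 860]]     # boxes normalized to 0-1000

# Processor tokenizes the words, aligns the boxes, and prepares pixel values.
encoding = processor(image, words, boxes=boxes, return_tensors="pt").to(device)
with torch.no_grad():
    outputs = model(**encoding)

# Per-token predicted label ids, mapped back to CORD label names.
pred_ids = outputs.logits.argmax(-1).squeeze().tolist()
print([model.config.id2label[i] for i in pred_ids])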