gardarjuto committed eb01dd5 (parent: 210bb23)

add logging to handler

handler.py (+9 -0) CHANGED
@@ -1,6 +1,10 @@
 from typing import Dict, List, Any
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
+import logging
+
+logging.basicConfig(level=logging.INFO)
+LOGGER = logging.getLogger(__name__)
 
 
 # Prompts for the different tasks
@@ -29,6 +33,10 @@ class EndpointHandler:
         self.model = AutoModelForCausalLM.from_pretrained(
             path, device_map="auto", torch_dtype=torch.bfloat16
         )
+        LOGGER.info(f"Inference model loaded from {path}")
+        LOGGER.info(f"Model outline: {self.model}")
+        LOGGER.info(f"Model device: {self.model.device}")
+
         # Fix the pad and bos tokens to avoid bug in the tokenizer
         pad_token = "<unk>"
         bos_token = "<|endoftext|>"
@@ -90,6 +98,7 @@ class EndpointHandler:
         Return:
             A :obj:`list` | `dict`: will be serialized and returned
         """
+        LOGGER.info(f"Received data: {data}")
 
         # Get inputs
         input_a = data.pop("input_a", None)
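For context, a minimal local smoke-test sketch of the updated handler. EndpointHandler, the "input_a" key, and the LOGGER setup come from the diff above; the model path, the payload contents, and the exact __init__/__call__ signatures are assumptions based on the standard Hugging Face custom-handler interface, not taken from this commit.

# Hypothetical usage sketch: exercises the new logging added in this commit.
from handler import EndpointHandler

handler = EndpointHandler(path="path/to/model")   # __init__ should log the loaded model, its outline, and its device
result = handler({"input_a": "example input"})    # __call__ should log "Received data: ..." before inference
print(result)                                      # a list or dict, per the docstring above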