ahmetmete committed on
Commit
428d607
·
1 Parent(s): 38b4491

handler.py

Browse files
Files changed (1) hide show
  1. handler.py +28 -0
handler.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Dict, List, Any
2
+ import torch
3
+ from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig
4
+
5
class EndpointHandler():
    """Hugging Face Inference Endpoints custom handler for
    deepseek-ai/deepseek-llm-67b-chat.

    Loads the tokenizer and model once at startup, then serves chat-style
    generation requests via ``__call__``.
    """

    def __init__(self, path=""):
        # BUG FIX: was `_init_` (single underscores). Python only invokes
        # the double-underscore `__init__`, so the original never ran and
        # the handler had no `tokenizer`/`model` attributes.
        #
        # `path` is the checkpoint directory the endpoint toolkit passes in;
        # this handler ignores it and always pulls the hub model by name.
        model_name = "deepseek-ai/deepseek-llm-67b-chat"
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        # bfloat16 + device_map="auto" shards the 67B model across the
        # available accelerators.
        self.model = AutoModelForCausalLM.from_pretrained(
            model_name, torch_dtype=torch.bfloat16, device_map="auto"
        )
        self.model.generation_config = GenerationConfig.from_pretrained(model_name)
        # Use EOS as the pad token so generate() has a valid pad_token_id.
        self.model.generation_config.pad_token_id = self.model.generation_config.eos_token_id

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Generate a chat completion for one request.

        BUG FIX: was `_call_` (single underscores) — the instance was not
        callable, so the endpoint toolkit's `handler(data)` raised TypeError.

        data args:
            inputs (:obj: `str`): the user prompt; if the key is absent the
                whole payload is used as the prompt.
        Return:
            A :obj:`list` with one dict: ``[{"result": <generated text>}]``.
        """
        inputs = data.pop('inputs', data)
        messages = [{"role": "user", "content": inputs}]
        input_tensor = self.tokenizer.apply_chat_template(
            messages, add_generation_prompt=True, return_tensors="pt"
        )
        outputs = self.model.generate(
            input_tensor.to(self.model.device), max_new_tokens=100
        )
        # Slice off the prompt tokens so only newly generated text is decoded.
        result = self.tokenizer.decode(
            outputs[0][input_tensor.shape[1]:], skip_special_tokens=True
        )
        return [{"result": result}]