kunato committed on
Commit
080d3e7
·
verified ·
1 Parent(s): 3961909

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -81
README.md CHANGED
@@ -39,66 +39,13 @@ For release post, please see our [blog](...).
39
  ## Usage Example
40
 
41
  ```python
42
- from transformers import AutoTokenizer, AutoModelForCausalLM
43
- import torch
44
-
45
- model_id = "scb10x/llama3.2-typhoon2-1b-instruct"
46
-
47
- tokenizer = AutoTokenizer.from_pretrained(model_id)
48
- model = AutoModelForCausalLM.from_pretrained(
49
- model_id,
50
- torch_dtype=torch.bfloat16,
51
- device_map="auto",
52
- )
53
-
54
- messages = [
55
- {"role": "system", "content": "You are Typhoon, an AI assistant created by SCB 10X, designed to be helpful, harmless, and honest. Typhoon assists with analysis, answering questions, math, coding, creative writing, teaching, role-play, discussions, and more. Typhoon responds directly without affirmations or filler phrases (e.g., “Certainly,” “Of course”). Responses do not start with “Certainly” in any form. Typhoon adheres to these rules in all languages and always replies in the user's language or as requested. Communicate in fluid, conversational prose, showing genuine interest, empathy, and presenting information clearly and visually."},
56
- {"role": "user", "content": "ขอสูตรไก่ย่าง"},
57
- ]
58
-
59
- input_ids = tokenizer.apply_chat_template(
60
- messages,
61
- add_generation_prompt=True,
62
- return_tensors="pt"
63
- ).to(model.device)
64
-
65
- terminators = [
66
- tokenizer.eos_token_id,
67
- tokenizer.convert_tokens_to_ids("<|eot_id|>")
68
- ]
69
-
70
- outputs = model.generate(
71
- input_ids,
72
- max_new_tokens=512,
73
- eos_token_id=terminators,
74
- do_sample=True,
75
- temperature=0.4,
76
- top_p=0.9,
77
- )
78
- response = outputs[0][input_ids.shape[-1]:]
79
- print(tokenizer.decode(response, skip_special_tokens=True))
80
- ```
81
-
82
- ## Inference Server Hosting Example
83
- ```bash
84
- pip install vllm
85
- vllm serve scb10x/llama3.2-typhoon2-1b-instruct
86
- # see more information at https://docs.vllm.ai/
87
- ```
88
-
89
-
90
- ## Function-Call Example
91
- ```python
92
- import json
93
  import torch
94
  from transformers import AutoModelForCausalLM, AutoTokenizer
95
- import os
96
  import ast
97
-
98
  model_name = "scb10x/llama3.2-typhoon2-1b-instruct"
99
  tokenizer = AutoTokenizer.from_pretrained(model_name)
100
  model = AutoModelForCausalLM.from_pretrained(
101
- model_name, torch_dtype=torch.bfloat16
102
  )
103
 
104
  get_weather_api = {
@@ -159,10 +106,6 @@ messages = [
159
  {"role": "user", "content": "ขอราคาหุ้น Tasla (TLS) และ Amazon (AMZ) ?"},
160
  ]
161
 
162
- final_prompt = tokenizer.apply_chat_template(
163
- messages, tools=openai_format_tools, add_generation_prompt=True, tokenize=False
164
- )
165
-
166
  inputs = tokenizer.apply_chat_template(
167
  messages, tools=openai_format_tools, add_generation_prompt=True, return_tensors="pt"
168
  ).to(model.device)
@@ -175,7 +118,7 @@ outputs = model.generate(
175
  num_return_sequences=1,
176
  eos_token_id=[tokenizer.eos_token_id, 128009],
177
  )
178
- response = outputs[0][input_ids.shape[-1]:]
179
 
180
  print("Here Output:", tokenizer.decode(response, skip_special_tokens=True))
181
 
@@ -287,27 +230,6 @@ def parse_nested_value(value):
287
  )
288
  return repr(value)
289
 
290
-
291
- def decoded_output_to_execution_list(decoded_output):
292
- """
293
- Convert decoded output to a list of executable function calls.
294
-
295
- Args:
296
- decoded_output (list): A list of dictionaries representing function calls.
297
-
298
- Returns:
299
- list: A list of strings, each representing an executable function call.
300
- """
301
- execution_list = []
302
- for function_call in decoded_output:
303
- for key, value in function_call.items():
304
- args_str = ", ".join(
305
- f"{k}={parse_nested_value(v)}" for k, v in value.items()
306
- )
307
- execution_list.append(f"{key}({args_str})")
308
- return execution_list
309
-
310
-
311
  def default_decode_ast_prompting(result, language="Python"):
312
  result = result.strip("`\n ")
313
  if not result.startswith("["):
@@ -317,7 +239,6 @@ def default_decode_ast_prompting(result, language="Python"):
317
  decoded_output = ast_parse(result, language)
318
  return decoded_output
319
 
320
-
321
  fc_result = default_decode_ast_prompting(tokenizer.decode(response, skip_special_tokens=True))
322
  print(fc_result) # [{'Function': {'arguments': '{"symbol": "TLS"}', 'name': 'get_stock_price'}}, {'Function': {'arguments': '{"symbol": "AMZ"}', 'name': 'get_stock_price'}}]
323
  ```
 
39
  ## Usage Example
40
 
41
  ```python
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  import torch
43
  from transformers import AutoModelForCausalLM, AutoTokenizer
 
44
  import ast
 
45
  model_name = "scb10x/llama3.2-typhoon2-1b-instruct"
46
  tokenizer = AutoTokenizer.from_pretrained(model_name)
47
  model = AutoModelForCausalLM.from_pretrained(
48
+ model_name, torch_dtype=torch.bfloat16, device_map="auto",
49
  )
50
 
51
  get_weather_api = {
 
106
  {"role": "user", "content": "ขอราคาหุ้น Tasla (TLS) และ Amazon (AMZ) ?"},
107
  ]
108
 
 
 
 
 
109
  inputs = tokenizer.apply_chat_template(
110
  messages, tools=openai_format_tools, add_generation_prompt=True, return_tensors="pt"
111
  ).to(model.device)
 
118
  num_return_sequences=1,
119
  eos_token_id=[tokenizer.eos_token_id, 128009],
120
  )
121
+ response = outputs[0][inputs.shape[-1]:]
122
 
123
  print("Here Output:", tokenizer.decode(response, skip_special_tokens=True))
124
 
 
230
  )
231
  return repr(value)
232
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
233
  def default_decode_ast_prompting(result, language="Python"):
234
  result = result.strip("`\n ")
235
  if not result.startswith("["):
 
239
  decoded_output = ast_parse(result, language)
240
  return decoded_output
241
 
 
242
  fc_result = default_decode_ast_prompting(tokenizer.decode(response, skip_special_tokens=True))
243
  print(fc_result) # [{'Function': {'arguments': '{"symbol": "TLS"}', 'name': 'get_stock_price'}}, {'Function': {'arguments': '{"symbol": "AMZ"}', 'name': 'get_stock_price'}}]
244
  ```