Akjava committed
Commit e3b2b8b · verified · 1 Parent(s): 906f0c4

Update app.py

Files changed (1)
  1. app.py +6 -0
app.py CHANGED
@@ -32,6 +32,11 @@ if not huggingface_token:
 
 
 tokenizer = AutoTokenizer.from_pretrained(model_id, token=huggingface_token)
+print(tokenizer.special_tokens_map)
+
+# Check the special token IDs
+print(tokenizer.eos_token_id)
+print(tokenizer.encode("<|im_end|>", add_special_tokens=False))
 
 print(model_id,device,dtype)
 histories = []
@@ -67,6 +72,7 @@ def generate_text(messages):
     model.to(device)
     question = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
     question = tokenizer(question, return_tensors="pt").to(device)
+
 
     streamer = TextIteratorStreamer(tokenizer, skip_prompt=True)
     generation_kwargs = dict(question, streamer=streamer, max_new_tokens=200)
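The added prints only inspect the tokenizer's special tokens: special_tokens_map, eos_token_id, and the id that "<|im_end|>" encodes to. A minimal sketch of how that id could be wired into the streaming setup in generate_text() is below, assuming the standard Transformers pattern of running model.generate on a worker thread and reading from TextIteratorStreamer; the variable im_end_id and the explicit eos_token_id argument are illustrative additions, not part of this commit.

# Sketch only: reuses `model`, `tokenizer`, and the tokenized `question`
# already set up inside generate_text() in app.py.
from threading import Thread
from transformers import TextIteratorStreamer

# Hypothetical name: the id the new debug line prints for "<|im_end|>".
im_end_id = tokenizer.convert_tokens_to_ids("<|im_end|>")

streamer = TextIteratorStreamer(tokenizer, skip_prompt=True)
generation_kwargs = dict(
    question,                 # BatchEncoding with input_ids / attention_mask
    streamer=streamer,
    max_new_tokens=200,
    eos_token_id=im_end_id,   # stop at <|im_end|> explicitly (illustrative)
)

# Run generation on a background thread and consume streamed text as it arrives.
thread = Thread(target=model.generate, kwargs=generation_kwargs)
thread.start()
for new_text in streamer:
    print(new_text, end="", flush=True)
thread.join()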