ciyidogan committed
Commit 2e98497 · verified · 1 Parent(s): 99e6b98

Update llm_model.py

Files changed (1)
  1. llm_model.py +4 -3
llm_model.py CHANGED
@@ -1,6 +1,6 @@
 import torch
 import traceback
-from transformers import AutoTokenizer, AutoModelForCausalLM, AutoModelForSequenceClassification
+from transformers import AutoTokenizer, AutoModelForCausalLM
 from log import log
 from pydantic import BaseModel
 import os
@@ -43,8 +43,9 @@ class LLMModel:
             log(f"❌ LLMModel setup() hatası: {e}")
             traceback.print_exc()
 
-    async def generate_response_with_messages(self, messages, project_config):
-        encodeds = self.tokenizer.apply_chat_template(messages, return_tensors="pt", add_generation_prompt=True)
+    async def generate_response_with_messages(self, messages, project_config, system_prompt):
+        all_messages = [{"role": "system", "content": system_prompt}] + messages
+        encodeds = self.tokenizer.apply_chat_template(all_messages, return_tensors="pt", add_generation_prompt=True)
         input_ids = encodeds.to(self.model.device)
         attention_mask = (input_ids != self.tokenizer.pad_token_id).long()
 
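For context, below is a minimal caller sketch for the updated method signature. It is not part of the repository: `llm` is assumed to be an already-initialised LLMModel instance, `project_config` and the prompts are placeholders, and only the new `system_prompt` parameter and its prepending behaviour are taken from the diff above.

# Hedged usage sketch (assumptions noted above); only the extra
# system_prompt argument comes from this commit.
async def ask(llm):
    messages = [{"role": "user", "content": "Hello, what can you help me with?"}]
    project_config = {}  # placeholder; real fields are project-specific

    # After this commit the system prompt is passed in explicitly and is
    # prepended as a {"role": "system", ...} message before apply_chat_template().
    return await llm.generate_response_with_messages(
        messages,
        project_config,
        system_prompt="You are a helpful assistant.",
    )

The coroutine would be driven with asyncio.run(ask(llm)) or awaited from existing async code.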