mahiatlinux committed
Commit 8179e35 · verified · 1 Parent(s): 574a1e8

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED

@@ -24,7 +24,7 @@ if not torch.cuda.is_available():
 
 # If a GPU is available, load the model and tokenizer with specific configurations.
 if torch.cuda.is_available():
-    model_id = "mahiatlinux/MasherAI-v6.1-7B-checkpoint1"
+    model_id = "mahiatlinux/MasherAI-v6.2-7B-checkpoint1"
     model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
     tokenizer = AutoTokenizer.from_pretrained(model_id)
     tokenizer.use_default_system_prompt = False
@@ -45,7 +45,7 @@ def generate(
     conversation = []
     # Adding system prompt to the conversation, if any.
     if system_prompt:
-        conversation.append({"from": "human", "value": "You are an AI assistant. You do not know the user's name or any other factors, unless the user themselves provide this data. You are to not assume, speculate or use placeholders for these."})
+        conversation.append({"from": "human", "value": "You are an AI assistant."})
     # Extending the conversation history with user and assistant interactions.
     for user, assistant in chat_history:
         conversation.extend([{"from": "human", "value": user}, {"from": "gpt", "value": assistant}])
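For context, below is a minimal sketch of how the two changed lines are used at runtime, assuming the rest of app.py matches the visible hunk context. The surrounding code is not part of this diff, so the build_conversation helper and the final user-message append are illustrative assumptions, not part of the commit.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

if torch.cuda.is_available():
    # Checkpoint introduced by this commit (was v6.1 before).
    model_id = "mahiatlinux/MasherAI-v6.2-7B-checkpoint1"
    model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    tokenizer.use_default_system_prompt = False

def build_conversation(system_prompt, chat_history, message):
    # Hypothetical helper mirroring the hunk at line 45: a fixed system turn,
    # then alternating human/gpt turns from history, then the new user message.
    conversation = []
    if system_prompt:
        conversation.append({"from": "human", "value": "You are an AI assistant."})
    for user, assistant in chat_history:
        conversation.extend([{"from": "human", "value": user}, {"from": "gpt", "value": assistant}])
    conversation.append({"from": "human", "value": message})
    return conversation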