LuckRafly committed
Commit 5186d1b
1 Parent(s): 489cc5d

Update function.py

Files changed (1)
  1. function.py +6 -6
function.py CHANGED
@@ -11,14 +11,14 @@ def GetLLMResponse(selected_topic_level,
                    num_quizzes):
 
     # Calling llama model
-    llm = CTransformers(model="D:\Code Workspace\DL Model\llama-2-7b-chat.ggmlv3.q8_0.bin",
-                        model_type = 'llama',
-                        config = config)
-
-    # llm = CTransformers(model='TheBloke/Llama-2-7B-Chat-GGML',
-    #                     model_file = 'llama-2-7b-chat.ggmlv3.q8_0.bin',
+    # llm = CTransformers(model="D:\Code Workspace\DL Model\llama-2-7b-chat.ggmlv3.q8_0.bin",
     #                     model_type = 'llama',
     #                     config = config)
+
+    llm = CTransformers(model='TheBloke/Llama-2-7B-Chat-GGML',
+                        model_file = 'llama-2-7b-chat.ggmlv3.q8_0.bin',
+                        model_type = 'llama',
+                        config = config)
 
     ## Create LLM Chaining
     questions_template = "Generate a {selected_topic_level} math quiz on the topic of {selected_topic}. Include {num_quizzes} questions without providing answers."
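
For context, a minimal sketch of what the updated model loading in GetLLMResponse might look like after this commit. It assumes LangChain's CTransformers wrapper; the config dict values and the names questions_prompt and questions_chain are illustrative fill-ins, since the diff only shows the CTransformers call and the questions_template string.

from langchain.llms import CTransformers
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# Illustrative generation settings; the real `config` is defined elsewhere in function.py.
config = {'max_new_tokens': 512, 'temperature': 0.1}

# After this commit the GGML weights are fetched from the Hugging Face Hub
# ('TheBloke/Llama-2-7B-Chat-GGML') instead of a hard-coded local Windows path.
llm = CTransformers(model='TheBloke/Llama-2-7B-Chat-GGML',
                    model_file='llama-2-7b-chat.ggmlv3.q8_0.bin',
                    model_type='llama',
                    config=config)

# Chain the quiz prompt from the diff to the model (chain name is hypothetical).
questions_template = "Generate a {selected_topic_level} math quiz on the topic of {selected_topic}. Include {num_quizzes} questions without providing answers."
questions_prompt = PromptTemplate(
    input_variables=['selected_topic_level', 'selected_topic', 'num_quizzes'],
    template=questions_template)
questions_chain = LLMChain(llm=llm, prompt=questions_prompt)

The chain would then be invoked with something like questions_chain.run(selected_topic_level='Easy', selected_topic='Fractions', num_quizzes=5).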