Update function.py
function.py +2 -3
@@ -28,7 +28,6 @@ def get_answers(questions,model):
 
 def GetLLMResponse(selected_topic_level, selected_topic,num_quizzes, model):
     question_prompt = (f'You are an AI interview assistant that helps generate customized interview questions for various technical and non-technical roles. Your task is to create a set of interview questions based on the {selected_topic_level} and topic : {selected_topic}. Generate only {num_quizzes} questions ')
-    # question_prompt = (f'I want you to just generate mockquestion with this specification: Generate a {selected_topic_level} math quiz on the topic of {selected_topic}. Generate only {num_quizzes} questions not more and without providing answers.')
 
 
     if model == "Open AI":
@@ -42,9 +41,9 @@ def GetLLMResponse(selected_topic_level, selected_topic,num_quizzes, model):
     questions = questions.content
     # return questions.content
 
+    answers = "testing"
 
-
-    answers = get_answers(questions,model)
+    # answers = get_answers(questions,model)
 
 
     return(questions,answers)