Update pages/Our_model.py
pages/Our_model.py CHANGED (+2 -2)
@@ -18,7 +18,7 @@ def gpt_scorer(m,s):
         messages=[
             {
                 "role": "system",
-                "content": "You are UPSC answers evaluater. You will be given model answer and student answer. Evaluate it by comparing with the model answer. \n<<REMEMBER>>\nIt is 10 marks question. Give marks in the range of 0.5. (ex. 0,0.5,1...)\nPlease give marks generously. If the student answer body matches more than 70% with the model answer then give full marks for body
+                "content": "You are UPSC answers evaluater. You will be given model answer and student answer. Evaluate it by comparing with the model answer. \n<<REMEMBER>>\nIt is 10 marks question. Give marks in the range of 0.5. (ex. 0,0.5,1...)\nPlease give marks generously. If the student answer body matches more than 70% with the model answer then give full marks for body. \nIf the student answer and model answer is not relevant then give 0 marks.\ngive output in json format. Give output in this format {\"total\":}\n<<OUTPUT>>"
             },
             {
                 "role": "user",
@@ -32,7 +32,7 @@ def gpt_scorer(m,s):
         max_tokens=256,
         top_p=1,
         frequency_penalty=0,
-        presence_penalty=0
+        presence_penalty=0,seed = 10
     )
     return json.loads(response.choices[0].message.content)
 def embeddings_cosine(s1,s2,model):
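For context, here is a minimal sketch of what gpt_scorer plausibly looks like after this commit. Only the system prompt, the sampling parameters, the new seed, and the JSON parsing line are confirmed by the diff; the OpenAI client setup, the model name, and the user-message wording are assumptions made for illustration.

# Hedged sketch of gpt_scorer around the diffed lines.
# Assumed: client setup, model name, temperature, and user-message text.
import json
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

def gpt_scorer(m, s):
    """Score a student answer `s` against a model answer `m` out of 10 marks."""
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",  # assumed model name, not shown in the diff
        messages=[
            {
                "role": "system",
                "content": (
                    "You are UPSC answers evaluater. You will be given model answer "
                    "and student answer. Evaluate it by comparing with the model answer. "
                    "\n<<REMEMBER>>\nIt is 10 marks question. Give marks in the range of 0.5. "
                    "(ex. 0,0.5,1...)\nPlease give marks generously. If the student answer body "
                    "matches more than 70% with the model answer then give full marks for body. "
                    "\nIf the student answer and model answer is not relevant then give 0 marks."
                    "\ngive output in json format. Give output in this format "
                    "{\"total\":}\n<<OUTPUT>>"
                ),
            },
            {
                "role": "user",
                # Assumed wording; the diff does not show the user message body.
                "content": f"Model answer: {m}\nStudent answer: {s}",
            },
        ],
        max_tokens=256,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
        seed=10,  # added in this commit: fixed seed for more repeatable scoring
    )
    # The system prompt asks for JSON like {"total": <marks>}, so parse it directly.
    return json.loads(response.choices[0].message.content)

The seed parameter requests best-effort deterministic sampling from the API, so the same model answer and student answer are more likely to receive the same marks on re-grading, which complements the stricter JSON output instruction added to the system prompt.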