File size: 1,630 Bytes
d2b9679
 
 
 
 
62e221a
5827f05
d2b9679
62e221a
37a8e62
 
a14671e
37a8e62
 
a14671e
 
 
37a8e62
 
 
 
 
 
 
d2b9679
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
import json
import os

from fastapi import FastAPI, HTTPException
from openai import OpenAI
# OpenAI credentials are read from the environment at import time; if either
# variable is unset, the client is built with None values and every request
# will fail at call time — TODO confirm the deployment sets "org"/"open_ai".
# (The redundant `app = FastAPI()` formerly here was dead code: it was
# rebound below, with no intervening use, when the app is created with its
# docs URL and description.)
org = os.getenv("org")
OPENAI_API_KEY = os.getenv("open_ai")
client = OpenAI(api_key=OPENAI_API_KEY, organization=org)

# Markdown blurb rendered on the interactive docs page (string is part of the
# served API description, so its content is preserved exactly).
description = """
# As part of the project, we need to implement a FastAPI endpoint that takes a string as input and returns a list of questions along with their corresponding answers. This endpoint will be used to generate questions from text data.

Details:
Input-1: A string containing the input text. (Type: String)
Input-2: Number of questions (Type: Integer)
--------------------------------------------
Output: A JSON response containing a list of questions and a corresponding list of answers.
"""

# Serve the Swagger UI at the root path ("/") instead of the default /docs.
app = FastAPI(description=description, docs_url="/")



@app.post("/get_questions")
async def getQuestions(job_description: str, no_of_questions: int):
    """Generate interview questions (with answers) for a job description.

    Args:
        job_description: Free-form job-description text to base questions on.
        no_of_questions: Number of question/answer pairs to request.

    Returns:
        dict: The model's JSON object parsed into a dict
        (question text -> answer text).

    Raises:
        HTTPException: 502 if the model reply is missing or not valid JSON.
    """
    # NOTE(review): this client call is synchronous, so it blocks the event
    # loop for the whole request — consider OpenAI's async client here.
    response = client.chat.completions.create(
        model="gpt-3.5-turbo-1106",
        response_format={"type": "json_object"},  # To ENABLE JSON MODE
        messages=[
            {"role": "system",
                "content": "You are a helpful assistant designed to output JSON in this format [question-text as key and its value as answer-text]"},
            {"role": "user",
             "content": f"Given the job description [{job_description}] create {no_of_questions} "
                        f"interview questions and their corresponding answers"}
        ]
    )
    # message.content may be None (e.g. on some finish reasons); coalesce to
    # "" so the failure surfaces as a JSON error below, not a TypeError.
    result = response.choices[0].message.content or ""
    try:
        return json.loads(result)
    except json.JSONDecodeError as err:
        # Surface a clean upstream-failure status instead of an opaque 500.
        raise HTTPException(
            status_code=502,
            detail="Model returned a response that is not valid JSON.",
        ) from err