from fastapi import FastAPI
from openai import OpenAI
import json
import os

# Read the OpenAI credentials from environment variables.
org = os.getenv("org")
OPENAI_API_KEY = os.getenv("open_ai")
client = OpenAI(api_key=OPENAI_API_KEY, organization=org)
description = """ | |
# As part of the project, we need to implement a FastAPI endpoint that takes a string as input and returns a list of questions along with their corresponding answers. This endpoint will be used to generate questions from text data. | |
Details: | |
Input-1: A string containing the input text. (Type: String) | |
Input-2: Number of questions (Type: Integer) | |
-------------------------------------------- | |
Output: A JSON response containing a list of questions and a corresponding list of answers. | |
""" | |
app = FastAPI(docs_url="/", description=description) | |
# Register the function as a POST endpoint; the route path is chosen here
# (the original code did not include a decorator), so adjust it to match your deployment.
@app.post("/questions")
async def getQuestions(job_description: str, no_of_questions: int):
    # Ask the model for interview questions in JSON mode so the reply is machine-parseable.
    response = client.chat.completions.create(
        model="gpt-3.5-turbo-1106",
        response_format={"type": "json_object"},  # Enable JSON mode.
        messages=[
            {"role": "system",
             "content": "You are a helpful assistant designed to output JSON in this format "
                        "[question-text as key and its value as answer-text]"},
            {"role": "user",
             "content": f"Given the job description [{job_description}] create {no_of_questions} "
                        f"interview questions and their corresponding answers"},
        ],
    )
    # The model returns a JSON string; parse it into a dict before returning it as the response body.
    result = response.choices[0].message.content
    parsed_data = json.loads(result)
    return parsed_data
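

# --- Usage sketch (not part of the original file; the route and example values are assumptions) ---
# A minimal way to exercise the endpoint locally with FastAPI's TestClient. It assumes the
# "/questions" route registered above and valid "org" / "open_ai" environment variables,
# and it issues a real OpenAI request when run. To serve the API instead, run
# `uvicorn app:app` (assuming this file is saved as app.py).
if __name__ == "__main__":
    from fastapi.testclient import TestClient

    test_client = TestClient(app)
    resp = test_client.post(
        "/questions",
        params={
            "job_description": "Senior Python developer with FastAPI experience",
            "no_of_questions": 3,
        },
    )
    # Expected shape (per the system prompt): {"<question-text>": "<answer-text>", ...}
    print(resp.json())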