Upload workbook_server.py
workbook_server.py  CHANGED  (+29 -0)
@@ -4,6 +4,12 @@ import sqlite3
 from fastapi import FastAPI, HTTPException
 from fastapi.responses import JSONResponse
 from pydantic import BaseModel
+from openai import OpenAI
+import os
+
+# Initialize the OpenAI API client
+client = OpenAI()
+client.api_key = os.getenv('OPENAI_API_KEY')
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
@@ -62,5 +68,28 @@ def get_questions_by_chapter(request: ChapterRequest):
     ]
     return JSONResponse(content=question_list)
 
+class HintRequest(BaseModel):
+    question_text: str
+    options: str
+    answer: str
+
+@app.post('/api/hint')
+def generate_hint(request: HintRequest):
+    prompt = f"設問: {request.question_text}\n選択肢: {request.options}\n正解: {request.answer}\nこの設問に対するヒントを生成してください。"
+    try:
+        response = client.chat.completions.create(
+            model="gpt-4o-mini",
+            messages=[
+                {
+                    "role": "user",
+                    "content": [{'type': 'text', 'text': prompt}],
+                },
+            ],
+        )
+        response_content = response.choices[0].message.content
+        return JSONResponse(content={"hint": response_content})
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
 if __name__ == '__main__':
     uvicorn.run(app, host="127.0.0.1", port=8000, log_level="debug")
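For reference, a minimal client-side sketch of how the new /api/hint endpoint could be called once the server is running on 127.0.0.1:8000 (the host and port set in the __main__ block). The requests dependency and the sample question values are illustrative assumptions, not part of the commit.

# Hypothetical client for the /api/hint endpoint added in this commit.
# Assumes workbook_server.py is running locally and the requests package is installed.
import requests

payload = {
    # Sample values for illustration; HintRequest expects these three string fields.
    "question_text": "Which HTTP status code means 'Not Found'?",
    "options": "A) 200  B) 301  C) 404  D) 500",
    "answer": "C) 404",
}

resp = requests.post("http://127.0.0.1:8000/api/hint", json=payload)
resp.raise_for_status()  # a 500 here surfaces the HTTPException raised by generate_hint
print(resp.json()["hint"])  # the generated hint text returned by the endpoint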