|
|
|
import asyncio |
|
from mcp.openai_utils import ai_qa |
|
from mcp.gemini import gemini_qa |
|
|
|
async def draft_protocol(question: str, context: str, llm: str = "openai") -> str:
    """Draft a step-by-step experimental protocol for a hypothesis/question.

    Args:
        question: The hypothesis or research question the protocol must test.
        context: Background material embedded in the prompt to ground the draft.
        llm: Backend selector; ``"gemini"`` (case-insensitive) routes to the
            Gemini helper, any other value falls back to the OpenAI helper.

    Returns:
        The protocol text produced by the selected LLM backend.
    """
    # Select the backend coroutine; anything other than "gemini" uses OpenAI.
    backend = gemini_qa if llm.lower() == "gemini" else ai_qa
    prompt = (
        "You are a senior researcher. Draft a step-by-step experimental protocol to test: "
        f"{question}\nContext:\n{context}\nInclude materials, methods, controls, expected outcomes."
    )
    return await backend(prompt)