File size: 4,069 Bytes
8e3c024
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import chainlit as cl
from pydantic import BaseModel, Field
from dotenv import load_dotenv
import instructor
from openai import OpenAI
import os

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()

# Patch the OpenAI client with Instructor
# so that chat.completions.create accepts a `response_model` argument and
# returns validated Pydantic instances instead of raw completions.
# NOTE(review): os.environ['OPENAI_API_KEY'] raises KeyError if the variable is
# missing — confirm failing fast at import time is intended.
client = instructor.from_openai(OpenAI(api_key=os.environ['OPENAI_API_KEY']))

# Define the Pydantic models
class UserProposal(BaseModel):
    """Structured extraction of the user's prompt: the distilled plan plus
    three quality assessments produced by the model.

    The `description` on each field is sent to the LLM by Instructor to
    guide what it should fill in.
    """
    # The distilled, self-contained version of the user's plan.
    proposal: str = Field(description="This is the proposal of the original user prompt. It should be a clear concise detailed plan to use simple ai software tools to solve specific problem.")
    # NOTE(review): the three checks below are typed `str`, so the LLM returns
    # free-text answers rather than booleans — confirm this is intentional;
    # downstream code currently never reads them.
    is_clear: str = Field(description="Is the proposed plan clear? It specifies which tools it needs to use and how. It lays out each component and how they all connect.")
    is_detailed: str = Field(description="Is the proposed plan detailed? Each component should have a description of what it does.")
    is_explicit: str = Field(description="Is the proposed plan explicit? Each component should have a data model to describe their input and output schema.")

class ProposedArchitecture(BaseModel):
    """Response model for the architecture-generation step: a single free-text
    architecture description listing the tools/packages the plan requires."""
    proposed_architecture: str = Field(description="A detailed AI application architecture with all the tools required for the plan proposed. (e.g. Python packages)")

class PropositionWithRevision(BaseModel):
    """Response model for the revision step: the architecture rewritten as a
    step-by-step implementation plan."""
    revised_proposed_architecture: str = Field(description="Step by step implementation of software solution.")

# Define functions
def extract_user_proposal_details(user_proposal: str, model: str = "gpt-4-turbo-preview") -> UserProposal:
    """Extract a structured ``UserProposal`` from the raw user prompt.

    Args:
        user_proposal: The user's original free-form prompt.
        model: OpenAI model name to use. Defaults to the model the app was
            originally written against; parameterized so callers can swap
            models without editing this function.

    Returns:
        A ``UserProposal`` instance validated by Instructor.
    """
    # Instructor's `response_model` makes the call return a parsed/validated
    # Pydantic object rather than a raw completion.
    return client.chat.completions.create(
        model=model,
        response_model=UserProposal,
        messages=[
            {"role": "user", "content": user_proposal},
        ],
    )

def generate_proposed_architecture(proposal: str, model: str = "gpt-4-turbo-preview") -> ProposedArchitecture:
    """Generate an AI application architecture for the given proposal.

    Args:
        proposal: The distilled plan text (typically ``UserProposal.proposal``).
        model: OpenAI model name to use; defaulted for backward compatibility.

    Returns:
        A ``ProposedArchitecture`` instance validated by Instructor.
    """
    return client.chat.completions.create(
        model=model,
        response_model=ProposedArchitecture,
        messages=[
            {"role": "user", "content": f"Write a detailed AI application architecture with all the tools required for the plan proposed: \n\n{proposal}"},
        ],
    )

def revise_architecture(proposed_architecture: str, model: str = "gpt-4-turbo-preview") -> PropositionWithRevision:
    """Revise a proposed architecture into a step-by-step implementation plan.

    Args:
        proposed_architecture: The architecture text to revise. Callers may
            append extra context (e.g. user feedback) to this string.
        model: OpenAI model name to use; defaulted for backward compatibility.

    Returns:
        A ``PropositionWithRevision`` instance validated by Instructor.
    """
    return client.chat.completions.create(
        model=model,
        response_model=PropositionWithRevision,
        messages=[
            {"role": "user", "content": f"Revise the plan proposed: \n\n{proposed_architecture}\n\nThe plan should be a step by step implementation of software solution."},
        ],
    )

# Define the Chainlit message handler
# Define the Chainlit message handler
@cl.on_message
async def main(message: cl.Message):
    """Handle an incoming chat message end to end.

    Pipeline: extract a structured proposal from the message, generate an
    architecture, ask the user for feedback, produce a feedback-aware
    revision, and persist all three artifacts to ``output.md``.
    """
    user_proposal = message.content

    # The helper functions make blocking HTTP calls; run them via
    # cl.make_async so they execute in a worker thread and do not stall
    # the asyncio event loop (and with it the Chainlit UI).
    user_proposal_details = await cl.make_async(extract_user_proposal_details)(user_proposal)

    proposed_architecture = await cl.make_async(generate_proposed_architecture)(user_proposal_details.proposal)

    await cl.Message(
        content=f"Proposed Architecture:\n{proposed_architecture.proposed_architecture}"
    ).send()

    # AskUserMessage resolves to None on timeout; only proceed with feedback.
    feedback_message = await cl.AskUserMessage(content="What do you think about this proposed plan and alleged architecture?", timeout=300).send()
    if feedback_message:
        human_feedback_of_proposed_plan = feedback_message['output']

        # Bug fix: the feedback was previously collected but never used —
        # the revision saw only the original architecture. Fold the user's
        # feedback into the text handed to the revision step so it actually
        # influences the revised plan.
        revised_architecture = await cl.make_async(revise_architecture)(
            f"{proposed_architecture.proposed_architecture}\n\n"
            f"User feedback on the plan:\n{human_feedback_of_proposed_plan}"
        )

        await cl.Message(
            content=f"Revised Architecture:\n{revised_architecture.revised_proposed_architecture}"
        ).send()

        # Persist all session artifacts; explicit encoding avoids
        # platform-dependent default encodings on write.
        with open("output.md", "w", encoding="utf-8") as output_file:
            output_file.write("# User Proposal\n")
            output_file.write(user_proposal_details.proposal + "\n\n")
            output_file.write("# Proposed Architecture\n")
            output_file.write(proposed_architecture.proposed_architecture + "\n\n")
            output_file.write("# Revised Architecture\n")
            output_file.write(revised_architecture.revised_proposed_architecture + "\n")

        await cl.Message(
            content="The results have been saved to output.md"
        ).send()
    else:
        await cl.Message(
            content="No feedback received. Exiting."
        ).send()