import os
import time
from collections import Counter

import gradio as gr
from openai import AzureOpenAI

# Last generated ticket per persona, so follow-up "improve"/"update"/"change"
# requests can build on it
structured_memory = {
    "πŸ‘¨β€πŸ’Ό Scrum Master": {},
    "πŸ’‘ Product Owner": {},
    "πŸ‘¨β€πŸ’» Developer": {},
    "🎨 Designer": {},
}
# Team-composition selections; each entry is a list of chosen options
selected_items = []

def prepare_model(selected_model):
    # Both supported deployments ("o4-mini" and "o3-mini") sit on the same
    # Azure OpenAI resource, so one client configuration covers either model.
    # The key is read from the AZURE_OPENAI_API_KEY environment variable
    # rather than being hardcoded in the source.
    return AzureOpenAI(
        api_version="2024-12-01-preview",
        azure_endpoint="https://nextgenagilehu4401821853.openai.azure.com/",
        api_key=os.environ["AZURE_OPENAI_API_KEY"],
    )

def clear_selections():
    selected_items.clear()
    return ""

def add_selection(option):
    # Each selection is expected to be a list of role options (e.g. from a
    # CheckboxGroup); flatten everything chosen so far and report role counts.
    selected_items.append(option)
    flat_list = [item for sublist in selected_items for item in sublist]
    counts = Counter(flat_list)
    return ", ".join(f"{count} {role.lower()}" for role, count in counts.items())

# Persona system prompts with emojis
persona_prompts = {
    "πŸ‘¨β€πŸ’Ό Scrum Master": "You are an experienced Scrum Master responsible for creating detailed JIRA tickets across parameters like EPIC, Feature, Story, Tasks, Subtasks, assignee based on job designation, sprint, and story points.",
    "πŸ’‘ Product Owner": "You are a Product Owner focused on defining product features, value delivery, and aligning JIRA tickets with business outcomes. Depending on requirements you can focus on these aspects: Description, Business Context, User Value, Scope, Out of Scope, Dependencies, Acceptance Criteria, Definition of Done, User Impact Metrics, Next Steps.",
    "πŸ‘¨β€πŸ’» Developer": "You are a Software Developer breaking down technical objectives into implementation tasks, bug fixes, and engineering subtasks. Depending on requirements you can focus on these aspects: Description, Acceptance Criteria, Technical Details (Frontend, Backend, Database, Integrations, Logging & Monitoring), Component/s, Definition of Done, Sub-tasks, Story Points.",
    "🎨 Designer": "You are a UX/UI Designer structuring JIRA tickets around user flows, UI components, and design assets."
}

# Default prompt examples
persona_default_prompts = {
    "πŸ‘¨β€πŸ’Ό Scrum Master": "Objective: Automate JIRA story creation for a new agile project",
    "πŸ’‘ Product Owner": "Objective: Define user stories for a travel booking feature",
    "πŸ‘¨β€πŸ’» Developer": "Objective: Break down the user login system into dev tasks",
    "🎨 Designer": "Objective: Create tasks for designing the homepage UI"
}

# Maps the UI display name to the Azure OpenAI deployment name
model_select = {
    "O-4 Mini": "o4-mini",
    "O-3 Mini": "o3-mini"
}
# Process function with memory per persona
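# Gradio chat handler: receives the user message, the running chat history, and
# the selected persona/model, and returns ("", updated history) so the input
# textbox is cleared after each turn.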
def process_files(message, chat_history, persona, model):
    selected_model = model_select.get(model)
    client = prepare_model(selected_model)
    system_prompt = persona_prompts.get(persona)
    memory = structured_memory[persona]

    if message.lower().startswith("objective:"):
        objective = message.split(":", 1)[-1].strip()
        if len(selected_items) > 0:
            flat_list = [item for sublist in selected_items for item in sublist]
            counts = Counter(flat_list)
            formatted_output = ", ".join(f"{count} {option.lower()}" for option, count in counts.items())
            full_prompt = f"Create a complete JIRA ticket for the {objective} where your team structure is: {formatted_output}"
        else:
            full_prompt = f"Create a complete JIRA ticket for the {objective}"

        chat_response = client.chat.completions.create(
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": full_prompt}
            ],
            max_completion_tokens=100000,
            model=selected_model
        )
        bot_message = chat_response.choices[0].message.content
        memory["current"] = bot_message

    elif any(keyword in message.lower() for keyword in ["improve", "update", "change"]):
        if "current" not in memory:
            bot_message = "Please provide a project objective first using 'Objective: <your project>'."
        else:
            previous_output = memory["current"]
            improvement_prompt = f"Improve the following JIRA ticket structure based on the user's instruction.\n\nStructure:\n{previous_output}\n\nUser Request:\n{message}"
            chat_response = client.chat.completions.create(
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": improvement_prompt}
                ],
                max_completion_tokens=100000,
                model=selected_model
            )
            bot_message = chat_response.choices[0].message.content
            memory["current"] = bot_message
    else:
        bot_message = "Please start with 'Objective: <project>' or a request to update something."

    chat_history.append({"role": "user", "content": message})
    chat_history.append({"role": "assistant", "content": bot_message})
    time.sleep(1)
    return "", chat_history

def insert_prefix(prefix_text, textbox_content):
    if not textbox_content.lower().startswith(prefix_text.lower()):
        return f"{prefix_text} {textbox_content}"
    else:
        return textbox_content

def set_default_prompt(persona):
    return persona_default_prompts.get(persona, "")

# Gradio App
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):  # Sidebar
            gr.Image("/afh/projects/NextGenAgileTool-aefe44c3-e188-4674-9bd7-fd55842e362e/data/logo-removebg-preview.png", width=135, height=100,show_download_button=False , container= False,show_fullscreen_button=False, show_label=False)
            # gr.Markdown("### *NextGenAgile* – AI Jira Assistant")
            gr.Markdown("### πŸ’½ Choose Model")

            model = gr.Dropdown(
                choices=[
                    "O-4 Mini",
                    "O-3 Mini"
                ],
                value="O-4 Mini",
                label=""
            )

            gr.Markdown("### 🧠 Choose Persona")

            persona = gr.Dropdown(
                choices=[
                    "πŸ‘¨β€πŸ’Ό Scrum Master",
                    "πŸ’‘ Product Owner",
                    "πŸ‘¨β€πŸ’» Developer",
                    "🎨 Designer"
                ],
                value="πŸ‘¨β€πŸ’Ό Scrum Master",
                label="πŸ§‘ Persona"
            )

        with gr.Column(scale=9):
            
            chatbot = gr.Chatbot(
                label="Chat History",
                type="messages",
                height=500,
                value=[
                    {"role": "assistant", "content": "Hello! πŸ‘‹ Start with an 'Objective' or ask to 'Improve' an earlier ticket."}
                ]
            )

            with gr.Row():
                objective_btn = gr.Button("➑️ Objective")
                improve_btn = gr.Button("✏️ Improve")
                update_btn = gr.Button("♻️ Update")
                change_btn = gr.Button("πŸ”„ Change")

            with gr.Row():
                msg = gr.Textbox(
                    placeholder="Type your request here...",
                    label="Your Prompt",
                    lines=2,
                    submit_btn=True
                )

            clear = gr.ClearButton([msg, chatbot])

            # Hook logic
            msg.submit(process_files, [msg, chatbot, persona, model], [msg, chatbot])
            persona.change(set_default_prompt, persona, msg)

            objective_btn.click(insert_prefix, inputs=[gr.State("Objective:"), msg], outputs=msg)
            improve_btn.click(insert_prefix, inputs=[gr.State("Improve:"), msg], outputs=msg)
            update_btn.click(insert_prefix, inputs=[gr.State("Update:"), msg], outputs=msg)
            change_btn.click(insert_prefix, inputs=[gr.State("Change:"), msg], outputs=msg)
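
            # Note: clear_selections() and add_selection() are defined above but
            # not wired to any component here; hook them to a CheckboxGroup and a
            # button if team composition should feed into the generated prompt.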

# demo.launch() returns (app, local_url, share_url); with share=True the third
# element is the public *.gradio.live URL (or None if the tunnel could not be
# created). prevent_thread_lock=True lets the lines below run while the app serves.
app, local_url, share_url = demo.launch(share=True, prevent_thread_lock=True)

public_url = share_url or "URL not found"

# Save only the public URL to a file
with open("gradio_link.txt", "w") as f:
    f.write(public_url)

# Keep the server running after the link has been written
demo.block_thread()