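"""Gradio app for grading Canvas discussion posts with GPT-4 and chatting over the grading results."""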
import asyncio
import glob
import os
import shutil
import time
import traceback
import pandas as pd
import utils

from zipfile import ZipFile
import gradio as gr
from dotenv import load_dotenv
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings

from csv_agent import CSVAgent
from grader import Grader
from grader_qa import GraderQA
from ingest import ingest_canvas_discussions
from utils import reset_folder

load_dotenv()

pickle_file = "vector_stores/canvas-discussions.pkl"
index_file = "vector_stores/canvas-discussions.index"

grading_model = 'gpt-4'
qa_model = 'gpt-4'

llm = ChatOpenAI(model_name=qa_model, temperature=0, verbose=True)
embeddings = OpenAIEmbeddings(model='text-embedding-ada-002')

grader = None
grader_qa = None
disabled = gr.update(interactive=False)
enabled = gr.update(interactive=True)


def add_text(history, text):
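    """Run the model on the question, append the (question, answer) pair to the chat history, and clear the input box."""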
    print("Question asked: " + text)
    response = run_model(text)
    history = history + [(text, response)]
    print(history)
    return history, ""


def run_model(text):
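    """Send the question to the grading QA agent and append the elapsed time to the response."""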
    global grader, grader_qa
    start_time = time.time()
    print("start time:" + str(start_time))
    try:
        response = grader_qa.agent.run(text)
    except Exception as e:
        response = "I need a break. Please ask me again in a few minutes"
        print(traceback.format_exc())

    sources = []
    # for document in response['source_documents']:
    #     sources.append(str(document.metadata))

    source = ','.join(set(sources))
    # response = response['answer'] + '\nSources: ' + str(len(sources))
    end_time = time.time()
    # # If response contains string `SOURCES:`, then add a \n before `SOURCES`
    # if "SOURCES:" in response:
    #     response = response.replace("SOURCES:", "\nSOURCES:")
    response = response + "\n\n" + "Time taken: " + str(end_time - start_time)
    print(response)
    print(sources)
    print("Time taken: " + str(end_time - start_time))
    return response


def set_model(history):
    history = get_first_message(history)
    return history

def ingest(url, canvas_api_key, history):
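    """Download the Canvas discussion data, initialise the grader, and update the UI state."""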
    global grader, llm, embeddings
    text = f"Downloaded discussion data from {url} to start grading"
    ingest_canvas_discussions(url, canvas_api_key)
    grader = Grader(grading_model)
    response = "Ingested canvas data successfully"
    history = history + [(text, response)]
    return history, disabled, disabled, disabled, enabled


def start_grading(history):
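    """Run the grading chain in a fresh event loop and build the QA helper over the results."""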
    global grader, grader_qa
    text = f"Start grading discussions from {url}"
    if grader:
        # if grader.llm.model_name != grading_model:
        #     grader = Grader(grading_model)
        # Create a new event loop
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            # Use the event loop to run the async function
            loop.run_until_complete(grader.run_chain())
            grader_qa = GraderQA(grader, embeddings)
            response = "Grading done"
        finally:
            # Close the loop after use
            loop.close()
    else:
        response = "Please ingest data before grading"
    history = history + [(text, response)]
    return history, disabled, enabled, enabled, enabled


# def start_downloading(history):
#     files = glob.glob("output/*.csv")
#     if files:
#         file = files[0]
#         download.interactive = True
#         return gr.components.File(file)
#     else:
#         return " No file found"


def start_downloading():
    # with ZipFile("output/*.csv", "w") as zipObj:
    #     for idx, file in enumerate(files):
    #         zipObj.write(file.name, file.name.split("/")[-1])
    # return "outputfiles.zip"
    print(grader.csv)
    return gr.update(value=grader.csv, visible=True), gr.update(value=process_csv_text(), visible=True)

def get_headers():
    df = process_csv_text()
    return list(df.columns)
    

#file_input = glob.glob("output/*.csv") #[gr.File(file_count="multiple", file_types=["text", ".json", ".csv"])]

def get_first_message(history):
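    """Seed the chatbot with a welcome message and append the current grading status."""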
    global grader_qa
    history = [(None,
                'Get feedback on your Canvas discussions. Add your discussion URL and get your discussions graded instantly.')]
    return get_grading_status(history)


def get_grading_status(history):
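    """Check the output/ and docs/ folders to work out whether data is ingested and graded, and update the UI accordingly."""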
    global grader, grader_qa
    # Check if grading is complete
    if os.path.isdir('output') and len(glob.glob("output/*.csv")) > 0 and len(glob.glob("docs/*.json")) > 0 and len(
            glob.glob("docs/*.html")) > 0:
        if not grader:
            grader = Grader(qa_model)
            grader_qa = GraderQA(grader, embeddings)
        elif not grader_qa:
            grader_qa = GraderQA(grader, embeddings)
        if len(history) == 1:
            history = history + [(None, 'Grading is already complete. You can now ask questions')]
        enable_fields(False, False, False, False, True, True, True)
    # Check if data is ingested
    elif len(glob.glob("docs/*.json")) > 0 and len(glob.glob("docs/*.html")) > 0:
        if not grader:
            grader = Grader(qa_model)
        if len(history) == 1:
            history = history + [(None, 'Canvas data is already ingested. You can grade discussions now')]
        enable_fields(False, False, False, True, True, False, False)
    else:
        history = history + [(None, 'Please ingest data and start grading')]
        enable_fields(True, True, True, False, False, False, False)
    return history


# handle enable/disable of fields
def enable_fields(url_status, canvas_api_key_status, submit_status, grade_status,
                  download_status, chatbot_txt_status, chatbot_btn_status):
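    """Toggle the interactive state and placeholder text of the UI components."""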
    url.interactive = url_status
    canvas_api_key.interactive = canvas_api_key_status
    submit.interactive = submit_status
    grade.interactive = grade_status
    download.interactive = download_status
    txt.interactive = chatbot_txt_status
    ask.interactive = chatbot_btn_status

    if not chatbot_txt_status:
        txt.placeholder = "Please grade discussions first"
    else:
        txt.placeholder = "Ask a question"
    if not url_status:
        url.placeholder = "Data already ingested"
    if not canvas_api_key_status:
        canvas_api_key.placeholder = "Data already ingested"


def reset_data():
    # Delete the output, docs, and vector_stores folders, reset grader and grader_qa, and return a fresh history
    global grader, grader_qa
    # reset_folder('output')
    # reset_folder('docs')
    # reset_folder('vector_stores')
    # grader = None
    # grader_qa = None
    # history = [(None, 'Data reset successfully')]
    # return history
    # If there is data left over from a previous run, clear it and reset the app state
    if (os.path.isdir('output') and len(glob.glob("output/*.csv")) > 0) or len(glob.glob("docs/*.json")) > 0 or len(
            glob.glob("docs/*.html")) > 0 or len(glob.glob("vector_stores/*.faiss")) > 0 or len(glob.glob("vector_stores/*.pkl")) > 0:
        reset_folder('output')
        reset_folder('docs')
        reset_folder('vector_stores')
        grader = None
        grader_qa = None
        history = [(None, 'Data reset successfully')]
        url.placeholder = 'Enter your Canvas Discussion URL'
        canvas_api_key.placeholder = 'Enter your Canvas API Key'
        return history, enabled, enabled, enabled, disabled, disabled, disabled, disabled, disabled, disabled, disabled
    else:
        history = [(None, 'There is no data to reset')]
        return history, enabled, enabled, enabled, disabled, disabled, disabled, disabled, disabled, disabled, disabled
    
def get_output_dir(orig_name):
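    """Return the absolute path for a file inside the local output/ folder."""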
    script_dir = os.path.dirname(os.path.abspath(__file__))
    output_dir = os.path.join(script_dir, 'output', orig_name)
    return output_dir


def upload_grading_results(file, history):
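    """Reset the output and vector_stores folders, save the uploaded grading CSV, and chat over it with a CSV agent."""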
    global grader, grader_qa
    # Delete output folder and save the file in output folder
    if os.path.isdir('output'):
        shutil.rmtree('output')
    os.mkdir('output')
    if os.path.isdir('vector_stores'):
        shutil.rmtree('vector_stores')
    os.mkdir('vector_stores')
    # Build the destination path inside the output folder
    path = os.path.join("output", os.path.basename(file.name))
    # Copy the uploaded file from its temporary location to the desired location
    shutil.copyfile(file.name, path)
    grader_qa = CSVAgent(llm, embeddings, path)
    history = [(None, 'Grading results uploaded successfully. Start Chatting!')]
    return history


def bot(history):
    return history


def process_csv_text():
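    """Load the generated grading CSV into a pandas DataFrame."""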
    file_path = utils.get_csv_file_name()
    df = pd.read_csv(file_path)
    return df


with gr.Blocks() as demo:
    gr.Markdown(f"<h2><center>{'Canvas Discussion Grading With Feedback'}</center></h2>")
    

    with gr.Row():
        url = gr.Textbox(
            label="Canvas Discussion URL",
            placeholder="Enter your Canvas Discussion URL"
        )

        canvas_api_key = gr.Textbox(
            label="Canvas API Key",
            placeholder="Enter your Canvas API Key", type="password"
        )
        submit = gr.Button(value="Submit", variant="secondary")
    with gr.Row():
        table = gr.Dataframe(label='Canvas CSV Output', type="pandas", overflow_row_behaviour="paginate", visible=False, wrap=True)

    with gr.Row(equal_height=True):
        
        grade = gr.Button(value="Grade", variant="secondary")
        download = gr.Button(value="Generate Output", variant="secondary")
        file = gr.components.File(label="CSV Output", container=False, visible=False).style(height=100)
        #reset = gr.ClearButton(value="Reset", components=[url, canvas_api_key, submit, table, grade, download])
        reset = gr.ClearButton(value="Reset")

    chatbot = gr.Chatbot([], label="Chat with grading results", elem_id="chatbot", height=400)
    
    with gr.Row():
        with gr.Column(scale=3):
            txt = gr.Textbox(
                label="Ask questions about how students did on the discussion",
                placeholder="Enter text and press enter, or upload an image", lines=1
            )
    
        upload = gr.UploadButton(label="Upload grading results", type="file", file_types=[".csv"], scale=1)
        ask = gr.Button(value="Ask", variant="secondary", scale=1)
    chatbot.value = get_first_message([])
    

    submit.click(ingest, inputs=[url, canvas_api_key, chatbot], outputs=[chatbot, url, canvas_api_key, submit, grade],
                 postprocess=False).then(
        bot, chatbot, chatbot
    )

    grade.click(start_grading, inputs=[chatbot], outputs=[chatbot, grade, download, txt, ask],
                postprocess=False).then(
        bot, chatbot, chatbot
    )

    download.click(start_downloading, inputs=[], outputs=[file, table]).then(
        bot, chatbot, chatbot
    )
    
    
    txt.submit(add_text, [chatbot, txt], [chatbot, txt], postprocess=False).then(
        bot, chatbot, chatbot
    )

    ask.click(add_text, inputs=[chatbot, txt], outputs=[chatbot, txt], postprocess=False).then(
        bot, chatbot, chatbot
    )

    reset.click(reset_data, inputs=[], outputs=[chatbot, url, canvas_api_key, submit, table, grade, download, txt, file, upload, ask]).success(
        bot, chatbot, chatbot)

    upload.upload(upload_grading_results, inputs=[upload, chatbot], outputs=[chatbot], postprocess=False).then(
        bot, chatbot, chatbot)

if __name__ == "__main__":
    demo.queue(concurrency_count=5)
    demo.launch(debug=True)