Update app.py
app.py
CHANGED
@@ -357,53 +357,53 @@ if Usage == 'Ask me anything (CSV file data)!📊':
            pass

            temp = st.slider('Temperature: ', 0.0, 1.0, 0.0)
[... removed lines 360-378 were not rendered in this diff view ...]
-                    if len(gr_prompt) > 4097:
-                        st.write('OVERWHELMING DATA!!! You have given me more than 4097 tokens! ^_^')
-                        st.write('As of today, the NLP model text-davinci-003/gpt-3.5-turbo that I run on takes in inputs that have less than 4097 tokens. Kindly retry ^_^')
-
-                    elif len(result_tab2.columns) < 2:
-                        st.write("I need more data to conduct analysis and provide visualizations for you... ^_^")
-
-                    else:
-                        st.success("Plotting...")
-                        response_graph = openai.Completion.create(
-                            engine="text-davinci-003",
-                            prompt = gr_prompt,
-                            max_tokens=1024,
-                            n=1,
-                            stop=None,
-                            temperature=0.5,
-                        )
-
-                        if
-
-
-
-                        else:
[... removed lines 402-406 were not rendered in this diff view ...]
+            if st.form("Form Layout Upload_csv"):
+                userPrompt = st.text_area("Input Prompt",'Enter Natural Language Query')
+                submitButton = st.form_submit_button(label = 'Submit')
+
+                if submitButton:
+                    try:
+                        col_p ="Create SQL statement from instruction. "+ext+" " " (" + column +")." +" Request:" + userPrompt + "SQL statement:"
+                        result = gpt3(col_p)
+                        sqlOutput = result #st.text_area('SQL Query', value=gpt3(col_p))
+                        warning(sqlOutput)
+                        result_tab2=ps.sqldf(sqlOutput)
+                        st.write(result_tab2)
+                        with open("fewshot_matplot.txt", "r") as file:
+                            text_plot = file.read()
+
+                        result_tab = result_tab2.reset_index(drop=True)
+                        result_tab_string = result_tab.to_string()
+                        gr_prompt = text_plot + userPrompt + result_tab_string + "Plot graph for: "
+
+                        if len(gr_prompt) > 4097:
+                            st.write('OVERWHELMING DATA!!! You have given me more than 4097 tokens! ^_^')
+                            st.write('As of today, the NLP model text-davinci-003/gpt-3.5-turbo that I run on takes in inputs that have less than 4097 tokens. Kindly retry ^_^')
+
+                        elif len(result_tab2.columns) < 2:
+                            st.write("I need more data to conduct analysis and provide visualizations for you... ^_^")
+
+                        else:
+                            st.success("Plotting...")
+                            response_graph = openai.Completion.create(
+                                engine="text-davinci-003",
+                                prompt = gr_prompt,
+                                max_tokens=1024,
+                                n=1,
+                                stop=None,
+                                temperature=0.5,
+                            )
+
+                            if response_graph['choices'][0]['text'] != "":
+                                print(response_graph['choices'][0]['text'])
+                                exec(response_graph['choices'][0]['text'])
+
+                            else:
+                                print('Retry! Graph could not be plotted *_*')
+
+                    except:
+                        results = gpt3(userPrompt)
+                        st.success('loaded')

        elif res == "Sample_Cars_csv":
            df = pd.read_csv('cars.csv')

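Editor's note: the block added in this hunk wires a Streamlit form to a natural-language-to-SQL step: the user's request goes to the app's gpt3() helper, which returns a SQL statement that is then run against the loaded DataFrame with pandasql. Below is a minimal, self-contained sketch of that flow, not the committed code: it uses the documented with st.form(...) context-manager pattern (the diff tests if st.form(...) instead), and it replaces gpt3(), ext and column with a hypothetical ask_llm_for_sql() stub and a hard-coded column list.

    # Sketch only: the form-to-SQL flow the added block implements, written with
    # the documented `with st.form(...)` pattern. ask_llm_for_sql() is a
    # hypothetical stand-in for the app's gpt3() helper; the real app builds the
    # prompt from its own ext and column variables.
    import pandas as pd
    import pandasql as ps
    import streamlit as st

    df = pd.read_csv("cars.csv")          # the sample table referenced in this diff
    columns = ", ".join(df.columns)       # plays the role of `column` in app.py

    def ask_llm_for_sql(nl_request: str) -> str:
        """Placeholder for gpt3(): ask an LLM for a SQL statement over table df."""
        prompt = ("Create SQL statement from instruction. "
                  f"Table df ({columns}). Request: {nl_request} SQL statement:")
        # ...send `prompt` to the completion endpoint and return the generated SQL...
        return "SELECT * FROM df LIMIT 5;"   # stubbed so the sketch runs offline

    with st.form("nl_query_form"):           # widgets inside a form are batched
        user_prompt = st.text_area("Input Prompt", "Enter Natural Language Query")
        submitted = st.form_submit_button(label="Submit")

    if submitted:                            # runs once per form submission
        sql_query = ask_llm_for_sql(user_prompt)
        st.warning(sql_query)                # surface the generated SQL, as the app does
        result = ps.sqldf(sql_query, {"df": df})   # execute it against the DataFrame
        st.write(result)

Keeping the widgets inside a form means the model is only called when Submit is pressed, rather than on every keystroke, which is presumably why the commit introduces the form wrapper.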
@@ -417,53 +417,54 @@ if Usage == 'Ask me anything (CSV file data)!📊':
            pass

            temp = st.slider('Temperature: ', 0.0, 1.0, 0.0)
[... removed lines 420-423 were not rendered in this diff view ...]
-            if submitButton:
-                try:
-                    col_p ="Create SQL statement from instruction. "+ext+" " " (" + column +")." +" Request:" + userPrompt + "SQL statement:"
-                    result = gpt3(col_p)
-                    sqlOutput = result #st.text_area('SQL Query', value=gpt3(col_p))
-                    warning(sqlOutput)
-                    result_tab2=ps.sqldf(sqlOutput)
-                    st.write(result_tab2)
-
-                    with open("fewshot_matplot.txt", "r") as file:
-                        text_plot = file.read()
-
-                    result_tab = result_tab2.reset_index(drop=True)
-                    result_tab_string = result_tab.to_string()
-                    gr_prompt = text_plot + userPrompt + result_tab_string + "Plot graph for: "
-
-                    if len(gr_prompt) > 4097:
-                        st.write('OVERWHELMING DATA!!! You have given me more than 4097 tokens! ^_^')
-                        st.write('As of today, the NLP model text-davinci-003 that I run on takes in inputs that have less than 4097 tokens. Kindly retry ^_^')
-
-                    elif len(result_tab2.columns) < 2:
-                        st.write("I need more data to conduct analysis and provide visualizations for you... ^_^")
-
-                    else:
-                        st.success("Plotting...")
-                        response_graph = openai.Completion.create(
-                            engine="text-davinci-003",
-                            prompt = gr_prompt,
-                            max_tokens=1024,
-                            n=1,
-                            stop=None,
-                            temperature=0.5,
-                        )
-
-                        if
-
-
-
-                        else:
[... removed lines 463-466 were not rendered in this diff view ...]
+
+            if st.form("Form Layout Custom_csv"):
+                userPrompt = st.text_area("Input Prompt",'Enter Natural Language Query')
+                submitButton = st.form_submit_button(label = 'Submit')
+
+                if submitButton:
+                    try:
+                        col_p ="Create SQL statement from instruction. "+ext+" " " (" + column +")." +" Request:" + userPrompt + "SQL statement:"
+                        result = gpt3(col_p)
+                        sqlOutput = result #st.text_area('SQL Query', value=gpt3(col_p))
+                        warning(sqlOutput)
+                        result_tab2=ps.sqldf(sqlOutput)
+                        st.write(result_tab2)
+
+                        with open("fewshot_matplot.txt", "r") as file:
+                            text_plot = file.read()
+
+                        result_tab = result_tab2.reset_index(drop=True)
+                        result_tab_string = result_tab.to_string()
+                        gr_prompt = text_plot + userPrompt + result_tab_string + "Plot graph for: "
+
+                        if len(gr_prompt) > 4097:
+                            st.write('OVERWHELMING DATA!!! You have given me more than 4097 tokens! ^_^')
+                            st.write('As of today, the NLP model text-davinci-003 that I run on takes in inputs that have less than 4097 tokens. Kindly retry ^_^')
+
+                        elif len(result_tab2.columns) < 2:
+                            st.write("I need more data to conduct analysis and provide visualizations for you... ^_^")
+
+                        else:
+                            st.success("Plotting...")
+                            response_graph = openai.Completion.create(
+                                engine="text-davinci-003",
+                                prompt = gr_prompt,
+                                max_tokens=1024,
+                                n=1,
+                                stop=None,
+                                temperature=0.5,
+                            )
+
+                            if response_graph['choices'][0]['text'] != "":
+                                print(response_graph['choices'][0]['text'])
+                                exec(response_graph['choices'][0]['text'])
+
+                            else:
+                                print('Retry! Graph could not be plotted *_*')
+                    except:
+                        results = gpt3(userPrompt)
+                        st.success('loaded')


elif Usage == 'Ask me anything!😊':
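Editor's note: both hunks finish with the same plotting step: build a graphing prompt from the few-shot examples in fewshot_matplot.txt, the user's request and the query result, guard the prompt length, call the completion endpoint, and execute the matplotlib code it returns. For reference, here is that logic gathered into one self-contained helper. It is a sketch of the same flow rather than the committed code, and it assumes the pre-1.0 openai package, where openai.Completion.create(engine=...) and the response['choices'][0]['text'] shape are available.

    # Sketch only: the plot-generation step added by both hunks, collected into
    # one helper. Assumes openai<1.0 (legacy Completion API) and the
    # fewshot_matplot.txt few-shot file that ships with the Space.
    import openai
    import pandas as pd
    import streamlit as st

    def plot_from_result(user_prompt: str, result_tab2: pd.DataFrame) -> None:
        with open("fewshot_matplot.txt", "r") as file:
            text_plot = file.read()

        result_tab = result_tab2.reset_index(drop=True)
        gr_prompt = text_plot + user_prompt + result_tab.to_string() + "Plot graph for: "

        # len() counts characters, not tokens; 4097 is the model's token limit,
        # so this check is only a rough approximation.
        if len(gr_prompt) > 4097:
            st.write('OVERWHELMING DATA!!! You have given me more than 4097 tokens! ^_^')
        elif len(result_tab2.columns) < 2:
            st.write("I need more data to conduct analysis and provide visualizations for you... ^_^")
        else:
            st.success("Plotting...")
            response_graph = openai.Completion.create(
                engine="text-davinci-003",
                prompt=gr_prompt,
                max_tokens=1024,
                n=1,
                stop=None,
                temperature=0.5,
            )
            generated = response_graph['choices'][0]['text']
            if generated != "":
                # The model is expected to return matplotlib code; exec() runs
                # arbitrary generated code, so only use this in a trusted demo.
                exec(generated)
            else:
                st.write('Retry! Graph could not be plotted *_*')

    # usage (inside the submit handler): plot_from_result(userPrompt, result_tab2)

Two caveats that apply to the committed code as well: len(gr_prompt) counts characters while the 4097 limit is measured in tokens, so the guard is approximate (a tokenizer such as tiktoken gives an exact count), and exec() runs whatever code the model returns, which is only acceptable in a trusted demo environment.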