SrijitMukherjee committed on
Commit 3f6490c · verified · 1 Parent(s): 4ba2959

Update app.py

Files changed (1)
  1. app.py +12 -12
app.py CHANGED
@@ -1,24 +1,24 @@
  import pandas as pd
  import streamlit as st
- import ollama
- #import transformers
+ # import ollama
+ import transformers
  import torch
- #model_id = "meta-llama/Meta-Llama-3-70B"
- # pipeline = transformers.pipeline(
- #     "text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device_map="auto"
- # )
+ model_id = "meta-llama/Meta-Llama-3-8B"
+ pipeline = transformers.pipeline(
+     "text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device_map="auto"
+ )

  # Load your CSV file
  df = pd.read_csv("your_file.csv")

  # Function to generate responses using the Llama3 model
- def generate_response(question):
-     response = ollama.chat(model='llama3', messages=[{'role': 'user', 'content': question}])
-     return response['message']['content']
-
  # def generate_response(question):
- #     response = pipeline(questions)
- #     return response
+ #     response = ollama.chat(model='llama3', messages=[{'role': 'user', 'content': question}])
+ #     return response['message']['content']
+
+ def generate_response(question):
+     response = pipeline(question)
+     return response[0]['generated_text']  # the text-generation pipeline returns a list of dicts

  # Define the functions for solving problems, giving hints, and creating similar problems
  def show_problem(exam, year, problem):
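
Note: the transformers text-generation pipeline returns a list of dicts of the form [{'generated_text': ...}], unlike ollama.chat, which returns a message payload ({'message': {'content': ...}}), so the new generate_response has to unwrap its output. Below is a minimal, self-contained sketch of the added code path; the max_new_tokens value and the sample prompt are illustrative assumptions rather than part of the commit, and it presumes access to the gated meta-llama/Meta-Llama-3-8B checkpoint plus enough GPU memory for device_map="auto".

import transformers
import torch

model_id = "meta-llama/Meta-Llama-3-8B"
pipeline = transformers.pipeline(
    "text-generation", model=model_id, model_kwargs={"torch_dtype": torch.bfloat16}, device_map="auto"
)

def generate_response(question):
    # The pipeline returns a list such as [{"generated_text": "<prompt + completion>"}].
    # max_new_tokens is an illustrative cap, not a value set in the commit.
    outputs = pipeline(question, max_new_tokens=256)
    return outputs[0]["generated_text"]

print(generate_response("State the Pythagorean theorem."))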