from flask import Flask, request, jsonify
from huggingface_hub import InferenceClient

# Hugging Face Inference API client for the Mistral instruct model
client = InferenceClient(
    "mistralai/Mistral-7B-Instruct-v0.1"
)

app = Flask(__name__)

# Mentor profiles that get injected into the /get_mentor prompt
file_path = "mentor.txt"
with open(file_path, "r") as file:
    mentors_data = file.read()

@app.route('/')
def home():
    return jsonify({"message": "Welcome to the Recommendation API!"})

def format_prompt(message):
    # Wrap the message in the Mistral instruct template; the closing </s>
    # token is produced by the model during generation, so it is not
    # appended to the prompt here.
    return f"<s>[INST] {message} [/INST]"

@app.route('/recommend', methods=['POST'])
def recommend(temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
    content = request.json
    user_degree = content.get('degree')
    user_stream = content.get('stream')
    user_semester = content.get('semester')

    # Clamp sampling parameters to safe values
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    prompt = f"""
    You need to act as a course recommendation engine and recommend courses for a student based on the details below.

    Degree: {user_degree}
    Stream: {user_stream}
    Current Semester: {user_semester}

    Based on the above details, recommend courses that relate to them.
    Note: The output should be valid JSON in the following format:
    {{"course1": "course_name", "course2": "course_name", "course3": "course_name", ...}}
    """
    formatted_prompt = format_prompt(prompt)

    # Stream tokens from the Inference API and accumulate them into one string
    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""
    for response in stream:
        output += response.token.text

    return jsonify({"ans": output})

@app.route('/get_mentor', methods=['POST'])
def mentor(temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
    content = request.json
    user_degree = content.get('degree')
    user_stream = content.get('stream')
    user_semester = content.get('semester')
    courses = content.get('courses')

    # Clamp sampling parameters to safe values
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    prompt = f"""
    You need to act as a mentor recommendation engine and recommend mentors for a student based on the details below; a list of mentors with their experience is attached.

    Degree: {user_degree}
    Stream: {user_stream}
    Current Semester: {user_semester}
    Courses opted: {courses}

    Mentor list = {mentors_data}

    Based on the above details, recommend the mentors that relate to them.
    Note: The output should be valid JSON in the following format:
    {{"mentor1": "mentor_name", "mentor2": "mentor_name", "mentor3": "mentor_name", ...}}
    """
    formatted_prompt = format_prompt(prompt)

    # Stream tokens from the Inference API and accumulate them into one string
    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""
    for response in stream:
        output += response.token.text

    return jsonify({"ans": output})

if __name__ == '__main__':
    app.run(debug=True)
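
# Illustrative local test (assumes the default Flask dev server on
# http://127.0.0.1:5000; the field values below are placeholders, only the
# field names come from the routes above):
#
#   import requests
#   payload = {"degree": "B.Tech", "stream": "Computer Science", "semester": "4"}
#   print(requests.post("http://127.0.0.1:5000/recommend", json=payload).json())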