import gradio as gr
from transformers import pipeline
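# Load Mistral-7B-Instruct as a Hugging Face text-generation pipeline.
# (The 7B weights are large; a GPU or a quantized variant is recommended.)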
generator = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.1")
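# Turn a free-form client brief into a drafted proposal.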
def generate_proposal(client_description):
    # Build an instruction-style prompt around the client's request.
    prompt = f"Generate a professional project proposal based on the following client request:\n\n{client_description}\n\nProposal:"
    # Sample up to 512 new tokens; return_full_text=False keeps the prompt out of the returned text.
    result = generator(prompt, max_new_tokens=512, do_sample=True, temperature=0.7, return_full_text=False)
    return result[0]["generated_text"].strip()
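# Wire the generator into a simple Gradio UI: one text box in, one text box out.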
iface = gr.Interface(
    fn=generate_proposal,
    inputs=gr.Textbox(label="Client Requirement", placeholder="Describe the project or client needs here...", lines=5),
    outputs=gr.Textbox(label="Generated Proposal", lines=15),
    title="AI Proposal Generator",
    description="This app uses Mistral-7B to generate business proposals based on client input.",
)
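# Start the local Gradio server (pass share=True to launch() for a temporary public URL).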
iface.launch()