rishikasharma committed on
Commit f113451 · verified · 1 Parent(s): fcb4a44

Update app.py

Files changed (1):
  1. app.py +5 -38
app.py CHANGED
@@ -1,40 +1,7 @@
-import gradio as gr
-import requests
-from datasets import load_dataset
-from dotenv import load_dotenv
-import os
+from huggingface_hub import InferenceClient
 
-load_dotenv()
-HUGGINGFACE_TOKEN = os.getenv('HUGGINGFACE_TOKEN')
-
-ds = load_dataset("fka/awesome-chatgpt-prompts")
-
-# Hugging Face API configuration
-API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
-HEADERS = {"Authorization": f"Bearer {HUGGINGFACE_TOKEN}"}
-
-# Function to interact with the Hugging Face model
-def query_huggingface_api(input_text):
-    payload = {"inputs": input_text}
-    try:
-        response = requests.post(API_URL, headers=HEADERS, json=payload)
-        response.raise_for_status()  # Raise error for HTTP errors
-        return response.json()[0]["generated_text"]
-    except requests.exceptions.RequestException as e:
-        return f"Error: {str(e)}"
-
-# Gradio interface
-def chatbot(input_text):
-    response = query_huggingface_api(input_text)
-    return response
-
-iface = gr.Interface(
-    fn=chatbot,
-    inputs=gr.Textbox(lines=2, placeholder="Type your message here..."),
-    outputs=gr.Textbox(),
-    title="AI Chatbot",
-    description="Chat with the AI powered by Hugging Face Mistral-7B-Instruct-v0.3.",
+client = InferenceClient(
+    model="mistralai/Mistral-7B-Instruct-v0.3",
+    token="your_actual_token_here"  # Replace with the actual token temporarily
 )
-
-if __name__ == "__main__":
-    iface.launch()
+print(client.text_generation(prompt="Hello"))
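
The added code keeps a placeholder token in source. A minimal sketch of the same InferenceClient call that instead reads the HUGGINGFACE_TOKEN environment variable (the variable name used by the removed dotenv-based version of app.py) could look like:

import os
from huggingface_hub import InferenceClient

# Sketch only: pull the token from the environment (HUGGINGFACE_TOKEN, as in the
# removed dotenv-based code) rather than hardcoding it in app.py.
client = InferenceClient(
    model="mistralai/Mistral-7B-Instruct-v0.3",
    token=os.getenv("HUGGINGFACE_TOKEN"),
)
print(client.text_generation(prompt="Hello"))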