Z3ktrix committed
Commit b05ac74 · verified · 1 Parent(s): 3f4939e

Update app.py

Files changed (1)
  1. app.py +7 -5
app.py CHANGED
@@ -15,10 +15,10 @@ HF_API_KEY = os.getenv('HFREAD')
 API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
 headers = {"Authorization": f"Bearer {HF_API_KEY}"}
 
-# Function to query the Hugging Face model
-def query_huggingface(payload):
+# Function to query the Hugging Face model with a structured prompt
+def query_huggingface(prompt):
     try:
-        response = requests.post(API_URL, headers=headers, json=payload)
+        response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
         response.raise_for_status()
         return response.json()
     except requests.exceptions.RequestException as e:
@@ -39,10 +39,12 @@ async def on_ready():
 @bot.command(name='ask')
 async def ask(ctx, *, question: str):
     """
-    Command to ask a question to the Hugging Face model.
+    Command to ask a question to the Hugging Face model with an instructive prompt.
     """
+    # Create a structured prompt
+    prompt = f"Do not be annoying. Do not write insanely large responses unless asked specifically. Please provide a detailed response to the following: {question}"
     await ctx.send(f"Question: {question}")
-    response = query_huggingface({"inputs": question})
+    response = query_huggingface(prompt)
     if 'generated_text' in response:
         await ctx.send(f"Response: {response['generated_text']}")
     elif isinstance(response, list) and len(response) > 0 and 'generated_text' in response[0]:
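
For context, a minimal sketch of app.py as it stands after this commit, assembled from the two hunks above. The bot setup (intents, command prefix), the body of the except branch, the remainder of the ask handler, and the DISCORD_TOKEN environment variable name are not visible in this diff, so those parts are assumptions rather than the repository's actual code.

import os

import requests
import discord
from discord.ext import commands

HF_API_KEY = os.getenv('HFREAD')
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
headers = {"Authorization": f"Bearer {HF_API_KEY}"}

# Function to query the Hugging Face model with a structured prompt
def query_huggingface(prompt):
    try:
        response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        # Assumption: the except body (not shown in the diff) reports the error to the caller
        return {"error": str(e)}

# Assumption: bot setup is outside the diffed hunks
intents = discord.Intents.default()
intents.message_content = True
bot = commands.Bot(command_prefix='!', intents=intents)

@bot.command(name='ask')
async def ask(ctx, *, question: str):
    """
    Command to ask a question to the Hugging Face model with an instructive prompt.
    """
    # Create a structured prompt
    prompt = f"Do not be annoying. Do not write insanely large responses unless asked specifically. Please provide a detailed response to the following: {question}"
    await ctx.send(f"Question: {question}")
    response = query_huggingface(prompt)
    if 'generated_text' in response:
        await ctx.send(f"Response: {response['generated_text']}")
    elif isinstance(response, list) and len(response) > 0 and 'generated_text' in response[0]:
        # Assumption: the handler continues past the last line shown in the diff
        await ctx.send(f"Response: {response[0]['generated_text']}")
    else:
        await ctx.send(f"Unexpected response: {response}")

# Assumption: token environment variable name
bot.run(os.getenv('DISCORD_TOKEN'))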