ragavendran-r committed
Commit 4a14dde · verified · 1 Parent(s): f1c573d

Update app.py

Files changed (1): app.py (+6 −2)
app.py CHANGED
@@ -5,13 +5,17 @@ from huggingface_hub import InferenceClient
 client = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")
 
 def generate_response(prompt, max_length=512, temperature=0.7):
-    """Generate a response from the model."""
+    """Generate a response from the model and check metadata."""
     response = client.text_generation(
         prompt,
         max_new_tokens=max_length,
         temperature=temperature,
+        details=True  # This may provide extra model info
     )
-    return response
+    print(f"Response Metadata: {response}")  # Check if model details are in the response
+    return response["generated_text"] if isinstance(response, dict) else response
+
+print(f"Using model: {client.model_id}")
 
 # Define the Gradio interface
 iface = gr.Interface(
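
Note (not part of the commit above): in recent huggingface_hub releases, text_generation(..., details=True) returns a TextGenerationOutput object rather than a plain string or dict, and the client exposes its target model as client.model. A minimal sketch of reading the text and metadata under that assumption:

from huggingface_hub import InferenceClient

# Sketch only, assuming a recent huggingface_hub where details=True yields a
# TextGenerationOutput with .generated_text and .details attributes.
client = InferenceClient("NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO")

output = client.text_generation(
    "Write a haiku about autumn.",  # example prompt (hypothetical)
    max_new_tokens=64,
    temperature=0.7,
    details=True,  # request token-level details alongside the generated text
)

print(output.generated_text)           # the generated text itself
print(output.details.finish_reason)    # why generation stopped, e.g. "length" or "eos_token"
print(f"Using model: {client.model}")  # the model/endpoint this client targets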