ManuelMagana committed
Commit 8f5f876 · 1 Parent(s): 6f598b8

Changing the Hugging Face model path

Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -1,13 +1,14 @@
  import os
  from dotenv import load_dotenv
  load_dotenv()
+
  import gradio as gr
  from huggingface_hub import InferenceClient

  """
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
  """
- client = InferenceClient(model="Layer7/autotrain-llama32-1b-finetuned", token=os.getenv("HF_TOKEN"))
+ client = InferenceClient(model="Layer7/autotrain-llama")


  def respond(message, history: list[tuple[str, str]], system_message, max_tokens, temperature, top_p):
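The body of `respond` lies outside this hunk, so the sketch below is only an illustration of how the reconfigured client is typically wired into that handler in the stock Gradio chat template; it assumes the `InferenceClient.chat_completion` streaming API from `huggingface_hub` 0.22+ and rebuilds the message list from the `(user, assistant)` history tuples visible in the function signature.

from huggingface_hub import InferenceClient

# Module-level client, as configured in the diff above.
client = InferenceClient(model="Layer7/autotrain-llama")


def respond(message, history: list[tuple[str, str]], system_message, max_tokens, temperature, top_p):
    # Turn the Gradio (user, assistant) history tuples into chat-completion messages.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Stream partial completions so the chat UI can update token by token.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,
    ):
        response += chunk.choices[0].delta.content or ""
        yield response

Since the new client is constructed without an explicit `token` argument, it relies on whatever authentication `huggingface_hub` picks up from the environment; the `load_dotenv()` call in the diff still loads variables such as HF_TOKEN from a local .env file.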