mobinln committed on
Commit
9338954
·
1 Parent(s): 9d6f55d

change to meta llama 3

Browse files
Files changed (1) hide show
  1. app.py +9 -2
app.py CHANGED
@@ -4,7 +4,7 @@ from huggingface_hub import InferenceClient
4
  """
5
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
6
  """
7
- client = InferenceClient(model="microsoft/Phi-3-vision-128k-instruct")
8
 
9
 
10
  def respond(
@@ -57,7 +57,14 @@ demo = gr.ChatInterface(
57
  label="Top-p (nucleus sampling)",
58
  ),
59
  ],
60
- multimodal=True,
 
 
 
 
 
 
 
61
  )
62
 
63
 
 
4
  """
5
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
6
  """
7
+ client = InferenceClient(model="meta-llama/Meta-Llama-3-8B-Instruct")
8
 
9
 
10
  def respond(
 
57
  label="Top-p (nucleus sampling)",
58
  ),
59
  ],
60
+ title="Meta Llama 3 8B Instruct",
61
+ description="Enter your prompt and get a response from the Meta Llama 3 8B Instruct model.",
62
+ examples=[
63
+ ["What is the capital of France?"],
64
+ ["What is the largest country in the world?"],
65
+ ["What is the currency of Japan?"],
66
+ ],
67
+ cache_examples=False,
68
  )
69
 
70