SantiagoMoreno-Col committed on
Commit
9e49f69
·
1 Parent(s): 3079e6c

Pytorch and GPU

Browse files
Files changed (1) hide show
  1. app.py +4 -1
app.py CHANGED
@@ -1,12 +1,15 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  from transformers import pipeline
 
4
 
5
  """
6
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
7
  """
 
8
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
9
- pipe = pipeline("text-generation", model="HiTZ/latxa-7b-v1")
 
10
 
11
  def respond(
12
  message,
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  from transformers import pipeline
4
+ import torch
5
 
6
  """
7
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
8
  """
9
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
10
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
11
+ pipe = pipeline("text-generation", model="HiTZ/latxa-7b-v1", device=0)
12
+
13
 
14
  def respond(
15
  message,