Update app.py
app.py CHANGED
@@ -9,12 +9,11 @@ import gradio as gr
 title = "Welcome to Tonic's 🐋🐳Orca-2-13B!"
 description = "You can use [🐋🐳microsoft/Orca-2-13b](https://huggingface.co/microsoft/Orca-2-13b) Or clone this space to use it locally or on huggingface! [Join me on Discord to build together](https://discord.gg/VqTxc76K3u)."
 
-os.environ['PYTORCH_CUDA_ALLOC_CONF'] = 'max_split_size_mb:24'
+# os.environ['PYTORCH_CUDA_ALLOC_CONF'] = 'max_split_size_mb:24'
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 model_name = "microsoft/Orca-2-13b"
-model = AutoModelForCausalLM.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name, device_map='auto')
 tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False,)
-model.to(device)
 
 class OrcaChatBot:
     def __init__(self, model, tokenizer, system_message="You are Orca, an AI language model created by Microsoft. You are a cautious assistant. You carefully follow instructions. You are helpful and harmless and you follow ethical guidelines and promote positive behavior."):
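For context, a minimal sketch of the updated loading path this commit introduces: with device_map='auto', the accelerate package (assumed to be installed) decides GPU/CPU placement of the 13B checkpoint, so the explicit model.to(device) call is no longer needed. The prompt template and generation settings below are illustrative assumptions based on the Orca-2 model card, not code from this commit.

# Minimal sketch (not part of the commit): load Orca-2-13b with device_map='auto'.
# Assumes `accelerate` is installed; prompt format is an assumption from the model card.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "microsoft/Orca-2-13b"
# accelerate handles weight placement across available GPU(s)/CPU,
# so no explicit model.to(device) call is required.
model = AutoModelForCausalLM.from_pretrained(model_name, device_map='auto')
tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False)

system_message = "You are Orca, an AI language model created by Microsoft."
user_message = "Hello, what can you do?"
# ChatML-style template as documented for Orca-2 (illustrative assumption here)
prompt = f"<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{user_message}<|im_end|>\n<|im_start|>assistant"

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=128)
# Decode only the newly generated tokens, skipping the prompt
print(tokenizer.decode(output_ids[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))

Because device_map='auto' can also offload layers to CPU when GPU memory runs short, this presumably replaces both the removed model.to(device) call and the commented-out PYTORCH_CUDA_ALLOC_CONF workaround as the Space's way of avoiding the runtime error.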