chore: update README.md content
README.md CHANGED

@@ -6,7 +6,7 @@ colorTo: pink
 sdk: gradio
 sdk_version: 4.36.1
 app_file: app.py
-pinned:
+pinned: false
 header: mini
 suggested_hardware: a10g-small
 language:
@@ -20,8 +20,8 @@ language:
 - ko
 - zh
 license: other
-license_name: ghost-llms
-license_link: https://ghost-x.org/ghost-llms
+license_name: ghost-open-llms
+license_link: https://ghost-x.org/ghost-open-llms
 tags:
 - ghost
 ---
app.py CHANGED

@@ -249,7 +249,7 @@ if not torch.cuda.is_available():
 
 
 if torch.cuda.is_available():
-    model_id = "
+    model_id = "ghost-x/ghost-8b-beta"
     model_tk = os.getenv("HF_TOKEN", None)
     model = AutoModelForCausalLM.from_pretrained(
        model_id,
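For context, the touched block in app.py only loads the model when a GPU is available and passes an optional Hugging Face token read from the Space's secrets. Below is a minimal sketch of how that setup could look in full, assuming the standard transformers API; the dtype, device placement, and tokenizer handling are assumptions, since only the `model_id` line appears in this diff.

```python
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

if torch.cuda.is_available():
    # Model repository set by the updated line in app.py.
    model_id = "ghost-x/ghost-8b-beta"
    # Optional access token, typically stored as a Space secret named HF_TOKEN.
    model_tk = os.getenv("HF_TOKEN", None)

    # Assumed loading options (not shown in the diff): bfloat16 weights,
    # automatic device placement across available GPUs.
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=torch.bfloat16,
        device_map="auto",
        token=model_tk,
    )
    tokenizer = AutoTokenizer.from_pretrained(model_id, token=model_tk)
```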