Spaces: Configuration error
❌ Error occurred at line 76: python3 - <<EOF
from transformers import AutoTokenizer, AutoModelForCausalLM
print("📥 Downloading tokenizer & model...")
tokenizer = AutoTokenizer.from_pretrained("$MODEL_NAME")
model = AutoModelForCausalLM.from_pretrained("$MODEL_NAME")
print("✅ Model ready.")
EOF
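The installer aborts here without surfacing the underlying Python exception, so the log only records the shell line number. A minimal sketch of the same download step with explicit error reporting makes the real failure visible; the checkpoint name is an assumed placeholder for whatever $MODEL_NAME expands to in the script:

    # Sketch only: same download step, but the actual exception is printed.
    # "EleutherAI/gpt-neo-125M" is an assumption standing in for $MODEL_NAME.
    import sys
    from transformers import AutoTokenizer, AutoModelForCausalLM

    model_name = "EleutherAI/gpt-neo-125M"  # assumed placeholder

    try:
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        model = AutoModelForCausalLM.from_pretrained(model_name)
    except Exception as exc:
        # Surface the real error instead of only a script line number.
        print(f"❌ Download failed for {model_name}: {exc}", file=sys.stderr)
        sys.exit(1)

    print("✅ Model ready.")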
❌ Error occurred at line 76: python3 - <<EOF
from transformers import AutoTokenizer, GPTNeoForCausalLM
print("📥 Downloading tokenizer & model (GPTNeoForCausalLM)...")
tokenizer = AutoTokenizer.from_pretrained("$MODEL_NAME")
model = GPTNeoForCausalLM.from_pretrained("$MODEL_NAME")
print("✅ Model ready (GPTNeoForCausalLM).")
EOF
❌ Error occurred at line 74: python3 - <<EOF
from transformers import AutoTokenizer, GPTNeoForCausalLM
print("📥 Downloading tokenizer & model (GPTNeoForCausalLM)...")
tokenizer = AutoTokenizer.from_pretrained("$MODEL_NAME")
model = GPTNeoForCausalLM.from_pretrained("$MODEL_NAME")
print("✅ Model ready (GPTNeoForCausalLM).")
EOF
❌ Error occurred at line 88: python3 - <<EOF
from transformers import GPT2Tokenizer, GPTNeoForCausalLM
print("📥 Downloading tokenizer & model (GPTNeoForCausalLM)...")
tokenizer = GPT2Tokenizer.from_pretrained("$MODEL_NAME")
model = GPTNeoForCausalLM.from_pretrained("$MODEL_NAME")
print("✅ Model ready (GPTNeoForCausalLM).")
EOF
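This variant pairs GPT2Tokenizer with GPTNeoForCausalLM, which matches EleutherAI's GPT-Neo checkpoints since GPT-Neo reuses the GPT-2 BPE vocabulary. One caveat that matters for the generation test later in this log: GPT-2-style tokenizers ship with no pad token, so it must be assigned before any call that pads. A sketch, with the checkpoint name assumed:

    # Sketch: canonical tokenizer/model pairing for GPT-Neo, plus the pad-token
    # assignment that padding=True requires. Checkpoint name is an assumption.
    from transformers import GPT2Tokenizer, GPTNeoForCausalLM

    tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-125M")
    tokenizer.pad_token = tokenizer.eos_token  # GPT-2 tokenizer defines no pad token
    model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-125M")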
❌ Error occurred at line 182: huggingface-cli repo create "$HF_USERNAME/$HF_SPACE_NAME" --type space --space-sdks gradio
❌ Error occurred at line 182: huggingface-cli repo create "$HF_USERNAME/$HF_SPACE_NAME" --type space
❌ Error occurred at line 182: huggingface-cli repo create "$HF_SPACE_NAME" --type space
❌ Error occurred at line 216: huggingface-cli repo create "$HF_SPACE_NAME" --type space --space-sdk gradio
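These repeated failures look like a flag typo: in huggingface_hub's CLI the Space SDK option is spelled --space_sdk (underscore), not --space-sdks or --space-sdk, and creating a Space without specifying an SDK is typically rejected. A hedged sketch of the equivalent call through the Python API, assuming a recent huggingface_hub and placeholder names for $HF_USERNAME/$HF_SPACE_NAME:

    # Sketch: create the Space via the Python API instead of the CLI.
    # The corrected shell form would be:
    #   huggingface-cli repo create "$HF_SPACE_NAME" --type space --space_sdk gradio
    from huggingface_hub import HfApi

    api = HfApi()  # picks up the token stored by `huggingface-cli login`
    api.create_repo(
        repo_id="your-username/shx-space",  # assumed placeholder for $HF_USERNAME/$HF_SPACE_NAME
        repo_type="space",
        space_sdk="gradio",
        exist_ok=True,  # don't fail if the Space already exists
    )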
❌ Error occurred at line 184: python3 - <<EOF
from transformers import GPT2Tokenizer, GPTNeoForCausalLM
import json
# Load configuration
with open("$WORK_DIR/shx-config.json", "r") as f:
    config = json.load(f)
tokenizer = GPT2Tokenizer.from_pretrained(config["model_name"])
model = GPTNeoForCausalLM.from_pretrained(config["model_name"])
prompt = "SHX is"
inputs = tokenizer(prompt, return_tensors="pt", padding=True)
output = model.generate(
    input_ids=inputs.input_ids,
    attention_mask=inputs.attention_mask,
    pad_token_id=tokenizer.eos_token_id,
    max_length=config["max_length"],
    temperature=config["temperature"],
    top_k=config["top_k"],
    top_p=config["top_p"]
)
print("🧠 SHX Test Output:", tokenizer.decode(output[0], skip_special_tokens=True))
EOF
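Two likely culprits in this smoke test: tokenizer(..., padding=True) raises a ValueError when the tokenizer has no pad token (GPT-2-style tokenizers define none), and temperature/top_k/top_p are ignored by generate() unless do_sample=True is passed. A sketch with both fixes applied, using an assumed checkpoint name and literal sampling values in place of the shx-config.json entries:

    # Sketch of the same smoke test with two fixes:
    # (1) assign a pad token so padding=True does not raise;
    # (2) pass do_sample=True so temperature/top_k/top_p take effect.
    from transformers import GPT2Tokenizer, GPTNeoForCausalLM

    model_name = "EleutherAI/gpt-neo-125M"  # assumption: config["model_name"]
    tokenizer = GPT2Tokenizer.from_pretrained(model_name)
    tokenizer.pad_token = tokenizer.eos_token  # fix (1)
    model = GPTNeoForCausalLM.from_pretrained(model_name)

    inputs = tokenizer("SHX is", return_tensors="pt", padding=True)
    output = model.generate(
        input_ids=inputs.input_ids,
        attention_mask=inputs.attention_mask,
        pad_token_id=tokenizer.eos_token_id,
        do_sample=True,   # fix (2): sampling params are ignored without this
        max_length=60,    # assumed in place of config["max_length"]
        temperature=0.8,  # assumed in place of config["temperature"]
        top_k=50,         # assumed in place of config["top_k"]
        top_p=0.95,       # assumed in place of config["top_p"]
    )
    print("🧠 SHX Test Output:", tokenizer.decode(output[0], skip_special_tokens=True))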