Hello! This is a Korean sentence-generation model. It was trained on general and specialized-domain text data from AI Hub. ^^
!pip install transformers
!pip install sentence_transformers
from transformers import AutoTokenizer, GPTNeoXForCausalLM
path = "kimdwan/polyglot-ko-1.3b-Logan"
model = GPTNeoXForCausalLM.from_pretrained(path)
tokenizer = AutoTokenizer.from_pretrained(path)
# Give it the word(s) you want the sentence to contain and it generates a sentence automatically; generation stays within 20 tokens.
text = "인공"
token = tokenizer(text,return_tensors='pt')
model.eval()
model = model.to("cpu")
output = model.generate(input_ids=token["input_ids"])  # generate() defaults to max_length=20 tokens
tokenizer.decode(output[0])
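If you want longer or more varied continuations, you can pass explicit generation parameters. The snippet below is a minimal sketch that reuses the model and tokenizer loaded above; the specific values (max_new_tokens=40, do_sample, top_p, temperature) are illustrative assumptions, not settings recommended by the model author.

import torch

prompt = "인공"
inputs = tokenizer(prompt, return_tensors="pt")
with torch.no_grad():
    sampled = model.generate(
        input_ids=inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_new_tokens=40,                     # allow a longer continuation than the 20-token default
        do_sample=True,                        # sample instead of greedy decoding
        top_p=0.9,
        temperature=0.8,
        pad_token_id=tokenizer.eos_token_id,   # silence the missing-pad-token warning
    )
print(tokenizer.decode(sampled[0], skip_special_tokens=True))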