rnlduatm committed
Commit f3cefa3 · 1 Parent(s): 4944c37

Update space

Files changed (1): Text2Long_text.py (+2 -2)

Text2Long_text.py CHANGED
@@ -9,7 +9,7 @@ tokenizer = AutoTokenizer.from_pretrained("skt/kogpt2-base-v2")
 model = AutoModelForCausalLM.from_pretrained("skt/kogpt2-base-v2").to(device)
 
 # 3. Korean story generation function
-def generate_korean_story(prompt, max_length=300):
+def generate_korean_story(prompt, max_length=100):
     input_ids = tokenizer.encode(prompt, return_tensors="pt").to(device)
 
     outputs = model.generate(
@@ -31,7 +31,7 @@ def generate_korean_story(prompt, max_length=300):
 # 4. Run
 if __name__ == "__main__":
     user_prompt = input("📜 Enter the opening sentence of the story (Korean): ")
-    result = generate_korean_story(user_prompt, max_length=500)
+    result = generate_korean_story(user_prompt, max_length=100)
 
     print("\n📖 Generated Korean story:\n")
     print(result)
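
The hunks above cut off inside the model.generate(...) call (file lines 16-30 fall outside the diff context), so the sampling settings are not visible in this commit. Below is a minimal sketch of how the full Text2Long_text.py plausibly reads after the change; the imports, device setup, and every generate() argument other than max_length are assumptions for illustration, not part of the commit.

# Sketch of Text2Long_text.py after commit f3cefa3.
# Only the lines shown in the diff are confirmed; everything else is assumed.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# 1. Device selection (assumed; not shown in the diff)
device = "cuda" if torch.cuda.is_available() else "cpu"

# 2. Load the KoGPT2 tokenizer and model (confirmed by the diff context)
tokenizer = AutoTokenizer.from_pretrained("skt/kogpt2-base-v2")
model = AutoModelForCausalLM.from_pretrained("skt/kogpt2-base-v2").to(device)

# 3. Korean story generation function
def generate_korean_story(prompt, max_length=100):
    input_ids = tokenizer.encode(prompt, return_tensors="pt").to(device)

    outputs = model.generate(
        input_ids,
        max_length=max_length,            # the value this commit lowers to 100
        do_sample=True,                   # assumed sampling settings; not visible in the hunks
        top_k=50,
        top_p=0.95,
        no_repeat_ngram_size=3,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# 4. Run
if __name__ == "__main__":
    user_prompt = input("📜 Enter the opening sentence of the story (Korean): ")
    result = generate_korean_story(user_prompt, max_length=100)

    print("\n📖 Generated Korean story:\n")
    print(result)

Net effect of the commit: the function's default max_length drops from 300 to 100 and the explicit call under __main__ drops from 500 to 100, so the two values now agree and generation on the Space produces shorter output in less time.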