import gradio as gr
from gradio_client import Client
import json
import logging
import openai
import os
import re
# Logging configuration
logging.basicConfig(filename='youtube_script_extractor.log', level=logging.DEBUG,
                    format='%(asctime)s - %(levelname)s - %(message)s')
openai.api_key = os.getenv("OPENAI_API_KEY")
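# The OpenAI key is read from the environment; calls made in call_api() below will
# fail if it is not set (on a Hugging Face Space it would typically be configured
# as a secret, but that is an assumption about the deployment, not part of this code).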
# Sentence splitting helper: splits Korean text on common sentence-final endings,
# then re-merges the pieces into chunks of roughly 100 characters.
def split_sentences(text):
    sentences = re.split(r"(니다|에요|구나|해요|군요|겠어요|시오|해라|예요|아요|데요|대요|세요|어요|게요|구요|고요|나요|하죠)(?![\w])", text)
    combined_sentences = []
    current_sentence = ""
    for i in range(0, len(sentences), 2):
        # Re-attach each captured ending to the text that precedes it
        if i + 1 < len(sentences):
            sentence = sentences[i] + sentences[i + 1]
        else:
            sentence = sentences[i]
        if len(current_sentence) + len(sentence) > 100:  # start a new chunk past 100 characters
            combined_sentences.append(current_sentence.strip())
            current_sentence = sentence.strip()
        else:
            current_sentence += sentence
        if sentence.endswith(('.', '?', '!')):
            combined_sentences.append(current_sentence.strip())
            current_sentence = ""
    if current_sentence:
        combined_sentences.append(current_sentence.strip())
    return combined_sentences
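# Illustrative example (assumed sample input): chunks only break once the running
# text exceeds 100 characters or a piece happens to end in '.', '?' or '!', so a
# short transcript comes back as a single chunk:
#   split_sentences("안녕하세요 오늘은 좋은 영상을 준비했습니다 시작하겠습니다")
#   -> ["안녕하세요 오늘은 좋은 영상을 준비했습니다 시작하겠습니다"]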
def parse_api_response(response):
    try:
        if isinstance(response, str):
            response = json.loads(response)
        if isinstance(response, list) and len(response) > 0:
            response = response[0]
        if not isinstance(response, dict):
            raise ValueError(f"Unexpected response format. Received data type: {type(response)}")
        return response
    except Exception as e:
        logging.error(f"Failed to parse API response: {str(e)}")
        raise ValueError(f"Failed to parse API response: {str(e)}")
def get_youtube_script(url):
    logging.info(f"Starting script extraction: URL = {url}")
    client = Client("whispersound/YT_Ts_R")
    try:
        result = client.predict(youtube_url=url, api_name="/predict")
        parsed_result = parse_api_response(result)
        if 'data' not in parsed_result or not parsed_result['data']:
            raise ValueError("The API response contains no valid data.")
        data = parsed_result["data"][0]
        title = data.get("title", "No title")
        description = data.get("description", "No description")
        transcription_text = data.get("transcriptionAsText", "")
        if not transcription_text:
            raise ValueError("No transcript was extracted.")
        logging.info("Script extraction complete")
        return title, description, transcription_text
    except Exception as e:
        logging.exception("Error during script extraction")
        raise
def call_api(prompt, max_tokens, temperature, top_p):
    try:
        response = openai.ChatCompletion.create(
            model="gpt-4o-mini",
            messages=[{"role": "user", "content": prompt}],
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p
        )
        return response['choices'][0]['message']['content']
    except Exception as e:
        logging.exception("Error during LLM API call")
        raise
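# Note: openai.ChatCompletion.create is the legacy pre-1.0 interface of the openai
# package. A rough equivalent on openai>=1.0 would look like the sketch below
# (not used by this app; shown only for orientation):
#   from openai import OpenAI
#   client = OpenAI()
#   resp = client.chat.completions.create(model="gpt-4o-mini",
#                                         messages=[{"role": "user", "content": prompt}])
#   text = resp.choices[0].message.content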
def summarize_text(title, description, text):
    prompt = f"""
Title: {title}
Description: {description}

The title and description above are the original metadata of this YouTube video. Refer to them and summarize the transcript below.
1. Using the video title and description as context, summarize the core topic and all major points of the transcript in detail
2. Write the summary in Korean
3. Make the summary detailed enough that reading it alone gives the same level of understanding as watching the video directly
4. Do not over-compress or abbreviate; include all important content and details
5. Preserve the flow and logical structure of the transcript
6. Clearly reflect the chronological order and progression of events
7. Accurately describe key elements such as people, places, and events
8. Include the emotions and atmosphere conveyed in the transcript
9. Use technical or specialized terms accurately where they appear
10. Identify the purpose or intent of the transcript and reflect it in the summary
11. Separate sentences clearly and use appropriate paragraph breaks to improve readability

Transcript:
{text}
"""
    return call_api(prompt, max_tokens=2000, temperature=0.3, top_p=0.9)
def create_collapsible_section(section_title, video_title, content):
    # For the original script, re-split the raw transcript into readable lines
    if section_title == "Original Script":
        sentences = split_sentences(content)
        content = "\n".join(sentences)
    return f"""
<details>
  <summary style="cursor: pointer; font-weight: bold;">{section_title}</summary>
  <div style="margin-top: 10px;">
    <h3 style="font-size: 18px; margin-bottom: 10px;">{video_title}</h3>
    <div style="white-space: pre-wrap; background-color: #f0f0f0; padding: 15px; border-radius: 5px;">{content}</div>
  </div>
</details>
"""
def analyze(url, cache):
    try:
        if url == cache["url"]:
            logging.info(f"Using cached data: URL = {url}")
            title, description, script = cache["title"], cache["description"], cache["script"]
        else:
            logging.info(f"Starting extraction of new data: URL = {url}")
            title, description, script = get_youtube_script(url)
            cache = {"url": url, "title": title, "description": description, "script": script}
        # Build the original script section and show it immediately
        script_section = create_collapsible_section("Original Script", title, script)
        yield script_section, cache
        # Generate the summary, then append its section to the output
        summary = summarize_text(title, description, script)
        summary_section = create_collapsible_section("Summary", title, summary)
        yield script_section + summary_section, cache
    except Exception as e:
        error_msg = f"Error during processing: {str(e)}"
        logging.exception(error_msg)
        yield error_msg, cache
# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("## YouTube Script Extraction and Summarization Tool")
    youtube_url_input = gr.Textbox(label="YouTube URL")
    analyze_button = gr.Button("Analyze")
    content_output = gr.HTML(label="Content")
    cached_data = gr.State({"url": "", "title": "", "description": "", "script": ""})
    analyze_button.click(
        analyze,
        inputs=[youtube_url_input, cached_data],
        outputs=[content_output, cached_data]
    )
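    # Note: analyze() is a generator (it yields the script first, then script + summary),
    # so Gradio streams two successive updates to the HTML output. Depending on the
    # installed Gradio version, such iterative outputs may require enabling the queue
    # before launch, e.g. (sketch):
    #   demo.queue()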
if __name__ == "__main__":
    demo.launch(share=True)