File size: 2,653 Bytes
66a02d5
ca1e304
ed1a33b
e6d9b29
3bfdf65
66a02d5
6affcce
e6d9b29
1f03019
66a02d5
 
 
cc2185f
 
66a02d5
cc2185f
 
 
 
66a02d5
cc2185f
 
1f03019
cc2185f
 
1f03019
cc2185f
 
91f81f0
cc2185f
 
 
3bfdf65
ca1e304
cc2185f
ca1e304
cc2185f
3bfdf65
 
 
 
 
 
 
66a02d5
cc2185f
ca1e304
899c09c
 
 
66a02d5
899c09c
66a02d5
 
899c09c
 
 
 
91f81f0
 
e049fa7
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
import warnings
from huggingface_hub import InferenceClient
import gradio as gr

# warnings.filterwarnings('ignore')  # left disabled: surfacing warnings aids debugging

# Hosted model used for script generation (no local weights required).
_MODEL_ID = "mistralai/Mixtral-8x7B-Instruct-v0.1"

# Remote inference client for the language model.
generator = InferenceClient(_MODEL_ID)

def generate_script(host_name, listener_location, causes_climate_change, co2_level, effects_climate_change,
                    sea_level_rise, warming_rate, potential_solutions, individual_role, call_to_action,
                    TOPIC, DESCRIPTION):
    """Generate a climate-change podcast script with the hosted language model.

    Builds a prompt from the user-supplied details, asks the model for a
    script, and sanity-checks the result section by section.

    Parameters mirror the Gradio inputs (host/listener info, climate facts,
    solutions, and a call to action) plus a TOPIC and DESCRIPTION that frame
    the episode.

    Returns:
        The generated script, or a warning string when a section exceeds
        200 words, or an "Error: ..." string when generation fails (the
        error is also appended to ./error_log.txt).
    """
    try:
        # Define the prompt sections from the function's own parameters.
        # (The original referenced *_template names that were never defined,
        # so every call raised NameError and fell into the except branch.)
        introduction_template = (
            f"You are {host_name}, hosting a climate podcast for listeners in {listener_location}."
        )
        causes_template = (
            f"Explain the causes of climate change, focusing on {causes_climate_change}, "
            f"noting that atmospheric CO2 is around {co2_level} ppm."
        )
        effects_template = (
            f"Describe effects such as {effects_climate_change}, including a sea level rise of "
            f"{sea_level_rise} meters and a warming rate of {warming_rate} degrees per decade."
        )
        solutions_template = f"Discuss potential solutions, especially {potential_solutions}."
        role_template = f"Explain what individuals can do: {individual_role}."
        action_template = f"Close with this call to action: {call_to_action}."
        summary_template = "End with a short summary of the episode."

        # Combine templates based on the DESCRIPTION
        prompt_template = f"""{introduction_template} {causes_template} {effects_template} {solutions_template} {role_template} {action_template} {summary_template}
        
        TOPIC: {TOPIC}. DESCRIPTION: {DESCRIPTION}"""

        # InferenceClient is not callable like a transformers pipeline;
        # text_generation() is the correct API and returns the string directly.
        script = generator.text_generation(prompt_template, max_new_tokens=1000)

        # Warn about the first section (newline-delimited) over 200 words.
        sections = script.split("\n")
        for i, section in enumerate(sections):
            if len(section.split()) > 200:
                return f"Warning: Section {i + 1} exceeds the target word count. You may need to shorten this section."

        return script

    except Exception as e:
        error_message = f"Error: {e}"

        # Best-effort persistence of the failure; the message is also
        # returned so the UI can display it.
        with open("./error_log.txt", "a") as log_file:
            log_file.write(error_message + "\n")

        return error_message

# Gradio interface setup.
# NOTE: generate_script takes 12 parameters; the original wired up only 10
# inputs, so TOPIC and DESCRIPTION were never supplied and every submission
# failed. The two missing inputs are added at the end, matching the
# function's positional parameter order.
iface = gr.Interface(fn=generate_script,
                   inputs=[gr.Textbox(label="Host Name", value="John"),
                           gr.Textbox(label="Listener Location", value="City"),
                           gr.Textbox(label="Causes Climate Change", value="human activities"),
                           gr.Number(label="CO2 Level", value=400),
                           gr.Textbox(label="Effects Climate Change", value="rising temperatures"),
                           gr.Number(label="Sea Level Rise", value=0.1),
                           gr.Number(label="Warming Rate", value=0.2),
                           gr.Textbox(label="Potential Solutions", value="renewable energy"),
                           gr.Textbox(label="Individual Role", value="reduce carbon footprint"),
                           gr.Textbox(label="Call To Action", value="act now"),
                           gr.Textbox(label="Topic", value="Climate Change"),
                           gr.Textbox(label="Description", value="An overview episode")],
                   outputs="text")

# Launch the interface (debug=True surfaces server-side tracebacks in the console).
iface.launch(debug=True)