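# Gradio chatbot ("IM.X") that answers investment questions by scraping a few Google
# results for the user's query and summarising them with a hosted Qwen model via the
# Hugging Face Inference API.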
import gradio as gr
import os
import json
from bs4 import BeautifulSoup
import requests
from huggingface_hub import InferenceClient


# Define global variables
BOT_AVATAR = 'https://automatedstockmining.org/wp-content/uploads/2024/08/south-west-value-mining-logo.webp'
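# Hugging Face API token, read from the environment (expects an HF_TOKEN variable)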
hf_token = os.getenv("HF_TOKEN")

client = InferenceClient(token=hf_token)

custom_css = '''
.gradio-container {
    font-family: 'Roboto', sans-serif;
}
.main-header {
    text-align: center;
    color: #4a4a4a;
    margin-bottom: 2rem;
}
.tab-header {
    font-size: 1.2rem;
    font-weight: bold;
    margin-bottom: 1rem;
}
.custom-chatbot {
    border-radius: 10px;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
.custom-button {
    background-color: #3498db;
    color: white;
    border: none;
    padding: 10px 20px;
    border-radius: 5px;
    cursor: pointer;
    transition: background-color 0.3s ease;
}
.custom-button:hover {
    background-color: #2980b9;
}
'''

def extract_text_from_webpage(html):
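    """Strip <script> and <style> tags from an HTML document and return its visible text."""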
    soup = BeautifulSoup(html, "html.parser")
    for script in soup(["script", "style"]):
        script.decompose()
    visible_text = soup.get_text(separator=" ", strip=True)
    return visible_text

def search(query):
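    """Fetch the first few Google results for `query` and return a list of
    {"link": ..., "text": ...} dicts, with each page's visible text truncated."""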
    term = query
    max_chars_per_page = 8000
    all_results = []

    with requests.Session() as session:
        try:
            resp = session.get(
                url="https://www.google.com/search",
                headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/111.0"},
                params={"q": term, "num": 4},
                timeout=5
            )
            resp.raise_for_status()

            soup = BeautifulSoup(resp.text, "html.parser")
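            # Each organic result is wrapped in a div with class "g"; this selector is
            # fragile and may break if Google changes its result markup.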
            result_block = soup.find_all("div", attrs={"class": "g"})

            for result in result_block:
                link = result.find("a", href=True)
                if link:
                    link = link["href"]
                    try:
                        webpage = session.get(link, headers={"User-Agent": "Mozilla/5.0"}, timeout=5)
                        webpage.raise_for_status()

                        visible_text = extract_text_from_webpage(webpage.text)
                        if len(visible_text) > max_chars_per_page:
                            visible_text = visible_text[:max_chars_per_page]

                        all_results.append({"link": link, "text": visible_text})

                    except requests.exceptions.RequestException as e:
                        print(f"Failed to retrieve {link}: {e}")
                        all_results.append({"link": link, "text": None})
        except requests.exceptions.RequestException as e:
            print(f"Google search failed: {e}")

    return all_results

def process_query(user_input, history):
    """Answer a user question by combining live web-search context with the chat history."""
    gr.Info('ℹ️ Thinking...', duration=6)

    # Build the conversation: system prompt, prior turns, then the current request.
    messages = [{'role': 'system', 'content': "YOU ARE IM.X, AN INVESTMENT CHATBOT BUILT BY automatedstockmining.org."}]
    for user, assistant in history:
        messages.append({'role': 'user', 'content': user})
        messages.append({'role': 'assistant', 'content': assistant})

    # Perform a web search based on the user's input and attach the results as context.
    search_results = search(user_input)
    search_results_str = json.dumps(search_results)
    messages.append({
        'role': 'user',
        'content': (
            f"Answer the user's request '{user_input}' using the following information: "
            f"{search_results_str}. Provide a concise, direct answer in no more than 2-3 "
            "sentences. Use appropriate emojis in some of your responses."
        )
    })

    # Stream the completion from the Hugging Face Inference API.
    response = client.chat_completion(
        model="Qwen/Qwen2.5-72B-Instruct",
        messages=messages,
        max_tokens=400,
        stream=True
    )

    final_response = ""
    for chunk in response:
        content = chunk.choices[0].delta.content or ''
        final_response += content
        yield final_response  # Yield the accumulated text so the UI streams in real time

theme = gr.themes.Citrus(
    primary_hue="blue",
    neutral_hue="slate",
)
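# Citrus is one of the newer built-in Gradio themes; on older Gradio releases another
# built-in theme such as gr.themes.Soft() can be substituted.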

examples = [
    ["What's the current price of Bitcoin?"],
    ["What's the latest news on Cisco Systems stock?"],
    ["Analyze technical indicators for Adobe; are they presenting buy or sell signals?"],
    ["What's the current price of Apple stock?"],
    ["What are the best stocks to buy this month?"],
    ["What companies report earnings this week?"],
    ["What's Apple's current market cap?"]
]

chatbot = gr.Chatbot(
    label="IM.X",
    avatar_images=[None, BOT_AVATAR],
    show_copy_button=True,
    layout="panel",
    height=600
)
gr.ChatInterface(
    theme=theme,
    fn=process_query,
    chatbot=chatbot,
    examples=examples,
).launch()