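"""Flask chatbot app that answers Bitcoin-related questions.

The app scrapes a Bitcoin price-history table from a Hugging Face Space,
fetches the current price from the CoinDesk API, runs light NLP on the user
message (spaCy tokenization, TextBlob sentiment, NLTK stemming and
lemmatization), and feeds everything into the Mixtral-8x7B-Instruct model on
the Hugging Face Hub via LangChain.

Required local files (not included here):
  - ai_chatbot_data.json : system/persona data injected into the prompt
  - i.txt                : the remainder of the Hugging Face API token
                           (assembled as "hf" + contents of i.txt)
"""
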
from flask import Flask, render_template, request, send_from_directory
from datetime import datetime
from langchain_community.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate
import requests
import json
import nltk
from textblob import TextBlob
from nltk.tokenize import word_tokenize
from nltk.stem import PorterStemmer
from nltk.stem import WordNetLemmatizer
import tensorflow as tf
from tensorflow import keras
import spacy
from bs4 import BeautifulSoup

nltk.download('punkt')
nltk.download('wordnet')

def download_spacy_model():
    """Download the spaCy English model on first run if it is missing."""
    try:
        spacy.load("en_core_web_sm")
    except OSError:
        import spacy.cli
        spacy.cli.download("en_core_web_sm")

download_spacy_model()

nlp = spacy.load("en_core_web_sm")

app = Flask(__name__)


# Load the JSON data from the file
with open('ai_chatbot_data.json', 'r') as file:
    json_data = json.load(file)

    


template = "Message: {message}\n\nSentiment Analysis: {sentiment}\n\nConversation Now Between you and user: {history}\n\nDate and Time: {date_time}\n\nBitcoin Price: ${bitcoin_price}\n\nBitcoin history from 1-jan-2024 to today the tidy is date-open-high-low-close-adj close-volum: {database_tag}\n\nYour system: {json_data}.\n\nResponse:"
prompt = PromptTemplate(template=template, input_variables=["message", "sentiment", "history", "date_time", "bitcoin_price", "database_tag", "json_data"])
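# conversation_history holds alternating "User:" / "You:" lines, capped at MAX_HISTORY_LENGTH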
conversation_history = []

MAX_HISTORY_LENGTH = 55

url = "https://dooratre-info.hf.space/"

response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')

div_content = soup.find('div', {'id': '45'})
if div_content:
    print(div_content)
else:
    print("No div with id=45 found on the page.")
database_tag=div_content

def update_conversation_history(message):
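    """Append a message, dropping the oldest one once MAX_HISTORY_LENGTH is reached."""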
    if len(conversation_history) >= MAX_HISTORY_LENGTH:
        conversation_history.pop(0)
    conversation_history.append(message)


def get_bitcoin_price():
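    """Return the current USD Bitcoin price from the CoinDesk API plus a timestamp."""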
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    url = 'https://api.coindesk.com/v1/bpi/currentprice.json'
    response = requests.get(url)

    if response.status_code == 200:
        data = response.json()
        bitcoin_price = data['bpi']['USD']['rate']
        return bitcoin_price, current_time
    else:
        return 'Error fetching data', current_time

@app.route('/assets/<path:path>')
def send_static(path):
    return send_from_directory('assets', path)

@app.route('/')
def index():
    global conversation_history
    return render_template('index.html', conversation=conversation_history)

@app.route('/submit', methods=['POST'])
def submit():
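    """Handle a chat message: run NLP, build the prompt, query the model, return its reply."""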
    user_input = request.json.get('user_input')

    # spaCy tokenization of the user message
    doc = nlp(user_input)
    tokens = [token.text for token in doc]

    # TextBlob sentiment (polarity, subjectivity)
    sentiment = TextBlob(user_input).sentiment

    # NLTK stemming and lemmatization of the message tokens
    ps = PorterStemmer()
    stemmed_tokens = [ps.stem(token) for token in tokens]

    lemmatizer = WordNetLemmatizer()
    lemmatized_tokens = [lemmatizer.lemmatize(token) for token in tokens]

    bitcoin_price, current_time = get_bitcoin_price()

    update_conversation_history("User: " + user_input)

    # NLTK features for the conversation history (computed as auxiliary
    # features; they are not variables of the prompt template)
    history_tokens = word_tokenize("<br>".join(conversation_history))
    history_stemmed_tokens = [ps.stem(token) for token in history_tokens]
    history_lemmatized_tokens = [lemmatizer.lemmatize(token) for token in history_tokens]

    model_input = prompt.format(message=user_input, sentiment=sentiment,
                                history="<br>".join(conversation_history),
                                database_tag=database_tag, date_time=current_time,
                                bitcoin_price=bitcoin_price, json_data=json_data)

    response = llm(model_input)

    # Keep only the text after the last "Response:" marker, if the model echoes it
    bot_response = response.split('Response:')[-1].strip()
    bot_response = bot_response.replace('\n', '<br>')

    # Update the conversation history with the bot's response
    update_conversation_history("You: " + bot_response)

    return bot_response
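
# Example request for local testing (hypothetical client call, assuming the
# server is running on the default port 7860 configured below):
#
#   import requests
#   reply = requests.post("http://localhost:7860/submit",
#                         json={"user_input": "What is the Bitcoin price right now?"})
#   print(reply.text)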

@app.route('/clear_history')
def clear_history():
    global conversation_history
    conversation_history = []
    return 'Conversation history cleared'

# Read the remainder of the Hugging Face API token from i.txt (stripping any
# trailing newline so the assembled token stays valid)
with open('i.txt', 'r') as file:
    data = file.read().strip()

if __name__ == "__main__":
    repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
    huggingfacehub_api_token = "hf" + data

    # Sampling parameters for the Mixtral text-generation endpoint
    llm = HuggingFaceHub(huggingfacehub_api_token=huggingfacehub_api_token,
                         repo_id=repo_id,
                         model_kwargs={
                             "temperature": 0.5,
                             "max_new_tokens": 256,
                             "top_p": 0.5,
                             "repetition_penalty": 1.2,
                         })

    app.run(host="0.0.0.0", port=7860)