File size: 5,155 Bytes
229fc16 8210490 229fc16 8210490 db6e2f8 8210490 82762ba 20b91e7 db6e2f8 e3a847b 82762ba 229fc16 82762ba 8d1c8cf db6e2f8 2cf06ef 229fc16 ebfadeb 65d7ee4 82762ba 65d7ee4 8210490 229fc16 8210490 229fc16 8210490 229fc16 db6e2f8 8210490 82762ba 29b0465 229fc16 8210490 229fc16 8210490 229fc16 65d7ee4 229fc16 8210490 229fc16 8210490 229fc16 65d7ee4 8210490 db6e2f8 8210490 e3e27a7 78218a8 8210490 fe37aad 8210490 65d7ee4 99624bb |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 |
from flask import Flask, render_template, request, send_from_directory, jsonify
from datetime import datetime
import requests
from langchain_community.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate
import json
import nltk
from textblob import TextBlob
from nltk.tokenize import word_tokenize
from nltk.stem import PorterStemmer
from nltk.stem import WordNetLemmatizer
import spacy
from bs4 import BeautifulSoup
nltk.download('punkt')
nltk.download('wordnet')
def download_spacy_model():
import spacy
try:
spacy.load("en_core_web_sm")
except OSError:
import spacy.cli
spacy.cli.download("en_core_web_sm")
download_spacy_model()
nlp = spacy.load("en_core_web_sm")
app = Flask(__name__)
template = "Message: {message}\n\nSentiment Analysis: {sentiment}\n\nConversation History: {history}\n\nDate and Time: {date_time}\n\nBitcoin Price: ${bitcoin_price}\n\nBitcoin Data: {database_tag}\n\nResponse: {response}"
prompt = PromptTemplate(template=template, input_variables=["message", "sentiment", "history", "date_time", "bitcoin_price", "database_tag", "response"])
conversation_history = []
MAX_HISTORY_LENGTH = 55
def update_conversation_history(message):
if len(conversation_history) >= MAX_HISTORY_LENGTH:
conversation_history.pop(0)
conversation_history.append(message)
def get_bitcoin_price():
url = 'https://api.coindesk.com/v1/bpi/currentprice.json'
response = requests.get(url)
if response.status_code == 200:
data = response.json()
bitcoin_price = data['bpi']['USD']['rate']
current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
return bitcoin_price, current_time
else:
return 'Error fetching data', None
@app.route('/')
def index():
return render_template('index.html', conversation=conversation_history)
@app.route('/submit', methods=['POST'])
def submit():
user_input = request.json.get('user_input')
doc = nlp(user_input)
tokens = [token.text for token in doc]
sentiment = TextBlob(user_input).sentiment
ps = PorterStemmer()
stemmed_tokens = [ps.stem(token) for token in tokens]
lemmatizer = WordNetLemmatizer()
lemmatized_tokens = [lemmatizer.lemmatize(token) for token in tokens]
bitcoin_price, current_time = get_bitcoin_price()
conversation_history.append("User: " + user_input)
history_tokens = word_tokenize(" ".join(conversation_history))
history_stemmed_tokens = [ps.stem(token) for token in history_tokens]
history_lemmatized_tokens = [lemmatizer.lemmatize(token) for token in history_tokens]
model_input = prompt.format(message=user_input, sentiment=sentiment, history=" ".join(conversation_history), database_tag="Placeholder", date_time=current_time, bitcoin_price=bitcoin_price, response="")
response = "Placeholder response" # Update with actual response generation logic
response_message = "Bot: " + response
update_conversation_history(response_message)
return jsonify({'response':response})
@app.route('/clear_history')
def clear_history():
global conversation_history
conversation_history = []
return 'Conversation history cleared'
with open('i.txt', 'r') as file:
data = file.read()
if __name__ == "__main__":
repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
huggingfacehub_api_token = "hf" + data
llm = HuggingFaceHub(huggingfacehub_api_token=huggingfacehub_api_token,
repo_id=repo_id,
model_kwargs={
"temperature": 0.5,
"max_new_tokens": 256,
"top_p": 0.5,
"repetition_penalty": 1.2,
"num_beams": 3,
"length_penalty": 1.2,
"no_repeat_ngram_size": 2,
"early_stopping": True,
"num_return_sequences": 1,
"use_cache": True,
"task": "predictions",
"data_source": "financial_markets",
"historical_data_fetch": True,
"real-time_data_integration": True,
"feature_engineering": ["technical_indicators", "sentiment_analysis", "volume_analysis"],
"machine_learning_models": ["LSTM", "Random Forest", "ARIMA", "Gradient Boosting"],
"prediction_horizon": "short-term",
"evaluation_metrics": ["accuracy", "MSE", "MAE", "RMSE"],
"model_fine-tuning": True,
"interpretability_explanation": True,
"ensemble_methods": ["voting", "stacking"],
"hyperparameter_optimization": True,
"cross-validation": True,
"online_learning": True,
}
)
app.run(host="0.0.0.0", port=7860) |