# Step 1: Install Required Libraries
# Uncomment and run this if you haven't installed these packages yet.
# !pip install pandas numpy matplotlib yfinance tensorflow scikit-learn
# Step 2: Import Required Libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import yfinance as yf
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Dropout
from datetime import datetime, timedelta
# Step 3: Load and Preprocess Data
def load_data(ticker, start_date, end_date):
    # Fetch historical price data from Yahoo Finance
    data = yf.download(ticker, start=start_date, end=end_date)
    data = data[['Close']]  # Keep only the closing prices
    return data
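
# Hedged usage sketch: yf.download returns a DataFrame indexed by date, and
# load_data keeps only the 'Close' column. The ticker and date range below are
# illustrative only; uncomment to sanity-check the fetch.
# sample = load_data("AAPL", "2023-01-01", "2023-02-01")
# print(sample.head())
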
def preprocess_data(data):
    # Scale the closing prices to the range [0, 1]
    scaler = MinMaxScaler(feature_range=(0, 1))
    scaled_data = scaler.fit_transform(data)

    # Use the first 80% of the data for training
    training_data_len = int(np.ceil(len(scaled_data) * .8))
    train_data = scaled_data[0:training_data_len, :]

    # Build the training set: each sample is the previous 60 closes,
    # and the target is the close that follows them
    x_train, y_train = [], []
    for i in range(60, len(train_data)):
        x_train.append(train_data[i-60:i, 0])
        y_train.append(train_data[i, 0])
    x_train, y_train = np.array(x_train), np.array(y_train)
    x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))  # (samples, timesteps, features) for LSTM
    return x_train, y_train, scaler, training_data_len, scaled_data
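
# Illustrative sketch (not part of the original app): the same 60-step sliding
# window as preprocess_data, applied to a tiny synthetic series so the resulting
# shapes are easy to see. The name _window_demo is an assumption for this sketch.
def _window_demo():
    toy = np.arange(70, dtype=float).reshape(-1, 1)  # 70 fake "closing prices"
    x = np.array([toy[i - 60:i, 0] for i in range(60, len(toy))])
    y = np.array([toy[i, 0] for i in range(60, len(toy))])
    x = np.reshape(x, (x.shape[0], x.shape[1], 1))
    return x.shape, y.shape  # -> (10, 60, 1) and (10,)
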
# Step 4: Build the LSTM Model
def build_model(input_shape):
    model = Sequential()
    model.add(LSTM(50, return_sequences=True, input_shape=input_shape))
    model.add(Dropout(0.2))
    model.add(LSTM(50, return_sequences=False))
    model.add(Dropout(0.2))
    model.add(Dense(25))
    model.add(Dense(1))  # Predict the next closing price
    model.compile(optimizer='adam', loss='mean_squared_error')
    return model
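
# Hedged sanity check: for training windows of shape (samples, 60, 1), the model
# expects input_shape=(60, 1); model.summary() prints the stacked LSTM layers.
# build_model((60, 1)).summary()
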
# Step 5: Train the Model
def train_model(model, x_train, y_train, epochs=10, batch_size=1):
    model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs)
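
# Optional, hedged variant of train_model: validation_split is a standard Keras
# fit() argument that holds out the last fraction of the training windows. The
# function name, the 0.1 split, and batch_size=32 are assumptions for this sketch.
def train_model_with_validation(model, x_train, y_train, epochs=10, batch_size=32):
    history = model.fit(
        x_train, y_train,
        batch_size=batch_size,
        epochs=epochs,
        validation_split=0.1,  # hold out the last 10% of windows for validation
    )
    return history
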
# Step 6: Make Predictions
def make_predictions(model, scaled_data, training_data_len, scaler):
    # Build the test windows: start 60 steps before the training cutoff so the
    # first test sample has a full 60-step history
    test_data = scaled_data[training_data_len - 60:, :]
    x_test = []
    for i in range(60, len(test_data)):
        x_test.append(test_data[i-60:i, 0])
    x_test = np.array(x_test)

    # Reshape to (samples, timesteps, features) for the LSTM
    x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))

    # Predict and undo the scaling to get prices back in USD
    predictions = model.predict(x_test)
    predictions = scaler.inverse_transform(predictions)
    return predictions
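
# Optional, hedged evaluation helper (not in the original script): root-mean-square
# error of the predictions against the held-out closing prices.
def evaluate_predictions(data, predictions, training_data_len):
    actual = np.asarray(data['Close'])[training_data_len:].ravel()
    return float(np.sqrt(np.mean((predictions.ravel() - actual) ** 2)))
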
# Step 7: Visualize the Results
def visualize_results(data, predictions):
    train = data[:len(data) - len(predictions)]
    valid = data[len(data) - len(predictions):].copy()  # copy to avoid pandas chained-assignment warning
    valid['Predictions'] = predictions

    # Plot the training history, the actual validation prices, and the predictions
    plt.figure(figsize=(16, 8))
    plt.title('LSTM Model: Actual vs. Predicted Close Price')
    plt.xlabel('Date')
    plt.ylabel('Close Price (USD)')
    plt.plot(train['Close'])
    plt.plot(valid[['Close', 'Predictions']])
    plt.legend(['Train', 'Val', 'Predictions'], loc='lower right')
    plt.show()
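
# Optional, hedged extension (not in the original app): estimate the next day's
# close from the most recent 60 scaled closing prices. The helper name is an
# assumption for this sketch.
def predict_next_close(model, scaled_data, scaler):
    last_60 = scaled_data[-60:, 0].reshape(1, 60, 1)  # one window of the latest 60 closes
    next_scaled = model.predict(last_60)               # shape (1, 1)
    return float(scaler.inverse_transform(next_scaled)[0, 0])
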
# Example Usage
if __name__ == "__main__":
    # Set parameters
    ticker = "AAPL"  # Example ticker (Apple Inc.)
    start_date = "2020-01-01"
    end_date = datetime.now().strftime("%Y-%m-%d")

    # Load data
    data = load_data(ticker, start_date, end_date)

    # Preprocess data
    x_train, y_train, scaler, training_data_len, scaled_data = preprocess_data(data)

    # Build and train the model
    model = build_model((x_train.shape[1], 1))
    train_model(model, x_train, y_train, epochs=50)

    # Make predictions
    predictions = make_predictions(model, scaled_data, training_data_len, scaler)

    # Visualize results
    visualize_results(data, predictions)
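    # Optional (hedged): uncomment to try the sketch helpers defined above.
    # print("Validation RMSE:", evaluate_predictions(data, predictions, training_data_len))
    # print("Next-day close estimate:", predict_next_close(model, scaled_data, scaler))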