|
|
|
"""bayburtanalysis.159 |
|
|
|
Automatically generated by Colab. |
|
|
|
Original file is located at |
|
https://colab.research.google.com/drive/1i3xf37d6YszBy480hNM0EGmK3u-RtMJB |
|
""" |
|
|
|
import pandas as pd |
|
import numpy as np |
|
import matplotlib.pyplot as plt |
|
import seaborn as sns |
|
from datetime import datetime |
|
|
|
from statsmodels.tsa.seasonal import seasonal_decompose |
|
from statsmodels.tsa.arima.model import ARIMA |
|
import prophet |
|
|
|
from sklearn.model_selection import train_test_split |
|
from sklearn.preprocessing import StandardScaler |
|
from sklearn.linear_model import LinearRegression |
|
from sklearn.metrics import mean_squared_error, r2_score |
|
from sklearn.ensemble import RandomForestRegressor |
|
|
|
from textblob import TextBlob |
|
import nltk |
|
from nltk.sentiment.vader import SentimentIntensityAnalyzer |
|
nltk.download('vader_lexicon') |
|
|
|
import plotly.express as px |
|
import plotly.graph_objs as go |
|
import plotly.figure_factory as ff |
|
|
|
import warnings |
|
warnings.filterwarnings('ignore') |
|
|
|
print("Very well you may continue") |
|
|
|
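# Load the company metadata and daily price history CSVs (assumed to be in the current working directory).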
big_tech_companies = pd.read_csv('big_tech_companies.csv') |
|
big_tech_stock_prices = pd.read_csv('big_tech_stock_prices.csv') |
|
|
|
print("Big Tech Companies Dataset:") |
|
print(big_tech_companies.head()) |
|
|
|
print("\nBig Tech Stock Prices Dataset:") |
|
print(big_tech_stock_prices.head()) |
|
|
|
print("\nBig Tech Companies Dataset Info:") |
|
print(big_tech_companies.info()) |
|
|
|
print("\nBig Tech Stock Prices Dataset Info:") |
|
print(big_tech_stock_prices.info()) |
|
|
|
print("\nBig Tech Companies Dataset Description:") |
|
print(big_tech_companies.describe()) |
|
|
|
print("\nBig Tech Stock Prices Dataset Description:") |
|
print(big_tech_stock_prices.describe()) |
|
|
|
print("\nUnique Companies in Big Tech Companies Dataset:") |
|
print(big_tech_companies['company'].nunique()) |
|
|
|
print("\nUnique Stock Symbols in Big Tech Stock Prices Dataset:") |
|
print(big_tech_stock_prices['stock_symbol'].nunique()) |
|
|
|
print("\nMissing Values in Big Tech Companies Dataset:") |
|
print(big_tech_companies.isnull().sum()) |
|
|
|
print("\nMissing Values in Big Tech Stock Prices Dataset:") |
|
print(big_tech_stock_prices.isnull().sum()) |
|
|
|
print("\nStock Symbol Counts in Big Tech Stock Prices Dataset:") |
|
print(big_tech_stock_prices['stock_symbol'].value_counts()) |
|
|
|
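# Parse the trade dates so the data can be handled as a time series.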
big_tech_stock_prices['date'] = pd.to_datetime(big_tech_stock_prices['date']) |
|
|
|
plt.figure(figsize=(14, 7)) |
|
sns.lineplot(data=big_tech_stock_prices, x='date', y='close', hue='stock_symbol') |
|
plt.title('Stock Prices Over Time') |
|
plt.xlabel('Date') |
|
plt.ylabel('Close Price') |
|
plt.legend(title='Stock Symbol') |
|
plt.show() |
|
|
|
plt.figure(figsize=(14, 7)) |
|
sns.lineplot(data=big_tech_stock_prices, x='date', y='volume', hue='stock_symbol')

plt.title('Trading Volume Over Time')

plt.xlabel('Date')
|
plt.ylabel('Volume') |
|
plt.legend(title='Stock Symbol') |
|
plt.show() |
|
|
|
plt.figure(figsize=(14,7)) |
|
sns.boxplot(data=big_tech_stock_prices, x='stock_symbol', y='close') |
|
plt.title('Distribution of Closing Prices by Stock Symbol') |
|
plt.xlabel('Stock Symbol') |
|
plt.ylabel('Close Price') |
|
plt.show() |
|
|
|
apple_stock = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == 'AAPL'].copy()

apple_stock.set_index('date', inplace=True)
|
|
|
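# Seasonal decomposition of AAPL closing prices; period=365 treats the series as calendar-daily (trading data has roughly 252 sessions per year).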
decomposition = seasonal_decompose(apple_stock['close'], model='multiplicative', period=365)

fig = decomposition.plot()
|
fig.set_size_inches(14, 10) |
|
plt.show() |
|
|
|
apple_stock['close'].plot(figsize=(14, 7))
|
plt.title('Apple Closing Prices') |
|
plt.xlabel('Date') |
|
plt.ylabel('Close Price') |
|
plt.show() |
|
|
|
apple_stock['rolling_mean'] = apple_stock['close'].rolling(window=30).mean() |
|
|
|
apple_stock[['close', 'rolling_mean']].plot(figsize=(14, 7))
|
plt.title('Apple Closing Prices and 30-Day Moving Average') |
|
plt.xlabel('Date') |
|
plt.ylabel('Close Price') |
|
plt.show() |
|
|
|
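# Correlation of closing prices across symbols, using a wide table with one column per symbol.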
pivot_table = big_tech_stock_prices.pivot(index='date', columns='stock_symbol', values='close') |
|
correlation_matrix = pivot_table.corr() |
|
|
|
plt.figure(figsize=(12, 8)) |
|
sns.heatmap(correlation_matrix, annot=True, cmap='coolwarm', linewidths=0.5) |
|
plt.title('Correlation Matrix of Stock Closing Prices') |
|
plt.show() |
|
|
|
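# Zoom in on calendar year 2020.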
big_tech_stock_prices_2020 = big_tech_stock_prices[
    big_tech_stock_prices['date'].between('2020-01-01', '2020-12-31')
]
|
|
|
plt.figure(figsize=(14, 7)) |
|
sns.lineplot(data=big_tech_stock_prices_2020, x='date', y='close', hue='stock_symbol') |
|
plt.title('Stock Prices During 2020') |
|
plt.xlabel('Date') |
|
plt.ylabel('Close Price') |
|
plt.legend(title='Stock Symbol') |
|
plt.show() |
|
|
|
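# Average closing price per symbol for each calendar year.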
big_tech_stock_prices['year'] = big_tech_stock_prices['date'].dt.year |
|
|
|
yearly_avg_prices = big_tech_stock_prices.groupby(['year', 'stock_symbol']).mean(numeric_only=True).reset_index()
|
|
|
plt.figure(figsize=(14, 7)) |
|
sns.lineplot(data=yearly_avg_prices, x='year', y='close', hue='stock_symbol') |
|
plt.title('Yearly Average Closing Prices') |
|
plt.xlabel('Year') |
|
plt.ylabel('Average Close Price') |
|
plt.legend(title='Stock Symbol') |
|
plt.show() |
|
|
|
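# Daily percentage price change (simple return) computed separately for each symbol.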
big_tech_stock_prices['price_change'] = big_tech_stock_prices.groupby('stock_symbol')['close'].pct_change() |
|
|
|
plt.figure(figsize=(14, 10)) |
|
|
|
sns.histplot(big_tech_stock_prices['price_change'].dropna(), bins=100, kde=True)
|
plt.title('Histogram of Daily Price Changes for All Stocks') |
|
plt.xlabel('Daily Price Change') |
|
plt.ylabel('Frequency') |
|
plt.show() |
|
|
|
unique_symbols = big_tech_stock_prices['stock_symbol'].unique() |
|
|
|
for symbol in unique_symbols: |
|
    plt.figure(figsize=(14, 7))

    sns.histplot(big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol]['price_change'].dropna(), bins=100, kde=True)

    plt.title(f'Histogram of Daily Price Changes for {symbol}')

    plt.xlabel('Daily Price Change')

    plt.ylabel('Frequency')

    plt.show()
|
|
|
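# Volatility proxy: standard deviation of daily price changes per symbol.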
volatility = big_tech_stock_prices.groupby('stock_symbol')['price_change'].std().reset_index() |
|
volatility.columns = ['stock_symbol', 'volatility'] |
|
|
|
plt.figure(figsize=(14, 7)) |
|
sns.barplot(data=volatility, x='stock_symbol', y='volatility') |
|
plt.title('Stock Price Volatility') |
|
plt.xlabel('Stock Symbol') |
|
plt.ylabel('Volatility (Standard Deviation of Daily Price Changes)')
|
plt.show() |
|
|
|
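# Year-over-year percentage change in each symbol's average closing price.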
yearly_price_change = (
    big_tech_stock_prices.groupby(['year', 'stock_symbol'])['close'].mean()
    .groupby('stock_symbol').pct_change()
    .reset_index()
)

yearly_price_change = yearly_price_change.dropna()
|
|
|
plt.figure(figsize=(14, 7)) |
|
sns.lineplot(data=yearly_price_change, x='year', y='close', hue='stock_symbol', marker='o') |
|
plt.title('Yearly Percentage Change in Average Closing Prices') |
|
plt.xlabel('Year') |
|
plt.ylabel('Percentage Change in Average Close Price') |
|
plt.legend(title='Stock Symbol') |
|
plt.show() |
|
|
|
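# Fit an ARIMA(5, 1, 0) model to AAPL closing prices and inspect the in-sample fit.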
model = ARIMA(apple_stock['close'], order=(5, 1, 0)) |
|
|
|
model_fit = model.fit() |
|
print(model_fit.summary()) |
|
|
|
plt.figure(figsize=(14, 7)) |
|
plt.plot(apple_stock['close'], label='Original') |
|
plt.plot(model_fit.fittedvalues, color='red', label='Fitted Values') |
|
plt.title('ARIMA Model Fit') |
|
plt.xlabel('Date') |
|
plt.ylabel('Close Price') |
|
plt.legend() |
|
plt.show() |
|
|
|
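# 30-day out-of-sample forecast with its confidence interval band.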
forecast = model_fit.get_forecast(steps=30) |
|
forecast_index = pd.date_range(start=apple_stock.index[-1] + pd.Timedelta(days=1), periods=30, freq='D')
|
forecast_mean = forecast.predicted_mean |
|
forecast_conf_int = forecast.conf_int() |
|
|
|
plt.figure(figsize=(14, 7)) |
|
plt.plot(apple_stock['close'], label='Original') |
|
plt.plot(forecast_index, forecast_mean, color='red', label='Forecast') |
|
plt.fill_between(forecast_index, forecast_conf_int.iloc[:, 0], forecast_conf_int.iloc[:, 1], color='pink', alpha=0.3) |
|
plt.title('ARIMA Model Forecast') |
|
plt.xlabel('Date') |
|
plt.ylabel('Close Price') |
|
plt.legend() |
|
plt.show() |
|
|
|
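# Repeat the ARIMA fit and 30-day forecast for every symbol.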
unique_symbols = big_tech_stock_prices['stock_symbol'].unique() |
|
|
|
for symbol in unique_symbols: |
|
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()

    stock_data.set_index('date', inplace=True)


    print(f"\n### {symbol} ###")


    model = ARIMA(stock_data['close'], order=(5, 1, 0))

    model_fit = model.fit()

    print(model_fit.summary())


    plt.figure(figsize=(14, 7))

    plt.plot(stock_data['close'], label='Original')

    plt.plot(model_fit.fittedvalues, color='red', label='Fitted Values')

    plt.title(f'{symbol} ARIMA Model Fit')

    plt.xlabel('Date')

    plt.ylabel('Close Price')

    plt.legend()

    plt.show()


    forecast = model_fit.get_forecast(steps=30)

    forecast_index = pd.date_range(start=stock_data.index[-1] + pd.Timedelta(days=1), periods=30, freq='D')

    forecast_mean = forecast.predicted_mean

    forecast_conf_int = forecast.conf_int()


    plt.figure(figsize=(14, 7))

    plt.plot(stock_data['close'], label='Original')

    plt.plot(forecast_index, forecast_mean, color='red', label='Forecast')

    plt.fill_between(forecast_index, forecast_conf_int.iloc[:, 0], forecast_conf_int.iloc[:, 1], color='pink', alpha=0.3)

    plt.title(f'{symbol} ARIMA Model Forecast')

    plt.xlabel('Date')

    plt.ylabel('Close Price')

    plt.legend()

    plt.show()
|
|
|
big_tech_stock_prices['daily_return'] = big_tech_stock_prices.groupby('stock_symbol')['close'].pct_change() |
|
|
|
mean_returns = big_tech_stock_prices.groupby('stock_symbol')['daily_return'].mean() |
|
volatilities = big_tech_stock_prices.groupby('stock_symbol')['daily_return'].std()


risk_return_df = pd.DataFrame({'mean_return': mean_returns, 'volatility': volatilities})
|
print(risk_return_df) |
|
|
|
mean_returns = big_tech_stock_prices.groupby('stock_symbol')['daily_return'].mean() |
|
cov_matrix = big_tech_stock_prices.pivot_table(index='date', columns='stock_symbol', values='daily_return').cov() |
|
|
|
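# Monte Carlo portfolio simulation: random weights (normalised to sum to 1), recording return, risk and Sharpe ratio (risk-free rate assumed to be zero).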
num_portfolios = 10000 |
|
results = np.zeros((4, num_portfolios)) |
|
weights_record = [] |
|
|
|
np.random.seed(42) |
|
|
|
for i in range(num_portfolios): |
|
    weights = np.random.random(len(mean_returns))

    weights /= np.sum(weights)

    weights_record.append(weights)

    portfolio_return = np.dot(weights, mean_returns)

    portfolio_stddev = np.sqrt(np.dot(weights.T, np.dot(cov_matrix, weights)))

    results[0, i] = portfolio_return

    results[1, i] = portfolio_stddev

    results[2, i] = results[0, i] / results[1, i]

    results[3, i] = i  # keep the simulation index so the weights can be looked up later
|
|
|
results_frame = pd.DataFrame(results.T, columns=['Return', 'Risk', 'Sharpe Ratio', 'Index']) |
|
|
|
max_sharpe_idx = results_frame['Sharpe Ratio'].idxmax() |
|
max_sharpe_portfolio = results_frame.iloc[max_sharpe_idx] |
|
max_sharpe_weights = weights_record[int(max_sharpe_portfolio['Index'])]
|
|
|
min_risk_idx = results_frame['Risk'].idxmin() |
|
min_risk_portfolio = results_frame.iloc[min_risk_idx] |
|
min_risk_weights = weights_record[int(min_risk_portfolio['Index'])]
|
|
|
plt.figure(figsize=(10, 6)) |
|
plt.scatter(results_frame['Risk'], results_frame['Return'], c=results_frame['Sharpe Ratio'], cmap='viridis') |
|
plt.colorbar(label='Sharpe Ratio') |
|
plt.scatter(max_sharpe_portfolio['Risk'], max_sharpe_portfolio['Return'], marker='*', color='r', s=200, label='Max Sharpe Ratio')

plt.scatter(min_risk_portfolio['Risk'], min_risk_portfolio['Return'], marker='*', color='b', s=200, label='Min Risk')
|
plt.title('Portfolio Optimization based on Efficient Frontier') |
|
plt.xlabel('Risk (Standard Deviation)') |
|
plt.ylabel('Return') |
|
plt.legend() |
|
plt.show()
|
|
|
print("Maximum Sharpe Ratio Portfolio Allocation\n") |
|
print("Return:", max_sharpe_portfolio['Return'])

print("Risk:", max_sharpe_portfolio['Risk'])

print("Sharpe Ratio:", max_sharpe_portfolio['Sharpe Ratio'])
|
print("\nWeights:\n") |
|
for i, txt in enumerate(mean_returns.index): |
|
    print(f"{txt}: {max_sharpe_weights[i]}")
|
|
|
print("\nMinimum Risk Portfolio Allocation\n") |
|
print("Return:", min_risk_portfolio['Return'])

print("Risk:", min_risk_portfolio['Risk'])
|
print("\nWeights:\n") |
|
for i, txt in enumerate(mean_returns.index): |
|
    print(f"{txt}: {min_risk_weights[i]}")
|
|
|
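# Load macro-economic indicators (unemployment, CPI, inflation, mortgage and corporate bond rates) and align them with the stock data by date.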
big_tech_stock_price = pd.read_csv('big_tech_stock_prices.csv') |
|
macro_data = pd.read_csv('DATA.csv') |
|
|
|
print(macro_data.columns) |
|
|
|
macro_data = macro_data.rename(columns={ |
|
'UNRATE(%)': 'unemployment_rate', |
|
'CPIALLITEMS': 'cpi', |
|
'INFLATION(%)': 'inflation_rate', |
|
'MORTGAGE INT. MONTHLY AVG(%)': 'mortgage_interest_rate', |
|
'CORP. BOND YIELD(%)': 'corporate_bond_yield' |
|
}) |
|
|
|
macro_data['DATE'] = pd.to_datetime(macro_data['DATE']) |
|
|
|
macro_data.rename(columns={'DATE': 'date'}, inplace=True) |
|
|
|
big_tech_stock_price['date'] = pd.to_datetime(big_tech_stock_price['date']) |
|
|
|
merged_data = pd.merge(big_tech_stock_price, macro_data, on='date', how='inner')
|
|
|
print(merged_data.head()) |
|
print(merged_data.columns) |
|
|
|
correlation_matrix = merged_data[['close', 'unemployment_rate', 'cpi', 'inflation_rate', 'mortgage_interest_rate', 'corporate_bond_yield']].corr() |
|
print(correlation_matrix) |
|
|
|
plt.figure(figsize=(10, 6)) |
|
sns.heatmap(correlation_matrix, annot=True, cmap='coolwarm', linewidths=0.5) |
|
plt.title('Correlation Matrix of Stock Prices and Macro-Economic Indicators') |
|
plt.show() |
|
|
|
plt.figure(figsize=(14, 7)) |
|
sns.lineplot(data=merged_data, x='date', y='close', hue='stock_symbol') |
|
plt.title('Stock Prices Over Time') |
|
plt.xlabel('Date') |
|
plt.ylabel('Close Price') |
|
plt.show() |
|
|
|
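# Linear regression: predict the closing price from the macro-economic indicators.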
X = merged_data[['unemployment_rate', 'cpi', 'inflation_rate', 'mortgage_interest_rate', 'corporate_bond_yield']] |
|
y = merged_data['close'] |
|
|
|
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) |
|
|
|
model = LinearRegression() |
|
model.fit(X_train, y_train) |
|
|
|
y_pred = model.predict(X_test) |
|
r2 = r2_score(y_test, y_pred)



print(f"R^2 Score: {r2}")
|
|
|
coefficients = pd.DataFrame(model.coef_, X.columns, columns=['Coefficient']) |
|
print(coefficients) |
|
|
|
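# Z-score anomaly detection: flag closing prices more than 3 standard deviations from each symbol's mean.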
for symbol in unique_symbols: |
|
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()

    stock_data.set_index('date', inplace=True)


    stock_data['z_score'] = (stock_data['close'] - stock_data['close'].mean()) / stock_data['close'].std()


    stock_data['anomaly'] = np.where(stock_data['z_score'].abs() > 3, True, False)


    plt.figure(figsize=(14, 7))

    plt.plot(stock_data.index, stock_data['close'], label='Close Price')

    plt.scatter(stock_data[stock_data['anomaly']].index, stock_data[stock_data['anomaly']]['close'], color='red', label='Anomaly')

    plt.title(f'{symbol} Stock Price with Anomalies')

    plt.xlabel('Date')

    plt.ylabel('Close Price')

    plt.legend()

    plt.show()


    anomalies = stock_data[stock_data['anomaly']]

    print(f"Anomalies for {symbol}:")

    print(anomalies[['close', 'z_score']])

    print("\n")
|
|
|
!pip install arch
|
|
|
from arch import arch_model |
|
|
|
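# Fit a GARCH(1, 1) model to each symbol's daily returns and forecast volatility 30 days ahead (uses the arch package installed above).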
for symbol in unique_symbols: |
|
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()

    stock_data.set_index('date', inplace=True)


    stock_data['return'] = stock_data['close'].pct_change()


    model = arch_model(stock_data['return'].dropna(), vol='Garch', p=1, q=1)

    model_fit = model.fit(disp='off')

    print(f"Summary for {symbol}:")

    print(model_fit.summary())


    volatility = model_fit.conditional_volatility


    plt.figure(figsize=(14, 7))

    plt.plot(volatility)

    plt.title(f'{symbol} Stock Volatility')

    plt.xlabel('Date')

    plt.ylabel('Volatility')

    plt.show()


    forecast_horizon = 30

    forecast = model_fit.forecast(horizon=forecast_horizon)

    forecast_volatility = np.sqrt(forecast.variance.values[-1, :])


    plt.figure(figsize=(14, 7))

    plt.plot(range(1, forecast_horizon + 1), forecast_volatility)

    plt.title(f'{symbol} Forecasted Volatility for Next 30 Days')

    plt.xlabel('Days')

    plt.ylabel('Volatility')

    plt.show()
|
|
|
|
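# 50/200-day SMA crossover strategy: buy when the 50-day average crosses above the 200-day average, sell on the reverse cross.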
for symbol in unique_symbols: |
|
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()

    stock_data.set_index('date', inplace=True)


    stock_data['SMA50'] = stock_data['close'].rolling(window=50).mean()

    stock_data['SMA200'] = stock_data['close'].rolling(window=200).mean()


    stock_data['Signal'] = 0.0

    stock_data.loc[stock_data.index[50:], 'Signal'] = np.where(stock_data['SMA50'].iloc[50:] > stock_data['SMA200'].iloc[50:], 1.0, 0.0)

    stock_data['Position'] = stock_data['Signal'].diff()


    plt.figure(figsize=(14, 7))

    plt.plot(stock_data['close'], label='Close Price')

    plt.plot(stock_data['SMA50'], label='50-day SMA', alpha=0.7)

    plt.plot(stock_data['SMA200'], label='200-day SMA', alpha=0.7)

    plt.plot(stock_data[stock_data['Position'] == 1].index, stock_data['SMA50'][stock_data['Position'] == 1], '^', markersize=10, color='g', lw=0, label='Buy Signal')

    plt.plot(stock_data[stock_data['Position'] == -1].index, stock_data['SMA50'][stock_data['Position'] == -1], 'v', markersize=10, color='r', lw=0, label='Sell Signal')

    plt.title(f'{symbol} - SMA Crossover Strategy')

    plt.xlabel('Date')

    plt.ylabel('Close Price')

    plt.legend()

    plt.show()
|
|
|
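# Random forest regression on the same macro features, with SHAP values to attribute each feature's contribution (shap is installed below).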
X = merged_data[['unemployment_rate', 'cpi', 'inflation_rate', 'mortgage_interest_rate', 'corporate_bond_yield']] |
|
y = merged_data['close'] |
|
|
|
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) |
|
|
|
model = RandomForestRegressor() |
|
model.fit(X_train, y_train) |
|
|
|
!pip install shap |
|
import shap |
|
|
|
explainer = shap.TreeExplainer(model) |
|
shap_values = explainer.shap_values(X_test) |
|
|
|
shap.summary_plot(shap_values, X_test) |