{ "cells": [ { "cell_type": "code", "execution_count": 30, "id": "3c4d096e", "metadata": {}, "outputs": [], "source": [ "import os\n", "import re\n", "import csv\n", "import math\n", "import time\n", "import json\n", "import random\n", "import finnhub\n", "import datasets\n", "import pandas as pd\n", "import yfinance as yf\n", "from datetime import datetime\n", "from collections import defaultdict\n", "from datasets import Dataset\n", "from openai import OpenAI" ] }, { "cell_type": "code", "execution_count": 31, "id": "ace9fdb4", "metadata": {}, "outputs": [], "source": [ "START_DATE = \"2022-12-31\"\n", "END_DATE = \"2023-05-31\"\n", "\n", "DATA_DIR = f\"./{START_DATE}_{END_DATE}\"\n", "os.makedirs(DATA_DIR, exist_ok=True)\n", "\n", "finnhub_client = finnhub.Client(api_key=\"your finnhub key\")\n", "\n", "client = OpenAI(api_key = 'your openai key')" ] }, { "cell_type": "markdown", "id": "2fce2503", "metadata": {}, "source": [ "# Raw Financial Data Acquisition" ] }, { "cell_type": "code", "execution_count": 43, "id": "c6564114", "metadata": {}, "outputs": [], "source": [ "def bin_mapping(ret):\n", " \n", " up_down = 'U' if ret >= 0 else 'D'\n", " integer = math.ceil(abs(100 * ret))\n", " \n", " return up_down + (str(integer) if integer <= 5 else '5+')\n", "\n", "\n", "def get_returns(stock_symbol):\n", " \n", " # Download historical stock data\n", " stock_data = yf.download(stock_symbol, start=START_DATE, end=END_DATE)\n", " \n", " weekly_data = stock_data['Adj Close'].resample('W').ffill()\n", " weekly_returns = weekly_data.pct_change()[1:]\n", " weekly_start_prices = weekly_data[:-1]\n", " weekly_end_prices = weekly_data[1:]\n", "\n", " weekly_data = pd.DataFrame({\n", " 'Start Date': weekly_start_prices.index,\n", " 'Start Price': weekly_start_prices.values,\n", " 'End Date': weekly_end_prices.index,\n", " 'End Price': weekly_end_prices.values,\n", " 'Weekly Returns': weekly_returns.values\n", " })\n", " \n", " weekly_data['Bin Label'] = weekly_data['Weekly Returns'].map(bin_mapping)\n", "\n", " return weekly_data\n", "\n", "\n", "def get_news(symbol, data):\n", " \n", " news_list = []\n", " \n", " for end_date, row in data.iterrows():\n", " start_date = row['Start Date'].strftime('%Y-%m-%d')\n", " end_date = row['End Date'].strftime('%Y-%m-%d')\n", " print(symbol, ': ', start_date, ' - ', end_date)\n", " time.sleep(1) # control qpm\n", " weekly_news = finnhub_client.company_news(symbol, _from=start_date, to=end_date)\n", " weekly_news = [\n", " {\n", " \"date\": datetime.fromtimestamp(n['datetime']).strftime('%Y%m%d%H%M%S'),\n", " \"headline\": n['headline'],\n", " \"summary\": n['summary'],\n", " } for n in weekly_news\n", " ]\n", " weekly_news.sort(key=lambda x: x['date'])\n", " news_list.append(json.dumps(weekly_news))\n", " \n", " data['News'] = news_list\n", " \n", " return data\n", "\n", "\n", "def get_basics(symbol, data, always=False):\n", " \n", " basic_financials = finnhub_client.company_basic_financials(symbol, 'all')\n", " \n", " final_basics, basic_list, basic_dict = [], [], defaultdict(dict)\n", " \n", " for metric, value_list in basic_financials['series']['quarterly'].items():\n", " for value in value_list:\n", " basic_dict[value['period']].update({metric: value['v']})\n", "\n", " for k, v in basic_dict.items():\n", " v.update({'period': k})\n", " basic_list.append(v)\n", " \n", " basic_list.sort(key=lambda x: x['period'])\n", " \n", " for i, row in data.iterrows():\n", " \n", " start_date = row['End Date'].strftime('%Y-%m-%d')\n", " last_start_date = START_DATE if i < 
2 else data.loc[i-2, 'Start Date'].strftime('%Y-%m-%d')\n", " \n", " used_basic = {}\n", " for basic in basic_list[::-1]:\n", " if (always and basic['period'] < start_date) or (last_start_date <= basic['period'] < start_date):\n", " used_basic = basic\n", " break\n", " final_basics.append(json.dumps(used_basic))\n", " \n", " data['Basics'] = final_basics\n", " \n", " return data\n", " \n", "\n", "def prepare_data_for_company(symbol, with_basics=True):\n", " \n", " data = get_returns(symbol)\n", " data = get_news(symbol, data)\n", " \n", " if with_basics:\n", " data = get_basics(symbol, data)\n", " data.to_csv(f\"{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}.csv\")\n", " else:\n", " data['Basics'] = [json.dumps({})] * len(data)\n", " data.to_csv(f\"{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}_nobasics.csv\")\n", " \n", " return data\n" ] }, { "cell_type": "code", "execution_count": 59, "id": "caf02ab7", "metadata": {}, "outputs": [], "source": [ "DOW_30 = [\n", " \"AXP\", \"AMGN\", \"AAPL\", \"BA\", \"CAT\", \"CSCO\", \"CVX\", \"GS\", \"HD\", \"HON\",\n", " \"IBM\", \"INTC\", \"JNJ\", \"KO\", \"JPM\", \"MCD\", \"MMM\", \"MRK\", \"MSFT\", \"NKE\",\n", " \"PG\", \"TRV\", \"UNH\", \"CRM\", \"VZ\", \"V\", \"WBA\", \"WMT\", \"DIS\", \"DOW\"\n", "]\n", "\n", "# prepare_data_for_company(\"DOW\", False)" ] }, { "cell_type": "code", "execution_count": 81, "id": "43d65960", "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[*********************100%%**********************] 1 of 1 completed\n", "AXP : 2023-01-08 - 2023-01-15\n", "AXP : 2023-01-15 - 2023-01-22\n", "AXP : 2023-01-22 - 2023-01-29\n", "AXP : 2023-01-29 - 2023-02-05\n", "AXP : 2023-02-05 - 2023-02-12\n", "AXP : 2023-02-12 - 2023-02-19\n", "AXP : 2023-02-19 - 2023-02-26\n", "AXP : 2023-02-26 - 2023-03-05\n", "AXP : 2023-03-05 - 2023-03-12\n", "AXP : 2023-03-12 - 2023-03-19\n", "AXP : 2023-03-19 - 2023-03-26\n", "AXP : 2023-03-26 - 2023-04-02\n", "AXP : 2023-04-02 - 2023-04-09\n", "AXP : 2023-04-09 - 2023-04-16\n", "AXP : 2023-04-16 - 2023-04-23\n", "AXP : 2023-04-23 - 2023-04-30\n", "AXP : 2023-04-30 - 2023-05-07\n", "AXP : 2023-05-07 - 2023-05-14\n", "AXP : 2023-05-14 - 2023-05-21\n", "AXP : 2023-05-21 - 2023-05-28\n", "AXP : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "AMGN : 2023-01-08 - 2023-01-15\n", "AMGN : 2023-01-15 - 2023-01-22\n", "AMGN : 2023-01-22 - 2023-01-29\n", "AMGN : 2023-01-29 - 2023-02-05\n", "AMGN : 2023-02-05 - 2023-02-12\n", "AMGN : 2023-02-12 - 2023-02-19\n", "AMGN : 2023-02-19 - 2023-02-26\n", "AMGN : 2023-02-26 - 2023-03-05\n", "AMGN : 2023-03-05 - 2023-03-12\n", "AMGN : 2023-03-12 - 2023-03-19\n", "AMGN : 2023-03-19 - 2023-03-26\n", "AMGN : 2023-03-26 - 2023-04-02\n", "AMGN : 2023-04-02 - 2023-04-09\n", "AMGN : 2023-04-09 - 2023-04-16\n", "AMGN : 2023-04-16 - 2023-04-23\n", "AMGN : 2023-04-23 - 2023-04-30\n", "AMGN : 2023-04-30 - 2023-05-07\n", "AMGN : 2023-05-07 - 2023-05-14\n", "AMGN : 2023-05-14 - 2023-05-21\n", "AMGN : 2023-05-21 - 2023-05-28\n", "AMGN : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "AAPL : 2023-01-08 - 2023-01-15\n", "AAPL : 2023-01-15 - 2023-01-22\n", "AAPL : 2023-01-22 - 2023-01-29\n", "AAPL : 2023-01-29 - 2023-02-05\n", "AAPL : 2023-02-05 - 2023-02-12\n", "AAPL : 2023-02-12 - 2023-02-19\n", "AAPL : 2023-02-19 - 2023-02-26\n", "AAPL : 2023-02-26 - 2023-03-05\n", "AAPL : 2023-03-05 - 2023-03-12\n", "AAPL : 2023-03-12 - 2023-03-19\n", 
"AAPL : 2023-03-19 - 2023-03-26\n", "AAPL : 2023-03-26 - 2023-04-02\n", "AAPL : 2023-04-02 - 2023-04-09\n", "AAPL : 2023-04-09 - 2023-04-16\n", "AAPL : 2023-04-16 - 2023-04-23\n", "AAPL : 2023-04-23 - 2023-04-30\n", "AAPL : 2023-04-30 - 2023-05-07\n", "AAPL : 2023-05-07 - 2023-05-14\n", "AAPL : 2023-05-14 - 2023-05-21\n", "AAPL : 2023-05-21 - 2023-05-28\n", "AAPL : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "BA : 2023-01-08 - 2023-01-15\n", "BA : 2023-01-15 - 2023-01-22\n", "BA : 2023-01-22 - 2023-01-29\n", "BA : 2023-01-29 - 2023-02-05\n", "BA : 2023-02-05 - 2023-02-12\n", "BA : 2023-02-12 - 2023-02-19\n", "BA : 2023-02-19 - 2023-02-26\n", "BA : 2023-02-26 - 2023-03-05\n", "BA : 2023-03-05 - 2023-03-12\n", "BA : 2023-03-12 - 2023-03-19\n", "BA : 2023-03-19 - 2023-03-26\n", "BA : 2023-03-26 - 2023-04-02\n", "BA : 2023-04-02 - 2023-04-09\n", "BA : 2023-04-09 - 2023-04-16\n", "BA : 2023-04-16 - 2023-04-23\n", "BA : 2023-04-23 - 2023-04-30\n", "BA : 2023-04-30 - 2023-05-07\n", "BA : 2023-05-07 - 2023-05-14\n", "BA : 2023-05-14 - 2023-05-21\n", "BA : 2023-05-21 - 2023-05-28\n", "BA : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "CAT : 2023-01-08 - 2023-01-15\n", "CAT : 2023-01-15 - 2023-01-22\n", "CAT : 2023-01-22 - 2023-01-29\n", "CAT : 2023-01-29 - 2023-02-05\n", "CAT : 2023-02-05 - 2023-02-12\n", "CAT : 2023-02-12 - 2023-02-19\n", "CAT : 2023-02-19 - 2023-02-26\n", "CAT : 2023-02-26 - 2023-03-05\n", "CAT : 2023-03-05 - 2023-03-12\n", "CAT : 2023-03-12 - 2023-03-19\n", "CAT : 2023-03-19 - 2023-03-26\n", "CAT : 2023-03-26 - 2023-04-02\n", "CAT : 2023-04-02 - 2023-04-09\n", "CAT : 2023-04-09 - 2023-04-16\n", "CAT : 2023-04-16 - 2023-04-23\n", "CAT : 2023-04-23 - 2023-04-30\n", "CAT : 2023-04-30 - 2023-05-07\n", "CAT : 2023-05-07 - 2023-05-14\n", "CAT : 2023-05-14 - 2023-05-21\n", "CAT : 2023-05-21 - 2023-05-28\n", "CAT : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "CSCO : 2023-01-08 - 2023-01-15\n", "CSCO : 2023-01-15 - 2023-01-22\n", "CSCO : 2023-01-22 - 2023-01-29\n", "CSCO : 2023-01-29 - 2023-02-05\n", "CSCO : 2023-02-05 - 2023-02-12\n", "CSCO : 2023-02-12 - 2023-02-19\n", "CSCO : 2023-02-19 - 2023-02-26\n", "CSCO : 2023-02-26 - 2023-03-05\n", "CSCO : 2023-03-05 - 2023-03-12\n", "CSCO : 2023-03-12 - 2023-03-19\n", "CSCO : 2023-03-19 - 2023-03-26\n", "CSCO : 2023-03-26 - 2023-04-02\n", "CSCO : 2023-04-02 - 2023-04-09\n", "CSCO : 2023-04-09 - 2023-04-16\n", "CSCO : 2023-04-16 - 2023-04-23\n", "CSCO : 2023-04-23 - 2023-04-30\n", "CSCO : 2023-04-30 - 2023-05-07\n", "CSCO : 2023-05-07 - 2023-05-14\n", "CSCO : 2023-05-14 - 2023-05-21\n", "CSCO : 2023-05-21 - 2023-05-28\n", "CSCO : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "CVX : 2023-01-08 - 2023-01-15\n", "CVX : 2023-01-15 - 2023-01-22\n", "CVX : 2023-01-22 - 2023-01-29\n", "CVX : 2023-01-29 - 2023-02-05\n", "CVX : 2023-02-05 - 2023-02-12\n", "CVX : 2023-02-12 - 2023-02-19\n", "CVX : 2023-02-19 - 2023-02-26\n", "CVX : 2023-02-26 - 2023-03-05\n", "CVX : 2023-03-05 - 2023-03-12\n", "CVX : 2023-03-12 - 2023-03-19\n", "CVX : 2023-03-19 - 2023-03-26\n", "CVX : 2023-03-26 - 2023-04-02\n", "CVX : 2023-04-02 - 2023-04-09\n", "CVX : 2023-04-09 - 2023-04-16\n", "CVX : 2023-04-16 - 2023-04-23\n", "CVX : 2023-04-23 - 2023-04-30\n", "CVX : 2023-04-30 - 2023-05-07\n", "CVX : 2023-05-07 - 2023-05-14\n", "CVX : 2023-05-14 - 
2023-05-21\n", "CVX : 2023-05-21 - 2023-05-28\n", "CVX : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "GS : 2023-01-08 - 2023-01-15\n", "GS : 2023-01-15 - 2023-01-22\n", "GS : 2023-01-22 - 2023-01-29\n", "GS : 2023-01-29 - 2023-02-05\n", "GS : 2023-02-05 - 2023-02-12\n", "GS : 2023-02-12 - 2023-02-19\n", "GS : 2023-02-19 - 2023-02-26\n", "GS : 2023-02-26 - 2023-03-05\n", "GS : 2023-03-05 - 2023-03-12\n", "GS : 2023-03-12 - 2023-03-19\n", "GS : 2023-03-19 - 2023-03-26\n", "GS : 2023-03-26 - 2023-04-02\n", "GS : 2023-04-02 - 2023-04-09\n", "GS : 2023-04-09 - 2023-04-16\n", "GS : 2023-04-16 - 2023-04-23\n", "GS : 2023-04-23 - 2023-04-30\n", "GS : 2023-04-30 - 2023-05-07\n", "GS : 2023-05-07 - 2023-05-14\n", "GS : 2023-05-14 - 2023-05-21\n", "GS : 2023-05-21 - 2023-05-28\n", "GS : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "HD : 2023-01-08 - 2023-01-15\n", "HD : 2023-01-15 - 2023-01-22\n", "HD : 2023-01-22 - 2023-01-29\n", "HD : 2023-01-29 - 2023-02-05\n", "HD : 2023-02-05 - 2023-02-12\n", "HD : 2023-02-12 - 2023-02-19\n", "HD : 2023-02-19 - 2023-02-26\n", "HD : 2023-02-26 - 2023-03-05\n", "HD : 2023-03-05 - 2023-03-12\n", "HD : 2023-03-12 - 2023-03-19\n", "HD : 2023-03-19 - 2023-03-26\n", "HD : 2023-03-26 - 2023-04-02\n", "HD : 2023-04-02 - 2023-04-09\n", "HD : 2023-04-09 - 2023-04-16\n", "HD : 2023-04-16 - 2023-04-23\n", "HD : 2023-04-23 - 2023-04-30\n", "HD : 2023-04-30 - 2023-05-07\n", "HD : 2023-05-07 - 2023-05-14\n", "HD : 2023-05-14 - 2023-05-21\n", "HD : 2023-05-21 - 2023-05-28\n", "HD : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "HON : 2023-01-08 - 2023-01-15\n", "HON : 2023-01-15 - 2023-01-22\n", "HON : 2023-01-22 - 2023-01-29\n", "HON : 2023-01-29 - 2023-02-05\n", "HON : 2023-02-05 - 2023-02-12\n", "HON : 2023-02-12 - 2023-02-19\n", "HON : 2023-02-19 - 2023-02-26\n", "HON : 2023-02-26 - 2023-03-05\n", "HON : 2023-03-05 - 2023-03-12\n", "HON : 2023-03-12 - 2023-03-19\n", "HON : 2023-03-19 - 2023-03-26\n", "HON : 2023-03-26 - 2023-04-02\n", "HON : 2023-04-02 - 2023-04-09\n", "HON : 2023-04-09 - 2023-04-16\n", "HON : 2023-04-16 - 2023-04-23\n", "HON : 2023-04-23 - 2023-04-30\n", "HON : 2023-04-30 - 2023-05-07\n", "HON : 2023-05-07 - 2023-05-14\n", "HON : 2023-05-14 - 2023-05-21\n", "HON : 2023-05-21 - 2023-05-28\n", "HON : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "IBM : 2023-01-08 - 2023-01-15\n", "IBM : 2023-01-15 - 2023-01-22\n", "IBM : 2023-01-22 - 2023-01-29\n", "IBM : 2023-01-29 - 2023-02-05\n", "IBM : 2023-02-05 - 2023-02-12\n", "IBM : 2023-02-12 - 2023-02-19\n", "IBM : 2023-02-19 - 2023-02-26\n", "IBM : 2023-02-26 - 2023-03-05\n", "IBM : 2023-03-05 - 2023-03-12\n", "IBM : 2023-03-12 - 2023-03-19\n", "IBM : 2023-03-19 - 2023-03-26\n", "IBM : 2023-03-26 - 2023-04-02\n", "IBM : 2023-04-02 - 2023-04-09\n", "IBM : 2023-04-09 - 2023-04-16\n", "IBM : 2023-04-16 - 2023-04-23\n", "IBM : 2023-04-23 - 2023-04-30\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "IBM : 2023-04-30 - 2023-05-07\n", "IBM : 2023-05-07 - 2023-05-14\n", "IBM : 2023-05-14 - 2023-05-21\n", "IBM : 2023-05-21 - 2023-05-28\n", "IBM : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "INTC : 2023-01-08 - 2023-01-15\n", "INTC : 2023-01-15 - 2023-01-22\n", "INTC : 2023-01-22 - 2023-01-29\n", "INTC : 2023-01-29 - 2023-02-05\n", 
"INTC : 2023-02-05 - 2023-02-12\n", "INTC : 2023-02-12 - 2023-02-19\n", "INTC : 2023-02-19 - 2023-02-26\n", "INTC : 2023-02-26 - 2023-03-05\n", "INTC : 2023-03-05 - 2023-03-12\n", "INTC : 2023-03-12 - 2023-03-19\n", "INTC : 2023-03-19 - 2023-03-26\n", "INTC : 2023-03-26 - 2023-04-02\n", "INTC : 2023-04-02 - 2023-04-09\n", "INTC : 2023-04-09 - 2023-04-16\n", "INTC : 2023-04-16 - 2023-04-23\n", "INTC : 2023-04-23 - 2023-04-30\n", "INTC : 2023-04-30 - 2023-05-07\n", "INTC : 2023-05-07 - 2023-05-14\n", "INTC : 2023-05-14 - 2023-05-21\n", "INTC : 2023-05-21 - 2023-05-28\n", "INTC : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "JNJ : 2023-01-08 - 2023-01-15\n", "JNJ : 2023-01-15 - 2023-01-22\n", "JNJ : 2023-01-22 - 2023-01-29\n", "JNJ : 2023-01-29 - 2023-02-05\n", "JNJ : 2023-02-05 - 2023-02-12\n", "JNJ : 2023-02-12 - 2023-02-19\n", "JNJ : 2023-02-19 - 2023-02-26\n", "JNJ : 2023-02-26 - 2023-03-05\n", "JNJ : 2023-03-05 - 2023-03-12\n", "JNJ : 2023-03-12 - 2023-03-19\n", "JNJ : 2023-03-19 - 2023-03-26\n", "JNJ : 2023-03-26 - 2023-04-02\n", "JNJ : 2023-04-02 - 2023-04-09\n", "JNJ : 2023-04-09 - 2023-04-16\n", "JNJ : 2023-04-16 - 2023-04-23\n", "JNJ : 2023-04-23 - 2023-04-30\n", "JNJ : 2023-04-30 - 2023-05-07\n", "JNJ : 2023-05-07 - 2023-05-14\n", "JNJ : 2023-05-14 - 2023-05-21\n", "JNJ : 2023-05-21 - 2023-05-28\n", "JNJ : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "KO : 2023-01-08 - 2023-01-15\n", "KO : 2023-01-15 - 2023-01-22\n", "KO : 2023-01-22 - 2023-01-29\n", "KO : 2023-01-29 - 2023-02-05\n", "KO : 2023-02-05 - 2023-02-12\n", "KO : 2023-02-12 - 2023-02-19\n", "KO : 2023-02-19 - 2023-02-26\n", "KO : 2023-02-26 - 2023-03-05\n", "KO : 2023-03-05 - 2023-03-12\n", "KO : 2023-03-12 - 2023-03-19\n", "KO : 2023-03-19 - 2023-03-26\n", "KO : 2023-03-26 - 2023-04-02\n", "KO : 2023-04-02 - 2023-04-09\n", "KO : 2023-04-09 - 2023-04-16\n", "KO : 2023-04-16 - 2023-04-23\n", "KO : 2023-04-23 - 2023-04-30\n", "KO : 2023-04-30 - 2023-05-07\n", "KO : 2023-05-07 - 2023-05-14\n", "KO : 2023-05-14 - 2023-05-21\n", "KO : 2023-05-21 - 2023-05-28\n", "KO : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "JPM : 2023-01-08 - 2023-01-15\n", "JPM : 2023-01-15 - 2023-01-22\n", "JPM : 2023-01-22 - 2023-01-29\n", "JPM : 2023-01-29 - 2023-02-05\n", "JPM : 2023-02-05 - 2023-02-12\n", "JPM : 2023-02-12 - 2023-02-19\n", "JPM : 2023-02-19 - 2023-02-26\n", "JPM : 2023-02-26 - 2023-03-05\n", "JPM : 2023-03-05 - 2023-03-12\n", "JPM : 2023-03-12 - 2023-03-19\n", "JPM : 2023-03-19 - 2023-03-26\n", "JPM : 2023-03-26 - 2023-04-02\n", "JPM : 2023-04-02 - 2023-04-09\n", "JPM : 2023-04-09 - 2023-04-16\n", "JPM : 2023-04-16 - 2023-04-23\n", "JPM : 2023-04-23 - 2023-04-30\n", "JPM : 2023-04-30 - 2023-05-07\n", "JPM : 2023-05-07 - 2023-05-14\n", "JPM : 2023-05-14 - 2023-05-21\n", "JPM : 2023-05-21 - 2023-05-28\n", "JPM : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "MCD : 2023-01-08 - 2023-01-15\n", "MCD : 2023-01-15 - 2023-01-22\n", "MCD : 2023-01-22 - 2023-01-29\n", "MCD : 2023-01-29 - 2023-02-05\n", "MCD : 2023-02-05 - 2023-02-12\n", "MCD : 2023-02-12 - 2023-02-19\n", "MCD : 2023-02-19 - 2023-02-26\n", "MCD : 2023-02-26 - 2023-03-05\n", "MCD : 2023-03-05 - 2023-03-12\n", "MCD : 2023-03-12 - 2023-03-19\n", "MCD : 2023-03-19 - 2023-03-26\n", "MCD : 2023-03-26 - 2023-04-02\n", "MCD : 2023-04-02 - 2023-04-09\n", "MCD : 
2023-04-09 - 2023-04-16\n", "MCD : 2023-04-16 - 2023-04-23\n", "MCD : 2023-04-23 - 2023-04-30\n", "MCD : 2023-04-30 - 2023-05-07\n", "MCD : 2023-05-07 - 2023-05-14\n", "MCD : 2023-05-14 - 2023-05-21\n", "MCD : 2023-05-21 - 2023-05-28\n", "MCD : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "MMM : 2023-01-08 - 2023-01-15\n", "MMM : 2023-01-15 - 2023-01-22\n", "MMM : 2023-01-22 - 2023-01-29\n", "MMM : 2023-01-29 - 2023-02-05\n", "MMM : 2023-02-05 - 2023-02-12\n", "MMM : 2023-02-12 - 2023-02-19\n", "MMM : 2023-02-19 - 2023-02-26\n", "MMM : 2023-02-26 - 2023-03-05\n", "MMM : 2023-03-05 - 2023-03-12\n", "MMM : 2023-03-12 - 2023-03-19\n", "MMM : 2023-03-19 - 2023-03-26\n", "MMM : 2023-03-26 - 2023-04-02\n", "MMM : 2023-04-02 - 2023-04-09\n", "MMM : 2023-04-09 - 2023-04-16\n", "MMM : 2023-04-16 - 2023-04-23\n", "MMM : 2023-04-23 - 2023-04-30\n", "MMM : 2023-04-30 - 2023-05-07\n", "MMM : 2023-05-07 - 2023-05-14\n", "MMM : 2023-05-14 - 2023-05-21\n", "MMM : 2023-05-21 - 2023-05-28\n", "MMM : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "MRK : 2023-01-08 - 2023-01-15\n", "MRK : 2023-01-15 - 2023-01-22\n", "MRK : 2023-01-22 - 2023-01-29\n", "MRK : 2023-01-29 - 2023-02-05\n", "MRK : 2023-02-05 - 2023-02-12\n", "MRK : 2023-02-12 - 2023-02-19\n", "MRK : 2023-02-19 - 2023-02-26\n", "MRK : 2023-02-26 - 2023-03-05\n", "MRK : 2023-03-05 - 2023-03-12\n", "MRK : 2023-03-12 - 2023-03-19\n", "MRK : 2023-03-19 - 2023-03-26\n", "MRK : 2023-03-26 - 2023-04-02\n", "MRK : 2023-04-02 - 2023-04-09\n", "MRK : 2023-04-09 - 2023-04-16\n", "MRK : 2023-04-16 - 2023-04-23\n", "MRK : 2023-04-23 - 2023-04-30\n", "MRK : 2023-04-30 - 2023-05-07\n", "MRK : 2023-05-07 - 2023-05-14\n", "MRK : 2023-05-14 - 2023-05-21\n", "MRK : 2023-05-21 - 2023-05-28\n", "MRK : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "MSFT : 2023-01-08 - 2023-01-15\n", "MSFT : 2023-01-15 - 2023-01-22\n", "MSFT : 2023-01-22 - 2023-01-29\n", "MSFT : 2023-01-29 - 2023-02-05\n", "MSFT : 2023-02-05 - 2023-02-12\n", "MSFT : 2023-02-12 - 2023-02-19\n", "MSFT : 2023-02-19 - 2023-02-26\n", "MSFT : 2023-02-26 - 2023-03-05\n", "MSFT : 2023-03-05 - 2023-03-12\n", "MSFT : 2023-03-12 - 2023-03-19\n", "MSFT : 2023-03-19 - 2023-03-26\n", "MSFT : 2023-03-26 - 2023-04-02\n", "MSFT : 2023-04-02 - 2023-04-09\n", "MSFT : 2023-04-09 - 2023-04-16\n", "MSFT : 2023-04-16 - 2023-04-23\n", "MSFT : 2023-04-23 - 2023-04-30\n", "MSFT : 2023-04-30 - 2023-05-07\n", "MSFT : 2023-05-07 - 2023-05-14\n", "MSFT : 2023-05-14 - 2023-05-21\n", "MSFT : 2023-05-21 - 2023-05-28\n", "MSFT : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "NKE : 2023-01-08 - 2023-01-15\n", "NKE : 2023-01-15 - 2023-01-22\n", "NKE : 2023-01-22 - 2023-01-29\n", "NKE : 2023-01-29 - 2023-02-05\n", "NKE : 2023-02-05 - 2023-02-12\n", "NKE : 2023-02-12 - 2023-02-19\n", "NKE : 2023-02-19 - 2023-02-26\n", "NKE : 2023-02-26 - 2023-03-05\n", "NKE : 2023-03-05 - 2023-03-12\n", "NKE : 2023-03-12 - 2023-03-19\n", "NKE : 2023-03-19 - 2023-03-26\n", "NKE : 2023-03-26 - 2023-04-02\n", "NKE : 2023-04-02 - 2023-04-09\n", "NKE : 2023-04-09 - 2023-04-16\n", "NKE : 2023-04-16 - 2023-04-23\n", "NKE : 2023-04-23 - 2023-04-30\n", "NKE : 2023-04-30 - 2023-05-07\n", "NKE : 2023-05-07 - 2023-05-14\n", "NKE : 2023-05-14 - 2023-05-21\n", "NKE : 2023-05-21 - 2023-05-28\n", "NKE : 2023-05-28 - 2023-06-04\n", 
"[*********************100%%**********************] 1 of 1 completed\n", "PG : 2023-01-08 - 2023-01-15\n", "PG : 2023-01-15 - 2023-01-22\n", "PG : 2023-01-22 - 2023-01-29\n", "PG : 2023-01-29 - 2023-02-05\n", "PG : 2023-02-05 - 2023-02-12\n", "PG : 2023-02-12 - 2023-02-19\n", "PG : 2023-02-19 - 2023-02-26\n", "PG : 2023-02-26 - 2023-03-05\n", "PG : 2023-03-05 - 2023-03-12\n", "PG : 2023-03-12 - 2023-03-19\n", "PG : 2023-03-19 - 2023-03-26\n", "PG : 2023-03-26 - 2023-04-02\n", "PG : 2023-04-02 - 2023-04-09\n", "PG : 2023-04-09 - 2023-04-16\n", "PG : 2023-04-16 - 2023-04-23\n", "PG : 2023-04-23 - 2023-04-30\n", "PG : 2023-04-30 - 2023-05-07\n", "PG : 2023-05-07 - 2023-05-14\n", "PG : 2023-05-14 - 2023-05-21\n", "PG : 2023-05-21 - 2023-05-28\n", "PG : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "TRV : 2023-01-08 - 2023-01-15\n", "TRV : 2023-01-15 - 2023-01-22\n", "TRV : 2023-01-22 - 2023-01-29\n", "TRV : 2023-01-29 - 2023-02-05\n", "TRV : 2023-02-05 - 2023-02-12\n", "TRV : 2023-02-12 - 2023-02-19\n", "TRV : 2023-02-19 - 2023-02-26\n", "TRV : 2023-02-26 - 2023-03-05\n", "TRV : 2023-03-05 - 2023-03-12\n", "TRV : 2023-03-12 - 2023-03-19\n", "TRV : 2023-03-19 - 2023-03-26\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "TRV : 2023-03-26 - 2023-04-02\n", "TRV : 2023-04-02 - 2023-04-09\n", "TRV : 2023-04-09 - 2023-04-16\n", "TRV : 2023-04-16 - 2023-04-23\n", "TRV : 2023-04-23 - 2023-04-30\n", "TRV : 2023-04-30 - 2023-05-07\n", "TRV : 2023-05-07 - 2023-05-14\n", "TRV : 2023-05-14 - 2023-05-21\n", "TRV : 2023-05-21 - 2023-05-28\n", "TRV : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "UNH : 2023-01-08 - 2023-01-15\n", "UNH : 2023-01-15 - 2023-01-22\n", "UNH : 2023-01-22 - 2023-01-29\n", "UNH : 2023-01-29 - 2023-02-05\n", "UNH : 2023-02-05 - 2023-02-12\n", "UNH : 2023-02-12 - 2023-02-19\n", "UNH : 2023-02-19 - 2023-02-26\n", "UNH : 2023-02-26 - 2023-03-05\n", "UNH : 2023-03-05 - 2023-03-12\n", "UNH : 2023-03-12 - 2023-03-19\n", "UNH : 2023-03-19 - 2023-03-26\n", "UNH : 2023-03-26 - 2023-04-02\n", "UNH : 2023-04-02 - 2023-04-09\n", "UNH : 2023-04-09 - 2023-04-16\n", "UNH : 2023-04-16 - 2023-04-23\n", "UNH : 2023-04-23 - 2023-04-30\n", "UNH : 2023-04-30 - 2023-05-07\n", "UNH : 2023-05-07 - 2023-05-14\n", "UNH : 2023-05-14 - 2023-05-21\n", "UNH : 2023-05-21 - 2023-05-28\n", "UNH : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "CRM : 2023-01-08 - 2023-01-15\n", "CRM : 2023-01-15 - 2023-01-22\n", "CRM : 2023-01-22 - 2023-01-29\n", "CRM : 2023-01-29 - 2023-02-05\n", "CRM : 2023-02-05 - 2023-02-12\n", "CRM : 2023-02-12 - 2023-02-19\n", "CRM : 2023-02-19 - 2023-02-26\n", "CRM : 2023-02-26 - 2023-03-05\n", "CRM : 2023-03-05 - 2023-03-12\n", "CRM : 2023-03-12 - 2023-03-19\n", "CRM : 2023-03-19 - 2023-03-26\n", "CRM : 2023-03-26 - 2023-04-02\n", "CRM : 2023-04-02 - 2023-04-09\n", "CRM : 2023-04-09 - 2023-04-16\n", "CRM : 2023-04-16 - 2023-04-23\n", "CRM : 2023-04-23 - 2023-04-30\n", "CRM : 2023-04-30 - 2023-05-07\n", "CRM : 2023-05-07 - 2023-05-14\n", "CRM : 2023-05-14 - 2023-05-21\n", "CRM : 2023-05-21 - 2023-05-28\n", "CRM : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "VZ : 2023-01-08 - 2023-01-15\n", "VZ : 2023-01-15 - 2023-01-22\n", "VZ : 2023-01-22 - 2023-01-29\n", "VZ : 2023-01-29 - 2023-02-05\n", "VZ : 2023-02-05 - 2023-02-12\n", "VZ : 2023-02-12 - 2023-02-19\n", "VZ : 
2023-02-19 - 2023-02-26\n", "VZ : 2023-02-26 - 2023-03-05\n", "VZ : 2023-03-05 - 2023-03-12\n", "VZ : 2023-03-12 - 2023-03-19\n", "VZ : 2023-03-19 - 2023-03-26\n", "VZ : 2023-03-26 - 2023-04-02\n", "VZ : 2023-04-02 - 2023-04-09\n", "VZ : 2023-04-09 - 2023-04-16\n", "VZ : 2023-04-16 - 2023-04-23\n", "VZ : 2023-04-23 - 2023-04-30\n", "VZ : 2023-04-30 - 2023-05-07\n", "VZ : 2023-05-07 - 2023-05-14\n", "VZ : 2023-05-14 - 2023-05-21\n", "VZ : 2023-05-21 - 2023-05-28\n", "VZ : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "V : 2023-01-08 - 2023-01-15\n", "V : 2023-01-15 - 2023-01-22\n", "V : 2023-01-22 - 2023-01-29\n", "V : 2023-01-29 - 2023-02-05\n", "V : 2023-02-05 - 2023-02-12\n", "V : 2023-02-12 - 2023-02-19\n", "V : 2023-02-19 - 2023-02-26\n", "V : 2023-02-26 - 2023-03-05\n", "V : 2023-03-05 - 2023-03-12\n", "V : 2023-03-12 - 2023-03-19\n", "V : 2023-03-19 - 2023-03-26\n", "V : 2023-03-26 - 2023-04-02\n", "V : 2023-04-02 - 2023-04-09\n", "V : 2023-04-09 - 2023-04-16\n", "V : 2023-04-16 - 2023-04-23\n", "V : 2023-04-23 - 2023-04-30\n", "V : 2023-04-30 - 2023-05-07\n", "V : 2023-05-07 - 2023-05-14\n", "V : 2023-05-14 - 2023-05-21\n", "V : 2023-05-21 - 2023-05-28\n", "V : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "WBA : 2023-01-08 - 2023-01-15\n", "WBA : 2023-01-15 - 2023-01-22\n", "WBA : 2023-01-22 - 2023-01-29\n", "WBA : 2023-01-29 - 2023-02-05\n", "WBA : 2023-02-05 - 2023-02-12\n", "WBA : 2023-02-12 - 2023-02-19\n", "WBA : 2023-02-19 - 2023-02-26\n", "WBA : 2023-02-26 - 2023-03-05\n", "WBA : 2023-03-05 - 2023-03-12\n", "WBA : 2023-03-12 - 2023-03-19\n", "WBA : 2023-03-19 - 2023-03-26\n", "WBA : 2023-03-26 - 2023-04-02\n", "WBA : 2023-04-02 - 2023-04-09\n", "WBA : 2023-04-09 - 2023-04-16\n", "WBA : 2023-04-16 - 2023-04-23\n", "WBA : 2023-04-23 - 2023-04-30\n", "WBA : 2023-04-30 - 2023-05-07\n", "WBA : 2023-05-07 - 2023-05-14\n", "WBA : 2023-05-14 - 2023-05-21\n", "WBA : 2023-05-21 - 2023-05-28\n", "WBA : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "WMT : 2023-01-08 - 2023-01-15\n", "WMT : 2023-01-15 - 2023-01-22\n", "WMT : 2023-01-22 - 2023-01-29\n", "WMT : 2023-01-29 - 2023-02-05\n", "WMT : 2023-02-05 - 2023-02-12\n", "WMT : 2023-02-12 - 2023-02-19\n", "WMT : 2023-02-19 - 2023-02-26\n", "WMT : 2023-02-26 - 2023-03-05\n", "WMT : 2023-03-05 - 2023-03-12\n", "WMT : 2023-03-12 - 2023-03-19\n", "WMT : 2023-03-19 - 2023-03-26\n", "WMT : 2023-03-26 - 2023-04-02\n", "WMT : 2023-04-02 - 2023-04-09\n", "WMT : 2023-04-09 - 2023-04-16\n", "WMT : 2023-04-16 - 2023-04-23\n", "WMT : 2023-04-23 - 2023-04-30\n", "WMT : 2023-04-30 - 2023-05-07\n", "WMT : 2023-05-07 - 2023-05-14\n", "WMT : 2023-05-14 - 2023-05-21\n", "WMT : 2023-05-21 - 2023-05-28\n", "WMT : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "DIS : 2023-01-08 - 2023-01-15\n", "DIS : 2023-01-15 - 2023-01-22\n", "DIS : 2023-01-22 - 2023-01-29\n", "DIS : 2023-01-29 - 2023-02-05\n", "DIS : 2023-02-05 - 2023-02-12\n", "DIS : 2023-02-12 - 2023-02-19\n", "DIS : 2023-02-19 - 2023-02-26\n", "DIS : 2023-02-26 - 2023-03-05\n", "DIS : 2023-03-05 - 2023-03-12\n", "DIS : 2023-03-12 - 2023-03-19\n", "DIS : 2023-03-19 - 2023-03-26\n", "DIS : 2023-03-26 - 2023-04-02\n", "DIS : 2023-04-02 - 2023-04-09\n", "DIS : 2023-04-09 - 2023-04-16\n", "DIS : 2023-04-16 - 2023-04-23\n", "DIS : 2023-04-23 - 2023-04-30\n", "DIS : 2023-04-30 - 2023-05-07\n", 
"DIS : 2023-05-07 - 2023-05-14\n", "DIS : 2023-05-14 - 2023-05-21\n", "DIS : 2023-05-21 - 2023-05-28\n", "DIS : 2023-05-28 - 2023-06-04\n", "[*********************100%%**********************] 1 of 1 completed\n", "DOW : 2023-01-08 - 2023-01-15\n", "DOW : 2023-01-15 - 2023-01-22\n", "DOW : 2023-01-22 - 2023-01-29\n", "DOW : 2023-01-29 - 2023-02-05\n", "DOW : 2023-02-05 - 2023-02-12\n", "DOW : 2023-02-12 - 2023-02-19\n", "DOW : 2023-02-19 - 2023-02-26\n", "DOW : 2023-02-26 - 2023-03-05\n", "DOW : 2023-03-05 - 2023-03-12\n", "DOW : 2023-03-12 - 2023-03-19\n", "DOW : 2023-03-19 - 2023-03-26\n", "DOW : 2023-03-26 - 2023-04-02\n", "DOW : 2023-04-02 - 2023-04-09\n", "DOW : 2023-04-09 - 2023-04-16\n", "DOW : 2023-04-16 - 2023-04-23\n", "DOW : 2023-04-23 - 2023-04-30\n", "DOW : 2023-04-30 - 2023-05-07\n", "DOW : 2023-05-07 - 2023-05-14\n", "DOW : 2023-05-14 - 2023-05-21\n", "DOW : 2023-05-21 - 2023-05-28\n", "DOW : 2023-05-28 - 2023-06-04\n" ] } ], "source": [ "for symbol in DOW_30:\n", " prepare_data_for_company(symbol)\n", "# prepare_data_for_company(symbol, False)" ] }, { "cell_type": "markdown", "id": "af655d8b", "metadata": {}, "source": [ "# Generate Prompt from Financial Data" ] }, { "cell_type": "code", "execution_count": 65, "id": "5a53c0ae", "metadata": { "scrolled": true }, "outputs": [], "source": [ "def get_company_prompt(symbol):\n", " \n", " profile = finnhub_client.company_profile2(symbol=symbol)\n", "\n", " company_template = \"[Company Introduction]:\\n\\n{name} is a leading entity in the {finnhubIndustry} sector. Incorporated and publicly traded since {ipo}, the company has established its reputation as one of the key players in the market. As of today, {name} has a market capitalization of {marketCapitalization:.2f} in {currency}, with {shareOutstanding:.2f} shares outstanding.\" \\\n", " \"\\n\\n{name} operates primarily in the {country}, trading under the ticker {ticker} on the {exchange}. As a dominant force in the {finnhubIndustry} space, the company continues to innovate and drive progress within the industry.\"\n", "\n", " formatted_str = company_template.format(**profile)\n", " \n", " return formatted_str\n", "\n", "\n", "def get_prompt_by_row(symbol, row):\n", "\n", " start_date = row['Start Date'] if isinstance(row['Start Date'], str) else row['Start Date'].strftime('%Y-%m-%d')\n", " end_date = row['End Date'] if isinstance(row['End Date'], str) else row['End Date'].strftime('%Y-%m-%d')\n", " term = 'increased' if row['End Price'] > row['Start Price'] else 'decreased'\n", " head = \"From {} to {}, {}'s stock price {} from {:.2f} to {:.2f}. 
Company news during this period are listed below:\\n\\n\".format(\n", " start_date, end_date, symbol, term, row['Start Price'], row['End Price'])\n", " \n", " news = json.loads(row[\"News\"])\n", " news = [\"[Headline]: {}\\n[Summary]: {}\\n\".format(\n", " n['headline'], n['summary']) for n in news if n['date'][:8] <= end_date.replace('-', '') and \\\n", " not n['summary'].startswith(\"Looking for stock market analysis and research with proves results?\")]\n", "\n", " basics = json.loads(row['Basics'])\n", " if basics:\n", " basics = \"Some recent basic financials of {}, reported at {}, are presented below:\\n\\n[Basic Financials]:\\n\\n\".format(\n", " symbol, basics['period']) + \"\\n\".join(f\"{k}: {v}\" for k, v in basics.items() if k != 'period')\n", " else:\n", " basics = \"[Basic Financials]:\\n\\nNo basic financial reported.\"\n", " \n", " return head, news, basics\n", "\n", "\n", "def sample_news(news, k=5):\n", " \n", " return [news[i] for i in sorted(random.sample(range(len(news)), k))]\n", "\n", "\n", "def map_bin_label(bin_lb):\n", " \n", " lb = bin_lb.replace('U', 'up by ')\n", " lb = lb.replace('D', 'down by ')\n", " lb = lb.replace('1', '0-1%')\n", " lb = lb.replace('2', '1-2%')\n", " lb = lb.replace('3', '2-3%')\n", " lb = lb.replace('4', '3-4%')\n", " if lb.endswith('+'):\n", " lb = lb.replace('5+', 'more than 5%')\n", "# lb = lb.replace('5+', '5+%')\n", " else:\n", " lb = lb.replace('5', '4-5%')\n", " \n", " return lb\n", "\n", "\n", "def get_all_prompts(symbol, min_past_weeks=1, max_past_weeks=3, with_basics=True):\n", "\n", " \n", " if with_basics:\n", " df = pd.read_csv(f'{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}.csv')\n", " else:\n", " df = pd.read_csv(f'{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}_nobasics.csv')\n", " \n", " company_prompt = get_company_prompt(symbol)\n", "\n", " prev_rows = []\n", " all_prompts = []\n", "\n", " for row_idx, row in df.iterrows():\n", "\n", " prompt = \"\"\n", " if len(prev_rows) >= min_past_weeks:\n", " idx = min(random.choice(range(min_past_weeks, max_past_weeks+1)), len(prev_rows))\n", " for i in range(-idx, 0):\n", " # Add Price Movement (Head)\n", " prompt += \"\\n\" + prev_rows[i][0]\n", " # Add News of previous weeks\n", " sampled_news = sample_news(\n", " prev_rows[i][1],\n", " min(5, len(prev_rows[i][1]))\n", " )\n", " if sampled_news:\n", " prompt += \"\\n\".join(sampled_news)\n", " else:\n", " prompt += \"No relative news reported.\"\n", "\n", " head, news, basics = get_prompt_by_row(symbol, row)\n", "\n", " prev_rows.append((head, news, basics))\n", " if len(prev_rows) > max_past_weeks:\n", " prev_rows.pop(0) \n", "\n", " if not prompt:\n", " continue\n", "\n", " prediction = map_bin_label(row['Bin Label'])\n", " \n", " prompt = company_prompt + '\\n' + prompt + '\\n' + basics\n", " prompt += f\"\\n\\nBased on all the information before {row['Start Date']}, let's first analyze the positive developments and potential concerns for {symbol}. Come up with 2-4 most important factors respectively and keep them concise. Most factors should be inferred from company related news. \" \\\n", " f\"Then let's assume your prediction for next week ({row['Start Date']} to {row['End Date']}) is {prediction}. Provide a summary analysis to support your prediction. 
The prediction result need to be inferred from your analysis at the end, and thus not appearing as a foundational factor of your analysis.\"\n", "\n",
" all_prompts.append(prompt.strip())\n", " \n", " return all_prompts" ] },
{ "cell_type": "code", "execution_count": null, "id": "92208b72", "metadata": {}, "outputs": [], "source": [
"B_INST, E_INST = \"[INST]\", \"[/INST]\"\n",
"B_SYS, E_SYS = \"<<SYS>>\\n\", \"\\n<</SYS>>\\n\\n\"\n",
"\n", "\n",
"SYSTEM_PROMPT = \"You are a seasoned stock market analyst. Your task is to list the positive developments and potential concerns for companies based on relevant news and basic financials from the past weeks, then provide an analysis and prediction for the companies' stock price movement for the upcoming week. \" \\\n",
" \"Your answer format should be as follows:\\n\\n[Positive Developments]:\\n1. ...\\n\\n[Potential Concerns]:\\n1. ...\\n\\n[Prediction & Analysis]:\\n...\\n\"\n",
"\n",
"print(SYSTEM_PROMPT)\n",
"\n",
"# prompts = get_all_prompts(\"AAPL\", 1, 3)\n",
"# prompts = get_all_prompts(\"MSFT\", 1, 3, False)\n",
"prompts = get_all_prompts(\"TRV\", 1, 4)\n",
"\n",
"print(prompts[0])\n" ] },
{ "cell_type": "markdown", "id": "2b010a45", "metadata": {}, "source": [ "# Request to GPT-4 for Financial Analysis" ] },
{ "cell_type": "code", "execution_count": 86, "id": "3e355117", "metadata": {}, "outputs": [], "source": [
"def append_to_csv(filename, input_data, output_data):\n", " \n",
" with open(filename, mode='a', newline='') as file:\n",
" writer = csv.writer(file)\n",
" writer.writerow([input_data, output_data])\n",
"\n", " \n",
"def initialize_csv(filename):\n", " \n",
" with open(filename, mode='w', newline='') as file:\n",
" writer = csv.writer(file)\n",
" writer.writerow([\"prompt\", \"answer\"])\n",
"\n", "\n",
"def query_gpt4(symbol_list, min_past_weeks=1, max_past_weeks=3, with_basics=True):\n", "\n",
" for symbol in symbol_list:\n", " \n",
" csv_file = f'{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}_gpt-4.csv' if with_basics else \\\n",
" f'{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}_nobasics_gpt-4.csv'\n", " \n",
" # resume from any rows already saved in an existing csv\n",
" if not os.path.exists(csv_file):\n",
" initialize_csv(csv_file)\n",
" pre_done = 0\n",
" else:\n",
" df = pd.read_csv(csv_file)\n",
" pre_done = len(df)\n",
"\n",
" prompts = get_all_prompts(symbol, min_past_weeks, max_past_weeks, with_basics)\n",
"\n",
" for i, prompt in enumerate(prompts):\n", " \n",
" if i < pre_done:\n",
" continue\n",
"\n",
" print(f\"{symbol} - {i}\")\n", " \n",
" # retry the API call up to 5 times before giving up on this prompt\n",
" cnt = 0\n",
" while cnt < 5:\n",
" try:\n",
" completion = client.chat.completions.create(\n",
" model=\"gpt-4\",\n",
" messages=[\n",
" {\"role\": \"system\", \"content\": SYSTEM_PROMPT},\n",
" {\"role\": \"user\", \"content\": prompt}\n",
" ]\n",
" )\n",
" break \n",
" except Exception:\n",
" cnt += 1\n",
" print(f'retry cnt {cnt}')\n", " \n",
" answer = completion.choices[0].message.content if cnt < 5 else \"\"\n",
" append_to_csv(csv_file, prompt, answer)\n", " " ] },
{ "cell_type": "code", "execution_count": 121, "id": "a9ff6ff3", "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "WBA - 12\n", "WBA - 13\n", "WBA - 14\n", "WBA - 15\n", "WBA - 16\n", "WBA - 17\n", "WBA - 18\n", "WBA - 19\n" ] } ], "source": [ "# query_gpt4(DOW_30, 1, 3)\n", "query_gpt4(DOW_30, 1, 4)\n", "# query_gpt4(['WBA'], 1, 4)" ] },
{ "cell_type": "markdown", "id": "238ba9f0", "metadata": {}, "source": [ "# Transform into Llama2 Training Format" ] },
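{ "cell_type": "markdown", "id": "llama2-format-note", "metadata": {}, "source": [ "The next cell is a minimal sketch of the Llama2 chat layout the training samples follow: `[INST] <<SYS>> {system prompt} <</SYS>> {user prompt} [/INST]`. The user-prompt placeholder is illustrative only; `gpt4_to_llama` below assembles the real samples from the GPT-4 CSVs." ] },
{ "cell_type": "code", "execution_count": null, "id": "llama2-format-demo", "metadata": {}, "outputs": [], "source": [ "# Illustrative sketch of the Llama2 chat layout (placeholder user prompt, not real data)\n", "example = B_INST + B_SYS + SYSTEM_PROMPT + E_SYS + \"<financial prompt goes here>\" + E_INST\n", "print(example)" ] },
{ "cell_type": "code", "execution_count": 93, "id": "d2627f5a", "metadata": 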
{}, "outputs": [], "source": [ "def gpt4_to_llama(symbol, with_basics=True):\n", " \n", " csv_file = f'{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}_gpt-4.csv' if with_basics else \\\n", " f'{DATA_DIR}/{symbol}_{START_DATE}_{END_DATE}_nobasics_gpt-4.csv'\n", " \n", " df = pd.read_csv(csv_file)\n", " \n", " prompts, answers, periods, labels = [], [], [], []\n", " \n", " for i, row in df.iterrows():\n", " \n", " prompt, answer = row['prompt'], row['answer']\n", " \n", " res = re.search(r\"Then let's assume your prediction for next week \\((.*)\\) is ((:?up|down) by .*%).\", prompt)\n", " \n", " period, label = res.group(1), res.group(2)\n", "# label = label.replace('more than 5', '5+')\n", " \n", " prompt = re.sub(\n", " r\"Then let's assume your prediction for next week \\((.*)\\) is (up|down) by ((:?.*)%). Provide a summary analysis to support your prediction. The prediction result need to be inferred from your analysis at the end, and thus not appearing as a foundational factor of your analysis.\", \n", " f\"Then make your prediction of the {symbol} stock price movement for next week ({period}). Provide a summary analysis to support your prediction.\",\n", " prompt\n", " )\n", " try:\n", " answer = re.sub(\n", " r\"\\[Prediction & Analysis\\]:\\s*\",\n", " f\"[Prediction & Analysis]:\\nPrediction: {label.capitalize()}\\nAnalysis: \",\n", " answer\n", " )\n", " except Exception:\n", " print(symbol, i)\n", " print(label)\n", " print(answer)\n", " continue\n", " \n", " new_system_prompt = SYSTEM_PROMPT.replace(':\\n...', '\\nPrediction: ...\\nAnalysis: ...')\n", "# new_system_prompt = SYSTEM_PROMPT.replace(':\\n...', '\\nPrediction: {Up|Down} by {1-2|2-3|3-4|4-5|5+}%\\nAnalysis: ...')\n", " \n", " prompt = B_INST + B_SYS + new_system_prompt + E_SYS + prompt + E_INST\n", " \n", " prompts.append(prompt)\n", " answers.append(answer)\n", " periods.append(period)\n", " labels.append(label)\n", " \n", " return {\n", " \"prompt\": prompts,\n", " \"answer\": answers,\n", " \"period\": periods,\n", " \"label\": labels,\n", " }\n", "\n", "\n", "def create_dataset(symbol_list, train_ratio=0.8, with_basics=True):\n", "\n", " train_dataset_list = []\n", " test_dataset_list = []\n", "\n", " for symbol in symbol_list:\n", "\n", " data_dict = gpt4_to_llama(symbol, with_basics)\n", "# print(data_dict['prompt'][-1])\n", "# print(data_dict['answer'][-1])\n", " symbols = [symbol] * len(data_dict['label'])\n", " data_dict.update({\"symbol\": symbols})\n", "\n", " dataset = Dataset.from_dict(data_dict)\n", " train_size = round(train_ratio * len(dataset))\n", "\n", " train_dataset_list.append(dataset.select(range(train_size)))\n", " test_dataset_list.append(dataset.select(range(train_size, len(dataset))))\n", "\n", " train_dataset = datasets.concatenate_datasets(train_dataset_list)\n", " test_dataset = datasets.concatenate_datasets(test_dataset_list)\n", "\n", " dataset = datasets.DatasetDict({\n", " 'train': train_dataset,\n", " 'test': test_dataset\n", " })\n", " \n", " return dataset\n", " " ] }, { "cell_type": "code", "execution_count": 129, "id": "e089b1bf", "metadata": { "scrolled": true }, "outputs": [], "source": [ "# v1\n", "# dow30_dataset = create_dataset(DOW30, True)\n", "# v2\n", "# dow30_nobasic_dataset = create_dataset(DOW_30, 0.8, False)\n", "# v3\n", "dow30_v3_dataset = create_dataset(DOW_30, 0.9)" ] }, { "cell_type": "code", "execution_count": 130, "id": "123f2db9", "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "439535ce3e804a3d847f1e03df02283d", 
"version_major": 2, "version_minor": 0 }, "text/plain": [ "Saving the dataset (0/1 shards): 0%| | 0/540 [00:00= date:\n", " prices.append(stock_data['Close'][i])\n", " dates.append(datetime.strptime(available_dates[i], \"%Y-%m-%d\"))\n", " break\n", "\n", " dates.append(datetime.strptime(available_dates[-1], \"%Y-%m-%d\"))\n", " prices.append(stock_data['Close'][-1])\n", " \n", " return pd.DataFrame({\n", " \"Start Date\": dates[:-1], \"End Date\": dates[1:],\n", " \"Start Price\": prices[:-1], \"End Price\": prices[1:]\n", " })\n", "\n", "\n", "def get_current_basics(symbol, curday):\n", "\n", " basic_financials = finnhub_client.company_basic_financials(symbol, 'all')\n", " \n", " final_basics, basic_list, basic_dict = [], [], defaultdict(dict)\n", " \n", " for metric, value_list in basic_financials['series']['quarterly'].items():\n", " for value in value_list:\n", " basic_dict[value['period']].update({metric: value['v']})\n", "\n", " for k, v in basic_dict.items():\n", " v.update({'period': k})\n", " basic_list.append(v)\n", " \n", " basic_list.sort(key=lambda x: x['period'])\n", " \n", " for basic in basic_list[::-1]:\n", " if basic['period'] <= curday:\n", " break\n", " \n", " return basic\n", " \n", "\n", "def get_all_prompts_online(symbol, data, curday, with_basics=True):\n", "\n", " company_prompt = get_company_prompt(symbol)\n", "\n", " prev_rows = []\n", "\n", " for row_idx, row in data.iterrows():\n", " head, news, _ = get_prompt_by_row(symbol, row)\n", " prev_rows.append((head, news, None))\n", " \n", " prompt = \"\"\n", " for i in range(-len(prev_rows), 0):\n", " prompt += \"\\n\" + prev_rows[i][0]\n", " sampled_news = sample_news(\n", " prev_rows[i][1],\n", " min(5, len(prev_rows[i][1]))\n", " )\n", " if sampled_news:\n", " prompt += \"\\n\".join(sampled_news)\n", " else:\n", " prompt += \"No relative news reported.\"\n", " \n", " period = \"{} to {}\".format(curday, n_weeks_before(curday, -1))\n", " \n", " if with_basics:\n", " basics = get_current_basics(symbol, curday)\n", " basics = \"Some recent basic financials of {}, reported at {}, are presented below:\\n\\n[Basic Financials]:\\n\\n\".format(\n", " symbol, basics['period']) + \"\\n\".join(f\"{k}: {v}\" for k, v in basics.items() if k != 'period')\n", " else:\n", " basics = \"[Basic Financials]:\\n\\nNo basic financial reported.\"\n", "\n", " info = company_prompt + '\\n' + prompt + '\\n' + basics\n", " prompt = info + f\"\\n\\nBased on all the information before {curday}, let's first analyze the positive developments and potential concerns for {symbol}. Come up with 2-4 most important factors respectively and keep them concise. Most factors should be inferred from company related news. \" \\\n", " f\"Then make your prediction of the {symbol} stock price movement for next week ({period}). 
Provide a summary analysis to support your prediction.\"\n", " \n", " return info, prompt" ] }, { "cell_type": "code", "execution_count": 76, "id": "8f48aab1", "metadata": { "scrolled": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[*********************100%%**********************] 1 of 1 completed\n", "AAPL : 2023-10-25 - 2023-11-01\n", "AAPL : 2023-11-01 - 2023-11-07\n" ] } ], "source": [ "ticker = \"AAPL\"\n", "n_weeks = 2\n", "curday = get_curday()\n", "steps = [n_weeks_before(curday, n) for n in range(n_weeks + 1)][::-1]\n", "\n", "data = get_stock_data(ticker, steps)\n", "\n", "data = get_news(ticker, data)\n", "\n", "data['Basics'] = [json.dumps({})] * len(data)\n", "# data = get_basics(ticker, data, always=True)\n" ] }, { "cell_type": "code", "execution_count": 91, "id": "84bb302a", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[Company Introduction]:\n", "\n", "Apple Inc is a leading entity in the Technology sector. Incorporated and publicly traded since 1980-12-12, the company has established its reputation as one of the key players in the market. As of today, Apple Inc has a market capitalization of 2809837.86 in USD, with 15634.23 shares outstanding.\n", "\n", "Apple Inc operates primarily in the US, trading under the ticker AAPL on the NASDAQ NMS - GLOBAL MARKET. As a dominant force in the Technology space, the company continues to innovate and drive progress within the industry.\n", "\n", "From 2023-10-25 to 2023-11-01, AAPL's stock price increased from 171.10 to 173.97. Company news during this period are listed below:\n", "\n", "[Headline]: 25 Largest Economies in the World by 2075\n", "[Summary]: In this article, we will be taking a look at the 25 largest economies in the world by 2075. To skip our detailed analysis, you can go directly to see the 5 largest economies in the world by 2075. In both 2022 and 2023, the global economy has struggled significantly after record inflation enveloped most countries across […]\n", "\n", "[Headline]: India opposition accuses govt of trying to hack lawmakers' iPhones\n", "[Summary]: Indian opposition leader Rahul Gandhi on Tuesday accused Prime Minister Narendra Modi's government of trying to hack into senior opposition politicians' mobile phones, after they reported receiving warning messages from Apple. Some of the lawmakers shared screenshots on social media of a notification quoting the iPhone manufacturer as saying: \"Apple believes you are being targeted by state-sponsored attackers who are trying to remotely compromise the iPhone associated with your Apple ID\". \"Hack us all you want,\" Gandhi told a news conference in New Delhi, in reference to Modi.\n", "\n", "[Headline]: 39% Of This Apple Insider's Holdings Were Sold\n", "[Summary]: Looking at Apple Inc.'s ( NASDAQ:AAPL ) insider transactions over the last year, we can see that insiders were net...\n", "\n", "[Headline]: Indian opposition MPs accuse government of trying to hack their iPhones\n", "[Summary]: Ruling BJP rejects claims of involvement following Apple notifications of possible ‘state-sponsored’ attacks\n", "\n", "[Headline]: Should You Buy These 2 ‘Magnificent Seven’ Stocks Ahead of Earnings? Apple and Nvidia in Focus\n", "[Summary]: What should investors make of this year’s third-quarter earnings? The Q3 results have been pretty good, with 78% of companies reporting so far beating the forecasts, but stocks are still feeling pressure. 
One obvious sign of that pressure: the S&P 500 this week hit its lowest point since last May, and is just shy of correction territory. The effect is most clearly seen in the ‘Magnificent Seven,’ a group of Big Tech giants whose gains earlier in the year carried the markets generally – but which\n", "\n", "From 2023-11-01 to 2023-11-07, AAPL's stock price increased from 173.97 to 181.25. Company news during this period are listed below:\n", "\n", "[Headline]: Apple Earnings: Why Guidance Will Be Key\n", "[Summary]: Tech giant Apple (NASDAQ: AAPL) is scheduled to report its fiscal fourth-quarter results on Thursday. After all, the company's approximately $2.7 trillion market cap is big enough to influence major market indexes like the S&P 500; Apple represents about 7% of the index. While the company's fiscal fourth-quarter financial performance will definitely be important, investors may pay even closer attention to another metric: management's guidance for its fiscal first-quarter revenue.\n", "\n", "[Headline]: Analysts offer hot takes on Q4 2023 Apple results\n", "[Summary]: Analysts have weighed in on Apple's Q4 2023 financial results, with most taking the view that the quarter is decent-performing, but with caution about a shorter Q1 2024.\n", "\n", "[Headline]: How to run new macOS versions on older Macs with OpenCore\n", "[Summary]: Apple removes support for old Mac hardware in new macOS releases. Here's how to run modern macOS on older Macs using OpenCore.\n", "\n", "[Headline]: Apple Watch import ban: what you need to know\n", "[Summary]: There is a possibility of an import ban in the U.S. on the Apple Watch. Here's what you need to know before it potentially goes into effect on Christmas Day, 2023.\n", "\n", "[Headline]: ChatGPT: Everything you need to know about the AI-powered chatbot\n", "[Summary]: ChatGPT, OpenAI’s text-generating AI chatbot, has taken the world by storm. What started as a tool to hyper-charge productivity through writing essays and code with short text prompts has evolved into a behemoth used by more than 92% of Fortune 500 companies for more wide-ranging needs. While there is a more…nefarious side to ChatGPT, it’s clear that AI tools are not going away anytime soon. Since its initial launch nearly a year ago, ChatGPT has hit 100 million weekly active users, and OpenAI i\n", "\n", "[Basic Financials]:\n", "\n", "No basic financial reported.\n", "\n", "Based on all the information before 2023-11-08, let's first analyze the positive developments and potential concerns for AAPL. Come up with 2-4 most important factors respectively and keep them concise. Most factors should be inferred from company related news. Then make your prediction of the AAPL stock price movement for next week (2023-11-08 to 2023-11-15). Provide a summary analysis to support your prediction.\n" ] } ], "source": [ "info, prompt = get_all_prompts_online(ticker, data, curday, False)\n", "\n", "print(prompt)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.9" } }, "nbformat": 4, "nbformat_minor": 5 }