wnstnb committed
Commit af7c3c8 · 1 Parent(s): cdb2f42

use lightgbm now

Files changed (5)
  1. model_1h.py +3 -1
  2. model_30m.py +4 -2
  3. model_90m.py +3 -1
  4. model_day.py +4 -1
  5. requirements.txt +1 -0
model_1h.py CHANGED
@@ -16,6 +16,7 @@ from sklearn.metrics import roc_auc_score, precision_score, recall_score
 import datetime
 from pandas.tseries.offsets import BDay
 from datasets import load_dataset
+import lightgbm as lgb
 
 def walk_forward_validation(df, target_column, num_training_rows, num_periods):
 
@@ -73,7 +74,8 @@ def walk_forward_validation_seq(df, target_column_clf, target_column_regr, num_t
 df['RegrModelOut'] = df['RegrModelOut'].astype(bool)
 
 # Create an XGBRegressor model
-model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+# model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+model2 = lgb.LGBMClassifier(n_estimators=10, random_state=42, verbosity=-1)
 # model = linear_model.LogisticRegression(max_iter=1500)
 
 overall_results = []
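
The same swap appears in all four model scripts: the walk-forward classifier moves from xgboost.XGBClassifier to lightgbm.LGBMClassifier with matching hyperparameters (verbosity=-1 silences LightGBM's per-fit logging). Below is a minimal sketch, not taken from the repo, using synthetic data and illustrative names, of why this is close to a drop-in change: both estimators follow the scikit-learn fit/predict_proba API, so the surrounding walk-forward loop does not need to change.

# Minimal sketch, assuming synthetic data; X_train, y_train and X_next are
# illustrative stand-ins for one training window and the next period.
import numpy as np
import lightgbm as lgb

rng = np.random.default_rng(42)
X_train = rng.normal(size=(200, 5))           # placeholder feature window
y_train = (X_train[:, 0] > 0).astype(int)     # placeholder binary target
X_next = rng.normal(size=(1, 5))              # next out-of-sample period

# Same constructor call as in the diff above.
model2 = lgb.LGBMClassifier(n_estimators=10, random_state=42, verbosity=-1)
model2.fit(X_train, y_train)
prob_up = model2.predict_proba(X_next)[:, 1]  # probability of the positive class
print(prob_up)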
model_30m.py CHANGED
@@ -16,6 +16,7 @@ from sklearn.metrics import roc_auc_score, precision_score, recall_score
 import datetime
 from pandas.tseries.offsets import BDay
 from datasets import load_dataset
+import lightgbm as lgb
 
 # If the dataset is gated/private, make sure you have run huggingface-cli login
 def walk_forward_validation(df, target_column, num_training_rows, num_periods):
@@ -73,7 +74,8 @@ def walk_forward_validation_seq(df, target_column_clf, target_column_regr, num_t
 df['RegrModelOut'] = df['RegrModelOut'].astype(bool)
 
 # Create an XGBRegressor model
-model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+# model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+model2 = lgb.LGBMClassifier(n_estimators=10, random_state=42, verbosity=-1)
 # model = linear_model.LogisticRegression(max_iter=1500)
 
 overall_results = []
@@ -233,7 +235,7 @@ def get_data():
 # Get incremental data
 spx1 = yf.Ticker('^GSPC')
 yfp = spx1.history(start=last_date, interval='30m')
-
+
 if len(yfp) > 0:
 # Concat current and incremental
 df_30m = pd.concat([fr, yfp])
model_90m.py CHANGED
@@ -16,6 +16,7 @@ from sklearn.metrics import roc_auc_score, precision_score, recall_score
 import datetime
 from pandas.tseries.offsets import BDay
 from datasets import load_dataset
+import lightgbm as lgb
 
 def walk_forward_validation(df, target_column, num_training_rows, num_periods):
 
@@ -73,7 +74,8 @@ def walk_forward_validation_seq(df, target_column_clf, target_column_regr, num_t
 df['RegrModelOut'] = df['RegrModelOut'].astype(bool)
 
 # Create an XGBRegressor model
-model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+# model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+model2 = lgb.LGBMClassifier(n_estimators=10, random_state=42, verbosity=-1)
 # model = linear_model.LogisticRegression(max_iter=1500)
 
 overall_results = []
model_day.py CHANGED
@@ -15,6 +15,8 @@ import os
 from sklearn.metrics import roc_auc_score, precision_score, recall_score
 import datetime
 from pandas.tseries.offsets import BDay
+import lightgbm as lgb
+
 
 def walk_forward_validation(df, target_column, num_training_rows, num_periods):
 
@@ -67,7 +69,8 @@ def walk_forward_validation_seq(df, target_column_clf, target_column_regr, num_t
 df['RegrModelOut'] = df['RegrModelOut'].astype(bool)
 
 # Create an XGBRegressor model
-model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+# model2 = xgb.XGBClassifier(n_estimators=10, random_state = 42)
+model2 = lgb.LGBMClassifier(n_estimators=10, random_state=42, verbosity=-1)
 # model = linear_model.LogisticRegression(max_iter=1500)
 
 overall_results = []
requirements.txt CHANGED
@@ -7,6 +7,7 @@ requests
 beautifulsoup4
 typing_extensions
 xgboost
+lightgbm
 tqdm
 fastjsonschema
 json5
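
Since requirements.txt does not pin a version, reinstalling the dependencies (for example with pip install -r requirements.txt) will pull the latest available LightGBM release. xgboost stays in the file, consistent with the old XGBClassifier line being commented out rather than removed.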