tebakaja commited on
Commit
0a7b47e
·
0 Parent(s):

Feat: Crafting LSTM, GRU, and LSTM_GRU model

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .github/workflows/gru_pipeline.yaml +187 -0
  2. .github/workflows/lstm_gru_pipeline.yaml +187 -0
  3. .github/workflows/lstm_pipeline.yaml +187 -0
  4. .gitignore +19 -0
  5. .vercelignore +7 -0
  6. Dockerfile +44 -0
  7. Makefile +2 -0
  8. README.md +16 -0
  9. app.py +28 -0
  10. converter.py +40 -0
  11. coret-coretan.ipynb +0 -0
  12. dev.requirements.txt +74 -0
  13. diagram/cryptocurrency_prediction.ai +0 -0
  14. diagram/cryptocurrency_prediction.jpg +0 -0
  15. diagram/icons/Yahoo!_Finance_logo_2021.png +0 -0
  16. diagram/icons/csv.png +0 -0
  17. diagram/icons/docker.png +0 -0
  18. diagram/icons/fastapi.png +0 -0
  19. diagram/icons/file.png +0 -0
  20. diagram/icons/github actions.png +0 -0
  21. diagram/icons/github.png +0 -0
  22. diagram/icons/golang.png +0 -0
  23. diagram/icons/json.png +0 -0
  24. diagram/icons/keras.png +0 -0
  25. diagram/icons/nestjs.png +0 -0
  26. diagram/icons/pickle.png +0 -0
  27. diagram/icons/spaces.png +0 -0
  28. diagram/icons/typescript.png +0 -0
  29. diagram/icons/vercel.png +0 -0
  30. go.mod +3 -0
  31. postman/Yahoo Finance.postman_collection.json +69 -0
  32. postman/response.json +0 -0
  33. postman/symbols.json +54 -0
  34. pyproject.toml +15 -0
  35. pyvenv.cfg +3 -0
  36. requirements.txt +13 -0
  37. restful/controllers.py +110 -0
  38. restful/cutils/build/lib.linux-x86_64-3.10/utilities.cpython-310-x86_64-linux-gnu.so +0 -0
  39. restful/cutils/build/temp.linux-x86_64-3.10/utilities.o +0 -0
  40. restful/cutils/setup.py +9 -0
  41. restful/cutils/utilities.c +0 -0
  42. restful/cutils/utilities.cpython-310-x86_64-linux-gnu.so +0 -0
  43. restful/cutils/utilities.pyx +56 -0
  44. restful/routes.py +25 -0
  45. restful/schemas.py +6 -0
  46. restful/services.py +19 -0
  47. restful/utilities.py +61 -0
  48. schedulers/gru_schedule.ctl +1 -0
  49. schedulers/lstm_gru_schedule.ctl +1 -0
  50. schedulers/lstm_schedule.ctl +1 -0
.github/workflows/gru_pipeline.yaml ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Daily GRU training pipeline: scrape Yahoo Finance data, train the GRU
# model, archive artifacts to Google Drive, and push the updated schedule
# marker back to the repository.
name: GRU Pipeline

on:
  push:
    branches:
      - main
    tags:
      - '*'
  schedule:
    - cron: "0 7 * * *"
    # 14 - 7 = 7

jobs:
  extraction_train_modeling:
    name: Data Extraction, Training, and Modeling
    runs-on: ubuntu-latest

    steps:
      - name: Set global directory
        run: git config --global --add safe.directory /github/workspace

      - uses: actions/checkout@v3
        with:
          lfs: true
          persist-credentials: false
          fetch-depth: 1

      # FIX: read this pipeline's own state file. It previously read
      # schedulers/lstm_gru_schedule.ctl, which belongs to the LSTM_GRU
      # pipeline, so the GRU and LSTM_GRU pipelines clobbered each
      # other's "last run" date.
      - name: Read pipeline schedule date
        id: read_schedule
        run: |
          SCHEDULE_DATE=$(cat schedulers/gru_schedule.ctl)
          echo "schedule_date=${SCHEDULE_DATE}" >> $GITHUB_ENV

      - name: Get current date
        id: get_date
        run: echo "current_date=$(date +'%Y-%m-%d')" >> $GITHUB_ENV

      # match=true means the pipeline already ran today; later steps skip.
      - name: Check if dates match
        id: date_check
        run: |
          if [ "$schedule_date" = "$current_date" ]; then
            echo "match=true" >> $GITHUB_ENV
          else
            echo "match=false" >> $GITHUB_ENV
          fi

      - name: Scraping Yahoo Finance
        if: env.match != 'true'
        run: |
          mkdir datasets
          wget https://github.com/belajarqywok/cryptocurrency_prediction/raw/main/postman/symbols.json \
            -O postman/symbols.json
          go run scraper.go

      - name: Install Libraries
        if: env.match != 'true'
        run: pip install -r requirements.txt

      - name: Modeling and Training
        if: env.match != 'true'
        run: |
          mkdir models
          mkdir pickles
          mkdir posttrained
          python training.py --algorithm=GRU

      # FIX: write the marker back to the GRU state file
      # (was schedulers/lstm_gru_schedule.ctl).
      - name: Set Pipeline Schedule
        if: env.match != 'true'
        run: echo "$(date +'%Y-%m-%d')" > schedulers/gru_schedule.ctl

      - name: Zip Posttrained, Models, and Pickles
        if: env.match != 'true'
        run: |
          zip -r models.zip models
          zip -r pickles.zip pickles
          zip -r datasets.zip datasets
          zip -r posttrained.zip posttrained

      - name: Store Datasets to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: datasets.zip
          folderId: ${{ secrets.GDRIVE_CRYPTO_ID }}
          name: datasets.zip
          overwrite: "true"

      - name: Store Models to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: models.zip
          folderId: ${{ secrets.GDRIVE_GRU_ID }}
          name: models.zip
          overwrite: "true"

      - name: Store Pickles to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: pickles.zip
          folderId: ${{ secrets.GDRIVE_GRU_ID }}
          name: pickles.zip
          overwrite: "true"

      - name: Store Posttrained to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: posttrained.zip
          folderId: ${{ secrets.GDRIVE_GRU_ID }}
          name: posttrained.zip
          overwrite: "true"

      # FIX: step-name typo ("Temporarary" -> "Temporary").
      - name: Remove Temporary Files and Directories
        if: env.match != 'true'
        run: |
          rm models.zip
          rm pickles.zip
          rm datasets.zip
          rm posttrained.zip

          rm -rf models
          rm -rf pickles
          rm -rf datasets
          rm -rf posttrained

      - name: Commit changes
        if: env.match != 'true'
        run: |
          git config --local user.email "[email protected]"
          git config --local user.name "belajarqywok"
          git add -A
          git commit -m "Data Extraction, Training, and Modeling"

      - name: Push changes
        if: env.match != 'true'
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.GH_TOKEN }}
          branch: main

  # model_deployment:
  #   name: Model Deployment
  #   runs-on: ubuntu-latest
  #   needs: extraction_train_modeling
  #   environment: Production

  #   env:
  #     HF_TOKEN: ${{ secrets.HF_TOKEN }}
  #     SPACE_NAME: cryptocurrency_prediction
  #     HF_USERNAME: qywok

  #   steps:
  #     - name: Set global directory
  #       run: git config --global --add safe.directory /github/workspace

  #     - uses: actions/checkout@v3
  #       with:
  #         persist-credentials: false
  #         fetch-depth: 1000

  #     - name: Check git status
  #       run: git status

  #     - name: Configure git
  #       run: |
  #         git config --local user.email "[email protected]"
  #         git config --local user.name "qywok"

  #     - name: Pull changes from remote
  #       run: |
  #         git pull https://$HF_USERNAME:[email protected]/spaces/$HF_USERNAME/$SPACE_NAME main || \
  #         (git merge --strategy-option theirs)

  #     - name: Add and commit changes
  #       run: |
  #         git add -A
  #         git diff-index --quiet HEAD || git commit -m "Model Deployment"

  #     - name: Push to Hugging Face
  #       run: |
  #         git push https://$HF_USERNAME:[email protected]/spaces/$HF_USERNAME/$SPACE_NAME main --force
.github/workflows/lstm_gru_pipeline.yaml ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Daily LSTM_GRU training pipeline: scrape Yahoo Finance data, train the
# hybrid LSTM_GRU model, archive artifacts to Google Drive, and push the
# updated schedule marker back to the repository.
name: LSTM GRU Pipeline

on:
  push:
    branches:
      - main
    tags:
      - '*'
  schedule:
    - cron: "0 9 * * *"
    # 16 - 7 = 9

jobs:
  extraction_train_modeling:
    name: Data Extraction, Training, and Modeling
    runs-on: ubuntu-latest

    steps:
      - name: Set global directory
        run: git config --global --add safe.directory /github/workspace

      - uses: actions/checkout@v3
        with:
          lfs: true
          persist-credentials: false
          fetch-depth: 1

      - name: Read pipeline schedule date
        id: read_schedule
        run: |
          SCHEDULE_DATE=$(cat schedulers/lstm_gru_schedule.ctl)
          echo "schedule_date=${SCHEDULE_DATE}" >> $GITHUB_ENV

      - name: Get current date
        id: get_date
        run: echo "current_date=$(date +'%Y-%m-%d')" >> $GITHUB_ENV

      # match=true means the pipeline already ran today; later steps skip.
      - name: Check if dates match
        id: date_check
        run: |
          if [ "$schedule_date" = "$current_date" ]; then
            echo "match=true" >> $GITHUB_ENV
          else
            echo "match=false" >> $GITHUB_ENV
          fi

      - name: Scraping Yahoo Finance
        if: env.match != 'true'
        run: |
          mkdir datasets
          wget https://github.com/belajarqywok/cryptocurrency_prediction/raw/main/postman/symbols.json \
            -O postman/symbols.json
          go run scraper.go

      - name: Install Libraries
        if: env.match != 'true'
        run: pip install -r requirements.txt

      - name: Modeling and Training
        if: env.match != 'true'
        run: |
          mkdir models
          mkdir pickles
          mkdir posttrained
          python training.py --algorithm=LSTM_GRU

      - name: Set Pipeline Schedule
        if: env.match != 'true'
        run: echo "$(date +'%Y-%m-%d')" > schedulers/lstm_gru_schedule.ctl

      - name: Zip Posttrained, Models, and Pickles
        if: env.match != 'true'
        run: |
          zip -r models.zip models
          zip -r pickles.zip pickles
          zip -r datasets.zip datasets
          zip -r posttrained.zip posttrained

      - name: Store Datasets to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: datasets.zip
          folderId: ${{ secrets.GDRIVE_CRYPTO_ID }}
          name: datasets.zip
          overwrite: "true"

      - name: Store Models to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: models.zip
          folderId: ${{ secrets.GDRIVE_LSTM_GRU_ID }}
          name: models.zip
          overwrite: "true"

      - name: Store Pickles to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: pickles.zip
          folderId: ${{ secrets.GDRIVE_LSTM_GRU_ID }}
          name: pickles.zip
          overwrite: "true"

      - name: Store Posttrained to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: posttrained.zip
          folderId: ${{ secrets.GDRIVE_LSTM_GRU_ID }}
          name: posttrained.zip
          overwrite: "true"

      # FIX: step-name typo ("Temporarary" -> "Temporary").
      - name: Remove Temporary Files and Directories
        if: env.match != 'true'
        run: |
          rm models.zip
          rm pickles.zip
          rm datasets.zip
          rm posttrained.zip

          rm -rf models
          rm -rf pickles
          rm -rf datasets
          rm -rf posttrained

      - name: Commit changes
        if: env.match != 'true'
        run: |
          git config --local user.email "[email protected]"
          git config --local user.name "belajarqywok"
          git add -A
          git commit -m "Data Extraction, Training, and Modeling"

      - name: Push changes
        if: env.match != 'true'
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.GH_TOKEN }}
          branch: main

  # model_deployment:
  #   name: Model Deployment
  #   runs-on: ubuntu-latest
  #   needs: extraction_train_modeling
  #   environment: Production

  #   env:
  #     HF_TOKEN: ${{ secrets.HF_TOKEN }}
  #     SPACE_NAME: cryptocurrency_prediction
  #     HF_USERNAME: qywok

  #   steps:
  #     - name: Set global directory
  #       run: git config --global --add safe.directory /github/workspace

  #     - uses: actions/checkout@v3
  #       with:
  #         persist-credentials: false
  #         fetch-depth: 1000

  #     - name: Check git status
  #       run: git status

  #     - name: Configure git
  #       run: |
  #         git config --local user.email "[email protected]"
  #         git config --local user.name "qywok"

  #     - name: Pull changes from remote
  #       run: |
  #         git pull https://$HF_USERNAME:[email protected]/spaces/$HF_USERNAME/$SPACE_NAME main || \
  #         (git merge --strategy-option theirs)

  #     - name: Add and commit changes
  #       run: |
  #         git add -A
  #         git diff-index --quiet HEAD || git commit -m "Model Deployment"

  #     - name: Push to Hugging Face
  #       run: |
  #         git push https://$HF_USERNAME:[email protected]/spaces/$HF_USERNAME/$SPACE_NAME main --force
.github/workflows/lstm_pipeline.yaml ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Daily LSTM training pipeline: scrape Yahoo Finance data, train the LSTM
# model, archive artifacts to Google Drive, and push the updated schedule
# marker back to the repository.
name: LSTM Pipeline

on:
  push:
    branches:
      - main
    tags:
      - '*'
  schedule:
    - cron: "0 11 * * *"
    # 18 - 7 = 11

jobs:
  extraction_train_modeling:
    name: Data Extraction, Training, and Modeling
    runs-on: ubuntu-latest

    steps:
      - name: Set global directory
        run: git config --global --add safe.directory /github/workspace

      - uses: actions/checkout@v3
        with:
          lfs: true
          persist-credentials: false
          fetch-depth: 1

      - name: Read pipeline schedule date
        id: read_schedule
        run: |
          SCHEDULE_DATE=$(cat schedulers/lstm_schedule.ctl)
          echo "schedule_date=${SCHEDULE_DATE}" >> $GITHUB_ENV

      - name: Get current date
        id: get_date
        run: echo "current_date=$(date +'%Y-%m-%d')" >> $GITHUB_ENV

      # match=true means the pipeline already ran today; later steps skip.
      - name: Check if dates match
        id: date_check
        run: |
          if [ "$schedule_date" = "$current_date" ]; then
            echo "match=true" >> $GITHUB_ENV
          else
            echo "match=false" >> $GITHUB_ENV
          fi

      - name: Scraping Yahoo Finance
        if: env.match != 'true'
        run: |
          mkdir datasets
          wget https://github.com/belajarqywok/cryptocurrency_prediction/raw/main/postman/symbols.json \
            -O postman/symbols.json
          go run scraper.go

      - name: Install Libraries
        if: env.match != 'true'
        run: pip install -r requirements.txt

      - name: Modeling and Training
        if: env.match != 'true'
        run: |
          mkdir models
          mkdir pickles
          mkdir posttrained
          python training.py --algorithm=LSTM

      - name: Set Pipeline Schedule
        if: env.match != 'true'
        run: echo "$(date +'%Y-%m-%d')" > schedulers/lstm_schedule.ctl

      - name: Zip Posttrained, Models, and Pickles
        if: env.match != 'true'
        run: |
          zip -r models.zip models
          zip -r pickles.zip pickles
          zip -r datasets.zip datasets
          zip -r posttrained.zip posttrained

      - name: Store Datasets to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: datasets.zip
          folderId: ${{ secrets.GDRIVE_CRYPTO_ID }}
          name: datasets.zip
          overwrite: "true"

      - name: Store Models to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: models.zip
          folderId: ${{ secrets.GDRIVE_LSTM_ID }}
          name: models.zip
          overwrite: "true"

      - name: Store Pickles to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: pickles.zip
          folderId: ${{ secrets.GDRIVE_LSTM_ID }}
          name: pickles.zip
          overwrite: "true"

      - name: Store Posttrained to Google Drive
        if: env.match != 'true'
        uses: adityak74/google-drive-upload-git-action@main
        with:
          credentials: ${{ secrets.GDRIVE_CRED }}
          filename: posttrained.zip
          folderId: ${{ secrets.GDRIVE_LSTM_ID }}
          name: posttrained.zip
          overwrite: "true"

      # FIX: step-name typo ("Temporarary" -> "Temporary").
      - name: Remove Temporary Files and Directories
        if: env.match != 'true'
        run: |
          rm models.zip
          rm pickles.zip
          rm datasets.zip
          rm posttrained.zip

          rm -rf models
          rm -rf pickles
          rm -rf datasets
          rm -rf posttrained

      - name: Commit changes
        if: env.match != 'true'
        run: |
          git config --local user.email "[email protected]"
          git config --local user.name "belajarqywok"
          git add -A
          git commit -m "Data Extraction, Training, and Modeling"

      - name: Push changes
        if: env.match != 'true'
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.GH_TOKEN }}
          branch: main

  # model_deployment:
  #   name: Model Deployment
  #   runs-on: ubuntu-latest
  #   needs: extraction_train_modeling
  #   environment: Production

  #   env:
  #     HF_TOKEN: ${{ secrets.HF_TOKEN }}
  #     SPACE_NAME: cryptocurrency_prediction
  #     HF_USERNAME: qywok

  #   steps:
  #     - name: Set global directory
  #       run: git config --global --add safe.directory /github/workspace

  #     - uses: actions/checkout@v3
  #       with:
  #         persist-credentials: false
  #         fetch-depth: 1000

  #     - name: Check git status
  #       run: git status

  #     - name: Configure git
  #       run: |
  #         git config --local user.email "[email protected]"
  #         git config --local user.name "qywok"

  #     - name: Pull changes from remote
  #       run: |
  #         git pull https://$HF_USERNAME:[email protected]/spaces/$HF_USERNAME/$SPACE_NAME main || \
  #         (git merge --strategy-option theirs)

  #     - name: Add and commit changes
  #       run: |
  #         git add -A
  #         git diff-index --quiet HEAD || git commit -m "Model Deployment"

  #     - name: Push to Hugging Face
  #       run: |
  #         git push https://$HF_USERNAME:[email protected]/spaces/$HF_USERNAME/$SPACE_NAME main --force
.gitignore ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Postman
2
+ /postman/dataset.url
3
+
4
+ # Environments
5
+ /bin
6
+ /Lib
7
+ /lib64
8
+ /Include
9
+ /Scripts
10
+
11
+ # Pycache
12
+ /__pycache__
13
+ /restful/__pycache__
14
+
15
+ # Temp
16
+ /models
17
+ /pickles
18
+ /datasets
19
+ /posttrained
.vercelignore ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ /.github
2
+
3
+ /bin
4
+ /include
5
+ /lib
6
+
7
+ /postman
Dockerfile ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Serving image for the cryptocurrency prediction API.
# Installs Python deps, builds the Cython utilities in place, then pulls
# pre-trained artifacts (models / pickles / datasets / posttrained) from
# Google Drive via BuildKit secrets holding the Drive file IDs.
# NOTE(review): base image is python:3.9 while README declares
# python_version 3.10 — confirm which is intended.
FROM python:3.9-bullseye

LABEL organization="R6Q - Infraprasta University"
LABEL team="Group 5"

# Non-root runtime user (uid 1000, as expected by HF Spaces).
RUN useradd -m -u 1000 user

WORKDIR /app

# Install Python dependencies first so this layer caches well.
COPY --chown=user ./requirements.txt requirements.txt

RUN pip install --no-cache-dir --upgrade -r requirements.txt

COPY --chown=user . /app

# Toolchain needed to compile the Cython extension.
RUN apt-get update && \
    apt-get install -y gcc python3-dev gnupg curl

RUN pip install cython

# Rebuild the cutils extension for this image's interpreter.
RUN cd /app/restful/cutils && \
    python setup.py build_ext --inplace && \
    chmod 777 * && cd ../..

RUN pip install gdown

# Each artifact bundle is fetched by Drive file ID passed as a build secret.
RUN --mount=type=secret,id=MODELS_ID,mode=0444,required=true \
    gdown https://drive.google.com/uc?id=$(cat /run/secrets/MODELS_ID) && \
    unzip models.zip && rm models.zip

RUN --mount=type=secret,id=PICKLES_ID,mode=0444,required=true \
    gdown https://drive.google.com/uc?id=$(cat /run/secrets/PICKLES_ID) && \
    unzip pickles.zip && rm pickles.zip

RUN --mount=type=secret,id=DATASETS_ID,mode=0444,required=true \
    gdown https://drive.google.com/uc?id=$(cat /run/secrets/DATASETS_ID) && \
    unzip datasets.zip && rm datasets.zip

RUN --mount=type=secret,id=POSTTRAINED_ID,mode=0444,required=true \
    gdown https://drive.google.com/uc?id=$(cat /run/secrets/POSTTRAINED_ID) && \
    unzip posttrained.zip && rm posttrained.zip


CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--workers", "10", "--port", "7860"]
Makefile ADDED
@@ -0,0 +1,2 @@
 
 
 
# Build the Cython utilities extension in place.
# .PHONY because the target never produces a file named "cutils"; without
# it, a stray file/dir called "cutils" would make the rule a silent no-op.
.PHONY: cutils
cutils:
	cd restful/cutils && python setup.py build_ext --inplace && cd ../..
README.md ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Cryptocurrency Prediction Service
3
+ emoji: 📉
4
+ colorFrom: purple
5
+ colorTo: pink
6
+ sdk: docker
7
+ python_version: "3.10"
8
+ pinned: false
9
+ ---
10
+
11
+
12
+ <h1 align="center">Cryptocurrency Prediction Service</h1>
13
+ <hr />
14
+ <p align="center">
15
+ <img src="./diagram/cryptocurrency_prediction.jpg"/>
16
+ </p>
app.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""FastAPI entry point for the cryptocurrency prediction service."""

from fastapi import FastAPI
from restful.routes import route
from fastapi.responses import RedirectResponse
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI(
    # FIX: user-facing typo ("Cryptocurency" -> "Cryptocurrency").
    title = "Cryptocurrency Prediction Service",
    version = "1.0"
)

# CORS Middleware: fully open (any origin/method/header) — this is a
# public read-style API served behind HF Spaces / Vercel.
app.add_middleware(
    CORSMiddleware,
    allow_origins = ["*"],
    allow_methods = ["*"],
    allow_headers = ["*"],
    allow_credentials = True,
)

# All prediction endpoints live under /crypto.
app.include_router(
    router = route,
    prefix = '/crypto',
    tags = ['Crypto']
)

@app.get("/", tags = ['Main'])
def root():
    """Redirect the bare root URL to the interactive API docs."""
    return RedirectResponse(url="/docs")
converter.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Data Mining Assignment - Group 5.

Convert a raw Yahoo Finance screener response (postman/response.json)
into a flat {"symbols": [...]} file (postman/symbols.json).
"""

import json


class JSONProcessor:
    """Load a screener response JSON file and extract its quote symbols."""

    def __init__(self, input_file: str, output_file: str) -> None:
        self.input_file: str = input_file
        self.output_file: str = output_file
        # Parsed JSON payload; populated by load_json().
        self.data = None

    def load_json(self) -> None:
        """Read and parse the input JSON file into ``self.data``."""
        # Explicit encoding: do not depend on the platform locale default.
        with open(self.input_file, 'r', encoding='utf-8') as file:
            self.data = json.load(file)

    def extract_symbols(self) -> list:
        """Return the ticker symbols from the loaded screener payload.

        Raises:
            ValueError: if load_json() has not been called yet.
        """
        if self.data is None:
            raise ValueError("data not loaded. call load_json() first.")
        # Screener payload shape: finance.result[0].quotes[*].symbol
        quotes = self.data['finance']['result'][0]['quotes']
        return [quote['symbol'] for quote in quotes]

    def save_json(self, data: list) -> None:
        """Write ``{"symbols": data}`` to the output file."""
        with open(self.output_file, 'w', encoding='utf-8') as file:
            json.dump({'symbols': data}, file, indent = 4)
            print(f'saved: {self.output_file}')


def main():
    """Run the response.json -> symbols.json conversion."""
    input_file = './postman/response.json'
    output_file = './postman/symbols.json'

    processor = JSONProcessor(input_file, output_file)
    processor.load_json()
    symbols = processor.extract_symbols()
    processor.save_json(symbols)


if __name__ == "__main__":
    main()
coret-coretan.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
dev.requirements.txt ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ absl-py==2.1.0
2
+ annotated-types==0.7.0
3
+ anyio==4.4.0
4
+ astunparse==1.6.3
5
+ certifi==2024.2.2
6
+ charset-normalizer==3.3.2
7
+ click==8.1.7
8
+ dnspython==2.6.1
9
+ email_validator==2.1.1
10
+ exceptiongroup==1.2.1
11
+ fastapi==0.111.0
12
+ fastapi-cli==0.0.4
13
+ flatbuffers==24.3.25
14
+ gast==0.5.4
15
+ google-pasta==0.2.0
16
+ grpcio==1.64.0
17
+ h11==0.14.0
18
+ h5py==3.11.0
19
+ httpcore==1.0.5
20
+ httptools==0.6.1
21
+ httpx==0.27.0
22
+ idna==3.7
23
+ importlib_metadata==7.1.0
24
+ Jinja2==3.1.4
25
+ joblib==1.4.2
26
+ keras==3.3.3
27
+ libclang==18.1.1
28
+ Markdown==3.6
29
+ markdown-it-py==3.0.0
30
+ MarkupSafe==2.1.5
31
+ mdurl==0.1.2
32
+ ml-dtypes==0.3.2
33
+ namex==0.0.8
34
+ numpy==1.26.4
35
+ opt-einsum==3.3.0
36
+ optree==0.11.0
37
+ orjson==3.10.3
38
+ packaging==24.0
39
+ pandas==2.2.2
40
+ protobuf==4.25.3
41
+ pydantic==2.7.2
42
+ pydantic_core==2.18.3
43
+ Pygments==2.18.0
44
+ python-dateutil==2.9.0.post0
45
+ python-dotenv==1.0.1
46
+ python-multipart==0.0.9
47
+ pytz==2024.1
48
+ PyYAML==6.0.1
49
+ requests==2.32.3
50
+ rich==13.7.1
51
+ scikit-learn==1.5.0
52
+ scipy==1.13.1
53
+ shellingham==1.5.4
54
+ six==1.16.0
55
+ sniffio==1.3.1
56
+ starlette==0.37.2
57
+ tensorboard==2.16.2
58
+ tensorboard-data-server==0.7.2
59
+ tensorflow==2.16.1
60
+ tensorflow-io-gcs-filesystem==0.31.0
61
+ termcolor==2.4.0
62
+ threadpoolctl==3.5.0
63
+ typer==0.12.3
64
+ typing_extensions==4.12.1
65
+ tzdata==2024.1
66
+ ujson==5.10.0
67
+ urllib3==2.2.1
68
+ uvicorn==0.30.1
69
+ uvloop==0.19.0
70
+ watchfiles==0.22.0
71
+ websockets==12.0
72
+ Werkzeug==3.0.3
73
+ wrapt==1.16.0
74
+ zipp==3.19.1
diagram/cryptocurrency_prediction.ai ADDED
The diff for this file is too large to render. See raw diff
 
diagram/cryptocurrency_prediction.jpg ADDED
diagram/icons/Yahoo!_Finance_logo_2021.png ADDED
diagram/icons/csv.png ADDED
diagram/icons/docker.png ADDED
diagram/icons/fastapi.png ADDED
diagram/icons/file.png ADDED
diagram/icons/github actions.png ADDED
diagram/icons/github.png ADDED
diagram/icons/golang.png ADDED
diagram/icons/json.png ADDED
diagram/icons/keras.png ADDED
diagram/icons/nestjs.png ADDED
diagram/icons/pickle.png ADDED
diagram/icons/spaces.png ADDED
diagram/icons/typescript.png ADDED
diagram/icons/vercel.png ADDED
go.mod ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ module github.com/cryptocurrency_prediction
2
+
3
+ go 1.20
postman/Yahoo Finance.postman_collection.json ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "info": {
3
+ "_postman_id": "249fd388-44f6-45c2-9ad5-37da9c2af089",
4
+ "name": "Yahoo Finance",
5
+ "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
6
+ },
7
+ "item": [
8
+ {
9
+ "name": "cryptocurrencies",
10
+ "request": {
11
+ "method": "POST",
12
+ "header": [
13
+ {
14
+ "key": "Cookie",
15
+ "value": "GUC=AQEBCAFmWUlmh0IaaAQw&s=AQAAAH-PIsT_&g=ZlgBjg; A1=d=AQABBBR-S2YCEF6h7KkHtT6kUMd5eQmdvDIFEgEBCAFJWWaHZlpOb2UB_eMBAAcIFH5LZgmdvDI&S=AQAAAge4BvAFwzWWdJFVm5Wyq9k; A3=d=AQABBBR-S2YCEF6h7KkHtT6kUMd5eQmdvDIFEgEBCAFJWWaHZlpOb2UB_eMBAAcIFH5LZgmdvDI&S=AQAAAge4BvAFwzWWdJFVm5Wyq9k; axids=gam=y-BdfSS7lE2uLV0LrGZqbRPm.8FUDjf.82~A&dv360=eS1EdjNSYkpGRTJ1R2RYQTAwYnNhcFJmQ0ZZN3BtTmNGan5B&ydsp=y-wmHAUIFE2uKC4PXfccNh1ff.Lz1oO0cj~A&tbla=y-gt8RDdJE2uKuvojQP3_mil11ZyoZelyw~A; tbla_id=f1c3e4ae-853f-47af-ba52-d13fe18de92e-tuctd4c1d85; PRF=t%3DBTC-USD%252BETH-USD%252BLTC-USD%252BLTC-INR%252BCU%253DF%26newChartbetateaser%3D0%252C1718255372183; A1S=d=AQABBBR-S2YCEF6h7KkHtT6kUMd5eQmdvDIFEgEBCAFJWWaHZlpOb2UB_eMBAAcIFH5LZgmdvDI&S=AQAAAge4BvAFwzWWdJFVm5Wyq9k; cmp=t=1717308407&j=0&u=1---; gpp=DBAA; gpp_sid=-1",
16
+ "type": "text"
17
+ }
18
+ ],
19
+ "body": {
20
+ "mode": "raw",
21
+ "raw": "{\"offset\":0,\"size\":50,\"sortType\":\"DESC\",\"sortField\":\"intradaymarketcap\",\"quoteType\":\"CRYPTOCURRENCY\",\"query\":{\"operator\":\"and\",\"operands\":[{\"operator\":\"eq\",\"operands\":[\"currency\",\"USD\"]},{\"operator\":\"eq\",\"operands\":[\"exchange\",\"CCC\"]}]},\"userId\":\"\",\"userIdType\":\"guid\"}",
22
+ "options": {
23
+ "raw": {
24
+ "language": "json"
25
+ }
26
+ }
27
+ },
28
+ "url": {
29
+ "raw": "https://query2.finance.yahoo.com/v1/finance/screener?crumb=55ovV9srjcg&lang=en-US&region=US&formatted=true&corsDomain=finance.yahoo.com",
30
+ "protocol": "https",
31
+ "host": [
32
+ "query2",
33
+ "finance",
34
+ "yahoo",
35
+ "com"
36
+ ],
37
+ "path": [
38
+ "v1",
39
+ "finance",
40
+ "screener"
41
+ ],
42
+ "query": [
43
+ {
44
+ "key": "crumb",
45
+ "value": "55ovV9srjcg"
46
+ },
47
+ {
48
+ "key": "lang",
49
+ "value": "en-US"
50
+ },
51
+ {
52
+ "key": "region",
53
+ "value": "US"
54
+ },
55
+ {
56
+ "key": "formatted",
57
+ "value": "true"
58
+ },
59
+ {
60
+ "key": "corsDomain",
61
+ "value": "finance.yahoo.com"
62
+ }
63
+ ]
64
+ }
65
+ },
66
+ "response": []
67
+ }
68
+ ]
69
+ }
postman/response.json ADDED
The diff for this file is too large to render. See raw diff
 
postman/symbols.json ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "symbols": [
3
+ "BTC-USD",
4
+ "ETH-USD",
5
+ "USDT-USD",
6
+ "BNB-USD",
7
+ "SOL-USD",
8
+ "STETH-USD",
9
+ "USDC-USD",
10
+ "XRP-USD",
11
+ "DOGE-USD",
12
+ "ADA-USD",
13
+ "TON11419-USD",
14
+ "SHIB-USD",
15
+ "AVAX-USD",
16
+ "WSTETH-USD",
17
+ "WETH-USD",
18
+ "LINK-USD",
19
+ "WBTC-USD",
20
+ "DOT-USD",
21
+ "TRX-USD",
22
+ "WTRX-USD",
23
+ "BCH-USD",
24
+ "NEAR-USD",
25
+ "MATIC-USD",
26
+ "LTC-USD",
27
+ "PEPE24478-USD",
28
+ "EETH-USD",
29
+ "UNI7083-USD",
30
+ "ICP-USD",
31
+ "LEO-USD",
32
+ "DAI-USD",
33
+ "WEETH-USD",
34
+ "ETC-USD",
35
+ "EZETH-USD",
36
+ "APT21794-USD",
37
+ "RNDR-USD",
38
+ "BTCB-USD",
39
+ "HBAR-USD",
40
+ "WHBAR-USD",
41
+ "WBETH-USD",
42
+ "IMX10603-USD",
43
+ "KAS-USD",
44
+ "ATOM-USD",
45
+ "ARB11841-USD",
46
+ "MNT27075-USD",
47
+ "FIL-USD",
48
+ "WIF-USD",
49
+ "XLM-USD",
50
+ "USDE29470-USD",
51
+ "CRO-USD",
52
+ "AR-USD"
53
+ ]
54
+ }
pyproject.toml ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [tool.poetry]
2
+ name = "cryptocurrency-prediction"
3
+ version = "0.1.0"
4
+ description = "Data Mining Assignment - Group 5"
5
+ authors = ["belajarqywok <[email protected]>"]
6
+ license = "MIT"
7
+ readme = "README.md"
8
+
9
+ [tool.poetry.dependencies]
10
+ python = "^3.9"
11
+
12
+
13
+ [build-system]
14
+ requires = ["poetry-core"]
15
+ build-backend = "poetry.core.masonry.api"
pyvenv.cfg ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ home = /usr/bin
2
+ include-system-site-packages = false
3
+ version = 3.10.12
requirements.txt ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fastapi==0.111.0
2
+ h5py==3.11.0
3
+ joblib==1.4.2
4
+ keras==3.3.3
5
+ numpy==1.26.4
6
+ pandas==2.2.2
7
+ protobuf==4.25.3
8
+ pydantic==2.7.2
9
+ pydantic_core==2.18.3
10
+ scikit-learn==1.5.0
11
+ scipy==1.13.1
12
+ tensorflow==2.16.1
13
+ uvicorn==0.30.1
restful/controllers.py ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from http import HTTPStatus
3
+ from fastapi.responses import JSONResponse
4
+ from restful.services import cryptocurrency_svc
5
+ from restful.schemas import CryptocurrencyPredictionSchema
6
+
7
+
8
# Cryptocurrency Controller
class cryptocurrency_controller:
    """REST controller exposing the cryptocurrency list and prediction endpoints."""

    # Cryptocurrency Service (prediction backend)
    __SERVICE = cryptocurrency_svc()

    # Directory containing one <currency>.csv dataset per supported coin
    __DATASETS_PATH = './datasets'

    @classmethod
    def __available_currencies(cls) -> list:
        """Sorted currency names derived from the CSV files in the datasets dir.

        Shared by both endpoints so the directory-scan logic lives in one place.
        """
        return sorted(
            item.replace('.csv', '')
            for item in os.listdir(cls.__DATASETS_PATH)
            if os.path.isfile(os.path.join(cls.__DATASETS_PATH, item))
            and item.endswith('.csv')
        )

    # Cryptocurrency List
    async def crypto_list(self) -> JSONResponse:
        """Return every currency a dataset (and hence a model) is available for."""
        try:
            datasets = self.__available_currencies()

            return JSONResponse(
                content = {
                    'message': 'Success',
                    'status_code': HTTPStatus.OK,
                    'data': datasets
                },
                status_code = HTTPStatus.OK
            )

        except Exception as error_message:
            # Best-effort logging; the handler must still answer with JSON.
            print(error_message)
            return JSONResponse(
                content = {
                    'message': 'Internal Server Error',
                    'status_code': HTTPStatus.INTERNAL_SERVER_ERROR,
                    'data': None
                },
                status_code = HTTPStatus.INTERNAL_SERVER_ERROR
            )

    # Cryptocurrency Prediction
    async def prediction(self, payload: CryptocurrencyPredictionSchema) -> JSONResponse:
        """Validate the payload and delegate the forecast to the service layer.

        Returns 400 for an out-of-range horizon or unknown currency,
        500 on unexpected failure.
        """
        try:
            datasets = self.__available_currencies()

            # Validation: horizon must be between 1 day and a month.
            if (payload.days > 31) or (payload.days < 1):
                return JSONResponse(
                    content = {
                        'message': 'prediction days cannot be more than a month and cannot be less than 1',
                        'status_code': HTTPStatus.BAD_REQUEST,
                        'data': None
                    },
                    status_code = HTTPStatus.BAD_REQUEST
                )

            if payload.currency not in datasets:
                return JSONResponse(
                    content = {
                        'message': f'cryptocurrency {payload.currency} is not available.',
                        'status_code': HTTPStatus.BAD_REQUEST,
                        'data': None
                    },
                    status_code = HTTPStatus.BAD_REQUEST
                )

            prediction: dict = await self.__SERVICE.prediction(payload)

            if not prediction:
                return JSONResponse(
                    content = {
                        'message': 'prediction could not be generated, please try again.',
                        'status_code': HTTPStatus.BAD_REQUEST,
                        'data': None
                    },
                    status_code = HTTPStatus.BAD_REQUEST
                )

            return JSONResponse(
                content = {
                    'message': 'prediction success',
                    'status_code': HTTPStatus.OK,
                    'data': {
                        'currency': payload.currency,
                        'predictions': prediction
                    }
                },
                status_code = HTTPStatus.OK
            )

        except Exception as error_message:
            print(error_message)
            return JSONResponse(
                content = {
                    'message': 'internal server error',
                    'status_code': HTTPStatus.INTERNAL_SERVER_ERROR,
                    'data': None
                },
                status_code = HTTPStatus.INTERNAL_SERVER_ERROR
            )
restful/cutils/build/lib.linux-x86_64-3.10/utilities.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (360 kB). View file
 
restful/cutils/build/temp.linux-x86_64-3.10/utilities.o ADDED
Binary file (491 kB). View file
 
restful/cutils/setup.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
from setuptools import setup
from Cython.Build import cythonize
import numpy

# Compile the Cython utilities module, exposing the NumPy C headers
# so `numpy` can be cimported/used from the generated C source.
extensions = cythonize("utilities.pyx")

setup(
    ext_modules = extensions,
    include_dirs = [numpy.get_include()],
)
restful/cutils/utilities.c ADDED
The diff for this file is too large to render. See raw diff
 
restful/cutils/utilities.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (360 kB). View file
 
restful/cutils/utilities.pyx ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
from joblib import load
from numpy import append, expand_dims
from pandas import read_json, to_datetime, Timedelta
from tensorflow.keras.models import load_model
import cython

cdef class Utilities:
    # Produce (actuals, predictions) for `model_name`:
    #   actuals     -> the last `sequence_length` observed closing prices,
    #   predictions -> `days` iteratively forecast closing prices.
    async def cryptocurrency_prediction_utils(self,
        int days, int sequence_length, str model_name) -> tuple:
        # Trained Keras model for this currency.
        cdef str model_path = os.path.join('./models', f'{model_name}.keras')
        model = load_model(model_path)

        # Post-trained (already scaled) price history, indexed by date.
        cdef str dataframe_path = os.path.join('./posttrained', f'{model_name}-posttrained.json')
        dataframe = read_json(dataframe_path)
        dataframe.set_index('Date', inplace=True)

        # Scalers fitted during training; used to map model output back to prices.
        minmax_scaler = load(os.path.join('./pickles', f'{model_name}_minmax_scaler.pickle'))
        standard_scaler = load(os.path.join('./pickles', f'{model_name}_standard_scaler.pickle'))

        # Prediction: seed the rolling input window with the last `sequence_length` rows.
        lst_seq = dataframe[-sequence_length:].values
        lst_seq = expand_dims(lst_seq, axis=0)

        cdef dict predicted_prices = {}
        last_date = to_datetime(dataframe.index[-1])

        for _ in range(days):
            predicted_price = model.predict(lst_seq)
            last_date = last_date + Timedelta(days=1)

            # Undo scaling: min-max inverse first, then standard inverse
            # (assumes training applied standard scaling before min-max — TODO confirm).
            predicted_prices[last_date] = minmax_scaler.inverse_transform(predicted_price)
            predicted_prices[last_date] = standard_scaler.inverse_transform(predicted_prices[last_date])

            # Slide the window: drop the oldest step, append the (still scaled) prediction.
            lst_seq = append(lst_seq[:, 1:, :], [predicted_price], axis=1)

        predictions = [
            {'date': date.strftime('%Y-%m-%d'), 'price': float(price)} \
            for date, price in predicted_prices.items()
        ]

        # Actual: last `sequence_length` observed rows, rescaled to real prices.
        df_date = dataframe.index[-sequence_length:].values
        df_date = [to_datetime(date) for date in df_date]

        # NOTE(review): inverse-transforms the whole frame and writes it into
        # ['Close'] — only valid if 'Close' is the frame's sole column; verify.
        dataframe[['Close']] = minmax_scaler.inverse_transform(dataframe)
        dataframe[['Close']] = standard_scaler.inverse_transform(dataframe)
        df_close = dataframe.iloc[-sequence_length:]['Close'].values

        actuals = [
            {'date': date.strftime('%Y-%m-%d'), 'price': close} \
            for date, close in zip(df_date, df_close)
        ]

        return actuals, predictions
restful/routes.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, Body
2
+ from fastapi.responses import JSONResponse
3
+ from restful.controllers import cryptocurrency_controller
4
+ from restful.schemas import CryptocurrencyPredictionSchema
5
+
6
# Router shared by the application, plus the single controller instance
# every handler delegates to.
route = APIRouter()
__CONTROLLER = cryptocurrency_controller()


@route.get('/lists')
async def cryptocurrency_list_route() -> JSONResponse:
    """List every cryptocurrency available for prediction."""
    return await __CONTROLLER.crypto_list()


@route.post('/prediction')
async def cryptocurrency_pred_route(
    payload: CryptocurrencyPredictionSchema = Body(...)
) -> JSONResponse:
    """Forecast future prices for the requested cryptocurrency."""
    return await __CONTROLLER.prediction(payload)
restful/schemas.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel
2
+
3
class CryptocurrencyPredictionSchema(BaseModel):
    """Request body for the prediction endpoint."""

    # Forecast horizon in days (range-checked by the controller).
    days: int
    # Dataset/model identifier, e.g. "BTC-USD".
    currency: str
restful/services.py ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from restful.cutils.utilities import Utilities
2
+ from restful.schemas import CryptocurrencyPredictionSchema
3
+
4
class cryptocurrency_svc:
    """Service layer bridging the REST controller and the prediction utilities."""

    # Prediction Utilities (compiled Cython backend)
    __PRED_UTILS = Utilities()

    async def prediction(self, payload: CryptocurrencyPredictionSchema) -> dict:
        """Run a forecast for the requested currency and horizon."""
        actuals, predictions = await self.__PRED_UTILS.cryptocurrency_prediction_utils(
            days = payload.days,
            model_name = payload.currency,
            sequence_length = 60
        )
        return {'actuals': actuals, 'predictions': predictions}
restful/utilities.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from joblib import load
3
+ from numpy import append, expand_dims
4
+ from pandas import read_json, to_datetime, Timedelta
5
+
6
+ from tensorflow.keras.models import load_model
7
+
8
+
9
class Utilities:
    """Loads per-currency artifacts (model, post-trained history, scalers) and
    produces an iterative multi-day closing-price forecast."""

    def __init__(self) -> None:
        # Artifact locations, relative to the process working directory.
        self.model_path = './models'
        self.posttrained_path = './posttrained'
        self.scaler_path = './pickles'

    async def cryptocurrency_prediction_utils(self,
        days: int, sequence_length: int, model_name: str) -> tuple:
        """Return (actuals, predictions): the last `sequence_length` observed
        prices and `days` forecast prices, each as {'date', 'price'} dicts."""
        # Trained Keras model for this currency.
        model_path = os.path.join(self.model_path, f'{model_name}.keras')
        model = load_model(model_path)

        # Post-trained (already scaled) price history, indexed by date.
        dataframe_path = os.path.join(self.posttrained_path, f'{model_name}-posttrained.json')
        dataframe = read_json(dataframe_path)
        dataframe.set_index('Date', inplace = True)

        # Scalers fitted during training; used to map model output back to prices.
        minmax_scaler = load(os.path.join(self.scaler_path, f'{model_name}_minmax_scaler.pickle'))
        standard_scaler = load(os.path.join(self.scaler_path, f'{model_name}_standard_scaler.pickle'))

        # Seed the rolling input window with the last `sequence_length` rows.
        lst_seq = dataframe[-sequence_length:].values
        lst_seq = expand_dims(lst_seq, axis = 0)

        # Predicted
        predicted_prices = {}
        last_date = to_datetime(dataframe.index[-1])

        for _ in range(days):
            predicted_price = model.predict(lst_seq)
            last_date = last_date + Timedelta(days = 1)

            # Undo scaling: min-max inverse first, then standard inverse
            # (assumes training applied standard scaling before min-max — TODO confirm).
            predicted_prices[last_date] = minmax_scaler.inverse_transform(predicted_price)
            predicted_prices[last_date] = standard_scaler.inverse_transform(predicted_prices[last_date])

            # Slide the window: drop the oldest step, append the (still scaled) prediction.
            lst_seq = append(lst_seq[:, 1:, :], [predicted_price], axis = 1)

        predictions = [
            {'date': date.strftime('%Y-%m-%d'), 'price': float(price)} \
            for date, price in predicted_prices.items()
        ]

        # Actual
        df_date = dataframe.index[-sequence_length:].values
        df_date = [to_datetime(date) for date in df_date]

        # NOTE(review): inverse-transforms the whole frame and writes it into
        # ['Close'] — only valid if 'Close' is the frame's sole column; verify.
        dataframe[['Close']] = minmax_scaler.inverse_transform(dataframe)
        dataframe[['Close']] = standard_scaler.inverse_transform(dataframe)
        df_close = dataframe.iloc[-sequence_length:]['Close'].values

        actuals = [
            {'date': date.strftime('%Y-%m-%d'), 'price': close} \
            for date, close in zip(df_date, df_close)
        ]

        return actuals, predictions
schedulers/gru_schedule.ctl ADDED
@@ -0,0 +1 @@
 
 
1
+ 2024-07-28
schedulers/lstm_gru_schedule.ctl ADDED
@@ -0,0 +1 @@
 
 
1
+ 2024-07-28
schedulers/lstm_schedule.ctl ADDED
@@ -0,0 +1 @@
 
 
1
+ 2024-07-28