tebakaja commited on
Commit
d7df8d0
1 Parent(s): 03edaa4

feat: crafting utilities

Browse files
.github/workflows/gru_pipeline.yaml CHANGED
@@ -11,8 +11,8 @@ on:
11
  # 14 - 7 = 7
12
 
13
  jobs:
14
- extraction_train_modeling:
15
- name: Data Extraction, Training, and Modeling
16
  runs-on: ubuntu-latest
17
 
18
  steps:
@@ -51,13 +51,81 @@ jobs:
51
  go run scraper.go \
52
  --symbols-file=./postman/symbols_test.json
53
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  - name: Install Libraries
55
  if: env.match != 'true'
56
  run: pip install -r requirements.txt
57
 
 
 
 
 
 
58
  - name: Modeling and Training
59
  if: env.match != 'true'
60
  run: |
 
 
61
  mkdir models
62
  mkdir pickles
63
  mkdir posttrained
@@ -77,18 +145,7 @@ jobs:
77
  run: |
78
  zip -r models.zip models
79
  zip -r pickles.zip pickles
80
- zip -r datasets.zip datasets
81
  zip -r posttrained.zip posttrained
82
-
83
- - name: Store Datasets to Google Drive
84
- if: env.match != 'true'
85
- uses: adityak74/google-drive-upload-git-action@main
86
- with:
87
- credentials: ${{ secrets.GDRIVE_GRU_CRED }}
88
- filename: datasets.zip
89
- folderId: ${{ secrets.GDRIVE_GRU_ID }}
90
- name: datasets.zip
91
- overwrite: "true"
92
 
93
  - name: Store Models to Google Drive
94
  if: env.match != 'true'
@@ -125,12 +182,10 @@ jobs:
125
  run: |
126
  rm models.zip
127
  rm pickles.zip
128
- rm datasets.zip
129
  rm posttrained.zip
130
 
131
  rm -rf models
132
  rm -rf pickles
133
- rm -rf datasets
134
  rm -rf posttrained
135
 
136
  - name: Commit changes
@@ -148,44 +203,217 @@ jobs:
148
  github_token: ${{ secrets.GH_TOKEN }}
149
  branch: production
150
 
151
- # model_deployment:
152
- # name: Model Deployment
153
- # runs-on: ubuntu-latest
154
- # needs: extraction_train_modeling
155
- # environment: Production
156
-
157
- # env:
158
- # HF_TOKEN: ${{ secrets.HF_TOKEN }}
159
- # SPACE_NAME: cryptocurrency_prediction
160
- # HF_USERNAME: qywok
161
-
162
- # steps:
163
- # - name: Set global directory
164
- # run: git config --global --add safe.directory /github/workspace
165
-
166
- # - uses: actions/checkout@v3
167
- # with:
168
- # persist-credentials: false
169
- # fetch-depth: 1000
170
-
171
- # - name: Check git status
172
- # run: git status
173
-
174
- # - name: Configure git
175
- # run: |
176
- # git config --local user.email "alfariqyraihan@gmail.com"
177
- # git config --local user.name "qywok"
178
-
179
- # - name: Pull changes from remote
180
- # run: |
181
- # git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main || \
182
- # (git merge --strategy-option theirs)
183
-
184
- # - name: Add and commit changes
185
- # run: |
186
- # git add -A
187
- # git diff-index --quiet HEAD || git commit -m "Model Deployment"
188
-
189
- # - name: Push to Hugging Face
190
- # run: |
191
- # git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main --force
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  # 14 - 7 = 7
12
 
13
  jobs:
14
+ scraping_extracting:
15
 + name: Scraping and Extracting
16
  runs-on: ubuntu-latest
17
 
18
  steps:
 
51
  go run scraper.go \
52
  --symbols-file=./postman/symbols_test.json
53
 
54
+ - name: Zip Datasets
55
+ if: env.match != 'true'
56
+ run: zip -r datasets.zip datasets
57
+
58
+ - name: Store Datasets to Google Drive
59
+ if: env.match != 'true'
60
+ uses: adityak74/google-drive-upload-git-action@main
61
+ with:
62
+ credentials: ${{ secrets.GDRIVE_GRU_CRED }}
63
+ filename: datasets.zip
64
+ folderId: ${{ secrets.GDRIVE_GRU_ID }}
65
+ name: datasets.zip
66
+ overwrite: "true"
67
+
68
+ - name: Upload Artifact (datasets)
69
+ uses: actions/upload-artifact@v3
70
+ with:
71
+ name: datasets
72
+ path: datasets.zip
73
+
74
 + - name: Remove Temporary Files and Directories
75
+ if: env.match != 'true'
76
+ run: |
77
+ rm datasets.zip
78
+ rm -rf datasets
79
+
80
+
81
+ preprocessing_training:
82
 + name: Preprocessing and Training
83
+ runs-on: ubuntu-latest
84
+ needs: scraping_extracting
85
+
86
+ steps:
87
+ - name: Set global directory
88
+ run: git config --global --add safe.directory /github/workspace
89
+
90
+ - uses: actions/checkout@v3
91
+ with:
92
+ lfs: true
93
+ persist-credentials: false
94
+ fetch-depth: 1
95
+
96
+ - name: Read pipeline schedule date
97
+ id: read_schedule
98
+ run: |
99
+ SCHEDULE_DATE=$(cat schedulers/gru_schedule.ctl)
100
+ echo "schedule_date=${SCHEDULE_DATE}" >> $GITHUB_ENV
101
+
102
+ - name: Get current date
103
+ id: get_date
104
+ run: echo "current_date=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
105
+
106
+ - name: Check if dates match
107
+ id: date_check
108
+ run: |
109
+ if [ "$schedule_date" = "$current_date" ]; then
110
+ echo "match=true" >> $GITHUB_ENV
111
+ else
112
+ echo "match=false" >> $GITHUB_ENV
113
+ fi
114
+
115
  - name: Install Libraries
116
  if: env.match != 'true'
117
  run: pip install -r requirements.txt
118
 
119
+ - name: Download Artifact (datasets)
120
+ uses: actions/download-artifact@v3
121
+ with:
122
+ name: datasets
123
+
124
  - name: Modeling and Training
125
  if: env.match != 'true'
126
  run: |
127
+ unzip datasets.zip
128
+
129
  mkdir models
130
  mkdir pickles
131
  mkdir posttrained
 
145
  run: |
146
  zip -r models.zip models
147
  zip -r pickles.zip pickles
 
148
  zip -r posttrained.zip posttrained
 
 
 
 
 
 
 
 
 
 
149
 
150
  - name: Store Models to Google Drive
151
  if: env.match != 'true'
 
182
  run: |
183
  rm models.zip
184
  rm pickles.zip
 
185
  rm posttrained.zip
186
 
187
  rm -rf models
188
  rm -rf pickles
 
189
  rm -rf posttrained
190
 
191
  - name: Commit changes
 
203
  github_token: ${{ secrets.GH_TOKEN }}
204
  branch: production
205
 
206
+
207
+ tebakaja_crypto_space-0:
208
+ name: crypto-forecast-svc-0
209
+ runs-on: ubuntu-latest
210
+ needs: preprocessing_training
211
+ environment: Production
212
+
213
+ env:
214
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
215
+ SPACE_NAME: tebakaja_cryptocurrency_space-0
216
+ HF_USERNAME: tebakaja
217
+
218
+ steps:
219
+ - name: Set global directory
220
+ run: git config --global --add safe.directory /github/workspace
221
+
222
+ - uses: actions/checkout@v3
223
+ with:
224
+ persist-credentials: false
225
+ fetch-depth: 1000
226
+
227
+ - name: Check git status
228
+ run: git status
229
+
230
+ - name: Configure git
231
+ run: |
232
+ git config --local user.email "alfariqyraihan@gmail.com"
233
+ git config --local user.name "qywok"
234
+
235
+ - name: Pull changes from remote
236
+ run: |
237
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
238
+ (git merge --strategy-option theirs)
239
+
240
+ - name: Add and commit changes
241
+ run: |
242
+ git add -A
243
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
244
+
245
+ - name: Push to Hugging Face
246
+ run: |
247
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
248
+
249
+
250
+ tebakaja_crypto_space-1:
251
+ name: crypto-forecast-svc-1
252
+ runs-on: ubuntu-latest
253
+ needs: preprocessing_training
254
+ environment: Production
255
+
256
+ env:
257
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
258
+ SPACE_NAME: tebakaja_cryptocurrency_space-1
259
+ HF_USERNAME: tebakaja
260
+
261
+ steps:
262
+ - name: Set global directory
263
+ run: git config --global --add safe.directory /github/workspace
264
+
265
+ - uses: actions/checkout@v3
266
+ with:
267
+ persist-credentials: false
268
+ fetch-depth: 1000
269
+
270
+ - name: Check git status
271
+ run: git status
272
+
273
+ - name: Configure git
274
+ run: |
275
+ git config --local user.email "alfariqyraihan@gmail.com"
276
+ git config --local user.name "qywok"
277
+
278
+ - name: Pull changes from remote
279
+ run: |
280
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
281
+ (git merge --strategy-option theirs)
282
+
283
+ - name: Add and commit changes
284
+ run: |
285
+ git add -A
286
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
287
+
288
+ - name: Push to Hugging Face
289
+ run: |
290
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
291
+
292
+
293
+ tebakaja_crypto_space-2:
294
+ name: crypto-forecast-svc-2
295
+ runs-on: ubuntu-latest
296
+ needs: preprocessing_training
297
+ environment: Production
298
+
299
+ env:
300
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
301
+ SPACE_NAME: tebakaja_cryptocurrency_space-2
302
+ HF_USERNAME: tebakaja
303
+
304
+ steps:
305
+ - name: Set global directory
306
+ run: git config --global --add safe.directory /github/workspace
307
+
308
+ - uses: actions/checkout@v3
309
+ with:
310
+ persist-credentials: false
311
+ fetch-depth: 1000
312
+
313
+ - name: Check git status
314
+ run: git status
315
+
316
+ - name: Configure git
317
+ run: |
318
+ git config --local user.email "alfariqyraihan@gmail.com"
319
+ git config --local user.name "qywok"
320
+
321
+ - name: Pull changes from remote
322
+ run: |
323
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
324
+ (git merge --strategy-option theirs)
325
+
326
+ - name: Add and commit changes
327
+ run: |
328
+ git add -A
329
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
330
+
331
+ - name: Push to Hugging Face
332
+ run: |
333
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
334
+
335
+
336
+ tebakaja_crypto_space-3:
337
+ name: crypto-forecast-svc-3
338
+ runs-on: ubuntu-latest
339
+ needs: preprocessing_training
340
+ environment: Production
341
+
342
+ env:
343
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
344
+ SPACE_NAME: tebakaja_cryptocurrency_space-3
345
+ HF_USERNAME: tebakaja
346
+
347
+ steps:
348
+ - name: Set global directory
349
+ run: git config --global --add safe.directory /github/workspace
350
+
351
+ - uses: actions/checkout@v3
352
+ with:
353
+ persist-credentials: false
354
+ fetch-depth: 1000
355
+
356
+ - name: Check git status
357
+ run: git status
358
+
359
+ - name: Configure git
360
+ run: |
361
+ git config --local user.email "alfariqyraihan@gmail.com"
362
+ git config --local user.name "qywok"
363
+
364
+ - name: Pull changes from remote
365
+ run: |
366
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
367
+ (git merge --strategy-option theirs)
368
+
369
+ - name: Add and commit changes
370
+ run: |
371
+ git add -A
372
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
373
+
374
+ - name: Push to Hugging Face
375
+ run: |
376
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
377
+
378
+
379
+ tebakaja_crypto_space-4:
380
+ name: crypto-forecast-svc-4
381
+ runs-on: ubuntu-latest
382
+ needs: preprocessing_training
383
+ environment: Production
384
+
385
+ env:
386
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
387
+ SPACE_NAME: tebakaja_cryptocurrency_space-4
388
+ HF_USERNAME: tebakaja
389
+
390
+ steps:
391
+ - name: Set global directory
392
+ run: git config --global --add safe.directory /github/workspace
393
+
394
+ - uses: actions/checkout@v3
395
+ with:
396
+ persist-credentials: false
397
+ fetch-depth: 1000
398
+
399
+ - name: Check git status
400
+ run: git status
401
+
402
+ - name: Configure git
403
+ run: |
404
+ git config --local user.email "alfariqyraihan@gmail.com"
405
+ git config --local user.name "qywok"
406
+
407
+ - name: Pull changes from remote
408
+ run: |
409
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
410
+ (git merge --strategy-option theirs)
411
+
412
+ - name: Add and commit changes
413
+ run: |
414
+ git add -A
415
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
416
+
417
+ - name: Push to Hugging Face
418
+ run: |
419
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
.github/workflows/lstm_gru_pipeline.yaml CHANGED
@@ -11,8 +11,8 @@ on:
11
  # 18 - 7 = 11
12
 
13
  jobs:
14
- extraction_train_modeling:
15
- name: Data Extraction, Training, and Modeling
16
  runs-on: ubuntu-latest
17
 
18
  steps:
@@ -51,13 +51,81 @@ jobs:
51
  go run scraper.go \
52
  --symbols-file=./postman/symbols_test.json
53
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  - name: Install Libraries
55
  if: env.match != 'true'
56
  run: pip install -r requirements.txt
57
 
 
 
 
 
 
58
  - name: Modeling and Training
59
  if: env.match != 'true'
60
  run: |
 
 
61
  mkdir models
62
  mkdir pickles
63
  mkdir posttrained
@@ -77,18 +145,7 @@ jobs:
77
  run: |
78
  zip -r models.zip models
79
  zip -r pickles.zip pickles
80
- zip -r datasets.zip datasets
81
  zip -r posttrained.zip posttrained
82
-
83
- - name: Store Datasets to Google Drive
84
- if: env.match != 'true'
85
- uses: adityak74/google-drive-upload-git-action@main
86
- with:
87
- credentials: ${{ secrets.GDRIVE_LSTM_GRU_CRED }}
88
- filename: datasets.zip
89
- folderId: ${{ secrets.GDRIVE_LSTM_GRU_ID }}
90
- name: datasets.zip
91
- overwrite: "true"
92
 
93
  - name: Store Models to Google Drive
94
  if: env.match != 'true'
@@ -125,12 +182,10 @@ jobs:
125
  run: |
126
  rm models.zip
127
  rm pickles.zip
128
- rm datasets.zip
129
  rm posttrained.zip
130
 
131
  rm -rf models
132
  rm -rf pickles
133
- rm -rf datasets
134
  rm -rf posttrained
135
 
136
  - name: Commit changes
@@ -148,44 +203,217 @@ jobs:
148
  github_token: ${{ secrets.GH_TOKEN }}
149
  branch: production
150
 
151
- # model_deployment:
152
- # name: Model Deployment
153
- # runs-on: ubuntu-latest
154
- # needs: extraction_train_modeling
155
- # environment: Production
156
-
157
- # env:
158
- # HF_TOKEN: ${{ secrets.HF_TOKEN }}
159
- # SPACE_NAME: cryptocurrency_prediction
160
- # HF_USERNAME: qywok
161
-
162
- # steps:
163
- # - name: Set global directory
164
- # run: git config --global --add safe.directory /github/workspace
165
-
166
- # - uses: actions/checkout@v3
167
- # with:
168
- # persist-credentials: false
169
- # fetch-depth: 1000
170
-
171
- # - name: Check git status
172
- # run: git status
173
-
174
- # - name: Configure git
175
- # run: |
176
- # git config --local user.email "alfariqyraihan@gmail.com"
177
- # git config --local user.name "qywok"
178
-
179
- # - name: Pull changes from remote
180
- # run: |
181
- # git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main || \
182
- # (git merge --strategy-option theirs)
183
-
184
- # - name: Add and commit changes
185
- # run: |
186
- # git add -A
187
- # git diff-index --quiet HEAD || git commit -m "Model Deployment"
188
-
189
- # - name: Push to Hugging Face
190
- # run: |
191
- # git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main --force
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  # 18 - 7 = 11
12
 
13
  jobs:
14
+ scraping_extracting:
15
 + name: Scraping and Extracting
16
  runs-on: ubuntu-latest
17
 
18
  steps:
 
51
  go run scraper.go \
52
  --symbols-file=./postman/symbols_test.json
53
 
54
+ - name: Zip Datasets
55
+ if: env.match != 'true'
56
+ run: zip -r datasets.zip datasets
57
+
58
+ - name: Store Datasets to Google Drive
59
+ if: env.match != 'true'
60
+ uses: adityak74/google-drive-upload-git-action@main
61
+ with:
62
+ credentials: ${{ secrets.GDRIVE_LSTM_GRU_CRED }}
63
+ filename: datasets.zip
64
+ folderId: ${{ secrets.GDRIVE_LSTM_GRU_ID }}
65
+ name: datasets.zip
66
+ overwrite: "true"
67
+
68
+ - name: Upload Artifact (datasets)
69
+ uses: actions/upload-artifact@v3
70
+ with:
71
+ name: datasets
72
+ path: datasets.zip
73
+
74
 + - name: Remove Temporary Files and Directories
75
+ if: env.match != 'true'
76
+ run: |
77
+ rm datasets.zip
78
+ rm -rf datasets
79
+
80
+
81
+ preprocessing_training:
82
 + name: Preprocessing and Training
83
+ runs-on: ubuntu-latest
84
+ needs: scraping_extracting
85
+
86
+ steps:
87
+ - name: Set global directory
88
+ run: git config --global --add safe.directory /github/workspace
89
+
90
+ - uses: actions/checkout@v3
91
+ with:
92
+ lfs: true
93
+ persist-credentials: false
94
+ fetch-depth: 1
95
+
96
+ - name: Read pipeline schedule date
97
+ id: read_schedule
98
+ run: |
99
+ SCHEDULE_DATE=$(cat schedulers/lstm_gru_schedule.ctl)
100
+ echo "schedule_date=${SCHEDULE_DATE}" >> $GITHUB_ENV
101
+
102
+ - name: Get current date
103
+ id: get_date
104
+ run: echo "current_date=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
105
+
106
+ - name: Check if dates match
107
+ id: date_check
108
+ run: |
109
+ if [ "$schedule_date" = "$current_date" ]; then
110
+ echo "match=true" >> $GITHUB_ENV
111
+ else
112
+ echo "match=false" >> $GITHUB_ENV
113
+ fi
114
+
115
  - name: Install Libraries
116
  if: env.match != 'true'
117
  run: pip install -r requirements.txt
118
 
119
+ - name: Download Artifact (datasets)
120
+ uses: actions/download-artifact@v3
121
+ with:
122
+ name: datasets
123
+
124
  - name: Modeling and Training
125
  if: env.match != 'true'
126
  run: |
127
+ unzip datasets.zip
128
+
129
  mkdir models
130
  mkdir pickles
131
  mkdir posttrained
 
145
  run: |
146
  zip -r models.zip models
147
  zip -r pickles.zip pickles
 
148
  zip -r posttrained.zip posttrained
 
 
 
 
 
 
 
 
 
 
149
 
150
  - name: Store Models to Google Drive
151
  if: env.match != 'true'
 
182
  run: |
183
  rm models.zip
184
  rm pickles.zip
 
185
  rm posttrained.zip
186
 
187
  rm -rf models
188
  rm -rf pickles
 
189
  rm -rf posttrained
190
 
191
  - name: Commit changes
 
203
  github_token: ${{ secrets.GH_TOKEN }}
204
  branch: production
205
 
206
+
207
+ tebakaja_crypto_space-0:
208
+ name: crypto-forecast-svc-0
209
+ runs-on: ubuntu-latest
210
+ needs: preprocessing_training
211
+ environment: Production
212
+
213
+ env:
214
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
215
+ SPACE_NAME: tebakaja_cryptocurrency_space-0
216
+ HF_USERNAME: tebakaja
217
+
218
+ steps:
219
+ - name: Set global directory
220
+ run: git config --global --add safe.directory /github/workspace
221
+
222
+ - uses: actions/checkout@v3
223
+ with:
224
+ persist-credentials: false
225
+ fetch-depth: 1000
226
+
227
+ - name: Check git status
228
+ run: git status
229
+
230
+ - name: Configure git
231
+ run: |
232
+ git config --local user.email "alfariqyraihan@gmail.com"
233
+ git config --local user.name "qywok"
234
+
235
+ - name: Pull changes from remote
236
+ run: |
237
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
238
+ (git merge --strategy-option theirs)
239
+
240
+ - name: Add and commit changes
241
+ run: |
242
+ git add -A
243
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
244
+
245
+ - name: Push to Hugging Face
246
+ run: |
247
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
248
+
249
+
250
+ tebakaja_crypto_space-1:
251
+ name: crypto-forecast-svc-1
252
+ runs-on: ubuntu-latest
253
+ needs: preprocessing_training
254
+ environment: Production
255
+
256
+ env:
257
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
258
+ SPACE_NAME: tebakaja_cryptocurrency_space-1
259
+ HF_USERNAME: tebakaja
260
+
261
+ steps:
262
+ - name: Set global directory
263
+ run: git config --global --add safe.directory /github/workspace
264
+
265
+ - uses: actions/checkout@v3
266
+ with:
267
+ persist-credentials: false
268
+ fetch-depth: 1000
269
+
270
+ - name: Check git status
271
+ run: git status
272
+
273
+ - name: Configure git
274
+ run: |
275
+ git config --local user.email "alfariqyraihan@gmail.com"
276
+ git config --local user.name "qywok"
277
+
278
+ - name: Pull changes from remote
279
+ run: |
280
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
281
+ (git merge --strategy-option theirs)
282
+
283
+ - name: Add and commit changes
284
+ run: |
285
+ git add -A
286
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
287
+
288
+ - name: Push to Hugging Face
289
+ run: |
290
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
291
+
292
+
293
+ tebakaja_crypto_space-2:
294
+ name: crypto-forecast-svc-2
295
+ runs-on: ubuntu-latest
296
+ needs: preprocessing_training
297
+ environment: Production
298
+
299
+ env:
300
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
301
+ SPACE_NAME: tebakaja_cryptocurrency_space-2
302
+ HF_USERNAME: tebakaja
303
+
304
+ steps:
305
+ - name: Set global directory
306
+ run: git config --global --add safe.directory /github/workspace
307
+
308
+ - uses: actions/checkout@v3
309
+ with:
310
+ persist-credentials: false
311
+ fetch-depth: 1000
312
+
313
+ - name: Check git status
314
+ run: git status
315
+
316
+ - name: Configure git
317
+ run: |
318
+ git config --local user.email "alfariqyraihan@gmail.com"
319
+ git config --local user.name "qywok"
320
+
321
+ - name: Pull changes from remote
322
+ run: |
323
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
324
+ (git merge --strategy-option theirs)
325
+
326
+ - name: Add and commit changes
327
+ run: |
328
+ git add -A
329
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
330
+
331
+ - name: Push to Hugging Face
332
+ run: |
333
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
334
+
335
+
336
+ tebakaja_crypto_space-3:
337
+ name: crypto-forecast-svc-3
338
+ runs-on: ubuntu-latest
339
+ needs: preprocessing_training
340
+ environment: Production
341
+
342
+ env:
343
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
344
+ SPACE_NAME: tebakaja_cryptocurrency_space-3
345
+ HF_USERNAME: tebakaja
346
+
347
+ steps:
348
+ - name: Set global directory
349
+ run: git config --global --add safe.directory /github/workspace
350
+
351
+ - uses: actions/checkout@v3
352
+ with:
353
+ persist-credentials: false
354
+ fetch-depth: 1000
355
+
356
+ - name: Check git status
357
+ run: git status
358
+
359
+ - name: Configure git
360
+ run: |
361
+ git config --local user.email "alfariqyraihan@gmail.com"
362
+ git config --local user.name "qywok"
363
+
364
+ - name: Pull changes from remote
365
+ run: |
366
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
367
+ (git merge --strategy-option theirs)
368
+
369
+ - name: Add and commit changes
370
+ run: |
371
+ git add -A
372
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
373
+
374
+ - name: Push to Hugging Face
375
+ run: |
376
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
377
+
378
+
379
+ tebakaja_crypto_space-4:
380
+ name: crypto-forecast-svc-4
381
+ runs-on: ubuntu-latest
382
+ needs: preprocessing_training
383
+ environment: Production
384
+
385
+ env:
386
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
387
+ SPACE_NAME: tebakaja_cryptocurrency_space-4
388
+ HF_USERNAME: tebakaja
389
+
390
+ steps:
391
+ - name: Set global directory
392
+ run: git config --global --add safe.directory /github/workspace
393
+
394
+ - uses: actions/checkout@v3
395
+ with:
396
+ persist-credentials: false
397
+ fetch-depth: 1000
398
+
399
+ - name: Check git status
400
+ run: git status
401
+
402
+ - name: Configure git
403
+ run: |
404
+ git config --local user.email "alfariqyraihan@gmail.com"
405
+ git config --local user.name "qywok"
406
+
407
+ - name: Pull changes from remote
408
+ run: |
409
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
410
+ (git merge --strategy-option theirs)
411
+
412
+ - name: Add and commit changes
413
+ run: |
414
+ git add -A
415
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
416
+
417
+ - name: Push to Hugging Face
418
+ run: |
419
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
.github/workflows/lstm_pipeline.yaml CHANGED
@@ -11,8 +11,8 @@ on:
11
  # 16 - 7 = 9
12
 
13
  jobs:
14
- extraction_train_modeling:
15
- name: Data Extraction, Training, and Modeling
16
  runs-on: ubuntu-latest
17
 
18
  steps:
@@ -51,13 +51,81 @@ jobs:
51
  go run scraper.go \
52
  --symbols-file=./postman/symbols_test.json
53
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  - name: Install Libraries
55
  if: env.match != 'true'
56
  run: pip install -r requirements.txt
57
 
 
 
 
 
 
58
  - name: Modeling and Training
59
  if: env.match != 'true'
60
  run: |
 
 
61
  mkdir models
62
  mkdir pickles
63
  mkdir posttrained
@@ -77,18 +145,7 @@ jobs:
77
  run: |
78
  zip -r models.zip models
79
  zip -r pickles.zip pickles
80
- zip -r datasets.zip datasets
81
  zip -r posttrained.zip posttrained
82
-
83
- - name: Store Datasets to Google Drive
84
- if: env.match != 'true'
85
- uses: adityak74/google-drive-upload-git-action@main
86
- with:
87
- credentials: ${{ secrets.GDRIVE_LSTM_CRED }}
88
- filename: datasets.zip
89
- folderId: ${{ secrets.GDRIVE_LSTM_ID }}
90
- name: datasets.zip
91
- overwrite: "true"
92
 
93
  - name: Store Models to Google Drive
94
  if: env.match != 'true'
@@ -125,12 +182,10 @@ jobs:
125
  run: |
126
  rm models.zip
127
  rm pickles.zip
128
- rm datasets.zip
129
  rm posttrained.zip
130
 
131
  rm -rf models
132
  rm -rf pickles
133
- rm -rf datasets
134
  rm -rf posttrained
135
 
136
  - name: Commit changes
@@ -148,44 +203,217 @@ jobs:
148
  github_token: ${{ secrets.GH_TOKEN }}
149
  branch: production
150
 
151
- # model_deployment:
152
- # name: Model Deployment
153
- # runs-on: ubuntu-latest
154
- # needs: extraction_train_modeling
155
- # environment: Production
156
-
157
- # env:
158
- # HF_TOKEN: ${{ secrets.HF_TOKEN }}
159
- # SPACE_NAME: cryptocurrency_prediction
160
- # HF_USERNAME: qywok
161
-
162
- # steps:
163
- # - name: Set global directory
164
- # run: git config --global --add safe.directory /github/workspace
165
-
166
- # - uses: actions/checkout@v3
167
- # with:
168
- # persist-credentials: false
169
- # fetch-depth: 1000
170
-
171
- # - name: Check git status
172
- # run: git status
173
-
174
- # - name: Configure git
175
- # run: |
176
- # git config --local user.email "alfariqyraihan@gmail.com"
177
- # git config --local user.name "qywok"
178
-
179
- # - name: Pull changes from remote
180
- # run: |
181
- # git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main || \
182
- # (git merge --strategy-option theirs)
183
-
184
- # - name: Add and commit changes
185
- # run: |
186
- # git add -A
187
- # git diff-index --quiet HEAD || git commit -m "Model Deployment"
188
-
189
- # - name: Push to Hugging Face
190
- # run: |
191
- # git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main --force
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  # 16 - 7 = 9
12
 
13
  jobs:
14
+ scraping_extracting:
15
 + name: Scraping and Extracting
16
  runs-on: ubuntu-latest
17
 
18
  steps:
 
51
  go run scraper.go \
52
  --symbols-file=./postman/symbols_test.json
53
 
54
+ - name: Zip Datasets
55
+ if: env.match != 'true'
56
+ run: zip -r datasets.zip datasets
57
+
58
+ - name: Store Datasets to Google Drive
59
+ if: env.match != 'true'
60
+ uses: adityak74/google-drive-upload-git-action@main
61
+ with:
62
+ credentials: ${{ secrets.GDRIVE_LSTM_CRED }}
63
+ filename: datasets.zip
64
+ folderId: ${{ secrets.GDRIVE_LSTM_ID }}
65
+ name: datasets.zip
66
+ overwrite: "true"
67
+
68
+ - name: Upload Artifact (datasets)
69
+ uses: actions/upload-artifact@v3
70
+ with:
71
+ name: datasets
72
+ path: datasets.zip
73
+
74
 + - name: Remove Temporary Files and Directories
75
+ if: env.match != 'true'
76
+ run: |
77
+ rm datasets.zip
78
+ rm -rf datasets
79
+
80
+
81
+ preprocessing_training:
82
 + name: Preprocessing and Training
83
+ runs-on: ubuntu-latest
84
+ needs: scraping_extracting
85
+
86
+ steps:
87
+ - name: Set global directory
88
+ run: git config --global --add safe.directory /github/workspace
89
+
90
+ - uses: actions/checkout@v3
91
+ with:
92
+ lfs: true
93
+ persist-credentials: false
94
+ fetch-depth: 1
95
+
96
+ - name: Read pipeline schedule date
97
+ id: read_schedule
98
+ run: |
99
+ SCHEDULE_DATE=$(cat schedulers/lstm_schedule.ctl)
100
+ echo "schedule_date=${SCHEDULE_DATE}" >> $GITHUB_ENV
101
+
102
+ - name: Get current date
103
+ id: get_date
104
+ run: echo "current_date=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
105
+
106
+ - name: Check if dates match
107
+ id: date_check
108
+ run: |
109
+ if [ "$schedule_date" = "$current_date" ]; then
110
+ echo "match=true" >> $GITHUB_ENV
111
+ else
112
+ echo "match=false" >> $GITHUB_ENV
113
+ fi
114
+
115
  - name: Install Libraries
116
  if: env.match != 'true'
117
  run: pip install -r requirements.txt
118
 
119
+ - name: Download Artifact (datasets)
120
+ uses: actions/download-artifact@v3
121
+ with:
122
+ name: datasets
123
+
124
  - name: Modeling and Training
125
  if: env.match != 'true'
126
  run: |
127
+ unzip datasets.zip
128
+
129
  mkdir models
130
  mkdir pickles
131
  mkdir posttrained
 
145
  run: |
146
  zip -r models.zip models
147
  zip -r pickles.zip pickles
 
148
  zip -r posttrained.zip posttrained
 
 
 
 
 
 
 
 
 
 
149
 
150
  - name: Store Models to Google Drive
151
  if: env.match != 'true'
 
182
  run: |
183
  rm models.zip
184
  rm pickles.zip
 
185
  rm posttrained.zip
186
 
187
  rm -rf models
188
  rm -rf pickles
 
189
  rm -rf posttrained
190
 
191
  - name: Commit changes
 
203
  github_token: ${{ secrets.GH_TOKEN }}
204
  branch: production
205
 
206
+
207
+ tebakaja_crypto_space-0:
208
+ name: crypto-forecast-svc-0
209
+ runs-on: ubuntu-latest
210
+ needs: preprocessing_training
211
+ environment: Production
212
+
213
+ env:
214
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
215
+ SPACE_NAME: tebakaja_cryptocurrency_space-0
216
+ HF_USERNAME: tebakaja
217
+
218
+ steps:
219
+ - name: Set global directory
220
+ run: git config --global --add safe.directory /github/workspace
221
+
222
+ - uses: actions/checkout@v3
223
+ with:
224
+ persist-credentials: false
225
+ fetch-depth: 1000
226
+
227
+ - name: Check git status
228
+ run: git status
229
+
230
+ - name: Configure git
231
+ run: |
232
+ git config --local user.email "alfariqyraihan@gmail.com"
233
+ git config --local user.name "qywok"
234
+
235
+ - name: Pull changes from remote
236
+ run: |
237
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
238
+ (git merge --strategy-option theirs)
239
+
240
+ - name: Add and commit changes
241
+ run: |
242
+ git add -A
243
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
244
+
245
+ - name: Push to Hugging Face
246
+ run: |
247
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
248
+
249
+
250
+ tebakaja_crypto_space-1:
251
+ name: crypto-forecast-svc-1
252
+ runs-on: ubuntu-latest
253
+ needs: preprocessing_training
254
+ environment: Production
255
+
256
+ env:
257
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
258
+ SPACE_NAME: tebakaja_cryptocurrency_space-1
259
+ HF_USERNAME: tebakaja
260
+
261
+ steps:
262
+ - name: Set global directory
263
+ run: git config --global --add safe.directory /github/workspace
264
+
265
+ - uses: actions/checkout@v3
266
+ with:
267
+ persist-credentials: false
268
+ fetch-depth: 1000
269
+
270
+ - name: Check git status
271
+ run: git status
272
+
273
+ - name: Configure git
274
+ run: |
275
+ git config --local user.email "alfariqyraihan@gmail.com"
276
+ git config --local user.name "qywok"
277
+
278
+ - name: Pull changes from remote
279
+ run: |
280
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
281
+ (git merge --strategy-option theirs)
282
+
283
+ - name: Add and commit changes
284
+ run: |
285
+ git add -A
286
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
287
+
288
+ - name: Push to Hugging Face
289
+ run: |
290
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
291
+
292
+
293
+ tebakaja_crypto_space-2:
294
+ name: crypto-forecast-svc-2
295
+ runs-on: ubuntu-latest
296
+ needs: preprocessing_training
297
+ environment: Production
298
+
299
+ env:
300
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
301
+ SPACE_NAME: tebakaja_cryptocurrency_space-2
302
+ HF_USERNAME: tebakaja
303
+
304
+ steps:
305
+ - name: Set global directory
306
+ run: git config --global --add safe.directory /github/workspace
307
+
308
+ - uses: actions/checkout@v3
309
+ with:
310
+ persist-credentials: false
311
+ fetch-depth: 1000
312
+
313
+ - name: Check git status
314
+ run: git status
315
+
316
+ - name: Configure git
317
+ run: |
318
+ git config --local user.email "alfariqyraihan@gmail.com"
319
+ git config --local user.name "qywok"
320
+
321
+ - name: Pull changes from remote
322
+ run: |
323
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
324
+ (git merge --strategy-option theirs)
325
+
326
+ - name: Add and commit changes
327
+ run: |
328
+ git add -A
329
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
330
+
331
+ - name: Push to Hugging Face
332
+ run: |
333
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
334
+
335
+
336
+ tebakaja_crypto_space-3:
337
+ name: crypto-forecast-svc-3
338
+ runs-on: ubuntu-latest
339
+ needs: preprocessing_training
340
+ environment: Production
341
+
342
+ env:
343
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
344
+ SPACE_NAME: tebakaja_cryptocurrency_space-3
345
+ HF_USERNAME: tebakaja
346
+
347
+ steps:
348
+ - name: Set global directory
349
+ run: git config --global --add safe.directory /github/workspace
350
+
351
+ - uses: actions/checkout@v3
352
+ with:
353
+ persist-credentials: false
354
+ fetch-depth: 1000
355
+
356
+ - name: Check git status
357
+ run: git status
358
+
359
+ - name: Configure git
360
+ run: |
361
+ git config --local user.email "alfariqyraihan@gmail.com"
362
+ git config --local user.name "qywok"
363
+
364
+ - name: Pull changes from remote
365
+ run: |
366
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
367
+ (git merge --strategy-option theirs)
368
+
369
+ - name: Add and commit changes
370
+ run: |
371
+ git add -A
372
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
373
+
374
+ - name: Push to Hugging Face
375
+ run: |
376
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
377
+
378
+
379
+ tebakaja_crypto_space-4:
380
+ name: crypto-forecast-svc-4
381
+ runs-on: ubuntu-latest
382
+ needs: preprocessing_training
383
+ environment: Production
384
+
385
+ env:
386
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
387
+ SPACE_NAME: tebakaja_cryptocurrency_space-4
388
+ HF_USERNAME: tebakaja
389
+
390
+ steps:
391
+ - name: Set global directory
392
+ run: git config --global --add safe.directory /github/workspace
393
+
394
+ - uses: actions/checkout@v3
395
+ with:
396
+ persist-credentials: false
397
+ fetch-depth: 1000
398
+
399
+ - name: Check git status
400
+ run: git status
401
+
402
+ - name: Configure git
403
+ run: |
404
+ git config --local user.email "alfariqyraihan@gmail.com"
405
+ git config --local user.name "qywok"
406
+
407
+ - name: Pull changes from remote
408
+ run: |
409
+ git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production || \
410
+ (git merge --strategy-option theirs)
411
+
412
+ - name: Add and commit changes
413
+ run: |
414
+ git add -A
415
+ git diff-index --quiet HEAD || git commit -m "Model Deployment"
416
+
417
+ - name: Push to Hugging Face
418
+ run: |
419
+ git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME production --force
.gitignore CHANGED
@@ -2,6 +2,7 @@
2
  /postman/dataset.url
3
 
4
  # Environments
 
5
  /bin
6
  /Lib
7
  /lib64
@@ -16,4 +17,5 @@
16
  /models
17
  /pickles
18
  /datasets
 
19
  /posttrained
 
2
  /postman/dataset.url
3
 
4
  # Environments
5
+ /venv
6
  /bin
7
  /Lib
8
  /lib64
 
17
  /models
18
  /pickles
19
  /datasets
20
+ /resources
21
  /posttrained
.vercelignore DELETED
@@ -1,7 +0,0 @@
1
- /.github
2
-
3
- /bin
4
- /include
5
- /lib
6
-
7
- /postman
 
 
 
 
 
 
 
 
Dockerfile CHANGED
@@ -1,44 +1,120 @@
1
  FROM python:3.9-bullseye
2
 
3
- LABEL organization="R6Q - Infraprasta University"
4
- LABEL team="Group 5"
 
 
5
 
6
  RUN useradd -m -u 1000 user
7
 
8
  WORKDIR /app
9
 
 
 
 
 
 
10
  COPY --chown=user ./requirements.txt requirements.txt
11
 
12
  RUN pip install --no-cache-dir --upgrade -r requirements.txt
13
 
14
  COPY --chown=user . /app
15
 
16
- RUN apt-get update && \
17
- apt-get install -y gcc python3-dev gnupg curl
18
 
 
19
  RUN pip install cython
20
 
21
  RUN cd /app/restful/cutils && \
22
  python setup.py build_ext --inplace && \
23
  chmod 777 * && cd ../..
24
 
25
- RUN pip install gdown
26
 
27
- RUN --mount=type=secret,id=MODELS_ID,mode=0444,required=true \
28
- gdown https://drive.google.com/uc?id=$(cat /run/secrets/MODELS_ID) && \
29
- unzip models.zip && rm models.zip
 
 
30
 
31
- RUN --mount=type=secret,id=PICKLES_ID,mode=0444,required=true \
32
- gdown https://drive.google.com/uc?id=$(cat /run/secrets/PICKLES_ID) && \
33
- unzip pickles.zip && rm pickles.zip
34
 
 
35
  RUN --mount=type=secret,id=DATASETS_ID,mode=0444,required=true \
36
  gdown https://drive.google.com/uc?id=$(cat /run/secrets/DATASETS_ID) && \
37
- unzip datasets.zip && rm datasets.zip
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
 
39
- RUN --mount=type=secret,id=POSTTRAINED_ID,mode=0444,required=true \
40
- gdown https://drive.google.com/uc?id=$(cat /run/secrets/POSTTRAINED_ID) && \
41
- unzip posttrained.zip && rm posttrained.zip
 
 
42
 
43
 
44
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--workers", "10", "--port", "7860"]
 
1
  FROM python:3.9-bullseye
2
 
3
+ LABEL PRODUCT="TebakAja"
4
+ LABEL SERVICE="Cryptocurrency Service"
5
+ LABEL TEAM="System and Machine Learning Engineering Team"
6
+
7
 
8
  RUN useradd -m -u 1000 user
9
 
10
  WORKDIR /app
11
 
12
+
13
+ # Install Requirements
14
+ RUN apt-get update && \
15
+ apt-get install -y gcc python3-dev gnupg curl
16
+
17
  COPY --chown=user ./requirements.txt requirements.txt
18
 
19
  RUN pip install --no-cache-dir --upgrade -r requirements.txt
20
 
21
  COPY --chown=user . /app
22
 
 
 
23
 
24
+ # Cythonizing Utilities
25
  RUN pip install cython
26
 
27
  RUN cd /app/restful/cutils && \
28
  python setup.py build_ext --inplace && \
29
  chmod 777 * && cd ../..
30
 
 
31
 
32
+ # Initialization Resources
33
+ RUN mkdir /app/resources && \
34
+ chmod 444 /app/resources
35
+
36
+ RUN pip install gdown
37
 
 
 
 
38
 
39
+ # Datasets Resources
40
  RUN --mount=type=secret,id=DATASETS_ID,mode=0444,required=true \
41
  gdown https://drive.google.com/uc?id=$(cat /run/secrets/DATASETS_ID) && \
42
+ mv datasets.zip /app/resources/datasets.zip && unzip /app/resources/datasets.zip && \
43
+ rm /app/resources/datasets.zip
44
+
45
+
46
+ # Algorithms Resources
47
+ RUN mkdir /app/resources/algorithms && \
48
+ chmod 444 /app/resources/algorithms
49
+
50
+
51
+ # GRU Algorithm Resources
52
+ RUN mkdir /app/resources/algorithms/GRU && \
53
+ chmod 444 /app/resources/algorithms/GRU
54
+
55
+ RUN --mount=type=secret,id=GRU_MODELS_ID,mode=0444,required=true \
56
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/GRU_MODELS_ID) && \
57
+ mv models.zip /app/resources/algorithms/GRU/models.zip && \
58
+ unzip /app/resources/algorithms/GRU/models.zip && \
59
+ rm /app/resources/algorithms/GRU/models.zip
60
+
61
+ RUN --mount=type=secret,id=GRU_PICKLES_ID,mode=0444,required=true \
62
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/GRU_PICKLES_ID) && \
63
+ mv pickles.zip /app/resources/algorithms/GRU/pickles.zip && \
64
+ unzip /app/resources/algorithms/GRU/pickles.zip && \
65
+ rm /app/resources/algorithms/GRU/pickles.zip
66
+
67
+ RUN --mount=type=secret,id=GRU_POSTTRAINED_ID,mode=0444,required=true \
68
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/GRU_POSTTRAINED_ID) && \
69
+ mv posttrained.zip /app/resources/algorithms/GRU/posttrained.zip && \
70
+ unzip /app/resources/algorithms/GRU/posttrained.zip && \
71
+ rm /app/resources/algorithms/GRU/posttrained.zip
72
+
73
+
74
+ # LSTM Algorithm Resources
75
+ RUN mkdir /app/resources/algorithms/LSTM && \
76
+ chmod 444 /app/resources/algorithms/LSTM
77
+
78
+ RUN --mount=type=secret,id=LSTM_MODELS_ID,mode=0444,required=true \
79
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/LSTM_MODELS_ID) && \
80
+ mv models.zip /app/resources/algorithms/LSTM/models.zip && \
81
+ unzip /app/resources/algorithms/LSTM/models.zip && \
82
+ rm /app/resources/algorithms/LSTM/models.zip
83
+
84
+ RUN --mount=type=secret,id=LSTM_PICKLES_ID,mode=0444,required=true \
85
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/LSTM_PICKLES_ID) && \
86
+ mv pickles.zip /app/resources/algorithms/LSTM/pickles.zip && \
87
+ unzip /app/resources/algorithms/LSTM/pickles.zip && \
88
+ rm /app/resources/algorithms/LSTM/pickles.zip
89
+
90
+ RUN --mount=type=secret,id=LSTM_POSTTRAINED_ID,mode=0444,required=true \
91
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/LSTM_POSTTRAINED_ID) && \
92
+ mv posttrained.zip /app/resources/algorithms/LSTM/posttrained.zip && \
93
+ unzip /app/resources/algorithms/LSTM/posttrained.zip && \
94
+ rm /app/resources/algorithms/LSTM/posttrained.zip
95
+
96
+
97
+ # LSTM_GRU Algorithm Resources
98
+ RUN mkdir /app/resources/algorithms/LSTM_GRU && \
99
+ chmod 444 /app/resources/algorithms/LSTM_GRU
100
+
101
+ RUN --mount=type=secret,id=LSTM_GRU_MODELS_ID,mode=0444,required=true \
102
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/LSTM_GRU_MODELS_ID) && \
103
+ mv models.zip /app/resources/algorithms/LSTM_GRU/models.zip && \
104
+ unzip /app/resources/algorithms/LSTM_GRU/models.zip && \
105
+ rm /app/resources/algorithms/LSTM_GRU/models.zip
106
+
107
+ RUN --mount=type=secret,id=LSTM_GRU_PICKLES_ID,mode=0444,required=true \
108
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/LSTM_GRU_PICKLES_ID) && \
109
+ mv pickles.zip /app/resources/algorithms/LSTM_GRU/pickles.zip && \
110
+ unzip /app/resources/algorithms/LSTM_GRU/pickles.zip && \
111
+ rm /app/resources/algorithms/LSTM_GRU/pickles.zip
112
 
113
+ RUN --mount=type=secret,id=LSTM_GRU_POSTTRAINED_ID,mode=0444,required=true \
114
+ gdown https://drive.google.com/uc?id=$(cat /run/secrets/LSTM_GRU_POSTTRAINED_ID) && \
115
+ mv posttrained.zip /app/resources/algorithms/LSTM_GRU/posttrained.zip && \
116
+ unzip /app/resources/algorithms/LSTM_GRU/posttrained.zip && \
117
+ rm /app/resources/algorithms/LSTM_GRU/posttrained.zip
118
 
119
 
120
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--workers", "50", "--port", "7860"]
Makefile CHANGED
@@ -1,2 +1,5 @@
1
  cutils:
2
  cd restful/cutils && python setup.py build_ext --inplace && cd ../..
 
 
 
 
1
  cutils:
2
  cd restful/cutils && python setup.py build_ext --inplace && cd ../..
3
+
4
+ run:
5
+ uvicorn app:app --host 0.0.0.0 --port 7860 --reload
app.py CHANGED
@@ -20,7 +20,7 @@ app.add_middleware(
20
  app.include_router(
21
  router = route,
22
  prefix = '/crypto',
23
- tags = ['Crypto']
24
  )
25
 
26
  @app.get("/", tags = ['Main'])
 
20
  app.include_router(
21
  router = route,
22
  prefix = '/crypto',
23
+ tags = ['Cryptocurrency']
24
  )
25
 
26
  @app.get("/", tags = ['Main'])
converter.py CHANGED
@@ -1,11 +1,5 @@
1
  import json
2
 
3
- """
4
-
5
- Data Mining Assignment - Group 5
6
-
7
- """
8
-
9
  class JSONProcessor:
10
  def __init__(self, input_file: str, output_file: str) -> None:
11
  self.input_file: str = input_file
 
1
  import json
2
 
 
 
 
 
 
 
3
  class JSONProcessor:
4
  def __init__(self, input_file: str, output_file: str) -> None:
5
  self.input_file: str = input_file
go.mod CHANGED
@@ -1,3 +1,3 @@
1
- module github.com/cryptocurrency_prediction
2
 
3
  go 1.20
 
1
+ module github.com/tebakaja/tebakaja_cryptocurrency
2
 
3
  go 1.20
restful/controllers.py CHANGED
@@ -1,23 +1,54 @@
1
  import os
2
  from http import HTTPStatus
3
  from fastapi.responses import JSONResponse
4
- from restful.services import cryptocurrency_svc
5
- from restful.schemas import CryptocurrencyPredictionSchema
6
 
7
 
8
- # Cryptocurrency Controller
9
- class cryptocurrency_controller:
10
- # Cryptocurrency Service
11
- __SERVICE = cryptocurrency_svc()
12
 
13
- # Cryptocurrency List
14
- async def crypto_list(self) -> JSONResponse:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
  try:
16
- DATASETS_PATH = './datasets'
17
- DATASETS = sorted(
18
  [
19
- item.replace(".csv", "") for item in os.listdir(DATASETS_PATH)
20
- if os.path.isfile(os.path.join(DATASETS_PATH, item)) and item.endswith('.csv')
21
  ]
22
  )
23
 
@@ -25,7 +56,7 @@ class cryptocurrency_controller:
25
  content = {
26
  'message': 'Success',
27
  'status_code': HTTPStatus.OK,
28
- 'data': DATASETS
29
  },
30
  status_code = HTTPStatus.OK
31
  )
@@ -41,32 +72,47 @@ class cryptocurrency_controller:
41
  status_code = HTTPStatus.INTERNAL_SERVER_ERROR
42
  )
43
 
44
- # Cryptocurrency Controller
45
- async def prediction(self, payload: CryptocurrencyPredictionSchema) -> JSONResponse:
 
 
 
 
46
  try:
47
- DATASETS_PATH = './datasets'
48
- DATASETS = sorted(
49
  [
50
- item.replace(".csv", "") for item in os.listdir(DATASETS_PATH)
51
- if os.path.isfile(os.path.join(DATASETS_PATH, item)) and item.endswith('.csv')
52
  ]
53
  )
54
 
55
- # Validation
 
 
 
 
 
 
 
 
 
 
56
  if (payload.days > 31) or (payload.days < 1):
57
  return JSONResponse(
58
  content = {
59
- 'message': 'prediction days cannot be more than a month and cannot be less than 1',
60
  'status_code': HTTPStatus.BAD_REQUEST,
61
  'data': None
62
  },
63
  status_code = HTTPStatus.BAD_REQUEST
64
  )
65
 
66
- if payload.currency not in DATASETS:
 
67
  return JSONResponse(
68
  content = {
69
- 'message': f'cryptocurrency {payload.currency} is not available.',
70
  'status_code': HTTPStatus.BAD_REQUEST,
71
  'data': None
72
  },
@@ -74,7 +120,7 @@ class cryptocurrency_controller:
74
  )
75
 
76
 
77
- prediction: dict = await self.__SERVICE.prediction(payload)
78
 
79
  if not prediction :
80
  return JSONResponse(
 
1
  import os
2
  from http import HTTPStatus
3
  from fastapi.responses import JSONResponse
4
+ from restful.services import ForecastingService
5
+ from restful.schemas import ForecastingServiceSchema
6
 
7
 
8
+ """ Forecasting Controller """
9
+ class ForecastingControllers:
 
 
10
 
11
+ __SERVICE: ForecastingService = ForecastingService()
12
+
13
+
14
+ """
15
+ Algorithms Controller
16
+ """
17
+ async def algorithms_controller(self) -> JSONResponse:
18
+ try:
19
+ algorithms: list = sorted(os.listdir("resources/algorithms"))
20
+ return JSONResponse(
21
+ content = {
22
+ 'message': 'Success',
23
+ 'status_code': HTTPStatus.OK,
24
+ 'data': algorithms
25
+ },
26
+ status_code = HTTPStatus.OK
27
+ )
28
+
29
+ except Exception as error_message:
30
+ print(error_message)
31
+ return JSONResponse(
32
+ content = {
33
+ 'message': 'Internal Server Error',
34
+ 'status_code': HTTPStatus.INTERNAL_SERVER_ERROR,
35
+ 'data': None
36
+ },
37
+ status_code = HTTPStatus.INTERNAL_SERVER_ERROR
38
+ )
39
+
40
+
41
+
42
+ """
43
+ Currency Controller
44
+ """
45
+ async def currencies_controller(self) -> JSONResponse:
46
  try:
47
+ path: str = './resources/datasets'
48
+ datasets: list = sorted(
49
  [
50
+ item.replace(".csv", "") for item in os.listdir(path)
51
+ if os.path.isfile(os.path.join(path, item)) and item.endswith('.csv')
52
  ]
53
  )
54
 
 
56
  content = {
57
  'message': 'Success',
58
  'status_code': HTTPStatus.OK,
59
+ 'data': datasets
60
  },
61
  status_code = HTTPStatus.OK
62
  )
 
72
  status_code = HTTPStatus.INTERNAL_SERVER_ERROR
73
  )
74
 
75
+
76
+
77
+ """
78
+ Forecasting Controller
79
+ """
80
+ async def forecasting_controller(self, payload: ForecastingServiceSchema) -> JSONResponse:
81
  try:
82
+ path: str = './resources/datasets'
83
+ datasets: list = sorted(
84
  [
85
+ item.replace(".csv", "") for item in os.listdir(path)
86
+ if os.path.isfile(os.path.join(path, item)) and item.endswith('.csv')
87
  ]
88
  )
89
 
90
+ if payload.currency not in datasets:
91
+ return JSONResponse(
92
+ content = {
93
+ 'message': f'symbols "{payload.currency}" is not available.',
94
+ 'status_code': HTTPStatus.BAD_REQUEST,
95
+ 'data': None
96
+ },
97
+ status_code = HTTPStatus.BAD_REQUEST
98
+ )
99
+
100
+
101
  if (payload.days > 31) or (payload.days < 1):
102
  return JSONResponse(
103
  content = {
104
+ 'message': 'days cannot be more than a month and cannot be less than 1',
105
  'status_code': HTTPStatus.BAD_REQUEST,
106
  'data': None
107
  },
108
  status_code = HTTPStatus.BAD_REQUEST
109
  )
110
 
111
+
112
+ if payload.algorithm not in os.listdir("resources/algorithms"):
113
  return JSONResponse(
114
  content = {
115
+ 'message': f'algorithm "{payload.algorithm}" is not available.',
116
  'status_code': HTTPStatus.BAD_REQUEST,
117
  'data': None
118
  },
 
120
  )
121
 
122
 
123
+ prediction: dict = await self.__SERVICE.forecasting(payload)
124
 
125
  if not prediction :
126
  return JSONResponse(
restful/cutils/build/lib.linux-x86_64-3.10/utilities.cpython-310-x86_64-linux-gnu.so CHANGED
Binary files a/restful/cutils/build/lib.linux-x86_64-3.10/utilities.cpython-310-x86_64-linux-gnu.so and b/restful/cutils/build/lib.linux-x86_64-3.10/utilities.cpython-310-x86_64-linux-gnu.so differ
 
restful/cutils/build/temp.linux-x86_64-3.10/utilities.o CHANGED
Binary files a/restful/cutils/build/temp.linux-x86_64-3.10/utilities.o and b/restful/cutils/build/temp.linux-x86_64-3.10/utilities.o differ
 
restful/cutils/utilities.c CHANGED
The diff for this file is too large to render. See raw diff
 
restful/cutils/utilities.cpython-310-x86_64-linux-gnu.so CHANGED
Binary files a/restful/cutils/utilities.cpython-310-x86_64-linux-gnu.so and b/restful/cutils/utilities.cpython-310-x86_64-linux-gnu.so differ
 
restful/cutils/utilities.pyx CHANGED
@@ -6,17 +6,21 @@ from tensorflow.keras.models import load_model
6
  import cython
7
 
8
  cdef class Utilities:
9
- async def cryptocurrency_prediction_utils(self,
10
- int days, int sequence_length, str model_name) -> tuple:
11
- cdef str model_path = os.path.join('./models', f'{model_name}.keras')
 
12
  model = load_model(model_path)
13
 
14
- cdef str dataframe_path = os.path.join('./posttrained', f'{model_name}-posttrained.json')
 
15
  dataframe = read_json(dataframe_path)
16
  dataframe.set_index('Date', inplace=True)
17
 
18
- minmax_scaler = load(os.path.join('./pickles', f'{model_name}_minmax_scaler.pickle'))
19
- standard_scaler = load(os.path.join('./pickles', f'{model_name}_standard_scaler.pickle'))
 
 
20
 
21
  # Prediction
22
  lst_seq = dataframe[-sequence_length:].values
 
6
  import cython
7
 
8
  cdef class Utilities:
9
+ async def forecasting_utils(self, int sequence_length,
10
+ int days, str model_name, str algorithm) -> tuple:
11
+ cdef str model_path = os.path.join(f'./resources/algorithms/{algorithm}/models',
12
+ f'{model_name}.keras')
13
  model = load_model(model_path)
14
 
15
+ cdef str dataframe_path = os.path.join(f'./resources/algorithms/{algorithm}/posttrained',
16
+ f'{model_name}-posttrained.json')
17
  dataframe = read_json(dataframe_path)
18
  dataframe.set_index('Date', inplace=True)
19
 
20
+ minmax_scaler = load(os.path.join(f'./resources/algorithms/{algorithm}/pickles',
21
+ f'{model_name}_minmax_scaler.pickle'))
22
+ standard_scaler = load(os.path.join(f'./resources/algorithms/{algorithm}/pickles',
23
+ f'{model_name}_standard_scaler.pickle'))
24
 
25
  # Prediction
26
  lst_seq = dataframe[-sequence_length:].values
restful/routes.py CHANGED
@@ -1,25 +1,31 @@
1
  from fastapi import APIRouter, Body
2
  from fastapi.responses import JSONResponse
3
- from restful.controllers import cryptocurrency_controller
4
- from restful.schemas import CryptocurrencyPredictionSchema
5
 
6
- # Route
7
  route = APIRouter()
8
 
9
- # Controller
10
- __CONTROLLER = cryptocurrency_controller()
11
 
12
- # Cryptocurrency List
13
- @route.get(path = '/lists')
14
- async def cryptocurrency_list_route() -> JSONResponse:
15
- # Cryptocurrency Controller
16
- return await __CONTROLLER.crypto_list()
17
 
18
- # Cryptocurrency Prediction
19
- @route.post(path = '/prediction')
20
- async def cryptocurrency_pred_route(
21
- payload: CryptocurrencyPredictionSchema = Body(...)
 
 
 
 
 
 
 
 
 
 
 
 
22
  ) -> JSONResponse:
23
- # Cryptocurrency Controller
24
- return await __CONTROLLER.prediction(payload = payload)
25
 
 
1
  from fastapi import APIRouter, Body
2
  from fastapi.responses import JSONResponse
3
+ from restful.controllers import ForecastingControllers
4
+ from restful.schemas import ForecastingServiceSchema
5
 
6
+ """ API Router """
7
  route = APIRouter()
8
 
9
+ """ Forecasting Controller """
10
+ __CONTROLLER = ForecastingControllers()
11
 
 
 
 
 
 
12
 
13
+ """ Algorithms Route """
14
+ @route.get(path = '/algorithms')
15
+ async def algorithms_route() -> JSONResponse:
16
+ return await __CONTROLLER.algorithms_controller()
17
+
18
+
19
+ """ Currencies Route """
20
+ @route.get(path = '/currencies')
21
+ async def currencies_route() -> JSONResponse:
22
+ return await __CONTROLLER.currencies_controller()
23
+
24
+
25
+ """ Forecasting Route """
26
+ @route.post(path = '/forecasting')
27
+ async def forecasting_route(
28
+ payload: ForecastingServiceSchema = Body(...)
29
  ) -> JSONResponse:
30
+ return await __CONTROLLER.forecasting_controller(payload = payload)
 
31
 
restful/schemas.py CHANGED
@@ -1,6 +1,7 @@
1
  from pydantic import BaseModel
2
 
3
- class CryptocurrencyPredictionSchema(BaseModel) :
4
  days: int
5
  currency: str
 
6
 
 
1
  from pydantic import BaseModel
2
 
3
+ class ForecastingServiceSchema(BaseModel) :
4
  days: int
5
  currency: str
6
+ algorithm: str
7
 
restful/services.py CHANGED
@@ -1,18 +1,22 @@
1
  from restful.cutils.utilities import Utilities
2
- from restful.schemas import CryptocurrencyPredictionSchema
3
 
4
- class cryptocurrency_svc:
5
- # Prediction Utilities
6
- __PRED_UTILS = Utilities()
7
 
8
- # Prediction Service
9
- async def prediction(self, payload: CryptocurrencyPredictionSchema) -> dict:
10
- days: int = payload.days
11
- currency: str = payload.currency
12
 
13
- actuals, predictions = await self.__PRED_UTILS.cryptocurrency_prediction_utils(
 
 
 
 
 
 
 
14
  days = days,
 
15
  model_name = currency,
 
16
  sequence_length = 60
17
  )
18
 
 
1
  from restful.cutils.utilities import Utilities
2
+ from restful.schemas import ForecastingServiceSchema
3
 
 
 
 
4
 
5
+ """ Forecasting Service """
6
+ class ForecastingService:
 
 
7
 
8
+ __FORECAST_UTILS = Utilities()
9
+
10
+ async def forecasting(self, payload: ForecastingServiceSchema) -> dict:
11
+ days: int = payload.days
12
+ currency: str = payload.currency
13
+ algorithm: str = payload.algorithm
14
+
15
+ actuals, predictions = await self.__FORECAST_UTILS.forecasting_utils(
16
  days = days,
17
+ algorithm = algorithm,
18
  model_name = currency,
19
+
20
  sequence_length = 60
21
  )
22
 
training.py CHANGED
@@ -173,8 +173,6 @@ def main(algorithm: str, sequence_length: int, epochs: int, batch_size: int):
173
  posttrained = './posttrained'
174
  pickle_file = './pickles'
175
 
176
- batch_size = 32
177
-
178
  data_processor = DataProcessor(datasets_path)
179
 
180
  for dataset in data_processor.datasets:
 
173
  posttrained = './posttrained'
174
  pickle_file = './pickles'
175
 
 
 
176
  data_processor = DataProcessor(datasets_path)
177
 
178
  for dataset in data_processor.datasets: