Soutrik committed

Commit 9e7b284 • 1 Parent(s): f25067e

pushed ci.yaml file
.flake8 ADDED
@@ -0,0 +1,27 @@
+ [flake8]
+ max-line-length = 120
+
+ # Exclude the virtual environment, notebooks folder, tests folder, and other unnecessary directories
+ exclude =
+     .venv,
+     __pycache__,
+     .git,
+     build,
+     dist,
+     notebooks,
+     tests,
+     .ipynb_checkpoints,
+     .mypy_cache,
+     .pytest_cache,
+     pytorch_project
+
+ ignore =
+     E203,
+     W503,
+     E501,
+     E402,
+     F401,
+     E401
+
+ max-complexity = 10
+ show-source = True
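A quick way to exercise this configuration locally is sketched below; it assumes flake8 and black are installed in the project's Poetry environment, as they are in the workflow added next. flake8 picks up the .flake8 file from the repository root automatically.

    # hypothetical local invocation from the repository root
    poetry run flake8 .          # uses the .flake8 settings above
    poetry run black --check .   # report-only formatting check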
.github/workflows/ci.yaml ADDED
@@ -0,0 +1,208 @@
+ name: CI Pipeline
+
+ on:
+   push:
+     branches:
+       - main
+       - feat/pytorch-catdogs-setup
+   pull_request:
+     branches:
+       - main
+
+ jobs:
+   python_basic_test:
+     name: Test current codebase and setup Python environment
+     runs-on: self-hosted
+
+     strategy:
+       matrix:
+         python-version: [3.10.15]
+
+     env:
+       AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+       AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+       AWS_REGION: ${{ secrets.AWS_REGION }}
+
+     steps:
+       # Step 0: Configure AWS credentials
+       - name: Configure AWS credentials
+         uses: aws-actions/configure-aws-credentials@v4
+         with:
+           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+           aws-region: ${{ secrets.AWS_REGION }}
+
+       # Step 1: Print the branch name
+       - name: Print branch name
+         run: echo "Branch name is ${{ github.ref_name }}"
+
+       # Step 2: Checkout the repository code
+       - name: Checkout code
+         uses: actions/checkout@v3
+
+       # Step 3: Set up Python environment with the specified version
+       - name: Set up Python ${{ matrix.python-version }}
+         uses: actions/setup-python@v4
+         with:
+           python-version: ${{ matrix.python-version }}
+
+       # Step 4: Install Poetry and set virtualenv to be created inside the project directory
+       - name: Install Poetry
+         run: |
+           python -m pip install --upgrade pip
+           pip install poetry
+           poetry config virtualenvs.in-project true  # Ensure the virtual environment is created in the project directory
+
+       # Step 5: Cache Poetry dependencies
+       - name: Cache Poetry dependencies
+         uses: actions/cache@v3
+         with:
+           # Cache the virtual environment in the project directory
+           path: |
+             .venv
+             ~/.cache/pypoetry
+           key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
+           restore-keys: |
+             ${{ runner.os }}-poetry-
+
+       # Step 6: Install dependencies with Poetry
+       - name: Install dependencies
+         run: |
+           poetry install --no-root
+
+       # Step 7: Check Poetry environment
+       - name: Check Poetry environment
+         run: |
+           source .venv/bin/activate
+           poetry env info
+
+       # Step 8: Set environment variables from GitHub Secrets and write to .env
+       - name: Create .env file
+         run: |
+           echo "AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}" >> .env
+           echo "AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}" >> .env
+           echo "AWS_REGION=${AWS_REGION}" >> .env
+           echo ".env file created"
+
+       # Step 9: Run lint checks at the root level
+       - name: Run lint checks
+         run: |
+           source .venv/bin/activate
+           flake8 . --exclude=.venv,tests,notebooks
+
+       # Step 10: Run black code formatter at the root level
+       - name: black
+         run: |
+           source .venv/bin/activate
+           black . --exclude=.venv,tests,notebooks
+
+   pytorch_code_test:
+     name: Test PyTorch code
+     runs-on: self-hosted
+
+     env:
+       AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+       AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+       AWS_REGION: ${{ secrets.AWS_REGION }}
+
+     needs: python_basic_test  # Ensures pytorch_code_test runs only after python_basic_test completes successfully
+
+     strategy:
+       matrix:
+         python-version: [3.10.15]
+
+     steps:
+       # Step 0: Configure AWS credentials
+       - name: Configure AWS credentials
+         uses: aws-actions/configure-aws-credentials@v4
+         with:
+           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+           aws-region: ${{ secrets.AWS_REGION }}
+
+       # Step 1: Checkout the repository code
+       - name: Checkout code
+         uses: actions/checkout@v3
+
+       # Step 2: Set up Python environment with the specified version
+       - name: Set up Python ${{ matrix.python-version }}
+         uses: actions/setup-python@v4
+         with:
+           python-version: ${{ matrix.python-version }}
+
+       # Step 3: Install Poetry and set virtualenv to be created inside the project directory
+       - name: Install Poetry
+         run: |
+           python -m pip install --upgrade pip
+           pip install poetry
+           poetry config virtualenvs.in-project true  # Ensure the virtual environment is created in the project directory
+
+       # Step 4: Cache Poetry dependencies
+       - name: Cache Poetry dependencies
+         uses: actions/cache@v3
+         with:
+           # Cache the virtual environment in the project directory
+           path: |
+             .venv
+             ~/.cache/pypoetry
+           key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
+           restore-keys: |
+             ${{ runner.os }}-poetry-
+
+       # Step 5: Install dependencies with Poetry
+       - name: Install dependencies
+         run: |
+           poetry install --no-root
+
+       # Step 6: Check Poetry environment
+       - name: Check Poetry environment
+         run: |
+           source .venv/bin/activate
+           poetry env info
+
+       # Step 7: DVC pull to get the data
+       - name: Get data from DVC
+         run: |
+           source .venv/bin/activate
+           echo "Getting data from DVC"
+           dvc pull || echo "No data to pull from DVC"
+
+       # Step 8: Run Train code
+       - name: Run Train code
+         run: |
+           source .venv/bin/activate
+           echo "Training the model"
+           python -m src.train_optuna_callbacks experiment=catdog_experiment ++task_name=train ++train=True ++test=False
+           python -m src.create_artifacts
+
+       # Step 9: Run the model testing code
+       - name: Run Test code
+         run: |
+           source .venv/bin/activate
+           echo "Testing the model"
+           python -m src.train_optuna_callbacks experiment=catdog_experiment ++task_name=test ++train=False ++test=True
+
+       # Step 10: Upload the model checkpoints, logs, and configs as artifacts
+       - name: upload model checkpoints
+         uses: actions/upload-artifact@v4
+         with:
+           name: model-checkpoints
+           path: ./checkpoints/
+
+       - name: upload logs
+         uses: actions/upload-artifact@v4
+         with:
+           name: logs
+           path: ./logs/
+
+       - name: upload configs
+         uses: actions/upload-artifact@v4
+         with:
+           name: configs
+           path: ./configs/
+
+       - name: upload artifacts
+         uses: actions/upload-artifact@v4
+         with:
+           name: artifacts
+           path: ./artifacts/
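Taken together, the two jobs amount to the local sequence sketched below. This is only an illustration of what the workflow runs, assuming the in-project Poetry virtualenv, DVC remote, and AWS credentials are already set up; every command and Hydra override is copied verbatim from the steps above.

    # reproduce the CI steps locally (sketch; assumes credentials and DVC remote are configured)
    poetry install --no-root
    source .venv/bin/activate
    flake8 . --exclude=.venv,tests,notebooks
    black . --exclude=.venv,tests,notebooks
    dvc pull || echo "No data to pull from DVC"
    python -m src.train_optuna_callbacks experiment=catdog_experiment ++task_name=train ++train=True ++test=False
    python -m src.create_artifacts
    python -m src.train_optuna_callbacks experiment=catdog_experiment ++task_name=test ++train=False ++test=True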
.github/workflows/deploy.yml DELETED
@@ -1,59 +0,0 @@
- name: Deploy to ECR and Run Docker Compose
-
- on:
-   push:
-     branches:
-       - main
-       # - feat/framework-setup
-
- jobs:
-   deploy:
-     runs-on: self-hosted
-
-     steps:
-       - name: Checkout repository
-         uses: actions/checkout@v4
-
-       - name: Set up Docker Buildx
-         uses: docker/setup-buildx-action@v3
-
-       - name: Log in to Amazon ECR
-         env:
-           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-           AWS_REGION: ${{ secrets.AWS_REGION }}
-         run: |
-           aws ecr get-login-password --region $AWS_REGION | docker login --username AWS --password-stdin 007758426687.dkr.ecr.$AWS_REGION.amazonaws.com
-
-       - name: Build Docker image
-         env:
-           POSTGRES_DB: ${{ secrets.POSTGRES_DB }}
-           POSTGRES_USER: ${{ secrets.POSTGRES_USER }}
-           POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
-           REDIS_PORT: ${{ secrets.REDIS_PORT }}
-           REDIS_HOST: ${{ secrets.REDIS_HOST }}
-           FLOWER_BASIC_AUTH: ${{ secrets.FLOWER_BASIC_AUTH }}
-           REDIS_URL: ${{ secrets.REDIS_URL }}
-           DATABASE_URL: ${{ secrets.DATABASE_URL }}
-           BROKER_URL: ${{ secrets.BROKER_URL }}
-         run: |
-           docker build -t soutrik71/test .
-           docker tag soutrik71/test:latest 007758426687.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com/soutrik71/test:latest
-
-       - name: Push to Amazon ECR
-         run: |
-           docker push 007758426687.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com/soutrik71/test:latest
-
-       - name: Run Docker Compose
-         env:
-           POSTGRES_DB: ${{ secrets.POSTGRES_DB }}
-           POSTGRES_USER: ${{ secrets.POSTGRES_USER }}
-           POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
-           REDIS_PORT: ${{ secrets.REDIS_PORT }}
-           REDIS_HOST: ${{ secrets.REDIS_HOST }}
-           FLOWER_BASIC_AUTH: ${{ secrets.FLOWER_BASIC_AUTH }}
-           REDIS_URL: ${{ secrets.REDIS_URL }}
-           DATABASE_URL: ${{ secrets.DATABASE_URL }}
-           BROKER_URL: ${{ secrets.BROKER_URL }}
-         run: |
-           docker-compose up -d --build app
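For reference, the deploy logic removed here boils down to the manual sequence below; it is assembled verbatim from the deleted steps (account ID, image name, and compose service included) and assumes the AWS CLI, Docker, and docker-compose are available with the same credentials exported in the shell.

    # manual equivalent of the deleted deploy workflow (sketch)
    aws ecr get-login-password --region $AWS_REGION | docker login --username AWS --password-stdin 007758426687.dkr.ecr.$AWS_REGION.amazonaws.com
    docker build -t soutrik71/test .
    docker tag soutrik71/test:latest 007758426687.dkr.ecr.$AWS_REGION.amazonaws.com/soutrik71/test:latest
    docker push 007758426687.dkr.ecr.$AWS_REGION.amazonaws.com/soutrik71/test:latest
    docker-compose up -d --build app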
.github/workflows/{deploy_new.yml → test_deploy.yml} RENAMED
File without changes