Chris committed
Commit · c9ec478
1 Parent(s): a3281f6
More WIP. Returns a sensible result now!
- .gitignore +1 -1
- =1.12 +8 -8
- app.py +17 -10
- body_shape_lookup.py +19 -0
- body_shape_measures_normalised.csv +11 -0
- body_shape_measures_normalised_updated.csv +11 -0
- calculate_masks.py +1 -10
- calculate_measures.py +9 -2
- select_body_shape.py +80 -0
- volunteers_measures_normalised_updated.csv +11 -0
.gitignore
CHANGED
@@ -7,4 +7,4 @@ input_img.jpg
 app.py
 input_img.jpg
 requirements.txt
-__pycache__
+__pycache__
=1.12
CHANGED
@@ -1,14 +1,14 @@
 Requirement already satisfied: xtcocotools in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (1.14.3)
+Requirement already satisfied: numpy>=1.20.0 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from xtcocotools) (1.26.1)
 Requirement already satisfied: cython>=0.27.3 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from xtcocotools) (3.0.7)
+Requirement already satisfied: matplotlib>=2.1.0 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from xtcocotools) (3.8.1)
 Requirement already satisfied: setuptools>=18.0 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from xtcocotools) (60.2.0)
-Requirement already satisfied:
-Requirement already satisfied:
-Requirement already satisfied: pyparsing>=2.3.1 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (3.1.1)
-Requirement already satisfied: fonttools>=4.22.0 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (4.47.0)
-Requirement already satisfied: cycler>=0.10 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (0.12.1)
+Requirement already satisfied: fonttools>=4.22.0 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (4.43.1)
+Requirement already satisfied: pillow>=8 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (10.1.0)
 Requirement already satisfied: packaging>=20.0 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (23.2)
-Requirement already satisfied:
-Requirement already satisfied: contourpy>=1.0.1 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (1.2.0)
+Requirement already satisfied: contourpy>=1.0.1 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (1.1.1)
 Requirement already satisfied: kiwisolver>=1.3.1 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (1.4.5)
-Requirement already satisfied:
+Requirement already satisfied: pyparsing>=2.3.1 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (3.1.1)
+Requirement already satisfied: python-dateutil>=2.7 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (2.8.2)
+Requirement already satisfied: cycler>=0.10 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from matplotlib>=2.1.0->xtcocotools) (0.12.1)
 Requirement already satisfied: six>=1.5 in /Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages (from python-dateutil>=2.7->matplotlib>=2.1.0->xtcocotools) (1.16.0)
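Note: the tracked file named =1.12 is almost certainly an accident. app.py runs os.system("pip install xtcocotools>=1.12"); the shell treats the unquoted >=1.12 as output redirection, so pip's output gets written to a file literally named =1.12. A minimal sketch of a safer invocation (not part of this commit), assuming pip is available for the running interpreter:

import subprocess
import sys

# Pass the requirement as a single argv entry so ">=1.12" is never parsed as a shell redirect.
subprocess.check_call([sys.executable, "-m", "pip", "install", "xtcocotools>=1.12"])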
app.py
CHANGED
@@ -1,6 +1,7 @@
 from keypoints_extraction import predict_pose
 from calculate_measures import calculate_all_measures
 from calculate_masks import calculate_seg_mask
+from select_body_shape import select_body_shape
 
 import os
 os.system("pip install xtcocotools>=1.12")
@@ -24,19 +25,25 @@ def generate_output(front_img_path, side_img_path):
     side_keypoint_data = side_keypoint_result[1]
 
     front_seg_mask = calculate_seg_mask(front_img_path)
-    side_rcnn_mask = calculate_seg_mask(side_img_path)
+    side_rcnn_mask = calculate_seg_mask(side_img_path)  # TODO: Is this the correct mask? In the original code there is a function called 'get_rcnn_mask' which is not used anywhere. The name implies that it should be a rcnn mask, but the code actually requests a seg mask.
 
-
+    measures_data_frame = calculate_all_measures(front_image, side_image, front_keypoint_data, side_keypoint_data, front_seg_mask, side_rcnn_mask)
 
-
+    # TODO: Normalise the measures somehow? Don't understand how this works yet if it is for a single person. Do we need to do this? Or not?
+    normalised_measures_data_frame = measures_data_frame
+
+    selected_body_shape = select_body_shape(normalised_measures_data_frame)
+
+    return (selected_body_shape)
 
 input_image_front = gr.inputs.Image(type='pil', label="Front Image")
 input_image_side = gr.inputs.Image(type='pil', label="Side Image")
-output_image_front = gr.outputs.Image(type="pil", label="Front Output Image")
-output_text_front = gr.outputs.Textbox(label="Front Output Text")
-output_image_side = gr.outputs.Image(type="pil", label="Front Output Image")
-output_text_side = gr.outputs.Textbox(label="Side Output Text")
-
-
-
+# output_image_front = gr.outputs.Image(type="pil", label="Front Output Image")
+# output_text_front = gr.outputs.Textbox(label="Front Output Text")
+# output_image_side = gr.outputs.Image(type="pil", label="Front Output Image")
+# output_text_side = gr.outputs.Textbox(label="Side Output Text")
+output_body_shape = gr.outputs.Textbox(label="Body Shape")
+
+title = "ShopByShape"
+iface = gr.Interface(fn=generate_output, inputs=[input_image_front, input_image_side], outputs=[output_body_shape], title=title)
 iface.launch()
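On the "Normalise the measures" TODO above: the *_normalised_updated.csv files added in this commit look z-scored per column, so one plausible way to normalise a single volunteer's row is to standardise it against the reference population's per-column mean and standard deviation. A minimal sketch (hypothetical helper, not the committed code), assuming body_shape_measures_normalised.csv holds the un-standardised reference measures:

import pandas as pd

REFERENCE_MEASURES = "body_shape_measures_normalised.csv"  # assumption: un-standardised reference rows

def normalise_against_reference(measures_df, columns):
    # Standardise the selected columns of a single-row measures DataFrame
    # using the mean/std of the reference population.
    ref = pd.read_csv(REFERENCE_MEASURES)
    mean = ref[columns].mean()
    std = ref[columns].std()
    out = measures_df.copy()
    out[columns] = (out[columns] - mean) / std  # per-column standard score
    return out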
body_shape_lookup.py
ADDED
@@ -0,0 +1,19 @@
# TODO: How do we know what the body shape is?

def body_shape_lookup(index):
    if index == 1:
        return "Hourglass"
    elif index == 2:
        return "Triangle"
    elif index == 3:
        return "Inverted Triangle"
    elif index == 4:
        return "Rectangle"
    elif index == 5:
        return "Diamond"
    elif index == 6:
        return "Oval"
    elif index == 7:
        return "Round"
    else:
        return "Unknown"
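The if/elif chain above maps an integer index to a label; an equivalent, more compact sketch (not the committed code) would keep the table in a dict:

BODY_SHAPES = {1: "Hourglass", 2: "Triangle", 3: "Inverted Triangle", 4: "Rectangle",
               5: "Diamond", 6: "Oval", 7: "Round"}

def body_shape_lookup(index):
    # dict.get returns the fallback label for any index outside 1-7
    return BODY_SHAPES.get(index, "Unknown")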
body_shape_measures_normalised.csv
ADDED
@@ -0,0 +1,11 @@
id,shoulder_width,hip_width,shoulder_to_hip_distance,hip_to_ankle_distance,torso_to_leg_ratio,waist_width,thigh_area,torso_area,lower_torso_area,upper_torso_area,full_side_body_area,thigh_normalised,torso_normalised,thigh_to_torso_ratio_normalised,thigh_to_torso_ratio,upper_torso_normalised,lower_torso_normalised,upper_to_lower_torso_normalised_ratio,upper_to_lower_torso_ratio,shoulder_to_hip_ratio,shoulder_to_waist_ratio,waist_to_hip_ratio,thigh_to_body_ratio,upper_torso_to_body_ratio,front_img_jpg,front_img_json,side_img_jpg,side_img_json
2A,309,269,323.125,454.39453125,0.7111111111111111,235,34229,50778,25191,25587,129184,39.727990098179454,58.935635899540046,0.6740911418330774,0.6740911418330773,29.697627235447065,29.23800866409298,1.015719899964273,1.015719899964273,1.1486988847583643,1.3148936170212766,0.8736059479553904,0.26496315333168197,0.1980663240029725,data/volunteers_full_res/2. A front.JPG,data/volunteers_full_res/keypoints/2. A front_keypoints.json,data/volunteers_full_res/2. A side.JPG,data/volunteers_full_res/keypoints/2. A side_keypoints.json
5A,751,784,799.8046875,1538.0859375,0.52,820,359036,478409,244104,234305,1317742,132.59746694466295,176.68373523415272,0.7504791924901079,0.7504791924901079,86.53240759274627,90.15132764140644,0.9598572739488087,0.9598572739488087,0.9579081632653061,0.9158536585365854,1.0459183673469388,0.27246304663583615,0.17780794723094506,data/volunteers_full_res/5. A front.jpg,data/volunteers_full_res/keypoints/5. A front_keypoints.json,data/volunteers_full_res/5. A side.jpg,data/volunteers_full_res/keypoints/5. A side_keypoints.json
7A,888,771,895.234375,1694.55078125,0.5283018867924528,549,252555,338580,163668,174912,874524,99.19811098210289,132.9868599565259,0.7459241538188905,0.7459241538188907,68.70162930095061,64.28523065557529,1.0687000513234108,1.0687000513234108,1.1517509727626458,1.6174863387978142,0.7120622568093385,0.2887913882294825,0.20000823305020787,data/volunteers_full_res/7. A front.jpg,data/volunteers_full_res/keypoints/7. A front_keypoints.json,data/volunteers_full_res/7. A side.jpg,data/volunteers_full_res/keypoints/7. A side_keypoints.json
7B,718,696,974.501953125,1446.03515625,0.6739130434782609,537,252642,372623,181265,191358,979634,96.55081855617931,142.40330452917252,0.6780096773414416,0.6780096773414416,73.13024571240474,69.27305881676777,1.0556809091661379,1.0556809091661379,1.0316091954022988,1.3370577281191807,0.771551724137931,0.2578942748005888,0.1953362174036426,data/volunteers_full_res/7. B front.jpg,data/volunteers_full_res/keypoints/7. B front_keypoints.json,data/volunteers_full_res/7. B side.jpg,data/volunteers_full_res/keypoints/7. B side_keypoints.json
7C,233,198,280.13671875,340.166015625,0.8235294117647058,160,21697,39647,20911,18736,72801,32.398362709200065,59.201635540934454,0.5472545211491412,0.5472545211491412,27.97694260587051,31.22469293506395,0.89598775763952,0.8959877576395199,1.1767676767676767,1.45625,0.8080808080808081,0.2980316204447741,0.25735910221013447,data/volunteers_full_res/7. C front.JPG,data/volunteers_full_res/keypoints/7. C front_keypoints.json,data/volunteers_full_res/7. C side.JPG,data/volunteers_full_res/keypoints/7. C side_keypoints.json
9A,211,178,259.423828125,306.591796875,0.8461538461538461,141,13005,29217,12972,16245,63173,20.589812332439678,46.257020139707045,0.4451175685388643,0.44511756853886436,25.719454159206656,20.537565980500386,1.2523126734505088,1.2523126734505088,1.1853932584269662,1.49645390070922,0.7921348314606742,0.2058632643692717,0.25715099805296565,data/volunteers_full_res/9. A front.JPG,data/volunteers_full_res/keypoints/9. A front_keypoints.json,data/volunteers_full_res/9. A side.JPG,data/volunteers_full_res/keypoints/9. A side_keypoints.json
9B,368,314,440.0,700.0,0.6285714285714286,282,54350,88963,42901,46062,225792,43.11781039270131,70.57754859182864,0.6109281386643886,0.6109281386643886,36.54264180880603,34.03490678302261,1.0736812661709516,1.0736812661709518,1.1719745222929936,1.3049645390070923,0.8980891719745223,0.24070826247165533,0.20400191326530612,data/volunteers_full_res/9. B front.jpg,data/volunteers_full_res/keypoints/9. B front_keypoints.json,data/volunteers_full_res/9. B side.jpg,data/volunteers_full_res/keypoints/9. B side_keypoints.json
10A,189,179,224.580078125,348.486328125,0.6444444444444445,155,13346,21964,10953,11011,49817,23.882954108559645,39.3050505050505,0.6076306683664178,0.6076306683664178,19.704421376393693,19.60062912865681,1.0052953528713595,1.0052953528713595,1.0558659217877095,1.2193548387096773,0.8659217877094972,0.2679005158881506,0.22102896601561717,data/volunteers_full_res/10. A front.JPG,data/volunteers_full_res/keypoints/10. A front_keypoints.json,data/volunteers_full_res/10. A side.JPG,data/volunteers_full_res/keypoints/10. A side_keypoints.json
11A,203,169,223.330078125,304.541015625,0.7333333333333333,147,16750,30912,15171,15741,63617,25.30289865121034,46.69631063320681,0.5418607660455487,0.5418607660455487,23.77868224887773,22.917628384329078,1.0375716828158987,1.0375716828158987,1.2011834319526626,1.380952380952381,0.8698224852071006,0.26329440243959945,0.24743386201801404,data/volunteers_full_res/11. A front.jpg,data/volunteers_full_res/keypoints/11. A front_keypoints.json,data/volunteers_full_res/11. A side.jpg,data/volunteers_full_res/keypoints/11. A side_keypoints.json
12A,669,672,783.056640625,1269.091796875,0.6170212765957447,567,211624,335094,170606,164488,813075,90.92243084724528,143.97025404645413,0.6315362256560846,0.6315362256560846,70.67085399199374,73.2994000544604,0.9641395964971924,0.9641395964971924,0.9955357142857143,1.17989417989418,0.84375,0.26027611228976416,0.20230360052885651,data/volunteers_full_res/12. A front.jpg,data/volunteers_full_res/keypoints/12. A front_keypoints.json,data/volunteers_full_res/12. A side.jpg,data/volunteers_full_res/keypoints/12. A side_keypoints.json
body_shape_measures_normalised_updated.csv
ADDED
@@ -0,0 +1,11 @@
id,shoulder_width,hip_width,shoulder_to_hip_distance,hip_to_ankle_distance,torso_to_leg_ratio,waist_width,thigh_area,torso_area,lower_torso_area,upper_torso_area,full_side_body_area,thigh_normalised,torso_normalised,thigh_to_torso_ratio_normalised,thigh_to_torso_ratio,upper_torso_normalised,lower_torso_normalised,upper_to_lower_torso_normalised_ratio,upper_to_lower_torso_ratio,shoulder_to_hip_ratio,shoulder_to_waist_ratio,waist_to_hip_ratio,thigh_to_body_ratio,upper_torso_to_body_ratio,front_img_jpg,front_img_json,side_img_jpg,side_img_json
2A,309,269,-0.6451853254425501,-0.6682325556525112,0.3512052843480335,235,34229,50778,25191,25587,129184,-0.5136746421535362,-0.6393085847116429,0.5341966631577374,0.5341966631577363,-0.6530399046830264,-0.6235671698990638,-0.18048946872604607,-0.18048946872604604,0.46262354383444276,-0.038443926936658726,0.2848605088833572,0.11603850407389217,-0.6352860522185653,data/volunteers_full_res/2. A front.JPG,data/volunteers_full_res/keypoints/2. A front_keypoints.json,data/volunteers_full_res/2. A side.JPG,data/volunteers_full_res/keypoints/2. A side_keypoints.json
5A,751,784,0.9144271139787139,1.2087976272001555,-1.393368840086257,820,359036,478409,244104,234305,1317742,1.7908025462101405,1.6581076973120041,1.3373436638658922,1.3373436638658922,1.5898707794468867,1.7184597534399404,-0.7675499961012099,-0.7675499961012098,-1.6885839094884345,-2.105222050862348,2.2088413671422513,0.4115935545489668,-1.3509388020203796,data/volunteers_full_res/5. A front.jpg,data/volunteers_full_res/keypoints/5. A front_keypoints.json,data/volunteers_full_res/5. A side.jpg,data/volunteers_full_res/keypoints/5. A side_keypoints.json
7A,888,771,1.2266563354411533,1.4798058058940855,-1.3175843564008711,549,252555,338580,163668,174912,874524,0.9620258960628703,0.8055256743012406,1.2894518062332987,1.2894518062332998,0.8862023059241599,0.7239466229852285,0.3762791226520531,0.37627912265205304,0.49703650905427676,1.5287976601007285,-1.5188804555519377,1.0550592184153258,-0.5666856621104159,data/volunteers_full_res/7. A front.jpg,data/volunteers_full_res/keypoints/7. A front_keypoints.json,data/volunteers_full_res/7. A side.jpg,data/volunteers_full_res/keypoints/7. A side_keypoints.json
7B,718,696,1.4860059660807992,1.0493591618169957,0.01163954223114654,537,252642,372623,181265,191358,979634,0.8963355830168163,0.9892525726734528,0.5753963012377995,0.5753963012377995,1.0609718924822236,0.9157212305526141,0.2394609133057007,0.23946091330570068,-0.8575885214005697,0.07635234591621287,-0.8546416455323987,-0.16253115903080398,-0.7317305165035971,data/volunteers_full_res/7. B front.jpg,data/volunteers_full_res/keypoints/7. B front_keypoints.json,data/volunteers_full_res/7. B side.jpg,data/volunteers_full_res/keypoints/7. B side_keypoints.json
7C,233,198,-0.7858354516068623,-0.8660844259672597,1.3774253575446744,160,21697,39647,20911,18736,72801,-0.6955531203216642,-0.6341185908976849,-0.7993686079294058,-0.7993686079294058,-0.7209444855652456,-0.5471821012653706,-1.4387549608901573,-1.4387549608901582,0.7791053627807573,0.6936939800717078,-0.4467705325621465,1.4191973718580178,1.4593062364358502,data/volunteers_full_res/7. C front.JPG,data/volunteers_full_res/keypoints/7. C front_keypoints.json,data/volunteers_full_res/7. C side.JPG,data/volunteers_full_res/keypoints/7. C side_keypoints.json
9A,211,178,-0.8536044015374634,-0.9242373480961905,1.5839544420431042,141,13005,29217,12972,16245,63173,-0.9885722682153351,-0.8866846225820019,-1.8732405871833364,-1.8732405871833364,-0.810033330890077,-0.9580863087680326,2.305864794991042,2.3058647949910416,0.8763606978908735,0.9019251126074833,-0.6248177905120642,-2.212964050626634,1.4519546942072203,data/volunteers_full_res/9. A front.JPG,data/volunteers_full_res/keypoints/9. A front_keypoints.json,data/volunteers_full_res/9. A side.JPG,data/volunteers_full_res/keypoints/9. A side_keypoints.json
9B,368,314,-0.2627907979699269,-0.24282650808339729,-0.40226526773322185,282,54350,88963,42901,46062,225792,-0.4295591307213896,-0.4121599519583041,-0.12990166261860167,-0.12990166261860167,-0.3829103075755036,-0.4391335397560143,0.42862672647856803,0.4286267264785703,0.7250614892098834,-0.08987035878672849,0.558231717572826,-0.8397958260534285,-0.4256038637053921,data/volunteers_full_res/9. B front.jpg,data/volunteers_full_res/keypoints/9. B front_keypoints.json,data/volunteers_full_res/9. B side.jpg,data/volunteers_full_res/keypoints/9. B side_keypoints.json
10A,189,179,-0.967607048096815,-0.8516730560853955,-0.2573670846406723,155,13346,21964,10953,11011,49817,-0.9068557521025037,-1.0223264573211466,-0.16457139639188673,-0.16457139639188673,-1.0474087761570212,-0.9941101434453615,-0.29004106968943183,-0.2900410696894318,-0.5840885829069261,-0.5332752158903319,0.19906183414420206,0.2317937959738227,0.1758982832290134,data/volunteers_full_res/10. A front.JPG,data/volunteers_full_res/keypoints/10. A front_keypoints.json,data/volunteers_full_res/10. A side.JPG,data/volunteers_full_res/keypoints/10. A side_keypoints.json
11A,203,169,-0.9716968291393029,-0.9277894463065092,0.5540627406776014,147,16750,30912,15171,15741,63617,-0.871621034320157,-0.8781135022156633,-0.8560787634674033,-0.8560787634674033,-0.886623370048677,-0.866576433381505,0.04915099181033875,0.049150991810338746,1.0543983860547377,0.3036992727189106,0.24261567283875252,0.050276534651395643,1.1086845891110646,data/volunteers_full_res/11. A front.jpg,data/volunteers_full_res/keypoints/11. A front_keypoints.json,data/volunteers_full_res/11. A side.jpg,data/volunteers_full_res/keypoints/11. A side_keypoints.json
12A,669,672,0.8596304382922542,0.7428807452800266,-0.5077018179835446,567,211624,335094,170606,164488,813075,0.7566719225447575,1.0198257653997465,0.08677258309590243,0.08677258309590243,0.9639151970662792,1.070528089537564,-0.7225470538308504,-0.7225470538308503,-1.2643249750290122,-0.7376568189389741,-0.04850067642283835,-0.06866794381056764,-0.48559890642479764,data/volunteers_full_res/12. A front.jpg,data/volunteers_full_res/keypoints/12. A front_keypoints.json,data/volunteers_full_res/12. A side.jpg,data/volunteers_full_res/keypoints/12. A side_keypoints.json
calculate_masks.py
CHANGED
@@ -1,4 +1,3 @@
-from PIL import Image
 from transformers import SegformerImageProcessor, AutoModelForSemanticSegmentation
 import torch.nn as nn
 
@@ -6,14 +5,6 @@ def calculate_seg_mask(image):
     processor = SegformerImageProcessor.from_pretrained("mattmdjaga/segformer_b2_clothes")
     model = AutoModelForSemanticSegmentation.from_pretrained("mattmdjaga/segformer_b2_clothes")
 
-    class_names = {
-        0: "Background", 1: "Hat", 2: "Hair", 3: "Sunglasses",
-        4: "Upper-clothes", 5: "Skirt", 6: "Pants", 7: "Dress",
-        8: "Belt", 9: "Left-shoe", 10: "Right-shoe", 11: "Face",
-        12: "Left-leg", 13: "Right-leg", 14: "Left-arm", 15: "Right-arm",
-        16: "Bag", 17: "Scarf"
-    }
-
     inputs = processor(images=image, return_tensors="pt")
 
     outputs = model(**inputs)
@@ -27,4 +18,4 @@ def calculate_seg_mask(image):
     )
 
     pred_seg = upsampled_logits.argmax(dim=1)[0]
-    return pred_seg
+    return pred_seg
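For reference, the class_names table removed above documents the label ids emitted by the mattmdjaga/segformer_b2_clothes model, so downstream code can still derive, say, a person-vs-background mask from the returned prediction. A one-line sketch (not part of this commit), assuming pred_seg is the HxW tensor of class ids returned by calculate_seg_mask:

person_mask = (pred_seg != 0)  # label 0 is "Background" in the deleted class_names table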
calculate_measures.py
CHANGED
@@ -33,6 +33,9 @@ def get_volume_result(mask_of_interest, original_image, max_x, min_x, max_y, min_y):
     return None
 
 def calculate_all_measures(front_image, side_image, front_keypoint_data, side_keypoint_data, front_seg_mask, side_rcnn_mask):
+    # Initialize an empty list of result rows (converted to a DataFrame at the end)
+    results = []
+
     results_dict = {}
 
     front_keypoints = front_keypoint_data['keypoints']
@@ -128,6 +131,10 @@ def calculate_all_measures(front_image, side_image, front_keypoint_data, side_keypoint_data, front_seg_mask, side_rcnn_mask):
     results_dict['upper_torso_to_body_ratio'] = upper_torso_area / full_side_body_area
     results_dict['upper_torso_to_body_ratio'] = upper_torso_area / full_side_body_area
 
-
+    # TODO: Temporary to force things to work.
+    results_dict['id'] = "1"
 
-
+    results.append(results_dict)
+
+    results_df = pd.DataFrame(results)
+    return results_df
select_body_shape.py
ADDED
@@ -0,0 +1,80 @@
import re
import numpy as np
import pandas as pd
from numpy import dot
from numpy.linalg import norm
from body_shape_lookup import body_shape_lookup

BODY_SHAPE_MEASURES = "body_shape_measures_normalised_updated.csv"
VOLUNTEERS_MEASURES = "volunteers_measures_normalised_updated.csv"

# selecting specific features
RATIOS_TO_USE = ['shoulder_to_hip_distance',
                 'hip_to_ankle_distance',
                 'thigh_to_torso_ratio_normalised',
                 'upper_to_lower_torso_normalised_ratio',
                 'shoulder_to_hip_ratio',
                 'thigh_to_body_ratio',
                 'upper_torso_to_body_ratio']

def extract_digits(input_string):
    # find digits in the format '1A' or '12B'
    match = re.search(r'\d+', input_string)
    if match:
        return int(match.group())
    else:
        return -1  # not found

def is_match(row):
    # check whether there was a match for this record
    # extract the user class from id
    ground_truth = extract_digits(row['Volunteer_ID'])
    return ground_truth == row['Rank_1_Body_Shape'] or ground_truth == row['Rank_2_Body_Shape'] or ground_truth == row['Rank_3_Body_Shape']

def select_body_shape(normalised_body_shape_measures):
    # load the body shape measures
    body_shape_df = pd.read_csv(BODY_SHAPE_MEASURES)
    # body_shape_df = normalised_body_shape_measures

    # load the volunteers measures
    # volunteers_df = pd.read_csv(VOLUNTEERS_MEASURES)
    volunteers_df = normalised_body_shape_measures

    # select only the columns corresponding to the ratios
    body_shape_ratios = body_shape_df[RATIOS_TO_USE]

    # Create a DataFrame to store the results
    results_df = pd.DataFrame(columns=["Volunteer_ID", "Rank_1_Body_Shape", "Score_1",
                                       "Rank_2_Body_Shape", "Score_2",
                                       "Rank_3_Body_Shape", "Score_3"])

    # calculate euclidean distance for each volunteer
    for index, volunteer_row in volunteers_df.iterrows():
        print(f"\nProcessing volunteer {volunteer_row['id']}")
        volunteer_ratios = volunteer_row[RATIOS_TO_USE]

        top_scores = [(-1000, 'n/a')] * 3

        for body_index, body_shape_row in body_shape_ratios.iterrows():
            # euclidean distance
            # similarity = np.linalg.norm(volunteer_ratios - body_shape_row)
            # calculate cosine similarity
            similarity = dot(volunteer_ratios, body_shape_row) / (norm(volunteer_ratios)*norm(body_shape_row))

            # Check if the current score is among the top 3
            for i, (score, _) in enumerate(top_scores):
                if similarity > score:
                    top_scores.insert(i, (similarity, body_index + 1))
                    top_scores = top_scores[:3]
                    break

            print(f"Volunteer {volunteer_row['id']} (body shape {body_index + 1}) Similarity:\t{similarity:.3f}")

        # Print the top 3 best body shapes and scores for the current volunteer
        print(f"Volunteer {volunteer_row['id']} top 3 body shapes and scores are:")
        for i, (score, body_shape) in enumerate(top_scores):
            print(f"Rank {i + 1}: Body Shape {body_shape} with score {score:.3f}")

        body_shape_index = top_scores[0][1]

    return body_shape_lookup(body_shape_index)
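The manual top-3 bookkeeping in select_body_shape (insert into top_scores, truncate to three, break) could also be handled with the standard library; a small alternative sketch (not the committed code), assuming the (similarity, body_shape_index) pairs are first collected into a list:

import heapq

def top_three(scores):
    # scores: list of (similarity, body_shape_index) tuples; nlargest compares by similarity first
    return heapq.nlargest(3, scores)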
volunteers_measures_normalised_updated.csv
ADDED
@@ -0,0 +1,11 @@
id,shoulder_width,hip_width,shoulder_to_hip_distance,hip_to_ankle_distance,torso_to_leg_ratio,waist_width,thigh_area,torso_area,lower_torso_area,upper_torso_area,full_side_body_area,thigh_normalised,torso_normalised,thigh_to_torso_ratio_normalised,thigh_to_torso_ratio,upper_torso_normalised,lower_torso_normalised,upper_to_lower_torso_normalised_ratio,upper_to_lower_torso_ratio,shoulder_to_hip_ratio,shoulder_to_waist_ratio,waist_to_hip_ratio,thigh_to_body_ratio,upper_torso_to_body_ratio,front_img_jpg,front_img_json,side_img_jpg,side_img_json
2A,309,269,-0.6451853254425501,-0.6682325556525112,0.3512052843480335,235,34229,50778,25191,25587,129184,-0.5136746421535362,-0.6393085847116429,0.5341966631577374,0.5341966631577363,-0.6530399046830264,-0.6235671698990638,-0.18048946872604607,-0.18048946872604604,0.46262354383444276,-0.038443926936658726,0.2848605088833572,0.11603850407389217,-0.6352860522185653,data/volunteers_full_res/2. A front.JPG,data/volunteers_full_res/keypoints/2. A front_keypoints.json,data/volunteers_full_res/2. A side.JPG,data/volunteers_full_res/keypoints/2. A side_keypoints.json
5A,751,784,0.9144271139787139,1.2087976272001555,-1.393368840086257,820,359036,478409,244104,234305,1317742,1.7908025462101405,1.6581076973120041,1.3373436638658922,1.3373436638658922,1.5898707794468867,1.7184597534399404,-0.7675499961012099,-0.7675499961012098,-1.6885839094884345,-2.105222050862348,2.2088413671422513,0.4115935545489668,-1.3509388020203796,data/volunteers_full_res/5. A front.jpg,data/volunteers_full_res/keypoints/5. A front_keypoints.json,data/volunteers_full_res/5. A side.jpg,data/volunteers_full_res/keypoints/5. A side_keypoints.json
7A,888,771,1.2266563354411533,1.4798058058940855,-1.3175843564008711,549,252555,338580,163668,174912,874524,0.9620258960628703,0.8055256743012406,1.2894518062332987,1.2894518062332998,0.8862023059241599,0.7239466229852285,0.3762791226520531,0.37627912265205304,0.49703650905427676,1.5287976601007285,-1.5188804555519377,1.0550592184153258,-0.5666856621104159,data/volunteers_full_res/7. A front.jpg,data/volunteers_full_res/keypoints/7. A front_keypoints.json,data/volunteers_full_res/7. A side.jpg,data/volunteers_full_res/keypoints/7. A side_keypoints.json
7B,718,696,1.4860059660807992,1.0493591618169957,0.01163954223114654,537,252642,372623,181265,191358,979634,0.8963355830168163,0.9892525726734528,0.5753963012377995,0.5753963012377995,1.0609718924822236,0.9157212305526141,0.2394609133057007,0.23946091330570068,-0.8575885214005697,0.07635234591621287,-0.8546416455323987,-0.16253115903080398,-0.7317305165035971,data/volunteers_full_res/7. B front.jpg,data/volunteers_full_res/keypoints/7. B front_keypoints.json,data/volunteers_full_res/7. B side.jpg,data/volunteers_full_res/keypoints/7. B side_keypoints.json
7C,233,198,-0.7858354516068623,-0.8660844259672597,1.3774253575446744,160,21697,39647,20911,18736,72801,-0.6955531203216642,-0.6341185908976849,-0.7993686079294058,-0.7993686079294058,-0.7209444855652456,-0.5471821012653706,-1.4387549608901573,-1.4387549608901582,0.7791053627807573,0.6936939800717078,-0.4467705325621465,1.4191973718580178,1.4593062364358502,data/volunteers_full_res/7. C front.JPG,data/volunteers_full_res/keypoints/7. C front_keypoints.json,data/volunteers_full_res/7. C side.JPG,data/volunteers_full_res/keypoints/7. C side_keypoints.json
9A,211,178,-0.8536044015374634,-0.9242373480961905,1.5839544420431042,141,13005,29217,12972,16245,63173,-0.9885722682153351,-0.8866846225820019,-1.8732405871833364,-1.8732405871833364,-0.810033330890077,-0.9580863087680326,2.305864794991042,2.3058647949910416,0.8763606978908735,0.9019251126074833,-0.6248177905120642,-2.212964050626634,1.4519546942072203,data/volunteers_full_res/9. A front.JPG,data/volunteers_full_res/keypoints/9. A front_keypoints.json,data/volunteers_full_res/9. A side.JPG,data/volunteers_full_res/keypoints/9. A side_keypoints.json
9B,368,314,-0.2627907979699269,-0.24282650808339729,-0.40226526773322185,282,54350,88963,42901,46062,225792,-0.4295591307213896,-0.4121599519583041,-0.12990166261860167,-0.12990166261860167,-0.3829103075755036,-0.4391335397560143,0.42862672647856803,0.4286267264785703,0.7250614892098834,-0.08987035878672849,0.558231717572826,-0.8397958260534285,-0.4256038637053921,data/volunteers_full_res/9. B front.jpg,data/volunteers_full_res/keypoints/9. B front_keypoints.json,data/volunteers_full_res/9. B side.jpg,data/volunteers_full_res/keypoints/9. B side_keypoints.json
10A,189,179,-0.967607048096815,-0.8516730560853955,-0.2573670846406723,155,13346,21964,10953,11011,49817,-0.9068557521025037,-1.0223264573211466,-0.16457139639188673,-0.16457139639188673,-1.0474087761570212,-0.9941101434453615,-0.29004106968943183,-0.2900410696894318,-0.5840885829069261,-0.5332752158903319,0.19906183414420206,0.2317937959738227,0.1758982832290134,data/volunteers_full_res/10. A front.JPG,data/volunteers_full_res/keypoints/10. A front_keypoints.json,data/volunteers_full_res/10. A side.JPG,data/volunteers_full_res/keypoints/10. A side_keypoints.json
11A,203,169,-0.9716968291393029,-0.9277894463065092,0.5540627406776014,147,16750,30912,15171,15741,63617,-0.871621034320157,-0.8781135022156633,-0.8560787634674033,-0.8560787634674033,-0.886623370048677,-0.866576433381505,0.04915099181033875,0.049150991810338746,1.0543983860547377,0.3036992727189106,0.24261567283875252,0.050276534651395643,1.1086845891110646,data/volunteers_full_res/11. A front.jpg,data/volunteers_full_res/keypoints/11. A front_keypoints.json,data/volunteers_full_res/11. A side.jpg,data/volunteers_full_res/keypoints/11. A side_keypoints.json
12A,669,672,0.8596304382922542,0.7428807452800266,-0.5077018179835446,567,211624,335094,170606,164488,813075,0.7566719225447575,1.0198257653997465,0.08677258309590243,0.08677258309590243,0.9639151970662792,1.070528089537564,-0.7225470538308504,-0.7225470538308503,-1.2643249750290122,-0.7376568189389741,-0.04850067642283835,-0.06866794381056764,-0.48559890642479764,data/volunteers_full_res/12. A front.jpg,data/volunteers_full_res/keypoints/12. A front_keypoints.json,data/volunteers_full_res/12. A side.jpg,data/volunteers_full_res/keypoints/12. A side_keypoints.json