BraydenMoore committed · Commit 7e19f9a · 1 parent: df33fd2
Adding rest of files
This view is limited to 50 files because it contains too many changes; see the raw diff for the complete change set.
- main.py +76 -0
- plantvision.py +115 -0
- requirements.txt +11 -0
- resources/flowerImageIndices.pkl +3 -0
- resources/flowerImageLabels.pkl +3 -0
- resources/flowerLabelSet.pkl +3 -0
- resources/flowerMeansAndStds.pkl +3 -0
- resources/flowerspeciesIndexDict.pkl +3 -0
- resources/fruitImageIndices.pkl +3 -0
- resources/fruitImageLabels.pkl +3 -0
- resources/fruitLabelSet.pkl +3 -0
- resources/fruitMeansAndStds.pkl +3 -0
- resources/fruitspeciesIndexDict.pkl +3 -0
- resources/fruitspeciesNameToID.pkl +3 -0
- resources/imageIndices.pkl +3 -0
- resources/imageLabels.pkl +3 -0
- resources/imageLocations.pkl +3 -0
- resources/imageSelections.pkl +3 -0
- resources/infoDict.pkl +3 -0
- resources/labelSet.pkl +3 -0
- resources/leafImageIndices.pkl +3 -0
- resources/leafImageLabels.pkl +3 -0
- resources/leafLabelSet.pkl +3 -0
- resources/leafMeansAndStds.pkl +3 -0
- resources/leafspeciesIndexDict.pkl +3 -0
- resources/meansAndStds.pkl +3 -0
- resources/speciesNameToID.pkl +3 -0
- resources/speciesNameToKey.pkl +3 -0
- resources/speciesNameToVernacular.pkl +3 -0
- web/static/loading.gif +0 -0
- web/static/predicted-images/img0.jpeg +0 -0
- web/static/predicted-images/img1.jpeg +0 -0
- web/static/predicted-images/img10.jpeg +0 -0
- web/static/predicted-images/img11.jpeg +0 -0
- web/static/predicted-images/img12.jpeg +0 -0
- web/static/predicted-images/img13.jpeg +0 -0
- web/static/predicted-images/img14.jpeg +0 -0
- web/static/predicted-images/img15.jpeg +0 -0
- web/static/predicted-images/img16.jpeg +0 -0
- web/static/predicted-images/img17.jpeg +0 -0
- web/static/predicted-images/img18.jpeg +0 -0
- web/static/predicted-images/img19.jpeg +0 -0
- web/static/predicted-images/img2.jpeg +0 -0
- web/static/predicted-images/img3.jpeg +0 -0
- web/static/predicted-images/img4.jpeg +0 -0
- web/static/predicted-images/img5.jpeg +0 -0
- web/static/predicted-images/img6.jpeg +0 -0
- web/static/predicted-images/img7.jpeg +0 -0
- web/static/predicted-images/img8.jpeg +0 -0
- web/static/predicted-images/img9.jpeg +0 -0
main.py
ADDED
@@ -0,0 +1,76 @@
+import plantvision
+import requests
+from io import BytesIO
+import pickle as pkl
+from flask import Flask, render_template, request, session, jsonify, url_for
+from PIL import Image
+import os
+import time
+import random
+from pathlib import Path
+THIS_FOLDER = Path(__file__).parent.resolve()
+
+app = Flask(__name__)
+app.secret_key = 'pi-33pp-co-sk-33'
+app.template_folder = os.path.abspath(f'{THIS_FOLDER}/web/templates')
+app.static_folder = os.path.abspath(f'{THIS_FOLDER}/web/static')
+print(app.static_folder)
+
+flowerLayers = None
+leafLayers = None
+fruitLayers = None
+
+@app.route('/')
+def home():
+    return render_template('index.html')
+
+@app.route('/guess', methods=['POST'])
+def guess():
+    global flowerLayers, leafLayers, fruitLayers
+
+    if request.method == 'POST':
+        print('Thinking...')
+
+        img = request.files.get('uploaded-image')
+        feature = request.form.get('feature')
+
+        tensor = plantvision.processImage(img, feature)
+        predictions = plantvision.see(tensor, feature, 6)
+
+        with open(f'{THIS_FOLDER}/resources/speciesNameToKey.pkl','rb') as f:
+            speciesNameToKey = pkl.load(f)
+        with open(f'{THIS_FOLDER}/resources/speciesNameToVernacular.pkl','rb') as f:
+            speciesNameToVernacular = pkl.load(f)
+        with open(f'{THIS_FOLDER}/resources/{feature}speciesIndexDict.pkl','rb') as f:
+            speciesNameToIndex = pkl.load(f)
+
+        urls = []
+        predicted_image_urls = []
+        for p in predictions:
+            key = speciesNameToKey[p]
+            img = speciesNameToIndex[p]
+            query = ''
+            for i in p.split(' '):
+                query += i
+                query += '+'
+            urls.append(f'https://www.google.com/search?q={query[:-1]}')
+            predicted_image_urls.append(f"https://storage.googleapis.com/bmllc-images-bucket/images/img{img}.jpeg")
+
+        names = []
+        for p in predictions:
+            try:
+                names.append(speciesNameToVernacular[p])
+            except:
+                names.append(p)
+
+        response = {
+            'names': names,
+            'species': predictions,
+            'predictions': urls,
+            'images': predicted_image_urls
+        }
+
+        return jsonify(response)
+
+if __name__ == '__main__':
+    app.run(port=int(os.environ.get("PORT", 8080)),host='0.0.0.0',debug=True)
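The /guess route above expects a multipart form upload. The following is a minimal client sketch, not part of the commit: the base URL and image filename are placeholders, while the field names 'uploaded-image' and 'feature' match what the route reads.

import requests

BASE_URL = 'http://localhost:8080'  # placeholder; use the deployed Space URL instead

# Post an image plus the feature type ('flower', 'leaf', or 'fruit') to /guess.
with open('leaf.jpg', 'rb') as f:  # hypothetical local image
    resp = requests.post(
        f'{BASE_URL}/guess',
        files={'uploaded-image': f},
        data={'feature': 'leaf'},
    )
resp.raise_for_status()
result = resp.json()

# The JSON response pairs vernacular names, species names, search URLs, and example images.
for name, species, url in zip(result['names'], result['species'], result['predictions']):
    print(name, species, url)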
plantvision.py
ADDED
@@ -0,0 +1,115 @@
+import requests
+from io import BytesIO
+from PIL import Image, ImageOps
+import torchvision.transforms as T
+import torch
+import gc
+import pickle as pkl
+from pathlib import Path
+THIS_FOLDER = Path(__file__).parent.resolve()
+import datetime as dt
+from transformers import AutoModel
+import torch.nn as nn
+import torch.nn.functional as F
+import logging
+logging.disable(logging.INFO)
+logging.disable(logging.WARNING)
+
+visionTransformer = AutoModel.from_pretrained(r"google/vit-base-patch16-224-in21k")
+
+class PlantVision(nn.Module):
+    def __init__(self, num_classes):
+        super(PlantVision, self).__init__()
+        self.vit = visionTransformer
+        count = 0
+        for child in self.vit.children():
+            count += 1
+            if count < 4:
+                for param in child.parameters():
+                    param.requires_grad = False
+        self.vitLayers = list(self.vit.children())
+        self.vitTop = nn.Sequential(*self.vitLayers[:-2])
+        self.vitNorm = list(self.vit.children())[2]
+        self.vit = None
+        gc.collect()
+        self.vitFlatten = nn.Flatten()
+        self.vitLinear = nn.Linear(151296, num_classes)
+        self.fc = nn.Linear(num_classes, num_classes)
+
+    def forward(self, input):
+        output = self.vitTop(input).last_hidden_state
+        output = self.vitNorm(output)
+        output = self.vitFlatten(output)
+        output = F.relu(self.vitLinear(output))
+        output = self.fc(output)
+        return output
+
+device = 'cpu' # ('cuda' if torch.cuda.is_available else 'cpu')
+
+with open(fr'{THIS_FOLDER}/resources/flowerLabelSet.pkl', 'rb') as f:
+    flowerLabelSet = pkl.load(f)
+
+with open(fr'{THIS_FOLDER}/resources/leafLabelSet.pkl', 'rb') as f:
+    leafLabelSet = pkl.load(f)
+
+with open(fr'{THIS_FOLDER}/resources/fruitLabelSet.pkl', 'rb') as f:
+    fruitLabelSet = pkl.load(f)
+
+def loadModel(feature, labelSet):
+    model = PlantVision(num_classes=len(labelSet))
+    model.vitFlatten.load_state_dict(torch.load(BytesIO(requests.get(f"https://storage.googleapis.com/bmllc-plant-model-bucket/{feature}-vitFlatten-weights.pt").content), map_location=torch.device(device)), strict=False)
+    model.vitLinear.load_state_dict(torch.load(BytesIO(requests.get(f"https://storage.googleapis.com/bmllc-plant-model-bucket/{feature}-vitLinear-weights.pt").content), map_location=torch.device(device)), strict=False)
+    model.fc.load_state_dict(torch.load(BytesIO(requests.get(f"https://storage.googleapis.com/bmllc-plant-model-bucket/{feature}-fc-weights.pt").content), map_location=torch.device(device)), strict=False)
+    model = model.half()
+    return model
+
+start = dt.datetime.now()
+flower = loadModel('flower',flowerLabelSet)
+leaf = loadModel('leaf',leafLabelSet)
+fruit = loadModel('fruit',fruitLabelSet)
+print(dt.datetime.now() - start)
+
+def processImage(imagePath, feature):
+    with open(fr'{THIS_FOLDER}/resources/{feature}MeansAndStds.pkl', 'rb') as f:
+        meansAndStds = pkl.load(f)
+
+    img = Image.open(imagePath).convert('RGB')
+    cropped = ImageOps.fit(img, (224,224), Image.Resampling.LANCZOS)
+
+    process = T.Compose([
+        T.CenterCrop(224),
+        T.ToTensor(),
+        T.ConvertImageDtype(torch.float32),
+        T.Normalize(
+            mean=meansAndStds['mean'],
+            std=meansAndStds['std'])
+    ])
+
+    return process(cropped)
+
+def see(tensor,feature,k):
+
+    if feature=='flower':
+        model = flower.float()
+        labelSet = flowerLabelSet
+
+    elif feature=='leaf':
+        model = leaf.float()
+        labelSet = leafLabelSet
+
+    elif feature=='fruit':
+        model = fruit.float()
+        labelSet = fruitLabelSet
+
+    with torch.no_grad():
+        output = model(tensor.unsqueeze(0))
+        top = torch.topk(output,k,dim=1)
+        predictions = top.indices[0]
+
+    predictedSpecies = []
+    for i in predictions:
+        predictedSpecies.append(labelSet[i])
+
+    model = None
+    gc.collect()
+    return predictedSpecies
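plantvision.py downloads the weights and builds all three models at import time, so it can also be driven directly without Flask. The following is a minimal local sketch, an assumption rather than part of the commit: the image path is a placeholder, and the resource pickles and weight URLs must be reachable from the working directory.

import plantvision  # downloads weights and loads the label sets on import

# Preprocess a local image for the chosen feature, then take the top-6 species names.
tensor = plantvision.processImage('my_flower.jpg', 'flower')  # hypothetical image path
species = plantvision.see(tensor, 'flower', 6)
print(species)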
requirements.txt
ADDED
@@ -0,0 +1,11 @@
+Flask==2.1.0
+gunicorn==20.1.0
+torch
+torchvision
+pillow
+tqdm
+argparse
+pathlib
+transformers
+fastapi
+uvicorn
resources/flowerImageIndices.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9189a92359774fb40bb605424ae3b3a2e6ae2b9a92b1c0bb61fb79981549ddfb
+size 113164
resources/flowerImageLabels.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9fd985d5e407b226c439e966aadc18f6332195f8cd118ccdd18bd4cf1de54b63
+size 203485
resources/flowerLabelSet.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:89ba3712eb686badc02a768e6b0f09974c4161466ea92cee1c9b86f70e33af15
+size 23964
resources/flowerMeansAndStds.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76a3349b54abf757e5ef18d4801320c8194561eedab4ce3ea9739ff2bca25d8f
+size 236
resources/flowerspeciesIndexDict.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:571c2b76bc6f92e97bd51bd37224b62f7c1ecc6c02dd7768c2c1cc0212f9d26b
+size 70059
resources/fruitImageIndices.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5a902edb6214f6c4eedba3213b7bdd24c5f87cc62db33c66f645a4bd0444f0f
+size 17734
resources/fruitImageLabels.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c260c9bce0614a1197a92ffa29600d3890e97f733e702c4a5870a21eb8f665a8
+size 27866
resources/fruitLabelSet.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:875210d10918a499c84815b3393afc8f3e4fc07979c872714053c40059857da5
+size 5164
resources/fruitMeansAndStds.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1479a0b5e531db1b783fa3fe8f47bb6278fc43c0dc9448decb53f855df83fe72
+size 236
resources/fruitspeciesIndexDict.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b045988de9a625ce8ebb81adf64f00bf2709ce76868c388c95aeaeb1a918c930
+size 22526
resources/fruitspeciesNameToID.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ffb83cf9e6053dbc3888d5e6b3e2d784bfd09333fb5058b51f3f1da0d005fc1
+size 584518
resources/imageIndices.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99ebbdcb3b5124aab5cb156b8be0672041fa16b70acb5e1efbb0a796dd54cf5b
+size 650154
resources/imageLabels.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76c77a8a0bf3e1316e382b13db60c9b6dc90143ba9712f07f24ea6684b5b7da9
+size 1771457
resources/imageLocations.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3af966e13a46d4fddda1abe0764db5a0fa3b1996b113756b7fe2db1160a4ebbb
+size 2234643
resources/imageSelections.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c306952141e794e935a1bdcc4a35aa45bc2858756533683a159ac1f44ec4fed5
+size 2407338
resources/infoDict.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c5602ffd87f6bb46b53ac6c4cece0703a3595721a1b8cc6cc4e650154bdbfd6e
+size 10577505
resources/labelSet.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f46f078cdab57f85ac70a3da7e11cc9f001c493a40e1ea32c7da50acfb8af2b
+size 36457
resources/leafImageIndices.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:26463c81aa1fdde63490deafb70016d6efe5d5241d25f4e4dd63aeac14b4452b
+size 144737
resources/leafImageLabels.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f8ae09eb663be008a260a2f30d668ff28b042a72578178783017add5a5034f7
+size 259577
resources/leafLabelSet.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c25e4e98ea2bd5a313ca032c212aeb688e84afca6458757889be84a282c4580e
+size 25482
resources/leafMeansAndStds.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab072ba014fe39029378b2c088d8a8ab85f1291163e25e060e621f00cedb194c
+size 236
resources/leafspeciesIndexDict.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92fbc241dc2825a4f83beb61e3be3a4ae59e0d04cb3a68a80e58fbb3aff0d4ec
+size 69399
resources/meansAndStds.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f9c146afe67291a4143714b466a3cb7eed425bfed1218103930ebafeefc0747
+size 236
resources/speciesNameToID.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ffb83cf9e6053dbc3888d5e6b3e2d784bfd09333fb5058b51f3f1da0d005fc1
+size 584518
resources/speciesNameToKey.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0edd05755d1b9ca01291426ff35a9964589d4cd52f1cbea3f032b9dbfc7f2b17
+size 94428
resources/speciesNameToVernacular.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2207518c0f274b0b579fe8bf70c6605b3abaeb7a725adebab5b2bd3ec6201e41
+size 140185
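The .pkl entries above are Git LFS pointer files, so the repository stores only an oid and size for each resource. As an optional check, not part of the commit, after git lfs pull materializes the files, each file's SHA-256 should equal the oid in its pointer. A small sketch, assuming a local checkout:

import hashlib
from pathlib import Path

def sha256_of(path: Path) -> str:
    # Stream the file in 1 MiB chunks so large resources are not loaded into memory at once.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            h.update(chunk)
    return h.hexdigest()

print(sha256_of(Path('resources/flowerLabelSet.pkl')))
# Expected (from the pointer above): 89ba3712eb686badc02a768e6b0f09974c4161466ea92cee1c9b86f70e33af15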
web/static/loading.gif
ADDED
web/static/predicted-images/img0.jpeg
ADDED
web/static/predicted-images/img1.jpeg
ADDED
web/static/predicted-images/img10.jpeg
ADDED
web/static/predicted-images/img11.jpeg
ADDED
web/static/predicted-images/img12.jpeg
ADDED
web/static/predicted-images/img13.jpeg
ADDED
web/static/predicted-images/img14.jpeg
ADDED
web/static/predicted-images/img15.jpeg
ADDED
web/static/predicted-images/img16.jpeg
ADDED
web/static/predicted-images/img17.jpeg
ADDED
web/static/predicted-images/img18.jpeg
ADDED
web/static/predicted-images/img19.jpeg
ADDED
web/static/predicted-images/img2.jpeg
ADDED
web/static/predicted-images/img3.jpeg
ADDED
web/static/predicted-images/img4.jpeg
ADDED
web/static/predicted-images/img5.jpeg
ADDED
web/static/predicted-images/img6.jpeg
ADDED
web/static/predicted-images/img7.jpeg
ADDED
web/static/predicted-images/img8.jpeg
ADDED
web/static/predicted-images/img9.jpeg
ADDED