Caleb Spradlin committed
Commit · 0a97993
1 Parent(s): 18903a3
changed pre-trained
Files changed:
- app.py +22 -10
- images/.DS_Store +0 -0
- images/images/ft_demo_10_1076_img copy.png +0 -0
- images/predictions/10/cnn-ls/ft_cnn_demo_10_1071_cnn-ls_pred.png +0 -0
- images/predictions/10/cnn-ls/ft_cnn_demo_10_1076_cnn-ls_pred.png +0 -0
- images/predictions/10/cnn-ls/ft_cnn_demo_10_1541_cnn-ls_pred.png +0 -0
- images/predictions/10/cnn/ft_cnn_demo_10_1071_cnn-plain_pred.png +0 -0
- images/predictions/10/cnn/ft_cnn_demo_10_1071_pred.png +0 -0
- images/predictions/10/cnn/ft_cnn_demo_10_1076_cnn-plain_pred.png +0 -0
- images/predictions/10/cnn/ft_cnn_demo_10_1076_pred.png +0 -0
- images/predictions/10/cnn/ft_cnn_demo_10_1541_cnn-plain_pred.png +0 -0
- images/predictions/10/cnn/ft_cnn_demo_10_1541_pred.png +0 -0
- images/predictions/100/cnn/ft_cnn_demo_100_1071_pred.png +0 -0
- images/predictions/100/cnn/ft_cnn_demo_100_1076_pred.png +0 -0
- images/predictions/100/cnn/ft_cnn_demo_100_1541_pred.png +0 -0
- images/predictions/100/svb/ft_demo_100_1071_pred.png +0 -0
- images/predictions/100/svb/ft_demo_100_1076_pred.png +0 -0
- images/predictions/100/svb/ft_demo_100_1541_pred.png +0 -0
- images/predictions/1000/cnn-ls/ft_cnn_demo_1000_1071_cnn-ls_pred.png +0 -0
- images/predictions/1000/cnn-ls/ft_cnn_demo_1000_1076_cnn-ls_pred.png +0 -0
- images/predictions/1000/cnn-ls/ft_cnn_demo_1000_1541_cnn-ls_pred.png +0 -0
- images/predictions/1000/cnn/ft_cnn_demo_1000_1071_cnn-plain_pred.png +0 -0
- images/predictions/1000/cnn/ft_cnn_demo_1000_1071_pred.png +0 -0
- images/predictions/1000/cnn/ft_cnn_demo_1000_1076_cnn-plain_pred.png +0 -0
- images/predictions/1000/cnn/ft_cnn_demo_1000_1076_pred.png +0 -0
- images/predictions/1000/cnn/ft_cnn_demo_1000_1541_cnn-plain_pred.png +0 -0
- images/predictions/1000/cnn/ft_cnn_demo_1000_1541_pred.png +0 -0
- images/predictions/500/cnn/ft_cnn_demo_500_1071_pred.png +0 -0
- images/predictions/500/cnn/ft_cnn_demo_500_1076_pred.png +0 -0
- images/predictions/500/cnn/ft_cnn_demo_500_1541_pred.png +0 -0
- images/predictions/500/svb/ft_demo_500_1071_pred.png +0 -0
- images/predictions/500/svb/ft_demo_500_1076_pred.png +0 -0
- images/predictions/500/svb/ft_demo_500_1541_pred.png +0 -0
- images/predictions/5000/cnn-ls/ft_cnn_demo_5000_1071_cnn-ls_pred.png +0 -0
- images/predictions/5000/cnn-ls/ft_cnn_demo_5000_1076_cnn-ls_pred.png +0 -0
- images/predictions/5000/cnn-ls/ft_cnn_demo_5000_1541_cnn-ls_pred.png +0 -0
- images/predictions/5000/cnn/ft_cnn_demo_5000_1071_cnn-plain_pred.png +0 -0
- images/predictions/5000/cnn/ft_cnn_demo_5000_1071_pred.png +0 -0
- images/predictions/5000/cnn/ft_cnn_demo_5000_1076_cnn-plain_pred.png +0 -0
- images/predictions/5000/cnn/ft_cnn_demo_5000_1076_pred.png +0 -0
- images/predictions/5000/cnn/ft_cnn_demo_5000_1541_cnn-plain_pred.png +0 -0
- images/predictions/5000/cnn/ft_cnn_demo_5000_1541_pred.png +0 -0
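
The renamed prediction images above follow one naming scheme per model directory: images/predictions/<samples>/svb/ft_demo_<samples>_<chip>_pred.png for SatVision-Base, .../cnn/ft_cnn_demo_<samples>_<chip>_cnn-plain_pred.png for the plain UNet, and .../cnn-ls/ft_cnn_demo_<samples>_<chip>_cnn-ls_pred.png for the LS-pretrained UNet. The sketch below is a sanity check for that layout, assuming the three chip IDs asserted in find_preds() and the sample counts exposed by the selectbox in the diff; check_prediction_tree is a hypothetical helper and is not part of this commit.

# Sanity check for the new prediction layout (illustration only, not part of
# this commit; check_prediction_tree is a hypothetical helper).
from pathlib import Path

SAMPLE_COUNTS = [10, 1000, 5000]        # options offered by the selectbox
CHIP_IDS = ["1071", "1076", "1541"]     # chip IDs asserted in find_preds()
PATTERNS = {
    "svb": "ft_demo_{n}_{chip}_pred.png",
    "cnn": "ft_cnn_demo_{n}_{chip}_cnn-plain_pred.png",
    "cnn-ls": "ft_cnn_demo_{n}_{chip}_cnn-ls_pred.png",
}

def check_prediction_tree(pred_dir: Path = Path("./images/predictions")) -> None:
    """Fail loudly if any expected prediction image is missing."""
    for n in SAMPLE_COUNTS:
        for model, pattern in PATTERNS.items():
            for chip in CHIP_IDS:
                path = pred_dir / str(n) / model / pattern.format(n=n, chip=chip)
                assert path.is_file(), f"missing prediction image: {path}"

if __name__ == "__main__":
    check_prediction_tree()
    print("all expected prediction images are present")
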
app.py
CHANGED
@@ -11,7 +11,7 @@ def main():
     st.write("")

     selected_option = st.selectbox(
-        "Number of training samples", [10, …
+        "Number of training samples", [10, 1000, 5000]
     )
     st.markdown(
         "Move slider to select how many training "
@@ -24,22 +24,23 @@ def main():

     preds = load_predictions(selected_option, Path("./images/predictions"))

-    zipped_st_images = zip(images, preds["svb"], preds["unet"], labels)
+    zipped_st_images = zip(images, preds["svb"], preds["unet"], preds["unet-ls"], labels)

     st.write("")

-    titleCol0, titleCol1, titleCol2, titleCol3 = st.columns(…
+    titleCol0, titleCol1, titleCol2, titleCol3, titleCol4 = st.columns(5)

     titleCol0.markdown(f"### MOD09GA [3-2-1] Image Chip")
     titleCol1.markdown(f"### SatVision-B Prediction")
     titleCol2.markdown(f"### UNet (CNN) Prediction")
-    titleCol3.markdown(f…
+    titleCol3.markdown(f'### UNet (CNN) LS Pretrained Prediction')
+    titleCol4.markdown(f"### MCD12Q1 LandCover Target")

     st.write("")

-    grid = make_grid(…
+    grid = make_grid(5, 5)

-    for i, (image_data, svb_data, unet_data, label_data) in enumerate(zipped_st_images):
+    for i, (image_data, svb_data, unet_data, unet_ls_data, label_data) in enumerate(zipped_st_images):
         # if i == 0:

         # grid[0][0].markdown(f'## MOD09GA 3-2-1 Image Chip')
@@ -50,7 +51,8 @@ def main():
         grid[i][0].image(image_data[0], image_data[1], use_column_width=True)
         grid[i][1].image(svb_data[0], svb_data[1], use_column_width=True)
         grid[i][2].image(unet_data[0], unet_data[1], use_column_width=True)
-        grid[i][3].image(…
+        grid[i][3].image(unet_ls_data[0], unet_ls_data[1], use_column_width=True)
+        grid[i][4].image(label_data[0], label_data[1], use_column_width=True)

     st.markdown("### Few-Shot Learning with SatVision-Base")
     description = (
@@ -135,6 +137,7 @@ def load_predictions(selected_option: str, pred_dir: Path):
     svb_pred_paths = find_preds(selected_option, pred_dir, "svb")

     unet_pred_paths = find_preds(selected_option, pred_dir, "cnn")
+    unet_ls_pred_paths = find_preds(selected_option, pred_dir, "cnn-ls")

     svb_preds = [
         (str(path), f"SatVision-B Prediction Example {i}")
@@ -146,7 +149,12 @@ def load_predictions(selected_option: str, pred_dir: Path):
         for i, path in enumerate(unet_pred_paths, 1)
     ]

-    …
+    unet_ls_preds = [
+        (str(path), f"Unet LS Pre-trained Prediction Example {i}")
+        for i, path in enumerate(unet_ls_pred_paths, 1)
+    ]
+
+    prediction_dict = {"svb": svb_preds, "unet": unet_preds, "unet-ls": unet_ls_preds}

     return prediction_dict

@@ -155,8 +163,12 @@ def load_predictions(selected_option: str, pred_dir: Path):
 # find_preds
 # -----------------------------------------------------------------------------
 def find_preds(selected_option: int, pred_dir: Path, model: str):
+
     if model == "cnn":
-        pred_regex = f"ft_cnn_demo_{selected_option}_*…
+        pred_regex = f"ft_cnn_demo_{selected_option}_*cnn-plain_pred.png"
+
+    elif model == "cnn-ls":
+        pred_regex = f"ft_cnn_demo_{selected_option}_*cnn-ls_pred.png"

     else:
         pred_regex = f"ft_demo_{selected_option}_*_pred.png"
@@ -168,7 +180,7 @@ def find_preds(selected_option: int, pred_dir: Path, model: str):
     preds_matching_regex = sorted(model_specific_dir.glob(pred_regex))

     assert (
-        len(preds_matching_regex) == 3
+        len(preds_matching_regex) == 3
     ), "Should be 3 prediction images matching regex"

     assert "1071" in str(preds_matching_regex[0]), "Should be 1071"
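
The updated main() calls make_grid(5, 5) and indexes the result as grid[i][j], but the helper itself lies outside the hunks shown here. Below is a minimal sketch of one common Streamlit pattern that matches that usage; it is an assumption about the helper, not the version in the repository.

# Hypothetical make_grid consistent with the grid[i][j].image(...) calls in the
# diff above; the actual helper in app.py may differ.
import streamlit as st

def make_grid(rows: int, cols: int):
    """Build a rows x cols grid of Streamlit columns, indexable as grid[i][j]."""
    grid = []
    for _ in range(rows):
        with st.container():               # one container per row
            grid.append(st.columns(cols))  # each row is a list of column objects
    return grid

With this shape, grid[i][3].image(unet_ls_data[0], unet_ls_data[1], use_column_width=True) from the diff renders the LS-pretrained prediction in the fourth column of row i.
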
images/.DS_Store: ADDED (binary, 6.15 kB)
images/images/ft_demo_10_1076_img copy.png: ADDED
images/predictions/10/cnn-ls/ft_cnn_demo_10_1071_cnn-ls_pred.png: ADDED
images/predictions/10/cnn-ls/ft_cnn_demo_10_1076_cnn-ls_pred.png: ADDED
images/predictions/10/cnn-ls/ft_cnn_demo_10_1541_cnn-ls_pred.png: ADDED
images/predictions/10/cnn/ft_cnn_demo_10_1071_cnn-plain_pred.png: ADDED
images/predictions/10/cnn/ft_cnn_demo_10_1071_pred.png: DELETED (binary, 57.6 kB)
images/predictions/10/cnn/ft_cnn_demo_10_1076_cnn-plain_pred.png: ADDED
images/predictions/10/cnn/ft_cnn_demo_10_1076_pred.png: DELETED (binary, 58.5 kB)
images/predictions/10/cnn/ft_cnn_demo_10_1541_cnn-plain_pred.png: ADDED
images/predictions/10/cnn/ft_cnn_demo_10_1541_pred.png: DELETED (binary, 59 kB)
images/predictions/100/cnn/ft_cnn_demo_100_1071_pred.png: DELETED (binary, 57.5 kB)
images/predictions/100/cnn/ft_cnn_demo_100_1076_pred.png: DELETED (binary, 54.3 kB)
images/predictions/100/cnn/ft_cnn_demo_100_1541_pred.png: DELETED (binary, 65.8 kB)
images/predictions/100/svb/ft_demo_100_1071_pred.png: DELETED (binary, 25 kB)
images/predictions/100/svb/ft_demo_100_1076_pred.png: DELETED (binary, 24.7 kB)
images/predictions/100/svb/ft_demo_100_1541_pred.png: DELETED (binary, 25 kB)
images/predictions/1000/cnn-ls/ft_cnn_demo_1000_1071_cnn-ls_pred.png: ADDED
images/predictions/1000/cnn-ls/ft_cnn_demo_1000_1076_cnn-ls_pred.png: ADDED
images/predictions/1000/cnn-ls/ft_cnn_demo_1000_1541_cnn-ls_pred.png: ADDED
images/predictions/1000/cnn/ft_cnn_demo_1000_1071_cnn-plain_pred.png: ADDED
images/predictions/1000/cnn/ft_cnn_demo_1000_1071_pred.png: DELETED (binary, 37.3 kB)
images/predictions/1000/cnn/ft_cnn_demo_1000_1076_cnn-plain_pred.png: ADDED
images/predictions/1000/cnn/ft_cnn_demo_1000_1076_pred.png: DELETED (binary, 45.7 kB)
images/predictions/1000/cnn/ft_cnn_demo_1000_1541_cnn-plain_pred.png: ADDED
images/predictions/1000/cnn/ft_cnn_demo_1000_1541_pred.png: DELETED (binary, 40.6 kB)
images/predictions/500/cnn/ft_cnn_demo_500_1071_pred.png: DELETED (binary, 55 kB)
images/predictions/500/cnn/ft_cnn_demo_500_1076_pred.png: DELETED (binary, 44.6 kB)
images/predictions/500/cnn/ft_cnn_demo_500_1541_pred.png: DELETED (binary, 60.6 kB)
images/predictions/500/svb/ft_demo_500_1071_pred.png: DELETED (binary, 25.2 kB)
images/predictions/500/svb/ft_demo_500_1076_pred.png: DELETED (binary, 25.1 kB)
images/predictions/500/svb/ft_demo_500_1541_pred.png: DELETED (binary, 25.9 kB)
images/predictions/5000/cnn-ls/ft_cnn_demo_5000_1071_cnn-ls_pred.png: ADDED
images/predictions/5000/cnn-ls/ft_cnn_demo_5000_1076_cnn-ls_pred.png: ADDED
images/predictions/5000/cnn-ls/ft_cnn_demo_5000_1541_cnn-ls_pred.png: ADDED
images/predictions/5000/cnn/ft_cnn_demo_5000_1071_cnn-plain_pred.png: ADDED
images/predictions/5000/cnn/ft_cnn_demo_5000_1071_pred.png: DELETED (binary, 26.6 kB)
images/predictions/5000/cnn/ft_cnn_demo_5000_1076_cnn-plain_pred.png: ADDED
images/predictions/5000/cnn/ft_cnn_demo_5000_1076_pred.png: DELETED (binary, 27.3 kB)
images/predictions/5000/cnn/ft_cnn_demo_5000_1541_cnn-plain_pred.png: ADDED
images/predictions/5000/cnn/ft_cnn_demo_5000_1541_pred.png: DELETED (binary, 30.4 kB)