added status update
- .gitignore +2 -1
- app.py +2 -6
- funcs/processor.py +11 -3
- funcs/tools.py +1 -1
- output.csv +0 -23
.gitignore
CHANGED
@@ -5,7 +5,8 @@ __pycache__/
 wetransfer_files_2023-05-04_1807/
 # C extensions
 *.so
-
+output.json
+output.csv
 # Distribution / packaging
 .Python
 build/
app.py
CHANGED
@@ -15,7 +15,7 @@ with gr.Blocks(title='Cabasus') as cabasus_sensor:
         sample_rate = gr.inputs.Slider(1, 199, 1, 20, label="Sample rate")
     with gr.Row():
         window_size_slider = gr.inputs.Slider(0, 100, 2, 10, label="Window Size")
-        repeat_process = gr.Button(
+        repeat_process = gr.Button('Restart process')
     with gr.Row():
         leg_dropdown = gr.Dropdown(choices=['GZ1', 'GZ2', 'GZ3', 'GZ4'], label='select leg', value='GZ1')
 
@@ -25,12 +25,8 @@ with gr.Blocks(title='Cabasus') as cabasus_sensor:
 
     slices_per_leg = gr.Textbox(label="Number of slices found per LEG")
 
-    # try:
-    # example = gr.Examples(examples=filename, inputs=[csv_file_box])
-    # except:
-    # pass
-
     csv_file_box.change(process_data, inputs=[csv_file_box, slice_size_slider, sample_rate, window_size_slider], outputs=[processed_file_box, json_file_box, slices_per_leg, plot_box_leg, plot_box_overlay])
     leg_dropdown.change(plot_sensor_data_from_json, inputs=[json_file_box, leg_dropdown], outputs=[plot_box_leg])
+    repeat_process.click(process_data, inputs=[csv_file_box, slice_size_slider, sample_rate, window_size_slider], outputs=[processed_file_box, json_file_box, slices_per_leg, plot_box_leg, plot_box_overlay])
 
     cabasus_sensor.queue(concurrency_count=2).launch(debug=True)
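For reference, the pattern introduced here wires one callback to two triggers: the existing csv_file_box.change event and the new repeat_process button's click event, both calling process_data with the same inputs and outputs. A minimal, self-contained sketch of that wiring (the component set and the stub callback below are simplified placeholders, not the Space's actual app.py, which uses the older gr.inputs.Slider API):

import gradio as gr

def process_data(csv_file, slice_size):
    # stand-in for funcs.processor.process_data; just reports what it received
    name = getattr(csv_file, "name", csv_file)
    return f"processed {name} with slice_size={slice_size}"

with gr.Blocks(title="Cabasus") as demo:
    csv_file_box = gr.File(label="CSV file")
    slice_size_slider = gr.Slider(16, 512, value=64, label="Slice size")
    slices_per_leg = gr.Textbox(label="Status")
    repeat_process = gr.Button("Restart process")

    # the same callback is wired to two triggers: the file-change event and the button click
    csv_file_box.change(process_data, inputs=[csv_file_box, slice_size_slider], outputs=[slices_per_leg])
    repeat_process.click(process_data, inputs=[csv_file_box, slice_size_slider], outputs=[slices_per_leg])

demo.launch()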
funcs/processor.py
CHANGED
@@ -7,10 +7,15 @@ from funcs.plot_func import plot_sensor_data_from_json, plot_overlay_data_from_j
 def process_data(input_file, slice_size=64, min_slice_size=16, sample_rate=20, window_size=40, threshold=1000, span_limit=10000000,):
     # Read the data from the file, including the CRC column
     try:
-
+        if input_file.name is None:
+            return None, None, None, None, None
+        data = pd.read_csv(input_file.name, delimiter=";", index_col="NR", usecols=["NR", "TS", "LEG", "GX", "GY", "GZ", "AX", "AY", "AZ", "CRC"])
     except:
+        if input_file is None:
+            return None, None, None, None, None
         data = pd.read_csv(input_file, delimiter=";", index_col="NR", usecols=["NR", "TS", "LEG", "GX", "GY", "GZ", "AX", "AY", "AZ", "CRC"])
 
+
     # Replace the values with NaN when the CRC value is not zero
     data.loc[data["CRC"] != 0, ["GX", "GY", "GZ", "AX", "AY", "AZ"]] = np.nan
 
@@ -44,12 +49,15 @@ def process_data(input_file, slice_size=64, min_slice_size=16, sample_rate=20, w
     # Check if the gap between two timestamps is bigger than 80 ms and show a warning
     gaps = data.isna().all(axis=1).astype(int).groupby(data.notna().all(axis=1).astype(int).cumsum()).sum()
     big_gaps = gaps[gaps > 3]
+
     if not big_gaps.empty:
         gap_start_index = big_gaps.index[0] * 20
         gap_size = big_gaps.iloc[0] * 20
+
         print(f"Warning: gap of {gap_size} ms found at line {gap_start_index}")
         # Save the data up to the point where there is a gap of more than 80 ms
         data = data.iloc[:gap_start_index]
+
 
     # Calculate the absolute differences between consecutive rows for all channels
     differences = data.diff().abs()
@@ -60,7 +68,7 @@ def process_data(input_file, slice_size=64, min_slice_size=16, sample_rate=20, w
     if not no_significant_change_index.empty:
         # Save the data up to the point where no significant change appears in all channels
         data = data.loc[:no_significant_change_index[0]]
-
+        return None, None, f'Warning: Shorten', None, None
 
     # Save the resulting DataFrame to a new file
     data.to_csv('output.csv', sep=";", na_rep="NaN", float_format="%.0f")
@@ -72,4 +80,4 @@ def process_data(input_file, slice_size=64, min_slice_size=16, sample_rate=20, w
     overlay_fig = plot_overlay_data_from_json(file, ["GZ1", "GZ2", "GZ3", "GZ4"], use_precise_timestamp=True)
 
 
-    return 'output.csv', file, len_, sensor_fig, overlay_fig
+    return 'output.csv', file, f'{len_}', sensor_fig, overlay_fig
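The gap check touched above can be read in isolation: it counts runs of rows where every channel is NaN, keeps only runs longer than three rows (more than 80 ms at the 20 ms sample spacing), and truncates the data at the first such gap. A small sketch of that logic, reusing the same expressions as the diff on a made-up DataFrame rather than real Cabasus sensor data:

import numpy as np
import pandas as pd

# synthetic frame: two valid rows, five all-NaN rows, two valid rows
data = pd.DataFrame({"GZ1": [1.0, 2.0] + [np.nan] * 5 + [3.0, 4.0],
                     "GZ2": [5.0, 6.0] + [np.nan] * 5 + [7.0, 8.0]})

# length of each run of all-NaN rows, grouped by the running count of valid rows before it
gaps = data.isna().all(axis=1).astype(int).groupby(
    data.notna().all(axis=1).astype(int).cumsum()).sum()
big_gaps = gaps[gaps > 3]  # runs longer than 3 rows, i.e. gaps over 80 ms at 20 ms per sample

if not big_gaps.empty:
    gap_start_index = big_gaps.index[0] * 20  # row offset used to truncate, as in the Space's code
    gap_size = big_gaps.iloc[0] * 20          # gap length in ms
    print(f"Warning: gap of {gap_size} ms found at line {gap_start_index}")
    data = data.iloc[:gap_start_index]        # keep only the data before the gap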
funcs/tools.py
CHANGED
@@ -19,7 +19,7 @@ def numpy_to_native(data):
     return data
 
 def process_signals(gz_signal, upsample_factor, window_size=40, poly_order=2, peak_distance=2, peak_prominence=1):
-    smoothed_signal = savgol_filter(gz_signal, window_size, poly_order)
+    smoothed_signal = savgol_filter(gz_signal, window_size, poly_order, mode='interp')
     upsampled_smoothed_signal = upsample_signal(smoothed_signal, upsample_factor)
     autocorr = correlate(upsampled_smoothed_signal, upsampled_smoothed_signal, mode='full')
     autocorr = autocorr[autocorr.size // 2:]
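The only change in funcs/tools.py passes mode='interp' to savgol_filter explicitly; in current SciPy releases 'interp' is already the default boundary mode for this function, so this mostly documents the intended edge handling (a polynomial fit over the edge windows instead of padded extension). A minimal sketch on a synthetic signal, with illustrative window and order values rather than the Space's:

import numpy as np
from scipy.signal import savgol_filter

rng = np.random.default_rng(0)
t = np.linspace(0, 2 * np.pi, 200)
gz_signal = np.sin(t) + 0.2 * rng.standard_normal(t.size)  # noisy synthetic signal

# Savitzky-Golay smoothing; mode='interp' fits a polynomial over the edge windows rather than padding
smoothed_signal = savgol_filter(gz_signal, window_length=11, polyorder=2, mode='interp')
print(smoothed_signal[:5])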
output.csv
DELETED
@@ -1,23 +0,0 @@
-;AX1;AX2;AX3;AX4;AY1;AY2;AY3;AY4;AZ1;AZ2;AZ3;AZ4;GX1;GX2;GX3;GX4;GY1;GY2;GY3;GY4;GZ1;GZ2;GZ3;GZ4
-2580020;4926;5315;2288;3585;3098;-689;-678;222;866;-674;981;83;377;-194;-6246;-288;988;2516;-1098;1489;-2594;2883;-11311;-3891
-2580040;4403;6229;1530;3387;1887;-2546;-1007;-65;1349;514;706;62;346;212;-5938;-378;945;3128;-1989;1459;-2629;4099;-11307;-3948
-2580060;4429;7135;537;3247;1470;-5883;1433;-334;1129;3621;201;-11;769;-1064;-5709;-569;1062;4311;-2845;1405;-2752;6052;-11660;-3967
-2580080;4713;9543;-750;3136;1365;-14268;-1966;-629;504;6309;736;-10;735;-3555;-5840;-636;1163;6832;-3339;1352;-2989;10261;-11348;-3977
-2580100;4618;8353;-63;3140;689;-15127;-3031;-847;-193;6047;1325;54;232;-346;-4215;-761;1282;6063;-3050;1267;-3538;10679;-9047;-4054
-2580120;4400;4954;935;3112;782;-5457;-1349;-982;119;2440;321;13;367;10630;-1597;-759;1397;3099;-3013;1190;-3897;4301;-6479;-4166
-2580140;4496;11096;2566;3099;1513;545;1706;-1104;683;1208;-586;56;223;17105;-252;-736;1550;2242;-2765;1112;-4258;3400;-3556;-4285
-2580160;4562;12367;3632;3167;1425;11064;2831;-1337;880;552;-1033;129;143;18480;-223;-636;1718;1870;-2505;1014;-4653;6127;-2278;-4387
-2580180;4660;8801;4602;3240;1509;11605;3190;-1422;970;-670;-1290;131;304;3464;156;-499;1800;993;-2339;962;-4743;7106;-1273;-4386
-2580200;4823;6870;6902;3362;1426;6465;2492;-1386;1167;382;-758;124;483;-7931;1127;-296;1891;884;-1906;911;-4783;5056;453;-4398
-2580220;4741;4777;8477;3578;1239;-2;3633;-1248;1117;444;-484;276;765;-10900;2630;-89;2059;-1206;-1899;882;-4841;870;1427;-4523
-2580240;4570;5378;9453;3778;1102;-2164;6113;-1213;908;-12;433;388;892;3429;3643;183;2137;-4147;-1769;911;-4833;-6739;1935;-4655
-2580260;4432;6287;9891;4022;992;4084;8878;-1113;770;164;834;345;872;11656;3380;476;2181;-4462;-1554;963;-4800;-10488;2175;-4731
-2580280;4182;5300;12757;4356;690;10857;10012;-923;575;-942;-1384;296;733;6743;-3005;614;2213;-2662;-482;1055;-4703;-12554;693;-4679
-2580300;3994;3426;10567;4708;522;8515;4170;-905;405;-949;-2028;362;563;577;-4757;690;2210;-2233;1175;1243;-4613;-13103;638;-4295
-2580320;1853;1838;7965;4983;-4023;4673;-78;-751;1784;-706;-2991;622;298;-59;-5729;690;2635;-2713;1915;1442;-5412;-13766;1108;-3766
-2580340;3329;414;5708;5128;1393;3767;-5781;-489;2036;-1144;-2873;845;-1883;5648;-1372;780;2411;-3286;1488;1697;-4871;-15529;2118;-3103
-2580360;3113;159;4867;5095;-2620;7798;-5517;-451;1059;-1165;-3102;428;3134;2131;765;897;2086;-3713;819;2149;-4199;-15736;3000;-2151
-2580380;3083;-1201;4705;5132;87;7071;-5311;-759;-24;-1203;-2233;-256;1470;-3774;1996;1026;2091;-4413;126;2164;-4019;-15601;4016;-1441
-2580400;3266;-1798;4132;5057;-1258;4968;-4116;-39;-122;-408;-130;284;988;-5236;3365;1962;1861;-5601;-354;2109;-3525;-15158;4914;-844
-2580420;3421;-2086;4066;4602;-991;4974;-2754;1254;-166;178;823;547;397;-4411;3607;3698;1675;-5693;-266;2230;-3198;-14935;4970;-469
-2580440;3631;-3048;3985;4554;-955;4741;-1889;1285;-163;-180;1379;852;531;-4181;3441;3497;1535;-5117;6;2464;-2874;-14929;4767;-819