import json

import matplotlib

# Use a non-interactive backend so figures can be rendered without a display.
matplotlib.use('Agg')

import matplotlib.pyplot as plt
import pandas as pd


def plot_sensor_data_from_json(json_file, sensor):
    """Plot one sensor's samples from all slices in a JSON file on a shared time axis."""
    # Read the JSON file; json_file may be a path or a file-like object
    # (e.g. an upload handle) whose .name attribute holds the path.
    try:
        with open(json_file, "r") as f:
            slices = json.load(f)
    except TypeError:
        with open(json_file.name, "r") as f:
            slices = json.load(f)

    # Concatenate the slices and build a timestamp series with 20 ms intervals.
    timestamps = []
    sensor_data = []
    for slice_dict in slices:
        start_timestamp = slice_dict["timestamp"]
        slice_length = len(slice_dict[sensor])
        slice_timestamps = [start_timestamp + 20 * i for i in range(slice_length)]
        timestamps.extend(slice_timestamps)
        sensor_data.extend(slice_dict[sensor])

    # Create a DataFrame indexed by timestamp.
    data = pd.DataFrame({sensor: sensor_data}, index=timestamps)

    # Plot the sensor data.
    fig, ax = plt.subplots(figsize=(12, 6))
    ax.plot(data.index, data[sensor], label=sensor)

    # Mark the start and end of each slice; only the first slice contributes legend entries.
    for slice_dict in slices:
        start_timestamp = slice_dict["timestamp"]
        end_timestamp = start_timestamp + 20 * (len(slice_dict[sensor]) - 1)
        ax.axvline(x=start_timestamp, color='black', linestyle=':',
                   label='Start' if start_timestamp == slices[0]["timestamp"] else None)
        ax.axvline(x=end_timestamp, color='red', linestyle=':',
                   label='End' if end_timestamp == slices[0]["timestamp"] + 20 * (len(slices[0][sensor]) - 1) else None)

    ax.set_xlabel("Timestamp")
    ax.set_ylabel(sensor)
    ax.legend()
    fig.tight_layout()
    return fig


def plot_overlay_data_from_json(json_file, sensors, use_precise_timestamp=False):
    """Overlay every slice of each sensor on a per-sensor subplot, aligned to t = 0."""
    # Read the JSON file
    with open(json_file, "r") as f:
        slices = json.load(f)

    # Set up the colormap so each slice gets its own color
    cmap = plt.get_cmap('viridis')

    # Create one subplot per sensor; squeeze=False keeps axs an array even for a single sensor
    fig, axs = plt.subplots(len(sensors), 1, figsize=(12, 2 * len(sensors)),
                            sharex=True, squeeze=False)
    axs = axs.ravel()

    # Note: use_precise_timestamp is accepted for API compatibility but not used here.
    for idx, sensor in enumerate(sensors):
        # Plot the overlay of the slices
        for slice_idx, slice_dict in enumerate(slices):
            slice_length = len(slice_dict[sensor])
            # Create a timestamp array starting from 0 for each slice (20 ms per sample)
            slice_timestamps = [20 * i for i in range(slice_length)]
            sensor_data = slice_dict[sensor]
            data = pd.DataFrame({sensor: sensor_data}, index=slice_timestamps)
            color = cmap(slice_idx / len(slices))
            axs[idx].plot(data.index, data[sensor], color=color, label=f'Slice {slice_idx + 1}')
        axs[idx].set_ylabel(sensor)

    axs[-1].set_xlabel("Timestamp")
    axs[0].legend()
    return fig


def plot_slices(original_signal, imputed_signal, precise_slice_points,
                normal_slice_points, sample_rate, first_timestamp):
    """Plot an imputed signal, its imputed samples, and the precise/normal slice points."""
    # Note: sample_rate is accepted for API compatibility but not used here.
    plt.figure(figsize=(12, 6))
    plt.plot(imputed_signal.index, imputed_signal, label="Imputed Signal")

    # Find the missing positions and the values predicted for them
    missing_value_indices = original_signal.isna()
    missing_values = original_signal.loc[missing_value_indices]
    predicted_values = imputed_signal.loc[missing_value_indices]

    # Plot the original missing values (NaN, so mainly a legend entry) and the predicted values
    plt.scatter(missing_values.index, missing_values, color='r', marker='x',
                label='Original Missing Values')
    plt.scatter(predicted_values.index, predicted_values, color='r', marker='o',
                label='Predicted Values')

    # Mark slice points relative to the first timestamp; label only the first of each kind
    for index in precise_slice_points:
        plt.axvline(x=first_timestamp + index, color='r', linestyle='--',
                    label='Precise Slice Points' if index == precise_slice_points[0] else "")
    for index in normal_slice_points:
        plt.axvline(x=first_timestamp + index, color='g', linestyle='-',
                    label='Normal Slice Points' if index == normal_slice_points[0] else "")

    plt.legend()
    plt.xlabel("Time (s)")
    plt.ylabel("Signal Amplitude")
    plt.title("Imputed Signal and Slice Points")
    return True
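

# Usage sketch (not part of the original module): illustrates the slice JSON layout the
# plotting helpers appear to assume — a list of dicts, each with a "timestamp" and a
# per-sensor list of samples spaced 20 ms apart. The file name, sensor names ("acc_x",
# "acc_y"), and all values below are made-up placeholders for demonstration only.
if __name__ == "__main__":
    example_slices = [
        {"timestamp": 0, "acc_x": [0.0, 0.1, 0.2, 0.1], "acc_y": [1.0, 0.9, 1.1, 1.0]},
        {"timestamp": 200, "acc_x": [0.3, 0.2, 0.1, 0.0], "acc_y": [1.2, 1.1, 1.0, 0.9]},
    ]
    with open("example_slices.json", "w") as f:
        json.dump(example_slices, f)

    # Concatenated view of one sensor with slice boundaries marked.
    fig1 = plot_sensor_data_from_json("example_slices.json", "acc_x")
    fig1.savefig("acc_x_concatenated.png")

    # Per-sensor overlay of all slices aligned to t = 0.
    fig2 = plot_overlay_data_from_json("example_slices.json", ["acc_x", "acc_y"])
    fig2.savefig("overlay.png")

    # Imputation plot on a synthetic series with gaps; interpolate() stands in for
    # whatever imputation the rest of the project uses.
    original = pd.Series([0.0, None, 0.2, None, 0.4], index=[0, 20, 40, 60, 80])
    imputed = original.interpolate()
    plot_slices(original, imputed, precise_slice_points=[40], normal_slice_points=[60],
                sample_rate=50, first_timestamp=0)
    plt.savefig("imputed.png")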