Spaces:
Runtime error
Runtime error
Commit
•
aaf6ffd
0
Parent(s):
Duplicate from DHEIVER/timeseries-anomaly-detection-autoencoders
Browse files

Co-authored-by: Dheiver Santos <DHEIVER@users.noreply.huggingface.co>
- .gitattributes +27 -0
- README.md +13 -0
- app.py +85 -0
- art_daily_jumpsup.csv +0 -0
- example_1.csv +44 -0
- example_2.csv +43 -0
- requirements.txt +5 -0
- scaler.json +1 -0
.gitattributes
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
19 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
26 |
+
*.zstandard filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
title: Timeseries Anomaly Detection
|
3 |
+
emoji: 🌍
|
4 |
+
colorFrom: blue
|
5 |
+
colorTo: indigo
|
6 |
+
sdk: gradio
|
7 |
+
sdk_version: 3.0.1
|
8 |
+
app_file: app.py
|
9 |
+
pinned: false
|
10 |
+
duplicated_from: DHEIVER/timeseries-anomaly-detection-autoencoders
|
11 |
+
---
|
12 |
+
|
13 |
+
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
app.py
ADDED
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from huggingface_hub import from_pretrained_keras
|
3 |
+
import pandas as pd
|
4 |
+
import numpy as np
|
5 |
+
import json
|
6 |
+
from matplotlib import pyplot as plt
|
7 |
+
|
8 |
+
# Load the training-time normalization parameters (mean/std) and the anomaly
# threshold.  A context manager guarantees the file handle is closed even if
# json.load() raises; the original left the handle open for the process
# lifetime.
with open('scaler.json') as f:
    scaler = json.load(f)
|
10 |
+
|
11 |
+
# Length of one sliding window fed to the autoencoder.  288 samples —
# presumably one day of 5-minute readings; confirm against the training data.
TIME_STEPS = 288

# Generated training sequences for use in the model.
def create_sequences(values, time_steps=TIME_STEPS):
    """Slice *values* into all overlapping windows of length *time_steps*.

    Returns a stacked array of shape (len(values) - time_steps + 1,
    time_steps, ...).
    """
    windows = [
        values[start : start + time_steps]
        for start in range(len(values) - time_steps + 1)
    ]
    return np.stack(windows)
|
19 |
+
|
20 |
+
|
21 |
+
def normalize_data(data):
    """Standardize *data* using the training-time statistics from scaler.json.

    Relies on the module-level ``scaler`` dict loaded at import time.
    """
    mean, std = scaler["mean"], scaler["std"]
    return (data - mean) / std
|
24 |
+
|
25 |
+
def plot_test_data(df_test_value):
    """Plot the (normalized) input series and return the matplotlib figure."""
    figure, axis = plt.subplots(figsize=(12, 6))
    df_test_value.plot(legend=False, ax=axis)
    axis.set_xlabel("Time")
    axis.set_ylabel("Value")
    axis.set_title("Input Test Data")
    return figure
|
32 |
+
|
33 |
+
def get_anomalies(df_test_value):
    """Return a boolean array flagging anomalous sliding windows.

    A window is anomalous when its reconstruction mean-absolute-error
    exceeds the threshold stored in scaler.json.
    """
    # Create sequences from test values.
    x_test = create_sequences(df_test_value.values)

    # Load the pretrained model only once and reuse it across requests;
    # the original re-fetched it from the Hub on every call.
    model = getattr(get_anomalies, "_model", None)
    if model is None:
        model = from_pretrained_keras("keras-io/timeseries-anomaly-detection")
        get_anomalies._model = model

    # Get test MAE loss: reconstruction error averaged over each window.
    x_test_pred = model.predict(x_test)
    test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
    test_mae_loss = test_mae_loss.reshape((-1))

    # Detect all the samples which are anomalies.
    anomalies = test_mae_loss > scaler["threshold"]
    return anomalies
|
46 |
+
|
47 |
+
def plot_anomalies(df_test_value, data, anomalies):
    """Plot the raw series with anomalous points overlaid in red.

    Data point i is marked anomalous when the windows covering samples
    (i - TIME_STEPS + 1) through i are all flagged.
    """
    # NOTE(review): the slice below stops just short of data_idx itself;
    # this mirrors the upstream keras.io example — confirm if intentional.
    anomalous_data_indices = [
        data_idx
        for data_idx in range(TIME_STEPS - 1, len(df_test_value) - TIME_STEPS + 1)
        if np.all(anomalies[data_idx - TIME_STEPS + 1 : data_idx])
    ]
    df_subset = data.iloc[anomalous_data_indices]
    figure, axis = plt.subplots(figsize=(12, 6))
    data.plot(legend=False, ax=axis)
    df_subset.plot(legend=False, ax=axis, color="r")
    axis.set_xlabel("Time")
    axis.set_ylabel("Value")
    axis.set_title("Anomalous Data Points")
    return figure
|
61 |
+
|
62 |
+
def master(file):
    """Gradio callback: read an uploaded CSV and return the anomaly plot.

    Expects a CSV with a ``timestamp`` index column and one value column.
    """
    # gr.File may hand us a tempfile wrapper rather than a path; fall back
    # to its .name attribute so pandas always receives something it can open.
    path = getattr(file, "name", file)
    data = pd.read_csv(path, parse_dates=True, index_col="timestamp")
    df_test_value = normalize_data(data)
    # The original also drew the raw input via plot_test_data() but never
    # returned or closed that figure, leaking one per request — dropped.
    # predict
    anomalies = get_anomalies(df_test_value)
    # plot anomalous data points
    plot2 = plot_anomalies(df_test_value, data, anomalies)
    return plot2
|
73 |
+
|
74 |
+
# master() returns a matplotlib Figure, so the output must be a Plot
# component — feeding a Figure to gr.outputs.Image() fails at runtime
# (the deprecated gr.outputs/gr.inputs namespaces are also avoided here).
iface = gr.Interface(
    fn=master,
    inputs=gr.File(label="CSV File"),
    outputs=gr.Plot(),
    examples=["art_daily_jumpsup.csv"],
    title="Timeseries Anomaly Detection Using an Autoencoder",
    description="Anomaly detection of timeseries data.",
)

iface.launch()
|
art_daily_jumpsup.csv
ADDED
The diff for this file is too large to render.
See raw diff
|
|
example_1.csv
ADDED
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
timestamp,value
|
2 |
+
2023-07-01 09:00:00,0.256
|
3 |
+
2023-07-01 09:01:00,0.342
|
4 |
+
2023-07-01 09:02:00,0.421
|
5 |
+
2023-07-01 09:03:00,0.189
|
6 |
+
2023-07-01 09:04:00,0.289
|
7 |
+
2023-07-01 09:05:00,0.512
|
8 |
+
2023-07-01 09:06:00,0.367
|
9 |
+
2023-07-01 09:07:00,0.428
|
10 |
+
2023-07-01 09:08:00,0.196
|
11 |
+
2023-07-01 09:09:00,0.317
|
12 |
+
2023-07-01 09:10:00,0.425
|
13 |
+
2023-07-01 09:11:00,0.274
|
14 |
+
2023-07-01 09:12:00,0.312
|
15 |
+
2023-07-01 09:13:00,0.397
|
16 |
+
2023-07-01 09:14:00,0.251
|
17 |
+
2023-07-01 09:15:00,0.361
|
18 |
+
2023-07-01 09:16:00,0.429
|
19 |
+
2023-07-01 09:17:00,0.185
|
20 |
+
2023-07-01 09:18:00,0.327
|
21 |
+
2023-07-01 09:19:00,0.412
|
22 |
+
2023-07-01 09:20:00,0.197
|
23 |
+
2023-07-01 09:21:00,0.303
|
24 |
+
2023-07-01 09:22:00,0.439
|
25 |
+
2023-07-01 09:23:00,0.254
|
26 |
+
2023-07-01 09:24:00,0.367
|
27 |
+
2023-07-01 09:25:00,0.421
|
28 |
+
2023-07-01 09:26:00,0.193
|
29 |
+
2023-07-01 09:27:00,0.297
|
30 |
+
2023-07-01 09:28:00,0.416
|
31 |
+
2023-07-01 09:29:00,0.224
|
32 |
+
2023-07-01 09:30:00,0.359
|
33 |
+
2023-07-01 09:31:00,0.428
|
34 |
+
2023-07-01 09:32:00,0.165
|
35 |
+
2023-07-01 09:33:00,0.338
|
36 |
+
2023-07-01 09:34:00,0.405
|
37 |
+
2023-07-01 09:35:00,0.217
|
38 |
+
2023-07-01 09:36:00,0.292
|
39 |
+
2023-07-01 09:37:00,0.434
|
40 |
+
2023-07-01 09:38:00,0.235
|
41 |
+
2023-07-01 09:39:00,0.341
|
42 |
+
2023-07-01 09:40:00,0.398
|
43 |
+
2023-07-01 09:41:00,0.203
|
44 |
+
2023-07-01 09:42:00,0.282
|
example_2.csv
ADDED
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
timestamp,value
|
2 |
+
2023-07-01 08:00:00,10.2
|
3 |
+
2023-07-01 08:01:00,10.5
|
4 |
+
2023-07-01 08:02:00,11.1
|
5 |
+
2023-07-01 08:03:00,9.8
|
6 |
+
2023-07-01 08:04:00,11.5
|
7 |
+
2023-07-01 08:05:00,10.9
|
8 |
+
2023-07-01 08:06:00,10.7
|
9 |
+
2023-07-01 08:07:00,11.2
|
10 |
+
2023-07-01 08:08:00,10.4
|
11 |
+
2023-07-01 08:09:00,10.6
|
12 |
+
2023-07-01 08:10:00,11.3
|
13 |
+
2023-07-01 08:11:00,10.8
|
14 |
+
2023-07-01 08:12:00,10.5
|
15 |
+
2023-07-01 08:13:00,10.2
|
16 |
+
2023-07-01 08:14:00,11.1
|
17 |
+
2023-07-01 08:15:00,10.3
|
18 |
+
2023-07-01 08:16:00,10.6
|
19 |
+
2023-07-01 08:17:00,11.0
|
20 |
+
2023-07-01 08:18:00,10.8
|
21 |
+
2023-07-01 08:19:00,10.4
|
22 |
+
2023-07-01 08:20:00,11.2
|
23 |
+
2023-07-01 08:21:00,10.7
|
24 |
+
2023-07-01 08:22:00,10.5
|
25 |
+
2023-07-01 08:23:00,10.9
|
26 |
+
2023-07-01 08:24:00,10.2
|
27 |
+
2023-07-01 08:25:00,10.8
|
28 |
+
2023-07-01 08:26:00,11.3
|
29 |
+
2023-07-01 08:27:00,10.6
|
30 |
+
2023-07-01 08:28:00,10.3
|
31 |
+
2023-07-01 08:29:00,11.1
|
32 |
+
2023-07-01 08:30:00,10.7
|
33 |
+
2023-07-01 08:31:00,10.9
|
34 |
+
2023-07-01 08:32:00,11.2
|
35 |
+
2023-07-01 08:33:00,10.4
|
36 |
+
2023-07-01 08:34:00,10.2
|
37 |
+
2023-07-01 08:35:00,10.6
|
38 |
+
2023-07-01 08:36:00,11.0
|
39 |
+
2023-07-01 08:37:00,10.5
|
40 |
+
2023-07-01 08:38:00,10.8
|
41 |
+
2023-07-01 08:39:00,11.1
|
42 |
+
2023-07-01 08:40:00,10.3
|
43 |
+
2023-07-01 08:41:00,10.7
|
requirements.txt
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
gradio
|
2 |
+
pandas
|
3 |
+
numpy
|
4 |
+
matplotlib
|
5 |
+
tensorflow
|
scaler.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"mean": 42.43835333580657, "std": 28.07712228126252, "threshold": 0.1001741920131276}
|