rdpahalavan
commited on
Commit
•
5d0c5dc
1
Parent(s):
18caf57
Create CIC-IDS2017.py
Browse files- CIC-IDS2017.py +106 -0
CIC-IDS2017.py
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2023 The HuggingFace Datasets Authors and the current dataset script contributor.
|
2 |
+
#
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
#
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
#
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
"""CIC-IDS2017"""
|
16 |
+
|
17 |
+
import os
|
18 |
+
import csv
|
19 |
+
import pandas as pd
|
20 |
+
import pyarrow.csv
|
21 |
+
import datasets
|
22 |
+
|
23 |
+
# Human-readable description for the dataset card (intentionally left empty).
_DESCRIPTION = ""

# Homepage URL for the dataset (intentionally left empty).
_HOMEPAGE = ""

# Maps each builder-config name to the relative path of its CSV data file.
# NOTE(review): paths are relative — presumably resolved against the dataset
# repository root by the `datasets` loader; confirm when running locally.
_URLS = {
    "Network-Flows": "Network-Flows/CICIDS-Flow.csv",
    "Packet-Bytes": "Packet-Bytes/Packet_Bytes_File_1.csv",
    "Packet-Fields": "Packet-Fields/Packet_Fields_File_1.csv",
    "Payload-Bytes": "Payload-Bytes/Payload_Bytes_File_1.csv",
}
|
33 |
+
|
34 |
+
class NewDataset(datasets.GeneratorBasedBuilder):
    """Builder for the CIC-IDS2017 network-intrusion-detection CSV corpus.

    Each config ("Network-Flows", "Packet-Bytes", "Packet-Fields",
    "Payload-Bytes") is backed by a single CSV file listed in ``_URLS``.
    Column names are discovered from the CSV header at ``_info()`` time and
    declared as string features; known numeric columns are coerced to
    int/float while generating examples.
    """

    VERSION = datasets.Version("1.0.0")

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="Network-Flows", version=VERSION, description="Network-Flows"),
        datasets.BuilderConfig(name="Packet-Bytes", version=VERSION, description="Packet-Bytes"),
        datasets.BuilderConfig(name="Packet-Fields", version=VERSION, description="Packet-Fields"),
        datasets.BuilderConfig(name="Payload-Bytes", version=VERSION, description="Payload-Bytes"),
    ]

    DEFAULT_CONFIG_NAME = "Network-Flows"

    def _info(self):
        """Build DatasetInfo with features read from the CSV header.

        Uses ``pyarrow.csv.open_csv`` so only the header/first block is
        streamed, not the whole (potentially large) file.
        """
        filepath = _URLS[self.config.name]
        csv_file = pyarrow.csv.open_csv(filepath)
        columns = csv_file.schema.names
        # All features are declared as strings even though numeric columns are
        # emitted as int/float by _generate_examples.
        # NOTE(review): this mismatch is inherited from the original script —
        # confirm the `datasets` cast accepts it.
        features = {column: datasets.Value("string") for column in columns}
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(features),
            homepage=_HOMEPAGE,
        )

    def _split_generators(self, dl_manager):
        """Return a single TRAIN split pointing at the config's CSV file."""
        url = _URLS[self.config.name]
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"filepath": url},
            ),
        ]

    def _generate_examples(self, filepath):
        """Yield ``(key, row)`` pairs from the CSV at *filepath*.

        Values in known numeric columns are parsed to int (when integral) or
        float; unparseable numeric cells fall back to 0 (best effort,
        preserving the original script's behavior). All other columns are
        passed through as strings.
        """
        # frozenset gives O(1) membership tests; the original used a
        # ~3300-element list, making each cell lookup a linear scan.
        numerical_columns = frozenset(
            ['flow_id', 'packet_id', 'source_port', 'destination_port', 'payload_length']
            + [f'payload_byte_{i}' for i in range(1, 1600)]
            + ['Flow Duration', 'Total Fwd Packets', 'Total Backward Packets',
               'Total Length of Fwd Packets', 'Total Length of Bwd Packets',
               'Fwd Packet Length Max', 'Fwd Packet Length Min',
               'Fwd Packet Length Mean', 'Fwd Packet Length Std',
               'Bwd Packet Length Max', 'Bwd Packet Length Min',
               'Bwd Packet Length Mean', 'Bwd Packet Length Std', 'Flow Bytes/s',
               'Flow Packets/s', 'Flow IAT Mean', 'Flow IAT Std', 'Flow IAT Max',
               'Flow IAT Min', 'Fwd IAT Total', 'Fwd IAT Mean', 'Fwd IAT Std',
               'Fwd IAT Max', 'Fwd IAT Min', 'Bwd IAT Total', 'Bwd IAT Mean',
               'Bwd IAT Std', 'Bwd IAT Max', 'Bwd IAT Min', 'Fwd PSH Flags',
               'Bwd PSH Flags', 'Fwd URG Flags', 'Bwd URG Flags', 'Fwd Header Length',
               'Bwd Header Length', 'Fwd Packets/s', 'Bwd Packets/s',
               'Min Packet Length', 'Max Packet Length', 'Packet Length Mean',
               'Packet Length Std', 'Packet Length Variance', 'FIN Flag Count',
               'SYN Flag Count', 'RST Flag Count', 'PSH Flag Count', 'ACK Flag Count',
               'URG Flag Count', 'CWE Flag Count', 'ECE Flag Count', 'Down/Up Ratio',
               'Average Packet Size', 'Avg Fwd Segment Size', 'Avg Bwd Segment Size',
               'Fwd Avg Bytes/Bulk', 'Fwd Avg Packets/Bulk', 'Fwd Avg Bulk Rate',
               'Bwd Avg Bytes/Bulk', 'Bwd Avg Packets/Bulk', 'Bwd Avg Bulk Rate',
               'Subflow Fwd Packets', 'Subflow Fwd Bytes', 'Subflow Bwd Packets',
               'Subflow Bwd Bytes', 'Init_Win_bytes_forward',
               'Init_Win_bytes_backward', 'act_data_pkt_fwd', 'min_seg_size_forward',
               'Active Mean', 'Active Std', 'Active Max', 'Active Min', 'Idle Mean',
               'Idle Std', 'Idle Max', 'Idle Min']
            + [f'packet_byte_{i}' for i in range(1, 1601)]
        )
        with open(filepath, encoding="utf-8") as f:
            reader = csv.DictReader(f)
            for key, row in enumerate(reader):
                processed_row = {}
                for column, value in row.items():
                    if column in numerical_columns:
                        try:
                            # BUG FIX: the original did `int(value)` when the
                            # parsed float was integral, but int("3.0") raises
                            # ValueError, silently mapping e.g. "3.0" to 0.
                            # Parse as float once, then narrow to int.
                            number = float(value)
                            processed_value = int(number) if number.is_integer() else number
                        except ValueError:
                            # Unparseable cell: keep the original best-effort
                            # fallback of 0 rather than failing the whole file.
                            processed_value = 0
                    else:
                        processed_value = value
                    processed_row[column] = processed_value
                yield key, processed_row
|