# Machine-learning-based IDS implemented as a Ryu application
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score  # used by check_accuracy below
from operator import attrgetter
from ryu.app import simple_switch_13
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.lib import hub

# Instantiate one Random Forest classifier per statistics type
rf_flow = RandomForestClassifier(n_estimators=10)
rf_port = RandomForestClassifier(n_estimators=10)
rf_group = RandomForestClassifier(n_estimators=10)
flag_flow = True
flag_port = True
flag_group = True

class IDS_Application(simple_switch_13.SimpleSwitch13):

    def __init__(self, *args, **kwargs):
        super(IDS_Application, self).__init__(*args, **kwargs)
        self.datapaths = {}
        # Create CSV files (with headers and sample rows) to store the test data
        file = open("/home/arsheen/Downloads/MalPredictFlowStatsfile.txt", "a+")
        file.write('dp_id,in_port,eth_dst,packets,bytes\n')
        file.write('516,1,1,10002203,238000000\n')
        file.write('516,1,1,40022000,2380000000\n')
        file.close()
        file = open("/home/arsheen/Downloads/MalPredictPortStatsfile.txt", "a+")
        file.write('dp_id,port_no,rx_bytes,rx_pkts,tx_bytes,tx_pkts\n')
        file.write('1,2,10,10,10,10\n')
        file.close()
        self.monitor_thread = hub.spawn(self.main)

    # Asynchronous message to inform the controller of the status of links in the network
    @set_ev_cls(ofp_event.EventOFPStateChange, [MAIN_DISPATCHER, DEAD_DISPATCHER])
    def state_change_handler(self, ev):
        datapath = ev.datapath
        if ev.state == MAIN_DISPATCHER:
            if datapath.id not in self.datapaths:
                self.datapaths[datapath.id] = datapath
        elif ev.state == DEAD_DISPATCHER:
            if datapath.id in self.datapaths:
                del self.datapaths[datapath.id]

    def main(self):
        training_flag = True
        while True:
            # Train the model once
            if training_flag:
                self.IDS_training()
                training_flag = False
            for dp in self.datapaths.values():
                # Give a 30 second wait time to create the topology
                if training_flag:
                    hub.sleep(30)
                self.request_stats(dp)
            # Run the IDS application every 30 seconds
            self.IDS_impl()
            hub.sleep(30)

    # Remove unnecessary columns, and calculate and store only the number of packets and bytes
    # that arrived in the current interval (instead of the cumulative count) for each flow entry
    def data_cleaning_flow(self, input_path, output_path):
        file = open(input_path, "r")
        file1 = open(output_path, "a+")
        c = [0, 0, 0, 0, 0]
        dict = {}
        first_line_flag_flow = True
        for line in file:
            a = line.split(",")
            if first_line_flag_flow:
                # Header line: keep only the packets and bytes columns
                file1.write(str(a[3]) + "," + str(a[4]))
                first_line_flag_flow = False
            else:
                key = str(a[0]) + "_" + str(a[1]) + "_" + str(a[2])
                if key in dict:
                    ab = dict[key].split(",")
                    for i in range(len(ab)):
                        if i == 2:
                            # skip the eth_dst column; deltas are only computed for the counters
                            continue
                        else:
                            c[i] = int(a[i]) - int(ab[i])
                    file1.write(str(c[3]) + "," + str(c[4]) + "\n")
                    dict[key] = (str(a[0]) + "," + str(a[1]) + "," + str(a[2]) + "," + str(a[3]) + "," + str(a[4]))
                else:
                    dict[key] = (str(a[0]) + "," + str(a[1]) + "," + str(a[2]) + "," + str(a[3]) + "," + str(a[4]))
                    file1.write(str(a[3]) + "," + str(a[4]) + "\n")
        file.close()
        file1.close()
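
    # Worked example (illustrative values, not from the original data): given two cumulative
    # samples for the same (dp_id, in_port, eth_dst) key,
    #     516,1,00:00:00:00:00:01,100,8000
    #     516,1,00:00:00:00:00:01,150,12000
    # the cleaned file receives 100,8000 for the first sample and the per-interval delta
    # 50,4000 for the second.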

    # Remove unnecessary columns, and calculate and store only the number of packets and bytes
    # that arrived in the current interval (instead of the cumulative count) for each port entry
    def data_cleaning_port(self, input_path, output_path):
        file = open(input_path, "r")
        file1 = open(output_path, "a+")
        c = [0, 0, 0, 0, 0, 0]
        dict = {}
        first_line_flag_port = True
        for line in file:
            a = line.split(",")
            if first_line_flag_port:
                # Header line: keep only the rx/tx byte and packet columns
                file1.write(str(a[2]) + "," + str(a[3]) + "," + str(a[4]) + "," + str(a[5]))
                first_line_flag_port = False
            else:
                key = str(a[0]) + "_" + str(a[1])
                if key in dict:
                    ab = dict[key].split(",")
                    for i in range(len(ab)):
                        c[i] = int(a[i]) - int(ab[i])
                    file1.write(str(c[2]) + "," + str(c[3]) + "," + str(c[4]) + "," + str(c[5]) + "\n")
                    dict[key] = (str(a[0]) + "," + str(a[1]) + "," + str(a[2]) + "," + str(a[3]) + "," + str(a[4]) + "," + str(a[5]))
                else:
                    dict[key] = (str(a[0]) + "," + str(a[1]) + "," + str(a[2]) + "," + str(a[3]) + "," + str(a[4]) + "," + str(a[5]))
                    file1.write(str(a[2]) + "," + str(a[3]) + "," + str(a[4]) + "," + str(a[5]) + "\n")
        file.close()
        file1.close()

    # Model training
    def IDS_training(self):
        # Read cleaned flow statistics data
        self.data_cleaning_flow('/home/arsheen/Downloads/FlowStatsfile.txt', '/home/arsheen/Downloads/FlowStatsfile_cleaned.txt')
        flow_without_key = pd.read_csv('/home/arsheen/Downloads/MalFlowStatsfile_cleaned.txt')
        flow_stat_input_target = pd.read_csv('/home/arsheen/Downloads/MalFlowStatsfile_target.txt')
        # Read cleaned port statistics data
        self.data_cleaning_port('/home/arsheen/Downloads/PortStatsfile.txt', '/home/arsheen/Downloads/PortStatsfile_cleaned.txt')
        port_without_key = pd.read_csv('/home/arsheen/Downloads/MalPortStatsfile_cleaned.txt')
        port_stat_input_target = pd.read_csv('/home/arsheen/Downloads/MalPortStatsfile_target.txt')
        # Convert flow statistics data from string to int and fit it on the machine learning model
        flow_without_key = flow_without_key.apply(pd.to_numeric)
        flow_stat_input_target = flow_stat_input_target.apply(pd.to_numeric, errors='ignore')
        rf_flow.fit(flow_without_key, flow_stat_input_target.values.ravel())
        # Convert port statistics data from string to int and fit it on the machine learning model
        port_without_key = port_without_key.apply(pd.to_numeric)
        port_stat_input_target = port_stat_input_target.apply(pd.to_numeric)
        rf_port.fit(port_without_key, port_stat_input_target.values.ravel())

    # Check for the accuracy of the model
    def check_accuracy(self, model, input_data, input_target):
        cross_val_score_stat = cross_val_score(model, input_data, input_target, scoring='accuracy', cv=10)
        mean_cross_val_score = cross_val_score_stat.mean()
        print(mean_cross_val_score)
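
    # Hedged usage sketch (not invoked anywhere in the original code): after IDS_training() has
    # run, accuracy could be estimated with, for example,
    #     self.check_accuracy(rf_flow, flow_features, flow_labels.values.ravel())
    # where flow_features and flow_labels are placeholder names for the same feature and target
    # DataFrames that were used to fit rf_flow.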

    # IDS implementation: monitoring real-time traffic and predicting whether it is malicious
    def IDS_impl(self):
        self.data_cleaning_flow('/home/arsheen/Downloads/PredictFlowStatsfile.txt', '/home/arsheen/Downloads/PredictFlowStatsfile_cleaned.txt')
        self.data_cleaning_port('/home/arsheen/Downloads/PredictPortStatsfile.txt', '/home/arsheen/Downloads/PredictPortStatsfile_cleaned.txt')
        # Read the cleaned files
        flow_predict_without_key = pd.read_csv('/home/arsheen/Downloads/PredictFlowStatsfile_cleaned.txt')
        port_predict_without_key = pd.read_csv('/home/arsheen/Downloads/PredictPortStatsfile_cleaned.txt')
        # Convert to numeric form
        flow_predict_without_key = flow_predict_without_key.apply(pd.to_numeric)
        port_predict_without_key = port_predict_without_key.apply(pd.to_numeric)
        # Convert to list form for iteration
        flow_predict_list = list(flow_predict_without_key.values.tolist())
        # Use the test data to check for intrusions
        for i in flow_predict_list:
            if i == ['packets', 'bytes']:
                continue
            else:
                temp = list(i)
                test = []
                test.append(temp)
                # Binary classification
                flag_flow = rf_flow.predict(test)
                if flag_flow == 1:
                    self.anomaly_specific_actions(True, True)
                else:
                    self.anomaly_specific_actions(False, True)
        port_predict_list = list(port_predict_without_key.values.tolist())
        for j in port_predict_list:
            if j == ['rx_bytes', 'rx_pkts', 'tx_bytes', 'tx_pkts']:
                continue
            else:
                temp = list(j)
                test = []
                test.append(temp)
                # Binary classification
                flag_port = rf_port.predict(test)
                if flag_port == 1:
                    self.anomaly_specific_actions(True, True)
                else:
                    self.anomaly_specific_actions(True, False)

    # Controller-to-switch request for statistics
    def request_stats(self, datapath):
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        # to collect dp_id, pkt_count, byte_count
        req = parser.OFPFlowStatsRequest(datapath)
        datapath.send_msg(req)
        # to collect dp_id, port_no, rx_bytes, rx_pkts, tx_bytes, tx_pkts
        req = parser.OFPPortStatsRequest(datapath, 0, ofproto.OFPP_ANY)
        datapath.send_msg(req)

    # Switch-to-controller reply with statistics
    @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)
    def flow_stats_reply_handler(self, ev):
        body = ev.msg.body
        file = open("MalPredictFlowStatsfile.txt", "a+")
        for stat in sorted([flow for flow in body if flow.priority == 1],
                           key=lambda flow: (flow.match['in_port'], flow.match['eth_dst'])):
            file.write("\n" + str(ev.msg.datapath.id) + "," + str(stat.match['in_port']) + "," + str(stat.match['eth_dst']) + "," +
                       str(stat.packet_count) + "," + str(stat.byte_count))
        file.close()

    @set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER)
    def port_stats_reply_handler(self, ev):
        body = ev.msg.body
        file = open("MalPredictPortStatsfile.txt", "a+")
        for stat in sorted(body, key=attrgetter('port_no')):
            file.write("\n" + str(ev.msg.datapath.id) + "," + str(stat.port_no) + "," +
                       str(stat.rx_bytes) + "," + str(stat.rx_packets) + "," + str(stat.tx_bytes) + "," + str(stat.tx_packets))
        file.close()

    # Anomaly-specific actions
    def anomaly_specific_actions(self, flag_flow, flag_port):
        if (not flag_flow) or (not flag_port):
            self.logger.debug("Intrusion Detected")
            print("Intrusion Detected")
        else:
            self.logger.debug("Everything is awesome")
            print("Everything is awesome")