-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathapp_offline.py
154 lines (133 loc) · 6.84 KB
/
app_offline.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
"""
@author Zhida Li
@email zhidal@sfu.ca
@date Feb. 19, 2022
@version: 1.1.0
@description:
This module contains the function for off-line classification.
It is used by the HTTP 'POST' method for the front-end.
@copyright Copyright (c) Feb. 19, 2022
All Rights Reserved
This Python code (versions 3.6 and newer)
"""
# ==============================================
# bgpGuard off-line module
# ==============================================
# Last modified: June 24, 2022
# Import the built-in libraries
import time
import random
# Import external libraries
import numpy as np
import psutil
from flask_socketio import SocketIO, emit, disconnect
# Import customized libraries
# sys.path.append('./src')
from src.dataDownload import updateMessageName
from src.dataDownload import data_downloader_single
from src.dataDownload import data_downloader_multi
from src.featureExtraction import feature_extractor_single
from src.featureExtraction import feature_extractor_multi
from src.time_tracker import time_tracker_single
from src.label_generation import label_generator
from src.data_partition import data_partition
from src.data_process import normTrainTest
from src.subprocess_cmd import subprocess_cmd
# sys.path.append('./src/VFBLS_v110')
from src.VFBLS_v110.VFBLS_realtime import vfbls_demo
def app_offline_classification(header_offLine, input_exp_key):
    """
    Run the off-line BGP anomaly-classification pipeline and build the
    response context for the front-end POST route.

    :param header_offLine: header string for the off-line route page
    :param input_exp_key: experiment settings list, indexed as
        [site, start_date, end_date, start_date_anomaly, end_date_anomaly,
         start_time_anomaly, end_time_anomaly, cut_pct, ALGO, rnn_seq]
        (note: ALGO at index 8 selects the RNN variant; rnn_seq is at index 9)
    :return: dict context for render_template (POST method)
    """
    print(input_exp_key)
    site = input_exp_key[0]
    start_date, end_date = input_exp_key[1], input_exp_key[2]
    start_date_anomaly, end_date_anomaly = input_exp_key[3], input_exp_key[4]
    start_time_anomaly, end_time_anomaly = input_exp_key[5], input_exp_key[6]
    cut_pct = input_exp_key[7]
    ALGO = input_exp_key[8]
    rnn_seq = int(input_exp_key[9])
    print("--------------------Loading settings successfully-------------")

    # Download raw BGP update files for the date range and extract features.
    collector_ripe = 'rrc04'  # fixed RIPE collector used for downloading
    data_downloader_multi(start_date, end_date, site, collector_ripe)
    output_file_list = feature_extractor_multi(start_date, end_date, site)

    # Label each extracted record by the anomaly window, then split and
    # normalize the train/test sets.
    labels = label_generator(start_date_anomaly, end_date_anomaly,
                             start_time_anomaly, end_time_anomaly, site,
                             output_file_list)
    data_partition(cut_pct, site, output_file_list, labels, rnn_seq)
    normTrainTest(cut_pct, site)

    # Both RNN variants run the identical shell pipeline; only the code and
    # run directory names differ, so dispatch to a single helper.
    if ALGO == 'LSTM and GRU':
        _run_rnn_experiment('RNN_Running_Code', 'RNN_Run', cut_pct, site)
    elif ALGO == 'Bi-LSTM and Bi-GRU':
        _run_rnn_experiment('BiRNN_Running_Code', 'BiRNN_Run', cut_pct, site)

    # Information from back-end to front-end, "Results are available"
    context_offLine = {'result_prediction': input_exp_key,
                       'site_selected': "Results are ready to download!",
                       'header2': header_offLine}
    return context_offLine


def _run_rnn_experiment(code_dir, run_dir, cut_pct, site):
    """
    Execute one RNN experiment pipeline: stage the data splits, run the
    training/evaluation scripts, collect results into ./STAT/, and clean up.

    :param code_dir: top-level code directory ('RNN_Running_Code' or 'BiRNN_Running_Code')
    :param run_dir: run sub-directory ('RNN_Run' or 'BiRNN_Run')
    :param cut_pct: train/test cut percentage (part of the split-file names)
    :param site: data site identifier (part of the split-file names)
    """
    # SECURITY NOTE(review): cut_pct/site originate from the front-end form
    # and are interpolated into a shell command string executed by
    # subprocess_cmd — validate/whitelist them upstream to avoid injection.
    print("--------------------RNNs Experiment-Begin--------------------------")
    # Copy the normalized splits into the run dataset, rename them to the
    # fixed train.csv/test.csv names, run the experiment, collect results,
    # and generate the representation table.
    subprocess_cmd("cd src/; \
    cp ./data_split/train_%s_%s_n.csv ./data_split/test_%s_%s_n.csv ./%s/%s/dataset/ ; \
    cd %s/%s/dataset/; \
    mv train_%s_%s_n.csv train.csv; mv test_%s_%s_n.csv test.csv; \
    cd ..; cd ..; \
    chmod +x integrate_run.sh; sh ./integrate_run.sh ; \
    cd %s/; sh ./collect.sh; \
    cp -r res_acc res_run ../data_representation/ ; \
    cd .. ; cd data_representation/ ; \
    python TableGenerator.py; " \
                   % (cut_pct, site, cut_pct, site, code_dir, run_dir,
                      code_dir, run_dir, cut_pct, site, cut_pct, site, run_dir))
    print("--------------------RNNs Experiment-end----------------------------")
    # Move the aggregated results table into ./STAT/ under a per-run name.
    subprocess_cmd("cd src/; \
    mv ./%s/data_representation/data_representation_table.csv ./STAT/ ; \
    mv ./STAT/data_representation_table.csv ./STAT/results_%s_%s.csv" \
                   % (code_dir, cut_pct, site))
    # Remove generated folders
    subprocess_cmd("cd src/; \
    cd %s/%s/; \
    rm -rf ./experiment/ ./res_acc/ ./res_run/ ./tmp/" % (code_dir, run_dir))
"""
Data format received from the front-end:
ImmutableMultiDict([('site_choice', 'ripe'), ('start_date_key', '20050523'), ('end_date_key', '20050527'),
('start_date_anomaly_key', '20050525'), ('end_date_anomaly_key', '20050525'),
('start_time_anomaly_key', '0400'), ('end_time_anomaly_key', '1159'), ('cut_pct_key', '82'), ('rnn_seq_key', '10')])
"""