
rename interval statistics columns

Jens Keim committed 5 years ago (commit fcc29f1aee)
3 changed files with 68 additions and 68 deletions
  1. code/Core/Statistics.py (+20, -20)
  2. code_boost/src/cxx/statistics_db.cpp (+47, -47)
  3. code_boost/src/cxx/statistics_db.h (+1, -1)

code/Core/Statistics.py (+20, -20)

@@ -239,7 +239,7 @@ class Statistics:
             result += ", " + name
 
         interval_stats = self.stats_db.process_interval_statistics_query(
-            "SELECT {} FROM %s ORDER BY starttimestamp ASC".format(result),
+            "SELECT {} FROM %s ORDER BY first_pkt_timestamp ASC".format(result),
             table_name)
 
         inverted_table = {}
@@ -336,7 +336,7 @@ class Statistics:
         :return: normalized packet rates for each time interval.
         """
         result = self.stats_db.process_interval_statistics_query(
-            "SELECT lastPktTimestamp,pktsCount FROM %s ORDER BY lastPktTimestamp")
+            "SELECT last_pkt_timestamp,pkts_count FROM %s ORDER BY last_pkt_timestamp")
         # print(result)
         bg_interval_pps = []
         complement_interval_pps = []
@@ -381,7 +381,7 @@ class Statistics:
             return values, freq_output
 
         # Payload Tests
-        sum_payload_count = self.stats_db.process_interval_statistics_query("SELECT sum(payloadCount) FROM %s")
+        sum_payload_count = self.stats_db.process_interval_statistics_query("SELECT sum(payload_count) FROM %s")
         pkt_count = self.stats_db.process_user_defined_query("SELECT packetCount FROM file_statistics")
         if sum_payload_count and pkt_count:
             payload_ratio = 0
@@ -392,9 +392,9 @@ class Statistics:
 
         # TCP checksum Tests
         incorrect_checksum_count = self.stats_db.process_interval_statistics_query(
-            "SELECT sum(incorrectTCPChecksumCount) FROM %s")
+            "SELECT sum(incorrect_tcp_checksum_count) FROM %s")
         correct_checksum_count = self.stats_db.process_interval_statistics_query(
-            "SELECT avg(correctTCPChecksumCount) FROM %s")
+            "SELECT avg(correct_tcp_checksum_count) FROM %s")
         if incorrect_checksum_count and correct_checksum_count:
             incorrect_checksum_ratio = 0
             if (incorrect_checksum_count[0][0] + correct_checksum_count[0][0]) != 0:
@@ -413,7 +413,7 @@ class Statistics:
         ip_src_entropy, ip_src_norm_entropy = self.calculate_entropy(src_frequency, True)
         ip_dst_entropy, ip_dst_norm_entropy = self.calculate_entropy(dst_frequency, True)
 
-        new_ip_count = self.stats_db.process_interval_statistics_query("SELECT newIPCount FROM %s")
+        new_ip_count = self.stats_db.process_interval_statistics_query("SELECT ip_novel_count FROM %s")
         ip_novels_per_interval, ip_novels_per_interval_frequency = count_frequncy(new_ip_count)
         ip_novelty_dist_entropy = self.calculate_entropy(ip_novels_per_interval_frequency)
 
@@ -439,7 +439,7 @@ class Statistics:
         for row in result:
             frequency.append(row[1])
         ttl_entropy, ttl_norm_entropy = self.calculate_entropy(frequency, True)
-        new_ttl_count = self.stats_db.process_interval_statistics_query("SELECT newTTLCount FROM %s")
+        new_ttl_count = self.stats_db.process_interval_statistics_query("SELECT ttl_novel_count FROM %s")
         ttl_novels_per_interval, ttl_novels_per_interval_frequency = count_frequncy(new_ttl_count)
         ttl_novelty_dist_entropy = self.calculate_entropy(ttl_novels_per_interval_frequency)
 
@@ -449,7 +449,7 @@ class Statistics:
         for row in result:
             frequency.append(row[1])
         win_entropy, win_norm_entropy = self.calculate_entropy(frequency, True)
-        new_win_size_count = self.stats_db.process_interval_statistics_query("SELECT newWinSizeCount FROM %s")
+        new_win_size_count = self.stats_db.process_interval_statistics_query("SELECT win_size_novel_count FROM %s")
         win_novels_per_interval, win_novels_per_interval_frequency = count_frequncy(new_win_size_count)
         win_novelty_dist_entropy = self.calculate_entropy(win_novels_per_interval_frequency)
 
@@ -460,7 +460,7 @@ class Statistics:
         for row in result:
             frequency.append(row[1])
         tos_entropy, tos_norm_entropy = self.calculate_entropy(frequency, True)
-        new_tos_count = self.stats_db.process_interval_statistics_query("SELECT newToSCount FROM %s")
+        new_tos_count = self.stats_db.process_interval_statistics_query("SELECT tos_novel_count FROM %s")
         tos_novels_per_interval, tos_novels_per_interval_frequency = count_frequncy(new_tos_count)
         tos_novelty_dist_entropy = self.calculate_entropy(tos_novels_per_interval_frequency)
 
@@ -471,7 +471,7 @@ class Statistics:
         for row in result:
             frequency.append(row[1])
         mss_entropy, mss_norm_entropy = self.calculate_entropy(frequency, True)
-        new_mss_count = self.stats_db.process_interval_statistics_query("SELECT newMSSCount FROM %s")
+        new_mss_count = self.stats_db.process_interval_statistics_query("SELECT mss_novel_count FROM %s")
         mss_novels_per_interval, mss_novels_per_interval_frequency = count_frequncy(new_mss_count)
         mss_novelty_dist_entropy = self.calculate_entropy(mss_novels_per_interval_frequency)
 
@@ -1245,7 +1245,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, pktsCount FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, pkts_count FROM %s ORDER BY last_pkt_timestamp")
             title = "Packet Rate"
             x_label = "Time Interval"
             y_label = "Number of Packets"
@@ -1259,7 +1259,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, ipSrcEntropy FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, ip_src_entropy FROM %s ORDER BY last_pkt_timestamp")
             title = "Source IP Entropy"
             x_label = "Time Interval"
             y_label = "Entropy"
@@ -1273,7 +1273,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, ipDstEntropy FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, ip_dst_entropy FROM %s ORDER BY last_pkt_timestamp")
             title = "Destination IP Entropy"
             x_label = "Time Interval"
             y_label = "Entropy"
@@ -1287,7 +1287,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, newIPCount FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, ip_novel_count FROM %s ORDER BY last_pkt_timestamp")
             title = "IP Novelty Distribution"
             x_label = "Time Interval"
             y_label = "Novel values count"
@@ -1301,7 +1301,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, newPortCount FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, port_novel_count FROM %s ORDER BY last_pkt_timestamp")
             title = "Port Novelty Distribution"
             x_label = "Time Interval"
             y_label = "Novel values count"
@@ -1315,7 +1315,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, newTTLCount FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, ttl_novel_count FROM %s ORDER BY last_pkt_timestamp")
             title = "TTL Novelty Distribution"
             x_label = "Time Interval"
             y_label = "Novel values count"
@@ -1329,7 +1329,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, newToSCount FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, tos_novel_count FROM %s ORDER BY last_pkt_timestamp")
             title = "ToS Novelty Distribution"
             x_label = "Time Interval"
             y_label = "Novel values count"
@@ -1343,7 +1343,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, newWinSizeCount FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, win_size_novel_count FROM %s ORDER BY last_pkt_timestamp")
             title = "Window Size Novelty Distribution"
             x_label = "Time Interval"
             y_label = "Novel values count"
@@ -1357,7 +1357,7 @@ class Statistics:
             :return:
             """
             query_output = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, newMSSCount FROM %s ORDER BY lastPktTimestamp")
+                "SELECT last_pkt_timestamp, mss_novel_count FROM %s ORDER BY last_pkt_timestamp")
             title = "MSS Novelty Distribution"
             x_label = "Time Interval"
             y_label = "Novel values count"
@@ -1382,7 +1382,7 @@ class Statistics:
 
             plt.gcf().clear()
             result = self.stats_db.process_interval_statistics_query(
-                "SELECT lastPktTimestamp, ip{0}CumEntropy FROM %s ORDER BY lastPktTimestamp".format(sod))
+                "SELECT last_pkt_timestamp, ip_{0}_cum_entropy FROM %s ORDER BY last_pkt_timestamp".format(sod.lower()))
             graphx, graphy = [], []
             for row in result:
                 graphx.append(row[0])

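Taken together, the Python-side changes are purely mechanical: camelCase column names become snake_case, and the new*Count counters become *_novel_count. The sketch below is illustrative only and not part of the commit; the mapping is transcribed (non-exhaustively) from the hunks above, and translate_legacy_query is a hypothetical helper showing how a query written against the old names could be rewritten for the renamed schema.

    # Hypothetical helper, not part of the commit: rewrites a legacy query string
    # so that it matches the renamed interval-statistics columns. The mapping is
    # a non-exhaustive transcription of the renames shown in the diff above.
    OLD_TO_NEW_COLUMNS = {
        "lastPktTimestamp": "last_pkt_timestamp",
        "startTimestamp": "first_pkt_timestamp",
        "pktsCount": "pkts_count",
        "pktRate": "pkt_rate",
        "payloadCount": "payload_count",
        "incorrectTCPChecksumCount": "incorrect_tcp_checksum_count",
        "correctTCPChecksumCount": "correct_tcp_checksum_count",
        "newIPCount": "ip_novel_count",
        "newPortCount": "port_novel_count",
        "newTTLCount": "ttl_novel_count",
        "newWinSizeCount": "win_size_novel_count",
        "newToSCount": "tos_novel_count",
        "newMSSCount": "mss_novel_count",
        "ipSrcEntropy": "ip_src_entropy",
        "ipDstEntropy": "ip_dst_entropy",
        "ipSrcCumEntropy": "ip_src_cum_entropy",
        "ipDstCumEntropy": "ip_dst_cum_entropy",
    }

    def translate_legacy_query(query: str) -> str:
        """Replace old interval-statistics column names with their new ones."""
        for old, new in OLD_TO_NEW_COLUMNS.items():
            query = query.replace(old, new)
        return query

    # Example:
    # translate_legacy_query("SELECT lastPktTimestamp, pktsCount FROM %s")
    # returns "SELECT last_pkt_timestamp, pkts_count FROM %s"
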
code_boost/src/cxx/statistics_db.cpp (+47, -47)

@@ -663,55 +663,55 @@ void statistics_db::writeStatisticsInterval(const std::unordered_map<std::string
             db->exec("DROP TABLE IF EXISTS " + table_name);
             SQLite::Transaction transaction(*db);
             db->exec("CREATE TABLE " + table_name + " ("
-                    "lastPktTimestamp TEXT,"
-                    "startTimestamp TEXT,"
+                    "last_pkt_timestamp TEXT,"
+                    "first_pkt_timestamp TEXT,"
                     "endTimestamp TEXT,"
-                    "pktsCount INTEGER,"
-                    "pktRate REAL,"
+                    "pkts_count INTEGER,"
+                    "pkt_rate REAL,"
                     "kBytes REAL,"
-                    "kByteRate REAL,"
-                    "ipSrcEntropy REAL,"
-                    "ipDstEntropy REAL,"
-                    "ipSrcCumEntropy REAL,"
-                    "ipDstCumEntropy REAL,"
-                    "payloadCount INTEGER,"
-                    "incorrectTCPChecksumCount INTEGER,"
-                    "correctTCPChecksumCount INTEGER,"
-                    "newIPCount INTEGER,"
-                    "newPortCount INTEGER,"
-                    "newTTLCount INTEGER,"
-                    "newWinSizeCount INTEGER,"
-                    "newToSCount INTEGER,"
-                    "newMSSCount INTEGER,"
-                    "PortEntropy REAL,"
-                    "TTLEntropy REAL,"
-                    "WinSizeEntropy REAL,"
-                    "ToSEntropy REAL,"
-                    "MSSEntropy REAL,"
-                    "newPortEntropy REAL,"
-                    "newTTLEntropy REAL,"
-                    "newWinSizeEntropy REAL,"
-                    "newToSEntropy REAL,"
-                    "newMSSEntropy REAL,"
-                    "PortEntropyNormalized REAL,"
-                    "TTLEntropyNormalized REAL,"
-                    "WinSizeEntropyNormalized REAL,"
-                    "ToSEntropyNormalized REAL,"
-                    "MSSEntropyNormalized REAL,"
-                    "newPortEntropyNormalized REAL,"
-                    "newTTLEntropyNormalized REAL,"
-                    "newWinSizeEntropyNormalized REAL,"
-                    "newToSEntropyNormalized REAL,"
-                    "newMSSEntropyNormalized REAL,"
-                    "ipSrcEntropyNormalized REAL,"
-                    "ipDstEntropyNormalized REAL,"
-                    "ipSrcCumEntropyNormalized REAL,"
-                    "ipDstCumEntropyNormalized REAL,"
-                    "ipSrcNovelEntropy REAL,"
-                    "ipDstNovelEntropy REAL,"
-                    "ipSrcNovelEntropyNormalized REAL,"
-                    "ipDstNovelEntropyNormalized REAL,"
-                    "PRIMARY KEY(lastPktTimestamp));");
+                    "kByte_rate REAL,"
+                    "ip_src_entropy REAL,"
+                    "ip_dst_entropy REAL,"
+                    "ip_src_cum_entropy REAL,"
+                    "ip_dst_cum_entropy REAL,"
+                    "payload_count INTEGER,"
+                    "incorrect_tcp_checksum_count INTEGER,"
+                    "correct_tcp_checksum_count INTEGER,"
+                    "ip_novel_count INTEGER,"
+                    "port_novel_count INTEGER,"
+                    "ttl_novel_count INTEGER,"
+                    "win_size_novel_count INTEGER,"
+                    "tos_novel_count INTEGER,"
+                    "mss_novel_count INTEGER,"
+                    "port_entropy REAL,"
+                    "ttl_entropy REAL,"
+                    "win_size_entropy REAL,"
+                    "tos_entropy REAL,"
+                    "mss_entropy REAL,"
+                    "port_novel_entropy REAL,"
+                    "ttl_novel_entropy REAL,"
+                    "win_size_novel_entropy REAL,"
+                    "tos_novel_entropy REAL,"
+                    "mss_novel_entropy REAL,"
+                    "port_entropy_normalized REAL,"
+                    "ttl_entropy_normalized REAL,"
+                    "win_size_entropy_normalized REAL,"
+                    "tos_entropy_normalized REAL,"
+                    "mss_entropy_normalized REAL,"
+                    "port_novel_entropy_normalized REAL,"
+                    "ttl_novel_entropy_normalized REAL,"
+                    "win_size_novel_entropy_normalized REAL,"
+                    "tos_novel_entropy_normalized REAL,"
+                    "mss_novel_entropy_normalized REAL,"
+                    "ip_src_entropy_normalized REAL,"
+                    "ip_dst_entropy_normalized REAL,"
+                    "ip_src_cum_entropy_normalized REAL,"
+                    "ip_dst_cum_entropy_normalized REAL,"
+                    "ip_src_novel_entropy REAL,"
+                    "ip_dst_novel_entropy REAL,"
+                    "ip_src_novel_entropy_normalized REAL,"
+                    "ip_dst_novel_entropy_normalized REAL,"
+                    "PRIMARY KEY(last_pkt_timestamp));");
 
             double ip_src_entropy = 0.0;
             double ip_dst_entropy = 0.0;

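Since the table is dropped and recreated on every run (DROP TABLE IF EXISTS above), the commit does not need an in-place migration. For a database created with the old schema that should nevertheless be kept, SQLite 3.25 and later can rename columns directly; the sketch below is hypothetical, uses a placeholder table name (the real name is assembled at runtime), and lists only a few of the rename pairs.

    # Hypothetical in-place migration, not part of the commit. Assumes SQLite
    # >= 3.25 (ALTER TABLE ... RENAME COLUMN) and a placeholder table name;
    # the remaining pairs follow the same pattern as in the earlier sketch.
    import sqlite3

    RENAMES = [
        ("lastPktTimestamp", "last_pkt_timestamp"),
        ("startTimestamp", "first_pkt_timestamp"),
        ("pktsCount", "pkts_count"),
        ("newIPCount", "ip_novel_count"),
        # ... remaining pairs omitted for brevity
    ]

    def migrate_interval_table(db_path: str, table: str = "interval_statistics") -> None:
        """Rename the interval-statistics columns of an existing database in place."""
        con = sqlite3.connect(db_path)
        for old, new in RENAMES:
            con.execute('ALTER TABLE "{}" RENAME COLUMN "{}" TO "{}"'.format(table, old, new))
        con.commit()
        con.close()
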
code_boost/src/cxx/statistics_db.h (+1, -1)

@@ -25,7 +25,7 @@ public:
     /*
      * Database version: Increment number on every change in the C++ code!
      */
-    static const int DB_VERSION = 23;
+    static const int DB_VERSION = 24;
 
     /*
      * Methods to read from database
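Bumping DB_VERSION marks statistics databases produced by earlier builds as stale, so they are regenerated rather than queried with the outdated column names. How the check works is not part of this diff; the sketch below assumes, purely for illustration, that the version is persisted via SQLite's user_version pragma.

    # Minimal sketch, under an assumption this diff does not confirm: that the
    # database version is persisted via SQLite's "PRAGMA user_version". If the
    # stored value differs from the compiled-in DB_VERSION, the statistics
    # database has to be recomputed instead of being reused.
    import sqlite3

    DB_VERSION = 24  # must match statistics_db.h

    def needs_recompute(db_path: str) -> bool:
        con = sqlite3.connect(db_path)
        try:
            (stored_version,) = con.execute("PRAGMA user_version").fetchone()
        finally:
            con.close()
        return stored_version != DB_VERSION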