- from operator import itemgetter
- from math import sqrt, ceil, log
- import os
- import time
- import ID2TLib.libpcapreader as pr
- import matplotlib
- matplotlib.use('Agg')
- import matplotlib.pyplot as plt
- from ID2TLib.PcapFile import PcapFile
- from ID2TLib.StatsDatabase import StatsDatabase
- from ID2TLib.IPv4 import IPAddress
- class Statistics:
- def __init__(self, pcap_file: PcapFile):
- """
- Creates a new Statistics object.
- :param pcap_file: A reference to the PcapFile object
- """
- # Fields
- self.pcap_filepath = pcap_file.pcap_file_path
- self.pcap_proc = None
- self.do_extra_tests = False
- # Create folder for statistics database if required
- self.path_db = pcap_file.get_db_path()
- path_dir = os.path.dirname(self.path_db)
- if not os.path.isdir(path_dir):
- os.makedirs(path_dir)
- # Class instances
- self.stats_db = StatsDatabase(self.path_db)
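- # Illustrative usage sketch (not part of the original module). It assumes a capture
- # file "example.pcap" exists and that PcapFile is constructed from a file path:
- #   pcap = PcapFile("example.pcap")
- #   statistics = Statistics(pcap)
- #   statistics.load_pcap_statistics(flag_write_file=False, flag_recalculate_stats=False, flag_print_statistics=True)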
- def load_pcap_statistics(self, flag_write_file: bool, flag_recalculate_stats: bool, flag_print_statistics: bool):
- """
- Loads the PCAP statistics for the file specified by pcap_filepath. If the database does not exist yet, the
- statistics are calculated by the PCAP file processor and saved into the newly created database. Otherwise the
- statistics are read directly from the existing database.
- :param flag_write_file: Indicates whether the statistics should additionally be written into a text file (True)
- or not (False)
- :param flag_recalculate_stats: Indicates whether any existing statistics should be recalculated
- :param flag_print_statistics: Indicates whether the gathered basic statistics should be printed to the terminal
- """
- # Load pcap and get loading time
- time_start = time.perf_counter()
- # Inform user about recalculation of statistics and its reason
- if flag_recalculate_stats:
- print("Flag -r/--recalculate found. Recalculating statistics.")
- # Recalculate statistics if database does not exist OR param -r/--recalculate is provided
- if (not self.stats_db.get_db_exists()) or flag_recalculate_stats:
- self.pcap_proc = pr.pcap_processor(self.pcap_filepath, str(self.do_extra_tests))
- self.pcap_proc.collect_statistics()
- self.pcap_proc.write_to_database(self.path_db)
- outstring_datasource = "by PCAP file processor."
- else:
- outstring_datasource = "from statistics database."
- # Load statistics from database
- self.file_info = self.stats_db.get_file_info()
- time_end = time.perf_counter()
- print("Loaded file statistics in " + str(time_end - time_start)[:4] + " sec " + outstring_datasource)
- # Write statistics if param -e/--export provided
- if flag_write_file:
- self.write_statistics_to_file()
- # Print statistics if param -s/--statistics provided
- if flag_print_statistics:
- self.print_statistics()
- def get_file_information(self):
- """
- Returns a list of tuples, each containing one piece of information about the file.
- :return: a list of tuples, each consisting of (description, value, unit), where unit is optional.
- """
- return [("Pcap file", self.pcap_filepath),
- ("Packets", self.get_packet_count(), "packets"),
- ("Capture length", self.get_capture_duration(), "seconds"),
- ("Capture start", self.get_pcap_timestamp_start()),
- ("Capture end", self.get_pcap_timestamp_end())]
- def get_general_file_statistics(self):
- """
- Returns a list of tuples, each containing a file statistic.
- :return: a list of tuples, each consisting of (description, value, unit).
- """
- return [("Avg. packet rate", self.file_info['avgPacketRate'], "packets/sec"),
- ("Avg. packet size", self.file_info['avgPacketSize'], "kbytes"),
- ("Avg. packets sent", self.file_info['avgPacketsSentPerHost'], "packets"),
- ("Avg. bandwidth in", self.file_info['avgBandwidthIn'], "kbit/s"),
- ("Avg. bandwidth out", self.file_info['avgBandwidthOut'], "kbit/s")]
- @staticmethod
- def write_list(desc_val_unit_list, func, line_ending="\n"):
- """
- Takes a list of tuples (statistic name, statistic value, unit) as input, generates a string from these three values
- and applies the function func to this string.
- Before generating the string, it identifies text containing a float number, casts the string to a
- float and rounds the value to four decimal digits.
- :param desc_val_unit_list: The list of tuples consisting of (description, value, unit)
- :param func: The function to be applied to each generated string
- :param line_ending: The formatting string to be applied at the end of each string
- """
- for entry in desc_val_unit_list:
- # Convert text containing float into float
- (description, value) = entry[0:2]
- if isinstance(value, str) and "." in value:
- try:
- value = float(value)
- except ValueError:
- pass # do nothing -> value was not a float
- # round float
- if isinstance(value, float):
- value = round(value, 4)
- # write into file
- if len(entry) == 3:
- unit = entry[2]
- func(description + ":\t" + str(value) + " " + unit + line_ending)
- else:
- func(description + ":\t" + str(value) + line_ending)
- def print_statistics(self):
- """
- Prints the basic file statistics to the terminal.
- """
- print("\nPCAP FILE INFORMATION ------------------------------")
- Statistics.write_list(self.get_file_information(), print, "")
- print("\nGENERAL FILE STATISTICS ----------------------------")
- Statistics.write_list(self.get_general_file_statistics(), print, "")
- print("\n")
- def calculate_entropy(self, frequency: list, normalized: bool = False):
- """
- Calculates the entropy, and optionally the normalized entropy, of a list of elements given their frequencies.
- :param frequency: The frequencies of the elements.
- :param normalized: Whether to additionally calculate the normalized entropy
- :return: entropy, or the tuple (entropy, normalized entropy) if normalized is True
- """
- entropy, normalizedEnt, n = 0, 0, 0
- sumFreq = sum(frequency)
- for i, x in enumerate(frequency):
- p_x = float(frequency[i] / sumFreq)
- if p_x > 0:
- n += 1
- entropy += - p_x * log(p_x, 2)
- if normalized:
- if n > 1:  # normalization is undefined when only one distinct element occurs (log(1) = 0)
- normalizedEnt = entropy / log(n, 2)
- return entropy, normalizedEnt
- else:
- return entropy
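- # Worked example (illustrative): frequencies [2, 2, 2, 2] give p_x = 0.25 for each of the
- # n = 4 elements, so entropy = 4 * 0.25 * log2(4) = 2.0 bits and, with normalized=True,
- # the normalized entropy is 2.0 / log2(4) = 1.0 (a perfectly uniform distribution).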
- def calculate_complement_packet_rates(self, pps):
- """
- Calculates the complement packet rates of the background traffic packet rates for each interval,
- then normalizes them to the maximum boundary given by the input parameter pps.
- :param pps: The upper packet-rate boundary used for normalization
- :return: normalized packet rates for each time interval.
- """
- result = self.process_db_query(
- "SELECT lastPktTimestamp,pktsCount FROM interval_statistics ORDER BY lastPktTimestamp")
- # print(result)
- bg_interval_pps = []
- complement_interval_pps = []
- intervalsSum = 0
- if result:
- # Get the interval in seconds
- for i, row in enumerate(result):
- if i < len(result) - 1:
- intervalsSum += ceil((int(result[i + 1][0]) * 10 ** -6) - (int(row[0]) * 10 ** -6))
- interval = intervalsSum / (len(result) - 1)
- # Convert timestamp from micro to seconds, convert packet rate "per interval" to "per second"
- for row in result:
- bg_interval_pps.append((int(row[0]) * 10 ** -6, int(row[1] / interval)))
- # Find max PPS
- maxPPS = max(bg_interval_pps, key=itemgetter(1))[1]
- for row in bg_interval_pps:
- complement_interval_pps.append((row[0], int(pps * (maxPPS - row[1]) / maxPPS)))
- return complement_interval_pps
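- # Worked example (illustrative): with pps=100 and background rates of 10, 40 and 80 pkt/s
- # (maxPPS = 80), the complement rates are int(100*(80-10)/80) = 87, int(100*(80-40)/80) = 50
- # and int(100*(80-80)/80) = 0, i.e. the injected traffic fills the gap left by the background.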
- def get_tests_statistics(self):
- """
- Computes the statistics for the basic defect tests (payload, checksum, entropy and novelty tests).
- :return: a list of tuples, each consisting of (description, value, unit)
- """
- # self.stats_db._process_user_defined_query returns a list of tuples, thus we need [0][0] to access the data
- def count_frequency(valuesList):
- values, frequency = [], []
- for x in valuesList:
- if x in values:
- frequency[values.index(x)] += 1
- else:
- values.append(x)
- frequency.append(1)
- return values, frequency
- ####### Payload Tests #######
- sumPayloadCount = self.stats_db._process_user_defined_query("SELECT sum(payloadCount) FROM interval_statistics")
- pktCount = self.stats_db._process_user_defined_query("SELECT packetCount FROM file_statistics")
- if sumPayloadCount and pktCount:
- payloadRatio = 0
- if pktCount[0][0] != 0:
- payloadRatio = float(sumPayloadCount[0][0] / pktCount[0][0] * 100)
- else:
- payloadRatio = -1
- ####### TCP checksum Tests #######
- incorrectChecksumCount = self.stats_db._process_user_defined_query("SELECT sum(incorrectTCPChecksumCount) FROM interval_statistics")
- correctChecksumCount = self.stats_db._process_user_defined_query("SELECT avg(correctTCPChecksumCount) FROM interval_statistics")
- if incorrectChecksumCount and correctChecksumCount:
- incorrectChecksumRatio = 0
- if (incorrectChecksumCount[0][0] + correctChecksumCount[0][0]) != 0:
- incorrectChecksumRatio = float(incorrectChecksumCount[0][0] / (incorrectChecksumCount[0][0] + correctChecksumCount[0][0] ) * 100)
- else:
- incorrectChecksumRatio = -1
- ####### IP Src & Dst Tests #######
- result = self.stats_db._process_user_defined_query("SELECT ipAddress,pktsSent,pktsReceived FROM ip_statistics")
- data, srcFrequency, dstFrequency = [], [], []
- if result:
- for row in result:
- srcFrequency.append(row[1])
- dstFrequency.append(row[2])
- ipSrcEntropy, ipSrcNormEntropy = self.calculate_entropy(srcFrequency, True)
- ipDstEntropy, ipDstNormEntropy = self.calculate_entropy(dstFrequency, True)
- newIPCount = self.stats_db._process_user_defined_query("SELECT newIPCount FROM interval_statistics")
- ipNovelsPerInterval, ipNovelsPerIntervalFrequency = count_frequency(newIPCount)
- ipNoveltyDistEntropy = self.calculate_entropy(ipNovelsPerIntervalFrequency)
- ####### Ports Tests #######
- port0Count = self.stats_db._process_user_defined_query("SELECT SUM(portCount) FROM ip_ports WHERE portNumber = 0")
- if not port0Count[0][0]:
- port0Count = 0
- else:
- port0Count = port0Count[0][0]
- reservedPortCount = self.stats_db._process_user_defined_query(
- "SELECT SUM(portCount) FROM ip_ports WHERE portNumber IN (100,114,1023,1024,49151,49152,65535)")# could be extended
- if not reservedPortCount[0][0]:
- reservedPortCount = 0
- else:
- reservedPortCount = reservedPortCount[0][0]
- ####### TTL Tests #######
- result = self.stats_db._process_user_defined_query("SELECT ttlValue,SUM(ttlCount) FROM ip_ttl GROUP BY ttlValue")
- data, frequency = [], []
- for row in result:
- frequency.append(row[1])
- ttlEntropy, ttlNormEntropy = self.calculate_entropy(frequency,True)
- newTTLCount = self.stats_db._process_user_defined_query("SELECT newTTLCount FROM interval_statistics")
- ttlNovelsPerInterval, ttlNovelsPerIntervalFrequency = count_frequency(newTTLCount)
- ttlNoveltyDistEntropy = self.calculate_entropy(ttlNovelsPerIntervalFrequency)
- ####### Window Size Tests #######
- result = self.stats_db._process_user_defined_query("SELECT winSize,SUM(winCount) FROM tcp_win GROUP BY winSize")
- data, frequency = [], []
- for row in result:
- frequency.append(row[1])
- winEntropy, winNormEntropy = self.calculate_entropy(frequency, True)
- newWinSizeCount = self.stats_db._process_user_defined_query("SELECT newWinSizeCount FROM interval_statistics")
- winNovelsPerInterval, winNovelsPerIntervalFrequency = count_frequency(newWinSizeCount)
- winNoveltyDistEntropy = self.calculate_entropy(winNovelsPerIntervalFrequency)
- ####### ToS Tests #######
- result = self.stats_db._process_user_defined_query(
- "SELECT tosValue,SUM(tosCount) FROM ip_tos GROUP BY tosValue")
- data, frequency = [], []
- for row in result:
- frequency.append(row[1])
- tosEntropy, tosNormEntropy = self.calculate_entropy(frequency, True)
- newToSCount = self.stats_db._process_user_defined_query("SELECT newToSCount FROM interval_statistics")
- tosNovelsPerInterval, tosNovelsPerIntervalFrequency = count_frequency(newToSCount)
- tosNoveltyDistEntropy = self.calculate_entropy(tosNovelsPerIntervalFrequency)
- ####### MSS Tests #######
- result = self.stats_db._process_user_defined_query(
- "SELECT mssValue,SUM(mssCount) FROM tcp_mss GROUP BY mssValue")
- data, frequency = [], []
- for row in result:
- frequency.append(row[1])
- mssEntropy, mssNormEntropy = self.calculate_entropy(frequency, True)
- newMSSCount = self.stats_db._process_user_defined_query("SELECT newMSSCount FROM interval_statistics")
- mssNovelsPerInterval, mssNovelsPerIntervalFrequency = count_frequency(newMSSCount)
- mssNoveltyDistEntropy = self.calculate_entropy(mssNovelsPerIntervalFrequency)
- result = self.stats_db._process_user_defined_query("SELECT SUM(mssCount) FROM tcp_mss WHERE mssValue > 1460")
- # The most commonly used MSS values are <= 1460. Calculate the ratio of values bigger than 1460.
- if not result[0][0]:
- result = 0
- else:
- result = result[0][0]
- bigMSS = (result / sum(frequency)) * 100
- output = []
- if self.do_extra_tests:
- output = [("Payload ratio", payloadRatio, "%"),
- ("Incorrect TCP checksum ratio", incorrectChecksumRatio, "%")]
- output = output + [("# IP addresses", sum([x[0] for x in newIPCount]), ""),
- ("IP Src Entropy", ipSrcEntropy, ""),
- ("IP Src Normalized Entropy", ipSrcNormEntropy, ""),
- ("IP Dst Entropy", ipDstEntropy, ""),
- ("IP Dst Normalized Entropy", ipDstNormEntropy, ""),
- ("IP Novelty Distribution Entropy", ipNoveltyDistEntropy, ""),
- ("# TTL values", sum([x[0] for x in newTTLCount]), ""),
- ("TTL Entropy", ttlEntropy, ""),
- ("TTL Normalized Entropy", ttlNormEntropy, ""),
- ("TTL Novelty Distribution Entropy", ttlNoveltyDistEntropy, ""),
- ("# WinSize values", sum([x[0] for x in newWinSizeCount]), ""),
- ("WinSize Entropy", winEntropy, ""),
- ("WinSize Normalized Entropy", winNormEntropy, ""),
- ("WinSize Novelty Distribution Entropy", winNoveltyDistEntropy, ""),
- ("# ToS values", sum([x[0] for x in newToSCount]), ""),
- ("ToS Entropy", tosEntropy, ""),
- ("ToS Normalized Entropy", tosNormEntropy, ""),
- ("ToS Novelty Distribution Entropy", tosNoveltyDistEntropy, ""),
- ("# MSS values", sum([x[0] for x in newMSSCount]), ""),
- ("MSS Entropy", mssEntropy, ""),
- ("MSS Normalized Entropy", mssNormEntropy, ""),
- ("MSS Novelty Distribution Entropy", mssNoveltyDistEntropy, ""),
- ("======================","","")]
- # Derive warnings from the statistics values
- if self.do_extra_tests:
- if payloadRatio > 80:
- output.append(("WARNING: Too high payload ratio", payloadRatio, "%."))
- if payloadRatio < 30:
- output.append(("WARNING: Too low payload ratio", payloadRatio, "% (Injecting attacks that are carried out in the packet payloads is not recommmanded)."))
- if incorrectChecksumRatio > 5:
- output.append(("WARNING: High incorrect TCP checksum ratio",incorrectChecksumRatio,"%."))
- if ipSrcNormEntropy > 0.65:
- output.append(("WARNING: High IP source normalized entropy",ipSrcNormEntropy,"."))
- if ipSrcNormEntropy < 0.2:
- output.append(("WARNING: Low IP source normalized entropy", ipSrcNormEntropy, "."))
- if ipDstNormEntropy > 0.65:
- output.append(("WARNING: High IP destination normalized entropy", ipDstNormEntropy, "."))
- if ipDstNormEntropy < 0.2:
- output.append(("WARNING: Low IP destination normalized entropy", ipDstNormEntropy, "."))
- if ttlNormEntropy > 0.65:
- output.append(("WARNING: High TTL normalized entropy", ttlNormEntropy, "."))
- if ttlNormEntropy < 0.2:
- output.append(("WARNING: Low TTL normalized entropy", ttlNormEntropy, "."))
- if ttlNoveltyDistEntropy < 1:
- output.append(("WARNING: Too low TTL novelty distribution entropy", ttlNoveltyDistEntropy,
- "(The distribution of the novel TTL values is suspicious)."))
- if winNormEntropy > 0.6:
- output.append(("WARNING: High Window Size normalized entropy", winNormEntropy, "."))
- if winNormEntropy < 0.1:
- output.append(("WARNING: Low Window Size normalized entropy", winNormEntropy, "."))
- if winNoveltyDistEntropy < 4:
- output.append(("WARNING: Low Window Size novelty distribution entropy", winNoveltyDistEntropy,
- "(The distribution of the novel Window Size values is suspicious)."))
- if tosNormEntropy > 0.4:
- output.append(("WARNING: High ToS normalized entropy", tosNormEntropy, "."))
- if tosNormEntropy < 0.1:
- output.append(("WARNING: Low ToS normalized entropy", tosNormEntropy, "."))
- if tosNoveltyDistEntropy < 0.5:
- output.append(("WARNING: Low ToS novelty distribution entropy", tosNoveltyDistEntropy,
- "(The distribution of the novel ToS values is suspicious)."))
- if mssNormEntropy > 0.4:
- output.append(("WARNING: High MSS normalized entropy", mssNormEntropy, "."))
- if mssNormEntropy < 0.1:
- output.append(("WARNING: Low MSS normalized entropy", mssNormEntropy, "."))
- if mssNoveltyDistEntropy < 0.5:
- output.append(("WARNING: Low MSS novelty distribution entropy", mssNoveltyDistEntropy,
- "(The distribution of the novel MSS values is suspicious)."))
- if bigMSS > 50:
- output.append(("WARNING: High ratio of MSS > 1460", bigMSS, "% (High fragmentation rate in Ethernet)."))
- if port0Count > 0:
- output.append(("WARNING: Port number 0 is used in ",port0Count,"packets (awkward-looking port)."))
- if reservedPortCount > 0:
- output.append(("WARNING: Reserved port numbers are used in ",reservedPortCount,"packets (uncommonly-used ports)."))
- return output
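- # Illustrative shape of the returned entries (values are made up):
- #   ("IP Src Entropy", 5.2983, ""), ("# TTL values", 12, ""),
- #   ("WARNING: Low TTL normalized entropy", 0.15, ".")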
- def write_statistics_to_file(self):
- """
- Writes the calculated basic statistics into a file.
- """
- def _write_header(title: str):
- """
- Writes the section header into the open file.
- :param title: The section title
- """
- target.write("====================== \n")
- target.write(title + " \n")
- target.write("====================== \n")
- target = open(self.pcap_filepath + ".stat", 'w')
- target.truncate()
- _write_header("PCAP file information")
- Statistics.write_list(self.get_file_information(), target.write)
- _write_header("General statistics")
- Statistics.write_list(self.get_general_file_statistics(), target.write)
- _write_header("Tests statistics")
- Statistics.write_list(self.get_tests_statistics(), target.write)
- target.close()
- def get_capture_duration(self):
- """
- :return: The duration of the capture in seconds
- """
- return self.file_info['captureDuration']
- def get_pcap_timestamp_start(self):
- """
- :return: The timestamp of the first packet in the PCAP file
- """
- return self.file_info['timestampFirstPacket']
- def get_pcap_timestamp_end(self):
- """
- :return: The timestamp of the last packet in the PCAP file
- """
- return self.file_info['timestampLastPacket']
- def get_pps_sent(self, ip_address: str):
- """
- Calculates the sent packets per seconds for a given IP address.
- :param ip_address: The IP address whose packets per second should be calculated
- :return: The sent packets per seconds for the given IP address
- """
- packets_sent = self.stats_db.process_db_query("SELECT pktsSent from ip_statistics WHERE ipAddress=?", False,
- (ip_address,))
- capture_duration = float(self.get_capture_duration())
- return int(float(packets_sent) / capture_duration)
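- # Example (illustrative): a host that sent 1200 packets during a 60-second capture
- # yields int(1200 / 60.0) = 20 packets per second.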
- def get_pps_received(self, ip_address: str):
- """
- Calculate the packets per second received for a given IP address.
- :param ip_address: The IP address used for the calculation
- :return: The number of packets per second received
- """
- packets_received = self.stats_db.process_db_query("SELECT pktsReceived FROM ip_statistics WHERE ipAddress=?",
- False,
- (ip_address,))
- capture_duration = float(self.get_capture_duration())
- return int(float(packets_received) / capture_duration)
- def get_packet_count(self):
- """
- :return: The number of packets in the loaded PCAP file
- """
- return self.file_info['packetCount']
- def get_most_used_ip_address(self):
- """
- :return: The IP address/addresses with the highest sum of packets sent and received
- """
- return self.process_db_query("most_used(ipAddress)")
- def get_ttl_distribution(self, ipAddress: str):
- result = self.process_db_query('SELECT ttlValue, ttlCount from ip_ttl WHERE ipAddress="' + ipAddress + '"')
- result_dict = {key: value for (key, value) in result}
- return result_dict
- def get_mss_distribution(self, ipAddress: str):
- result = self.process_db_query('SELECT mssValue, mssCount from tcp_mss WHERE ipAddress="' + ipAddress + '"')
- result_dict = {key: value for (key, value) in result}
- return result_dict
- def get_win_distribution(self, ipAddress: str):
- result = self.process_db_query('SELECT winSize, winCount from tcp_win WHERE ipAddress="' + ipAddress + '"')
- result_dict = {key: value for (key, value) in result}
- return result_dict
- def get_tos_distribution(self, ipAddress: str):
- result = self.process_db_query('SELECT tosValue, tosCount from ip_tos WHERE ipAddress="' + ipAddress + '"')
- result_dict = {key: value for (key, value) in result}
- return result_dict
- def get_random_ip_address(self, count: int = 1):
- """
- :param count: The number of IP addresses to return
- :return: A randomly chosen IP address from the dataset or, if param count is greater than one, a list of randomly
- chosen IP addresses
- """
- if count == 1:
- return self.process_db_query("random(all(ipAddress))")
- else:
- ip_address_list = []
- for i in range(0, count):
- ip_address_list.append(self.process_db_query("random(all(ipAddress))"))
- return ip_address_list
- def get_mac_address(self, ipAddress: str):
- """
- :return: The MAC address used in the dataset for the given IP address.
- """
- return self.process_db_query('macAddress(ipAddress=' + ipAddress + ")")
- def get_most_used_mss(self, ipAddress: str):
- """
- :param ipAddress: The IP address whose used MSS should be determined
- :return: The TCP MSS value used by the IP address, or None if the IP address never specified an MSS
- """
- mss_value = self.process_db_query('SELECT mssValue from tcp_mss WHERE ipAddress="' + ipAddress + '" ORDER BY mssCount DESC LIMIT 1')
- if isinstance(mss_value, int):
- return mss_value
- else:
- return None
- def get_most_used_ttl(self, ipAddress: str):
- """
- :param ipAddress: The IP address whose used TTL should be determined
- :return: The TTL value used by the IP address, or None if the IP address never specified a TTL
- """
- ttl_value = self.process_db_query(
- 'SELECT ttlValue from ip_ttl WHERE ipAddress="' + ipAddress + '" ORDER BY ttlCount DESC LIMIT 1')
- if isinstance(ttl_value, int):
- return ttl_value
- else:
- return None
- def get_in_degree(self):
- """
- Determines the in-degree for each local IP address, i.e. for every IP the number of distinct IP addresses it has received packets from.
- :return: a list, each entry consists of one local IP address and its associated in-degree
- """
- in_degree_raw = self.stats_db._process_user_defined_query(
- "SELECT ipAddressA, Count(DISTINCT ipAddressB) FROM ip_ports JOIN conv_statistics ON ipAddress = ipAddressA WHERE portDirection=\'in\' AND portNumber = portA GROUP BY ipAddress " +
- "UNION " +
- "SELECT ipAddressB, Count(DISTINCT ipAddressA) FROM ip_ports JOIN conv_statistics ON ipAddress = ipAddressB WHERE portDirection=\'in\' AND portNumber = portB GROUP BY ipAddress")
- # Because of the structure of the database, there could be two entries for the same IP address, therefore accumulate their sums
- in_degree = self.filter_multiples(in_degree_raw)
- return in_degree
- def get_out_degree(self):
- """
- Determines the out-degree for each local IP address, i.e. for every IP the number of distinct IP addresses it has sent packets to.
- :return: a list, each entry consists of one local IP address and its associated out-degree
- """
- """
- test = self.stats_db._process_user_defined_query("SELECT DISTINCT * FROM conv_statistics")
- #test2 = self.stats_db._process_user_defined_query("SELECT DISTINCT ipAddressB, portB FROM conv_statistics")
- print("############# conv_statistics IP's + Ports")
- for p in test:
- print(p)
- #for p in test2:
- # print(p)
- print("############## ip_ports ##################")
- test3 = self.stats_db._process_user_defined_query("SELECT DISTINCT ipAddress, portNumber, portDirection FROM ip_ports")
- for p in test3:
- print(p)
- print("")
- print("############## AFTER JOIN - A #############")
- test4 = self.stats_db._process_user_defined_query(
- "SELECT * FROM ip_ports JOIN conv_statistics ON ipAddress = ipAddressA WHERE portDirection=\'out\' AND portNumber = portA") # Hier werden die anfang locals rausgefiltert!
- for p in test4:
- print(p)
- print("")
- print("############## AFTER JOIN - B #############")
- test6 = self.stats_db._process_user_defined_query(
- "SELECT * FROM ip_ports JOIN conv_statistics ON ipAddress = ipAddressB WHERE portDirection=\'out\' AND portNumber = portB") # Hier werden die anfang locals rausgefiltert!
- for p in test6:
- print(p)
- print("")
- print("############## BUILD UP PART FOR PART#############")
- test5 = self.stats_db._process_user_defined_query(
- "SELECT ipAddress, Count(DISTINCT ipAddressB) FROM ip_ports JOIN conv_statistics ON ipAddress = ipAddressA WHERE portDirection=\'out\' GROUP BY ipAddress")
- for p in test5:
- print(p)
- """
- out_degree_raw = self.stats_db._process_user_defined_query(
- "SELECT ipAddressA, Count(DISTINCT ipAddressB) FROM ip_ports JOIN conv_statistics ON ipAddress = ipAddressA WHERE portDirection=\'out\' AND portNumber = portA GROUP BY ipAddress " +
- "UNION " +
- "SELECT ipAddressB, Count(DISTINCT ipAddressA) FROM ip_ports JOIN conv_statistics ON ipAddress = ipAddressB WHERE portDirection=\'out\' AND portNumber = portB GROUP BY ipAddress")
- #filter out non-local IPs
- #out_degree_raw_2 = []
- #for entry in out_degree_raw:
- # if IPAddress.parse(entry[0]).is_reserved():
- # out_degree_raw_2.append(entry)
- # Because of the structure of the database, there could be two entries for the same IP address, therefore accumulate their sums
- out_degree = self.filter_multiples(out_degree_raw)
- return out_degree
- def filter_multiples(self, entries):
- """
- Helper function for get_out_degree and get_in_degree.
- Filters the given list for duplicate IP addresses and, if duplicates are present, accumulates their values.
- :param entries: list, each entry consists of an IP address and a numeric value
- :return: a filtered list, without duplicate IP addresses
- """
- filtered_entries = []
- done = []
- for p1 in entries:
- added = False
- if p1 in done:
- continue
- for p2 in entries:
- if p1[0] == p2[0] and p1 != p2:
- filtered_entries.append((p1[0], p1[1] + p2[1]))
- done.append(p1)
- done.append(p2)
- #entries.remove(p2)
- added = True
- break
- if not added:
- filtered_entries.append(p1)
- return filtered_entries
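- # Example (illustrative): [("10.0.0.1", 2), ("10.0.0.1", 3), ("10.0.0.2", 1)] is collapsed
- # to [("10.0.0.1", 5), ("10.0.0.2", 1)]; the UNION queries above produce at most two
- # entries per address, which is the case this helper is written for.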
- def get_statistics_database(self):
- """
- :return: A reference to the statistics database object
- """
- return self.stats_db
- def process_db_query(self, query_string_in: str, print_results: bool = False):
- """
- Executes a string identified previously as a query. This can be a standard SQL SELECT/INSERT query or a named
- query.
- :param query_string_in: The query to be processed
- :param print_results: Indicates whether the results should be printed to terminal
- :return: The result of the query
- """
- return self.stats_db.process_db_query(query_string_in, print_results)
- def is_query(self, value: str):
- """
- Checks whether the given string is a standard SQL query (SELECT, INSERT) or a named query.
- :param value: The string to be checked
- :return: True if the string is recognized as a query, otherwise False.
- """
- if not isinstance(value, str):
- return False
- else:
- return (any(x in value.lower().strip() for x in self.stats_db.get_all_named_query_keywords()) or
- any(x in value.lower().strip() for x in self.stats_db.get_all_sql_query_keywords()))
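- # Example (illustrative, assuming "select" and "most_used" are among the registered query keywords):
- #   is_query("SELECT ipAddress FROM ip_statistics")  -> True
- #   is_query("most_used(ipAddress)")                 -> True
- #   is_query(42)                                     -> False (not a string)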
- def calculate_standard_deviation(self, lst):
- """
- Calculates the (population) standard deviation of a list of numbers.
- :param lst: The list of numbers whose standard deviation should be calculated
- :return: The standard deviation
- """
- num_items = len(lst)
- mean = sum(lst) / num_items
- differences = [x - mean for x in lst]
- sq_differences = [d ** 2 for d in differences]
- ssd = sum(sq_differences)
- variance = ssd / num_items
- sd = sqrt(variance)
- return sd
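- # Worked example (illustrative): for [2, 4, 4, 4, 5, 5, 7, 9] the mean is 5, the squared
- # differences sum to 32, the population variance is 32 / 8 = 4 and the standard deviation is sqrt(4) = 2.0.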
- def plot_statistics(self, format: str = 'pdf'):  # alternatively 'png'
- """
- Plots the statistics associated with the dataset.
- :param format: The format to be used to save the statistics diagrams.
- """
- def plot_distribution(queryOutput, title, xLabel, yLabel, file_ending: str):
- plt.gcf().clear()
- graphx, graphy = [], []
- for row in queryOutput:
- graphx.append(row[0])
- graphy.append(row[1])
- plt.autoscale(enable=True, axis='both')
- plt.title(title)
- plt.xlabel(xLabel)
- plt.ylabel(yLabel)
- width = 0.1
- plt.xlim([0, max(graphx)])
- plt.grid(True)
- plt.bar(graphx, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_plot-' + title + file_ending)
- plt.savefig(out,dpi=500)
- return out
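- # plot_ttl, plot_mss and plot_win below reuse plot_distribution; most of the plot_interval_*
- # helpers further down delegate to plot_interval_statistics instead.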
- def plot_ttl(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT ttlValue, SUM(ttlCount) FROM ip_ttl GROUP BY ttlValue")
- title = "TTL Distribution"
- xLabel = "TTL Value"
- yLabel = "Number of Packets"
- if queryOutput:
- return plot_distribution(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_mss(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT mssValue, SUM(mssCount) FROM tcp_mss GROUP BY mssValue")
- title = "MSS Distribution"
- xLabel = "MSS Value"
- yLabel = "Number of Packets"
- if queryOutput:
- return plot_distribution(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_win(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT winSize, SUM(winCount) FROM tcp_win GROUP BY winSize")
- title = "Window Size Distribution"
- xLabel = "Window Size"
- yLabel = "Number of Packets"
- if queryOutput:
- return plot_distribution(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_protocol(file_ending: str):
- plt.gcf().clear()
- result = self.stats_db._process_user_defined_query(
- "SELECT protocolName, SUM(protocolCount) FROM ip_protocols GROUP BY protocolName")
- if result:
- graphx, graphy = [], []
- for row in result:
- graphx.append(row[0])
- graphy.append(row[1])
- plt.autoscale(enable=True, axis='both')
- plt.title("Protocols Distribution")
- plt.xlabel('Protocols')
- plt.ylabel('Number of Packets')
- width = 0.1
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- # Protocols' names on x-axis
- x = range(0,len(graphx))
- my_xticks = graphx
- plt.xticks(x, my_xticks)
- plt.bar(x, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_plot-protocol' + file_ending)
- plt.savefig(out,dpi=500)
- return out
- else:
- print("Error plot protocol: No protocol values found!")
- def plot_port(file_ending: str):
- plt.gcf().clear()
- result = self.stats_db._process_user_defined_query(
- "SELECT portNumber, SUM(portCount) FROM ip_ports GROUP BY portNumber")
- graphx, graphy = [], []
- for row in result:
- graphx.append(row[0])
- graphy.append(row[1])
- plt.autoscale(enable=True, axis='both')
- plt.title("Ports Distribution")
- plt.xlabel('Ports Numbers')
- plt.ylabel('Number of Packets')
- width = 0.1
- plt.xlim([0, max(graphx)])
- plt.grid(True)
- plt.bar(graphx, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_plot-port' + file_ending)
- plt.savefig(out,dpi=500)
- return out
- # This distribution is not drawable for big datasets
- def plot_ip_src(file_ending: str):
- plt.gcf().clear()
- result = self.stats_db._process_user_defined_query(
- "SELECT ipAddress, pktsSent FROM ip_statistics")
- graphx, graphy = [], []
- for row in result:
- graphx.append(row[0])
- graphy.append(row[1])
- plt.autoscale(enable=True, axis='both')
- plt.title("Source IP Distribution")
- plt.xlabel('Source IP')
- plt.ylabel('Number of Packets')
- width = 0.1
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- # IPs on x-axis
- x = range(0, len(graphx))
- my_xticks = graphx
- plt.xticks(x, my_xticks, rotation='vertical', fontsize=5)
- plt.tight_layout()
- # limit the number of xticks
- plt.locator_params(axis='x', nbins=20)
- plt.bar(x, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_plot-ip-src' + file_ending)
- plt.savefig(out, dpi=500)
- return out
- # This distribution is not drawable for big datasets
- def plot_ip_dst(file_ending: str):
- plt.gcf().clear()
- result = self.stats_db._process_user_defined_query(
- "SELECT ipAddress, pktsReceived FROM ip_statistics")
- graphx, graphy = [], []
- for row in result:
- graphx.append(row[0])
- graphy.append(row[1])
- plt.autoscale(enable=True, axis='both')
- plt.title("Destination IP Distribution")
- plt.xlabel('Destination IP')
- plt.ylabel('Number of Packets')
- width = 0.1
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- # IPs on x-axis
- x = range(0, len(graphx))
- my_xticks = graphx
- plt.xticks(x, my_xticks, rotation='vertical', fontsize=5)
- plt.tight_layout()
- # limit the number of xticks
- plt.locator_params(axis='x', nbins=20)
- plt.bar(x, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_plot-ip-dst' + file_ending)
- plt.savefig(out, dpi=500)
- return out
- def plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending: str):
- plt.gcf().clear()
- graphx, graphy = [], []
- for row in queryOutput:
- graphx.append(row[0])
- graphy.append(row[1])
- plt.autoscale(enable=True, axis='both')
- plt.title(title)
- plt.xlabel(xLabel)
- plt.ylabel(yLabel)
- width = 0.5
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- # timestamp on x-axis
- x = range(0, len(graphx))
- # limit the number of xticks
- plt.locator_params(axis='x', nbins=20)
- plt.bar(x, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_plot-' + title + file_ending)
- plt.savefig(out, dpi=500)
- return out
- def plot_interval_pktCount(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, pktsCount FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "Packet Rate"
- xLabel = "Time Interval"
- yLabel = "Number of Packets"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_ip_src_ent(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, ipSrcEntropy FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "Source IP Entropy"
- xLabel = "Time Interval"
- yLabel = "Entropy"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_ip_dst_ent(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, ipDstEntropy FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "Destination IP Entropy"
- xLabel = "Time Interval"
- yLabel = "Entropy"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_new_ip(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, newIPCount FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "IP Novelty Distribution"
- xLabel = "Time Interval"
- yLabel = "Novel values count"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_new_port(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, newPortCount FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "Port Novelty Distribution"
- xLabel = "Time Interval"
- yLabel = "Novel values count"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_new_ttl(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, newTTLCount FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "TTL Novelty Distribution"
- xLabel = "Time Interval"
- yLabel = "Novel values count"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_new_tos(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, newToSCount FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "ToS Novelty Distribution"
- xLabel = "Time Interval"
- yLabel = "Novel values count"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_new_win_size(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, newWinSizeCount FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "Window Size Novelty Distribution"
- xLabel = "Time Interval"
- yLabel = "Novel values count"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_new_mss(file_ending: str):
- queryOutput = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, newMSSCount FROM interval_statistics ORDER BY lastPktTimestamp")
- title = "MSS Novelty Distribution"
- xLabel = "Time Interval"
- yLabel = "Novel values count"
- if queryOutput:
- return plot_interval_statistics(queryOutput, title, xLabel, yLabel, file_ending)
- def plot_interval_ip_dst_cum_ent(file_ending: str):
- plt.gcf().clear()
- result = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, ipDstCumEntropy FROM interval_statistics ORDER BY lastPktTimestamp")
- graphx, graphy = [], []
- for row in result:
- graphx.append(row[0])
- graphy.append(row[1])
- # If entropy was not calculated do not plot the graph
- if graphy[0] != -1:
- plt.autoscale(enable=True, axis='both')
- plt.title("Destination IP Cumulative Entropy")
- # plt.xlabel('Timestamp')
- plt.xlabel('Time Interval')
- plt.ylabel('Entropy')
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- # timestamp on x-axis
- x = range(0, len(graphx))
- # my_xticks = graphx
- # plt.xticks(x, my_xticks, rotation='vertical', fontsize=5)
- # plt.tight_layout()
- # limit the number of xticks
- plt.locator_params(axis='x', nbins=20)
- plt.plot(x, graphy, 'r')
- out = self.pcap_filepath.replace('.pcap', '_plot-interval-ip-dst-cum-ent' + file_ending)
- plt.savefig(out, dpi=500)
- return out
- def plot_interval_ip_src_cum_ent(file_ending: str):
- plt.gcf().clear()
- result = self.stats_db._process_user_defined_query(
- "SELECT lastPktTimestamp, ipSrcCumEntropy FROM interval_statistics ORDER BY lastPktTimestamp")
- graphx, graphy = [], []
- for row in result:
- graphx.append(row[0])
- graphy.append(row[1])
- # If entropy was not calculated do not plot the graph
- if graphy[0] != -1:
- plt.autoscale(enable=True, axis='both')
- plt.title("Source IP Cumulative Entropy")
- # plt.xlabel('Timestamp')
- plt.xlabel('Time Interval')
- plt.ylabel('Entropy')
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- # timestamp on x-axis
- x = range(0, len(graphx))
- # my_xticks = graphx
- # plt.xticks(x, my_xticks, rotation='vertical', fontsize=5)
- # plt.tight_layout()
- # limit the number of xticks
- plt.locator_params(axis='x', nbins=20)
- plt.plot(x, graphy, 'r')
- out = self.pcap_filepath.replace('.pcap', '_plot-interval-ip-src-cum-ent' + file_ending)
- plt.savefig(out, dpi=500)
- return out
- def plot_packets_per_connection(file_ending: str):
- plt.gcf().clear()
- result = self.stats_db._process_user_defined_query(
- "SELECT ipAddressA, portA, ipAddressB, portB, pktsCount FROM conv_statistics_stateless")
- if result:
- graphy, graphx = [], []
- # plot data in descending order
- result = sorted(result, key=lambda row: row[4])
- # compute plot data
- for i, row in enumerate(result):
- addr1, addr2 = "%s:%d" % (row[0], row[1]), "%s:%d" % (row[2], row[3])
- # adjust the justification of strings to improve appearance
- len_max = max(len(addr1), len(addr2))
- addr1 = addr1.ljust(len_max)
- addr2 = addr2.ljust(len_max)
- # add plot data
- graphy.append("%s\n%s" % (addr1, addr2))
- graphx.append(row[4])
- # compute plot height in inches
- dist_mult_height, dist_mult_width = 0.55, 0.07 # these values turned out to work well
- plt_height, plt_width = len(graphy) * dist_mult_height, max(graphx) * dist_mult_width
- title_distance = 1 + 0.012*52.8/plt_height # originally, a good title distance turned out to be 1.012 with a plot height of 52.8
- # have x axis and its label appear at the top (instead of bottom)
- fig, ax = plt.subplots()
- ax.xaxis.tick_top()
- ax.xaxis.set_label_position("top")
- # set additional plot parameters
- plt.title("Sent packets per connection", y=title_distance)
- plt.xlabel('Number of Packets')
- plt.ylabel('Connection')
- width = 0.5
- plt.grid(True)
- plt.gca().margins(y=0) # removes the space between data and x-axis within the plot
- plt.gcf().set_size_inches(plt_width, plt_height) # set plot size
- # plot the above data, first use plain numbers as graphy to maintain sorting
- plt.barh(range(len(graphy)), graphx, width, align='center', linewidth=1, color='red', edgecolor='red')
- # now change the y numbers to the respective address labels
- plt.yticks(range(len(graphy)), graphy)
- # use tight layout to cut off unnecessary space
- plt.tight_layout(pad=4)
- # save created figure
- out = self.pcap_filepath.replace('.pcap', '_plot-connection' + file_ending)
- plt.savefig(out, dpi=500)
- return out
- else:
- print("Error plot protocol: No protocol values found!")
- def plot_out_degree(file_ending: str):
- plt.gcf().clear()
- out_degree = self.get_out_degree()
- #print("")
- #print("#############in plot_out_degree###########")
- #print(out_degree)
- graphx, graphy = [], []
- for entry in out_degree:
- graphx.append(entry[0])
- graphy.append(entry[1])
- plt.autoscale(enable=True, axis='both')
- plt.title("Outdegree")
- plt.xlabel('IpAddress')
- plt.ylabel('Outdegree')
- width = 0.1
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- x = range(0,len(graphx))
- my_xticks = graphx
- plt.xticks(x, my_xticks)
- plt.bar(x, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_out_degree' + file_ending)
- plt.savefig(out,dpi=500)
- return out
- def plot_in_degree(file_ending: str):
- plt.gcf().clear()
- in_degree = self.get_in_degree()
- graphx, graphy = [], []
- for entry in in_degree:
- graphx.append(entry[0])
- graphy.append(entry[1])
- plt.autoscale(enable=True, axis='both')
- plt.title("Indegree")
- plt.xlabel('IpAddress')
- plt.ylabel('Indegree')
- width = 0.1
- plt.xlim([0, len(graphx)])
- plt.grid(True)
- x = range(0,len(graphx))
- my_xticks = graphx
- plt.xticks(x, my_xticks)
- plt.bar(x, graphy, width, align='center', linewidth=1, color='red', edgecolor='red')
- out = self.pcap_filepath.replace('.pcap', '_in_degree' + file_ending)
- plt.savefig(out,dpi=500)
- return out
- ttl_out_path = plot_ttl('.' + format)
- mss_out_path = plot_mss('.' + format)
- win_out_path = plot_win('.' + format)
- protocol_out_path = plot_protocol('.' + format)
- plot_interval_pktCount_path = plot_interval_pktCount('.' + format)
- plot_interval_ip_src_ent_path = plot_interval_ip_src_ent('.' + format)
- plot_interval_ip_dst_ent_path = plot_interval_ip_dst_ent('.' + format)
- plot_interval_ip_src_cum_ent_path = plot_interval_ip_src_cum_ent('.' + format)
- plot_interval_ip_dst_cum_ent_path = plot_interval_ip_dst_cum_ent('.' + format)
- plot_interval_new_ip_path = plot_interval_new_ip('.' + format)
- plot_interval_new_port_path = plot_interval_new_port('.' + format)
- plot_interval_new_ttl_path = plot_interval_new_ttl('.' + format)
- plot_interval_new_tos_path = plot_interval_new_tos('.' + format)
- plot_interval_new_win_size_path = plot_interval_new_win_size('.' + format)
- plot_interval_new_mss_path = plot_interval_new_mss('.' + format)
- plot_packets_per_connection_out = plot_packets_per_connection('.' + format)
- plot_out_degree_path = plot_out_degree('.' + format)
- plot_in_degree_path = plot_in_degree('.' + format)
- ## Time consuming plot
- # port_out_path = plot_port('.' + format)
- ## Not drawable for too many IPs
- # ip_src_out_path = plot_ip_src('.' + format)
- # ip_dst_out_path = plot_ip_dst('.' + format)
- print("Saved plots in the input PCAP directory.")
- print("In-/Out-/Overall-degree plots not fully finished yet")
|