import configparser
import json
import datetime
import logging
from pymongo import MongoClient
import numpy as np
import os
from dateutil import parser
from .DebianAdvisory import DebianAdvisory
from .CVEParse import CVEParse
from ..VendorModel import VendorModel
from .CSVReader import CSVReader
from .Tests import Tests
from fixcwes import ret_roots


class DebianModel(VendorModel):
    """
    This class represents the M-Star Debian module. It is responsible for
    handling Debian package information.
    """
    module_path = os.path.dirname(__file__)

    def __init__(self, action, configfile=os.path.join(module_path, 'config_default.txt')):
        """TODO: Tables to manage."""
        ## DBs to track
        self.dsatable = dict()
        self.src2dsa = dict()
        self.dsa2cve = dict()
        self.cvetable = dict()
        self.src2month = dict()
        self.src2sloccount = dict()
        self.src2pop = dict()
        self.src2deps = dict()
        self.pkg_with_cvss = dict()
        self.src2sum = dict()
        self.src2monthDLA = dict()
        self.pkg_with_cvss_DLA = dict()
        self.src2cwe = dict()

        ## config
        self.configfile = configfile
        self.config = configparser.ConfigParser()
        if not self.config.read(configfile):
            raise IOError('Cannot open configuration file: ' + configfile)
        (self.state, self.err) = self.load_state()
        self.client = MongoClient()

        if action == 'update':
            self.load_dbs()
            self.update_dbs()
            self.store_dbs()
            self.save_state(self.state)
            # lstm.predict(src2month, src2sloccount, src2pop, src2deps)
  51. """
  52. with open('dsatable.txt', 'w') as file:
  53. file.write(str(sorted(self.dsatable.keys(), key=lambda x: str(x).lower())))
  54. with open('src2dsa.txt', 'w') as file:
  55. file.write(str(sorted(self.src2dsa.keys(), key=lambda x: str(x).lower())))
  56. with open('dsa2cve.txt', 'w') as file:
  57. file.write(str(sorted(self.dsa2cve.keys(), key=lambda x: str(x).lower())))
  58. with open('cvetable.txt', 'w') as file:
  59. file.write(str(sorted(self.cvetable.keys(), key=lambda x: str(x).lower())))
  60. with open('src2month.txt', 'w') as file:
  61. file.write(str(sorted(self.src2month.keys(), key=lambda x: str(x).lower())))
  62. with open('src2sloccount.txt', 'w') as file:
  63. file.write(str(sorted(self.src2sloccount.keys(), key=lambda x: str(x).lower())))
  64. with open('src2pop.txt', 'w') as file:
  65. file.write(str(sorted(self.src2pop.keys(), key=lambda x: str(x).lower())))
  66. with open('src2deps.txt', 'w') as file:
  67. file.write(str(sorted(self.src2deps.keys(), key=lambda x: str(x).lower())))
  68. """
        elif action == 'status':
            self.load_dbs()
            # aptsec_status(sys.argv[2])
        elif action == 'show':
            self.load_dbs()
            self.store_dbs()
        else:
            self.print_help()

    def get_src2month(self):
        return self.src2month

    def get_vendor_dir(self):
        return self.module_path

    def load_dbs(self):
        """
        Loads the required databases into the model. Can either be implemented as read from file or read from DB.
        Currently reading them from files in the cache folder.
        """
        self.dsatable = self.load_single_db_from_cache('dsatable')
        self.src2dsa = self.load_single_db_from_cache('src2dsa')
        self.dsa2cve = self.load_single_db_from_cache('dsa2cve')
        self.cvetable = self.load_single_db_from_cache('cvetable')
        self.src2deps = self.load_single_db_from_cache('src2deps')
        self.src2month = self.load_single_db_from_cache('src2month')
        self.src2sloccount = self.load_single_db_from_cache('src2sloccount')
        self.src2pop = self.load_single_db_from_cache('src2pop')
        self.pkg_with_cvss = self.load_single_db_from_cache('pkg_with_cvss')
        self.src2monthDLA = self.load_single_db_from_cache('src2monthDLA')
        self.pkg_with_cvss_DLA = self.load_single_db_from_cache('pkg_with_cvss_DLA')
        self.src2cwe = self.load_single_db_from_cache('src2cwe')

    def load_single_db_from_cache(self, file_name):
        cache_dir = os.path.join(self.module_path, self.config['DIR']['cache_dir'])
        try:
            with open(os.path.join(cache_dir, file_name)) as f:
                return json.load(f)
        except (IOError, ValueError):
            print('Reading cache ' + file_name + ' failed! Maybe this is the first run of the system?')
            return dict()
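
    # Cache layout note (inferred from load_dbs/store_dbs; the directory name below is
    # illustrative, the real one comes from config['DIR']['cache_dir']): every table is a
    # plain JSON file named after the dict it holds. A standalone check of one table
    # could therefore look like this sketch:
    #
    #     with open(os.path.join(module_path, 'cache', 'src2month')) as f:
    #         src2month = json.load(f)   # {source package: flat list of monthly counts}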

    def store_dbs(self):
        self.store_db_single('dsatable', self.dsatable)
        self.store_db_single('src2dsa', self.src2dsa)
        self.store_db_single('dsa2cve', self.dsa2cve)
        self.store_db_single('cvetable', self.cvetable)
        self.store_db_single('src2deps', self.src2deps)
        self.store_db_single('src2sloccount', self.src2sloccount)
        self.store_db_single('src2pop', self.src2pop)
        self.store_db_single('pkg_with_cvss', self.pkg_with_cvss)
        self.store_db_single('src2monthDLA', self.src2monthDLA)
        self.store_db_single('pkg_with_cvss_DLA', self.pkg_with_cvss_DLA)
        self.store_db_single('src2cwe', self.src2cwe)

        # src2month needs special handling: its values may be numpy scalars, so they are
        # converted to plain ints before serialization.
        # Check later if the same is true for other dicts.
        cache_src2month = os.path.join(self.module_path, self.config['DIR']['cache_dir'], 'src2month')
        int_list = dict()
        for element in self.src2month:
            for value in self.src2month[element]:
                int_list.setdefault(element, []).append(int(value))
        try:
            with open(cache_src2month, 'w') as fp:
                json.dump(int_list, fp, default=self.converter)
        except IOError:
            print('Write cache src2month failed! Fatal error.')

    def store_db_single(self, file_name, db):
        cache_dir = os.path.join(self.module_path, self.config['DIR']['cache_dir'])
        try:
            with open(os.path.join(cache_dir, file_name), 'w') as f:
                json.dump(db, f, default=self.converter)
        except (IOError, ValueError):
            print('Writing cache ' + file_name + ' failed!')

    def save_state(self, state):
        """Save state; different from the DBs in that we always need it."""
        state_file = os.path.join(self.module_path, self.config['DIR']['cache_dir'], 'state')
        try:
            with open(state_file, 'w') as sf:
                json.dump(state, sf)
        except IOError:
            print('Write cache state failed! Fatal error.')

    def converter(self, o):
        """JSON serialization helper for store_dbs and save_state."""
        if isinstance(o, (datetime.datetime, datetime.timedelta)):
            return str(o)
        if isinstance(o, np.floating):
            return int(o)
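
    # Usage sketch for the converter (illustrative values only): it lets json.dump
    # serialize the datetime and numpy scalars that end up in the cached tables, e.g.
    #
    #     json.dumps({'when': datetime.datetime(2018, 1, 1), 'count': np.float64(3.0)},
    #                default=model.converter)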

    def update_dbs(self):
        """Fetches new advisories and recomputes the per-package statistics."""
        now = datetime.datetime.now()
        new_adv = DebianAdvisory.checkDSAs(self.state, self.config)
        for adv_id in new_adv:
            if adv_id in self.dsatable:
                logging.info(self.state['vendor'] + ' advisory ' + adv_id + ' already known.\n')
            else:
                ## store advisory and parse it
                self.dsatable[adv_id] = new_adv[adv_id]
                self.updateCVETables(adv_id)

        # recompute all pkg statistics
        for srcpkg in self.src2dsa:
            self.processCVEs(srcpkg, now)

    def updateCVETables(self, myid):
        logging.info('Updating vulnerability database with advisory ' + self.state['vendor'] + str(myid) + ' \n')
        adv = self.dsatable[myid]
        dsastats = DebianAdvisory.parseDSAhtml(adv)
        dsastats = DebianAdvisory.fixDSAquirks(myid, dsastats)
        for srcpkg in dsastats[0]:
            if srcpkg in self.src2dsa:
                self.src2dsa[srcpkg].append(myid)
            else:
                self.src2dsa[srcpkg] = [myid]
        self.dsa2cve[str(myid)] = dsastats[2]
        for cve_id in dsastats[2]:
            # No need to fetch the CVE remotely; we use the local MongoDB populated by cve-search.
            cve = CVEParse.fetchCVE(cve_id, self.client)
            cvestats = CVEParse.parseCVE(cve_id, cve)
            finaldate = cvestats[0]
            if cvestats[0] > dsastats[1] or cvestats[0] == 0:
                finaldate = dsastats[1]
            self.cvetable[cve_id] = (finaldate, dsastats[1] - finaldate, cvestats[1], cvestats[2], cvestats[3], cvestats[4])
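
    # cvetable entry layout (as built above and read back in processCVEs):
    # cvetable[cve_id] = (disclosure date capped at the DSA date,
    #                     delay between that date and the DSA date,
    #                     CVSS score (index 2, parsed as float in processCVEs),
    #                     two further fields from CVEParse.parseCVE,
    #                     CWE identifier (index 5, fed to ret_roots in processCVEs))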

    def load_state(self):
        """
        Load state; different from the DBs in that we always need it.
        Retrieves the cached state for the current configuration.
        :return: state, error number
        """
        state_file = os.path.join(self.module_path, self.config['DIR']['cache_dir'], 'state')
        err = 0
        try:
            with open(state_file) as json_data:
                state = json.load(json_data)
        except FileNotFoundError:
            # Load default state - start from the beginning
            print('State file not found: ' + state_file)
            print('Loading default state.')
            state = dict()
            state['cache_dir'] = 'cache/'
            state['vendor'] = 'debian'
            state['next_adv'] = 0
            state['next_fsa'] = 0
            state['Packages'] = ''
            state['Sources'] = ''
            state['Sha1Sums'] = ''
            err += 1
        return state, err

    def processCVEs(self, srcpkg, now):
        """Compute per-package vulnerability statistics from its DSAs and CVEs."""
        stats = [now, 0, 0, 0, 0, 0, 0]
        cvestats = dict()
        logging.info('Processing package: ' + srcpkg + '.\n')

        ## keep track of the number of low-medium-high severity vulnerabilities
        ## TODO see how CVSS affects vulnerability prediction - if some packages show patterns
        with_cvss = dict()
        ## keep track of the vulnerability types
        with_cwe = dict()
        root_list = ['682', '118', '330', '435', '664', '691', '693', '697', '703', '707', '710']
        ## to eliminate duplicate CVEs
        haveseen = dict()

        ## cvestats = {date: number}
        for dsa_id in self.src2dsa[srcpkg]:
            for cve_id in self.dsa2cve[str(dsa_id)]:
                if cve_id in haveseen:
                    continue
                else:
                    haveseen[cve_id] = 1
                    tt = self.cvetable[cve_id][0]
                    if tt in cvestats:
                        cvestats[tt] += 1
                    else:
                        cvestats[tt] = 1
                    stats[1] += 1

        haveseen = dict()
        haveseen2 = dict()

        ## with_cvss = {date: [number low, number medium, number high, number undefined]}
        for dsa_id in self.src2dsa[srcpkg]:
            for cve_id in self.dsa2cve[str(dsa_id)]:
                tt = self.cvetable[cve_id][0]
                try:
                    temp_cvss = float(self.cvetable[cve_id][2])
                except TypeError:
                    print(cve_id)
                    continue
                if cve_id in haveseen:
                    continue
                else:
                    haveseen[cve_id] = 1
                    if tt not in with_cvss:
                        with_cvss[tt] = [0, 0, 0, 0]
                    if temp_cvss < 0.0:
                        with_cvss[tt][3] += 1
                    elif temp_cvss < 4.0:
                        with_cvss[tt][0] += 1
                    elif temp_cvss < 7.0:
                        with_cvss[tt][1] += 1
                    else:
                        with_cvss[tt][2] += 1

        ## with_cwe = {date: 12-slot vector, one slot per CWE root category plus one for unmapped CWEs}
        for dsa_id in self.src2dsa[srcpkg]:
            for cve_id in self.dsa2cve[str(dsa_id)]:
                tt = self.cvetable[cve_id][0]
                try:
                    temp_cwe = self.cvetable[cve_id][5]
                except TypeError:
                    print(cve_id)
                    continue
                if cve_id in haveseen2:
                    continue
                else:
                    haveseen2[cve_id] = 1
                    if tt in with_cwe:
                        for i in ret_roots(temp_cwe):
                            if i == 0:
                                with_cwe[tt][11] += 1
                                print('Problem with cve: ', cve_id)
                            for j in range(len(root_list)):
                                if i == root_list[j]:
                                    with_cwe[tt][j] += 1
                    else:
                        with_cwe[tt] = [0] * 12
                        for i in ret_roots(temp_cwe):
                            if i == 0:
                                with_cwe[tt][11] += 1
                                print('Problem with cve: ', cve_id)
                            for j in range(len(root_list)):
                                print('This is the with_cwe')  # debug output
                                if i == root_list[j]:
                                    with_cwe[tt][j] += 1
                        print('This is the with_cwe')  # debug output
                        print(with_cwe)

        # Ignore pkgs with less than one incident, should not happen..
        if stats[1] < 1:
            return

        dates = sorted(cvestats, key=cvestats.get)
        try:
            stats[0] = dates[0]
        except IndexError:
            print(srcpkg + str(dates))
            stats[0] = 0
        count = sum(cvestats.values())

        self.format_data(srcpkg, with_cvss, self.pkg_with_cvss, True)
        self.format_data(srcpkg, cvestats, self.src2month, False)
        self.format_cwes(srcpkg, with_cwe)

    #########################################################################
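
    # CVSS bucketing used in processCVEs above:
    #   score < 0.0 -> index 3 (undefined), score < 4.0 -> index 0 (low),
    #   score < 7.0 -> index 1 (medium),    otherwise   -> index 2 (high)
    # Worked example (illustrative): a CVE with CVSS 6.8 disclosed in March 2014
    # increments with_cvss[date][1], which format_data later folds into the flat
    # month index (2014 - 2000) * 12 + (3 - 1) = 170.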

    def format_cwes(self, srcpkg, with_cwe):
        x = []
        y = []
        monthyear = []
        year = []
        temp_items = list(with_cwe.items())
        items = []
        for cwe_dict in temp_items:
            if isinstance(cwe_dict[0], str):
                tmpx = parser.parse(cwe_dict[0])
            else:
                tmpx = cwe_dict[0]
            x.append(tmpx)
            tmpy = cwe_dict[1]
            y.append(tmpy)
            items.append((tmpx, tmpy))
        items.sort(key=lambda tup: tup[0])

        for i in range(2000, 2019):
            temp = []
            for j in range(12):
                temp.append([0] * 12)
            monthyear.append(temp)
        for i in range(len(x)):
            for j in range(len(y[i])):
                monthyear[x[i].year - 2000][x[i].month - 1][j] += y[i][j]
        months_list = [item for sublist in monthyear for item in sublist]
        self.src2cwe[srcpkg] = months_list
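
    # Resulting src2cwe layout: a flat list of 228 months (Jan 2000 - Dec 2018),
    # each month a 12-element vector with one counter per CWE root category in
    # root_list (indices 0-10) plus one for CWEs that could not be mapped (index 11).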

    def format_data(self, pkg, cvestats, src2temp, cvss):
        x = []
        y = []
        monthyear = []
        year = []
        temp_items = list(cvestats.items())
        items = []
        for data_dict in temp_items:
            if isinstance(data_dict[0], str):
                tmpx = parser.parse(data_dict[0])
            else:
                tmpx = data_dict[0]
            x.append(tmpx)
            try:
                tmpy = int(data_dict[1])
            except TypeError:
                tmpy = data_dict[1]
            y.append(tmpy)
            items.append((tmpx, tmpy))
        items.sort(key=lambda tup: tup[0])

        for i in range(2000, 2019):
            temp = []
            for j in range(12):
                if cvss:
                    temp.append([0, 0, 0, 0])
                else:
                    temp.append(0)
            monthyear.append(temp)
        for i in range(len(x)):
            if cvss:
                tmp0 = y[i][0]
                tmp1 = y[i][1]
                tmp2 = y[i][2]
                tmp3 = y[i][3]
                monthyear[x[i].year - 2000][x[i].month - 1][0] += tmp0
                monthyear[x[i].year - 2000][x[i].month - 1][1] += tmp1
                monthyear[x[i].year - 2000][x[i].month - 1][2] += tmp2
                monthyear[x[i].year - 2000][x[i].month - 1][3] += tmp3
            else:
                monthyear[x[i].year - 2000][x[i].month - 1] += y[i]
        months_list = [item for sublist in monthyear for item in sublist]

        if not cvss:
            temp_months = np.zeros(len(months_list))
            i = 0
            for element in months_list:
                temp_months[i] = np.float32(element)
                i += 1
            src2temp[pkg] = temp_months
        else:
            src2temp[pkg] = months_list
        return
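
    # format_data output: 228 monthly slots (Jan 2000 - Dec 2018). With cvss=False the
    # package maps to a numpy array of monthly CVE counts (src2month); with cvss=True it
    # maps to a list of [low, medium, high, undefined] quadruples per month (pkg_with_cvss).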

    def unifySrcName(self, name):
        return DebianAdvisory.unifySrcName(name)

    def performTests(self):
        # Tests.system_input_prediction_error_test(self)
        # Tests.random_input_prediction_error_test(self)
        Tests.relativity_of_expectations_test(self)

    def load_latest_prediction_model(self):
        return CSVReader.read_csv_prediction_errorcompl(os.path.join(self.module_path, 'models', 'latest_model.csv'), self, 9)

    def gen_model_opinion_set(self, filename, month, norm_param):
        """
        Generates an opinion set from the model input.
        :param filename: model (package:prediction:errorcompl:f)
        :param month: month parameter of the model
        :param norm_param: normalization factor of the model
        :return: dictionary of opinions
        """
        res = CSVReader.read_csv_prediction_errorcompl(filename, self, month, norm_param=norm_param)
        # with open('vendors/debian/models/dummy_model_' + str(month) + '.csv', 'w') as file:
        #     for key in res:
        #         file.write(key + ":" + str(res[key].t) + ":" + str(res[key].c) + ":" + str(res[key].f) + "\n")
        return res

    @staticmethod
    def print_help():
        """
        Prints the help message for this vendor model.
        """
        print('The Debian M-Star model only supports the update, status and show actions.')
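

# Minimal usage sketch (assumptions: the package context is set up so the relative
# imports resolve, a config_default.txt exists next to this module, and a local
# MongoDB instance populated by cve-search is running for CVEParse.fetchCVE):
#
#     model = DebianModel('update')
#     src2month = model.get_src2month()   # {source package: flat monthly CVE counts}
#     print(len(src2month), 'packages tracked')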