|
@@ -0,0 +1,95 @@
|
|
|
+#!/usr/bin/python3
|
|
|
+
|
|
|
+###############################################################################
|
|
|
+##
|
|
|
+## Functions for downloading and parsing Debian Security Advisories (DSAs)
|
|
|
+##
|
|
|
+###############################################################################
|
|
|
+
|
|
|
+
|
|
|
+import re
|
|
|
+import datetime
|
|
|
+import urllib.request
|
|
|
+import logging, sys
|
|
|
+
|
|
|
+logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
|
|
|
+
|
|
|
+## Fetch DSA from debian archive. Can't use tracker since dates are missing.
|
|
|
+## DSA started counting in November 2000. We'll simply bruteforce which DSA
|
|
|
+## was in which year and start in 2000 til current.
|
|
|
## Fetch DSA from debian archive. Can't use tracker since dates are missing.
## DSA started counting in November 2000. We'll simply bruteforce which DSA
## was in which year and start in 2000 til current.
def fetchDSA(dsa_id, base_url):
    """Fetch a DSA page from the Debian yearly archive and return raw bytes.

    The year directory for a given advisory id is only approximately known,
    so we start at the first plausible year for the id and walk forward on
    HTTP errors until the current year.

    :param dsa_id: numeric advisory id (e.g. 3200 for DSA-3200)
    :param base_url: archive base URL, e.g. 'https://www.debian.org/security/'
    :return: raw page content as bytes, or None if no year up to the current
             one served the advisory.
    """
    logging.info('Fetching DSA-%d records', dsa_id)

    # First advisory id published in each year (2001 onwards); anything
    # below id 11 dates from 2000.
    year_starts = (
        (3431, 2016), (3118, 2015), (2832, 2014), (2597, 2013),
        (2377, 2012), (2140, 2011), (1965, 2010), (1694, 2009),
        (1443, 2008), (1245, 2007), (929, 2006), (622, 2005),
        (406, 2004), (220, 2003), (96, 2002), (11, 2001),
    )
    year = 2000
    for first_id, start_year in year_starts:
        if dsa_id >= first_id:
            year = start_year
            break

    current_year = datetime.datetime.now().year
    dsa_id2string = '%03d' % dsa_id

    # The id->year table is approximate: on a fetch error, retry the
    # following year until we run out of candidate years.
    while year <= current_year:
        url = base_url + str(year) + '/dsa-' + dsa_id2string
        try:
            logging.info('Opening url: %s', url)
            return urllib.request.urlopen(url).read()
        except urllib.error.HTTPError:
            year += 1
    # Explicitly signal "not found" instead of falling off the end.
    return None
|
|
|
+###############################################################################
|
|
|
+
|
|
|
+## Try to find new DSAs by iteration, return table of DSAs to process
|
|
|
## Try to find new DSAs by iteration, return table of DSAs to process
def checkDSAs(state, config):
    """Iterate forward from the last known advisory and collect new DSAs.

    NOTE(review): the original source was truncated mid-function (unclosed
    call and a bodiless ``while dsa``); the loop below is a minimal
    completion matching the stated intent — confirm against upstream.

    :param state: dict holding progress; reads 'next_adv' (next advisory id).
    :param config: dict with 'first_dsa' (lowest id to consider) and
                   'dsa_base_url' (archive base URL for fetchDSA).
    :return: list of (dsa_id, raw_bytes) tuples for newly found advisories.
    """
    next_dsa = state['next_adv']  # state implemented as dictionary

    logging.info('Checking for new DSAs..')

    # If the cache was lost, restart from the configured first advisory.
    if next_dsa < config['first_dsa']:
        logging.debug('Cache was deleted, starting at DSA %d', next_dsa)
        next_dsa = config['first_dsa']

    # Skip ids that are explicitly blacklisted (helper defined elsewhere).
    if blacklistedDSA('DSA-' + str(next_dsa)):
        next_dsa += 1

    dsas = []
    dsa = fetchDSA(next_dsa, config['dsa_base_url'])
    while dsa:
        dsas.append((next_dsa, dsa))
        next_dsa += 1
        if blacklistedDSA('DSA-' + str(next_dsa)):
            next_dsa += 1
        dsa = fetchDSA(next_dsa, config['dsa_base_url'])
    return dsas
|
|
|
+
|
|
|
+
|
|
|
if __name__ == '__main__':
    # Smoke test: fetch DSA-3200 from the Debian security archive.
    # Guarded so importing this module does not trigger a network request.
    fetchDSA(3200, 'https://www.debian.org/security/')
|