|
@@ -34,29 +34,40 @@ class bc(object):
"""
Init defaults
"""
-self.operating_system = '' #The operating system being used
+# Global variables organised by the function in which they first occur.
+
+# check_browser():
+self.operating_system = '' # the operating system being used: either darwin or linux
self.browser = "" # "F" Firefox / "C" Chrome
self.browser_path = "" # the path to the browser application
self.browser_history_path = "" # the path to the browser history file
self.browser_version = "" # the version of the browser
-self.url = ""
-self.old_url = ""
-self.destination_ip = "" #the final destination of a trace
-self.content = '' # the un-parsed results of a traceroute
-self.hop_ip = "" #the ip of servers on the route
-self.attempts = 0 # the number of attempts at a traceroute
-self.longitude = ""
-self.method = '-e' # the tracing method, -e to use TCP packets, -u for UDP packets
-self.latitude = ""
-self.hop_host_name = "" #hostname of servers on the route
-self.city = ""
-self.country = ""
-self.server_name = ""
-self.hop_count = 1 # number of hops
-self.timestamp = "" #the time it took to go to a hop in miliseconds.
-self.asn = '' #ASN number of a server
+
+# lft():
+self.method = '-e' # the tracing method, -e = TCP, -u = UDP
+self.content = '' # the un-parsed results of a traceroute
+self.attempts = 0 # the number of attempts at a traceroute
+self.timestamp = '' # the time it took to reach a hop, in milliseconds
+
+# traces():
+self.url = "" # the last visited url from the history file, type is tuple
+self.old_url = "" # the second-to-last url from the history file
+self.destination_ip = "" # the ip address of self.url
+self.hop_ip = "" # the ip of the server/router at a hop
+self.hop_count = 1 # number of the current hop in a trace
+
+# these variables are all the result of MaxMind DB lookups
+self.longitude = "" # the longitude that corresponds to an ip, per the MaxMind DB
+self.latitude = "" # idem, for latitude
+self.asn = '' # ASN number of a server
+self.hop_host_name = "" # hostname of the server/router at a hop
+self.city = "" # city of the hop ip
+self.country = "" # country of the hop ip
+self.server_name = "" # same as self.hop_host_name; perhaps good to clean this up
self.result_list = [] # list to collect all the variables of a trace
self.vardict = {} # dict to store all the variables of a hop
if os.path.exists('data.xml'): # removing xml data so a new map is generated each time bc is launched
os.remove('data.xml')
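
Note: self.method is the flag later handed to the external lft binary. A minimal sketch of such a call, with a hypothetical helper name; the exact invocation and output parsing inside bc's lft() may differ:

    import subprocess

    def run_lft(target_ip, method='-e'):
        # method: '-e' for TCP probes, '-u' for UDP probes, as documented above
        proc = subprocess.Popen(['lft', method, target_ip],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = proc.communicate()  # blocks until the trace finishes
        return out                     # raw text, analogous to self.content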
|
@@ -119,7 +130,7 @@ class bc(object):
if os.path.exists(f_his_osx):
if len(os.listdir(f_his_osx)) > 2:
print 'You have multiple profiles, choosing the last one used'
-#filtering the directory that was last modified
+#filter to use the directory that was last modified.
all_subdirs = [os.path.join(f_his_osx, d) for d in os.listdir(f_his_osx)]
try:
all_subdirs.remove(os.path.join(f_his_osx, '.DS_Store')) #throwing out .DS_Store
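
Once .DS_Store is thrown out, "the directory that was last modified" can be picked in one line. A sketch of the idea, reusing the all_subdirs list built above (os is already imported by bc; bc's own selection line is not shown in this hunk):

    last_used_profile = max(all_subdirs, key=os.path.getmtime)  # most recently modified profile dir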
|
@@ -201,7 +212,8 @@ class bc(object):
"""
Set urls to visit
"""
-if self.browser == "F": #Firefox history database
+if self.browser == "F":
+#sqlite operation to get the last visited url from the Firefox history db
conn = sqlite3.connect(self.browser_history_path)
c = conn.cursor()
c.execute('select url, last_visit_date from moz_places ORDER BY last_visit_date DESC')
|
@@ -210,7 +222,7 @@ class bc(object):
elif self.browser == "C" or self.browser == "CHROMIUM": #Chrome/Chromium history database
#Hack that makes a copy of the locked database to access it while Chrome is running.
#Removes the copied database afterwards.
-import filecmp
+import filecmp # is this a standard module?
a = self.browser_history_path + 'Copy'
if os.path.exists(a):
if filecmp.cmp(self.browser_history_path, a) == False:
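
The copy-the-locked-database hack can be sketched as follows. The urls table and last_visit_time column are Chrome's standard History schema, but the helper name and query here are illustrative, not bc's actual code:

    import os, shutil, sqlite3

    def last_chrome_url(history_path):
        # Copy the locked History db, read the newest url, then delete the copy.
        tmp = history_path + 'Copy'
        shutil.copy2(history_path, tmp)    # Chrome keeps the original locked
        conn = sqlite3.connect(tmp)
        c = conn.cursor()
        c.execute('SELECT url FROM urls ORDER BY last_visit_time DESC LIMIT 1')
        row = c.fetchone()
        conn.close()
        os.remove(tmp)                     # clean up the temporary copy
        return row[0] if row else None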
|
@@ -229,13 +241,13 @@ class bc(object):
try:
from biplist import readPlist
except:
-print "\nError importing: biplist lib. \n\nTo run BC with Safari you need the biplist Python Library:\n\n $ pip install biplist\n"
+print "\nError importing: biplist lib. \n\nTo run BC with Safari you need the biplist Python library:\n\n $ pip install biplist\n"

plist = readPlist(self.browser_history_path)
url = [plist['WebHistoryDates'][0][''], '']

else: # Browser not allowed
-print "\nSorry, don't have a compatible browser\n\n"
+print "\nSorry, you don't have a compatible browser\n\n"
exit(2)

self.url = url
|
@@ -312,9 +324,9 @@ class bc(object):
def traces(self):
'''
-Use LFT to traceroute objetives and pass data to webserver
+Call LFT to traceroute the target and pass the data to the webserver
'''
-# Set database (GeoLiteCity)
+# Set the MaxMind geo databases
self.geoip = pygeoip.GeoIP('GeoLiteCity.dat')
self.geoasn = pygeoip.GeoIP('GeoIPASNum.dat')
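
For reference, these two pygeoip handles are typically queried per hop roughly like this. A sketch with a placeholder IP; bc's own lookups appear further down in traces():

    import pygeoip

    geoip = pygeoip.GeoIP('GeoLiteCity.dat')
    geoasn = pygeoip.GeoIP('GeoIPASNum.dat')

    record = geoip.record_by_addr('8.8.8.8')   # dict, or None for unknown IPs
    if record:
        city = record.get('city')
        country = record.get('country_name')
        latitude = record.get('latitude')
        longitude = record.get('longitude')
    asn = geoasn.org_by_addr('8.8.8.8')        # e.g. 'AS15169 Google Inc.'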
|
|
|
|
|
|
|
@@ -324,6 +336,7 @@ class bc(object):
url = urlparse(self.getURL()).netloc #changed this for prototyping
#url = url.replace('www.','') #--> doing a tracert to example.com and www.example.com yields different results.
url_ip = socket.gethostbyname(url)
+self.url = url
self.destination_ip = url_ip
print "Host:", url, "\n"
if url != self.old_url:
|
@@ -380,10 +393,10 @@ class bc(object):
self.city = '-'
self.server_name = self.hop_host_name
#self.hop_count+=1
-self.vardict = {'destination_ip': self.destination_ip, 'hop_count': self.hop_count,'hop_ip': self.hop_ip, 'server_name': self.server_name, 'country': self.country, 'city': self.city, 'longitude': self.longitude, 'latitude': self.latitude, 'asn' : self.asn, 'timestamp' : self.timestamp }
+self.vardict = {'url': self.url, 'destination_ip': self.destination_ip, 'hop_count': self.hop_count, 'hop_ip': self.hop_ip, 'server_name': self.server_name, 'country': self.country, 'city': self.city, 'longitude': self.longitude, 'latitude': self.latitude, 'asn': self.asn, 'timestamp': self.timestamp}
except:
print "Trace:", self.hop_count, "->", "Not allowed"
-self.vardict = {'destination_ip': self.destination_ip, 'hop_count': self.hop_count,'hop_ip': self.hop_ip, 'server_name': self.server_name, 'country': '-', 'city': '-', 'longitude': '-', 'latitude': '-', 'asn' : self.asn, 'timestamp' : self.timestamp }
+self.vardict = {'url': self.url, 'destination_ip': self.destination_ip, 'hop_count': self.hop_count, 'hop_ip': self.hop_ip, 'server_name': self.server_name, 'country': '-', 'city': '-', 'longitude': '-', 'latitude': '-', 'asn': self.asn, 'timestamp': self.timestamp}

self.hop_count += 1
# write xml data to file
|
@@ -407,6 +420,7 @@ class bc(object):
"""
maxmind = 'http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz'
geo_db_mirror1 = 'http://xsser.sf.net/map/GeoLiteCity.dat.gz'

print "="*45 + "\n", "GeoIP Options:\n" + '='*45 + "\n"
# Download, extract and set the geoip database
if not os.path.exists('GeoLiteCity.dat'):
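
A minimal sketch of the download-and-extract step that follows this check, in the same Python 2 style as the rest of bc (the real code also defines geo_db_mirror1, presumably as a fallback source):

    import gzip, urllib

    urllib.urlretrieve(maxmind, 'GeoLiteCity.dat.gz')  # Python 2 API; fetch the gzipped DB
    gz = gzip.open('GeoLiteCity.dat.gz', 'rb')
    out = open('GeoLiteCity.dat', 'wb')
    out.write(gz.read())                               # write the extracted .dat next to bc
    out.close()
    gz.close()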
|
@@ -480,7 +494,11 @@ class bc(object):
geo = self.try_running(self.getGEO, "\nInternal error setting geoIP database.")

# run traceroutes
match_ip = self.url[0].strip('http://').strip(':8080')
+#regex for filtering local network IPs
-if re.match(r'^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$', match_ip) or re.match(r'^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$', match_ip) or re.match(r'^192.168\.\d{1,3}$', match_ip) or re.match(r'^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$', match_ip):
+if re.match(r'^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$', match_ip) or re.match(r'^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$', match_ip) or re.match(r'^192.168\.\d{1,3}$', match_ip) or re.match(r'^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$', match_ip) or match_ip.startswith('file://'):
pass
else:
+if self.url[0].startswith('file://'):
+pass
+else:
traces = self.try_running(self.traces, "\nInternal error tracerouting.")
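
The chained re.match calls above are a loopback/RFC 1918 filter. A consolidated version is sketched below; this is not the code bc uses, and note that the original 192.168 pattern lacks a fourth octet and leaves some dots unescaped:

    import re

    _LOCAL_RE = re.compile(
        r'^(127\.\d{1,3}\.\d{1,3}\.\d{1,3}'                   # loopback
        r'|10\.\d{1,3}\.\d{1,3}\.\d{1,3}'                     # 10.0.0.0/8
        r'|192\.168\.\d{1,3}\.\d{1,3}'                        # 192.168.0.0/16
        r'|172\.(1[6-9]|2[0-9]|3[01])\.\d{1,3}\.\d{1,3})$'    # 172.16.0.0/12
    )

    def is_local(ip):
        # True for addresses bc should skip instead of tracerouting
        return bool(_LOCAL_RE.match(ip))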
|
@@ -508,12 +526,15 @@ class bc(object):
if url != self.old_url:
if re.match(r'^127\.\d{1,3}\.\d{1,3}\.\d{1,3}$', match_ip) or re.match(r'^10\.\d{1,3}\.\d{1,3}\.\d{1,3}$', match_ip) or re.match(r'^192.168\.\d{1,3}$', match_ip) or re.match(r'^172.(1[6-9]|2[0-9]|3[0-1]).[0-9]{1,3}.[0-9]{1,3}$', match_ip):
pass
else:
+if self.url[0].startswith('file://'):
+pass
+else:
if os.path.exists('data.xml'): # removing xml data so a new map is generated each time bc is launched
os.remove('data.xml')
open('data.xml', 'w') # starting a new xml data container in write mode
traces = self.try_running(self.traces, "\nInternal error tracerouting.")
time.sleep(2)


if __name__ == "__main__":
app = bc()
|
|