Python Website Back-End (Admin Page) Scanning Script
The script reads a dictionary file of common back-end paths (houtaiphp.txt), appends each entry to the target URL, and records every URL that answers with HTTP 200 or 301:

#!/usr/bin/python
# coding=utf-8
import sys
import urllib
import time

url = "http://123.207.123.228/"
txt = open(r"C:\Users\ww\Desktop\houtaiphp.txt", "r")
open_url = []
all_url = []

def search_url(url, txt):
    # Build a full URL for every candidate path in the dictionary file
    with open(r"C:\Users\ww\Desktop\houtaiphp.txt", "r") as f:
        for each in f:
            each = each.replace('\n', '')
            urllist = url + each
            all_url.append(urllist)
            print("Find: " + urllist + '\n')
            try:
                req = urllib.urlopen(urllist)
                # Treat 200 and 301 responses as an existing back-end page
                if req.getcode() == 200:
                    open_url.append(urllist)
                if req.getcode() == 301:
                    open_url.append(urllist)
            except:
                pass

def main():
    search_url(url, txt)
    if open_url:
        print("Back-end address:")
        for each in open_url:
            print("[+]" + each)
    else:
        print("No back-end address found")

if __name__ == "__main__":
    main()
The same logic, refactored so that the request and status-code check live in a separate handle_url() function:

#!/usr/bin/python
# coding=utf-8
import sys
import urllib
import time

url = "http://123.207.123.228/"
txt = open(r"C:\Users\ww\Desktop\houtaiphp.txt", "r")
open_url = []
all_url = []

def search_url(url, txt):
    # Build a full URL for every candidate path and hand it off for checking
    with open(r"C:\Users\ww\Desktop\houtaiphp.txt", "r") as f:
        for each in f:
            each = each.replace('\n', '')
            urllist = url + each
            all_url.append(urllist)
            handle_url(urllist)

def handle_url(urllist):
    print("Find: " + urllist + '\n')
    try:
        req = urllib.urlopen(urllist)
        # Treat 200 and 301 responses as an existing back-end page
        if req.getcode() == 200:
            open_url.append(urllist)
        if req.getcode() == 301:
            open_url.append(urllist)
    except:
        pass

def main():
    search_url(url, txt)
    if open_url:
        print("Back-end address:")
        for each in open_url:
            print("[+]" + each)
    else:
        print("No back-end address found")

if __name__ == "__main__":
    main()
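Note that the listings above target Python 2, where urllib.urlopen is available. Below is a minimal sketch of the same idea on Python 3 using only the standard-library urllib.request; the target URL and dictionary file name are placeholders to replace with your own, and a short timeout is assumed so dead paths do not hang the scan.

#!/usr/bin/env python3
# Rough Python 3 sketch of the same back-end scan (not the original author's code).
# The URL and dictionary path below are placeholder assumptions.
import urllib.request
import urllib.error

url = "http://123.207.123.228/"   # target site, taken from the script above
dict_file = "houtaiphp.txt"       # one candidate back-end path per line

open_url = []

def scan(base_url, path_file):
    with open(path_file, "r") as f:
        for line in f:
            candidate = base_url + line.strip()
            print("Find: " + candidate)
            try:
                # urlopen follows redirects, so a 301 to a live page shows up as 200 here
                with urllib.request.urlopen(candidate, timeout=5) as resp:
                    if resp.getcode() == 200:
                        open_url.append(candidate)
            except urllib.error.URLError:
                # 4xx/5xx and connection errors are simply skipped
                pass

if __name__ == "__main__":
    scan(url, dict_file)
    if open_url:
        print("Back-end address:")
        for each in open_url:
            print("[+]" + each)
    else:
        print("No back-end address found")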