0x01:
Hello, my name is XiaoTouM1ng, and I am a hacker.
My blog is: www.f4ckweb.top
0x02:
Since it is a passive scanner, it needs a proxy in front of it, and there are two ways to build one:
- on Burp
- on Python
Of course, I chose Python, together with the Python library mitmproxy.
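Before the real code, here is a minimal sketch of the mitmproxy addon model the scanner relies on (assuming the mitmproxy 4.x API; the class name LogUrls is made up for illustration). An addon is just a class whose request/response methods mitmproxy calls for every flow that passes through the proxy:
from mitmproxy import http

class LogUrls:
    def request(self, flow: http.HTTPFlow):
        # runs for every client request going through the proxy
        print(flow.request.method, flow.request.url)

    def response(self, flow: http.HTTPFlow):
        # runs once the server response has arrived
        print(flow.response.status_code, flow.request.url)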
First comes the main class; it is used to load the initial options:
# mitmproxy imports (this code matches the mitmproxy 4.x API)
from mitmproxy import options
from mitmproxy.master import Master
from mitmproxy.proxy import config
from mitmproxy.proxy.server import ProxyServer

class ProxyStart():
    def __init__(self, option, obj):
        # build the option set, the proxy config and the master,
        # then register our addon (obj) on the master
        self.opts = options.Options()
        for i in option:
            self.__addOption(i)
        self.cf = config.ProxyConfig(self.opts)
        self.server = ProxyServer(config=self.cf)
        self.master = Master(opts=self.opts)
        self.master.server = self.server
        self.master.addons.add(obj)

    def __addOption(self, *args):
        # args[0] is a (name, type, default, help) tuple
        self.opts.add_option(args[0][0], args[0][1], args[0][2], args[0][3])

    def run(self):
        self.master.run()
Then there is a helper function, because calling it all directly does not look very elegant:
def server_run():
    # (name, type, default, help) tuples for the proxy options
    conf = [('listen_host', str, '127.0.0.1', 'this is host'),
            ('listen_port', int, 8080, 'this is port'),
            ('mode', str, 'regular', 'this is mode'),
            ('body_size_limit', int, 100000, 'this is response size')]
    start = ProxyStart(conf, filterRq())
    start.run()
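Assuming everything above sits in one script, a minimal way to launch it could look like this (hypothetical entry point); then point the browser's proxy at 127.0.0.1:8080 so the traffic flows through mitmproxy:
if __name__ == '__main__':
    print('passive scanner listening on 127.0.0.1:8080 ...')
    server_run()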
And the filter logic that processes each flow:
class getHttp():
    def __init__(self, f):
        # config is the author's own settings module; SqlMapApi is defined further down
        self.flow = f
        self.header = dict()
        if self.flow.response is None:
            # called from the request hook, no response yet, nothing to filter on
            return
        if str(self.flow.response.headers.get('Content-Type', '')).split(';')[0] not in config.ContentType:
            self.header['method'] = self.__getMethod()
            self.header['url'] = self.__getUrl()
            self.header['Referer'] = self.__getReferer()
            self.header['cookie'] = self.__getCookie()
            self.header['Accept'] = self.__getAccept()
            self.header['data'] = self.__getData()
            self.header['Content-Type'] = self.__getContentType()
            res = SqlMapApi(config.sqlmapapi_url, self.header['url'], self.header['cookie'],
                            self.header['Referer'], self.header['data'])
            taskid = res.getTaskId()
            if str(self.header['method']).upper() == 'GET':
                res.startScan_G(taskid)
            else:
                res.startScan_P(taskid)
            print(self.header)

    def __getMethod(self):
        return self.flow.request.method

    def __getUrl(self):
        return self.flow.request.url

    def __getReferer(self):
        if 'Referer' in self.flow.request.headers:
            return self.flow.request.headers['Referer']
        else:
            return ''

    def __getCookie(self):
        # return the Cookie header if present
        if 'Cookie' in self.flow.request.headers:
            return self.flow.request.headers['Cookie']
        else:
            return ''

    def __getData(self):
        # keep the request body only for non-GET requests
        if str(self.flow.request.method).upper() != 'GET':
            return bytes(self.flow.request.content).decode('utf-8')
        else:
            return ''

    def __getAccept(self):
        return self.flow.request.headers.get('Accept', '')

    def __getContentType(self):
        return self.flow.response.headers.get('Content-Type', '')
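The config module used above is not shown in the post. A plausible minimal sketch, assuming ContentType is a skip-list of response types that are not worth scanning and sqlmapapi_url is the host:port where sqlmapapi is listening (8775 is its default port):
# config.py (illustrative values only)
ContentType = ['image/png', 'image/jpeg', 'image/gif',
               'text/css', 'application/javascript']   # response types to skip
sqlmapapi_url = '127.0.0.1:8775'                        # sqlmapapi default listen address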
It is called from the filter class:
class filterRq():
    # mitmproxy addon hooks; getHttp ignores flows that have no response yet,
    # so scans are only triggered once the response is available
    def request(self, f):
        getHttp(f)

    def response(self, f):
        getHttp(f)
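As an alternative to ProxyStart (not the approach used above, just an option), the same addon can also be run with mitmproxy's own command line tool by exposing it through an addons list in a script:
# scanner.py (hypothetical file name)
addons = [filterRq()]
# run with: mitmdump -s scanner.py --listen-host 127.0.0.1 --listen-port 8080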
0x03:
Scanning: the request is sent to sqlmapapi:
import json
import requests

class SqlMapApi:
    def __init__(self, sqlurl, url, cookie, referer, data=''):
        self.sqlurl = str(sqlurl)   # host:port of the sqlmapapi server
        self.url = str(url)
        self.cookie = str(cookie)
        self.referer = str(referer)
        self.data = str(data)

    def getTaskId(self):
        # create a new scan task and return its id
        taskid = requests.get(url='http://' + self.sqlurl + '/task/new')
        taskid = json.loads(taskid.text)
        return str(taskid['taskid'])

    def startScan_P(self, taskid):
        # start a scan for a POST request (url, body data, referer, cookie)
        payload = {"url": self.url, "data": self.data,
                   "referer": self.referer, "cookie": self.cookie}
        start = requests.post(url='http://' + self.sqlurl + '/scan/' + taskid + '/start',
                              data=json.dumps(payload),
                              headers={"Content-Type": "application/json"})
        return json.loads(start.text)["success"]

    def startScan_G(self, taskid):
        # start a scan for a GET request (no body data)
        payload = {"url": self.url, "referer": self.referer, "cookie": self.cookie}
        start = requests.post(url='http://' + self.sqlurl + '/scan/' + taskid + '/start',
                              data=json.dumps(payload), timeout=5,
                              headers={"Content-Type": "application/json"})
        return json.loads(start.text)["success"]

    def getStatus(self, taskid):
        r = requests.get(url='http://' + self.sqlurl + '/scan/' + taskid + '/status')
        if json.loads(r.text)['success'] == True:
            if json.loads(r.text)['status'] != 'running':
                return True
            else:
                return 'running'
        else:
            return False

    def getData(self, taskid):
        data = json.loads(requests.get(url='http://' + self.sqlurl + '/scan/' + taskid + '/data').text)['data']
        if not data:
            # an empty list means nothing was found
            return False
        else:
            return data
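The proxy only starts scans; results still have to be collected afterwards. A small sketch of how getStatus and getData could be polled for that (the helper name wait_for_result is made up):
import time

def wait_for_result(api, taskid, interval=5):
    # poll the task until it leaves the 'running' state, then fetch its data
    while True:
        status = api.getStatus(taskid)
        if status is True:               # scan finished
            return api.getData(taskid)   # injection details, or False if nothing was found
        if status is False:              # the API reported an error
            return False
        time.sleep(interval)             # still running, wait and ask again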