Merge branch 'nico' adding more info to XML output

pull/29/head
lanjelot 9 years ago
commit d1dd6c49b0

@@ -692,14 +692,23 @@ class CSVFormatter(logging.Formatter):
 class XMLFormatter(logging.Formatter):
   def __init__(self, indicatorsfmt):
     fmt = '''<result time="%(asctime)s" level="%(levelname)s">
-''' + '\n'.join('  <{0}>%({0})s</{0}>'.format(name) for name, _ in indicatorsfmt) + '''
-  <candidate><![CDATA[%(candidate)s]]></candidate>
+''' + '\n'.join('  <{0}>%({1})s</{0}>'.format(name.replace(':', '_'), name) for name, _ in indicatorsfmt) + '''
+  <candidate>%(candidate)s</candidate>
   <num>%(num)s</num>
-  <mesg><![CDATA[%(mesg)s]]></mesg>
+  <mesg>%(mesg)s</mesg>
+  <target %(target)s/>
 </result>'''

     logging.Formatter.__init__(self, fmt, datefmt='%H:%M:%S')

+  def format(self, record):
+    for k, v in record.__dict__.iteritems():
+      if isinstance(v, basestring):
+        record.__dict__[k] = xmlescape(v)
+
+    return super(XMLFormatter, self).format(record)
+
 class MsgFilter(logging.Filter):

   def filter(self, record):
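
Note: the CDATA wrappers around candidate/mesg are replaced by escaping every string field in format(), and indicator names such as 'size:clen' are rewritten into valid XML element names. A minimal standalone sketch (not part of the patch) of what the new format string and xmlescape() produce:

    from xml.sax.saxutils import escape as xmlescape, quoteattr as xmlquoteattr

    # 'size:clen' is not a valid XML tag name, hence name.replace(':', '_') for the element
    # while the %(...)s placeholder keeps the original record attribute name
    indicatorsfmt = [('code', -4), ('size:clen', -13), ('time', 6)]
    fmt = '\n'.join('  <{0}>%({1})s</{0}>'.format(name.replace(':', '_'), name) for name, _ in indicatorsfmt)
    print(fmt % {'code': '200', 'size:clen': '1024:-1', 'time': '0.123'})

    # xmlescape() takes over from CDATA: markup found in responses gets encoded instead
    print(xmlescape('<title>401 Authorization Required</title> & more'))

Escaping each field also keeps the output well-formed when a response itself contains ']]>', which would otherwise terminate a CDATA section early.
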
@@ -712,7 +721,12 @@ def process_logs(pipe, indicatorsfmt, argv, log_dir):
   ignore_ctrlc()

-  logging._levelNames[logging.ERROR] = 'FAIL'
+  try:
+    # python3
+    logging._levelToName[logging.ERROR] = 'FAIL'
+  except:
+    # python2
+    logging._levelNames[logging.ERROR] = 'FAIL'

   handler_out = logging.StreamHandler()
   handler_out.setFormatter(TXTFormatter(indicatorsfmt))
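
Note: the try/except switches between the private logging._levelToName (Python 3) and logging._levelNames (Python 2) dicts to relabel ERROR as FAIL. The public logging.addLevelName() would have the same visible effect on both versions; a possible equivalent, shown only for comparison:

    import logging

    # same visible effect as the private-dict assignment above, on Python 2 and 3
    logging.addLevelName(logging.ERROR, 'FAIL')

    logging.basicConfig(format='%(levelname)s %(message)s')
    logging.error('0 password=letmein')   # -> FAIL 0 password=letmein
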
@@ -721,6 +735,8 @@ def process_logs(pipe, indicatorsfmt, argv, log_dir):
   logger.setLevel(logging.DEBUG)
   logger.addHandler(handler_out)

+  names = [name for name, _ in indicatorsfmt] + ['candidate', 'num', 'mesg']
+
   if log_dir:
     runtime_log = os.path.join(log_dir, 'RUNTIME.log')
     results_csv = os.path.join(log_dir, 'RESULTS.csv')
@@ -729,19 +745,44 @@ def process_logs(pipe, indicatorsfmt, argv, log_dir):
     with open(runtime_log, 'a') as f:
       f.write('$ %s\n' % ' '.join(argv))

-    names = [name for name, _ in indicatorsfmt] + ['candidate', 'num', 'mesg']
-
     if not os.path.exists(results_csv):
       with open(results_csv, 'w') as f:
         f.write('time,level,%s\n' % ','.join(names))

     if not os.path.exists(results_xml):
       with open(results_xml, 'w') as f:
-        f.write('<?xml version="1.0" ?>\n<results>\n')
+        f.write('<?xml version="1.0" encoding="UTF-8"?>\n<root>\n')
+        f.write('<start utc=%s local=%s/>\n' % (xmlquoteattr(strfutctime()), xmlquoteattr(strflocaltime())))
+        f.write('<cmdline>%s</cmdline>\n' % xmlescape(' '.join(argv)))
+        f.write('<module>%s</module>\n' % xmlescape(argv[0]))
+        f.write('<options>\n')
+
+        i = 0
+        del argv[0]
+        while i < len(argv):
+          arg = argv[i]
+          if arg[0] == '-':
+            if arg in ('-d', '--debug'):
+              f.write('  <option type="global" name=%s/>\n' % xmlquoteattr(arg))
+            else:
+              if not arg.startswith('--') and len(arg) > 2:
+                name, value = arg[:2], arg[2:]
+              elif '=' in arg:
+                name, value = arg.split('=', 1)
+              else:
+                name, value = arg, argv[i+1]
+                i += 1
+              f.write('  <option type="global" name=%s>%s</option>\n' % (xmlquoteattr(name), xmlescape(value)))
+          else:
+            name, value = arg.split('=', 1)
+            f.write('  <option type="module" name=%s>%s</option>\n' % (xmlquoteattr(name), xmlescape(value)))
+          i += 1
+
+        f.write('</options>\n')
+        f.write('<results>\n')

-    else: # remove "</results>\n"
+    else: # remove "</results>...</root>"
       with open(results_xml, 'r+') as f:
-        f.seek(-11, 2)
+        f.seek(f.read().find('</results>'))
         f.truncate(f.tell())

     handler_log = logging.FileHandler(runtime_log)
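
Note: RESULTS.xml becomes resumable. The first run writes the <root> header, the run metadata and an opening <results> tag; a later run re-opens the file, seeks back to the literal '</results>' and truncates there (dropping the old <stop/> and </root> as well) so new <result> records can simply be appended, and the closing tags are rewritten when the run quits. A rough standalone sketch of that lifecycle, using a hypothetical file path:

    import os

    path = '/tmp/RESULTS.xml'   # hypothetical location

    if not os.path.exists(path):
      with open(path, 'w') as f:
        f.write('<?xml version="1.0" encoding="UTF-8"?>\n<root>\n<results>\n')
    else:
      with open(path, 'r+') as f:
        f.seek(f.read().find('</results>'))   # drop everything from the closing tag onwards
        f.truncate(f.tell())

    with open(path, 'a') as f:
      f.write('<result><mesg>HTTP/1.1 200 OK</mesg></result>\n')
      f.write('</results>\n</root>\n')   # closing tags rewritten on every quit

Searching for '</results>' instead of seeking to a fixed -11 offset from the end is what allows the trailing <stop/> and </root> elements to exist at all.
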
@@ -766,13 +807,11 @@ def process_logs(pipe, indicatorsfmt, argv, log_dir):
     if action == 'quit':
       if log_dir:
         with open(os.path.join(log_dir, 'RESULTS.xml'), 'a') as f:
-          f.write('</results>\n')
+          f.write('</results>\n<stop utc=%s local=%s/>\n</root>\n' % (xmlquoteattr(strfutctime()), xmlquoteattr(strflocaltime())))
       break

     elif action == 'headers':

-      names = [name for name, _ in indicatorsfmt] + ['candidate', 'num', 'mesg']
-
       logger.info(' '*77)
       logger.info('headers', extra=dict((n, n) for n in names))
       logger.info('-'*77)
@@ -782,7 +821,7 @@ def process_logs(pipe, indicatorsfmt, argv, log_dir):
       typ, resp, candidate, num = args

       results = [(name, value) for (name, _), value in zip(indicatorsfmt, resp.indicators())]
-      results += [('candidate', candidate), ('num', num), ('mesg', resp)]
+      results += [('candidate', candidate), ('num', num), ('mesg', str(resp)), ('target', resp.str_target())]

       if typ == 'fail':
         logger.error(None, extra=dict(results))
@@ -810,7 +849,7 @@ def process_logs(pipe, indicatorsfmt, argv, log_dir):
 import re
 import os
 import sys
-from time import localtime, strftime, sleep, time
+from time import localtime, gmtime, strftime, sleep, time
 from platform import system
 from functools import reduce
 from select import select
@@ -828,17 +867,20 @@ from collections import defaultdict
 import multiprocessing
 import signal
 import ctypes
+from xml.sax.saxutils import escape as xmlescape, quoteattr as xmlquoteattr

 try:
   # python3+
   from queue import Empty, Full
   from urllib.parse import quote, urlencode, urlparse, urlunparse, parse_qsl, quote_plus
   from io import StringIO
+  from sys import maxsize as maxint
 except ImportError:
   # python2.6+
   from Queue import Empty, Full
   from urllib import quote, urlencode, quote_plus
   from urlparse import urlparse, urlunparse, parse_qsl
   from cStringIO import StringIO
+  from sys import maxint

 notfound = []
 try:
@@ -878,6 +920,12 @@ from multiprocessing.managers import SyncManager
 # imports }}}

 # utils {{{
+def strfutctime():
+  return strftime("%Y-%m-%d %H:%M:%S", gmtime())
+
+def strflocaltime():
+  return strftime("%Y-%m-%d %H:%M:%S %Z", localtime())
+
 def which(program):
   def is_exe(fpath):
     return os.path.exists(fpath) and os.access(fpath, os.X_OK)
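
Note: these two helpers feed the new <start/> and <stop/> elements: gmtime() gives a timezone-independent UTC stamp, localtime() a human-friendly one carrying the zone name. A small illustration of the values they produce and how they end up quoted as attributes:

    from time import gmtime, localtime, strftime
    from xml.sax.saxutils import quoteattr as xmlquoteattr

    utc = strftime("%Y-%m-%d %H:%M:%S", gmtime())          # e.g. 2016-02-27 14:03:07
    local = strftime("%Y-%m-%d %H:%M:%S %Z", localtime())  # e.g. 2016-02-27 15:03:07 CET
    print('<start utc=%s local=%s/>' % (xmlquoteattr(utc), xmlquoteattr(local)))
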
@@ -1089,7 +1137,7 @@ class RangeIter:

     if random:
       self.generator = random_generator, ()
-      self.size = sys.maxint
+      self.size = maxint

   def __iter__(self):
     fn, args = self.generator
@@ -1509,7 +1557,7 @@ Please read the README inside for more examples and usage information.
     except KeyboardInterrupt:
       pass

-    if self.ns.total_size >= sys.maxint:
+    if self.ns.total_size >= maxint:
       total_size = -1
     else:
       total_size = self.ns.total_size
@@ -1618,7 +1666,7 @@ Please read the README inside for more examples and usage information.
       if m:
         prog, size = m.groups()
       else:
-        prog, size = v, sys.maxint
+        prog, size = v, maxint

       logger.debug('prog: %s, size: %s' % (prog, size))
@@ -1935,7 +1983,7 @@ Please read the README inside for more examples and usage information.
     total_count = sum(p.done_count+p.skip_count for p in thread_progress)
     speed_avg = num_threads / (sum(sum(p.seconds) / len(p.seconds) for p in thread_progress) / num_threads)

-    if total_size >= sys.maxint:
+    if total_size >= maxint:
       etc_time = 'inf'
       remain_time = 'inf'
     else:
@@ -2039,6 +2087,8 @@ class Response_Base:
   def dump(self):
     return self.trace or str(self)

+  def str_target(self):
+    return ''

 class Timing:
   def __enter__(self):
@@ -3301,9 +3351,10 @@ class Response_HTTP(Response_Base):

   indicatorsfmt = [('code', -4), ('size:clen', -13), ('time', 6)]

-  def __init__(self, code, response, timing=0, trace=None, content_length=-1):
+  def __init__(self, code, response, timing=0, trace=None, content_length=-1, target={}):
     Response_Base.__init__(self, code, response, timing, trace=trace)
     self.content_length = content_length
+    self.target = target

   def indicators(self):
     return self.code, '%d:%d' % (self.size, self.content_length), '%.3f' % self.time
@@ -3311,7 +3362,7 @@ class Response_HTTP(Response_Base):
   def __str__(self):
     lines = re.findall('^(HTTP/.+)$', self.mesg, re.M)
     if lines:
-      return lines[-1]
+      return lines[-1].rstrip('\r')
     else:
       return self.mesg
@@ -3324,6 +3375,9 @@ class Response_HTTP(Response_Base):
   def match_egrep(self, val):
     return re.search(val, self.mesg, re.M)

+  def str_target(self):
+    return ' '.join('%s=%s' % (k, xmlquoteattr(str(v))) for k, v in self.target.iteritems())
+
   available_conditions = Response_Base.available_conditions
   available_conditions += (
     ('clen', 'match Content-Length header (N or N-M or N- or -N)'),
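
Note: str_target() serializes the per-response connection details into the attribute list of the new <target/> element; the empty default in Response_Base keeps that element blank for modules that do not supply it. A small sketch of the resulting output, with hypothetical values:

    from xml.sax.saxutils import quoteattr as xmlquoteattr

    target = {'ip': '93.184.216.34', 'port': 80, 'hostname': 'example.com'}
    print('<target %s/>' % ' '.join('%s=%s' % (k, xmlquoteattr(str(v))) for k, v in target.items()))
    # e.g. <target ip="93.184.216.34" port="80" hostname="example.com"/>
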
@@ -3487,6 +3541,18 @@ class HTTP_fuzz(TCP_Cache):
     url = urlunparse((scheme, host, path, params, query, fragment))
     perform_fp(fp, method, url, header, body)

+    target = {}
+    target['ip'] = fp.getinfo(pycurl.PRIMARY_IP)
+    target['port'] = fp.getinfo(pycurl.PRIMARY_PORT)
+    target['hostname'] = host
+
+    for h in header.split('\n'):
+      if ': ' in h:
+        k, v = h.split(': ', 1)
+        if k.lower() == 'host':
+          target['vhost'] = v.rstrip('\r')
+          break
+
     if after_urls:
       for after_url in after_urls.split(','):
         perform_fp(fp, 'GET', after_url)
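
Note: the target details come straight from the pycurl handle after the transfer: PRIMARY_IP/PRIMARY_PORT report the peer actually connected to, while an overridden Host header is kept as vhost. A minimal sketch of querying that information, assuming pycurl is installed and libcurl is recent enough to expose both constants (URL is hypothetical):

    import pycurl
    from io import BytesIO

    buf = BytesIO()
    fp = pycurl.Curl()
    fp.setopt(pycurl.URL, 'http://example.com/')
    fp.setopt(pycurl.WRITEFUNCTION, buf.write)
    fp.perform()

    # connection details of the last transfer, as used to build the <target/> element
    print(fp.getinfo(pycurl.PRIMARY_IP))     # e.g. '93.184.216.34'
    print(fp.getinfo(pycurl.PRIMARY_PORT))   # e.g. 80
    fp.close()
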
@@ -3498,7 +3564,7 @@ class HTTP_fuzz(TCP_Cache):
     if persistent == '0':
       self.reset()

-    return self.Response(http_code, response.getvalue(), response_time, trace.getvalue(), content_length)
+    return self.Response(http_code, response.getvalue(), response_time, trace.getvalue(), content_length, target)

 # }}}
