diff options
Diffstat (limited to 'extras/failed-tests.py')
| -rwxr-xr-x | extras/failed-tests.py | 228 |
1 file changed, 172 insertions, 56 deletions
diff --git a/extras/failed-tests.py b/extras/failed-tests.py index 4b21d132c1f..f7f110246b5 100755 --- a/extras/failed-tests.py +++ b/extras/failed-tests.py @@ -1,64 +1,180 @@ -#!/usr/bin/python +#!/usr/bin/python3 +from __future__ import print_function import blessings -import HTMLParser import requests -import sys +from requests.packages.urllib3.exceptions import InsecureRequestWarning +import re +import argparse +from collections import defaultdict +from datetime import timedelta, datetime +from pystache import render -BASE='https://build.gluster.org' -TERM=blessings.Terminal() +# This tool goes though the Gluster regression links and checks for failures -class FailureFinder (HTMLParser.HTMLParser): - def __init__ (*args): - apply(HTMLParser.HTMLParser.__init__,args) - self = args[0] - self.last_href = None - def handle_starttag (self, tag, attrs): - if tag == 'a': - return self.is_a_tag (attrs) - if tag == 'img': - return self.is_img_tag (attrs) - def is_a_tag (self, attrs): - attrs_dict = dict(attrs) - try: - if attrs_dict['class'] != 'build-status-link': - return - except KeyError: - return - self.last_href = attrs_dict['href'] - def is_img_tag (self, attrs): - if self.last_href == None: - return - attrs_dict = dict(attrs) - try: - if attrs_dict['alt'].find('Failed') == -1: - return - except KeyError: - return - self.process_failure(self.last_href) - self.last_href = None - def process_failure (self, url): - text = requests.get(BASE+url+'Full',verify=False).text - accum = [] - for t in text.split('\n'): - if t == 'Result: FAIL': - print TERM.red + ('FAILURE on %s' % BASE+url) + TERM.normal - for t2 in accum: - print t2.encode('utf-8') - accum = [] - elif t == 'Result: PASS': - accum = [] - else: - accum.append(t) +BASE = 'https://build.gluster.org' +TERM = blessings.Terminal() +MAX_BUILDS = 1000 +summary = defaultdict(list) +VERBOSE = None -def main (url): - parser = FailureFinder() - text = requests.get(url,verify=False).text - parser.feed(text) +def 
process_failure(url, node): + text = requests.get(url, verify=False).text + accum = [] + for t in text.split('\n'): + if t.find("Result: FAIL") != -1: + for t2 in accum: + if VERBOSE: + print(t2.encode('utf-8')) + if t2.find("Wstat") != -1: + test_case = re.search('\./tests/.*\.t', t2) + if test_case: + summary[test_case.group()].append((url, node)) + accum = [] + elif t.find("cur_cores=/") != -1: + summary["core"].append([t.split("/")[1]]) + summary["core"].append(url) + else: + accum.append(t) -if len(sys.argv) < 2: - main(BASE+'/job/rackspace-regression-2GB-triggered/') -else: - for u in sys.argv[1:]: - main(BASE+u) + +def print_summary(failed_builds, total_builds, html=False): + # All the templates + count = [ + '{{failed}} of {{total}} regressions failed', + '<p><b>{{failed}}</b> of <b>{{total}}</b> regressions failed</p>' + ] + regression_link = [ + '\tRegression Link: {{link}}\n' + '\tNode: {{node}}', + '<p> Regression Link: {{link}}</p>' + '<p> Node: {{node}}</p>' + ] + component = [ + '\tComponent: {{comp}}', + '<p> Component: {{comp}}</p>' + ] + failure_count = [ + ''.join([ + TERM.red, + '{{test}} ; Failed {{count}} times', + TERM.normal + ]), + ( + '<p><font color="red"><b>{{test}};</b> Failed <b>{{count}}' + '</b> times</font></p>' + ) + ] + + template = 0 + if html: + template = 1 + print(render( + count[template], + {'failed': failed_builds, 'total': total_builds} + )) + for k, v in summary.items(): + if k == 'core': + print(''.join([TERM.red, "Found cores:", TERM.normal])) + for comp, link in zip(v[::2], v[1::2]): + print(render(component[template], {'comp': comp})) + print(render( + regression_link[template], + {'link': link[0], 'node': link[1]} + )) + else: + print(render(failure_count[template], {'test': k, 'count': len(v)})) + for link in v: + print(render( + regression_link[template], + {'link': link[0], 'node': link[1]} + )) + + +def get_summary(cut_off_date, reg_link): + ''' + Get links to the failed jobs + ''' + success_count = 0 + 
failure_count = 0 + for page in range(0, MAX_BUILDS, 100): + build_info = requests.get(''.join([ + BASE, + reg_link, + 'api/json?depth=1&tree=allBuilds' + '[url,result,timestamp,builtOn]', + '{{{0},{1}}}'.format(page, page+100) + ]), verify=False).json() + for build in build_info.get('allBuilds'): + if datetime.fromtimestamp(build['timestamp']/1000) < cut_off_date: + # stop when timestamp older than cut off date + return failure_count, failure_count + success_count + if build['result'] in [None, 'SUCCESS']: + # pass when build is a success or ongoing + success_count += 1 + continue + if VERBOSE: + print(''.join([ + TERM.red, + 'FAILURE on {0}'.format(build['url']), + TERM.normal + ])) + url = ''.join([build['url'], 'consoleText']) + failure_count += 1 + process_failure(url, build['builtOn']) + return failure_count, failure_count + success_count + + +def main(num_days, regression_link, html_report): + cut_off_date = datetime.today() - timedelta(days=num_days) + failure = 0 + total = 0 + for reg in regression_link: + if reg == 'centos': + reg_link = '/job/centos6-regression/' + elif reg == 'netbsd': + reg_link = '/job/netbsd7-regression/' + else: + reg_link = reg + counts = get_summary(cut_off_date, reg_link) + failure += counts[0] + total += counts[1] + print_summary(failure, total, html_report) + + +if __name__ == '__main__': + requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + parser = argparse.ArgumentParser() + parser.add_argument("get-summary") + parser.add_argument( + "last_no_of_days", + default=1, + type=int, + help="Regression summary of last number of days" + ) + parser.add_argument( + "regression_link", + default="centos", + nargs='+', + help="\"centos\" | \"netbsd\" | any other regression link" + ) + parser.add_argument( + "--verbose", + default=False, + action="store_true", + help="Print a detailed report of each test case that is failed" + ) + parser.add_argument( + "--html-report", + default=False, + action="store_true", + 
help="Print a brief report of failed regressions in html format" + ) + args = parser.parse_args() + VERBOSE = args.verbose + main( + num_days=args.last_no_of_days, + regression_link=args.regression_link, + html_report=args.html_report + ) |
