From de2fa5a14d8b73b980e4d5a0f463ac1513f56d0e Mon Sep 17 00:00:00 2001 From: erneyja Date: Wed, 16 Sep 2015 08:23:15 -0400 Subject: [PATCH 1/7] integrating Tidy errors --- .travis.yml | 2 + errorlogparser.py | 4 + githubapiprovider.py | 28 +++++++ newpr.py | 159 ++++++------------------------------ newpr.pyc | Bin 0 -> 12928 bytes payloadhandler.py | 168 ++++++++++++++++++++++++++++++++++++++ test.py | 180 ++++++++++++++++++++++++++++++++++++++++- travisciapiprovider.py | 30 +++++++ 8 files changed, 437 insertions(+), 134 deletions(-) create mode 100644 errorlogparser.py create mode 100644 githubapiprovider.py create mode 100644 newpr.pyc create mode 100644 payloadhandler.py create mode 100644 travisciapiprovider.py diff --git a/.travis.yml b/.travis.yml index d1ffe46..4ba146b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,7 @@ language: python python: - "2.7" +install: + - pip install -r requirements.txt script: python test.py sudo: false diff --git a/errorlogparser.py b/errorlogparser.py new file mode 100644 index 0000000..aa77f2c --- /dev/null +++ b/errorlogparser.py @@ -0,0 +1,4 @@ +class ErrorLogParser(): + def parse_log(self, log, error_re): + raise NotImplementedError + \ No newline at end of file diff --git a/githubapiprovider.py b/githubapiprovider.py new file mode 100644 index 0000000..bfc6e34 --- /dev/null +++ b/githubapiprovider.py @@ -0,0 +1,28 @@ +import base64 +import json +import urllib2 + +class GithubApiProvider(): + host_url = 'https://api.github.com' + login_url = host_url + '/user' + review_comment_url = host_url + '/repos/{repo}/pulls/{pr_num}/comments' + + def __init__(self, username, token): + self.username = username + self.token = token + + def login(self, username=None, token=None): + username = None if not username else self.username + token = None if not token else self.token + base64string = base64.standard_b64encode('{}:{}'.format(username, token)) + req = urllib2.Request(self.login_url) + req.add_header('Authorization', 'Basic {}'.format(base64string)) + + return json.loads(urllib2.urlopen(req).read()) + + def post_review_comment_on_pr(self, repo, pr_num, commit_id, message, file_path, line): + data = json.dumps({"body":message,"commit_id":commit_id,"path":file_path,"position":line}) + + req = urllib2.Request(self.review_comment_url.format(repo=repo, pr_num=pr_num), data) + + return json.loads(urllib2.urlopen(req).read()) \ No newline at end of file diff --git a/newpr.py b/newpr.py index 929cb6e..c212e1a 100755 --- a/newpr.py +++ b/newpr.py @@ -1,9 +1,15 @@ #!/usr/bin/env python +from errorlogparser import ErrorLogParser +from githubapiprovider import GithubApiProvider +from payloadhandler import PayloadHandler +from StringIO import StringIO +from travisciapiprovider import TravisCiApiProvider import base64 -import urllib, urllib2 import cgi import cgitb +import ConfigParser +import gzip try: import simplejson as json except: @@ -11,9 +17,7 @@ import random import re import sys -import ConfigParser -from StringIO import StringIO -import gzip +import urllib, urllib2 class APIProvider: def __init__(self, payload, user): @@ -48,12 +52,14 @@ def set_assignee(self, assignee): class GithubAPIProvider(APIProvider): contributors_url = "https://api.github.com/repos/%s/%s/contributors?per_page=400" post_comment_url = "https://api.github.com/repos/%s/%s/issues/%s/comments" + post_review_comment_url = "https://api.github.com/repos/%s/pulls/%s/comments" collaborators_url = "https://api.github.com/repos/%s/%s/collaborators" issue_url = 
"https://api.github.com/repos/%s/%s/issues/%s" get_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels" add_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels" remove_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels/%s" + def __init__(self, payload, user, token): APIProvider.__init__(self, payload, user) self.token = token @@ -174,136 +180,16 @@ def set_assignee(self, assignee): else: raise e + # find a way to get + def post_review_comment_on_pr(self, repo, pr_num, commit_id, message, file_path, line): + try: + result = self.api_req("POST", self.post_review_comment_url % (self.repo, self.pr_num)) + data = json.dumps({"body":message,"commit_id":commit_id,"path":file_path,"position":line}) + + req = urllib2.Request(self.review_comment_url.format(repo=repo, pr_num=pr_num), data) + + return json.loads(urllib2.urlopen(req).read()) -# If the user specified a reviewer, return the username, otherwise returns None. -def find_reviewer(commit_msg): - match = reviewer_re.search(commit_msg) - if not match: - return None - return match.group(1) - - -welcome_msg = "Thanks for the pull request, and welcome! The Servo team is excited to review your changes, and you should hear from @%s (or someone else) soon." -warning_summary = 'warning **Warning** warning\n\n%s' -unsafe_warning_msg = 'These commits modify **unsafe code**. Please review it carefully!' -reftest_required_msg = 'These commits modify layout code, but no reftests are modified. Please consider adding a reftest!' - -reviewer_re = re.compile("\\b[rR]\?[:\- ]*@([a-zA-Z0-9\-]+)") - -def extract_globals_from_payload(payload): - if payload["action"] == "created": - owner = payload['repository']['owner']['login'] - repo = payload['repository']['name'] - issue = str(payload['issue']['number']) - else: - owner = payload['pull_request']['base']['repo']['owner']['login'] - repo = payload['pull_request']['base']['repo']['name'] - issue = str(payload["number"]) - return (owner, repo, issue) - - -def manage_pr_state(api, payload): - labels = api.get_labels(); - - if payload["action"] in ["synchronize", "opened"]: - for label in ["S-awaiting-merge", "S-tests-failed", "S-needs-code-changes"]: - if label in labels: - api.remove_label(label) - if not "S-awaiting-review" in labels: - api.add_label("S-awaiting-review") - - # If mergeable is null, the data wasn't available yet. It would be nice to try to fetch that - # information again. - if payload["action"] == "synchronize" and payload['pull_request']['mergeable']: - if "S-needs-rebase" in labels: - api.remove_label("S-needs-rebase") - - -def new_comment(api, payload): - # We only care about comments in open PRs - if payload['issue']['state'] != 'open' or 'pull_request' not in payload['issue']: - return - - commenter = payload['comment']['user']['login'] - # Ignore our own comments. 
- if commenter == api.user: - return - - msg = payload["comment"]["body"] - reviewer = find_reviewer(msg) - if reviewer: - api.set_assignee(reviewer) - - if commenter == 'bors-servo': - labels = api.get_labels(); - - if 'has been approved by' in msg or 'Testing commit' in msg: - for label in ["S-awaiting-review", "S-needs-rebase", "S-tests-failed", - "S-needs-code-changes", "S-needs-squash", "S-awaiting-answer"]: - if label in labels: - api.remove_label(label) - if not "S-awaiting-merge" in labels: - api.add_label("S-awaiting-merge") - - elif 'Test failed' in msg: - api.remove_label("S-awaiting-merge") - api.add_label("S-tests-failed") - - elif 'Please resolve the merge conflicts' in msg: - api.remove_label("S-awaiting-merge") - api.add_label("S-needs-rebase") - - -def new_pr(api, payload): - manage_pr_state(api, payload) - - author = payload["pull_request"]['user']['login'] - if api.is_new_contributor(author): - #collaborators = json.load(urllib2.urlopen(collaborators_url)) - collaborators = ['jdm', 'larsbergstrom', 'metajack', 'mbrubeck', 'Ms2ger', 'Manishearth', 'glennw', 'pcwalton', 'SimonSapin'] if api.repo == 'servo' and api.owner == 'servo' else ['test_user_selection_ignore_this'] - random.seed() - to_notify = random.choice(collaborators) - api.post_comment(welcome_msg % to_notify) - - warn_unsafe = False - layout_changed = False - saw_reftest = False - diff = api.get_diff() - for line in diff.split('\n'): - if line.startswith('+') and not line.startswith('+++') and line.find('unsafe') > -1: - warn_unsafe = True - if line.startswith('diff --git') and line.find('components/layout/') > -1: - layout_changed = True - if line.startswith('diff --git') and line.find('tests/ref') > -1: - saw_reftest = True - if line.startswith('diff --git') and line.find('tests/wpt') > -1: - saw_reftest = True - - warnings = [] - if warn_unsafe: - warnings += [unsafe_warning_msg] - - if layout_changed: - if not saw_reftest: - warnings += [reftest_required_msg] - - if warnings: - api.post_comment(warning_summary % '\n'.join(map(lambda x: '* ' + x, warnings))) - - -def update_pr(api, payload): - manage_pr_state(api, payload) - - -def handle_payload(api, payload): - if payload["action"] == "opened": - new_pr(api, payload) - elif payload["action"] == "synchronize": - update_pr(api, payload) - elif payload["action"] == "created": - new_comment(api, payload) - else: - pass if __name__ == "__main__": print "Content-Type: text/html;charset=utf-8" @@ -320,4 +206,11 @@ def handle_payload(api, payload): payload_raw = post.getfirst("payload",'') payload = json.loads(payload_raw) + if "action" in payload: + payload_handler = GithubPayloadHandler(payload) + elif "state" in payload: + payload_hanlder = TravisPayloadHandler(payload) + # get TravisCiApiProvider + # get ErrorParser + handle_payload(GithubAPIProvider(payload, user, token), payload) diff --git a/newpr.pyc b/newpr.pyc new file mode 100644 index 0000000000000000000000000000000000000000..def429d2da9ddcf27afccee21517a15c330b7780 GIT binary patch literal 12928 zcmcgy%X3s$dOx>YuND##l6VLhd=UuRfDrLuGPc160|rk9ncQZq85z=C^*!Bvo9=sC z_uNLv1z7}F;zg>G%*<-Cn7pd8NnDj=IkU-Ts*+R|-cC}s7w(QB9@RdU;2!Io93>`?*cpHTs(omF~FHRe<>p`PNndsQ$g{r9P0w`BIKU`jFvR4^@>c@^xD%s~~* zNam2zhgC4E9^_Q=?@lnMZrr@7hg9Pm+?u1uRpSVAYp*E%ifXtj*vrKn7LKX_OYK)r z0pA5_wlxQ&=9toX)i^GL7nNRAjT0)s`Uj=;q|zg*QB?Y*?EO`xv606PQc9CBs=fa~x(kIXx8J{=#1F$jCp{`*&v)^e+ekEy zAv8gtf*>3L0FaTB%n%X7rr``XKt1rn8sJ>;LWEnK6qHvgatoC_k`a9Te-)p(gd|n! 
zpn%Wx&T|PqW0`*H{!(0IInE9@8y(-&pJgmRGO&1_&#&~ExJx%<)&-P%TF}JJhgu8{ z;YA!Hs^U^7HS(Ou0_Fh*$gQ@$2UZjP&hhjPUBo|C~>WH36* zv%#$M73qwJmCtmJx}EoNY}PrF&ZA%G{JAWInehOz*ZFhBZH%go&-u4n!xfess2*H(GCeJKH>XdZmg%j1EUzv_@bL=YU0TBP3qx%ShHk?% z;5$HT1uq+Nhp2x%%&7L2Hrr{1^Yf$XE8!DA2Wk&M-k-T^5>YtiS_#0u-hCv46bK$8 zN5?QV4F_d?>rLeB!yOu!VnE9A6S!8u4l51p7pbe%*Y&*SJ>CYO;2 zBA{K8)NJCticCXa&%4b=!q0-YoH<0|X3|>9ya#za5hgm21`5V*N7HGw{A5dJY{X&Y zt)tOix^7EcQjRZo)Yr6lI1|(Q*efkkR0YEcJpNy|>>u*6G9Pl-;PUTtC?SUU6GxaWBQG7WU2NetZ;BI2 z5haf*>?0}0%sjEP{(!k3G5G`gzn*cp>75x`0a3bL -1: + warn_unsafe = True + if line.startswith('diff --git') and line.find('components/layout/') > -1: + layout_changed = True + if line.startswith('diff --git') and line.find('tests/ref') > -1: + saw_reftest = True + if line.startswith('diff --git') and line.find('tests/wpt') > -1: + saw_reftest = True + + warnings = [] + if warn_unsafe: + warnings += [self.unsafe_warning_msg] + + if layout_changed: + if not saw_reftest: + warnings += [self.reftest_required_msg] + + if warnings: + github.post_comment(self.warning_summary % '\n'.join(map(lambda x: '* ' + x, warnings))) + + + def update_pr(self, github): + self.manage_pr_state(github) + + + # If the user specified a reviewer, return the username, otherwise returns None. + def _find_reviewer(self, commit_msg): + reviewer_re = re.compile("\\b[rR]\?[:\- ]*@([a-zA-Z0-9\-]+)") + match = reviewer_re.search(commit_msg) + if not match: + return None + return match.group(1) + diff --git a/test.py b/test.py index 8917538..018d9cc 100644 --- a/test.py +++ b/test.py @@ -1,8 +1,15 @@ +from errorlogparser import ErrorLogParser +from githubapiprovider import GithubApiProvider +from mock import patch from newpr import APIProvider, handle_payload +from payloadhandler import PayloadHandler +from travisciapiprovider import TravisCiApiProvider import json import os import sys import traceback +import unittest +import urlparse class TestAPIProvider(APIProvider): def __init__(self, payload, user, new_contributor, labels, assignee, diff=""): @@ -121,4 +128,175 @@ def run_tests(tests): add_test('test_post_retry.json', {'labels': ['S-awaiting-merge']}, {'labels': ['S-awaiting-merge']}) -run_tests(tests) +### Mock Setup +def mock_urllib2_urlopen(url): + parsed_url = urlparse.urlparse(url.get_full_url()) + local_file = os.path.normpath('test-files/{}'.format(parsed_url.path[1:].replace('/', '.'))) + + return open(local_file, 'rb') + +def setup_mock_urllib2_urlopen(self, module): + self.patcher = patch('{}.urllib2.urlopen'.format(module), mock_urllib2_urlopen) + self.patcher.start() + + +def mock_urllib_urlopen(url): + parsed_url = urlparse.urlparse(url) + local_file = os.path.normpath('test-files/{}'.format(parsed_url.path[1:].replace('/', '.'))) + + return open(local_file, 'rb') + + +def setup_mock_urllib_urlopen(self, module): + self.patcher = patch('{}.urllib.urlopen'.format(module), mock_urllib_urlopen) + self.patcher.start() + + +### Tests + +### Todo - Remove these tests and use existing GithubAPI Provider Code +class TestGithubApiProvider(unittest.TestCase): + def setUp(self): + setup_mock_urllib2_urlopen(self, 'githubapiprovider') + self.github = GithubApiProvider("lowfive-servo", "some_fake_token") + + self.addCleanup(self.patcher.stop) + + + def test_login(self): + self.github.login() + self.github.login('lowfive-servo', 'some_fake_token') + + + def test_post_review_comment_on_pr(self): + repo = "servo/servo" + pr_num = 1 + commit_id = "6dcb09b5b57875f334f61aebed695e2e4193db5e" + message = "Great stuff" + file_path = "file1.txt" + 
line = 1 + + self.github.post_review_comment_on_pr(repo, pr_num, commit_id, message, file_path, line) + + +class TestTravisCiApiProvider(unittest.TestCase): + def setUp(self): + setup_mock_urllib_urlopen(self, 'travisciapiprovider') + self.travis = TravisCiApiProvider() + + self.build_data = self.travis.get_build(1) + + self.addCleanup(self.patcher.stop) + + + # These tests are weak + def test_get_build(self): + self.assertIn('matrix', self.travis.get_build(1)) + + + def test_get_log(self): + self.travis.get_log(self.build_data) + + + def test_get_pull_request_number(self): + build_data = self.travis.get_build(1) + + self.assertEqual(self.travis.get_pull_request_number(build_data), 7601) + + +class TestErrorLogParser(unittest.TestCase): + def setUp(self): + self.log_parser = ErrorLogParser() + + + def test_parse_log(self): + with self.assertRaises(NotImplementedError): + self.log_parser.parse_log("log", "regex") + + +class TestPayloadError(unittest.TestCase): + def setUp(self): + payload = \ + { + "target_url": "https://travis-ci.org/servo/servo/builds/74856035", + "name": "servo/servo", + "commit": { + "sha": "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6" + } + } + + self.payload_handler = PayloadHandler(payload) + + + def test_get_build_id(self): + self.assertEqual(self.payload_handler.get_build_id(), 74856035) + + + def test_get_commit_id(self): + self.assertEqual(self.payload_handler.get_commit_id(), "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6") + + + def test_get_repo_name(self): + self.assertEqual(self.payload_handler.get_repo_name(), 'servo/servo') + + + def test_handle_payload(self): + with self.assertRaises(NotImplementedError): + self.payload_handler.handle_payload() + +class TestServoErrorLogParser(unittest.TestCase): + def setUp(self): + self.error_parser = run.ServoErrorLogParser() + self.multi_log = open('test-files/multi-line-comment.log').read() + self.expected_multi_errors = \ + [ + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull\n\tfound: dom::bindings::conversions::get_dom_class", + "line": "7", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener\n\tfound: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull", + "line": "8", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods\n\tfound: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener", + "line": "9", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::conversions::get_dom_class\n\tfound: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods", + "line": "10", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::browsercontext\n\tfound: dom::eventtarget::EventTargetTypeId", + "line": "17", + "file": "./components/script/dom/bindings/utils.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::eventtarget::EventTargetTypeId\n\tfound: dom::browsercontext", + "line": "18", + "file": "./components/script/dom/bindings/utils.rs" + } + ] + + self.single_log = 
open('test-files/single-line-comment.log').read() + self.expected_single_errors = \ + [ + { + 'comment': 'missing space before {', + 'line': '49', + 'file': './components/plugins/lints/sorter.rs' + } + ] + + def test_parse_errors(self): + self.assertEqual(self.expected_multi_errors, list(self.error_parser.parse_log(self.multi_log))) + self.assertEqual(self.expected_single_errors, list(self.error_parser.parse_log(self.single_log))) + +if __name__ == "__main__": + run_tests(tests) + unittest.main() + diff --git a/travisciapiprovider.py b/travisciapiprovider.py new file mode 100644 index 0000000..ef4b7ff --- /dev/null +++ b/travisciapiprovider.py @@ -0,0 +1,30 @@ +import urllib +import json + +# If more functionality is needed from this class, it might be a +# better decision to use travispy +class TravisCiApiProvider(): + host_url = 'https://api.travis-ci.org' + build_url = host_url + '/builds/{build_id}' + log_url = host_url + '/jobs/{job_id}/log' + + def get_build(self, build_id): + return json.loads(urllib.urlopen(self.build_url.format(build_id=build_id)).read()) + + + def _get_job_id(self, build_data, job_index=0): + try: + job_id = build_data['matrix'][job_index]['id'] + except IndexError: + print "job_index out of bounds" + job_id = -1 + + return job_id + + + def get_log(self, build_data): + return urllib.urlopen(self.log_url.format(job_id=self._get_job_id(build_data))).read() + + + def get_pull_request_number(self, build_data): + return int(build_data['compare_url'].split('/')[-1]) \ No newline at end of file From 008b3e6cc4a8780fb6bc4933d267a8cc86726efc Mon Sep 17 00:00:00 2001 From: erneyja Date: Wed, 16 Sep 2015 23:23:38 -0400 Subject: [PATCH 2/7] refactoring completed. need to make sure tests pass --- newpr.py | 199 ++-------------------------------------------- payloadhandler.py | 105 +++++++++++++----------- test.py | 12 +++ 3 files changed, 74 insertions(+), 242 deletions(-) diff --git a/newpr.py b/newpr.py index c212e1a..574eb44 100755 --- a/newpr.py +++ b/newpr.py @@ -1,195 +1,7 @@ #!/usr/bin/env python - -from errorlogparser import ErrorLogParser -from githubapiprovider import GithubApiProvider -from payloadhandler import PayloadHandler -from StringIO import StringIO -from travisciapiprovider import TravisCiApiProvider -import base64 -import cgi -import cgitb +from payloadhandler import TravisPayloadHandler, GithubPayloadHandler +import cgi, cgitb import ConfigParser -import gzip -try: - import simplejson as json -except: - import json -import random -import re -import sys -import urllib, urllib2 - -class APIProvider: - def __init__(self, payload, user): - (owner, repo, issue) = extract_globals_from_payload(payload) - self.owner = owner - self.repo = repo - self.issue = issue - self.user = user - - def is_new_contributor(self, username): - raise NotImplementedError - - def post_comment(self, body): - raise NotImplementedError - - def add_label(self, label): - raise NotImplementedError - - def remove_label(self, label): - raise NotImplementedError - - def get_labels(self): - raise NotImplementedError - - def get_diff(self): - return NotImplementedError - - def set_assignee(self, assignee): - raise NotImplementedError - - -class GithubAPIProvider(APIProvider): - contributors_url = "https://api.github.com/repos/%s/%s/contributors?per_page=400" - post_comment_url = "https://api.github.com/repos/%s/%s/issues/%s/comments" - post_review_comment_url = "https://api.github.com/repos/%s/pulls/%s/comments" - collaborators_url = "https://api.github.com/repos/%s/%s/collaborators" - 
issue_url = "https://api.github.com/repos/%s/%s/issues/%s" - get_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels" - add_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels" - remove_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels/%s" - - - def __init__(self, payload, user, token): - APIProvider.__init__(self, payload, user) - self.token = token - if "pull_request" in payload: - self.diff_url = payload["pull_request"]["diff_url"] - - def api_req(self, method, url, data=None, media_type=None): - data = None if not data else json.dumps(data) - headers = {} if not data else {'Content-Type': 'application/json'} - req = urllib2.Request(url, data, headers) - req.get_method = lambda: method - if token: - base64string = base64.standard_b64encode('%s:%s' % (self.user, self.token)).replace('\n', '') - req.add_header("Authorization", "Basic %s" % base64string) - - if media_type: - req.add_header("Accept", media_type) - f = urllib2.urlopen(req) - header = f.info() - if header.get('Content-Encoding') == 'gzip': - buf = StringIO(f.read()) - f = gzip.GzipFile(fileobj=buf) - return { "header": header, "body": f.read() } - - # This function is adapted from https://github.com/kennethreitz/requests/blob/209a871b638f85e2c61966f82e547377ed4260d9/requests/utils.py#L562 - # Licensed under Apache 2.0: http://www.apache.org/licenses/LICENSE-2.0 - def parse_header_links(self, value): - if not value: - return None - - links = {} - replace_chars = " '\"" - for val in value.split(","): - try: - url, params = val.split(";", 1) - except ValueError: - url, params = val, '' - - url = url.strip("<> '\"") - - for param in params.split(";"): - try: - key, value = param.split("=") - except ValueError: - break - key = key.strip(replace_chars) - if key == 'rel': - links[value.strip(replace_chars)] = url - - return links - - def is_new_contributor(self, username): - url = self.contributors_url % (self.owner, self.repo) - # iterate through the pages to try and find the contributor - while True: - stats_raw = self.api_req("GET", url) - stats = json.loads(stats_raw['body']) - links = self.parse_header_links(stats_raw['header'].get('Link')) - - for contributor in stats: - if contributor['login'] == username: - return False - - if not links or 'next' not in links: - return True - url = links['next'] - - def post_comment(self, body): - try: - result = self.api_req("POST", self.post_comment_url % (self.owner, self.repo, self.issue), - {"body": body}) - except urllib2.HTTPError, e: - if e.code == 201: - pass - else: - raise e - - def add_label(self, label): - try: - result = self.api_req("POST", self.add_label_url % (self.owner, self.repo, self.issue), - [label]) - except urllib2.HTTPError, e: - if e.code == 201: - pass - else: - raise e - - def remove_label(self, label): - try: - result = self.api_req("DELETE", self.remove_label_url % (self.owner, self.repo, self.issue, label), {}) - except urllib2.HTTPError, e: - #if e.code == 201: - # pass - #else: - # raise e - pass - - def get_labels(self): - try: - result = self.api_req("GET", self.get_label_url % (self.owner, self.repo, self.issue)) - except urllib2.HTTPError, e: - if e.code == 201: - pass - else: - raise e - return map(lambda x: x["name"], json.loads(result['body'])) - - def get_diff(self): - return self.api_req("GET", self.diff_url)['body'] - - def set_assignee(self, assignee): - try: - result = self.api_req("PATCH", self.issue_url % (self.owner, self.repo, self.issue), - {"assignee": assignee})['body'] - except urllib2.HTTPError, 
e: - if e.code == 201: - pass - else: - raise e - - # find a way to get - def post_review_comment_on_pr(self, repo, pr_num, commit_id, message, file_path, line): - try: - result = self.api_req("POST", self.post_review_comment_url % (self.repo, self.pr_num)) - data = json.dumps({"body":message,"commit_id":commit_id,"path":file_path,"position":line}) - - req = urllib2.Request(self.review_comment_url.format(repo=repo, pr_num=pr_num), data) - - return json.loads(urllib2.urlopen(req).read()) - if __name__ == "__main__": print "Content-Type: text/html;charset=utf-8" @@ -209,8 +21,7 @@ def post_review_comment_on_pr(self, repo, pr_num, commit_id, message, file_path, if "action" in payload: payload_handler = GithubPayloadHandler(payload) elif "state" in payload: - payload_hanlder = TravisPayloadHandler(payload) - # get TravisCiApiProvider - # get ErrorParser + payload_handler = TravisPayloadHandler(payload) - handle_payload(GithubAPIProvider(payload, user, token), payload) + owner, repo = payload_handler.extract_globals_from_payload() + payload_handler.handle_payload(user, token, owner, repo) \ No newline at end of file diff --git a/payloadhandler.py b/payloadhandler.py index 6b40011..ccb1320 100644 --- a/payloadhandler.py +++ b/payloadhandler.py @@ -1,39 +1,44 @@ +from errorlogparser import ErrorLogParser +from githubapiprovider import GithubApiProvider +from travisciapiprovider import TravisCiApiProvider + + class PayloadHandler(): def __init__(self, payload): self.payload = payload - def handle_payload(self): - raise NotImplementedError - def extract_globals_from_payload(self): + def handle_payload(self, user, token, owner, repo): raise NotImplementedError -class TravisPayloadHandler(PayloadHandler): - def handle_payload(self, travis, github, error_parser): - build_id, commit_id, repo_name = self.extract_globals_from_payload() + def extract_globals_from_payload(self): + raise NotImplementedError - build_data = travis.get_build(build_id) +class TravisPayloadHandler(PayloadHandler): + msg_template = "Please fix the error below and push your changes when complete:\n\n" + \ + "File: {}\nLine Number: {}\nError: {}" + + def handle_payload(self, user, token, owner, repo): + travis = TravisCiApiProvider() + github = GithubApiProvider(user, token, owner, repo) + error_parser = ErrorLogParser() + build_id = int(self.payload["target_url"].split("/")[-1]) + commit_id = self.payload["commit"]["sha"] + build_id, commit_id, owner, repo = self._extract_globals_from_payload() + build_data = travis.get_build(build_id) log = travis.get_log(travis.get_first_job_id(build_data)) err_data = error_parser.parse_log(log) - pr_num = travis.get_pull_request_number(build_data) - message_temp = "Please fix the error below and push your changes when complete:\n\n" + \ - "File: {}\nLine Number: {}\nError: {}" - - gh.login() for err_datum in err_data: - err_message = message_temp.format(err_datum['file'], err_datum['line'], err_datum['comment']) - gh.post_review_comment_on_pr(repo_name, pr_num, commit_id, err_message, err_datum['file'], err_datum['line']) + err_message = self.msg_template.format(err_datum['file'], err_datum['line'], err_datum['comment']) + gh.post_review_comment(pr_num, commit_id, err_message, err_datum['file'], err_datum['line']) - def extract_globals_from_payload(self): - build_id = int(self.payload["target_url"].split("/")[-1]) - commit_id = self.payload["commit"]["sha"] - repo_name = self.payload["name"] - return (build_id, commit_id, repo_name) + def extract_globals_from_payload(self): + return 
self.payload["name"].split("/") class GithubPayloadHandler(PayloadHandler): @@ -41,48 +46,51 @@ class GithubPayloadHandler(PayloadHandler): warning_summary = 'warning **Warning** warning\n\n%s' unsafe_warning_msg = 'These commits modify **unsafe code**. Please review it carefully!' reftest_required_msg = 'These commits modify layout code, but no reftests are modified. Please consider adding a reftest!' - - def handle_payload(self, github): + + def handle_payload(self, user, token, owner, repo): + if self.payload["action"] == "created": + issue = str(self.payload['issue']['number']) + else: + issue = str(self.payload["number"]) + if self.payload["action"] == "opened": - self.new_pr(github) + self.new_pr(github, issue) elif self.payload["action"] == "synchronize": - self.update_pr(github) + self.update_pr(github, issue) elif self.payload["action"] == "created": - self.new_comment(github) + self.new_comment(github, issue) else: pass - def extract_globals_from_payload(self): if self.payload["action"] == "created": owner = self.payload['repository']['owner']['login'] repo = self.payload['repository']['name'] - issue = str(self.payload['issue']['number']) else: owner = self.payload['pull_request']['base']['repo']['owner']['login'] repo = self.payload['pull_request']['base']['repo']['name'] - issue = str(self.payload["number"]) - return (owner, repo, issue) + + return (owner, repo) - def manage_pr_state(self, github): - labels = github.get_labels(); + def manage_pr_state(self, github, issue): + labels = github.get_labels(issue); if self.payload["action"] in ["synchronize", "opened"]: for label in ["S-awaiting-merge", "S-tests-failed", "S-needs-code-changes"]: if label in labels: - github.remove_label(label) + github.remove_label(label, issue) if not "S-awaiting-review" in labels: - github.add_label("S-awaiting-review") + github.add_label("S-awaiting-review", issue) # If mergeable is null, the data wasn't available yet. It would be nice to try to fetch that # information again. 
if self.payload["action"] == "synchronize" and self.payload['pull_request']['mergeable']: if "S-needs-rebase" in labels: - github.remove_label("S-needs-rebase") + github.remove_label("S-needs-rebase", issue) - def new_comment(self, github): + def new_comment(self, github, issue): # We only care about comments in open PRs if self.payload['issue']['state'] != 'open' or 'pull_request' not in self.payload['issue']: return @@ -95,30 +103,30 @@ def new_comment(self, github): msg = self.payload["comment"]["body"] reviewer = self._find_reviewer(msg) if reviewer: - github.set_assignee(reviewer) + github.set_assignee(reviewer, issue) if commenter == 'bors-servo': - labels = github.get_labels(); + labels = github.get_labels(issue); if 'has been approved by' in msg or 'Testing commit' in msg: for label in ["S-awaiting-review", "S-needs-rebase", "S-tests-failed", "S-needs-code-changes", "S-needs-squash", "S-awaiting-answer"]: if label in labels: - github.remove_label(label) + github.remove_label(label, issue) if not "S-awaiting-merge" in labels: github.add_label("S-awaiting-merge") elif 'Test failed' in msg: - github.remove_label("S-awaiting-merge") - github.add_label("S-tests-failed") + github.remove_label("S-awaiting-merge", issue), + github.add_label("S-tests-failed", issue) elif 'Please resolve the merge conflicts' in msg: - github.remove_label("S-awaiting-merge") - github.add_label("S-needs-rebase") + github.remove_label("S-awaiting-merge", issue) + github.add_label("S-needs-rebase", issue) - def new_pr(self, github): - manage_pr_state(github) + def new_pr(self, github, issue): + manage_pr_state(github, issue) author = self.payload["pull_request"]['user']['login'] if github.is_new_contributor(author): @@ -126,12 +134,12 @@ def new_pr(self, github): collaborators = ['jdm', 'larsbergstrom', 'metajack', 'mbrubeck', 'Ms2ger', 'Manishearth', 'glennw', 'pcwalton', 'SimonSapin'] if github.repo == 'servo' and github.owner == 'servo' else ['test_user_selection_ignore_this'] random.seed() to_notify = random.choice(collaborators) - github.post_comment(self.welcome_msg % to_notify) + github.post_comment(self.welcome_msg % to_notify, issue) warn_unsafe = False layout_changed = False saw_reftest = False - diff = github.get_diff() + diff = github.get_diff(self.payload["pull_request"]["diff_url"]) for line in diff.split('\n'): if line.startswith('+') and not line.startswith('+++') and line.find('unsafe') > -1: warn_unsafe = True @@ -151,11 +159,11 @@ def new_pr(self, github): warnings += [self.reftest_required_msg] if warnings: - github.post_comment(self.warning_summary % '\n'.join(map(lambda x: '* ' + x, warnings))) + github.post_comment(self.warning_summary % '\n'.join(map(lambda x: '* ' + x, warnings)), issue) - def update_pr(self, github): - self.manage_pr_state(github) + def update_pr(self, github, issue): + self.manage_pr_state(github, issue) # If the user specified a reviewer, return the username, otherwise returns None. 
@@ -164,5 +172,6 @@ def _find_reviewer(self, commit_msg): match = reviewer_re.search(commit_msg) if not match: return None + return match.group(1) diff --git a/test.py b/test.py index 018d9cc..a963caf 100644 --- a/test.py +++ b/test.py @@ -20,31 +20,40 @@ def __init__(self, payload, user, new_contributor, labels, assignee, diff=""): self.assignee = assignee self.diff = diff + def is_new_contributor(self, username): return self.new_contributor + def post_comment(self, body): self.comments_posted += [body] + def add_label(self, label): self.labels += [label] + def remove_label(self, label): self.labels.remove(label) + def get_labels(self): return self.labels + def get_diff(self): return self.diff + def set_assignee(self, assignee): self.assignee = assignee + def get_payload(filename): with open(filename) as f: return json.load(f) + tests = [] def add_test(filename, initial, expected): global tests @@ -59,6 +68,7 @@ def add_test(filename, initial, expected): 'initial': initial_values, 'expected': expected_values}] + def run_tests(tests): failed = 0 for test in tests: @@ -135,6 +145,7 @@ def mock_urllib2_urlopen(url): return open(local_file, 'rb') + def setup_mock_urllib2_urlopen(self, module): self.patcher = patch('{}.urllib2.urlopen'.format(module), mock_urllib2_urlopen) self.patcher.start() @@ -296,6 +307,7 @@ def test_parse_errors(self): self.assertEqual(self.expected_multi_errors, list(self.error_parser.parse_log(self.multi_log))) self.assertEqual(self.expected_single_errors, list(self.error_parser.parse_log(self.single_log))) + if __name__ == "__main__": run_tests(tests) unittest.main() From 43fdbdabdc4d28005f8b2d18a0fee01bda3ea5cb Mon Sep 17 00:00:00 2001 From: erneyja Date: Thu, 17 Sep 2015 00:38:06 -0400 Subject: [PATCH 3/7] further refactoring --- errorlogparser.py | 49 +++++++++++++++++++++++++++++++++++++++++++++++ newpr.py | 35 +++++++++++++++++++++++++++++---- payloadhandler.py | 36 ++++++++++------------------------ 3 files changed, 90 insertions(+), 30 deletions(-) diff --git a/errorlogparser.py b/errorlogparser.py index aa77f2c..1905181 100644 --- a/errorlogparser.py +++ b/errorlogparser.py @@ -1,4 +1,53 @@ class ErrorLogParser(): def parse_log(self, log, error_re): raise NotImplementedError + +class ServoErrorLogParser(ErrorLogParser): + def parse_log(self, log): + error_re = "\\x1b\[94m(.+?)\\x1b\[0m:\\x1b\[93m(.+?)\\x1b\[0m:\s\\x1b\[91m(.+?)(?:\\x1b\[0m|$)" + cont_comment_re = "\t\\x1b\[\d{2}m(.+?)\\x1b\[0m" + # error_re = "File:\s(.+?)\sLine:\s(.+?)\sComment:\s(.+)" + # cont_comment_re = "(\t.+)" + matches = [] + log_list = log.splitlines() + + abbr_log_list = self._trim_log(log_list, error_re) + + for log_line in abbr_log_list: + err_match = re.match(error_re, log_line) + if err_match: + matches.append(list(err_match.groups())) + else: + cont_comment_match = re.match(cont_comment_re, log_line) + if cont_comment_match: + matches[-1][-1] += "\n\t{}".format(list(cont_comment_match.groups())[0]) + + return self._process_errors(matches) + + + def _trim_log(self, log_list, error_re): + """ + Cut off irrelevant details so cont_comment_re doesn't match something + that isn't an error comment + """ + abbr_log_list = log_list + err_match = None + i = 0 + + while not err_match and i < len(log_list): + err_match = re.match(error_re, log_list[i]) + i += 1 + + if err_match: + abbr_log_list = log_list[i - 1:] + + return abbr_log_list + + + def _process_errors(self, matches): + return (self._convert_match_to_dict(match) for match in matches) + + + def _convert_match_to_dict(self, match): 
+ return {"file": match[0], "line": match[1], "comment": match[2]} \ No newline at end of file diff --git a/newpr.py b/newpr.py index 574eb44..0dfc58d 100755 --- a/newpr.py +++ b/newpr.py @@ -1,8 +1,28 @@ #!/usr/bin/env python from payloadhandler import TravisPayloadHandler, GithubPayloadHandler +from import cgi, cgitb import ConfigParser +def extract_globals_from_payload(payload): + if "action" in payload: + owner, repo = extract_globals_from_github_payload(payload) + elif "state" in payload: + owner, repo = extract_globals_from_travis_payload(payload) + +def extract_globals_from_github_payload(payload): + if payload["action"] == "created": + owner = payload['repository']['owner']['login'] + repo = payload['repository']['name'] + else: + owner = payload['pull_request']['base']['repo']['owner']['login'] + repo = payload['pull_request']['base']['repo']['name'] + + return (owner, repo) + +def extract_globals_from_travis_payload(payload): + return payload['name'].split('/') + if __name__ == "__main__": print "Content-Type: text/html;charset=utf-8" print @@ -18,10 +38,17 @@ payload_raw = post.getfirst("payload",'') payload = json.loads(payload_raw) + owner, repo = extract_globals_from_payload(payload) + github = GithubApiProvider(user, token, owner, repo) + if "action" in payload: - payload_handler = GithubPayloadHandler(payload) + payload_handler = GithubPayloadHandler(payload, github) elif "state" in payload: - payload_handler = TravisPayloadHandler(payload) + travis = TravisCiApiProvider() + error_parser = ServoErrorLogParser() + payload_handler = TravisPayloadHandler(payload, github, travis, error_parser) + else: + pass - owner, repo = payload_handler.extract_globals_from_payload() - payload_handler.handle_payload(user, token, owner, repo) \ No newline at end of file + if payload_handler: + payload_handler.handle_payload() \ No newline at end of file diff --git a/payloadhandler.py b/payloadhandler.py index ccb1320..b7d02f6 100644 --- a/payloadhandler.py +++ b/payloadhandler.py @@ -8,22 +8,21 @@ def __init__(self, payload): self.payload = payload - def handle_payload(self, user, token, owner, repo): + def handle_payload(self): raise NotImplementedError - def extract_globals_from_payload(self): - raise NotImplementedError - - class TravisPayloadHandler(PayloadHandler): msg_template = "Please fix the error below and push your changes when complete:\n\n" + \ "File: {}\nLine Number: {}\nError: {}" - def handle_payload(self, user, token, owner, repo): - travis = TravisCiApiProvider() - github = GithubApiProvider(user, token, owner, repo) - error_parser = ErrorLogParser() + def __init__(self, payload, github, travis, error_parser): + PayloadHandler.__init__(self, payload) + self.travis = travis + self.github = github + self.error_parser = error_parser + + def handle_payload(self): build_id = int(self.payload["target_url"].split("/")[-1]) commit_id = self.payload["commit"]["sha"] build_id, commit_id, owner, repo = self._extract_globals_from_payload() @@ -34,11 +33,7 @@ def handle_payload(self, user, token, owner, repo): for err_datum in err_data: err_message = self.msg_template.format(err_datum['file'], err_datum['line'], err_datum['comment']) - gh.post_review_comment(pr_num, commit_id, err_message, err_datum['file'], err_datum['line']) - - - def extract_globals_from_payload(self): - return self.payload["name"].split("/") + github.post_review_comment(pr_num, commit_id, err_message, err_datum['file'], err_datum['line']) class GithubPayloadHandler(PayloadHandler): @@ -47,7 +42,7 @@ class 
GithubPayloadHandler(PayloadHandler): unsafe_warning_msg = 'These commits modify **unsafe code**. Please review it carefully!' reftest_required_msg = 'These commits modify layout code, but no reftests are modified. Please consider adding a reftest!' - def handle_payload(self, user, token, owner, repo): + def handle_payload(self): if self.payload["action"] == "created": issue = str(self.payload['issue']['number']) else: @@ -62,17 +57,6 @@ def handle_payload(self, user, token, owner, repo): else: pass - def extract_globals_from_payload(self): - if self.payload["action"] == "created": - owner = self.payload['repository']['owner']['login'] - repo = self.payload['repository']['name'] - else: - owner = self.payload['pull_request']['base']['repo']['owner']['login'] - repo = self.payload['pull_request']['base']['repo']['name'] - - return (owner, repo) - - def manage_pr_state(self, github, issue): labels = github.get_labels(issue); From a31245d30bafe934d496dfca899a8fadb0b3a733 Mon Sep 17 00:00:00 2001 From: erneyja Date: Sat, 26 Sep 2015 20:00:06 -0400 Subject: [PATCH 4/7] Some further refactoring. Tests completed --- .coveragerc | 7 + .gitignore | 9 + config.example | 3 + errorlogparser.py | 2 + githubapiprovider.py | 210 ++++- newpr.pyc | Bin 12928 -> 0 bytes payloadhandler.py | 127 +-- newpr.py => payloadreceiver.py | 23 +- resources/expected_errors.json | 32 + resources/multi-line-comment.log | 767 ++++++++++++++++++ resources/needs_reftest.diff | 64 ++ resources/single-line-comment.log | 1 + resources/test-data-lowfive.json | 7 + .../test_comment.json | 0 .../test_ignored_action.json | 0 .../test_merge_approved.json | 0 .../test_merge_conflict.json | 0 .../test_new_pr.json | 0 .../test_post_retry.json | 0 .../test_synchronize.json | 0 .../test_tests_failed.json | 0 resources/test_travis_payload.json | 206 +++++ resources/unsafe.diff | 223 +++++ test.py | 634 ++++++++++----- 24 files changed, 2014 insertions(+), 301 deletions(-) create mode 100644 .coveragerc create mode 100644 .gitignore create mode 100644 config.example delete mode 100644 newpr.pyc rename newpr.py => payloadreceiver.py (67%) create mode 100644 resources/expected_errors.json create mode 100644 resources/multi-line-comment.log create mode 100644 resources/needs_reftest.diff create mode 100644 resources/single-line-comment.log create mode 100644 resources/test-data-lowfive.json rename test_comment.json => resources/test_comment.json (100%) rename test_ignored_action.json => resources/test_ignored_action.json (100%) rename test_merge_approved.json => resources/test_merge_approved.json (100%) rename test_merge_conflict.json => resources/test_merge_conflict.json (100%) rename test_new_pr.json => resources/test_new_pr.json (100%) rename test_post_retry.json => resources/test_post_retry.json (100%) rename test_synchronize.json => resources/test_synchronize.json (100%) rename test_tests_failed.json => resources/test_tests_failed.json (100%) create mode 100644 resources/test_travis_payload.json create mode 100644 resources/unsafe.diff diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..a2d7a71 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,7 @@ +[run] +include: + errorlogparser.py + githubapiprovider.py + payloadhandler.py + payloadreceiver.py + travisapiprovider.py \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7aa59b9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class +*.pyc + +.DS_Store 
+.coverage +htmlcov \ No newline at end of file diff --git a/config.example b/config.example new file mode 100644 index 0000000..70d1386 --- /dev/null +++ b/config.example @@ -0,0 +1,3 @@ +[github] +user=UserNameHere +token=SomeTokenHere \ No newline at end of file diff --git a/errorlogparser.py b/errorlogparser.py index 1905181..f92cd12 100644 --- a/errorlogparser.py +++ b/errorlogparser.py @@ -1,3 +1,5 @@ +import re + class ErrorLogParser(): def parse_log(self, log, error_re): raise NotImplementedError diff --git a/githubapiprovider.py b/githubapiprovider.py index bfc6e34..b884cad 100644 --- a/githubapiprovider.py +++ b/githubapiprovider.py @@ -1,28 +1,200 @@ +from StringIO import StringIO import base64 -import json +import gzip +try: + import simplejson as json +except: + import json import urllib2 -class GithubApiProvider(): - host_url = 'https://api.github.com' - login_url = host_url + '/user' - review_comment_url = host_url + '/repos/{repo}/pulls/{pr_num}/comments' - def __init__(self, username, token): - self.username = username - self.token = token +class APIProvider: + def __init__(self, user): + self.user = user - def login(self, username=None, token=None): - username = None if not username else self.username - token = None if not token else self.token - base64string = base64.standard_b64encode('{}:{}'.format(username, token)) - req = urllib2.Request(self.login_url) - req.add_header('Authorization', 'Basic {}'.format(base64string)) - return json.loads(urllib2.urlopen(req).read()) + def is_new_contributor(self, username): + raise NotImplementedError - def post_review_comment_on_pr(self, repo, pr_num, commit_id, message, file_path, line): - data = json.dumps({"body":message,"commit_id":commit_id,"path":file_path,"position":line}) - req = urllib2.Request(self.review_comment_url.format(repo=repo, pr_num=pr_num), data) + def post_comment(self, body, issue): + raise NotImplementedError - return json.loads(urllib2.urlopen(req).read()) \ No newline at end of file + + def post_review_comment(self, pr_num, commit_id, path, pos, body): + raise NotImplementedError + + + def add_label(self, label, issue): + raise NotImplementedError + + + def remove_label(self, label, issue): + raise NotImplementedError + + + def get_labels(self, issue): + raise NotImplementedError + + + def get_diff(self, url): + raise NotImplementedError + + + def set_assignee(self, assignee, issue): + raise NotImplementedError + + +class GithubApiProvider(APIProvider): + contributors_url = "https://api.github.com/repos/%s/%s/contributors?per_page=400" + post_comment_url = "https://api.github.com/repos/%s/%s/issues/%s/comments" + post_review_comment_url = "https://api.github.com/repos/%s/%s/pulls/%s/comments" + collaborators_url = "https://api.github.com/repos/%s/%s/collaborators" + issue_url = "https://api.github.com/repos/%s/%s/issues/%s" + get_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels" + add_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels" + remove_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels/%s" + + def __init__(self, user, token, owner, repo): + APIProvider.__init__(self, user) + self.token = token + + self.owner = owner + self.repo = repo + + def api_req(self, method, url, data=None, media_type=None): + data = None if not data else json.dumps(data) + headers = {} if not data else {'Content-Type': 'application/json'} + req = urllib2.Request(url, data, headers) + req.get_method = lambda: method + if self.token: + base64string = base64.standard_b64encode('%s:%s' 
% (self.user, self.token)).replace('\n', '') + req.add_header("Authorization", "Basic %s" % base64string) + + if media_type: + req.add_header("Accept", media_type) + f = urllib2.urlopen(req) + header = f.info() + if header.get('Content-Encoding') == 'gzip': + buf = StringIO(f.read()) + f = gzip.GzipFile(fileobj=buf) + + return { "header": header, "body": f.read() } + + + # This function is adapted from https://github.com/kennethreitz/requests/blob/209a871b638f85e2c61966f82e547377ed4260d9/requests/utils.py#L562 + # Licensed under Apache 2.0: http://www.apache.org/licenses/LICENSE-2.0 + def parse_header_links(self, value): + if not value: + return None + + links = {} + replace_chars = " '\"" + for val in value.split(","): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, '' + + url = url.strip("<> '\"") + + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + key = key.strip(replace_chars) + if key == 'rel': + links[value.strip(replace_chars)] = url + + return links + + + def is_new_contributor(self, username): + url = self.contributors_url % (self.owner, self.repo) + # iterate through the pages to try and find the contributor + while True: + stats_raw = self.api_req("GET", url) + stats = json.loads(stats_raw['body']) + links = self.parse_header_links(stats_raw['header'].get('Link')) + + for contributor in stats: + if contributor['login'] == username: + return False + + if not links or 'next' not in links: + return True + + url = links['next'] + + + def post_comment(self, body, issue): + try: + result = self.api_req("POST", self.post_comment_url % (self.owner, self.repo, issue), + {"body": body}) + except urllib2.HTTPError, e: + if e.code == 201: + pass + else: + raise e + + + def post_review_comment(self, pr_num, commit_id, path, pos, body): + try: + result = self.api_req("POST", self.post_review_comment_url % (self.owner, self.repo, pr_num), + {"body": body, "commit_id":commit_id, "path":path, "position":pos}) + except urllib2.HTTPError, e: + if e.code == 201: + pass + else: + raise e + + + def add_label(self, label, issue): + try: + result = self.api_req("POST", self.add_label_url % (self.owner, self.repo, issue), + [label]) + except urllib2.HTTPError, e: + if e.code == 201: + pass + else: + raise e + + + def remove_label(self, label, issue): + try: + result = self.api_req("DELETE", self.remove_label_url % (self.owner, self.repo, issue, label), {}) + except urllib2.HTTPError, e: + #if e.code == 201: + # pass + #else: + # raise e + pass + + + def get_labels(self, issue): + try: + result = self.api_req("GET", self.get_label_url % (self.owner, self.repo, issue)) + except urllib2.HTTPError, e: + if e.code == 201: + pass + else: + raise e + return map(lambda x: x["name"], json.loads(result['body'])) + + + def get_diff(self, diff_url): + return self.api_req("GET", diff_url)['body'] + + + def set_assignee(self, assignee, issue): + try: + result = self.api_req("PATCH", self.issue_url % (self.owner, self.repo, issue), + {"assignee": assignee}) + + return result['body'] + except urllib2.HTTPError, e: + if e.code == 201: + pass + else: + raise e \ No newline at end of file diff --git a/newpr.pyc b/newpr.pyc deleted file mode 100644 index def429d2da9ddcf27afccee21517a15c330b7780..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12928 zcmcgy%X3s$dOx>YuND##l6VLhd=UuRfDrLuGPc160|rk9ncQZq85z=C^*!Bvo9=sC 
zpn%Wx&T|PqW0`*H{!(0IInE9@8y(-&pJgmRGO&1_&#&~ExJx%<)&-P%TF}JJhgu8{ z;YA!Hs^U^7HS(Ou0_Fh*$gQ@$2UZjP&hhjPUBo|C~>WH36* zv%#$M73qwJmCtmJx}EoNY}PrF&ZA%G{JAWInehOz*ZFhBZH%go&-u4n!xfess2*H(GCeJKH>XdZmg%j1EUzv_@bL=YU0TBP3qx%ShHk?% z;5$HT1uq+Nhp2x%%&7L2Hrr{1^Yf$XE8!DA2Wk&M-k-T^5>YtiS_#0u-hCv46bK$8 zN5?QV4F_d?>rLeB!yOu!VnE9A6S!8u4l51p7pbe%*Y&*SJ>CYO;2 zBA{K8)NJCticCXa&%4b=!q0-YoH<0|X3|>9ya#za5hgm21`5V*N7HGw{A5dJY{X&Y zt)tOix^7EcQjRZo)Yr6lI1|(Q*efkkR0YEcJpNy|>>u*6G9Pl-;PUTtC?SUU6GxaWBQG7WU2NetZ;BI2 z5haf*>?0}0%sjEP{(!k3G5G`gzn*cp>75x`0a3bL -1: warn_unsafe = True @@ -143,11 +144,11 @@ def new_pr(self, github, issue): warnings += [self.reftest_required_msg] if warnings: - github.post_comment(self.warning_summary % '\n'.join(map(lambda x: '* ' + x, warnings)), issue) + self.github.post_comment(self.warning_summary % '\n'.join(map(lambda x: '* ' + x, warnings)), issue) - def update_pr(self, github, issue): - self.manage_pr_state(github, issue) + def update_pr(self, issue, payload): + self.manage_pr_state(issue, payload) # If the user specified a reviewer, return the username, otherwise returns None. diff --git a/newpr.py b/payloadreceiver.py similarity index 67% rename from newpr.py rename to payloadreceiver.py index 0dfc58d..2ca979a 100755 --- a/newpr.py +++ b/payloadreceiver.py @@ -1,16 +1,21 @@ #!/usr/bin/env python + from payloadhandler import TravisPayloadHandler, GithubPayloadHandler -from +from errorlogparser import ServoErrorLogParser +from githubapiprovider import GithubApiProvider +from travisciapiprovider import TravisCiApiProvider import cgi, cgitb import ConfigParser def extract_globals_from_payload(payload): if "action" in payload: - owner, repo = extract_globals_from_github_payload(payload) + owner, repo = _extract_globals_from_github_payload(payload) elif "state" in payload: - owner, repo = extract_globals_from_travis_payload(payload) + owner, repo = _extract_globals_from_travis_payload(payload) + + return owner, repo -def extract_globals_from_github_payload(payload): +def _extract_globals_from_github_payload(payload): if payload["action"] == "created": owner = payload['repository']['owner']['login'] repo = payload['repository']['name'] @@ -18,9 +23,9 @@ def extract_globals_from_github_payload(payload): owner = payload['pull_request']['base']['repo']['owner']['login'] repo = payload['pull_request']['base']['repo']['name'] - return (owner, repo) + return owner, repo -def extract_globals_from_travis_payload(payload): +def _extract_globals_from_travis_payload(payload): return payload['name'].split('/') if __name__ == "__main__": @@ -42,13 +47,13 @@ def extract_globals_from_travis_payload(payload): github = GithubApiProvider(user, token, owner, repo) if "action" in payload: - payload_handler = GithubPayloadHandler(payload, github) + payload_handler = GithubPayloadHandler(github) elif "state" in payload: travis = TravisCiApiProvider() error_parser = ServoErrorLogParser() - payload_handler = TravisPayloadHandler(payload, github, travis, error_parser) + payload_handler = TravisPayloadHandler(github, travis, error_parser) else: pass if payload_handler: - payload_handler.handle_payload() \ No newline at end of file + payload_handler.handle_payload(payload) \ No newline at end of file diff --git a/resources/expected_errors.json b/resources/expected_errors.json new file mode 100644 index 0000000..c792a94 --- /dev/null +++ b/resources/expected_errors.json @@ -0,0 +1,32 @@ +[ + { + "comment": "use statement is not in alphabetical order\n\texpected: 
dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull\n\tfound: dom::bindings::conversions::get_dom_class", + "line": "7", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener\n\tfound: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull", + "line": "8", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods\n\tfound: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener", + "line": "9", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::conversions::get_dom_class\n\tfound: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods", + "line": "10", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::browsercontext\n\tfound: dom::eventtarget::EventTargetTypeId", + "line": "17", + "file": "./components/script/dom/bindings/utils.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::eventtarget::EventTargetTypeId\n\tfound: dom::browsercontext", + "line": "18", + "file": "./components/script/dom/bindings/utils.rs" + } +] \ No newline at end of file diff --git a/resources/multi-line-comment.log b/resources/multi-line-comment.log new file mode 100644 index 0000000..eaac944 --- /dev/null +++ b/resources/multi-line-comment.log @@ -0,0 +1,767 @@ +Using worker: worker-linux-docker-176118fe.prod.travis-ci.org:travis-linux-8 + +travis_fold:start:system_info +Build system information +Build language: python +Build image provisioning date and time +Thu Feb 5 15:09:33 UTC 2015 +Operating System Details +Distributor ID: Ubuntu +Description: Ubuntu 12.04.5 LTS +Release: 12.04 +Codename: precise +Linux Version +3.13.0-29-generic +Cookbooks Version +a68419e https://github.com/travis-ci/travis-cookbooks/tree/a68419e +GCC version +gcc (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 +Copyright (C) 2011 Free Software Foundation, Inc. +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+
+LLVM version
+clang version 3.4 (tags/RELEASE_34/final)
+Target: x86_64-unknown-linux-gnu
+Thread model: posix
+Pre-installed Ruby versions
+ruby-1.9.3-p551
+Pre-installed Node.js versions
+v0.10.36
+Pre-installed Go versions
+1.4.1
+Redis version
+redis-server 2.8.19
+riak version
+2.0.2
+MongoDB version
+MongoDB 2.4.12
+CouchDB version
+couchdb 1.6.1
+Neo4j version
+1.9.4
+RabbitMQ Version
+3.4.3
+ElasticSearch version
+1.4.0
+Installed Sphinx versions
+2.0.10
+2.1.9
+2.2.6
+Default Sphinx version
+2.2.6
+Installed Firefox version
+firefox 31.0esr
+PhantomJS version
+1.9.8
+ant -version
+Apache Ant(TM) version 1.8.2 compiled on December 3 2011
+mvn -version
+Apache Maven 3.2.5 (12a6b3acb947671f09b81f49094c53f426d8cea1; 2014-12-14T17:29:23+00:00)
+Maven home: /usr/local/maven
+Java version: 1.7.0_76, vendor: Oracle Corporation
+Java home: /usr/lib/jvm/java-7-oracle/jre
+Default locale: en_US, platform encoding: ANSI_X3.4-1968
+OS name: "linux", version: "3.13.0-29-generic", arch: "amd64", family: "unix"
+travis_fold:end:system_info
+
+travis_fold:start:git.checkout
+travis_time:start:16daecd0
+$ git clone --depth=50 https://github.com/servo/servo.git servo/servo
+Cloning into 'servo/servo'...
+remote: Counting objects: 101276, done.
+remote: Compressing objects: 100% (37782/37782), done.
+remote: Total 101276 (delta 64964), reused 97481 (delta 62116), pack-reused 0
+Receiving objects: 100% (101276/101276), 71.83 MiB | 30.19 MiB/s, done.
+Resolving deltas: 100% (64964/64964), done.
+Checking connectivity... done.
+Checking out files: 100% (118796/118796), done.
+travis_time:end:16daecd0:start=1442012004129913797,finish=1442012018866609279,duration=14736695482
+$ cd servo/servo
+travis_time:start:04bda51d
+$ git fetch origin +refs/pull/7606/merge:
+remote: Counting objects: 61, done.
+remote: Compressing objects: 100% (22/22), done.
+remote: Total 61 (delta 55), reused 44 (delta 39), pack-reused 0
+Unpacking objects: 100% (61/61), done.
+From https://github.com/servo/servo
+ * branch refs/pull/7606/merge -> FETCH_HEAD
+travis_time:end:04bda51d:start=1442012018870347891,finish=1442012019489804985,duration=619457094
+$ git checkout -qf FETCH_HEAD
+travis_fold:end:git.checkout
+travis_fold:start:git.submodule
+travis_time:start:04378686
+$ git submodule init
+Submodule 'support/android-rs-glue' (https://github.com/tomaka/android-rs-glue) registered for path 'support/android-rs-glue'
+travis_time:end:04378686:start=1442012020599535551,finish=1442012020975447711,duration=375912160
+travis_time:start:326023b0
+$ git submodule update
+Cloning into 'support/android-rs-glue'...
+remote: Counting objects: 564, done.
+remote: Total 564 (delta 0), reused 0 (delta 0), pack-reused 564
+Receiving objects: 100% (564/564), 133.16 KiB | 0 bytes/s, done.
+Resolving deltas: 100% (276/276), done.
+Checking connectivity... done.
+Submodule path 'support/android-rs-glue': checked out '4ed3cb30b289aa0aa84a00e0d5682bc853108e5a'
+travis_time:end:326023b0:start=1442012020979079782,finish=1442012021621488690,duration=642408908
+travis_fold:end:git.submodule
+
+This job is running on container-based infrastructure, which does not allow use of 'sudo', setuid and setguid executables.
+If you require sudo, add 'sudo: required' to your .travis.yml
+See http://docs.travis-ci.com/user/workers/container-based-infrastructure/ for details.
+travis_time:start:1d674e1b
+$ source ~/virtualenv/python2.7/bin/activate
+travis_time:end:1d674e1b:start=1442012024198107459,finish=1442012024205105107,duration=6997648
+$ python --version
+Python 2.7.9
+$ pip --version
+pip 6.0.7 from /home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages (python 2.7)
+Could not locate requirements.txt. Override the install: key in your .travis.yml to install dependencies.
+travis_time:start:011277c0
+$ ./mach test-tidy
+Running virtualenv with interpreter /home/travis/virtualenv/python2.7.9/bin/python2
+Using real prefix '/opt/python/2.7.9'
+New python executable in /home/travis/build/servo/servo/python/_virtualenv/bin/python2
+Also creating executable in /home/travis/build/servo/servo/python/_virtualenv/bin/python
+Installing setuptools, pip...done.
+You are using pip version 6.0.7, however version 7.1.2 is available.
+You should consider upgrading via the 'pip install --upgrade pip' command.
+You are using pip version 6.0.7, however version 7.1.2 is available.
+You should consider upgrading via the 'pip install --upgrade pip' command.
+You are using pip version 6.0.7, however version 7.1.2 is available.
+You should consider upgrading via the 'pip install --upgrade pip' command. +./components/script/dom/eventtarget.rs:7: use statement is not in alphabetical order + expected: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull + found: dom::bindings::conversions::get_dom_class +./components/script/dom/eventtarget.rs:8: use statement is not in alphabetical order + expected: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener + found: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull +./components/script/dom/eventtarget.rs:9: use statement is not in alphabetical order + expected: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods + found: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener +./components/script/dom/eventtarget.rs:10: use statement is not in alphabetical order + expected: dom::bindings::conversions::get_dom_class + found: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods +./components/script/dom/bindings/utils.rs:17: use statement is not in alphabetical order + expected: dom::browsercontext + found: dom::eventtarget::EventTargetTypeId +./components/script/dom/bindings/utils.rs:18: use statement is not in alphabetical order + expected: dom::eventtarget::EventTargetTypeId + found: dom::browsercontext +travis_time:end:011277c0:start=1442012024569595079,finish=1442012039323338483,duration=14753743404 + +The command "./mach test-tidy" exited with 1. + +Done. Your build exited with 1. diff --git a/resources/needs_reftest.diff b/resources/needs_reftest.diff new file mode 100644 index 0000000..344a170 --- /dev/null +++ b/resources/needs_reftest.diff @@ -0,0 +1,64 @@ +diff --git a/components/layout/layout_task.rs b/components/layout/layout_task.rs +index 28eacfd..f0aba98 100644 +--- a/components/layout/layout_task.rs ++++ b/components/layout/layout_task.rs +@@ -834,19 +834,6 @@ impl LayoutTask { + traversal); + } + +- /// Verifies that every node was either marked as a leaf or as a nonleaf in the flow tree. +- /// This is only on in debug builds. +- #[inline(never)] +- #[cfg(debug)] +- fn verify_flow_tree(&self, layout_root: &mut FlowRef) { +- let mut traversal = traversal::FlowTreeVerification; +- layout_root.traverse_preorder(&mut traversal); +- } +- +- #[cfg(not(debug))] +- fn verify_flow_tree(&self, _: &mut FlowRef) { +- } +- + fn process_node_geometry_request<'a>(&'a self, + requested_node: TrustedNodeAddress, + layout_root: &mut FlowRef, +@@ -1338,11 +1325,6 @@ impl LayoutTask { + } + }); + +- // Verification of the flow tree, which ensures that all nodes were either marked as +- // leaves or as non-leaves. This becomes a no-op in release builds. (It is +- // inconsequential to memory safety but is a useful debugging tool.) +- self.verify_flow_tree(&mut root_flow); +- + if opts::get().trace_layout { + layout_debug::begin_trace(root_flow.clone()); + } +diff --git a/components/layout/traversal.rs b/components/layout/traversal.rs +index 43199d7..7742ee3 100644 +--- a/components/layout/traversal.rs ++++ b/components/layout/traversal.rs +@@ -292,23 +292,6 @@ impl<'a> PostorderDomTraversal for ConstructFlows<'a> { + } + } + +-/// The flow tree verification traversal. This is only on in debug builds. 
+-#[cfg(debug)] +-struct FlowTreeVerification; +- +-#[cfg(debug)] +-impl PreorderFlow for FlowTreeVerification { +- #[inline] +- fn process(&mut self, flow: &mut Flow) { +- let base = flow::base(flow); +- if !base.flags.is_leaf() && !base.flags.is_nonleaf() { +- println!("flow tree verification failed: flow wasn't a leaf or a nonleaf!"); +- flow.dump(); +- panic!("flow tree verification failed") +- } +- } +-} +- + /// The bubble-inline-sizes traversal, the first part of layout computation. This computes + /// preferred and intrinsic inline-sizes and bubbles them up the tree. + pub struct BubbleISizes<'a> { \ No newline at end of file diff --git a/resources/single-line-comment.log b/resources/single-line-comment.log new file mode 100644 index 0000000..22fff91 --- /dev/null +++ b/resources/single-line-comment.log @@ -0,0 +1 @@ +Using worker: worker-linux-docker-201055c9.prod.travis-ci.org:travis-linux-6 travis_fold:start:system_info Build system information Build language: python Build image provisioning date and time Thu Feb 5 15:09:33 UTC 2015 Operating System Details Distributor ID: Ubuntu Description: Ubuntu 12.04.5 LTS Release: 12.04 Codename: precise Linux Version 3.13.0-29-generic Cookbooks Version a68419e https://github.com/travis-ci/travis-cookbooks/tree/a68419e GCC version gcc (Ubuntu/Linaro 4.6.3-1ubuntu5) 4.6.3 Copyright (C) 2011 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. LLVM version clang version 3.4 (tags/RELEASE_34/final) Target: x86_64-unknown-linux-gnu Thread model: posix Pre-installed Ruby versions ruby-1.9.3-p551 Pre-installed Node.js versions v0.10.36 Pre-installed Go versions 1.4.1 Redis version redis-server 2.8.19 riak version 2.0.2 MongoDB version MongoDB 2.4.12 CouchDB version couchdb 1.6.1 Neo4j version 1.9.4 RabbitMQ Version 3.4.3 ElasticSearch version 1.4.0 Installed Sphinx versions 2.0.10 2.1.9 2.2.6 Default Sphinx version 2.2.6 Installed Firefox version firefox 31.0esr PhantomJS version 1.9.8 ant -version Apache Ant(TM) version 1.8.2 compiled on December 3 2011 mvn -version Apache Maven 3.2.5 (12a6b3acb947671f09b81f49094c53f426d8cea1; 2014-12-14T17:29:23+00:00) Maven home: /usr/local/maven Java version: 1.7.0_76, vendor: Oracle Corporation Java home: /usr/lib/jvm/java-7-oracle/jre Default locale: en_US, platform encoding: ANSI_X3.4-1968 OS name: "linux", version: "3.13.0-29-generic", arch: "amd64", family: "unix" travis_fold:end:system_info  travis_fold:start:git.checkout travis_time:start:1f55a9d0 $ git clone --depth=50 https://github.com/servo/servo.git servo/servo Cloning into 'servo/servo'... remote: Counting objects: 100268, done. 
remote: Compressing objects: 100% (37443/37443), done. remote: Total 100268 (delta 64207), reused 96530 (delta 61453), pack-reused 0 Receiving objects: 100% (100268/100268), 71.58 MiB | 17.96 MiB/s, done. Resolving deltas: 100% (64207/64207), done. Checking connectivity... done. Checking out files: 100% (118796/118796), done. travis_time:end:1f55a9d0:start=1442247746413860778,finish=1442247765356038094,duration=18942177316 $ cd servo/servo travis_time:start:004b80a4 $ git fetch origin +refs/pull/7546/merge: remote: Counting objects: 11, done. remote: Compressing objects: 100% (3/3), done. remote: Total 11 (delta 8), reused 11 (delta 8), pack-reused 0 Unpacking objects: 100% (11/11), done. From https://github.com/servo/servo * branch refs/pull/7546/merge -> FETCH_HEAD travis_time:end:004b80a4:start=1442247765360657996,finish=1442247766168170067,duration=807512071 $ git checkout -qf FETCH_HEAD travis_fold:end:git.checkout travis_fold:start:git.submodule travis_time:start:3160d4ee $ git submodule init Submodule 'support/android-rs-glue' (https://github.com/tomaka/android-rs-glue) registered for path 'support/android-rs-glue' travis_time:end:3160d4ee:start=1442247767205028590,finish=1442247767688167701,duration=483139111 travis_time:start:0aeec95e $ git submodule update Cloning into 'support/android-rs-glue'... remote: Counting objects: 564, done. remote: Total 564 (delta 0), reused 0 (delta 0), pack-reused 564 Receiving objects: 100% (564/564), 133.16 KiB | 0 bytes/s, done.
Resolving deltas: 0% (0/276) Resolving deltas: 4% (13/276) Resolving deltas: 6% (17/276) Resolving deltas: 16% (45/276) Resolving deltas: 17% (47/276) Resolving deltas: 18% (52/276) Resolving deltas: 19% (53/276) Resolving deltas: 22% (63/276) Resolving deltas: 25% (71/276) Resolving deltas: 27% (76/276) Resolving deltas: 39% (108/276) Resolving deltas: 43% (119/276) Resolving deltas: 53% (147/276) Resolving deltas: 58% (161/276) Resolving deltas: 68% (188/276) Resolving deltas: 69% (191/276) Resolving deltas: 71% (196/276) Resolving deltas: 82% (228/276) Resolving deltas: 85% (235/276) Resolving deltas: 88% (243/276) Resolving deltas: 92% (255/276) Resolving deltas: 95% (263/276) Resolving deltas: 96% (265/276) Resolving deltas: 98% (272/276) Resolving deltas: 100% (276/276) Resolving deltas: 100% (276/276), done. Checking connectivity... done. Submodule path 'support/android-rs-glue': checked out '4ed3cb30b289aa0aa84a00e0d5682bc853108e5a' travis_time:end:0aeec95e:start=1442247767691765610,finish=1442247768647542158,duration=955776548 travis_fold:end:git.submodule  This job is running on container-based infrastructure, which does not allow use of 'sudo', setuid and setguid executables. If you require sudo, add 'sudo: required' to your .travis.yml See http://docs.travis-ci.com/user/workers/container-based-infrastructure/ for details. travis_time:start:12ea261e $ source ~/virtualenv/python2.7/bin/activate travis_time:end:12ea261e:start=1442247771676023426,finish=1442247771679776983,duration=3753557 $ python --version Python 2.7.9 $ pip --version pip 6.0.7 from /home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages (python 2.7) Could not locate requirements.txt. Override the install: key in your .travis.yml to install dependencies. travis_time:start:0d35b7a4 $ ./mach test-tidy Running virtualenv with interpreter /home/travis/virtualenv/python2.7.9/bin/python2 Using real prefix '/opt/python/2.7.9' New python executable in /home/travis/build/servo/servo/python/_virtualenv/bin/python2 Also creating executable in /home/travis/build/servo/servo/python/_virtualenv/bin/python Installing setuptools, pip...done. You are using pip version 6.0.7, however version 7.1.2 is available. You should consider upgrading via the 'pip install --upgrade pip' command. You are using pip version 6.0.7, however version 7.1.2 is available. You should consider upgrading via the 'pip install --upgrade pip' command. You are using pip version 6.0.7, however version 7.1.2 is available. You should consider upgrading via the 'pip install --upgrade pip' command. ./components/plugins/lints/sorter.rs:49: missing space before { travis_time:end:0d35b7a4:start=1442247773149902192,finish=1442247792879237673,duration=19729335481  The command "./mach test-tidy" exited with 1. Done. Your build exited with 1. 
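Aside (illustration only, not part of the patches): the single tidy error above, "./components/plugins/lints/sorter.rs:49: missing space before {", is exactly the kind of line this series teaches highfive to turn into a {file, line, comment} dict and then into a PR review comment. As a rough sketch of that idea, the snippet below parses the plain, uncolorized form of such a line; the regex and the parse_plain_tidy_errors name are assumptions made here for illustration, while the actual ServoErrorLogParser added in these patches matches the ANSI-colorized Travis output and also handles multi-line comments.

import re

# Hypothetical, simplified pattern for a plain (uncolorized) tidy error line such as:
#   ./components/plugins/lints/sorter.rs:49: missing space before {
TIDY_ERROR_RE = re.compile(r"^(?P<file>\./\S+):(?P<line>\d+):\s*(?P<comment>.+)$")

def parse_plain_tidy_errors(log):
    """Yield {'file', 'line', 'comment'} dicts for each plain tidy error line in `log`."""
    for log_line in log.splitlines():
        match = TIDY_ERROR_RE.match(log_line)
        if match:
            yield match.groupdict()

if __name__ == "__main__":
    sample = "./components/plugins/lints/sorter.rs:49: missing space before {"
    print(list(parse_plain_tidy_errors(sample)))
    # -> [{'file': './components/plugins/lints/sorter.rs', 'line': '49', 'comment': 'missing space before {'}]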
\ No newline at end of file diff --git a/resources/test-data-lowfive.json b/resources/test-data-lowfive.json new file mode 100644 index 0000000..f43ebd5 --- /dev/null +++ b/resources/test-data-lowfive.json @@ -0,0 +1,7 @@ +{ + "name": "JoshTheGoldfish/lowfive", + "target_url": "https://travis-ci.org/servo/servo/builds/74856035", + "commit": { + "sha": "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", + } +} \ No newline at end of file diff --git a/test_comment.json b/resources/test_comment.json similarity index 100% rename from test_comment.json rename to resources/test_comment.json diff --git a/test_ignored_action.json b/resources/test_ignored_action.json similarity index 100% rename from test_ignored_action.json rename to resources/test_ignored_action.json diff --git a/test_merge_approved.json b/resources/test_merge_approved.json similarity index 100% rename from test_merge_approved.json rename to resources/test_merge_approved.json diff --git a/test_merge_conflict.json b/resources/test_merge_conflict.json similarity index 100% rename from test_merge_conflict.json rename to resources/test_merge_conflict.json diff --git a/test_new_pr.json b/resources/test_new_pr.json similarity index 100% rename from test_new_pr.json rename to resources/test_new_pr.json diff --git a/test_post_retry.json b/resources/test_post_retry.json similarity index 100% rename from test_post_retry.json rename to resources/test_post_retry.json diff --git a/test_synchronize.json b/resources/test_synchronize.json similarity index 100% rename from test_synchronize.json rename to resources/test_synchronize.json diff --git a/test_tests_failed.json b/resources/test_tests_failed.json similarity index 100% rename from test_tests_failed.json rename to resources/test_tests_failed.json diff --git a/resources/test_travis_payload.json b/resources/test_travis_payload.json new file mode 100644 index 0000000..80b322e --- /dev/null +++ b/resources/test_travis_payload.json @@ -0,0 +1,206 @@ +{ + "id": 281385646, + "sha": "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", + "name": "servo/servo", + "target_url": "https://travis-ci.org/servo/servo/builds/74856035", + "context": "continuous-integration/travis-ci/pr", + "description": "The Travis CI build failed", + "state": "failure", + "commit": { + "sha": "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", + "commit": { + "author": { + "name": "Fabrice Desré", + "email": "fabrice@desre.org", + "date": "2015-08-10T02:52:05Z" + }, + "committer": { + "name": "Fabrice Desré", + "email": "fabrice@desre.org", + "date": "2015-08-10T02:52:05Z" + }, + "message": "merging master", + "tree": { + "sha": "f8acfc1d29d1d9c9024e9e4785d7877e3d09d631", + "url": "https://api.github.com/repos/servo/servo/git/trees/f8acfc1d29d1d9c9024e9e4785d7877e3d09d631" + }, + "url": "https://api.github.com/repos/servo/servo/git/commits/9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", + "comment_count": 0 + }, + "url": "https://api.github.com/repos/servo/servo/commits/9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", + "html_url": "https://github.com/servo/servo/commit/9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", + "comments_url": "https://api.github.com/repos/servo/servo/commits/9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6/comments", + "author": { + "login": "fabricedesre", + "id": 984767, + "avatar_url": "https://avatars.githubusercontent.com/u/984767?v=3", + "gravatar_id": "", + "url": "https://api.github.com/users/fabricedesre", + "html_url": "https://github.com/fabricedesre", + "followers_url": "https://api.github.com/users/fabricedesre/followers", + 
"following_url": "https://api.github.com/users/fabricedesre/following{/other_user}", + "gists_url": "https://api.github.com/users/fabricedesre/gists{/gist_id}", + "starred_url": "https://api.github.com/users/fabricedesre/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/fabricedesre/subscriptions", + "organizations_url": "https://api.github.com/users/fabricedesre/orgs", + "repos_url": "https://api.github.com/users/fabricedesre/repos", + "events_url": "https://api.github.com/users/fabricedesre/events{/privacy}", + "received_events_url": "https://api.github.com/users/fabricedesre/received_events", + "type": "User", + "site_admin": false + }, + "committer": { + "login": "fabricedesre", + "id": 984767, + "avatar_url": "https://avatars.githubusercontent.com/u/984767?v=3", + "gravatar_id": "", + "url": "https://api.github.com/users/fabricedesre", + "html_url": "https://github.com/fabricedesre", + "followers_url": "https://api.github.com/users/fabricedesre/followers", + "following_url": "https://api.github.com/users/fabricedesre/following{/other_user}", + "gists_url": "https://api.github.com/users/fabricedesre/gists{/gist_id}", + "starred_url": "https://api.github.com/users/fabricedesre/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/fabricedesre/subscriptions", + "organizations_url": "https://api.github.com/users/fabricedesre/orgs", + "repos_url": "https://api.github.com/users/fabricedesre/repos", + "events_url": "https://api.github.com/users/fabricedesre/events{/privacy}", + "received_events_url": "https://api.github.com/users/fabricedesre/received_events", + "type": "User", + "site_admin": false + }, + "parents": [ + { + "sha": "d90f28648c9e56d73fdd65026b01a820ab154890", + "url": "https://api.github.com/repos/servo/servo/commits/d90f28648c9e56d73fdd65026b01a820ab154890", + "html_url": "https://github.com/servo/servo/commit/d90f28648c9e56d73fdd65026b01a820ab154890" + }, + { + "sha": "f77973c903a3e08067feed3ba39cff3c6bf8ac11", + "url": "https://api.github.com/repos/servo/servo/commits/f77973c903a3e08067feed3ba39cff3c6bf8ac11", + "html_url": "https://github.com/servo/servo/commit/f77973c903a3e08067feed3ba39cff3c6bf8ac11" + } + ] + }, + "branches": [ + + ], + "created_at": "2015-08-10T02:53:52Z", + "updated_at": "2015-08-10T02:53:52Z", + "repository": { + "id": 3390243, + "name": "servo", + "full_name": "servo/servo", + "owner": { + "login": "servo", + "id": 2566135, + "avatar_url": "https://avatars.githubusercontent.com/u/2566135?v=3", + "gravatar_id": "", + "url": "https://api.github.com/users/servo", + "html_url": "https://github.com/servo", + "followers_url": "https://api.github.com/users/servo/followers", + "following_url": "https://api.github.com/users/servo/following{/other_user}", + "gists_url": "https://api.github.com/users/servo/gists{/gist_id}", + "starred_url": "https://api.github.com/users/servo/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/servo/subscriptions", + "organizations_url": "https://api.github.com/users/servo/orgs", + "repos_url": "https://api.github.com/users/servo/repos", + "events_url": "https://api.github.com/users/servo/events{/privacy}", + "received_events_url": "https://api.github.com/users/servo/received_events", + "type": "Organization", + "site_admin": false + }, + "private": false, + "html_url": "https://github.com/servo/servo", + "description": "The Servo Browser Engine", + "fork": false, + "url": "https://api.github.com/repos/servo/servo", + "forks_url": 
"https://api.github.com/repos/servo/servo/forks", + "keys_url": "https://api.github.com/repos/servo/servo/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/servo/servo/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/servo/servo/teams", + "hooks_url": "https://api.github.com/repos/servo/servo/hooks", + "issue_events_url": "https://api.github.com/repos/servo/servo/issues/events{/number}", + "events_url": "https://api.github.com/repos/servo/servo/events", + "assignees_url": "https://api.github.com/repos/servo/servo/assignees{/user}", + "branches_url": "https://api.github.com/repos/servo/servo/branches{/branch}", + "tags_url": "https://api.github.com/repos/servo/servo/tags", + "blobs_url": "https://api.github.com/repos/servo/servo/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/servo/servo/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/servo/servo/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/servo/servo/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/servo/servo/statuses/{sha}", + "languages_url": "https://api.github.com/repos/servo/servo/languages", + "stargazers_url": "https://api.github.com/repos/servo/servo/stargazers", + "contributors_url": "https://api.github.com/repos/servo/servo/contributors", + "subscribers_url": "https://api.github.com/repos/servo/servo/subscribers", + "subscription_url": "https://api.github.com/repos/servo/servo/subscription", + "commits_url": "https://api.github.com/repos/servo/servo/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/servo/servo/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/servo/servo/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/servo/servo/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/servo/servo/contents/{+path}", + "compare_url": "https://api.github.com/repos/servo/servo/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/servo/servo/merges", + "archive_url": "https://api.github.com/repos/servo/servo/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/servo/servo/downloads", + "issues_url": "https://api.github.com/repos/servo/servo/issues{/number}", + "pulls_url": "https://api.github.com/repos/servo/servo/pulls{/number}", + "milestones_url": "https://api.github.com/repos/servo/servo/milestones{/number}", + "notifications_url": "https://api.github.com/repos/servo/servo/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/servo/servo/labels{/name}", + "releases_url": "https://api.github.com/repos/servo/servo/releases{/id}", + "created_at": "2012-02-08T19:07:25Z", + "updated_at": "2015-08-09T23:35:17Z", + "pushed_at": "2015-08-10T02:53:32Z", + "git_url": "git://github.com/servo/servo.git", + "ssh_url": "git@github.com:servo/servo.git", + "clone_url": "https://github.com/servo/servo.git", + "svn_url": "https://github.com/servo/servo", + "homepage": "", + "size": 204624, + "stargazers_count": 4584, + "watchers_count": 4584, + "language": "Rust", + "has_issues": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 722, + "mirror_url": null, + "open_issues_count": 1049, + "forks": 722, + "open_issues": 1049, + "watchers": 4584, + "default_branch": "master" + }, + "organization": { + "login": "servo", + "id": 2566135, + "url": "https://api.github.com/orgs/servo", + "repos_url": "https://api.github.com/orgs/servo/repos", + 
"events_url": "https://api.github.com/orgs/servo/events", + "members_url": "https://api.github.com/orgs/servo/members{/member}", + "public_members_url": "https://api.github.com/orgs/servo/public_members{/member}", + "avatar_url": "https://avatars.githubusercontent.com/u/2566135?v=3", + "description": null + }, + "sender": { + "login": "mrobinson", + "id": 13536, + "avatar_url": "https://avatars.githubusercontent.com/u/13536?v=3", + "gravatar_id": "", + "url": "https://api.github.com/users/mrobinson", + "html_url": "https://github.com/mrobinson", + "followers_url": "https://api.github.com/users/mrobinson/followers", + "following_url": "https://api.github.com/users/mrobinson/following{/other_user}", + "gists_url": "https://api.github.com/users/mrobinson/gists{/gist_id}", + "starred_url": "https://api.github.com/users/mrobinson/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/mrobinson/subscriptions", + "organizations_url": "https://api.github.com/users/mrobinson/orgs", + "repos_url": "https://api.github.com/users/mrobinson/repos", + "events_url": "https://api.github.com/users/mrobinson/events{/privacy}", + "received_events_url": "https://api.github.com/users/mrobinson/received_events", + "type": "User", + "site_admin": false + } +} \ No newline at end of file diff --git a/resources/unsafe.diff b/resources/unsafe.diff new file mode 100644 index 0000000..a047d74 --- /dev/null +++ b/resources/unsafe.diff @@ -0,0 +1,223 @@ +diff --git a/components/script/dom/domexception.rs b/components/script/dom/domexception.rs +index 1975128..e63cbed 100644 +--- a/components/script/dom/domexception.rs ++++ b/components/script/dom/domexception.rs +@@ -104,4 +104,9 @@ impl DOMExceptionMethods for DOMException { + + message.to_owned() + } ++ ++ // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-error.prototype.tostring ++ fn Stringifier(&self) -> String { ++ format!("{}: {}", self.Name(), self.Message()) ++ } + } +diff --git a/components/script/dom/htmlbuttonelement.rs b/components/script/dom/htmlbuttonelement.rs +index 075df00..7023a8f 100644 +--- a/components/script/dom/htmlbuttonelement.rs ++++ b/components/script/dom/htmlbuttonelement.rs +@@ -15,7 +15,7 @@ use dom::event::Event; + use dom::eventtarget::{EventTarget, EventTargetTypeId}; + use dom::htmlelement::{HTMLElement, HTMLElementTypeId}; + use dom::htmlformelement::{FormControl, FormSubmitter}; +-use dom::htmlformelement::{SubmittedFrom}; ++use dom::htmlformelement::{SubmittedFrom, HTMLFormElement}; + use dom::node::{Node, NodeTypeId, document_from_node, window_from_node}; + use dom::validitystate::ValidityState; + use dom::virtualmethods::VirtualMethods; +@@ -82,6 +82,11 @@ impl HTMLButtonElementMethods for HTMLButtonElement { + // https://www.whatwg.org/html/#dom-fe-disabled + make_bool_setter!(SetDisabled, "disabled"); + ++ // https://html.spec.whatwg.org/multipage#dom-fae-form ++ fn GetForm(&self) -> Option> { ++ self.form_owner() ++ } ++ + // https://html.spec.whatwg.org/multipage/#dom-button-type + fn Type(&self) -> DOMString { + let elem = ElementCast::from_ref(self); +diff --git a/components/script/dom/htmlinputelement.rs b/components/script/dom/htmlinputelement.rs +index 0de98f1..33098d1 100644 +--- a/components/script/dom/htmlinputelement.rs ++++ b/components/script/dom/htmlinputelement.rs +@@ -229,6 +229,11 @@ impl HTMLInputElementMethods for HTMLInputElement { + // https://www.whatwg.org/html/#dom-fe-disabled + make_bool_setter!(SetDisabled, "disabled"); + ++ // 
https://html.spec.whatwg.org/multipage/#dom-fae-form ++ fn GetForm(&self) -> Option> { ++ self.form_owner() ++ } ++ + // https://html.spec.whatwg.org/multipage/#dom-input-defaultchecked + make_bool_getter!(DefaultChecked, "checked"); + +diff --git a/components/script/dom/webidls/DOMException.webidl b/components/script/dom/webidls/DOMException.webidl +index 7c09054..0dfb714 100644 +--- a/components/script/dom/webidls/DOMException.webidl ++++ b/components/script/dom/webidls/DOMException.webidl +@@ -44,4 +44,6 @@ interface DOMException { + + // A custom message set by the thrower. + readonly attribute DOMString message; ++ ++ stringifier; + }; +diff --git a/components/script/dom/webidls/HTMLButtonElement.webidl b/components/script/dom/webidls/HTMLButtonElement.webidl +index 7613bd5..73eec85 100644 +--- a/components/script/dom/webidls/HTMLButtonElement.webidl ++++ b/components/script/dom/webidls/HTMLButtonElement.webidl +@@ -7,7 +7,7 @@ + interface HTMLButtonElement : HTMLElement { + // attribute boolean autofocus; + attribute boolean disabled; +- //readonly attribute HTMLFormElement? form; ++ readonly attribute HTMLFormElement? form; + attribute DOMString formAction; + attribute DOMString formEnctype; + attribute DOMString formMethod; +diff --git a/components/script/dom/webidls/HTMLInputElement.webidl b/components/script/dom/webidls/HTMLInputElement.webidl +index afd605b..60c45c4 100644 +--- a/components/script/dom/webidls/HTMLInputElement.webidl ++++ b/components/script/dom/webidls/HTMLInputElement.webidl +@@ -13,7 +13,7 @@ interface HTMLInputElement : HTMLElement { + attribute boolean checked; + // attribute DOMString dirName; + attribute boolean disabled; +- //readonly attribute HTMLFormElement? form; ++ readonly attribute HTMLFormElement? form; + //readonly attribute FileList? 
files; + attribute DOMString formAction; + attribute DOMString formEnctype; +diff --git a/components/script/script_task.rs b/components/script/script_task.rs +index d8ac155..96ff1a7 100644 +--- a/components/script/script_task.rs ++++ b/components/script/script_task.rs +@@ -55,6 +55,7 @@ use js::jsapi::{DisableIncrementalGC, JS_AddExtraGCRootsTracer, JS_SetWrapObject + use js::jsapi::{GCDescription, GCProgress, JSGCInvocationKind, SetGCSliceCallback}; + use js::jsapi::{JSAutoRequest, JSGCStatus, JS_GetRuntime, JS_SetGCCallback, SetDOMCallbacks}; + use js::jsapi::{JSContext, JSRuntime, JSTracer}; ++use js::jsapi::{JSObject, SetPreserveWrapperCallback}; + use js::jsval::UndefinedValue; + use js::rust::Runtime; + use layout_interface::{ReflowQueryType}; +@@ -654,8 +655,10 @@ impl ScriptTask { + } + + unsafe { ++ unsafe extern "C" fn empty_wrapper_callback(_: *mut JSContext, _: *mut JSObject) -> u8 { 1 } + SetDOMProxyInformation(ptr::null(), 0, Some(shadow_check_callback)); + SetDOMCallbacks(runtime.rt(), &DOM_CALLBACKS); ++ SetPreserveWrapperCallback(runtime.rt(), Some(empty_wrapper_callback)); + // Pre barriers aren't working correctly at the moment + DisableIncrementalGC(runtime.rt()); + } +diff --git a/tests/wpt/metadata/html/dom/interfaces.html.ini b/tests/wpt/metadata/html/dom/interfaces.html.ini +index 29648c3..f4b6a11 100644 +--- a/tests/wpt/metadata/html/dom/interfaces.html.ini ++++ b/tests/wpt/metadata/html/dom/interfaces.html.ini +@@ -5079,9 +5079,6 @@ + [HTMLInputElement interface: attribute dirName] + expected: FAIL + +- [HTMLInputElement interface: attribute form] +- expected: FAIL +- + [HTMLInputElement interface: attribute files] + expected: FAIL + +@@ -5208,9 +5205,6 @@ + [HTMLInputElement interface: document.createElement("input") must inherit property "dirName" with the proper type (6)] + expected: FAIL + +- [HTMLInputElement interface: document.createElement("input") must inherit property "form" with the proper type (8)] +- expected: FAIL +- + [HTMLInputElement interface: document.createElement("input") must inherit property "files" with the proper type (9)] + expected: FAIL + +@@ -5346,9 +5340,6 @@ + [HTMLButtonElement interface: attribute autofocus] + expected: FAIL + +- [HTMLButtonElement interface: attribute form] +- expected: FAIL +- + [HTMLButtonElement interface: attribute formNoValidate] + expected: FAIL + +@@ -5376,9 +5367,6 @@ + [HTMLButtonElement interface: document.createElement("button") must inherit property "autofocus" with the proper type (0)] + expected: FAIL + +- [HTMLButtonElement interface: document.createElement("button") must inherit property "form" with the proper type (2)] +- expected: FAIL +- + [HTMLButtonElement interface: document.createElement("button") must inherit property "formNoValidate" with the proper type (6)] + expected: FAIL + +diff --git a/tests/ref/metadata/html/semantics/forms/form-control-infrastructure/form.html.ini b/tests/wpt/metadata/html/semantics/forms/form-control-infrastructure/form.html.ini +index 4ba6553..3a4b932 100644 +--- a/tests/wpt/metadata/html/semantics/forms/form-control-infrastructure/form.html.ini ++++ b/tests/wpt/metadata/html/semantics/forms/form-control-infrastructure/form.html.ini +@@ -1,14 +1,8 @@ + [form.html] + type: testharness +- [button.form] +- expected: FAIL +- + [fieldset.form] + expected: FAIL + +- [input.form] +- expected: FAIL +- + [keygen.form] + expected: FAIL + +diff --git a/tests/wpt/mozilla/meta/MANIFEST.json b/tests/wpt/mozilla/meta/MANIFEST.json +index 3cd5550..1449da6 100644 +--- 
a/tests/wpt/mozilla/meta/MANIFEST.json ++++ b/tests/wpt/mozilla/meta/MANIFEST.json +@@ -749,6 +749,12 @@ + "url": "/_mozilla/mozilla/parentnodes.html" + } + ], ++ "mozilla/preserve_wrapper_callback.html": [ ++ { ++ "path": "mozilla/preserve_wrapper_callback.html", ++ "url": "/_mozilla/mozilla/preserve_wrapper_callback.html" ++ } ++ ], + "mozilla/proxy_setter.html": [ + { + "path": "mozilla/proxy_setter.html", +diff --git a/tests/wpt/mozilla/tests/mozilla/preserve_wrapper_callback.html b/tests/wpt/mozilla/tests/mozilla/preserve_wrapper_callback.html +new file mode 100644 +index 0000000..13369b9 +--- /dev/null ++++ b/tests/wpt/mozilla/tests/mozilla/preserve_wrapper_callback.html +@@ -0,0 +1,22 @@ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ \ No newline at end of file diff --git a/test.py b/test.py index a963caf..4c18327 100644 --- a/test.py +++ b/test.py @@ -1,218 +1,272 @@ -from errorlogparser import ErrorLogParser -from githubapiprovider import GithubApiProvider -from mock import patch -from newpr import APIProvider, handle_payload -from payloadhandler import PayloadHandler +from errorlogparser import ErrorLogParser, ServoErrorLogParser +from githubapiprovider import APIProvider, GithubApiProvider +from mock import call, Mock, patch +from payloadhandler import PayloadHandler, GithubPayloadHandler, TravisPayloadHandler from travisciapiprovider import TravisCiApiProvider import json import os +import payloadreceiver import sys import traceback import unittest import urlparse -class TestAPIProvider(APIProvider): - def __init__(self, payload, user, new_contributor, labels, assignee, diff=""): - APIProvider.__init__(self, payload, user) - self.new_contributor = new_contributor - self.comments_posted = [] - self.labels = labels - self.assignee = assignee - self.diff = diff - +error_sample = [ + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull\n\tfound: dom::bindings::conversions::get_dom_class", + "line": "7", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener\n\tfound: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull", + "line": "8", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods\n\tfound: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener", + "line": "9", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::conversions::get_dom_class\n\tfound: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods", + "line": "10", + "file": "./components/script/dom/eventtarget.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::browsercontext\n\tfound: dom::eventtarget::EventTargetTypeId", + "line": "17", + "file": "./components/script/dom/bindings/utils.rs" + }, + { + "comment": "use statement is not in alphabetical order\n\texpected: dom::eventtarget::EventTargetTypeId\n\tfound: dom::browsercontext", + "line": "18", + "file": "./components/script/dom/bindings/utils.rs" + } +] + + +class TestAPIProvider(unittest.TestCase): + def setUp(self): + self.api_provider = APIProvider('jdm') - def is_new_contributor(self, username): - return 
self.new_contributor + def test_is_new_contributor(self): + with self.assertRaises(NotImplementedError): + self.api_provider.is_new_contributor("jdm") - def post_comment(self, body): - self.comments_posted += [body] + def test_post_comment(self): + with self.assertRaises(NotImplementedError): + self.api_provider.post_comment("Nice job!", 3947) - def add_label(self, label): - self.labels += [label] + def test_post_review_comment(self): + with self.assertRaises(NotImplementedError): + self.api_provider.post_review_comment(1234, "a453b3923e893f0383cd2893f...", "foo/bar/spam/eggs", 3, "Remove extra space") - def remove_label(self, label): - self.labels.remove(label) + def test_add_label(self): + with self.assertRaises(NotImplementedError): + self.api_provider.add_label("S-awaiting-review", 1234) - def get_labels(self): - return self.labels + def test_remove_label(self): + with self.assertRaises(NotImplementedError): + self.api_provider.remove_label("S-awaiting-review", 1234) - def get_diff(self): - return self.diff + def test_get_labels(self): + with self.assertRaises(NotImplementedError): + self.api_provider.get_labels(1234) - def set_assignee(self, assignee): - self.assignee = assignee + def test_get_diff(self): + with self.assertRaises(NotImplementedError): + self.api_provider.get_diff("https://github.com/servo/servo/pull/1234.diff") -def get_payload(filename): - with open(filename) as f: - return json.load(f) + def test_set_assignee(self): + with self.assertRaises(NotImplementedError): + self.api_provider.set_assignee("jdm", 1234) -tests = [] -def add_test(filename, initial, expected): - global tests - initial_values = {'new_contributor': initial.get('new_contributor', False), - 'labels': initial.get('labels', []), - 'diff': initial.get('diff', ''), - 'assignee': initial.get('assignee', None)} - expected_values = {'labels': expected.get('labels', []), - 'assignee': expected.get('assignee', None), - 'comments': expected.get('comments', 0)} - tests += [{'filename': filename, - 'initial': initial_values, - 'expected': expected_values}] +# TODO - add tests for exception handling +class TestGithubApiProvider(unittest.TestCase): + class FakeHeader(): + def get(self, something): + pass -def run_tests(tests): - failed = 0 - for test in tests: - try: - payload = get_payload(test['filename']) - initial = test['initial'] - api = TestAPIProvider(payload, 'highfive', initial['new_contributor'], initial['labels'], - initial['assignee'], initial['diff']) - handle_payload(api, payload) - expected = test['expected'] - assert len(api.comments_posted) == expected['comments'] - assert api.labels == expected['labels'] - assert api.assignee == expected['assignee'] - except AssertionError: - _, _, tb = sys.exc_info() - traceback.print_tb(tb) # Fixed format - tb_info = traceback.extract_tb(tb) - filename, line, func, text = tb_info[-1] - print('{}: An error occurred on line {} in statement {}'.format(test['filename'], line, text)) - failed += 1 + def setUp(self): + self.owner = "servo" + self.repo = "servo" + self.gh_provider = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) + self.gh_provider.api_req = Mock() - possible_tests = [f for f in os.listdir('.') if f.endswith('.json')] - test_files = set([test['filename'] for test in tests]) - if len(possible_tests) != len(test_files): - print 'Found unused JSON test data: %s' % ', '.join(filter(lambda x: x not in test_files, possible_tests)) - sys.exit(1) - print 'Ran %d tests, %d failed' % (len(tests), failed) + 
@patch('githubapiprovider.gzip') + @patch('githubapiprovider.urllib2') + @patch('githubapiprovider.base64') + def test_api_req(self, base64_mock, urllib_mock, gzip_mock): + class FakeRequest(): + def __init__(self): + self.headers = {} + self.get_method = lambda: "Get" - if failed: - sys.exit(1) -add_test('test_new_pr.json', {'new_contributor': True}, - {'labels': ['S-awaiting-review'], 'comments': 1}) + def add_header(self, key, value): + self.headers[key] = value -add_test('test_new_pr.json', {'diff': "+ unsafe fn foo()"}, - {'labels': ['S-awaiting-review'], 'comments': 1}) -add_test('test_new_pr.json', {'diff': "diff --git components/layout/"}, - {'labels': ['S-awaiting-review'], 'comments': 1}) + class FakeHeader(): + def get(self, type): + return 'gzip' -add_test('test_new_pr.json', {'diff': "diff --git components/layout/\ndiff --git tests/wpt"}, - {'labels': ['S-awaiting-review'], 'comments': 0}) -add_test('test_new_pr.json', {'new_contributor': True}, - {'labels': ['S-awaiting-review'], 'comments': 1}) + class FakeResponse(): + def __init__(self, fakeHeader): + self.header = fakeHeader -add_test('test_ignored_action.json', {}, {}) -add_test('test_synchronize.json', {'labels': ['S-needs-code-changes', 'S-tests-failed', 'S-awaiting-merge']}, - {'labels': ['S-awaiting-review']}) + def info(self): + return self.header -add_test('test_comment.json', {}, {'assignee': 'jdm'}) -add_test('test_merge_approved.json', {'labels': ['S-needs-code-changes', 'S-needs-rebase', - 'S-tests-failed', 'S-needs-squash', - 'S-awaiting-review']}, {'labels': ['S-awaiting-merge']}) + def read(self): + return "content" -add_test('test_merge_conflict.json', {'labels': ['S-awaiting-merge']}, - {'labels': ['S-needs-rebase']}) + base64_mock.standard_b64encode = Mock(return_value="User:Token") + urllib_mock.Request = Mock(return_value=FakeRequest()) + urllib_mock.urlopen = Mock(return_value=FakeResponse(FakeHeader())) + gzip_mock.GzipFile = Mock(return_value=FakeResponse(FakeHeader())) + gh_provider = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) + gh_provider.api_req("GET", "https://api.github.com/repos/servo/servo/contibutors", data={"test":"data"}, media_type="Test Media") -add_test('test_tests_failed.json', {'labels': ['S-awaiting-merge']}, - {'labels': ['S-tests-failed']}) + header = FakeHeader() + print header.get('Content-Encoding') -add_test('test_post_retry.json', {'labels': ['S-tests-failed']}, - {'labels': ['S-awaiting-merge']}) -add_test('test_post_retry.json', {'labels': ['S-awaiting-merge']}, - {'labels': ['S-awaiting-merge']}) + def test_parse_header_links(self): + header_links = '; rel="next", ; rel="last"' + expected = {'last': 'https://api.github.com/repos/servo/servo/contributors?page=11', 'next': 'https://api.github.com/repos/servo/servo/contributors?page=2'} + gh_provider = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) + self.assertEquals(expected, gh_provider.parse_header_links(header_links)) + self.assertEquals(None, gh_provider.parse_header_links(None)) -### Mock Setup -def mock_urllib2_urlopen(url): - parsed_url = urlparse.urlparse(url.get_full_url()) - local_file = os.path.normpath('test-files/{}'.format(parsed_url.path[1:].replace('/', '.'))) - return open(local_file, 'rb') + def test_is_new_contributor(self): + next_page_url = "https://api.github.com/repos/servo/servo/contributors?page=2" + calls = [call("GET", GithubApiProvider.contributors_url % (self.owner, self.repo)), call("GET", next_page_url)] + gh_provider = 
GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) + gh_provider.api_req = Mock(side_effect = [{ "header": self.FakeHeader(), "body": json.dumps([{"login":"Ms2ger"}])}, { "header": self.FakeHeader(), "body": json.dumps([{"login":"jdm"}, {"login":"Ms2ger"}])}] ) + gh_provider.parse_header_links = Mock(side_effect = [{"next": next_page_url}, ""]) + + self.assertFalse(gh_provider.is_new_contributor("jdm")) -def setup_mock_urllib2_urlopen(self, module): - self.patcher = patch('{}.urllib2.urlopen'.format(module), mock_urllib2_urlopen) - self.patcher.start() + gh_provider.api_req = Mock(side_effect = [{ "header": self.FakeHeader(), "body": json.dumps([{"login":"Ms2ger"}])}, { "header": self.FakeHeader(), "body": json.dumps([{"login":"jdm"}, {"login":"Ms2ger"}])}] ) + gh_provider.parse_header_links = Mock(side_effect = [{"next": next_page_url}, ""]) + + self.assertTrue(gh_provider.is_new_contributor("JoshTheGoldfish")) + + + gh_provider.api_req.assert_has_calls(calls) -def mock_urllib_urlopen(url): - parsed_url = urlparse.urlparse(url) - local_file = os.path.normpath('test-files/{}'.format(parsed_url.path[1:].replace('/', '.'))) + def test_post_comment(self): + body = "Great job!" + issue_num = 1 + self.gh_provider.post_comment(body, issue_num) + + self.gh_provider.api_req.assert_called_with("POST", GithubApiProvider.post_comment_url % (self.owner, self.repo, issue_num), {"body":body}) - return open(local_file, 'rb') + def test_post_review_comment(self): + pr_num = 1 + commit_id = "a453b3923e893f0383cd2893f..." + path = "./file/path", + pos = 1 + body = "Revmoe extra newline" + self.gh_provider.post_review_comment(pr_num, commit_id, path, pos, body) + + self.gh_provider.api_req.assert_called_with("POST", GithubApiProvider.post_review_comment_url % (self.owner, self.repo, pr_num), {"body": body, "commit_id":commit_id, "path":path, "position":pos}) -def setup_mock_urllib_urlopen(self, module): - self.patcher = patch('{}.urllib.urlopen'.format(module), mock_urllib_urlopen) - self.patcher.start() + def test_add_label(self): + label = "S-awaiting-review" + issue_num = 1 + self.gh_provider.add_label(label, issue_num) + + self.gh_provider.api_req.assert_called_with("POST", GithubApiProvider.add_label_url % (self.owner, self.repo, issue_num), [label]) -### Tests -### Todo - Remove these tests and use existing GithubAPI Provider Code -class TestGithubApiProvider(unittest.TestCase): - def setUp(self): - setup_mock_urllib2_urlopen(self, 'githubapiprovider') - self.github = GithubApiProvider("lowfive-servo", "some_fake_token") + def test_remove_label(self): + label = "S-awaiting-review" + issue_num = 1 + self.gh_provider.remove_label(label, issue_num) - self.addCleanup(self.patcher.stop) + self.gh_provider.api_req.assert_called_with("DELETE", GithubApiProvider.remove_label_url % (self.owner, self.repo, issue_num, label), {}) - def test_login(self): - self.github.login() - self.github.login('lowfive-servo', 'some_fake_token') + def test_get_labels(self): + gh_provider = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) + issue_num = 1 + label1 = 'S-awaiting-review' + label2 = 'C-assigned' + gh_provider.api_req = Mock(return_value = {"header":"", "body":json.dumps([{'name':label1}, {'name':label2}])}) + + self.assertEquals([label1, label2], gh_provider.get_labels(issue_num)) + gh_provider.api_req.assert_called_with("GET", GithubApiProvider.get_label_url % (self.owner, self.repo, issue_num)) + + def test_get_diff(self): + gh_provider = 
GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) + diff = "fake diff" + diff_url = "https://github.com/servo/servo/pull/1.diff" + gh_provider.api_req = Mock(return_value = {"header":"", "body":diff}) + + self.assertEquals(diff, gh_provider.get_diff("https://github.com/servo/servo/pull/1.diff")) + gh_provider.api_req.assert_called_with("GET", diff_url) - def test_post_review_comment_on_pr(self): - repo = "servo/servo" - pr_num = 1 - commit_id = "6dcb09b5b57875f334f61aebed695e2e4193db5e" - message = "Great stuff" - file_path = "file1.txt" - line = 1 - self.github.post_review_comment_on_pr(repo, pr_num, commit_id, message, file_path, line) + def test_set_assignee(self): + gh_provider = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) + issue_num = 1 + assignee = "jdm" + gh_provider.api_req = Mock(return_value = {"header":"", "body":assignee}) + self.assertEquals(assignee, gh_provider.set_assignee(assignee, issue_num)) + gh_provider.api_req.assert_called_with("PATCH", GithubApiProvider.issue_url % (self.owner, self.repo, issue_num), {"assignee":assignee}) +@patch('travisciapiprovider.urllib.urlopen') class TestTravisCiApiProvider(unittest.TestCase): - def setUp(self): - setup_mock_urllib_urlopen(self, 'travisciapiprovider') - self.travis = TravisCiApiProvider() + class FakeFile(): + def __init__(self, contents): + self.contents = contents - self.build_data = self.travis.get_build(1) - self.addCleanup(self.patcher.stop) + def read(self): + return self.contents - # These tests are weak - def test_get_build(self): - self.assertIn('matrix', self.travis.get_build(1)) + def setUp(self): + self.travis = TravisCiApiProvider() + + def test_get_build(self, urlopen_mock): + urlopen_mock.return_value = self.FakeFile(json.dumps({"Something":"here"})) + self.travis.get_build(1) + urlopen_mock.assert_called_with(TravisCiApiProvider.build_url.format(build_id=1)) - def test_get_log(self): - self.travis.get_log(self.build_data) + def test_get_log(self, urlopen_mock): + job_id = 1 + urlopen_mock.return_value = self.FakeFile(open('resources/single-line-comment.log').read()) + self.travis.get_log({"matrix":[{'id':job_id}]}) + urlopen_mock.assert_called_with(TravisCiApiProvider.log_url.format(job_id=job_id)) - def test_get_pull_request_number(self): - build_data = self.travis.get_build(1) - self.assertEqual(self.travis.get_pull_request_number(build_data), 7601) + def test_get_pull_request_number(self, urlopen_mock): + pr_num = 1234 + self.assertEquals(1234, self.travis.get_pull_request_number({"compare_url":"https://github.com/servo/servo/{}".format(pr_num)})) class TestErrorLogParser(unittest.TestCase): @@ -225,90 +279,250 @@ def test_parse_log(self): self.log_parser.parse_log("log", "regex") -class TestPayloadError(unittest.TestCase): +class TestServoErrorLogParser(unittest.TestCase): def setUp(self): - payload = \ - { - "target_url": "https://travis-ci.org/servo/servo/builds/74856035", - "name": "servo/servo", - "commit": { - "sha": "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6" + self.error_parser = ServoErrorLogParser() + self.multi_log = open('resources/multi-line-comment.log').read() + self.expected_multi_errors = error_sample + self.single_log = open('resources/single-line-comment.log').read() + self.expected_single_errors = [ + { + 'comment': 'missing space before {', + 'line': '49', + 'file': './components/plugins/lints/sorter.rs' } - } + ] + + + def test_parse_errors(self): + self.assertEqual(self.expected_multi_errors, 
list(self.error_parser.parse_log(self.multi_log))) + self.assertEqual(self.expected_single_errors, list(self.error_parser.parse_log(self.single_log))) + + +class TestPayloadHandler(unittest.TestCase): + def setUp(self): + self.handler = PayloadHandler() + + + def test_handle_payload(self): + with self.assertRaises(NotImplementedError): + self.handler.handle_payload("payload") - self.payload_handler = PayloadHandler(payload) +class TestTravisPayloadHandler(unittest.TestCase): + def setUp(self): + class TravisDouble(): + def get_build(self, build_id): + return 1 - def test_get_build_id(self): - self.assertEqual(self.payload_handler.get_build_id(), 74856035) + def get_log(self, build_data): + return open('resources/multi-line-comment.log').read() - def test_get_commit_id(self): - self.assertEqual(self.payload_handler.get_commit_id(), "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6") + def get_pull_request_number(self, build_data): + return 1 - def test_get_repo_name(self): - self.assertEqual(self.payload_handler.get_repo_name(), 'servo/servo') + class ErrorParserDouble(): + def parse_log(self, log): + return error_sample + + travis_dbl = TravisDouble() + error_parser_dbl = ErrorParserDouble() + self.github = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", "servo", "servo") + self.github.post_review_comment = Mock(name="post_review_comment") + + self.payload_handler = TravisPayloadHandler(self.github, travis_dbl, error_parser_dbl) + def test_handle_payload(self): - with self.assertRaises(NotImplementedError): - self.payload_handler.handle_payload() + payload = json.loads(open('resources/test_travis_payload.json').read()) + self.payload_handler.handle_payload(payload) + err_msg = TravisPayloadHandler.msg_template + calls = [ + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[0]['file'], error_sample[0]['line'], error_sample[0]['comment']), error_sample[0]['file'], error_sample[0]['line']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[1]['file'], error_sample[1]['line'], error_sample[1]['comment']), error_sample[1]['file'], error_sample[1]['line']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[2]['file'], error_sample[2]['line'], error_sample[2]['comment']), error_sample[2]['file'], error_sample[2]['line']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[3]['file'], error_sample[3]['line'], error_sample[3]['comment']), error_sample[3]['file'], error_sample[3]['line']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[4]['file'], error_sample[4]['line'], error_sample[4]['comment']), error_sample[4]['file'], error_sample[4]['line']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[5]['file'], error_sample[5]['line'], error_sample[5]['comment']), error_sample[5]['file'], error_sample[5]['line']), + ] -class TestServoErrorLogParser(unittest.TestCase): + self.github.post_review_comment.assert_has_calls(calls) + +class TestGithubPayloadHandler(unittest.TestCase): def setUp(self): - self.error_parser = run.ServoErrorLogParser() - self.multi_log = open('test-files/multi-line-comment.log').read() - self.expected_multi_errors = \ - [ - { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull\n\tfound: dom::bindings::conversions::get_dom_class", - "line": "7", - "file": "./components/script/dom/eventtarget.rs" - 
}, - { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener\n\tfound: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull", - "line": "8", - "file": "./components/script/dom/eventtarget.rs" - }, - { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods\n\tfound: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener", - "line": "9", - "file": "./components/script/dom/eventtarget.rs" - }, - { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::conversions::get_dom_class\n\tfound: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods", - "line": "10", - "file": "./components/script/dom/eventtarget.rs" - }, - { - "comment": "use statement is not in alphabetical order\n\texpected: dom::browsercontext\n\tfound: dom::eventtarget::EventTargetTypeId", - "line": "17", - "file": "./components/script/dom/bindings/utils.rs" - }, - { - "comment": "use statement is not in alphabetical order\n\texpected: dom::eventtarget::EventTargetTypeId\n\tfound: dom::browsercontext", - "line": "18", - "file": "./components/script/dom/bindings/utils.rs" - } - ] + self.github_user = "jdm" + self.github = GithubApiProvider(self.github_user, "a453b3923e893f0383cd2893f...", "servo", "servo") + self.github.remove_label = Mock() + self.github.add_label = Mock() - self.single_log = open('test-files/single-line-comment.log').read() - self.expected_single_errors = \ - [ - { - 'comment': 'missing space before {', - 'line': '49', - 'file': './components/plugins/lints/sorter.rs' - } + + def test_handle_payload(self): + pl_handler = GithubPayloadHandler(None) + pl_handler.new_pr = Mock() + pl_handler.update_pr = Mock() + pl_handler.new_comment = Mock() + + payload = {"action":"created", "issue":{"number":1}} + pl_handler.handle_payload(payload) + pl_handler.new_comment.assert_called_with("1", payload) + + payload = {"action":"opened", "number":"1"} + pl_handler.handle_payload(payload) + pl_handler.new_pr.assert_called_with("1", payload) + + payload = {"action":"synchronize", "number":"1"} + pl_handler.handle_payload(payload) + pl_handler.new_pr.update_pr("1", payload) + + # should not break + payload = {"action":"other", "number":"1"} + pl_handler.handle_payload(payload) + + + def test_manage_pr_state(self): + self.github.get_labels = Mock(return_value=["S-awaiting-merge", "S-tests-failed", "S-needs-code-changes", "S-needs-rebase"]) + pl_handler = GithubPayloadHandler(self.github) + full_cov_payload = {"action":"synchronize", "pull_request":{"mergeable":True}} + issue_num = 1 + + pl_handler.manage_pr_state(issue_num, full_cov_payload) + self.github.get_labels.assert_called_with(issue_num) + calls = [ + call("S-awaiting-merge", issue_num), + call("S-tests-failed", issue_num), + call("S-needs-code-changes", issue_num), + call("S-needs-rebase", issue_num) ] + self.github.remove_label.assert_has_calls(calls) + self.github.add_label.assert_called_with("S-awaiting-review", issue_num) + + # Not sure it's worth the effort to cover all branches of manage_pr_state + + + def test_new_comment_no_action(self): + pl_handler = GithubPayloadHandler(self.github) + pl_handler._find_reviewer = Mock() + payload = {"issue":{"state":"closed"}} + issue_num = 1 + + pl_handler.new_comment(issue_num, payload) + pl_handler._find_reviewer.assert_not_called() - def test_parse_errors(self): - 
self.assertEqual(self.expected_multi_errors, list(self.error_parser.parse_log(self.multi_log))) - self.assertEqual(self.expected_single_errors, list(self.error_parser.parse_log(self.single_log))) + payload = {"issue":{"state":"open","pull_request":1},"comment":{"user":{"login":self.github_user}}} + pl_handler.new_comment(issue_num, payload) + pl_handler._find_reviewer.assert_not_called() + + + def test_new_comment_approved(self): + self.github.set_assignee = Mock() + self.github.get_labels = Mock(return_value=["S-awaiting-review"]) + pl_handler = GithubPayloadHandler(self.github) + payload = {"issue":{"state":"open","pull_request":1},"comment":{"user":{"login":"bors-servo"},"body":"\br?:@jdm Testing commit"}} + issue_num = 1 + + pl_handler.new_comment(issue_num, payload) + self.github.set_assignee.assert_called_with("jdm", issue_num) + self.github.get_labels.assert_called_with(issue_num) + self.github.remove_label.assert_called_with("S-awaiting-review", issue_num) + self.github.add_label.assert_called_with("S-awaiting-merge") + + + def test_new_comment_failed_test(self): + self.github.set_assignee = Mock() + self.github.get_labels = Mock() + pl_handler = GithubPayloadHandler(self.github) + payload = {"issue":{"state":"open","pull_request":1},"comment":{"user":{"login":"bors-servo"},"body":"\br?:@jdm Test failed"}} + issue_num = 1 + + pl_handler.new_comment(issue_num, payload) + self.github.remove_label.assert_called_with("S-awaiting-merge", issue_num) + self.github.add_label.assert_called_with("S-tests-failed", issue_num) + + + def test_new_comment_merge_conflicts(self): + self.github.set_assignee = Mock() + self.github.get_labels = Mock() + pl_handler = GithubPayloadHandler(self.github) + payload = {"issue":{"state":"open","pull_request":1},"comment":{"user":{"login":"bors-servo"},"body":"\br?:@jdm Please resolve the merge conflicts"}} + issue_num = 1 + + pl_handler.new_comment(issue_num, payload) + self.github.remove_label.assert_called_with("S-awaiting-merge", issue_num) + self.github.add_label.assert_called_with("S-needs-rebase", issue_num) + + + def test_new_pr_no_msg(self): + self.github.is_new_contributor = Mock(return_value=True) + self.github.post_comment = Mock() + self.github.get_diff = Mock(return_value="") + pl_handler = GithubPayloadHandler(self.github) + pl_handler.manage_pr_state = Mock() + pl_handler.post_comment = Mock() + diff_url = "https://github.com/servo/servo/pull/1234.diff" + payload = {"pull_request":{"user":{"login":"jdm"},"diff_url":diff_url}} + issue_num = 1 + rand_val = 'jdm' + + with patch('payloadhandler.random.choice', return_value=rand_val) as mock_random: + pl_handler.new_pr(issue_num, payload) + pl_handler.manage_pr_state.assert_called_with(issue_num, payload) + self.github.post_comment.assert_called_with(GithubPayloadHandler.welcome_msg % rand_val, issue_num) + self.github.get_diff.assert_called_with(diff_url) + + + def test_new_pr_unsafe_msg(self): + self.github.is_new_contributor = Mock(return_value=False) + self.github.post_comment = Mock() + self.github.get_diff = Mock(return_value=open('resources/unsafe.diff').read()) + pl_handler = GithubPayloadHandler(self.github) + pl_handler.manage_pr_state = Mock() + pl_handler.post_comment = Mock() + payload = {"pull_request":{"user":{"login":"jdm"},"diff_url":"https://github.com/servo/servo/pull/1234.diff"}} + issue_num = 1 + + pl_handler.new_pr(issue_num, payload) + self.github.post_comment.assert_called_with(GithubPayloadHandler.warning_summary % '* ' + GithubPayloadHandler.unsafe_warning_msg, issue_num) + 
+ + def test_new_pr_needs_reftest(self): + self.github.is_new_contributor = Mock(return_value=False) + self.github.post_comment = Mock() + self.github.get_diff = Mock(return_value=open('resources/needs_reftest.diff').read()) + pl_handler = GithubPayloadHandler(self.github) + pl_handler.manage_pr_state = Mock() + pl_handler.post_comment = Mock() + payload = {"pull_request":{"user":{"login":"jdm"},"diff_url":"https://github.com/servo/servo/pull/1234.diff"}} + issue_num = 1 + + pl_handler.new_pr(issue_num, payload) + self.github.post_comment.assert_called_with(GithubPayloadHandler.warning_summary % '* ' + GithubPayloadHandler.reftest_required_msg, issue_num) + + + def test_update_pr(self): + pl_handler = GithubPayloadHandler(self.github) + pl_handler.manage_pr_state = Mock() + issue_num = 1 + payload = {"payload"} + + pl_handler.update_pr(issue_num, payload) + pl_handler.manage_pr_state.assert_called_with(issue_num, payload) + + +class TestPayloadReceiver(unittest.TestCase): + def setUp(self): + self.github_created_payload = json.loads(open('resources/test_post_retry.json').read()) + self.github_other_payload = json.loads(open('resources/test_new_pr.json').read()) + self.travis_payload = json.loads(open('resources/test_travis_payload.json').read()) -if __name__ == "__main__": - run_tests(tests) - unittest.main() + def test_extract_globals_from_payload(self): + self.assertEqual(("servo", "servo"), payloadreceiver.extract_globals_from_payload(self.github_created_payload)) + self.assertEqual(("servo", "servo"), payloadreceiver.extract_globals_from_payload(self.github_other_payload)) + self.assertEqual(("servo", "servo"), payloadreceiver.extract_globals_from_payload(self.travis_payload)) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file From 1e47cc5d50eab4063fe9c97b368a4868d04d9caf Mon Sep 17 00:00:00 2001 From: erneyja Date: Sat, 26 Sep 2015 20:43:47 -0400 Subject: [PATCH 5/7] fixup! Some further refactoring. 
Tests completed --- .travis.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4ba146b..db4df32 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,5 @@ language: python python: - "2.7" -install: - - pip install -r requirements.txt script: python test.py -sudo: false +sudo: false \ No newline at end of file From 4ac8544027f668310752ad93cb9f2ca68831779c Mon Sep 17 00:00:00 2001 From: erneyja Date: Sun, 27 Sep 2015 00:47:03 -0400 Subject: [PATCH 6/7] Do not re-post existing comments --- errorlogparser.py | 18 ++++--- githubapiprovider.py | 16 +++++- payloadhandler.py | 45 ++++++++++++++-- resources/review_comments.json | 93 ++++++++++++++++++++++++++++++++++ test.py | 85 ++++++++++++++++++------------- 5 files changed, 209 insertions(+), 48 deletions(-) create mode 100644 resources/review_comments.json diff --git a/errorlogparser.py b/errorlogparser.py index f92cd12..ffaa655 100644 --- a/errorlogparser.py +++ b/errorlogparser.py @@ -5,6 +5,10 @@ def parse_log(self, log, error_re): raise NotImplementedError class ServoErrorLogParser(ErrorLogParser): + path_key = 'path' + position_key = 'position' + body_key = 'body' + def parse_log(self, log): error_re = "\\x1b\[94m(.+?)\\x1b\[0m:\\x1b\[93m(.+?)\\x1b\[0m:\s\\x1b\[91m(.+?)(?:\\x1b\[0m|$)" cont_comment_re = "\t\\x1b\[\d{2}m(.+?)\\x1b\[0m" @@ -13,16 +17,16 @@ def parse_log(self, log): matches = [] log_list = log.splitlines() - abbr_log_list = self._trim_log(log_list, error_re) + trimmed_log_list = self._trim_log(log_list, error_re) - for log_line in abbr_log_list: + for log_line in trimmed_log_list: err_match = re.match(error_re, log_line) if err_match: matches.append(list(err_match.groups())) else: cont_comment_match = re.match(cont_comment_re, log_line) if cont_comment_match: - matches[-1][-1] += "\n\t{}".format(list(cont_comment_match.groups())[0]) + matches[-1][-1] += "\n{}".format(list(cont_comment_match.groups())[0]) return self._process_errors(matches) @@ -32,7 +36,7 @@ def _trim_log(self, log_list, error_re): Cut off irrelevant details so cont_comment_re doesn't match something that isn't an error comment """ - abbr_log_list = log_list + trimmed_log_list = log_list err_match = None i = 0 @@ -41,9 +45,9 @@ def _trim_log(self, log_list, error_re): i += 1 if err_match: - abbr_log_list = log_list[i - 1:] + trimmed_log_list = log_list[i - 1:] - return abbr_log_list + return trimmed_log_list def _process_errors(self, matches): @@ -51,5 +55,5 @@ def _process_errors(self, matches): def _convert_match_to_dict(self, match): - return {"file": match[0], "line": match[1], "comment": match[2]} + return {self.path_key: match[0], self.position_key: int(match[1]), self.body_key: match[2]} \ No newline at end of file diff --git a/githubapiprovider.py b/githubapiprovider.py index b884cad..91880df 100644 --- a/githubapiprovider.py +++ b/githubapiprovider.py @@ -48,7 +48,7 @@ def set_assignee(self, assignee, issue): class GithubApiProvider(APIProvider): contributors_url = "https://api.github.com/repos/%s/%s/contributors?per_page=400" post_comment_url = "https://api.github.com/repos/%s/%s/issues/%s/comments" - post_review_comment_url = "https://api.github.com/repos/%s/%s/pulls/%s/comments" + review_comment_url = "https://api.github.com/repos/%s/%s/pulls/%s/comments" collaborators_url = "https://api.github.com/repos/%s/%s/collaborators" issue_url = "https://api.github.com/repos/%s/%s/issues/%s" get_label_url = "https://api.github.com/repos/%s/%s/issues/%s/labels" @@ -141,7 +141,7 @@ def post_comment(self, 
body, issue): def post_review_comment(self, pr_num, commit_id, path, pos, body): try: - result = self.api_req("POST", self.post_review_comment_url % (self.owner, self.repo, pr_num), + result = self.api_req("POST", self.review_comment_url % (self.owner, self.repo, pr_num), {"body": body, "commit_id":commit_id, "path":path, "position":pos}) except urllib2.HTTPError, e: if e.code == 201: @@ -150,6 +150,18 @@ def post_review_comment(self, pr_num, commit_id, path, pos, body): raise e + def get_review_comments(self, pr_num): + try: + result = self.api_req("GET", self.review_comment_url % (self.owner, self.repo, pr_num)) + + return json.loads(result['body']) + except urllib2.HTTPError, e: + if e.code == 201: + pass + else: + raise e + + def add_label(self, label, issue): try: result = self.api_req("POST", self.add_label_url % (self.owner, self.repo, issue), diff --git a/payloadhandler.py b/payloadhandler.py index 095a384..ad25c45 100644 --- a/payloadhandler.py +++ b/payloadhandler.py @@ -1,5 +1,7 @@ import random import re +from copy import copy +from errorlogparser import ServoErrorLogParser class PayloadHandler(): @@ -8,7 +10,7 @@ def handle_payload(self, payload): class TravisPayloadHandler(PayloadHandler): - msg_template = "Please fix the following error:\n\nFile: {}\nLine Number: {}\nError: {}" + msg_template = "Please fix the following error:\n\n**File:** {}\n**Line Number:** {}\n**Error:** {}" def __init__(self, github, travis, error_parser): self.travis = travis @@ -22,9 +24,44 @@ def handle_payload(self, payload): err_data = self.error_parser.parse_log(log) pr_num = self.travis.get_pull_request_number(build_data) - for err_datum in err_data: - err_message = self.msg_template.format(err_datum['file'], err_datum['line'], err_datum['comment']) - self.github.post_review_comment(pr_num, commit_id, err_message, err_datum['file'], err_datum['line']) + existing_comments = [self._build_existing_comment_dict(comment) for comment in self.github.get_review_comments(pr_num)] + new_comments = [self._build_review_comment_dict(err_datum) for err_datum in err_data] + + comments_to_post = self._delete_existing_comments(new_comments, existing_comments) + + for comment in comments_to_post: + self.github.post_review_comment(pr_num, commit_id, comment[self.error_parser.body_key], comment[self.error_parser.path_key], comment[self.error_parser.position_key]) + + def _build_review_comment_dict(self, err_datum): + new_datum = err_datum.copy() + new_datum[self.error_parser.body_key] = self.msg_template.format(err_datum[self.error_parser.path_key], err_datum[self.error_parser.position_key], err_datum[self.error_parser.body_key]) + + return new_datum + + def _build_existing_comment_dict(self, comment): + new_comment = {} + + new_comment[self.error_parser.body_key] = comment[self.error_parser.body_key] + new_comment[self.error_parser.path_key] = comment[self.error_parser.path_key] + new_comment[self.error_parser.position_key] = comment[self.error_parser.position_key] + + return new_comment + + def _delete_existing_comments(self, new, existing): + new_copy = copy(new) + to_delete = [] + + for subject in reversed(new): + for comment in existing: + if subject[self.error_parser.body_key] == comment[self.error_parser.body_key] and \ + subject[self.error_parser.path_key] == comment[self.error_parser.path_key] and \ + subject[self.error_parser.position_key] == comment[self.error_parser.position_key]: + + new_copy.remove(subject) + break + + return new_copy + def _get_build_id(self, payload): return 
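Taken together, _build_existing_comment_dict and _delete_existing_comments make posting idempotent: a freshly parsed error is dropped whenever its body, path and position already appear among the PR's review comments. The same filtering idea in a standalone, set-based sketch (the dictionaries are invented for illustration and this is not the patch's exact implementation):

    def drop_already_posted(new_comments, existing_comments):
        # A comment is a duplicate when its (body, path, position) triple
        # has already been posted on the pull request.
        def key(comment):
            return (comment["body"], comment["path"], comment["position"])

        posted = set(key(c) for c in existing_comments)
        return [c for c in new_comments if key(c) not in posted]

    existing = [{"body": "Hello", "path": "/hello", "position": 1}]
    new = [{"body": "Hello", "path": "/hello", "position": 1},
           {"body": "Goodbye", "path": "/goodbye", "position": 1}]

    print(drop_already_posted(new, existing))
    # -> [{'body': 'Goodbye', 'path': '/goodbye', 'position': 1}]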
int(payload["target_url"].split("/")[-1]) diff --git a/resources/review_comments.json b/resources/review_comments.json new file mode 100644 index 0000000..b608ec4 --- /dev/null +++ b/resources/review_comments.json @@ -0,0 +1,93 @@ +[ + { + "url": "https://api.github.com/repos/octocat/Hello-World/pulls/comments/1", + "id": 1, + "diff_hunk": "@@ -16,33 +16,40 @@ public class Connection : IConnection...", + "path": "./components/script/dom/bindings/utils.rs", + "position": 17, + "original_position": 4, + "commit_id": "6dcb09b5b57875f334f61aebed695e2e4193db5e", + "original_commit_id": "9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840", + "user": { + "login": "octocat", + "id": 1, + "avatar_url": "https://github.com/images/error/octocat_happy.gif", + "gravatar_id": "", + "url": "https://api.github.com/users/octocat", + "html_url": "https://github.com/octocat", + "followers_url": "https://api.github.com/users/octocat/followers", + "following_url": "https://api.github.com/users/octocat/following{/other_user}", + "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", + "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", + "organizations_url": "https://api.github.com/users/octocat/orgs", + "repos_url": "https://api.github.com/users/octocat/repos", + "events_url": "https://api.github.com/users/octocat/events{/privacy}", + "received_events_url": "https://api.github.com/users/octocat/received_events", + "type": "User", + "site_admin": false + }, + + "body": "Please fix the following error:\n\n**File:** ./components/script/dom/bindings/utils.rs\n**Line Number:** 17\n**Error:** use statement is not in alphabetical order\nexpected: dom::browsercontext\nfound: dom::eventtarget::EventTargetTypeId", + "created_at": "2011-04-14T16:00:49Z", + "updated_at": "2011-04-14T16:00:49Z", + "html_url": "https://github.com/octocat/Hello-World/pull/1#discussion-diff-1", + "pull_request_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1", + "_links": { + "self": { + "href": "https://api.github.com/repos/octocat/Hello-World/pulls/comments/1" + }, + "html": { + "href": "https://github.com/octocat/Hello-World/pull/1#discussion-diff-1" + }, + "pull_request": { + "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1" + } + } + }, + { + "url": "https://api.github.com/repos/octocat/Hello-World/pulls/comments/1", + "id": 1, + "diff_hunk": "@@ -16,33 +16,40 @@ public class Connection : IConnection...", + "path": "./components/script/dom/bindings/utils.rs", + "position": 18, + "original_position": 4, + "commit_id": "6dcb09b5b57875f334f61aebed695e2e4193db5e", + "original_commit_id": "9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840", + "user": { + "login": "octocat", + "id": 1, + "avatar_url": "https://github.com/images/error/octocat_happy.gif", + "gravatar_id": "", + "url": "https://api.github.com/users/octocat", + "html_url": "https://github.com/octocat", + "followers_url": "https://api.github.com/users/octocat/followers", + "following_url": "https://api.github.com/users/octocat/following{/other_user}", + "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", + "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", + "organizations_url": "https://api.github.com/users/octocat/orgs", + "repos_url": "https://api.github.com/users/octocat/repos", + "events_url": 
"https://api.github.com/users/octocat/events{/privacy}", + "received_events_url": "https://api.github.com/users/octocat/received_events", + "type": "User", + "site_admin": false + }, + "body": "Please fix the following error:\n\n**File:** ./components/script/dom/bindings/utils.rs\n**Line Number:** 18\n**Error:** use statement is not in alphabetical order\nexpected: dom::eventtarget::EventTargetTypeId\nfound: dom::browsercontext", + "created_at": "2011-04-14T16:00:49Z", + "updated_at": "2011-04-14T16:00:49Z", + "html_url": "https://github.com/octocat/Hello-World/pull/1#discussion-diff-1", + "pull_request_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1", + "_links": { + "self": { + "href": "https://api.github.com/repos/octocat/Hello-World/pulls/comments/1" + }, + "html": { + "href": "https://github.com/octocat/Hello-World/pull/1#discussion-diff-1" + }, + "pull_request": { + "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1" + } + } + } +] \ No newline at end of file diff --git a/test.py b/test.py index 4c18327..074242a 100644 --- a/test.py +++ b/test.py @@ -13,34 +13,34 @@ error_sample = [ { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull\n\tfound: dom::bindings::conversions::get_dom_class", - "line": "7", - "file": "./components/script/dom/eventtarget.rs" + "body": "use statement is not in alphabetical order\nexpected: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull\nfound: dom::bindings::conversions::get_dom_class", + "position": 7, + "path": "./components/script/dom/eventtarget.rs" }, { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener\n\tfound: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull", - "line": "8", - "file": "./components/script/dom/eventtarget.rs" + "body": "use statement is not in alphabetical order\nexpected: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener\nfound: dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull", + "position": 8, + "path": "./components/script/dom/eventtarget.rs" }, { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods\n\tfound: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener", - "line": "9", - "file": "./components/script/dom/eventtarget.rs" + "body": "use statement is not in alphabetical order\nexpected: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods\nfound: dom::bindings::codegen::Bindings::EventListenerBinding::EventListener", + "position": 9, + "path": "./components/script/dom/eventtarget.rs" }, { - "comment": "use statement is not in alphabetical order\n\texpected: dom::bindings::conversions::get_dom_class\n\tfound: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods", - "line": "10", - "file": "./components/script/dom/eventtarget.rs" + "body": "use statement is not in alphabetical order\nexpected: dom::bindings::conversions::get_dom_class\nfound: dom::bindings::codegen::Bindings::EventTargetBinding::EventTargetMethods", + "position": 10, + "path": "./components/script/dom/eventtarget.rs" }, { - "comment": "use statement is not in alphabetical order\n\texpected: dom::browsercontext\n\tfound: dom::eventtarget::EventTargetTypeId", - "line": "17", - "file": 
"./components/script/dom/bindings/utils.rs" + "body": "use statement is not in alphabetical order\nexpected: dom::browsercontext\nfound: dom::eventtarget::EventTargetTypeId", + "position": 17, + "path": "./components/script/dom/bindings/utils.rs" }, { - "comment": "use statement is not in alphabetical order\n\texpected: dom::eventtarget::EventTargetTypeId\n\tfound: dom::browsercontext", - "line": "18", - "file": "./components/script/dom/bindings/utils.rs" + "body": "use statement is not in alphabetical order\nexpected: dom::eventtarget::EventTargetTypeId\nfound: dom::browsercontext", + "position": 18, + "path": "./components/script/dom/bindings/utils.rs" } ] @@ -133,15 +133,15 @@ def info(self): def read(self): return "content" + faux_request = FakeRequest() base64_mock.standard_b64encode = Mock(return_value="User:Token") - urllib_mock.Request = Mock(return_value=FakeRequest()) + urllib_mock.Request = Mock(return_value=faux_request) urllib_mock.urlopen = Mock(return_value=FakeResponse(FakeHeader())) gzip_mock.GzipFile = Mock(return_value=FakeResponse(FakeHeader())) gh_provider = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", self.owner, self.repo) - gh_provider.api_req("GET", "https://api.github.com/repos/servo/servo/contibutors", data={"test":"data"}, media_type="Test Media") - - header = FakeHeader() - print header.get('Content-Encoding') + gh_provider.api_req("GET", "https://api.github.com/repos/servo/servo/contributors", data={"test":"data"}, media_type="Test Media") + urllib_mock.Request.assert_called_with("https://api.github.com/repos/servo/servo/contributors", json.dumps({"test":"data"}), {'Content-Type':'application/json'}) + urllib_mock.urlopen.assert_called_with(faux_request) def test_parse_header_links(self): @@ -187,7 +187,7 @@ def test_post_review_comment(self): body = "Revmoe extra newline" self.gh_provider.post_review_comment(pr_num, commit_id, path, pos, body) - self.gh_provider.api_req.assert_called_with("POST", GithubApiProvider.post_review_comment_url % (self.owner, self.repo, pr_num), {"body": body, "commit_id":commit_id, "path":path, "position":pos}) + self.gh_provider.api_req.assert_called_with("POST", GithubApiProvider.review_comment_url % (self.owner, self.repo, pr_num), {"body": body, "commit_id":commit_id, "path":path, "position":pos}) def test_add_label(self): @@ -287,9 +287,9 @@ def setUp(self): self.single_log = open('resources/single-line-comment.log').read() self.expected_single_errors = [ { - 'comment': 'missing space before {', - 'line': '49', - 'file': './components/plugins/lints/sorter.rs' + 'body': 'missing space before {', + 'position': 49, + 'path': './components/plugins/lints/sorter.rs' } ] @@ -325,6 +325,10 @@ def get_pull_request_number(self, build_data): class ErrorParserDouble(): + path_key = ServoErrorLogParser.path_key + body_key = ServoErrorLogParser.body_key + position_key = ServoErrorLogParser.position_key + def parse_log(self, log): return error_sample @@ -332,24 +336,35 @@ def parse_log(self, log): travis_dbl = TravisDouble() error_parser_dbl = ErrorParserDouble() self.github = GithubApiProvider("jdm", "a453b3923e893f0383cd2893f...", "servo", "servo") - self.github.post_review_comment = Mock(name="post_review_comment") + self.github.post_review_comment = Mock() + self.github.get_review_comments = Mock(return_value=json.loads(open('resources/review_comments.json').read())) self.payload_handler = TravisPayloadHandler(self.github, travis_dbl, error_parser_dbl) + + def test_delete_dict_matches(self): + subject = 
[{"body":"Hello","position":1,"path":"/hello"}, {"body":"Goodbye","position":1,"path":"/goodbye"}] + test = [{"body":"Hello","position":1,"path":"/hello"}] + expected = [{"body":"Goodbye","position":1,"path":"/goodbye"}] + + self.assertEquals(expected, self.payload_handler._delete_existing_comments(subject, test)) + + def test_handle_payload(self): payload = json.loads(open('resources/test_travis_payload.json').read()) self.payload_handler.handle_payload(payload) err_msg = TravisPayloadHandler.msg_template calls = [ - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[0]['file'], error_sample[0]['line'], error_sample[0]['comment']), error_sample[0]['file'], error_sample[0]['line']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[1]['file'], error_sample[1]['line'], error_sample[1]['comment']), error_sample[1]['file'], error_sample[1]['line']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[2]['file'], error_sample[2]['line'], error_sample[2]['comment']), error_sample[2]['file'], error_sample[2]['line']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[3]['file'], error_sample[3]['line'], error_sample[3]['comment']), error_sample[3]['file'], error_sample[3]['line']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[4]['file'], error_sample[4]['line'], error_sample[4]['comment']), error_sample[4]['file'], error_sample[4]['line']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[5]['file'], error_sample[5]['line'], error_sample[5]['comment']), error_sample[5]['file'], error_sample[5]['line']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[0]['path'], error_sample[0]['position'], error_sample[0]['body']), error_sample[0]['path'], error_sample[0]['position']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[1]['path'], error_sample[1]['position'], error_sample[1]['body']), error_sample[1]['path'], error_sample[1]['position']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[2]['path'], error_sample[2]['position'], error_sample[2]['body']), error_sample[2]['path'], error_sample[2]['position']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[3]['path'], error_sample[3]['position'], error_sample[3]['body']), error_sample[3]['path'], error_sample[3]['position']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[4]['path'], error_sample[4]['position'], error_sample[4]['body']), error_sample[4]['path'], error_sample[4]['position']), + call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[5]['path'], error_sample[5]['position'], error_sample[5]['body']), error_sample[5]['path'], error_sample[5]['position']), ] - self.github.post_review_comment.assert_has_calls(calls) + self.github.post_review_comment.assert_called_with(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[3]['path'], error_sample[3]['position'], error_sample[3]['body']), error_sample[3]['path'], error_sample[3]['position']) + class TestGithubPayloadHandler(unittest.TestCase): def setUp(self): From 8b7c65dc5ee6acfa8d276b956d116c55b0f9c60e Mon Sep 17 00:00:00 2001 From: erneyja Date: Sun, 27 Sep 2015 09:54:29 -0400 Subject: [PATCH 7/7] Removed tests for internal code --- test.py | 30 +++++++----------------------- 1 file 
changed, 7 insertions(+), 23 deletions(-) diff --git a/test.py b/test.py index 074242a..52c5a18 100644 --- a/test.py +++ b/test.py @@ -342,30 +342,13 @@ def parse_log(self, log): self.payload_handler = TravisPayloadHandler(self.github, travis_dbl, error_parser_dbl) - def test_delete_dict_matches(self): - subject = [{"body":"Hello","position":1,"path":"/hello"}, {"body":"Goodbye","position":1,"path":"/goodbye"}] - test = [{"body":"Hello","position":1,"path":"/hello"}] - expected = [{"body":"Goodbye","position":1,"path":"/goodbye"}] - - self.assertEquals(expected, self.payload_handler._delete_existing_comments(subject, test)) - - def test_handle_payload(self): payload = json.loads(open('resources/test_travis_payload.json').read()) self.payload_handler.handle_payload(payload) err_msg = TravisPayloadHandler.msg_template - calls = [ - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[0]['path'], error_sample[0]['position'], error_sample[0]['body']), error_sample[0]['path'], error_sample[0]['position']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[1]['path'], error_sample[1]['position'], error_sample[1]['body']), error_sample[1]['path'], error_sample[1]['position']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[2]['path'], error_sample[2]['position'], error_sample[2]['body']), error_sample[2]['path'], error_sample[2]['position']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[3]['path'], error_sample[3]['position'], error_sample[3]['body']), error_sample[3]['path'], error_sample[3]['position']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[4]['path'], error_sample[4]['position'], error_sample[4]['body']), error_sample[4]['path'], error_sample[4]['position']), - call(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[5]['path'], error_sample[5]['position'], error_sample[5]['body']), error_sample[5]['path'], error_sample[5]['position']), - ] self.github.post_review_comment.assert_called_with(1, "9b6313fd5ab92de5a3fd9f13f8421a929b2a8ef6", err_msg.format(error_sample[3]['path'], error_sample[3]['position'], error_sample[3]['body']), error_sample[3]['path'], error_sample[3]['position']) - class TestGithubPayloadHandler(unittest.TestCase): def setUp(self): self.github_user = "jdm" @@ -469,7 +452,8 @@ def test_new_comment_merge_conflicts(self): self.github.add_label.assert_called_with("S-needs-rebase", issue_num) - def test_new_pr_no_msg(self): + @patch('payloadhandler.random') + def test_new_pr_no_msg(self, random_mock): self.github.is_new_contributor = Mock(return_value=True) self.github.post_comment = Mock() self.github.get_diff = Mock(return_value="") @@ -480,12 +464,12 @@ def test_new_pr_no_msg(self): payload = {"pull_request":{"user":{"login":"jdm"},"diff_url":diff_url}} issue_num = 1 rand_val = 'jdm' + random_mock.choice.return_value = rand_val - with patch('payloadhandler.random.choice', return_value=rand_val) as mock_random: - pl_handler.new_pr(issue_num, payload) - pl_handler.manage_pr_state.assert_called_with(issue_num, payload) - self.github.post_comment.assert_called_with(GithubPayloadHandler.welcome_msg % rand_val, issue_num) - self.github.get_diff.assert_called_with(diff_url) + pl_handler.new_pr(issue_num, payload) + pl_handler.manage_pr_state.assert_called_with(issue_num, payload) + self.github.post_comment.assert_called_with(GithubPayloadHandler.welcome_msg % rand_val, issue_num) + 
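With the expected-calls list removed, the test above only verifies the arguments of the final post_review_comment call, because assert_called_with looks at the most recent invocation alone. If checking every posted comment becomes useful again, mock's assert_has_calls covers it; a small self-contained illustration with invented values:

    from mock import Mock, call

    post_review_comment = Mock()
    post_review_comment(1, "abc123", "first comment", "a.rs", 7)
    post_review_comment(1, "abc123", "second comment", "b.rs", 9)

    # assert_called_with only sees the last call ...
    post_review_comment.assert_called_with(1, "abc123", "second comment", "b.rs", 9)

    # ... while assert_has_calls checks that the whole sequence occurred.
    post_review_comment.assert_has_calls([
        call(1, "abc123", "first comment", "a.rs", 7),
        call(1, "abc123", "second comment", "b.rs", 9),
    ])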
self.github.get_diff.assert_called_with(diff_url) def test_new_pr_unsafe_msg(self):
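The final hunk trades the patch context manager for a @patch decorator, so payloadhandler's random module is replaced for the whole test body and the stubbed choice value is configured before the code under test runs. The decorator pattern in miniature, against a throwaway function rather than the real handler (the patch target string depends on where the name is looked up, here __main__):

    import random
    import unittest
    from mock import patch

    def pick_reviewer(candidates):
        # Stand-in for code under test that calls random.choice.
        return random.choice(candidates)

    class PickReviewerTest(unittest.TestCase):
        @patch('__main__.random')
        def test_pick_reviewer(self, random_mock):
            random_mock.choice.return_value = 'jdm'
            self.assertEqual('jdm', pick_reviewer(['jdm', 'larsberg']))
            random_mock.choice.assert_called_with(['jdm', 'larsberg'])

    if __name__ == '__main__':
        unittest.main()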