Commit acf5c06

Add more unittests (GH-61)
1 parent e766eaf commit acf5c06

2 files changed: 254 additions and 2 deletions

miss_islington/backport_pr.py

Lines changed: 3 additions & 2 deletions
@@ -21,13 +21,14 @@ async def backport_pr(event, gh, *args, **kwargs):
 
         commit_hash = event.data['pull_request']['merge_commit_sha']
 
-        gh_issue = await gh.getitem(event.data['repository']['issues_url'],
-                                    {'number': f"{event.data['pull_request']['number']}"})
         pr_labels = []
         if event.data['action'] == 'labeled':
             pr_labels = [event.data["label"]]
         else:
+            gh_issue = await gh.getitem(event.data['repository']['issues_url'],
+                                        {'number': f"{event.data['pull_request']['number']}"})
             pr_labels = await gh.getitem(gh_issue['labels_url'])
+
         branches = [label['name'].split()[-1]
                     for label in pr_labels
                     if label['name'].startswith("needs backport to")]
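
The net effect of the change above is that the extra issue lookup only happens when the labels are not already in the webhook payload: a "labeled" event carries the label that triggered it, while a plain "closed" event requires fetching the issue and then its labels. A simplified sketch of the resulting flow, using only names that appear in the diff (the surrounding merged-PR check and error handling omitted):

# Simplified sketch of the label-gathering logic after this commit.
if event.data['action'] == 'labeled':
    # The triggering label is already in the event payload; no API call needed.
    pr_labels = [event.data["label"]]
else:
    # Otherwise look up the issue, then its labels, via the GitHub API.
    gh_issue = await gh.getitem(event.data['repository']['issues_url'],
                                {'number': f"{event.data['pull_request']['number']}"})
    pr_labels = await gh.getitem(gh_issue['labels_url'])

# Backport targets are parsed from "needs backport to X.Y" labels.
branches = [label['name'].split()[-1]
            for label in pr_labels
            if label['name'].startswith("needs backport to")]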

tests/test_backport_pr.py

Lines changed: 251 additions & 0 deletions
@@ -0,0 +1,251 @@
+import os
+
+
+from unittest import mock
+from gidgethub import sansio
+
+os.environ["REDIS_URL"] = "someurl"
+from miss_islington import backport_pr
+
+
+class FakeGH:
+
+    def __init__(self, *, getitem=None, post=None):
+        self._getitem_return = getitem
+        self.getitem_url = None
+        self.getiter_url = None
+        self._post_return = post
+
+    async def getitem(self, url, url_vars={}):
+        self.getitem_url = sansio.format_url(url, url_vars)
+        return self._getitem_return[self.getitem_url]
+
+    async def post(self, url, *, data):
+        self.post_url = url
+        self.post_data = data
+        return self._post_return
+
+
+async def test_unmerged_pr_is_ignored():
+    data = {
+        "action": "closed",
+        "pull_request": {
+            "merged": False,
+        }
+    }
+    event = sansio.Event(data, event='pull_request',
+                         delivery_id='1')
+    gh = FakeGH()
+    await backport_pr.router.dispatch(event, gh)
+    assert gh.getitem_url is None
+
+
+async def test_labeled_on_unmerged_pr_is_ignored():
+    data = {
+        "action": "labeled",
+        "pull_request": {
+            "merged": False,
+        }
+    }
+    event = sansio.Event(data, event='pull_request',
+                         delivery_id='1')
+    gh = FakeGH()
+    await backport_pr.router.dispatch(event, gh)
+    assert gh.getitem_url is None
+
+
+async def test_labeled_on_merged_pr_no_backport_label():
+    data = {
+        "action": "labeled",
+        "pull_request": {
+            "merged": True,
+            "number": 1,
+            "merged_by": {
+                "login": "Mariatta"
+            },
+            "user": {
+                "login": "Mariatta"
+            },
+            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
+        },
+        "repository": {
+            "issues_url": "https://api.github.com/repos/python/cpython/issues{/number}"
+        },
+        "label": {
+            "name": "CLA signed",
+        },
+    }
+    event = sansio.Event(data, event='pull_request',
+                         delivery_id='1')
+
+    gh = FakeGH()
+    await backport_pr.router.dispatch(event, gh)
+    assert not hasattr(gh, 'post_data')
+    assert not hasattr(gh, 'post_url')
+
+
+async def test_merged_pr_no_backport_label():
+    data = {
+        "action": "closed",
+        "pull_request": {
+            "merged": True,
+            "number": 1,
+            "merged_by": {
+                "login": "Mariatta"
+            },
+            "user": {
+                "login": "Mariatta"
+            },
+            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
+        },
+        "repository": {
+            "issues_url": "https://api.github.com/repos/python/cpython/issues/1"
+        },
+    }
+    event = sansio.Event(data, event='pull_request',
+                         delivery_id='1')
+
+    getitem = {
+        "https://api.github.com/repos/python/cpython/issues/1": {
+            "labels_url": "https://api.github.com/repos/python/cpython/issues/1/labels{/name}"},
+        "https://api.github.com/repos/python/cpython/issues/1/labels":
+            [
+                {"name": "CLA signed", }
+            ]
+
+    }
+
+    gh = FakeGH(getitem=getitem)
+    await backport_pr.router.dispatch(event, gh)
+    assert not hasattr(gh, 'post_data')
+    assert not hasattr(gh, 'post_url')
+
+
+async def test_merged_pr_with_backport_label():
+    data = {
+        "action": "closed",
+        "pull_request": {
+            "merged": True,
+            "number": 1,
+            "merged_by": {
+                "login": "Mariatta"
+            },
+            "user": {
+                "login": "Mariatta"
+            },
+            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
+        },
+        "repository": {
+            "issues_url": "https://api.github.com/repos/python/cpython/issues/1"
+        },
+    }
+    event = sansio.Event(data, event='pull_request',
+                         delivery_id='1')
+
+    getitem = {
+        "https://api.github.com/repos/python/cpython/issues/1": {
+            "labels_url": "https://api.github.com/repos/python/cpython/issues/1/labels{/name}"},
+        "https://api.github.com/repos/python/cpython/issues/1/labels":
+            [
+                {"name": "CLA signed", },
+                {"name": "needs backport to 3.7", }
+            ]
+
+    }
+
+    gh = FakeGH(getitem=getitem)
+    with mock.patch('miss_islington.tasks.backport_task.delay'):
+        await backport_pr.router.dispatch(event, gh)
+    assert "I'm working now to backport this PR to: 3.7" in gh.post_data["body"]
+    assert gh.post_url == '/repos/python/cpython/issues/1/comments'
+
+
+async def test_merged_pr_with_backport_label_thank_pr_author():
+    data = {
+        "action": "closed",
+        "pull_request": {
+            "merged": True,
+            "number": 1,
+            "merged_by": {
+                "login": "Mariatta"
+            },
+            "user": {
+                "login": "gvanrossum"
+            },
+            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
+        },
+        "repository": {
+            "issues_url": "https://api.github.com/repos/python/cpython/issues/1"
+        },
+    }
+    event = sansio.Event(data, event='pull_request',
+                         delivery_id='1')
+
+    getitem = {
+        "https://api.github.com/repos/python/cpython/issues/1": {
+            "labels_url": "https://api.github.com/repos/python/cpython/issues/1/labels{/name}"},
+        "https://api.github.com/repos/python/cpython/issues/1/labels":
+            [
+                {"name": "CLA signed", },
+                {"name": "needs backport to 3.7", }
+            ]
+
+    }
+
+    gh = FakeGH(getitem=getitem)
+    with mock.patch('miss_islington.tasks.backport_task.delay'):
+        await backport_pr.router.dispatch(event, gh)
+    assert "I'm working now to backport this PR to: 3.7" in gh.post_data["body"]
+    assert "Thanks @gvanrossum for the PR" in gh.post_data[
+        "body"]
+    assert gh.post_url == '/repos/python/cpython/issues/1/comments'
+
+
+async def test_easter_egg():
+    data = {
+        "action": "closed",
+        "pull_request": {
+            "merged": True,
+            "number": 1,
+            "merged_by": {
+                "login": "Mariatta"
+            },
+            "user": {
+                "login": "gvanrossum"
+            },
+            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
+        },
+        "repository": {
+            "issues_url": "https://api.github.com/repos/python/cpython/issues/1"
+        },
+    }
+    event = sansio.Event(data, event='pull_request',
+                         delivery_id='1')
+
+    getitem = {
+        "https://api.github.com/repos/python/cpython/issues/1": {
+            "labels_url": "https://api.github.com/repos/python/cpython/issues/1/labels{/name}"},
+        "https://api.github.com/repos/python/cpython/issues/1/labels":
+            [
+                {"name": "CLA signed",},
+                {"name": "needs backport to 3.7",}
+            ]
+
+    }
+
+    gh = FakeGH(getitem=getitem)
+    with mock.patch('miss_islington.tasks.backport_task.delay'), \
+            mock.patch('random.random', return_value=0.1):
+        await backport_pr.router.dispatch(event, gh)
+    assert "I'm working now to backport this PR to: 3.7" in gh.post_data["body"]
+    assert "Thanks @gvanrossum for the PR" in gh.post_data["body"]
+    assert "I'm not a witch" not in gh.post_data["body"]
+    assert gh.post_url == '/repos/python/cpython/issues/1/comments'
+
+    with mock.patch('miss_islington.tasks.backport_task.delay'), \
+            mock.patch('random.random', return_value=0.01):
+        await backport_pr.router.dispatch(event, gh)
+    assert "I'm working now to backport this PR to: 3.7" in gh.post_data["body"]
+    assert "Thanks @gvanrossum for the PR" in gh.post_data["body"]
+    assert "I'm not a witch" in gh.post_data["body"]
+    assert gh.post_url == '/repos/python/cpython/issues/1/comments'
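
In these tests, FakeGH stands in for the gidgethub client the router normally receives: getitem returns canned responses keyed by the URL that sansio.format_url produces, and post simply records the URL and payload it was called with, so the assertions can inspect what the bot would have sent to GitHub. A minimal, self-contained illustration of the same pattern (hypothetical; it mirrors the fixtures above rather than adding anything to the test suite):

# Hypothetical standalone demo of the FakeGH stub used above; the URL and
# fixture values mirror the test data and are not part of the commit.
import asyncio

async def demo():
    gh = FakeGH(getitem={
        "https://api.github.com/repos/python/cpython/issues/1": {
            "labels_url": "https://api.github.com/repos/python/cpython/issues/1/labels{/name}",
        },
    })
    # FakeGH.getitem expands the URI template the way gidgethub would,
    # then looks the expanded URL up in the canned responses.
    issue = await gh.getitem(
        "https://api.github.com/repos/python/cpython/issues{/number}",
        {'number': "1"})
    assert gh.getitem_url == "https://api.github.com/repos/python/cpython/issues/1"
    assert "labels_url" in issue

asyncio.run(demo())

Note that the test functions are plain async def coroutines with no event-loop boilerplate, so running them requires an async-aware test runner (for example pytest with an asyncio plugin); the project's test configuration is not part of this diff, so that setup is assumed rather than shown.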
