
Commit 43fced5

lucasmouranorvig authored and committed

Fix flake8 for test files (aimacode#303)

* Add flake8 config file
* Fix flake8 for test files
1 parent e76b886 commit 43fced5

12 files changed: +108 -77 lines changed

.flake8

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length = 100
+ignore = E121,E123,E126,E221,E222,E225,E226,E242,E701,E702,E704,E731,W503,F405
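
Note (editorial gloss, not part of the commit): max-line-length relaxes the PEP 8 default of 79 to 100, and the ignore list silences mostly cosmetic checks (operator spacing, one-line statements, lambda assignment, star-import name resolution) while leaving active the rules the rest of this commit fixes by hand. A minimal sketch of the split, assuming flake8 picks this file up from the repository root:

    total=1+2     # passes under this config: E225/E226 (operator spacing) are ignored
    pair = [1,2]  # still flagged: E231 (missing whitespace after ',') is not ignored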

tests/test_csp.py

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 import pytest
-from csp import * #noqa
+from csp import *  # noqa
 
 
 def test_csp_assign():
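
Note: the `# noqa` marker suppresses pyflakes' F403 ("star import used") on this line; flake8 honors the marker with or without the space, so adding it is a consistency fix that matches PEP 8 inline-comment style. The companion warning F405 ("may be undefined, or defined from star imports"), which would otherwise fire on every star-imported name used below, is ignored globally in .flake8 instead. Illustrative sketch:

    from csp import *  # noqa  -- silences F403 on this line only
    assert backtracking_search is not None  # F405 here is covered by the global ignore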

tests/test_grid.py

Lines changed: 1 addition & 0 deletions
@@ -17,5 +17,6 @@ def test_distance2():
 def test_vector_clip():
     assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9)
 
+
 if __name__ == '__main__':
     pytest.main()
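
Note: the single added blank line is a blank-line-count fix that recurs in most files below: E302 requires two blank lines before a top-level def, and E305 requires two blank lines between the last function body and following module-level code such as the __main__ guard. The pattern:

    def test_vector_clip():
        assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9)


    if __name__ == '__main__':  # two blank lines above satisfy E305
        pytest.main()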

tests/test_learning.py

Lines changed: 7 additions & 5 deletions
@@ -1,12 +1,11 @@
-import pytest
 from learning import parse_csv, weighted_mode, weighted_replicate, DataSet, \
     PluralityLearner, NaiveBayesLearner, NearestNeighborLearner
 from utils import DataFile
 
 
 def test_parse_csv():
     Iris = DataFile('iris.csv').read()
-    assert parse_csv(Iris)[0] == [5.1,3.5,1.4,0.2,'setosa']
+    assert parse_csv(Iris)[0] == [5.1, 3.5, 1.4, 0.2, 'setosa']
 
 
 def test_weighted_mode():
@@ -16,20 +15,23 @@ def test_weighted_mode():
 def test_weighted_replicate():
     assert weighted_replicate('ABC', [1, 2, 1], 4) == ['A', 'B', 'B', 'C']
 
+
 def test_plurality_learner():
     zoo = DataSet(name="zoo")
 
     pL = PluralityLearner(zoo)
     assert pL([]) == "mammal"
 
+
 def test_naive_bayes():
     iris = DataSet(name="iris")
 
     nB = NaiveBayesLearner(iris)
-    assert nB([5,3,1,0.1]) == "setosa"
+    assert nB([5, 3, 1, 0.1]) == "setosa"
+
 
 def test_k_nearest_neighbors():
     iris = DataSet(name="iris")
 
-    kNN = NearestNeighborLearner(iris,k=3)
-    assert kNN([5,3,1,0.1]) == "setosa"
+    kNN = NearestNeighborLearner(iris, k=3)
+    assert kNN([5, 3, 1, 0.1]) == "setosa"
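
Note: two rule families here. Dropping `import pytest` clears F401 (imported but unused), since nothing in this file references pytest directly. Everything else is E231 (missing whitespace after ',') in list literals and call arguments; the keyword argument deliberately stays tight, because E251 flags spaces around '=' in a call:

    kNN = NearestNeighborLearner(iris, k=3)  # commas spaced (E231 fixed); 'k=3' unspaced (E251 clean)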

tests/test_logic.py

Lines changed: 7 additions & 7 deletions
@@ -1,6 +1,6 @@
 import pytest
-from logic import *
-from utils import expr_handle_infix_ops, count
+from logic import *  # noqa
+from utils import expr_handle_infix_ops, count, Symbol
 
 
 def test_expr():
@@ -56,10 +56,10 @@ def test_KB_wumpus():
     assert kb_wumpus.ask(~P[1, 2]) == {}
 
     # Statement: There is a pit in [2,2].
-    assert kb_wumpus.ask(P[2, 2]) == False
+    assert kb_wumpus.ask(P[2, 2]) is False
 
     # Statement: There is a pit in [3,1].
-    assert kb_wumpus.ask(P[3, 1]) == False
+    assert kb_wumpus.ask(P[3, 1]) is False
 
     # Statement: Neither [1,2] nor [2,1] contains a pit.
     assert kb_wumpus.ask(~P[1, 2] & ~P[2, 1]) == {}
@@ -112,7 +112,7 @@ def test_dpll():
             & (~D | ~F) & (B | ~C | D) & (A | ~E | F) & (~A | E | D))
             == {B: False, C: True, A: True, F: False, D: True, E: False})
     assert dpll_satisfiable(A & ~B) == {A: True, B: False}
-    assert dpll_satisfiable(P & ~P) == False
+    assert dpll_satisfiable(P & ~P) is False
 
 
 def test_unify():
@@ -159,7 +159,7 @@ def test_move_not_inwards():
 def test_to_cnf():
     assert (repr(to_cnf(wumpus_world_inference & ~expr('~P12'))) ==
             "((~P12 | B11) & (~P21 | B11) & (P12 | P21 | ~B11) & ~B11 & P12)")
-    assert repr(to_cnf((P&Q) | (~P & ~Q))) == '((~P | P) & (~Q | P) & (~P | Q) & (~Q | Q))'
+    assert repr(to_cnf((P & Q) | (~P & ~Q))) == '((~P | P) & (~Q | P) & (~P | Q) & (~Q | Q))'
     assert repr(to_cnf("B <=> (P1 | P2)")) == '((~P1 | B) & (~P2 | B) & (P1 | P2 | ~B))'
     assert repr(to_cnf("a | (b & c) | d")) == '((b | a | d) & (c | a | d))'
     assert repr(to_cnf("A & (B | (D & E))")) == '(A & (D | B) & (E | B))'
@@ -169,7 +169,7 @@ def test_to_cnf():
 def test_standardize_variables():
     e = expr('F(a, b, c) & G(c, A, 23)')
     assert len(variables(standardize_variables(e))) == 3
-    #assert variables(e).intersection(variables(standardize_variables(e))) == {}
+    # assert variables(e).intersection(variables(standardize_variables(e))) == {}
     assert is_variable(standardize_variables(expr('x')))
 
 
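
Note: besides the `# noqa` star import and the now-explicit `Symbol` import, the substantive change is `== False` becoming `is False`, which satisfies E712 (comparison to False). Identity is also the sharper test: ask() and dpll_satisfiable() return either a substitution dict, where the empty dict {} means "proved, with no bindings", or the literal False, and a bare truthiness check would conflate the falsy {} with the False sentinel:

    assert kb_wumpus.ask(~P[1, 2]) == {}    # entailed, empty substitution (a positive answer)
    assert kb_wumpus.ask(P[2, 2]) is False  # not entailed: identity with the sentinel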

tests/test_mdp.py

Lines changed: 16 additions & 14 deletions
@@ -1,25 +1,27 @@
-import pytest
 from mdp import *  # noqa
 
 
 def test_value_iteration():
-    assert value_iteration(sequential_decision_environment, .01) == {(3, 2): 1.0, (3, 1): -1.0,
-                                                                     (3, 0): 0.12958868267972745, (0, 1): 0.39810203830605462,
-                                                                     (0, 2): 0.50928545646220924, (1, 0): 0.25348746162470537,
-                                                                     (0, 0): 0.29543540628363629, (1, 2): 0.64958064617168676,
-                                                                     (2, 0): 0.34461306281476806, (2, 1): 0.48643676237737926,
-                                                                     (2, 2): 0.79536093684710951}
+    assert value_iteration(sequential_decision_environment, .01) == {
+        (3, 2): 1.0, (3, 1): -1.0,
+        (3, 0): 0.12958868267972745, (0, 1): 0.39810203830605462,
+        (0, 2): 0.50928545646220924, (1, 0): 0.25348746162470537,
+        (0, 0): 0.29543540628363629, (1, 2): 0.64958064617168676,
+        (2, 0): 0.34461306281476806, (2, 1): 0.48643676237737926,
+        (2, 2): 0.79536093684710951}
 
 
 def test_policy_iteration():
-    assert policy_iteration(sequential_decision_environment) == {(0, 0): (0, 1), (0, 1): (0, 1), (0, 2): (1, 0),
-                                                                 (1, 0): (1, 0), (1, 2): (1, 0),
-                                                                 (2, 0): (0, 1), (2, 1): (0, 1), (2, 2): (1, 0),
-                                                                 (3, 0): (-1, 0), (3, 1): None, (3, 2): None}
+    assert policy_iteration(sequential_decision_environment) == {
+        (0, 0): (0, 1), (0, 1): (0, 1), (0, 2): (1, 0),
+        (1, 0): (1, 0), (1, 2): (1, 0), (2, 0): (0, 1),
+        (2, 1): (0, 1), (2, 2): (1, 0), (3, 0): (-1, 0),
+        (3, 1): None, (3, 2): None}
 
 
 def test_best_policy():
-    pi = best_policy(sequential_decision_environment, value_iteration(sequential_decision_environment, .01))
+    pi = best_policy(sequential_decision_environment,
+                     value_iteration(sequential_decision_environment, .01))
     assert sequential_decision_environment.to_arrows(pi) == [['>', '>', '>', '.'],
-                                                             ['^', None, '^', '.'],
-                                                             ['^', '>', '^', '<']]
+                                                            ['^', None, '^', '.'],
+                                                            ['^', '>', '^', '<']]
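
Note: the mdp edits are E501 (line too long) fixes for the new 100-character limit, plus one more unused-import (F401) removal. The rewrap pattern is to break immediately after the opening brace and continue with a fixed hanging indent, instead of aligning every continuation under the opening delimiter, which is what had pushed the dict entries past column 100:

    assert value_iteration(sequential_decision_environment, .01) == {
        (3, 2): 1.0, (3, 1): -1.0,
        # ... remaining state values elided in this sketch ...
        (2, 2): 0.79536093684710951}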

tests/test_nlp.py

Lines changed: 38 additions & 26 deletions
@@ -1,12 +1,13 @@
 import pytest
 import nlp
-from nlp import loadPageHTML, stripRawHTML, determineInlinks, findOutlinks, onlyWikipediaURLS
+from nlp import loadPageHTML, stripRawHTML, findOutlinks, onlyWikipediaURLS
 from nlp import expand_pages, relevant_pages, normalize, ConvergenceDetector, getInlinks
-from nlp import getOutlinks, Page, HITS
+from nlp import getOutlinks, Page
 from nlp import Rules, Lexicon
 # Clumsy imports because we want to access certain nlp.py globals explicitly, because
 # they are accessed by function's within nlp.py
 
+
 def test_rules():
     assert Rules(A="B C | D E") == {'A': [['B', 'C'], ['D', 'E']]}
 
@@ -27,18 +28,18 @@ def test_lexicon():
            href="/wiki/TestLiving" href="/wiki/TestMan" >"""
 testHTML2 = "Nothing"
 
-pA = Page("A", 1, 6, ["B","C","E"],["D"])
-pB = Page("B", 2, 5, ["E"],["A","C","D"])
-pC = Page("C", 3, 4, ["B","E"],["A","D"])
-pD = Page("D", 4, 3, ["A","B","C","E"],[])
-pE = Page("E", 5, 2, [],["A","B","C","D","F"])
-pF = Page("F", 6, 1, ["E"],[])
-pageDict = {pA.address:pA,pB.address:pB,pC.address:pC,
-            pD.address:pD,pE.address:pE,pF.address:pF}
+pA = Page("A", 1, 6, ["B", "C", "E"], ["D"])
+pB = Page("B", 2, 5, ["E"], ["A", "C", "D"])
+pC = Page("C", 3, 4, ["B", "E"], ["A", "D"])
+pD = Page("D", 4, 3, ["A", "B", "C", "E"], [])
+pE = Page("E", 5, 2, [], ["A", "B", "C", "D", "F"])
+pF = Page("F", 6, 1, ["E"], [])
+pageDict = {pA.address: pA, pB.address: pB, pC.address: pC,
+            pD.address: pD, pE.address: pE, pF.address: pF}
 nlp.pagesIndex = pageDict
-nlp.pagesContent ={pA.address:testHTML,pB.address:testHTML2,
-                   pC.address:testHTML,pD.address:testHTML2,
-                   pE.address:testHTML,pF.address:testHTML2}
+nlp.pagesContent ={pA.address: testHTML, pB.address: testHTML2,
+                   pC.address: testHTML, pD.address: testHTML2,
+                   pE.address: testHTML, pF.address: testHTML2}
 
 # This test takes a long time (> 60 secs)
 # def test_loadPageHTML():
@@ -50,17 +51,20 @@ def test_lexicon():
 #     assert all(x in loadedPages for x in fullURLs)
 #     assert all(loadedPages.get(key,"") != "" for key in addresses)
 
+
 def test_stripRawHTML():
     addr = "https://en.wikipedia.org/wiki/Ethics"
     aPage = loadPageHTML([addr])
     someHTML = aPage[addr]
     strippedHTML = stripRawHTML(someHTML)
     assert "<head>" not in strippedHTML and "</head>" not in strippedHTML
 
+
 def test_determineInlinks():
     # TODO
     assert True
 
+
 def test_findOutlinks_wiki():
     testPage = pageDict[pA.address]
     outlinks = findOutlinks(testPage, handleURLs=onlyWikipediaURLS)
@@ -70,35 +74,39 @@ def test_findOutlinks_wiki():
 # ______________________________________________________________________________
 # HITS Helper Functions
 
+
 def test_expand_pages():
     pages = {k: pageDict[k] for k in ('F')}
-    pagesTwo = {k: pageDict[k] for k in ('A','E')}
+    pagesTwo = {k: pageDict[k] for k in ('A', 'E')}
     expanded_pages = expand_pages(pages)
-    assert all(x in expanded_pages for x in ['F','E'])
-    assert all(x not in expanded_pages for x in ['A','B','C','D'])
+    assert all(x in expanded_pages for x in ['F', 'E'])
+    assert all(x not in expanded_pages for x in ['A', 'B', 'C', 'D'])
     expanded_pages = expand_pages(pagesTwo)
     print(expanded_pages)
-    assert all(x in expanded_pages for x in ['A','B','C','D','E','F'])
+    assert all(x in expanded_pages for x in ['A', 'B', 'C', 'D', 'E', 'F'])
+
 
 def test_relevant_pages():
     pages = relevant_pages("male")
-    assert all((x in pages.keys()) for x in ['A','C','E'])
-    assert all((x not in pages) for x in ['B','D','F'])
+    assert all((x in pages.keys()) for x in ['A', 'C', 'E'])
+    assert all((x not in pages) for x in ['B', 'D', 'F'])
+
 
 def test_normalize():
-    normalize( pageDict )
-    print(page.hub for addr,page in nlp.pagesIndex.items())
-    expected_hub = [1/91,2/91,3/91,4/91,5/91,6/91] # Works only for sample data above
+    normalize(pageDict)
+    print(page.hub for addr, page in nlp.pagesIndex.items())
+    expected_hub = [1/91, 2/91, 3/91, 4/91, 5/91, 6/91]  # Works only for sample data above
     expected_auth = list(reversed(expected_hub))
     assert len(expected_hub) == len(expected_auth) == len(nlp.pagesIndex)
-    assert expected_hub == [page.hub for addr,page in sorted(nlp.pagesIndex.items())]
-    assert expected_auth == [page.authority for addr,page in sorted(nlp.pagesIndex.items())]
+    assert expected_hub == [page.hub for addr, page in sorted(nlp.pagesIndex.items())]
+    assert expected_auth == [page.authority for addr, page in sorted(nlp.pagesIndex.items())]
+
 
 def test_detectConvergence():
     # run detectConvergence once to initialise history
     convergence = ConvergenceDetector()
     convergence()
-    assert convergence() # values haven't changed so should return True
+    assert convergence()  # values haven't changed so should return True
     # make tiny increase/decrease to all values
     for _, page in nlp.pagesIndex.items():
         page.hub += 0.0003
@@ -111,17 +119,21 @@ def test_detectConvergence():
     # retest function with values. Should now return false
     assert not convergence()
 
+
 def test_getInlinks():
     inlnks = getInlinks(pageDict['A'])
     assert sorted([page.address for page in inlnks]) == pageDict['A'].inlinks
 
+
def test_getOutlinks():
     outlnks = getOutlinks(pageDict['A'])
     assert sorted([page.address for page in outlnks]) == pageDict['A'].outlinks
 
+
 def test_HITS():
     # TODO
-    assert True # leave for now
+    assert True  # leave for now
+
 
 if __name__ == '__main__':
     pytest.main()
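
Note: this file collects most of the rule families in one place: F401 (dropping `determineInlinks` and `HITS` from the imports, since their tests are still TODO stubs and nothing else references them), E231 comma spacing throughout the Page fixtures, E201/E202 padding removed inside `normalize( pageDict )`, two spaces before inline comments, and the E302/E305 two-blank-line rule between top-level defs:

    normalize(pageDict)  # E201/E202: no spaces just inside the parentheses
    pA = Page("A", 1, 6, ["B", "C", "E"], ["D"])  # E231: a space after every comma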

tests/test_planning.py

Lines changed: 11 additions & 9 deletions
@@ -1,14 +1,12 @@
-from planning import *
+from planning import *  # noqa
 from utils import expr
 from logic import FolKB
 
 
 def test_action():
-    precond = [[expr("P(x)"), expr("Q(y, z)")]
-               ,[expr("Q(x)")]]
-    effect = [[expr("Q(x)")]
-              , [expr("P(x)")]]
-    a=Action(expr("A(x,y,z)"),precond, effect)
+    precond = [[expr("P(x)"), expr("Q(y, z)")], [expr("Q(x)")]]
+    effect = [[expr("Q(x)")], [expr("P(x)")]]
+    a=Action(expr("A(x,y,z)"), precond, effect)
     args = [expr("A"), expr("B"), expr("C")]
     assert a.substitute(expr("P(x, z, y)"), args) == expr("P(A, C, B)")
     test_kb = FolKB([expr("P(A)"), expr("Q(B, C)"), expr("R(D)")])
@@ -34,7 +32,8 @@ def test_air_cargo():
         p.act(action)
 
     assert p.goal_test()
-
+
+
 def test_spare_tire():
     p = spare_tire()
     assert p.goal_test() is False
@@ -44,9 +43,10 @@ def test_spare_tire():
 
     for action in solution:
         p.act(action)
-
+
     assert p.goal_test()
 
+
 def test_three_block_tower():
     p = three_block_tower()
     assert p.goal_test() is False
@@ -56,9 +56,10 @@ def test_three_block_tower():
 
     for action in solution:
         p.act(action)
-
+
     assert p.goal_test()
 
+
 def test_have_cake_and_eat_cake_too():
     p = have_cake_and_eat_cake_too()
     assert p.goal_test() is False
@@ -70,6 +71,7 @@ def test_have_cake_and_eat_cake_too():
 
     assert p.goal_test()
 
+
 def test_graph_call():
     pdll = spare_tire()
     negkb = FolKB([expr('At(Flat, Trunk)')])
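
Note: the test_action rewrite folds the leading-comma continuation style into single-line list literals, clearing the comma-spacing (E231) and continuation-indent complaints in one move, while `a=Action(...)` keeps its unspaced '=' because E225 is in the .flake8 ignore list. The paired removed/added blank lines elsewhere in the file are presumably trailing-whitespace fixes (W291/W293), invisible in this view, with extra blank lines added for E302:

    precond = [[expr("P(x)"), expr("Q(y, z)")], [expr("Q(x)")]]  # one line, commas spaced
    a=Action(expr("A(x,y,z)"), precond, effect)                  # E225 ignored by config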

0 commit comments